Commit

Merge from main
crickman committed Sep 17, 2024
2 parents d3ded0a + 4c00b79 commit 932a31d
Showing 18 changed files with 409 additions and 39 deletions.
148 changes: 131 additions & 17 deletions dotnet/samples/Concepts/Agents/ChatCompletion_FunctionTermination.cs
@@ -38,14 +38,8 @@ public async Task UseAutoFunctionInvocationFilterWithAgentInvocationAsync()
await InvokeAgentAsync("What is the special drink?");
await InvokeAgentAsync("Thank you");

// Display the chat history.
Console.WriteLine("================================");
Console.WriteLine("CHAT HISTORY");
Console.WriteLine("================================");
foreach (ChatMessageContent message in chat)
{
this.WriteAgentChatMessage(message);
}
// Display the entire chat history.
WriteChatHistory(chat);

// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
@@ -91,15 +85,8 @@ public async Task UseAutoFunctionInvocationFilterWithAgentChatAsync()
await InvokeAgentAsync("What is the special drink?");
await InvokeAgentAsync("Thank you");

// Display the chat history.
Console.WriteLine("================================");
Console.WriteLine("CHAT HISTORY");
Console.WriteLine("================================");
ChatMessageContent[] history = await chat.GetChatMessagesAsync().ToArrayAsync();
for (int index = history.Length; index > 0; --index)
{
this.WriteAgentChatMessage(history[index - 1]);
}
// Display the entire chat history.
WriteChatHistory(await chat.GetChatMessagesAsync().ToArrayAsync());

// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
@@ -115,6 +102,133 @@ async Task InvokeAgentAsync(string input)
}
}

[Fact]
public async Task UseAutoFunctionInvocationFilterWithStreamingAgentInvocationAsync()
{
// Define the agent
ChatCompletionAgent agent =
new()
{
Instructions = "Answer questions about the menu.",
Kernel = CreateKernelWithFilter(),
Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
};

KernelPlugin plugin = KernelPluginFactory.CreateFromType<MenuPlugin>();
agent.Kernel.Plugins.Add(plugin);

// Create the chat history to capture the agent interaction.
ChatHistory chat = [];

// Respond to user input, invoking functions where appropriate.
await InvokeAgentAsync("Hello");
await InvokeAgentAsync("What is the special soup?");
await InvokeAgentAsync("What is the special drink?");
await InvokeAgentAsync("Thank you");

// Display the entire chat history.
WriteChatHistory(chat);

// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
ChatMessageContent message = new(AuthorRole.User, input);
chat.Add(message);
this.WriteAgentChatMessage(message);

int historyCount = chat.Count;

bool isFirst = false;
await foreach (StreamingChatMessageContent response in agent.InvokeStreamingAsync(chat))
{
if (string.IsNullOrEmpty(response.Content))
{
continue;
}

if (!isFirst)
{
Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:");
isFirst = true;
}

Console.WriteLine($"\t > streamed: '{response.Content}'");
}

if (historyCount <= chat.Count)
{
for (int index = historyCount; index < chat.Count; index++)
{
this.WriteAgentChatMessage(chat[index]);
}
}
}
}

[Fact]
public async Task UseAutoFunctionInvocationFilterWithStreamingAgentChatAsync()
{
// Define the agent
ChatCompletionAgent agent =
new()
{
Instructions = "Answer questions about the menu.",
Kernel = CreateKernelWithFilter(),
Arguments = new KernelArguments(new OpenAIPromptExecutionSettings() { FunctionChoiceBehavior = FunctionChoiceBehavior.Auto() }),
};

KernelPlugin plugin = KernelPluginFactory.CreateFromType<MenuPlugin>();
agent.Kernel.Plugins.Add(plugin);

// Create a chat for agent interaction.
AgentGroupChat chat = new();

// Respond to user input, invoking functions where appropriate.
await InvokeAgentAsync("Hello");
await InvokeAgentAsync("What is the special soup?");
await InvokeAgentAsync("What is the special drink?");
await InvokeAgentAsync("Thank you");

// Display the entire chat history.
WriteChatHistory(await chat.GetChatMessagesAsync().ToArrayAsync());

// Local function to invoke agent and display the conversation messages.
async Task InvokeAgentAsync(string input)
{
ChatMessageContent message = new(AuthorRole.User, input);
chat.AddChatMessage(message);
this.WriteAgentChatMessage(message);

bool isFirst = false;
await foreach (StreamingChatMessageContent response in chat.InvokeStreamingAsync(agent))
{
if (string.IsNullOrEmpty(response.Content))
{
continue;
}

if (!isFirst)
{
Console.WriteLine($"\n# {response.Role} - {response.AuthorName ?? "*"}:");
isFirst = true;
}

Console.WriteLine($"\t > streamed: '{response.Content}'");
}
}
}

private void WriteChatHistory(IEnumerable<ChatMessageContent> chat)
{
Console.WriteLine("================================");
Console.WriteLine("CHAT HISTORY");
Console.WriteLine("================================");
foreach (ChatMessageContent message in chat)
{
this.WriteAgentChatMessage(message);
}
}

private Kernel CreateKernelWithFilter()
{
IKernelBuilder builder = Kernel.CreateBuilder();
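Note: the remainder of CreateKernelWithFilter is collapsed in this diff. For reference, a minimal sketch of the kind of auto function invocation filter these samples register, assuming it simply terminates after each function call; the type name and body here are hypothetical, not part of this commit.

// Hypothetical sketch, not part of this commit: an auto function invocation
// filter (nested in the sample class) that stops the invocation loop once a
// function has produced a result.
private sealed class AutoInvocationFilterSketch : IAutoFunctionInvocationFilter
{
    public async Task OnAutoFunctionInvocationAsync(
        AutoFunctionInvocationContext context,
        Func<AutoFunctionInvocationContext, Task> next)
    {
        // Run the function (and any remaining filters in the pipeline).
        await next(context);

        // Terminate so the function result is returned directly instead of
        // being sent back to the model for a follow-up completion.
        context.Terminate = true;
    }
}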
104 changes: 104 additions & 0 deletions dotnet/samples/Concepts/FunctionCalling/ContextDependentAdvertising.cs
@@ -0,0 +1,104 @@
// Copyright (c) Microsoft. All rights reserved.

using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;

namespace FunctionCalling;

/// <summary>
/// These samples demonstrate how to advertise functions to the AI model based on context.
/// </summary>
public class ContextDependentAdvertising(ITestOutputHelper output) : BaseTest(output)
{
/// <summary>
/// This sample demonstrates how to advertise functions to the AI model based on the context of the
/// chat history, in this case the game state. For example, until the maze has been created, only the
/// create-maze function is advertised, which prevents the AI model from adding traps or treasures
/// to a maze that does not exist yet.
[Fact]
public async Task AdvertiseFunctionsDependingOnContextPerUserInteractionAsync()
{
Kernel kernel = CreateKernel();

IChatCompletionService chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();

// Track the number of iterations to avoid an infinite loop.
int maxIteration = 10;
int iteration = 0;

// Define the functions for the AI model to call.
var gameUtils = kernel.ImportPluginFromType<GameUtils>();
KernelFunction createMaze = gameUtils["CreateMaze"];
KernelFunction addTraps = gameUtils["AddTrapsToMaze"];
KernelFunction addTreasures = gameUtils["AddTreasuresToMaze"];
KernelFunction playGame = gameUtils["PlayGame"];

ChatHistory chatHistory = [];
chatHistory.AddUserMessage("I would like to play a maze game with a lot of tricky traps and shiny treasures.");

// Loop until the game has started or the max iteration is reached.
while (!chatHistory.Any(item => item.Content?.Contains("Game started.") ?? false) && iteration < maxIteration)
{
List<KernelFunction> functionsToAdvertise = [];

// Decide game state based on chat history.
bool mazeCreated = chatHistory.Any(item => item.Content?.Contains("Maze created.") ?? false);
bool trapsAdded = chatHistory.Any(item => item.Content?.Contains("Traps added to the maze.") ?? false);
bool treasuresAdded = chatHistory.Any(item => item.Content?.Contains("Treasures added to the maze.") ?? false);

// The maze has not been created yet so advertise the create maze function.
if (!mazeCreated)
{
functionsToAdvertise.Add(createMaze);
}
// The maze has been created so advertise the adding traps and treasures functions.
else if (mazeCreated && (!trapsAdded || !treasuresAdded))
{
functionsToAdvertise.Add(addTraps);
functionsToAdvertise.Add(addTreasures);
}
// Both traps and treasures have been added so advertise the play game function.
else if (treasuresAdded && trapsAdded)
{
functionsToAdvertise.Add(playGame);
}

// Provide the functions to the AI model.
OpenAIPromptExecutionSettings settings = new() { FunctionChoiceBehavior = FunctionChoiceBehavior.Required(functionsToAdvertise) };

// Prompt the AI model.
ChatMessageContent result = await chatCompletionService.GetChatMessageContentAsync(chatHistory, settings, kernel);

Console.WriteLine(result);

iteration++;
}
}

private static Kernel CreateKernel()
{
// Create kernel
IKernelBuilder builder = Kernel.CreateBuilder();

builder.AddOpenAIChatCompletion(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);

return builder.Build();
}

private sealed class GameUtils
{
[KernelFunction]
public static string CreateMaze() => "Maze created.";

[KernelFunction]
public static string AddTrapsToMaze() => "Traps added to the maze.";

[KernelFunction]
public static string AddTreasuresToMaze() => "Treasures added to the maze.";

[KernelFunction]
public static string PlayGame() => "Game started.";
}
}
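Why the string checks against chatHistory work: FunctionChoiceBehavior.Required forces the model to call one of the advertised functions, and with auto invocation (the default) the connector appends both the function call and its result to the ChatHistory passed to GetChatMessageContentAsync. Below is a minimal sketch of the settings used above with the autoInvoke parameter spelled out; treat the exact parameter usage as an illustrative assumption, not part of this commit.

// Sketch: Required(...) restricts the model to the advertised functions and
// forces a call; autoInvoke (true by default) executes the function and
// appends the call and its result to the caller's ChatHistory.
OpenAIPromptExecutionSettings settings = new()
{
    FunctionChoiceBehavior = FunctionChoiceBehavior.Required(
        functions: [createMaze],
        autoInvoke: true),
};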
10 changes: 7 additions & 3 deletions dotnet/src/Agents/Core/ChatCompletionAgent.cs
Original file line number Diff line number Diff line change
@@ -118,7 +118,7 @@ public override async IAsyncEnumerable<StreamingChatMessageContent> InvokeStream
StringBuilder builder = new();
await foreach (StreamingChatMessageContent message in messages.ConfigureAwait(false))
{
role ??= message.Role;
role = message.Role;
message.Role ??= AuthorRole.Assistant;
message.AuthorName = this.Name;

@@ -127,8 +127,6 @@ public override async IAsyncEnumerable<StreamingChatMessageContent> InvokeStream
yield return message;
}

chat.Add(new(role ?? AuthorRole.Assistant, builder.ToString()) { AuthorName = this.Name });

// Capture mutated messages related to function calling / tools
for (int messageIndex = messageCount; messageIndex < chat.Count; messageIndex++)
{
@@ -138,6 +136,12 @@ public override async IAsyncEnumerable<StreamingChatMessageContent> InvokeStream

history.Add(message);
}

// Do not duplicate the terminated function result in the history
if (role != AuthorRole.Tool)
{
history.Add(new(role ?? AuthorRole.Assistant, builder.ToString()) { AuthorName = this.Name });
}
}

internal static (IChatCompletionService service, PromptExecutionSettings? executionSettings) GetChatCompletionService(Kernel kernel, KernelArguments? arguments)
@@ -25,6 +25,7 @@ public static RunCreationOptions GenerateOptions(OpenAIAssistantDefinition defin
RunCreationOptions options =
new()
{
AdditionalInstructions = invocationOptions?.AdditionalInstructions ?? definition.ExecutionOptions?.AdditionalInstructions,
InstructionsOverride = overrideInstructions,
MaxCompletionTokens = ResolveExecutionSetting(invocationOptions?.MaxCompletionTokens, definition.ExecutionOptions?.MaxCompletionTokens),
MaxPromptTokens = ResolveExecutionSetting(invocationOptions?.MaxPromptTokens, definition.ExecutionOptions?.MaxPromptTokens),
6 changes: 6 additions & 0 deletions dotnet/src/Agents/OpenAI/OpenAIAssistantExecutionOptions.cs
@@ -11,6 +11,12 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
/// </remarks>
public sealed class OpenAIAssistantExecutionOptions
{
/// <summary>
/// Additional instructions that are appended to the instructions for the run.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? AdditionalInstructions { get; init; }

/// <summary>
/// The maximum number of completion tokens that may be used over the course of the run.
/// </summary>
6 changes: 6 additions & 0 deletions dotnet/src/Agents/OpenAI/OpenAIAssistantInvocationOptions.cs
@@ -18,6 +18,12 @@ public sealed class OpenAIAssistantInvocationOptions
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? ModelName { get; init; }

/// <summary>
/// Additional instructions that are appended to the instructions for the run.
/// </summary>
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public string? AdditionalInstructions { get; init; }

/// <summary>
/// Set if code_interpreter tool is enabled.
/// </summary>
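A brief usage sketch for the new AdditionalInstructions property; the option values below are hypothetical, not part of this commit. The definition-level ExecutionOptions value acts as a default, and the invocation-level value overrides it, as the factory change and the tests below verify.

// Hypothetical usage, not part of this commit.
OpenAIAssistantExecutionOptions executionDefaults = new()
{
    // Applies to every run unless overridden per invocation.
    AdditionalInstructions = "Keep answers under three sentences.",
};

OpenAIAssistantInvocationOptions invocationOptions = new()
{
    // Takes precedence for this invocation only.
    AdditionalInstructions = "Address the user as 'Captain'.",
};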
@@ -23,6 +23,11 @@ public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
new("gpt-anything")
{
Temperature = 0.5F,
ExecutionOptions =
new()
{
AdditionalInstructions = "test",
},
};

// Act
@@ -33,6 +38,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsNullTest()
Assert.Null(options.InstructionsOverride);
Assert.Null(options.Temperature);
Assert.Null(options.NucleusSamplingFactor);
Assert.Equal("test", options.AdditionalInstructions);
Assert.Empty(options.Metadata);
}

@@ -79,13 +85,15 @@ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
ExecutionOptions =
new()
{
AdditionalInstructions = "test1",
TruncationMessageCount = 5,
},
};

OpenAIAssistantInvocationOptions invocationOptions =
new()
{
AdditionalInstructions = "test2",
Temperature = 0.9F,
TruncationMessageCount = 8,
EnableJsonResponse = true,
@@ -98,6 +106,7 @@ public void AssistantRunOptionsFactoryExecutionOptionsOverrideTest()
Assert.NotNull(options);
Assert.Equal(0.9F, options.Temperature);
Assert.Equal(8, options.TruncationStrategy.LastMessages);
Assert.Equal("test2", options.AdditionalInstructions);
Assert.Equal(AssistantResponseFormat.JsonObject, options.ResponseFormat);
Assert.Null(options.NucleusSamplingFactor);
}
@@ -62,6 +62,7 @@ public void VerifyOpenAIAssistantDefinitionAssignment()
ExecutionOptions =
new()
{
AdditionalInstructions = "test instructions",
MaxCompletionTokens = 1000,
MaxPromptTokens = 1000,
ParallelToolCallsEnabled = false,
Expand All @@ -83,6 +84,7 @@ public void VerifyOpenAIAssistantDefinitionAssignment()
Assert.Equal(2, definition.Temperature);
Assert.Equal(0, definition.TopP);
Assert.NotNull(definition.ExecutionOptions);
Assert.Equal("test instructions", definition.ExecutionOptions.AdditionalInstructions);
Assert.Equal(1000, definition.ExecutionOptions.MaxCompletionTokens);
Assert.Equal(1000, definition.ExecutionOptions.MaxPromptTokens);
Assert.Equal(12, definition.ExecutionOptions.TruncationMessageCount);