diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index 374f1b3e5ac8..e2455f84be12 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -1,8 +1,9 @@
---
name: Bug report
about: Create a report to help us improve
-title: ''
-labels: ''
+title: 'Bug: '
+labels: ["bug"]
+projects: ["semantic-kernel"]
assignees: ''
---
diff --git a/.github/ISSUE_TEMPLATE/feature_graduation.md b/.github/ISSUE_TEMPLATE/feature_graduation.md
new file mode 100644
index 000000000000..37d207ea1888
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_graduation.md
@@ -0,0 +1,29 @@
+---
+name: Feature graduation
+about: Plan the graduation of an experimental feature
+title: 'Graduate XXX feature'
+labels: ["feature_graduation"]
+projects: ["semantic-kernel"]
+assignees: ''
+
+---
+
+Checklist to be completed when graduating an experimental feature
+
+- [ ] Notify PMs and EMs that the feature is ready for graduation
+- [ ] Contact PM for list of sample use cases
+- [ ] Verify there are sample implementations for each of the use cases
+- [ ] Verify telemetry and logging are complete
+- [ ] Verify API docs are complete and arrange to have them published
+- [ ] Make appropriate updates to Learn docs
+- [ ] Make appropriate updates to Concept samples
+- [ ] Make appropriate updates to Blog posts
+- [ ] Verify there are no serious open Issues
+- [ ] Update table in EXPERIMENTS.md
+- [ ] Remove SKEXP flag from the experimental code
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
index 2d490077748e..3289535f2120 100644
--- a/.github/ISSUE_TEMPLATE/feature_request.md
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -1,8 +1,9 @@
---
name: Feature request
about: Suggest an idea for this project
-title: ''
+title: 'New Feature: '
labels: ''
+projects: ["semantic-kernel"]
assignees: ''
---
diff --git a/.github/_typos.toml b/.github/_typos.toml
index a56c70770c47..917745e1ae83 100644
--- a/.github/_typos.toml
+++ b/.github/_typos.toml
@@ -15,6 +15,7 @@ extend-exclude = [
"CodeTokenizerTests.cs",
"test_code_tokenizer.py",
"*response.json",
+ "test_content.txt",
]
[default.extend-words]
@@ -28,6 +29,7 @@ ans = "ans" # Short for answers
arange = "arange" # Method in Python numpy package
prompty = "prompty" # prompty is a format name.
ist = "ist" # German language
+dall = "dall" # OpenAI model name
[default.extend-identifiers]
ags = "ags" # Azure Graph Service
diff --git a/.github/workflows/python-integration-tests.yml b/.github/workflows/python-integration-tests.yml
index b02fc8eae1ed..20516a4164e3 100644
--- a/.github/workflows/python-integration-tests.yml
+++ b/.github/workflows/python-integration-tests.yml
@@ -92,6 +92,10 @@ jobs:
AZURE_AI_SEARCH_API_KEY: ${{secrets.AZURE_AI_SEARCH_API_KEY}}
AZURE_AI_SEARCH_ENDPOINT: ${{secrets.AZURE_AI_SEARCH_ENDPOINT}}
MONGODB_ATLAS_CONNECTION_STRING: ${{secrets.MONGODB_ATLAS_CONNECTION_STRING}}
+ AZURE_KEY_VAULT_ENDPOINT: ${{secrets.AZURE_KEY_VAULT_ENDPOINT}}
+ AZURE_KEY_VAULT_CLIENT_ID: ${{secrets.AZURE_KEY_VAULT_CLIENT_ID}}
+ AZURE_KEY_VAULT_CLIENT_SECRET: ${{secrets.AZURE_KEY_VAULT_CLIENT_SECRET}}
+ ACA_POOL_MANAGEMENT_ENDPOINT: ${{secrets.ACA_POOL_MANAGEMENT_ENDPOINT}}
run: |
if ${{ matrix.os == 'ubuntu-latest' }}; then
docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest
@@ -99,6 +103,7 @@ jobs:
cd python
poetry run pytest ./tests/integration -v
+ poetry run pytest ./tests/samples -v
python-integration-tests:
needs: paths-filter
@@ -154,6 +159,10 @@ jobs:
AZURE_AI_SEARCH_API_KEY: ${{secrets.AZURE_AI_SEARCH_API_KEY}}
AZURE_AI_SEARCH_ENDPOINT: ${{secrets.AZURE_AI_SEARCH_ENDPOINT}}
MONGODB_ATLAS_CONNECTION_STRING: ${{secrets.MONGODB_ATLAS_CONNECTION_STRING}}
+ AZURE_KEY_VAULT_ENDPOINT: ${{secrets.AZURE_KEY_VAULT_ENDPOINT}}
+ AZURE_KEY_VAULT_CLIENT_ID: ${{secrets.AZURE_KEY_VAULT_CLIENT_ID}}
+ AZURE_KEY_VAULT_CLIENT_SECRET: ${{secrets.AZURE_KEY_VAULT_CLIENT_SECRET}}
+ ACA_POOL_MANAGEMENT_ENDPOINT: ${{secrets.ACA_POOL_MANAGEMENT_ENDPOINT}}
run: |
if ${{ matrix.os == 'ubuntu-latest' }}; then
docker run -d --name redis-stack-server -p 6379:6379 redis/redis-stack-server:latest
@@ -161,6 +170,7 @@ jobs:
cd python
poetry run pytest ./tests/integration -v
+ poetry run pytest ./tests/samples -v
# This final job is required to satisfy the merge queue. It must only run (or succeed) if no tests failed
python-integration-tests-check:
diff --git a/.github/workflows/python-lint.yml b/.github/workflows/python-lint.yml
index 15f339747c96..3f20ae2f0d02 100644
--- a/.github/workflows/python-lint.yml
+++ b/.github/workflows/python-lint.yml
@@ -1,7 +1,7 @@
name: Python Code Quality Checks
on:
workflow_dispatch:
- pull_request_target:
+ pull_request:
branches: [ "main", "feature*" ]
paths:
- 'python/**'
diff --git a/README.md b/README.md
index c400ede21d35..e8518c0ef1cf 100644
--- a/README.md
+++ b/README.md
@@ -1,9 +1,15 @@
# Semantic Kernel
+## Status
+
+ - Python
[![Python package](https://img.shields.io/pypi/v/semantic-kernel)](https://pypi.org/project/semantic-kernel/)
-[![Nuget package](https://img.shields.io/nuget/vpre/Microsoft.SemanticKernel)](https://www.nuget.org/packages/Microsoft.SemanticKernel/)
-[![dotnet Docker](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-docker.yml/badge.svg?branch=main)](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-docker.yml)
-[![dotnet Windows](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-windows.yml/badge.svg?branch=main)](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-windows.yml)
+ - .NET
+[![Nuget package](https://img.shields.io/nuget/vpre/Microsoft.SemanticKernel)](https://www.nuget.org/packages/Microsoft.SemanticKernel/)[![dotnet Docker](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-docker.yml/badge.svg?branch=main)](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-docker.yml)[![dotnet Windows](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-windows.yml/badge.svg?branch=main)](https://github.com/microsoft/semantic-kernel/actions/workflows/dotnet-ci-windows.yml)
+ - Java
+[![Java CICD Builds](https://github.com/microsoft/semantic-kernel/actions/workflows/java-build.yml/badge.svg?branch=java-development)](https://github.com/microsoft/semantic-kernel/actions/workflows/java-build.yml)[![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.microsoft.semantic-kernel/semantickernel-api/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.microsoft.semantic-kernel/semantickernel-api)
+
+## Overview
[![License: MIT](https://img.shields.io/github/license/microsoft/semantic-kernel)](https://github.com/microsoft/semantic-kernel/blob/main/LICENSE)
[![Discord](https://img.shields.io/discord/1063152441819942922?label=Discord&logo=discord&logoColor=white&color=d82679)](https://aka.ms/SKDiscord)
@@ -107,6 +113,7 @@ Finally, refer to our API references for more details on the C# and Python APIs:
- [C# API reference](https://learn.microsoft.com/en-us/dotnet/api/microsoft.semantickernel?view=semantic-kernel-dotnet)
- Python API reference (coming soon)
+- Java API reference (coming soon)
## Join the community
diff --git a/docs/EUCLIDEAN_DISTANCE.md b/docs/EUCLIDEAN_DISTANCE.md
index 58e93555b97b..e67cea62664d 100644
--- a/docs/EUCLIDEAN_DISTANCE.md
+++ b/docs/EUCLIDEAN_DISTANCE.md
@@ -1,15 +1,15 @@
-# Euclidean distance
+# Euclidean Distance
Euclidean distance is a mathematical concept that measures the straight-line distance
between two points in a Euclidean space. It is named after the ancient Greek mathematician
Euclid, who is often referred to as the "father of geometry". The formula for calculating
-Euclidean distance is based on the Pythagorean theorem and can be expressed as:
+Euclidean distance is based on the Pythagorean Theorem and can be expressed as:
- d = √(x2 - x1)² + (y2 - y1)²
+$$d = \sqrt{(x_2 - x_1)^2 + (y_2 - y_1)^2}$$
-In higher dimensions, this formula can be generalized to:
+For higher dimensions, this formula can be generalized to:
- d = √(x2 - x1)² + (y2 - y1)² + ... + (zn - zn-1)²
+$$d(p, q) = \sqrt{\sum\limits_{i=1}^{n} (q_i - p_i)^2}$$
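
As an illustration of the generalized formula, here is a small C# sketch (the class and method names are illustrative, not part of this repository) that computes the distance between two equal-length vectors:

```csharp
using System;

public static class Distance
{
    // Computes d(p, q) = sqrt(sum_i (q_i - p_i)^2) for equal-length vectors.
    public static double Euclidean(double[] p, double[] q)
    {
        if (p.Length != q.Length)
        {
            throw new ArgumentException("Vectors must have the same dimensionality.");
        }

        double sum = 0.0;
        for (int i = 0; i < p.Length; i++)
        {
            double diff = q[i] - p[i];
            sum += diff * diff;
        }

        return Math.Sqrt(sum);
    }
}

// Example: the distance between (1, 2) and (4, 6) is 5.
// Console.WriteLine(Distance.Euclidean(new[] { 1.0, 2.0 }, new[] { 4.0, 6.0 }));
```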
Euclidean distance has many applications in computer science and artificial intelligence,
particularly when working with [embeddings](EMBEDDINGS.md). Embeddings are numerical
diff --git a/docs/decisions/0045-breaking-changes-guidance.md b/docs/decisions/0045-breaking-changes-guidance.md
new file mode 100644
index 000000000000..59e1f5a50f3d
--- /dev/null
+++ b/docs/decisions/0045-breaking-changes-guidance.md
@@ -0,0 +1,40 @@
+---
+status: accepted
+contact: markwallace
+date: 2024-06-10
+deciders: sergeymenshykh, mbolan, rbarreto, dmytrostruk, westey
+consulted:
+informed:
+---
+
+# Guidance for Breaking Changes
+
+## Context and Problem Statement
+
+We must avoid breaking changes in .NET because of the well-known [diamond dependency issue](https://learn.microsoft.com/en-us/dotnet/standard/library-guidance/dependencies#diamond-dependencies), where breaking changes between different versions of the same package cause bugs and exceptions at run time.
+
+## Decision Drivers
+
+Breaking changes are only allowed under the following circumstances:
+
+- Updates to an experimental feature, i.e., we have learned something new and need to modify the design of the feature.
+- When one of our dependencies introduces an unavoidable breaking change.
+
+All breaking changes must be clearly documented: at a minimum in the release notes, and possibly also via a migration guide blog post.
+
+- Include a detailed description of the breaking change in the PR description so that it is included in the release notes.
+- Update the Learn site migration guide documentation and have it published to coincide with the release that includes the breaking change.
+
+In all other cases we must avoid breaking changes. There will still be situations where we need to accommodate a change to one of our dependencies or introduce a new capability, e.g.:
+
+- When we find a security issue or a severe bug (e.g. data loss).
+- One of our dependencies introduces a major breaking change e.g. the introduction of the new OpenAI SDK.
+- When we find a severe limitation in our current implementation e.g. when the AI services introduce a new capability.
+
+In these cases we will plan to obsolete the API(s) and provide a documented migration path to the new preferred pattern.
+An example of this is the switch to the new OpenAI .NET SDK.
+During this transition there will be a period where both the new and old APIs are supported to allow customers to migrate.
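
As a hedged sketch of the obsolete-then-migrate pattern described above (the type and member names below are hypothetical and not taken from the Semantic Kernel codebase), the old API keeps working during the transition while the `[Obsolete]` attribute points callers at the replacement:

```csharp
using System;

public static class TextGenerator
{
    // Old API: kept working during the transition so existing callers are not broken,
    // but marked [Obsolete] so they see the migration path at compile time.
    [Obsolete("Use Generate(PromptRequest) instead. This overload will be removed in a future release.")]
    public static string Generate(string prompt) => Generate(new PromptRequest(prompt));

    // New preferred API that replaces the obsolete overload.
    public static string Generate(PromptRequest request) => $"echo: {request.Prompt}";
}

public sealed record PromptRequest(string Prompt);
```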
+
+## Decision Outcome
+
+Chosen option: We must avoid breaking changes in .NET because of the well-known diamond dependency issue.
diff --git a/docs/decisions/0046-azure-model-as-a-service.md b/docs/decisions/0046-azure-model-as-a-service.md
new file mode 100644
index 000000000000..a91468e253b0
--- /dev/null
+++ b/docs/decisions/0046-azure-model-as-a-service.md
@@ -0,0 +1,44 @@
+---
+# These are optional elements. Feel free to remove any of them.
+status: accepted
+contact: rogerbarreto, taochen
+date: 2024-06-20
+deciders: alliscode, moonbox3, eavanvalkenburg
+consulted:
+informed:
+---
+
+# Support for Azure Model-as-a-Service in SK
+
+## Context and Problem Statement
+
+There has been a demand from customers for the implementation of Model-as-a-Service (MaaS) in SK. MaaS, which is also referred to as [serverless API](https://learn.microsoft.com/en-us/azure/ai-studio/how-to/model-catalog-overview#model-deployment-managed-compute-and-serverless-api-pay-as-you-go), is available in [Azure AI Studio](https://learn.microsoft.com/en-us/azure/ai-studio/what-is-ai-studio). This mode of consumption operates on a pay-as-you-go basis, typically using tokens for billing purposes. Clients can access the service via the [Azure AI Model Inference API](https://learn.microsoft.com/en-us/azure/ai-studio/reference/reference-model-inference-api?tabs=azure-studio) or client SDKs.
+
+At present, there is no official support for MaaS in SK. The purpose of this ADR is to examine the constraints of the service and explore potential solutions to enable support for the service in SK via the development of a new AI connector.
+
+## Client SDK
+
+The Azure team will be providing a new client library, namely `Azure.AI.Inference` in .NET and `azure-ai-inference` in Python, for interacting with the service. While the service API is OpenAI-compatible, the OpenAI and Azure OpenAI client libraries cannot be used to interact with the service because they are not independent of the models and their providers, whereas Azure AI Studio features a diverse range of open-source models beyond OpenAI models.
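
For illustration only, a minimal sketch of calling a serverless endpoint directly through the new client library might look like the following; it assumes the preview `Azure.AI.Inference` surface (a `ChatCompletionsClient` with `CompleteAsync` and a `Content` convenience property on the response), and the endpoint and key are placeholders:

```csharp
using System;
using System.Threading.Tasks;
using Azure;
using Azure.AI.Inference;

public static class MaasSample
{
    public static async Task RunAsync()
    {
        // Placeholders: use the serverless API endpoint and key shown in Azure AI Studio.
        var endpoint = new Uri("https://<your-deployment>.<region>.models.ai.azure.com");
        var credential = new AzureKeyCredential(Environment.GetEnvironmentVariable("AZUREAI_API_KEY")!);

        var client = new ChatCompletionsClient(endpoint, credential);

        var options = new ChatCompletionsOptions
        {
            Messages =
            {
                new ChatRequestSystemMessage("You are a helpful assistant."),
                new ChatRequestUserMessage("What is Model-as-a-Service?"),
            },
        };

        // Assumes the preview SDK's response shape, where Content exposes the generated text.
        Response<ChatCompletions> response = await client.CompleteAsync(options);
        Console.WriteLine(response.Value.Content);
    }
}
```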
+
+### Limitations
+
+The initial release of the client SDK will only support chat completion and text/image embedding generation, with image generation to be added later.
+
+Plans to support text completion are currently unclear, and it is highly unlikely that the SDK will ever include support for text completion. As a result, the new AI connector will **NOT** support text completions in the initial version until we get more customer signals or the client SDK adds support.
+
+## AI Connector
+
+### Naming options
+
+- Azure
+- AzureAI
+- AzureAIInference
+- AzureAIModelInference
+
+ Decision: `AzureAIInference`
+
+### Support for model-specific parameters
+
+Models can have supplementary parameters that are not part of the default API. The service API and the client SDK allow model-specific parameters to be provided. Users can pass model-specific settings via a dedicated argument, alongside common settings such as `temperature` and `top_p`.
+
+In the context of SK, execution parameters are categorized under `PromptExecutionSettings`, which is inherited by all connector-specific setting classes. The settings of the new connector will contain a member of type `dictionary`, which will group together the model-specific parameters.
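
To make that shape concrete, here is a hedged sketch of what the connector's settings class might look like; the class name `AzureAIInferencePromptExecutionSettings` and the `ExtraParameters` member are assumptions based on this ADR, not the final API:

```csharp
using System.Collections.Generic;
using Microsoft.SemanticKernel;

// Illustrative sketch only: the class name and members are assumptions based on this ADR.
public sealed class AzureAIInferencePromptExecutionSettings : PromptExecutionSettings
{
    // Common settings shared with other connectors.
    public double? Temperature { get; set; }

    public double? TopP { get; set; }

    // Model-specific parameters that are not part of the default inference API,
    // grouped together and passed through to the service as provided.
    public IDictionary<string, object>? ExtraParameters { get; set; }
}
```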
diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index d47e0ca791f4..d514e22cb5f4 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -8,7 +8,7 @@
-
+
@@ -27,8 +27,8 @@
-
-
+
+
@@ -71,8 +71,8 @@
-
-
+
+
@@ -80,7 +80,7 @@
-
+
@@ -110,7 +110,7 @@
allruntime; build; native; contentfiles; analyzers; buildtransitive
-
+ allruntime; build; native; contentfiles; analyzers; buildtransitive
diff --git a/dotnet/nuget/nuget-package.props b/dotnet/nuget/nuget-package.props
index 5a07d43e119f..6a48e76f58fc 100644
--- a/dotnet/nuget/nuget-package.props
+++ b/dotnet/nuget/nuget-package.props
@@ -1,7 +1,7 @@
- 1.14.1
+ 1.15.0$(VersionPrefix)-$(VersionSuffix)$(VersionPrefix)
diff --git a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs
index 58813da9032a..0802980422cd 100644
--- a/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs
+++ b/dotnet/samples/Concepts/Agents/ComplexChat_NestedShopper.cs
@@ -94,7 +94,7 @@ Select which participant will take the next turn based on the conversation histo
""";
[Fact]
- public async Task RunAsync()
+ public async Task NestedChatWithAggregatorAgentAsync()
{
Console.WriteLine($"! {Model}");
diff --git a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
index 86e6a46cb8ec..68052ef99cf2 100644
--- a/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
+++ b/dotnet/samples/Concepts/Agents/MixedChat_Agents.cs
@@ -33,7 +33,7 @@ Consider suggestions when refining an idea.
""";
[Fact]
- public async Task RunAsync()
+ public async Task ChatWithOpenAIAssistantAgentAndChatCompletionAgentAsync()
{
// Define the agents: one of each type
ChatCompletionAgent agentReviewer =
@@ -55,7 +55,7 @@ await OpenAIAssistantAgent.CreateAsync(
ModelId = this.Model,
});
- // Create a nexus for agent interaction.
+ // Create a chat for agent interaction.
var chat =
new AgentGroupChat(agentWriter, agentReviewer)
{
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
index 3d6f714b7b26..5617784b780c 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_ChartMaker.cs
@@ -21,7 +21,7 @@ public class OpenAIAssistant_ChartMaker(ITestOutputHelper output) : BaseTest(out
private const string AgentInstructions = "Create charts as requested without explanation.";
[Fact]
- public async Task RunAsync()
+ public async Task GenerateChartWithOpenAIAssistantAgentAsync()
{
// Define the agent
OpenAIAssistantAgent agent =
@@ -77,7 +77,7 @@ async Task InvokeAgentAsync(string input)
foreach (var fileReference in message.Items.OfType<FileReferenceContent>())
{
- Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}: #{fileReference.FileId}");
+ Console.WriteLine($"# {message.Role} - {message.AuthorName ?? "*"}: @{fileReference.FileId}");
}
}
}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs
index 46b4599c9a10..636f70636126 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_CodeInterpreter.cs
@@ -11,8 +11,10 @@ namespace Agents;
///
public class OpenAIAssistant_CodeInterpreter(ITestOutputHelper output) : BaseTest(output)
{
+ protected override bool ForceOpenAI => true;
+
[Fact]
- public async Task RunAsync()
+ public async Task UseCodeInterpreterToolWithOpenAIAssistantAgentAsync()
{
// Define the agent
OpenAIAssistantAgent agent =
@@ -31,8 +33,7 @@ await OpenAIAssistantAgent.CreateAsync(
// Respond to user input
try
{
- await InvokeAgentAsync("What is the solution to `3x + 2 = 14`?");
- await InvokeAgentAsync("What is the fibinacci sequence until 101?");
+ await InvokeAgentAsync("Use code to determine the values in the Fibonacci sequence that that are less then the value of 101?");
}
finally
{
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
new file mode 100644
index 000000000000..dbe9d17ba90a
--- /dev/null
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileManipulation.cs
@@ -0,0 +1,83 @@
+// Copyright (c) Microsoft. All rights reserved.
+using System.Text;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Agents;
+using Microsoft.SemanticKernel.Agents.OpenAI;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Resources;
+
+namespace Agents;
+
+///
+/// Demonstrate using code-interpreter to manipulate and generate csv files with an OpenAIAssistantAgent.
+///
+public class OpenAIAssistant_FileManipulation(ITestOutputHelper output) : BaseTest(output)
+{
+ ///
+ /// Target OpenAI services.
+ ///
+ protected override bool ForceOpenAI => true;
+
+ [Fact]
+ public async Task AnalyzeCSVFileUsingOpenAIAssistantAgentAsync()
+ {
+ OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
+
+ OpenAIFileReference uploadFile =
+ await fileService.UploadContentAsync(
+ new BinaryContent(await EmbeddedResource.ReadAllAsync("sales.csv"), mimeType: "text/plain"),
+ new OpenAIFileUploadExecutionSettings("sales.csv", OpenAIFilePurpose.Assistants));
+
+ Console.WriteLine(this.ApiKey);
+
+ // Define the agent
+ OpenAIAssistantAgent agent =
+ await OpenAIAssistantAgent.CreateAsync(
+ kernel: new(),
+ config: new(this.ApiKey, this.Endpoint),
+ new()
+ {
+ EnableCodeInterpreter = true, // Enable code-interpreter
+ ModelId = this.Model,
+ FileIds = [uploadFile.Id] // Associate uploaded file
+ });
+
+ // Create a chat for agent interaction.
+ var chat = new AgentGroupChat();
+
+ // Respond to user input
+ try
+ {
+ await InvokeAgentAsync("Which segment had the most sales?");
+ await InvokeAgentAsync("List the top 5 countries that generated the most profit.");
+ await InvokeAgentAsync("Create a tab delimited file report of profit by each country per month.");
+ }
+ finally
+ {
+ await agent.DeleteAsync();
+ await fileService.DeleteFileAsync(uploadFile.Id);
+ }
+
+ // Local function to invoke agent and display the conversation messages.
+ async Task InvokeAgentAsync(string input)
+ {
+ chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
+
+ Console.WriteLine($"# {AuthorRole.User}: '{input}'");
+
+ await foreach (var content in chat.InvokeAsync(agent))
+ {
+ Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
+
+ foreach (var annotation in content.Items.OfType<AnnotationContent>())
+ {
+ Console.WriteLine($"\n* '{annotation.Quote}' => {annotation.FileId}");
+ BinaryContent fileContent = await fileService.GetFileContentAsync(annotation.FileId!);
+ byte[] byteContent = fileContent.Data?.ToArray() ?? [];
+ Console.WriteLine(Encoding.Default.GetString(byteContent));
+ }
+ }
+ }
+ }
+}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs
new file mode 100644
index 000000000000..7537f53da726
--- /dev/null
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_FileService.cs
@@ -0,0 +1,66 @@
+// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using Resources;
+
+namespace Agents;
+
+///
+/// Demonstrate using the OpenAIFileService.
+///
+public class OpenAIAssistant_FileService(ITestOutputHelper output) : BaseTest(output)
+{
+ ///
+ /// Retrieval tool not supported on Azure OpenAI.
+ ///
+ protected override bool ForceOpenAI => true;
+
+ [Fact]
+ public async Task UploadAndRetrieveFilesAsync()
+ {
+ OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
+
+ BinaryContent[] files = [
+ new AudioContent(await EmbeddedResource.ReadAllAsync("test_audio.wav")!, mimeType: "audio/wav") { InnerContent = "test_audio.wav" },
+ new ImageContent(await EmbeddedResource.ReadAllAsync("sample_image.jpg")!, mimeType: "image/jpeg") { InnerContent = "sample_image.jpg" },
+ new ImageContent(await EmbeddedResource.ReadAllAsync("test_image.jpg")!, mimeType: "image/jpeg") { InnerContent = "test_image.jpg" },
+ new BinaryContent(data: await EmbeddedResource.ReadAllAsync("travelinfo.txt"), mimeType: "text/plain") { InnerContent = "travelinfo.txt" }
+ ];
+
+ var fileContents = new Dictionary<string, BinaryContent>();
+ foreach (BinaryContent file in files)
+ {
+ OpenAIFileReference result = await fileService.UploadContentAsync(file, new(file.InnerContent!.ToString()!, OpenAIFilePurpose.FineTune));
+ fileContents.Add(result.Id, file);
+ }
+
+ foreach (OpenAIFileReference fileReference in await fileService.GetFilesAsync(OpenAIFilePurpose.FineTune))
+ {
+ // Only interested in the files we uploaded
+ if (!fileContents.ContainsKey(fileReference.Id))
+ {
+ continue;
+ }
+
+ BinaryContent content = await fileService.GetFileContentAsync(fileReference.Id);
+
+ string? mimeType = fileContents[fileReference.Id].MimeType;
+ string? fileName = fileContents[fileReference.Id].InnerContent!.ToString();
+ ReadOnlyMemory<byte> data = content.Data ?? new();
+
+ var typedContent = mimeType switch
+ {
+ "image/jpeg" => new ImageContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata },
+ "audio/wav" => new AudioContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata },
+ _ => new BinaryContent(data, mimeType) { Uri = content.Uri, InnerContent = fileName, Metadata = content.Metadata }
+ };
+
+ Console.WriteLine($"\nFile: {fileName} - {mimeType}");
+ Console.WriteLine($"Type: {typedContent}");
+ Console.WriteLine($"Uri: {typedContent.Uri}");
+
+ // Delete the test file remotely
+ await fileService.DeleteFileAsync(fileReference.Id);
+ }
+ }
+}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_MultipleContents.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_MultipleContents.cs
deleted file mode 100644
index 49f36edce0f4..000000000000
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_MultipleContents.cs
+++ /dev/null
@@ -1,146 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-using Azure.AI.OpenAI.Assistants;
-using Microsoft.SemanticKernel;
-using Microsoft.SemanticKernel.Agents;
-using Microsoft.SemanticKernel.Agents.OpenAI;
-using Microsoft.SemanticKernel.ChatCompletion;
-using Microsoft.SemanticKernel.Connectors.OpenAI;
-using Resources;
-
-namespace Agents;
-
-///
-/// Demonstrate using retrieval on .
-///
-public class OpenAIAssistant_MultipleContents(ITestOutputHelper output) : BaseTest(output)
-{
- ///
- /// Retrieval tool not supported on Azure OpenAI.
- ///
- protected override bool ForceOpenAI => true;
-
- [Fact]
- public async Task RunAsync()
- {
- OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
-
- BinaryContent[] files = [
- // Audio is not supported by Assistant API
- // new AudioContent(await EmbeddedResource.ReadAllAsync("test_audio.wav")!, mimeType:"audio/wav", innerContent: "test_audio.wav"),
- new ImageContent(await EmbeddedResource.ReadAllAsync("sample_image.jpg")!, mimeType: "image/jpeg") { InnerContent = "sample_image.jpg" },
- new ImageContent(await EmbeddedResource.ReadAllAsync("test_image.jpg")!, mimeType: "image/jpeg") { InnerContent = "test_image.jpg" },
- new BinaryContent(data: await EmbeddedResource.ReadAllAsync("travelinfo.txt"), mimeType: "text/plain")
- {
- InnerContent = "travelinfo.txt"
- }
- ];
-
- var fileIds = new List();
- foreach (var file in files)
- {
- try
- {
- var uploadFile = await fileService.UploadContentAsync(file,
- new OpenAIFileUploadExecutionSettings(file.InnerContent!.ToString()!, Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose.Assistants));
-
- fileIds.Add(uploadFile.Id);
- }
- catch (HttpOperationException hex)
- {
- Console.WriteLine(hex.ResponseContent);
- Assert.Fail($"Failed to upload file: {hex.Message}");
- }
- }
-
- // Define the agent
- OpenAIAssistantAgent agent =
- await OpenAIAssistantAgent.CreateAsync(
- kernel: new(),
- config: new(this.ApiKey, this.Endpoint),
- new()
- {
- EnableRetrieval = true, // Enable retrieval
- ModelId = this.Model,
- // FileIds = fileIds Currently Assistant API only supports text files, no images or audio.
- FileIds = [fileIds.Last()]
- });
-
- // Create a chat for agent interaction.
- var chat = new AgentGroupChat();
-
- // Respond to user input
- try
- {
- await InvokeAgentAsync("Where did sam go?");
- await InvokeAgentAsync("When does the flight leave Seattle?");
- await InvokeAgentAsync("What is the hotel contact info at the destination?");
- }
- finally
- {
- await agent.DeleteAsync();
- }
-
- // Local function to invoke agent and display the conversation messages.
- async Task InvokeAgentAsync(string input)
- {
- chat.AddChatMessage(new ChatMessageContent(AuthorRole.User, input));
-
- Console.WriteLine($"# {AuthorRole.User}: '{input}'");
-
- await foreach (var content in chat.InvokeAsync(agent))
- {
- Console.WriteLine($"# {content.Role} - {content.AuthorName ?? "*"}: '{content.Content}'");
- }
- }
- }
-
- [Fact]
- public async Task SendingAndRetrievingFilesAsync()
- {
- var openAIClient = new AssistantsClient(TestConfiguration.OpenAI.ApiKey);
- OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
-
- BinaryContent[] files = [
- new AudioContent(await EmbeddedResource.ReadAllAsync("test_audio.wav")!, mimeType: "audio/wav") { InnerContent = "test_audio.wav" },
- new ImageContent(await EmbeddedResource.ReadAllAsync("sample_image.jpg")!, mimeType: "image/jpeg") { InnerContent = "sample_image.jpg" },
- new ImageContent(await EmbeddedResource.ReadAllAsync("test_image.jpg")!, mimeType: "image/jpeg") { InnerContent = "test_image.jpg" },
- new BinaryContent(data: await EmbeddedResource.ReadAllAsync("travelinfo.txt"), mimeType: "text/plain") { InnerContent = "travelinfo.txt" }
- ];
-
- var fileIds = new Dictionary();
- foreach (var file in files)
- {
- var result = await openAIClient.UploadFileAsync(new BinaryData(file.Data), Azure.AI.OpenAI.Assistants.OpenAIFilePurpose.FineTune);
- fileIds.Add(result.Value.Id, file);
- }
-
- foreach (var file in (await openAIClient.GetFilesAsync(Azure.AI.OpenAI.Assistants.OpenAIFilePurpose.FineTune)).Value)
- {
- if (!fileIds.ContainsKey(file.Id))
- {
- continue;
- }
-
- var data = (await openAIClient.GetFileContentAsync(file.Id)).Value;
-
- var mimeType = fileIds[file.Id].MimeType;
- var fileName = fileIds[file.Id].InnerContent!.ToString();
- var metadata = new Dictionary { ["id"] = file.Id };
- var uri = new Uri($"https://api.openai.com/v1/files/{file.Id}/content");
- var content = mimeType switch
- {
- "image/jpeg" => new ImageContent(data, mimeType) { Uri = uri, InnerContent = fileName, Metadata = metadata },
- "audio/wav" => new AudioContent(data, mimeType) { Uri = uri, InnerContent = fileName, Metadata = metadata },
- _ => new BinaryContent(data, mimeType) { Uri = uri, InnerContent = fileName, Metadata = metadata }
- };
-
- Console.WriteLine($"File: {fileName} - {mimeType}");
-
- // Images tostring are different from the graduated contents for retrocompatibility
- Console.WriteLine(content.ToString());
-
- // Delete the test file remotely
- await openAIClient.DeleteFileAsync(file.Id);
- }
- }
-}
diff --git a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs
index 2df655d07630..9c7c9bb46f43 100644
--- a/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs
+++ b/dotnet/samples/Concepts/Agents/OpenAIAssistant_Retrieval.cs
@@ -19,7 +19,7 @@ public class OpenAIAssistant_Retrieval(ITestOutputHelper output) : BaseTest(outp
protected override bool ForceOpenAI => true;
[Fact]
- public async Task RunAsync()
+ public async Task UseRetrievalToolWithOpenAIAssistantAgentAsync()
{
OpenAIFileService fileService = new(TestConfiguration.OpenAI.ApiKey);
diff --git a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs
index 592146da6799..81fbc2492d4a 100644
--- a/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/Connectors_WithMultipleLLMs.cs
@@ -1,82 +1,185 @@
// Copyright (c) Microsoft. All rights reserved.
using Microsoft.SemanticKernel;
-using xRetry;
namespace ChatCompletion;
public class Connectors_WithMultipleLLMs(ITestOutputHelper output) : BaseTest(output)
{
- ///
- /// Show how to run a prompt function and specify a specific service to use.
- ///
- [RetryFact(typeof(HttpOperationException))]
- public async Task RunAsync()
+ private const string ChatPrompt = "Hello AI, what can you do for me?";
+
+ private static Kernel BuildKernel()
{
- Kernel kernel = Kernel.CreateBuilder()
- .AddAzureOpenAIChatCompletion(
- deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
- endpoint: TestConfiguration.AzureOpenAI.Endpoint,
- apiKey: TestConfiguration.AzureOpenAI.ApiKey,
- serviceId: "AzureOpenAIChat",
- modelId: TestConfiguration.AzureOpenAI.ChatModelId)
- .AddOpenAIChatCompletion(
- modelId: TestConfiguration.OpenAI.ChatModelId,
- apiKey: TestConfiguration.OpenAI.ApiKey,
- serviceId: "OpenAIChat")
- .Build();
-
- await RunByServiceIdAsync(kernel, "AzureOpenAIChat");
- await RunByModelIdAsync(kernel, TestConfiguration.OpenAI.ChatModelId);
- await RunByFirstModelIdAsync(kernel, "gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId);
+ return Kernel.CreateBuilder()
+ .AddAzureOpenAIChatCompletion(
+ deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
+ endpoint: TestConfiguration.AzureOpenAI.Endpoint,
+ apiKey: TestConfiguration.AzureOpenAI.ApiKey,
+ serviceId: "AzureOpenAIChat",
+ modelId: TestConfiguration.AzureOpenAI.ChatModelId)
+ .AddOpenAIChatCompletion(
+ modelId: TestConfiguration.OpenAI.ChatModelId,
+ apiKey: TestConfiguration.OpenAI.ApiKey,
+ serviceId: "OpenAIChat")
+ .Build();
}
- private async Task RunByServiceIdAsync(Kernel kernel, string serviceId)
+ ///
+ /// Shows how to invoke a prompt and specify the service id of the preferred AI service. When the prompt is executed the AI Service with the matching service id will be selected.
+ ///
+ /// Service Id
+ [Theory]
+ [InlineData("AzureOpenAIChat")]
+ public async Task InvokePromptByServiceIdAsync(string serviceId)
{
+ var kernel = BuildKernel();
Console.WriteLine($"======== Service Id: {serviceId} ========");
- var prompt = "Hello AI, what can you do for me?";
+ var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId }));
- KernelArguments arguments = [];
- arguments.ExecutionSettings = new Dictionary()
- {
- { serviceId, new PromptExecutionSettings() }
- };
- var result = await kernel.InvokePromptAsync(prompt, arguments);
Console.WriteLine(result.GetValue<string>());
}
- private async Task RunByModelIdAsync(Kernel kernel, string modelId)
+ ///
+ /// Shows how to invoke a prompt and specify the model id of the preferred AI service. When the prompt is executed the AI Service with the matching model id will be selected.
+ ///
+ [Fact]
+ private async Task InvokePromptByModelIdAsync()
{
+ var modelId = TestConfiguration.OpenAI.ChatModelId;
+ var kernel = BuildKernel();
Console.WriteLine($"======== Model Id: {modelId} ========");
- var prompt = "Hello AI, what can you do for me?";
+ var result = await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings() { ModelId = modelId }));
- var result = await kernel.InvokePromptAsync(
- prompt,
- new(new PromptExecutionSettings()
- {
- ModelId = modelId
- }));
Console.WriteLine(result.GetValue<string>());
}
- private async Task RunByFirstModelIdAsync(Kernel kernel, params string[] modelIds)
+ ///
+ /// Shows how to invoke a prompt and specify the service ids of the preferred AI services.
+ /// When the prompt is executed the AI Service will be selected based on the order of the provided service ids.
+ ///
+ [Fact]
+ public async Task InvokePromptFunctionWithFirstMatchingServiceIdAsync()
+ {
+ string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
+ var kernel = BuildKernel();
+ Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");
+
+ var result = await kernel.InvokePromptAsync(ChatPrompt, new(serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId })));
+
+ Console.WriteLine(result.GetValue<string>());
+ }
+
+ ///
+ /// Shows how to invoke a prompt and specify the model ids of the preferred AI services.
+ /// When the prompt is executed the AI Service will be selected based on the order of the provided model ids.
+ ///
+ [Fact]
+ public async Task InvokePromptFunctionWithFirstMatchingModelIdAsync()
{
+ string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
+ var kernel = BuildKernel();
Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");
- var prompt = "Hello AI, what can you do for me?";
+ var result = await kernel.InvokePromptAsync(ChatPrompt, new(modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId })));
- var modelSettings = new Dictionary();
- foreach (var modelId in modelIds)
- {
- modelSettings.Add(modelId, new PromptExecutionSettings() { ModelId = modelId });
- }
- var promptConfig = new PromptTemplateConfig(prompt) { Name = "HelloAI", ExecutionSettings = modelSettings };
+ Console.WriteLine(result.GetValue<string>());
+ }
+
+ ///
+ /// Shows how to create a KernelFunction from a prompt and specify the service ids of the preferred AI services.
+ /// When the function is invoked the AI Service will be selected based on the order of the provided service ids.
+ ///
+ [Fact]
+ public async Task InvokePreconfiguredFunctionWithFirstMatchingServiceIdAsync()
+ {
+ string[] serviceIds = ["NotFound", "AzureOpenAIChat", "OpenAIChat"];
+ var kernel = BuildKernel();
+ Console.WriteLine($"======== Service Ids: {string.Join(", ", serviceIds)} ========");
+
+ var function = kernel.CreateFunctionFromPrompt(ChatPrompt, serviceIds.Select(serviceId => new PromptExecutionSettings { ServiceId = serviceId }));
+ var result = await kernel.InvokeAsync(function);
- var function = kernel.CreateFunctionFromPrompt(promptConfig);
+ Console.WriteLine(result.GetValue<string>());
+ }
+
+ ///
+ /// Shows how to create a KernelFunction from a prompt and specify the model ids of the preferred AI services.
+ /// When the function is invoked the AI Service will be selected based on the order of the provided model ids.
+ ///
+ [Fact]
+ public async Task InvokePreconfiguredFunctionWithFirstMatchingModelIdAsync()
+ {
+ string[] modelIds = ["gpt-4-1106-preview", TestConfiguration.AzureOpenAI.ChatModelId, TestConfiguration.OpenAI.ChatModelId];
+ var kernel = BuildKernel();
+
+ Console.WriteLine($"======== Model Ids: {string.Join(", ", modelIds)} ========");
+ var function = kernel.CreateFunctionFromPrompt(ChatPrompt, modelIds.Select((modelId, index) => new PromptExecutionSettings { ServiceId = $"service-{index}", ModelId = modelId }));
var result = await kernel.InvokeAsync(function);
+
Console.WriteLine(result.GetValue<string>());
}
+
+ ///
+ /// Shows how to invoke a KernelFunction and specify the model id of the AI Service the function will use.
+ ///
+ [Fact]
+ public async Task InvokePreconfiguredFunctionByModelIdAsync()
+ {
+ var modelId = TestConfiguration.OpenAI.ChatModelId;
+ var kernel = BuildKernel();
+ Console.WriteLine($"======== Model Id: {modelId} ========");
+
+ var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
+ var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ModelId = modelId }));
+
+ Console.WriteLine(result.GetValue<string>());
+ }
+
+ ///
+ /// Shows how to invoke a KernelFunction and specify the service id of the AI Service the function will use.
+ ///
+ /// Service Id
+ [Theory]
+ [InlineData("AzureOpenAIChat")]
+ public async Task InvokePreconfiguredFunctionByServiceIdAsync(string serviceId)
+ {
+ var kernel = BuildKernel();
+ Console.WriteLine($"======== Service Id: {serviceId} ========");
+
+ var function = kernel.CreateFunctionFromPrompt(ChatPrompt);
+ var result = await kernel.InvokeAsync(function, new(new PromptExecutionSettings { ServiceId = serviceId }));
+
+ Console.WriteLine(result.GetValue<string>());
+ }
+
+ ///
+ /// Shows that specifying a non-existent ServiceId causes the kernel to throw an exception.
+ ///
+ /// Service Id
+ [Theory]
+ [InlineData("NotFound")]
+ public async Task InvokePromptByNonExistingServiceIdThrowsExceptionAsync(string serviceId)
+ {
+ var kernel = BuildKernel();
+ Console.WriteLine($"======== Service Id: {serviceId} ========");
+
+ await Assert.ThrowsAsync<KernelException>(async () => await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ServiceId = serviceId })));
+ }
+
+ ///
+ /// Shows that when the model id specified in the execution settings is not found, the kernel falls back to the default service.
+ ///
+ /// Model Id
+ [Theory]
+ [InlineData("NotFound")]
+ public async Task InvokePromptByNonExistingModelIdUsesDefaultServiceAsync(string modelId)
+ {
+ var kernel = BuildKernel();
+ Console.WriteLine($"======== Model Id: {modelId} ========");
+
+ await kernel.InvokePromptAsync(ChatPrompt, new(new PromptExecutionSettings { ModelId = modelId }));
+ }
}
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs
index a9ab68aa6281..9534cac09a63 100644
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionMultipleChoices.cs
@@ -1,60 +1,133 @@
// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
namespace ChatCompletion;
-// The following example shows how to use Semantic Kernel with streaming Multiple Results Chat Completion.
+///
+/// The following example shows how to use Semantic Kernel with multiple chat completion results.
+///
public class OpenAI_ChatCompletionMultipleChoices(ITestOutputHelper output) : BaseTest(output)
{
+ ///
+ /// Example with multiple chat completion results using the Kernel.
+ ///
[Fact]
- public Task AzureOpenAIMultiChatCompletionAsync()
+ public async Task MultipleChatCompletionResultsUsingKernelAsync()
{
- Console.WriteLine("======== Azure OpenAI - Multiple Chat Completion ========");
+ var kernel = Kernel
+ .CreateBuilder()
+ .AddOpenAIChatCompletion(
+ modelId: TestConfiguration.OpenAI.ChatModelId,
+ apiKey: TestConfiguration.OpenAI.ApiKey)
+ .Build();
- var chatCompletionService = new AzureOpenAIChatCompletionService(
- deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
- endpoint: TestConfiguration.AzureOpenAI.Endpoint,
- apiKey: TestConfiguration.AzureOpenAI.ApiKey,
- modelId: TestConfiguration.AzureOpenAI.ChatModelId);
+ // Execution settings with configured ResultsPerPrompt property.
+ var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 };
- return ChatCompletionAsync(chatCompletionService);
+ var contents = await kernel.InvokePromptAsync<IReadOnlyList<ChatMessageContent>>("Write a paragraph about why AI is awesome", new(executionSettings));
+
+ foreach (var content in contents!)
+ {
+ Console.Write(content.ToString() ?? string.Empty);
+ Console.WriteLine("\n-------------\n");
+ }
}
+ ///
+ /// Example with multiple chat completion results using the IChatCompletionService.
+ ///
[Fact]
- public Task OpenAIMultiChatCompletionAsync()
+ public async Task MultipleChatCompletionResultsUsingChatCompletionServiceAsync()
{
- Console.WriteLine("======== Open AI - Multiple Chat Completion ========");
+ var kernel = Kernel
+ .CreateBuilder()
+ .AddOpenAIChatCompletion(
+ modelId: TestConfiguration.OpenAI.ChatModelId,
+ apiKey: TestConfiguration.OpenAI.ApiKey)
+ .Build();
+
+ // Execution settings with configured ResultsPerPrompt property.
+ var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 };
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("Write a paragraph about why AI is awesome");
- var chatCompletionService = new OpenAIChatCompletionService(
- TestConfiguration.OpenAI.ChatModelId,
- TestConfiguration.OpenAI.ApiKey);
+ var chatCompletionService = kernel.GetRequiredService<IChatCompletionService>();
- return ChatCompletionAsync(chatCompletionService);
+ foreach (var chatMessageContent in await chatCompletionService.GetChatMessageContentsAsync(chatHistory, executionSettings))
+ {
+ Console.Write(chatMessageContent.Content ?? string.Empty);
+ Console.WriteLine("\n-------------\n");
+ }
}
- private async Task ChatCompletionAsync(IChatCompletionService chatCompletionService)
+ ///
+ /// This example shows how to handle multiple results when a prompt template contains a call to another prompt function.
+ /// An IFunctionInvocationFilter is used for result selection.
+ ///
+ [Fact]
+ public async Task MultipleChatCompletionResultsInPromptTemplateAsync()
{
- var executionSettings = new OpenAIPromptExecutionSettings()
- {
- MaxTokens = 200,
- FrequencyPenalty = 0,
- PresencePenalty = 0,
- Temperature = 1,
- TopP = 0.5,
- ResultsPerPrompt = 2,
- };
+ var kernel = Kernel
+ .CreateBuilder()
+ .AddOpenAIChatCompletion(
+ modelId: TestConfiguration.OpenAI.ChatModelId,
+ apiKey: TestConfiguration.OpenAI.ApiKey)
+ .Build();
- var chatHistory = new ChatHistory();
- chatHistory.AddUserMessage("Write one paragraph about why AI is awesome");
+ var executionSettings = new OpenAIPromptExecutionSettings { MaxTokens = 200, ResultsPerPrompt = 3 };
+
+ // Initializing a function with execution settings for multiple results.
+ // We ask AI to write one paragraph, but in execution settings we specified that we want 3 different results for this request.
+ var function = KernelFunctionFactory.CreateFromPrompt("Write a paragraph about why AI is awesome", executionSettings, "GetParagraph");
+ var plugin = KernelPluginFactory.CreateFromFunctions("MyPlugin", [function]);
- foreach (var chatMessageChoice in await chatCompletionService.GetChatMessageContentsAsync(chatHistory, executionSettings))
+ kernel.Plugins.Add(plugin);
+
+ // Add function result selection filter.
+ kernel.FunctionInvocationFilters.Add(new FunctionResultSelectionFilter(this.Output));
+
+ // Inside our main request, we call the MyPlugin.GetParagraph function to produce the text to summarize.
+ // Since MyPlugin.GetParagraph produces 3 results, only one of them can be used for summarization.
+ // The registered filter is invoked during execution; it selects and returns a single result, which is then inserted into the main request for summarization.
+ var result = await kernel.InvokePromptAsync("Summarize this text: {{MyPlugin.GetParagraph}}");
+
+ // It's possible to check what prompt was rendered for our main request.
+ Console.WriteLine($"Rendered prompt: '{result.RenderedPrompt}'");
+
+ // Output:
+ // Rendered prompt: 'Summarize this text: AI is awesome because...'
+ }
+
+ ///
+ /// Example of a filter that is responsible for result selection when a function produces multiple results.
+ ///
+ private sealed class FunctionResultSelectionFilter(ITestOutputHelper output) : IFunctionInvocationFilter
+ {
+ public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func next)
{
- Console.Write(chatMessageChoice.Content ?? string.Empty);
- Console.WriteLine("\n-------------\n");
- }
+ await next(context);
+
+ // Selection logic for function which is expected to produce multiple results.
+ if (context.Function.Name == "GetParagraph")
+ {
+ // Get multiple results from function invocation
+ var contents = context.Result.GetValue<IReadOnlyList<ChatMessageContent>>()!;
- Console.WriteLine();
+ output.WriteLine("Multiple results:");
+
+ foreach (var content in contents)
+ {
+ output.WriteLine(content.ToString());
+ }
+
+ // Select first result for correct prompt rendering
+ var selectedContent = contents[0];
+ context.Result = new FunctionResult(context.Function, selectedContent, context.Kernel.Culture, selectedContent.Metadata);
+ }
+ }
}
}
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
index bb33ebb51cab..4836dcf03d9f 100644
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
@@ -1,27 +1,36 @@
// Copyright (c) Microsoft. All rights reserved.
+using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;
namespace ChatCompletion;
-// The following example shows how to use Semantic Kernel with streaming Chat Completion
+///
+/// These examples demonstrate the ways different content types are streamed by OpenAI LLM via the chat completion service.
+///
public class OpenAI_ChatCompletionStreaming(ITestOutputHelper output) : BaseTest(output)
{
+ ///
+ /// This example demonstrates chat completion streaming using OpenAI.
+ ///
[Fact]
- public Task OpenAIChatStreamSampleAsync()
+ public Task StreamOpenAIChatAsync()
{
- Console.WriteLine("======== Open AI - ChatGPT Streaming ========");
+ Console.WriteLine("======== Open AI Chat Completion Streaming ========");
OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);
return this.StartStreamingChatAsync(chatCompletionService);
}
+ ///
+ /// This example demonstrates chat completion streaming using Azure OpenAI.
+ ///
[Fact]
- public Task AzureOpenAIChatStreamSampleAsync()
+ public Task StreamAzureOpenAIChatAsync()
{
- Console.WriteLine("======== Azure Open AI - ChatGPT Streaming ========");
+ Console.WriteLine("======== Azure Open AI Chat Completion Streaming ========");
AzureOpenAIChatCompletionService chatCompletionService = new(
deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
@@ -32,24 +41,98 @@ public Task AzureOpenAIChatStreamSampleAsync()
return this.StartStreamingChatAsync(chatCompletionService);
}
+ ///
+ /// This example demonstrates how the chat completion service streams text content.
+ /// It shows how to access the response update via StreamingChatMessageContent.Content property
+ /// and alternatively via the StreamingChatMessageContent.Items property.
+ ///
+ [Fact]
+ public async Task StreamTextContentAsync()
+ {
+ Console.WriteLine("======== Stream Text Content ========");
+
+ // Create chat completion service
+ AzureOpenAIChatCompletionService chatCompletionService = new(
+ deploymentName: TestConfiguration.AzureOpenAI.ChatDeploymentName,
+ endpoint: TestConfiguration.AzureOpenAI.Endpoint,
+ apiKey: TestConfiguration.AzureOpenAI.ApiKey,
+ modelId: TestConfiguration.AzureOpenAI.ChatModelId);
+
+ // Create chat history with initial system and user messages
+ ChatHistory chatHistory = new("You are a librarian, an expert on books.");
+ chatHistory.AddUserMessage("Hi, I'm looking for book suggestions.");
+ chatHistory.AddUserMessage("I love history and philosophy. I'd like to learn something new about Greece, any suggestion?");
+
+ // Start streaming chat based on the chat history
+ await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory))
+ {
+ // Access the response update via StreamingChatMessageContent.Content property
+ Console.Write(chatUpdate.Content);
+
+ // Alternatively, the response update can be accessed via the StreamingChatMessageContent.Items property
+ Console.Write(chatUpdate.Items.OfType<StreamingTextContent>().FirstOrDefault());
+ }
+ }
+
+ ///
+ /// This example demonstrates how the chat completion service streams raw function call content.
+ /// See the FunctionCalling samples for a sample demonstrating how to simplify
+ /// function call content building out of streamed function call updates using the FunctionCallContentBuilder.
+ ///
+ [Fact]
+ public async Task StreamFunctionCallContentAsync()
+ {
+ Console.WriteLine("======== Stream Function Call Content ========");
+
+ // Create chat completion service
+ OpenAIChatCompletionService chatCompletionService = new(TestConfiguration.OpenAI.ChatModelId, TestConfiguration.OpenAI.ApiKey);
+
+ // Create kernel with helper plugin.
+ Kernel kernel = new();
+ kernel.ImportPluginFromFunctions("HelperFunctions",
+ [
+ kernel.CreateFunctionFromMethod((string longTestString) => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."),
+ ]);
+
+ // Create execution settings with manual function calling
+ OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+
+ // Create chat history with initial user question
+ ChatHistory chatHistory = new();
+ chatHistory.AddUserMessage("Hi, what is the current time?");
+
+ // Start streaming chat based on the chat history
+ await foreach (StreamingChatMessageContent chatUpdate in chatCompletionService.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
+ {
+ // Getting list of function call updates requested by LLM
+ var streamingFunctionCallUpdates = chatUpdate.Items.OfType<StreamingFunctionCallUpdateContent>();
+
+ // Iterating over function call updates. Please use the FunctionCallContentBuilder to simplify function call content building.
+ foreach (StreamingFunctionCallUpdateContent update in streamingFunctionCallUpdates)
+ {
+ Console.WriteLine($"Function call update: callId={update.CallId}, name={update.Name}, arguments={update.Arguments?.Replace("\n", "\\n")}, functionCallIndex={update.FunctionCallIndex}");
+ }
+ }
+ }
+
private async Task StartStreamingChatAsync(IChatCompletionService chatCompletionService)
{
Console.WriteLine("Chat content:");
Console.WriteLine("------------------------");
var chatHistory = new ChatHistory("You are a librarian, expert about books");
- await MessageOutputAsync(chatHistory);
+ OutputLastMessage(chatHistory);
// First user message
chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
- await MessageOutputAsync(chatHistory);
+ OutputLastMessage(chatHistory);
// First bot assistant message
await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);
// Second user message
chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?");
- await MessageOutputAsync(chatHistory);
+ OutputLastMessage(chatHistory);
// Second bot assistant message
await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);
@@ -82,13 +165,11 @@ private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletio
///
/// Outputs the last message of the chat history
///
- private Task MessageOutputAsync(ChatHistory chatHistory)
+ private void OutputLastMessage(ChatHistory chatHistory)
{
var message = chatHistory.Last();
Console.WriteLine($"{message.Role}: {message.Content}");
Console.WriteLine("------------------------");
-
- return Task.CompletedTask;
}
}
diff --git a/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs b/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs
index 7d149b038b4a..1e56b8f36878 100644
--- a/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs
+++ b/dotnet/samples/Concepts/Filtering/AutoFunctionInvocationFiltering.cs
@@ -8,6 +8,9 @@ namespace Filtering;
public class AutoFunctionInvocationFiltering(ITestOutputHelper output) : BaseTest(output)
{
+ ///
+ /// Shows how to use an IAutoFunctionInvocationFilter.
+ ///
[Fact]
public async Task AutoFunctionInvocationFilterAsync()
{
@@ -16,7 +19,7 @@ public async Task AutoFunctionInvocationFilterAsync()
builder.AddOpenAIChatCompletion("gpt-4", TestConfiguration.OpenAI.ApiKey);
// This filter outputs information about auto function invocation and returns overridden result.
- builder.Services.AddSingleton(new AutoFunctionInvocationFilterExample(this.Output));
+ builder.Services.AddSingleton(new AutoFunctionInvocationFilter(this.Output));
var kernel = builder.Build();
@@ -40,11 +43,56 @@ public async Task AutoFunctionInvocationFilterAsync()
// Result from auto function invocation filter.
}
- /// Shows syntax for auto function invocation filter.
- private sealed class AutoFunctionInvocationFilterExample(ITestOutputHelper output) : IAutoFunctionInvocationFilter
+ ///
+ /// Shows how to get the list of function calls by using an IAutoFunctionInvocationFilter.
+ ///
+ [Fact]
+ public async Task GetFunctionCallsWithFilterAsync()
{
- private readonly ITestOutputHelper _output = output;
+ var builder = Kernel.CreateBuilder();
+
+ builder.AddOpenAIChatCompletion("gpt-3.5-turbo-1106", TestConfiguration.OpenAI.ApiKey);
+
+ builder.Services.AddSingleton(new FunctionCallsFilter(this.Output));
+
+ var kernel = builder.Build();
+
+ kernel.ImportPluginFromFunctions("HelperFunctions",
+ [
+ kernel.CreateFunctionFromMethod(() => DateTime.UtcNow.ToString("R"), "GetCurrentUtcTime", "Retrieves the current time in UTC."),
+ kernel.CreateFunctionFromMethod((string cityName) =>
+ cityName switch
+ {
+ "Boston" => "61 and rainy",
+ "London" => "55 and cloudy",
+ "Miami" => "80 and sunny",
+ "Paris" => "60 and rainy",
+ "Tokyo" => "50 and sunny",
+ "Sydney" => "75 and sunny",
+ "Tel Aviv" => "80 and sunny",
+ _ => "31 and snowing",
+ }, "GetWeatherForCity", "Gets the current weather for the specified city"),
+ ]);
+
+ var executionSettings = new OpenAIPromptExecutionSettings
+ {
+ ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions
+ };
+
+ await foreach (var chunk in kernel.InvokePromptStreamingAsync("Check current UTC time and return current weather in Boston city.", new(executionSettings)))
+ {
+ Console.WriteLine(chunk.ToString());
+ }
+ // Output:
+ // Request #0. Function call: HelperFunctions.GetCurrentUtcTime.
+ // Request #0. Function call: HelperFunctions.GetWeatherForCity.
+ // The current UTC time is {time of execution}, and the current weather in Boston is 61°F and rainy.
+ }
+
+ /// Shows available syntax for auto function invocation filter.
+ private sealed class AutoFunctionInvocationFilter(ITestOutputHelper output) : IAutoFunctionInvocationFilter
+ {
public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func next)
{
// Example: get function information
@@ -56,14 +104,31 @@ public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext co
// Example: get information about all functions which will be invoked
var functionCalls = FunctionCallContent.GetFunctionCalls(context.ChatHistory.Last());
+ // Function calling involves two loops.
+ // The outer loop is the "request" loop - it sends multiple requests to the LLM until the user's ask is satisfied.
+ // The inner loop is the "function" loop - it handles an LLM response that contains multiple function calls.
+
+ // Workflow example:
+ // 1. Request to LLM #1 -> Response with 3 functions to call.
+ // 1.1. Function #1 called.
+ // 1.2. Function #2 called.
+ // 1.3. Function #3 called.
+ // 2. Request to LLM #2 -> Response with 2 functions to call.
+ // 2.1. Function #1 called.
+ // 2.2. Function #2 called.
+
+ // context.RequestSequenceIndex - the sequence number of the outer/request loop iteration.
+ // context.FunctionSequenceIndex - the sequence number of the inner/function loop iteration.
+ // context.FunctionCount - the number of functions to be called per request (in the example above: 3 for the first request, 2 for the second).
+
// Example: get request sequence index
- this._output.WriteLine($"Request sequence index: {context.RequestSequenceIndex}");
+ output.WriteLine($"Request sequence index: {context.RequestSequenceIndex}");
// Example: get function sequence index
- this._output.WriteLine($"Function sequence index: {context.FunctionSequenceIndex}");
+ output.WriteLine($"Function sequence index: {context.FunctionSequenceIndex}");
// Example: get total number of functions which will be called
- this._output.WriteLine($"Total number of functions: {context.FunctionCount}");
+ output.WriteLine($"Total number of functions: {context.FunctionCount}");
// Calling the next filter in the pipeline or the function itself.
// By skipping this call, the remaining filters and the function won't be invoked, and the function calling loop will proceed to the next function.
@@ -79,4 +144,24 @@ public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext co
context.Terminate = true;
}
}
+
+ /// <summary>Shows how to get a list of all function calls per request.</summary>
+ private sealed class FunctionCallsFilter(ITestOutputHelper output) : IAutoFunctionInvocationFilter
+ {
+ public async Task OnAutoFunctionInvocationAsync(AutoFunctionInvocationContext context, Func<AutoFunctionInvocationContext, Task> next)
+ {
+ var chatHistory = context.ChatHistory;
+ var functionCalls = FunctionCallContent.GetFunctionCalls(chatHistory.Last()).ToArray();
+
+ if (functionCalls is { Length: > 0 })
+ {
+ foreach (var functionCall in functionCalls)
+ {
+ output.WriteLine($"Request #{context.RequestSequenceIndex}. Function call: {functionCall.PluginName}.{functionCall.FunctionName}.");
+ }
+ }
+
+ await next(context);
+ }
+ }
}
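The filter comments above describe how context.RequestSequenceIndex, context.FunctionSequenceIndex and context.FunctionCount relate to the outer "request" loop and the inner "function" loop. The following minimal, standalone C# sketch (not part of the diff and with no Semantic Kernel dependency) walks the two-request workflow from those comments and prints the values the three properties would take; the per-request function counts and all names are illustrative assumptions only.

    // Standalone sketch of the request/function indexing scheme described in the filter comments above.
    using System;

    internal static class SequenceIndexSketch
    {
        private static void Main()
        {
            // Hypothetical workflow from the comments: request #0 triggers 3 function calls, request #1 triggers 2.
            int[] functionCountsPerRequest = new[] { 3, 2 };

            for (int requestIndex = 0; requestIndex < functionCountsPerRequest.Length; requestIndex++)
            {
                int functionCount = functionCountsPerRequest[requestIndex];

                for (int functionIndex = 0; functionIndex < functionCount; functionIndex++)
                {
                    // Corresponds to context.RequestSequenceIndex, context.FunctionSequenceIndex and context.FunctionCount.
                    Console.WriteLine($"Request: {requestIndex}, Function: {functionIndex}, FunctionCount: {functionCount}");
                }
            }
        }
    }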
diff --git a/dotnet/samples/Concepts/FunctionCalling/Gemini_FunctionCalling.cs b/dotnet/samples/Concepts/FunctionCalling/Gemini_FunctionCalling.cs
index 0a03b5daecfc..33784679a886 100644
--- a/dotnet/samples/Concepts/FunctionCalling/Gemini_FunctionCalling.cs
+++ b/dotnet/samples/Concepts/FunctionCalling/Gemini_FunctionCalling.cs
@@ -5,7 +5,7 @@
using Microsoft.SemanticKernel.Connectors.Google;
using xRetry;
-namespace AutoFunctionCalling;
+namespace FunctionCalling;
/// <summary>
/// These examples demonstrate two ways functions called by the Gemini LLM can be invoked using the SK streaming and non-streaming AI API:
@@ -18,10 +18,10 @@ namespace AutoFunctionCalling;
///
/// 2. Manual Invocation by a Caller:
/// Functions called by the LLM are returned to the AI API caller. The caller controls the invocation phase where
-/// they may decide which function to call, when to call them, how to handle exceptions, etc. The caller then
-/// adds the function results or exceptions to the chat history and returns it to the LLM, which reasons about it
+/// they may decide which function to call, when to call them, how to handle exceptions, call them in parallel or sequentially, etc.
+/// The caller then adds the function results or exceptions to the chat history and returns it to the LLM, which reasons about it
/// and generates the final response.
-/// This approach is more manual and requires more manual intervention from the caller.
+/// This approach is manual and provides more control over the function invocation phase to the caller.
/// </summary>
public sealed class Gemini_FunctionCalling(ITestOutputHelper output) : BaseTest(output)
{
diff --git a/dotnet/samples/Concepts/FunctionCalling/OpenAI_FunctionCalling.cs b/dotnet/samples/Concepts/FunctionCalling/OpenAI_FunctionCalling.cs
index 506239d55323..1b817fbc60fe 100644
--- a/dotnet/samples/Concepts/FunctionCalling/OpenAI_FunctionCalling.cs
+++ b/dotnet/samples/Concepts/FunctionCalling/OpenAI_FunctionCalling.cs
@@ -20,10 +20,10 @@ namespace FunctionCalling;
///
/// 2. Manual Invocation by a Caller:
/// Functions called by the LLM are returned to the AI API caller. The caller controls the invocation phase where
-/// they may decide which function to call, when to call them, how to handle exceptions, etc. The caller then
-/// adds the function results or exceptions to the chat history and returns it to the LLM, which reasons about it
+/// they may decide which function to call, when to call them, how to handle exceptions, call them in parallel or sequentially, etc.
+/// The caller then adds the function results or exceptions to the chat history and returns it to the LLM, which reasons about it
/// and generates the final response.
-/// This approach is more manual and requires more manual intervention from the caller.
+/// This approach is manual and provides more control over the function invocation phase to the caller.
/// </summary>
public class OpenAI_FunctionCalling(ITestOutputHelper output) : BaseTest(output)
{
@@ -61,54 +61,127 @@ public async Task RunStreamingPromptAutoFunctionCallingAsync()
}
/// <summary>
- /// This example demonstrates manual function calling with a non-streaming prompt.
+ /// This example demonstrates manual function calling with a non-streaming chat API.
/// </summary>
[Fact]
- public async Task RunNonStreamingPromptWithManualFunctionCallingAsync()
+ public async Task RunNonStreamingChatAPIWithManualFunctionCallingAsync()
{
Console.WriteLine("Manual function calling with a non-streaming prompt.");
+ // Create kernel and chat service
Kernel kernel = CreateKernel();
IChatCompletionService chat = kernel.GetRequiredService<IChatCompletionService>();
+ // Configure the chat service to enable manual function calling
OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+ // Create chat history with the initial user message
ChatHistory chatHistory = new();
chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");
while (true)
{
+ // Start or continue chat based on the chat history
ChatMessageContent result = await chat.GetChatMessageContentAsync(chatHistory, settings, kernel);
if (result.Content is not null)
{
Console.Write(result.Content);
}
+ // Get function calls from the chat message content and quit the chat loop if no function calls are found.
IEnumerable<FunctionCallContent> functionCalls = FunctionCallContent.GetFunctionCalls(result);
if (!functionCalls.Any())
{
break;
}
- chatHistory.Add(result); // Adding LLM response containing function calls(requests) to chat history as it's required by LLMs.
+ // Preserving the original chat message content with function calls in the chat history.
+ chatHistory.Add(result);
+ // Iterating over the requested function calls and invoking them
foreach (FunctionCallContent functionCall in functionCalls)
{
try
{
- FunctionResultContent resultContent = await functionCall.InvokeAsync(kernel); // Executing each function.
+ // Invoking the function
+ FunctionResultContent resultContent = await functionCall.InvokeAsync(kernel);
+ // Adding the function result to the chat history
chatHistory.Add(resultContent.ToChatMessage());
}
catch (Exception ex)
{
- chatHistory.Add(new FunctionResultContent(functionCall, ex).ToChatMessage()); // Adding function result to chat history.
- // Adding exception to chat history.
+ // Adding function exception to the chat history.
+ chatHistory.Add(new FunctionResultContent(functionCall, ex).ToChatMessage());
// or
- //string message = "Error details that LLM can reason about.";
- //chatHistory.Add(new FunctionResultContent(functionCall, message).ToChatMessageContent()); // Adding function result to chat history.
+ //chatHistory.Add(new FunctionResultContent(functionCall, "Error details that LLM can reason about.").ToChatMessage());
+ }
+ }
+
+ Console.WriteLine();
+ }
+ }
+
+ /// <summary>
+ /// This example demonstrates manual function calling with a streaming chat API.
+ /// </summary>
+ [Fact]
+ public async Task RunStreamingChatAPIWithManualFunctionCallingAsync()
+ {
+ Console.WriteLine("Manual function calling with a streaming prompt.");
+
+ // Create kernel and chat service
+ Kernel kernel = CreateKernel();
+
+ IChatCompletionService chat = kernel.GetRequiredService<IChatCompletionService>();
+
+ // Configure the chat service to enable manual function calling
+ OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+
+ // Create chat history with the initial user message
+ ChatHistory chatHistory = new();
+ chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");
+
+ while (true)
+ {
+ AuthorRole? authorRole = null;
+ var fccBuilder = new FunctionCallContentBuilder();
+
+ // Start or continue streaming chat based on the chat history
+ await foreach (var streamingContent in chat.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
+ {
+ if (streamingContent.Content is not null)
+ {
+ Console.Write(streamingContent.Content);
}
+ authorRole ??= streamingContent.Role;
+ fccBuilder.Append(streamingContent);
+ }
+
+ // Build the function calls from the streaming content and quit the chat loop if no function calls are found
+ var functionCalls = fccBuilder.Build();
+ if (!functionCalls.Any())
+ {
+ break;
+ }
+
+ // Creating and adding chat message content to preserve the original function calls in the chat history.
+ // The function calls are added to the chat message a few lines below.
+ var fcContent = new ChatMessageContent(role: authorRole ?? default, content: null);
+ chatHistory.Add(fcContent);
+
+ // Iterating over the requested function calls and invoking them
+ foreach (var functionCall in functionCalls)
+ {
+ // Adding the original function call to the chat message content
+ fcContent.Items.Add(functionCall);
+
+ // Invoking the function
+ var functionResult = await functionCall.InvokeAsync(kernel);
+
+ // Adding the function result to the chat history
+ chatHistory.Add(functionResult.ToChatMessage());
}
Console.WriteLine();
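Across these function-calling samples, the switch between automatic and manual invocation is the ToolCallBehavior value on OpenAIPromptExecutionSettings: AutoInvokeKernelFunctions lets Semantic Kernel invoke the requested functions itself, while EnableKernelFunctions only advertises them to the LLM and leaves invocation to the caller loops shown above. The short sketch below simply places the two settings side by side; the helper class, method name and using directive are illustrative assumptions, not part of the diff.

    // Sketch: the two execution-setting modes used by the samples above.
    using Microsoft.SemanticKernel.Connectors.OpenAI;

    internal static class ToolCallBehaviorSketch
    {
        public static (OpenAIPromptExecutionSettings Automatic, OpenAIPromptExecutionSettings Manual) Create()
        {
            // Automatic: SK invokes the functions requested by the LLM and keeps looping until a final answer is produced.
            var automatic = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };

            // Manual: function calls are returned to the caller, which invokes them and appends the results to the chat history.
            var manual = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };

            return (automatic, manual);
        }
    }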
diff --git a/dotnet/samples/Concepts/Optimization/FrugalGPT.cs b/dotnet/samples/Concepts/Optimization/FrugalGPT.cs
new file mode 100644
index 000000000000..f5ede1764789
--- /dev/null
+++ b/dotnet/samples/Concepts/Optimization/FrugalGPT.cs
@@ -0,0 +1,308 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Runtime.CompilerServices;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Embeddings;
+using Microsoft.SemanticKernel.Memory;
+using Microsoft.SemanticKernel.PromptTemplates.Handlebars;
+using Microsoft.SemanticKernel.Services;
+
+namespace Optimization;
+
+///
+/// This example shows how to use FrugalGPT techniques to reduce cost and improve LLM-related task performance.
+/// More information here: https://arxiv.org/abs/2305.05176.
+///
+public sealed class FrugalGPT(ITestOutputHelper output) : BaseTest(output)
+{
+ /// <summary>
+ /// One of the FrugalGPT techniques is to reduce prompt size when using few-shot prompts.
+ /// If a prompt contains a lot of examples to help the LLM produce the best result, it's possible to send only a couple of them to reduce the number of tokens.
+ /// Vector similarity can be used to pick the examples from the example set that best match the specific request.
+ /// The following example shows how to optimize an email classification request by reducing prompt size with vector similarity search.
+ /// </summary>
+ [Fact]
+ public async Task ReducePromptSizeAsync()
+ {
+ // Define email classification examples with email body and labels.
+ var examples = new List<string>
+ {
+ "Hey, just checking in to see how you're doing! - Personal",
+ "Can you pick up some groceries on your way back home? We need milk and bread. - Personal, Tasks",
+ "Happy Birthday! Wishing you a fantastic day filled with love and joy. - Personal",
+ "Let's catch up over coffee this Saturday. It's been too long! - Personal, Events",
+ "Please review the attached document and provide your feedback by EOD. - Work",
+ "Our team meeting is scheduled for 10 AM tomorrow in the main conference room. - Work",
+ "The quarterly financial report is due next Monday. Ensure all data is updated. - Work, Tasks",
+ "Can you send me the latest version of the project plan? Thanks! - Work",
+ "You're invited to our annual summer picnic! RSVP by June 25th. - Events",
+ "Join us for a webinar on digital marketing trends this Thursday at 3 PM. - Events",
+ "Save the date for our charity gala on September 15th. We hope to see you there! - Events",
+ "Don't miss our customer appreciation event next week. Sign up now! - Events, Notifications",
+ "Your order has been shipped and will arrive by June 20th. - Notifications",
+ "We've updated our policies. Please review the changes. - Notifications",
+ "Your username was successfully changed. If this wasn't you, contact support immediately. - Notifications",
+ "The system upgrade will occur this weekend. - Notifications, Work",
+ "Don't forget to submit your timesheet by 5 PM today. - Tasks, Work",
+ "Pick up the dry cleaning before they close at 7 PM. - Tasks",
+ "Complete the online training module by the end of the week. - Tasks, Work",
+ "Send out the meeting invites for next week's project kickoff. - Tasks, Work"
+ };
+
+ // Initialize kernel with chat completion and embedding generation services.
+ // It's possible to combine different models from different AI providers to achieve the lowest token usage.
+ var kernel = Kernel.CreateBuilder()
+ .AddOpenAIChatCompletion(
+ modelId: "gpt-4",
+ apiKey: TestConfiguration.OpenAI.ApiKey)
+ .AddOpenAITextEmbeddingGeneration(
+ modelId: "text-embedding-3-small",
+ apiKey: TestConfiguration.OpenAI.ApiKey)
+ .Build();
+
+ // Initialize few-shot prompt.
+ var function = kernel.CreateFunctionFromPrompt(
+ new()
+ {
+ Template =
+ """
+ Available classification labels: Personal, Work, Events, Notifications, Tasks
+ Email classification examples:
+ {{#each Examples}}
+ {{this}}
+ {{/each}}
+
+ Email body to classify:
+ {{Request}}
+ """,
+ TemplateFormat = "handlebars"
+ },
+ new HandlebarsPromptTemplateFactory()
+ );
+
+ // Define arguments with few-shot examples and actual email for classification.
+ var arguments = new KernelArguments
+ {
+ ["Examples"] = examples,
+ ["Request"] = "Your dentist appointment is tomorrow at 10 AM. Please remember to bring your insurance card."
+ };
+
+ // Invoke defined function to see initial result.
+ var result = await kernel.InvokeAsync(function, arguments);
+
+ Console.WriteLine(result); // Personal, Notifications
+ Console.WriteLine(result.Metadata?["Usage"]?.AsJson()); // Total tokens: ~430
+
+ // Add few-shot prompt optimization filter.
+ // The filter uses an in-memory store for vector similarity search and a text embedding generation service to generate embeddings.
+ var memoryStore = new VolatileMemoryStore();
+ var textEmbeddingGenerationService = kernel.GetRequiredService<ITextEmbeddingGenerationService>();
+
+ // Register optimization filter.
+ kernel.PromptRenderFilters.Add(new FewShotPromptOptimizationFilter(memoryStore, textEmbeddingGenerationService));
+
+ // Get result again and compare the usage.
+ result = await kernel.InvokeAsync(function, arguments);
+
+ Console.WriteLine(result); // Personal, Notifications
+ Console.WriteLine(result.Metadata?["Usage"]?.AsJson()); // Total tokens: ~150
+ }
+
+ /// <summary>
+ /// The LLM cascade technique uses multiple LLMs sequentially, starting with the cheapest model.
+ /// Each result is evaluated and returned if it meets the quality criteria; otherwise, the next LLM in the queue is used,
+ /// until an acceptable result is produced.
+ /// The following example uses mock result generation and evaluation for demonstration purposes.
+ /// Result evaluation examples including BERTScore, BLEU, METEOR and COMET metrics can be found here:
+ /// https://github.com/microsoft/semantic-kernel/tree/main/dotnet/samples/Demos/QualityCheck.
+ /// </summary>
+ [Fact]
+ public async Task LLMCascadeAsync()
+ {
+ // Create kernel builder.
+ var builder = Kernel.CreateBuilder();
+
+ // Register chat completion services for demonstration purposes.
+ // This registration is similar to AddAzureOpenAIChatCompletion and AddOpenAIChatCompletion methods.
+ builder.Services.AddSingleton(new MockChatCompletionService("model1", "Hi there! I'm doing well, thank you! How about yourself?"));
+ builder.Services.AddSingleton(new MockChatCompletionService("model2", "Hello! I'm great, thanks for asking. How are you doing today?"));
+ builder.Services.AddSingleton(new MockChatCompletionService("model3", "Hey! I'm fine, thanks. How's your day going so far?"));
+
+ // Register the LLM cascade filter with the model execution order, the acceptance criteria for the result and the service for output.
+ // In real use cases, the execution order should go from cheaper to more expensive models.
+ // If the first model produces an acceptable result, it will be returned immediately.
+ builder.Services.AddSingleton(new LLMCascadeFilter(
+ modelExecutionOrder: ["model1", "model2", "model3"],
+ acceptanceCriteria: result => result.Contains("Hey!"),
+ output: this.Output));
+
+ // Build kernel.
+ var kernel = builder.Build();
+
+ // Send a request.
+ var result = await kernel.InvokePromptAsync("Hi, how are you today?");
+
+ Console.WriteLine($"\nFinal result: {result}");
+
+ // Output:
+ // Executing request with model: model1
+ // Result from model1: Hi there! I'm doing well, thank you! How about yourself?
+ // Result does not meet the acceptance criteria, moving to the next model.
+
+ // Executing request with model: model2
+ // Result from model2: Hello! I'm great, thanks for asking. How are you doing today?
+ // Result does not meet the acceptance criteria, moving to the next model.
+
+ // Executing request with model: model3
+ // Result from model3: Hey! I'm fine, thanks. How's your day going so far?
+ // Returning result as it meets the acceptance criteria.
+
+ // Final result: Hey! I'm fine, thanks. How's your day going so far?
+ }
+
+ /// <summary>
+ /// Few-shot prompt optimization filter which takes all examples from kernel arguments and selects the first <see cref="TopN"/> examples
+ /// that are most similar to the original request.
+ /// </summary>
+ private sealed class FewShotPromptOptimizationFilter(
+ IMemoryStore memoryStore,
+ ITextEmbeddingGenerationService textEmbeddingGenerationService) : IPromptRenderFilter
+ {
+ /// <summary>
+ /// Maximum number of examples to use which are similar to the original request.
+ /// </summary>
+ private const int TopN = 5;
+
+ /// <summary>
+ /// Collection name to use in memory store.
+ /// </summary>
+ private const string CollectionName = "examples";
+
+ public async Task OnPromptRenderAsync(PromptRenderContext context, Func<PromptRenderContext, Task> next)
+ {
+ // Get examples and original request from arguments.
+ var examples = context.Arguments["Examples"] as List<string>;
+ var request = context.Arguments["Request"] as string;
+
+ if (examples is { Count: > 0 } && !string.IsNullOrEmpty(request))
+ {
+ var memoryRecords = new List<MemoryRecord>();
+
+ // Generate embedding for each example.
+ var embeddings = await textEmbeddingGenerationService.GenerateEmbeddingsAsync(examples);
+
+ // Create memory record instances with example text and embedding.
+ for (var i = 0; i < examples.Count; i++)
+ {
+ memoryRecords.Add(MemoryRecord.LocalRecord(Guid.NewGuid().ToString(), examples[i], "description", embeddings[i]));
+ }
+
+ // Create collection and upsert all memory records for search.
+ // It's possible to do it only once and re-use the same examples for future requests.
+ await memoryStore.CreateCollectionAsync(CollectionName);
+ await memoryStore.UpsertBatchAsync(CollectionName, memoryRecords).ToListAsync();
+
+ // Generate embedding for original request.
+ var requestEmbedding = await textEmbeddingGenerationService.GenerateEmbeddingAsync(request);
+
+ // Find top N examples which are similar to original request.
+ var topNExamples = await memoryStore.GetNearestMatchesAsync(CollectionName, requestEmbedding, TopN).ToListAsync();
+
+ // Override arguments to use only top N examples, which will be sent to LLM.
+ context.Arguments["Examples"] = topNExamples.Select(l => l.Item1.Metadata.Text);
+ }
+
+ // Continue prompt rendering operation.
+ await next(context);
+ }
+ }
+
+ /// <summary>
+ /// Example of an LLM cascade filter which invokes a function using multiple LLMs in a specific order
+ /// until the result meets the specified acceptance criteria.
+ /// </summary>
+ private sealed class LLMCascadeFilter(
+ List<string> modelExecutionOrder,
+ Predicate<string> acceptanceCriteria,
+ ITestOutputHelper output) : IFunctionInvocationFilter
+ {
+ public async Task OnFunctionInvocationAsync(FunctionInvocationContext context, Func<FunctionInvocationContext, Task> next)
+ {
+ // Get registered chat completion services from kernel.
+ var registeredServices = context.Kernel
+ .GetAllServices<IChatCompletionService>()
+ .Select(service => (ModelId: service.GetModelId()!, Service: service));
+
+ // Define order of execution.
+ var order = modelExecutionOrder
+ .Select((value, index) => new { Value = value, Index = index })
+ .ToDictionary(k => k.Value, v => v.Index);
+
+ // Sort services by specified order.
+ var orderedServices = registeredServices.OrderBy(service => order[service.ModelId]);
+
+ // Try to invoke a function with each service and check the result.
+ foreach (var service in orderedServices)
+ {
+ // Define execution settings with model ID.
+ context.Arguments.ExecutionSettings = new Dictionary<string, PromptExecutionSettings>
+ {
+ { PromptExecutionSettings.DefaultServiceId, new() { ModelId = service.ModelId } }
+ };
+
+ output.WriteLine($"Executing request with model: {service.ModelId}");
+
+ // Invoke a function.
+ await next(context);
+
+ // Get a result.
+ var result = context.Result.ToString()!;
+
+ output.WriteLine($"Result from {service.ModelId}: {result}");
+
+ // Check if the result meets the specified acceptance criteria.
+ // If so, stop the execution loop so that the last result is returned.
+ if (acceptanceCriteria(result))
+ {
+ output.WriteLine("Returning result as it meets the acceptance criteria.");
+ return;
+ }
+
+ // Otherwise, proceed with next model.
+ output.WriteLine("Result does not meet the acceptance criteria, moving to the next model.\n");
+ }
+
+ // If none of the LLMs returned an acceptable result, the last result will be returned.
+ // It's also possible to throw an exception in such cases if needed.
+ // throw new Exception("Models didn't return a result that meets the acceptance criteria");
+ }
+ }
+
+ /// <summary>
+ /// Mock chat completion service for demonstration purposes.
+ /// </summary>
+ private sealed class MockChatCompletionService(string modelId, string mockResult) : IChatCompletionService
+ {
+ public IReadOnlyDictionary<string, object?> Attributes => new Dictionary<string, object?> { { AIServiceExtensions.ModelIdKey, modelId } };
+
+ public Task<IReadOnlyList<ChatMessageContent>> GetChatMessageContentsAsync(
+ ChatHistory chatHistory,
+ PromptExecutionSettings? executionSettings = null,
+ Kernel? kernel = null,
+ CancellationToken cancellationToken = default)
+ {
+ return Task.FromResult<IReadOnlyList<ChatMessageContent>>([new ChatMessageContent(AuthorRole.Assistant, mockResult)]);
+ }
+
+ public async IAsyncEnumerable<StreamingChatMessageContent> GetStreamingChatMessageContentsAsync(
+ ChatHistory chatHistory,
+ PromptExecutionSettings? executionSettings = null,
+ Kernel? kernel = null,
+ [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ yield return new StreamingChatMessageContent(AuthorRole.Assistant, mockResult);
+ }
+ }
+}
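The ReducePromptSizeAsync sample above reports roughly 430 total tokens when all 20 few-shot examples are sent and roughly 150 tokens once the filter keeps only the top 5 similar examples. The minimal standalone sketch below (not part of FrugalGPT.cs) just turns those two reported numbers into a percentage and an estimated per-request cost saving; the per-1K-token price is a made-up placeholder, so substitute your model's actual pricing.

    // Sketch: quantify the prompt-size reduction reported by ReducePromptSizeAsync.
    using System;

    internal static class FrugalGptSavingsSketch
    {
        private static void Main()
        {
            const int tokensWithAllExamples = 430;  // reported by the sample before the filter is registered
            const int tokensWithTopExamples = 150;  // reported by the sample after the filter is registered
            const decimal assumedPricePer1KTokens = 0.01m; // hypothetical price, for illustration only

            decimal savedTokensPercent = 100m * (tokensWithAllExamples - tokensWithTopExamples) / tokensWithAllExamples;
            decimal savedCostPerRequest = (tokensWithAllExamples - tokensWithTopExamples) / 1000m * assumedPricePer1KTokens;

            Console.WriteLine($"Token reduction: {savedTokensPercent:F0}% per request");
            Console.WriteLine($"Cost reduction at the assumed price: {savedCostPerRequest:C4} per request");
        }
    }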
diff --git a/dotnet/samples/Concepts/Plugins/OpenAIPlugins.cs b/dotnet/samples/Concepts/Plugins/OpenAIPlugins.cs
index 7608bfd7b08f..77846b0d5290 100644
--- a/dotnet/samples/Concepts/Plugins/OpenAIPlugins.cs
+++ b/dotnet/samples/Concepts/Plugins/OpenAIPlugins.cs
@@ -8,10 +8,13 @@ namespace Plugins;
public class OpenAIPlugins(ITestOutputHelper output) : BaseTest(output)
{
/// <summary>
- /// Generic template on how to call OpenAI plugins
+ /// This sample shows how to invoke an OpenAI plugin.
/// </summary>
+ /// <remarks>
+ /// You must provide the plugin name and a URI to the Open API manifest before running this sample.
+ /// </remarks>
[Fact(Skip = "Run it only after filling the template below")]
- public async Task RunOpenAIPluginAsync()
+ public async Task InvokeOpenAIPluginAsync()
{
Kernel kernel = new();
@@ -32,8 +35,11 @@ public async Task RunOpenAIPluginAsync()
Console.WriteLine($"Function execution result: {result?.Content}");
}
+ /// <summary>
+ /// This sample shows how to invoke the Klarna Get Products function as an OpenAPI plugin.
+ /// </summary>
[Fact]
- public async Task CallKlarnaAsync()
+ public async Task InvokeKlarnaGetProductsAsOpenAPIPluginAsync()
{
Kernel kernel = new();
@@ -54,4 +60,63 @@ public async Task CallKlarnaAsync()
Console.WriteLine($"Function execution result: {result?.Content}");
}
+
+ /// <summary>
+ /// This sample shows how to use a delegating handler when invoking an OpenAPI function.
+ /// </summary>
+ /// <remarks>
+ /// An instance of <see cref="OpenApiKernelFunctionContext"/> will be set in the `HttpRequestMessage.Options` (for .NET 5.0 or higher) or
+ /// in the `HttpRequestMessage.Properties` dictionary (for .NET Standard) with the key `KernelFunctionContextKey`.
+ /// The <see cref="OpenApiKernelFunctionContext"/> contains the <see cref="Kernel"/>, <see cref="KernelFunction"/> and <see cref="KernelArguments"/>.
+ /// </remarks>
+ [Fact]
+ public async Task UseDelegatingHandlerWhenInvokingAnOpenAPIFunctionAsync()
+ {
+ using var httpHandler = new HttpClientHandler();
+ using var customHandler = new CustomHandler(httpHandler);
+ using HttpClient httpClient = new(customHandler);
+
+ Kernel kernel = new();
+
+ var plugin = await kernel.ImportPluginFromOpenAIAsync("Klarna", new Uri("https://www.klarna.com/.well-known/ai-plugin.json"), new OpenAIFunctionExecutionParameters(httpClient));
+
+ var arguments = new KernelArguments
+ {
+ ["q"] = "Laptop", // Category or product that needs to be searched for.
+ ["size"] = "3", // Number of products to return
+ ["budget"] = "200", // Maximum price of the matching product in local currency
+ ["countryCode"] = "US" // ISO 3166 country code with 2 characters based on the user location.
+ };
+ // Currently, only US, GB, DE, SE and DK are supported.
+
+ var functionResult = await kernel.InvokeAsync(plugin["productsUsingGET"], arguments);
+
+ var result = functionResult.GetValue<RestApiOperationResponse>();
+
+ Console.WriteLine($"Function execution result: {result?.Content}");
+ }
+
+ /// <summary>
+ /// Custom delegating handler to modify the <see cref="HttpRequestMessage"/> before sending it.
+ /// </summary>
+ private sealed class CustomHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler)
+ {
+ protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
+ {
+#if NET5_0_OR_GREATER
+ request.Options.TryGetValue(OpenApiKernelFunctionContext.KernelFunctionContextKey, out var functionContext);
+#else
+ request.Properties.TryGetValue(OpenApiKernelFunctionContext.KernelFunctionContextKey, out var functionContext);
+#endif
+ // Function context is only set when the Plugin is invoked via the Kernel
+ if (functionContext is not null)
+ {
+ // Modify the HttpRequestMessage
+ request.Headers.Add("Kernel-Function-Name", functionContext?.Function?.Name);
+ }
+
+ // Call the next handler in the pipeline
+ return await base.SendAsync(request, cancellationToken);
+ }
+ }
}
diff --git a/dotnet/samples/Concepts/README.md b/dotnet/samples/Concepts/README.md
index f0896534852c..7eaa2a8a7ae6 100644
--- a/dotnet/samples/Concepts/README.md
+++ b/dotnet/samples/Concepts/README.md
@@ -100,6 +100,10 @@ Down below you can find the code snippets that demonstrate the usage of many Sem
- [TextMemoryPlugin_GeminiEmbeddingGeneration](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/TextMemoryPlugin_GeminiEmbeddingGeneration.cs)
- [TextMemoryPlugin_MultipleMemoryStore](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Memory/TextMemoryPlugin_MultipleMemoryStore.cs)
+## Optimization - Examples of different cost and performance optimization techniques
+
+- [FrugalGPT](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Optimization/FrugalGPT.cs)
+
## Planners - Examples on using `Planners`
- [FunctionCallStepwisePlanning](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Concepts/Planners/FunctionCallStepwisePlanning.cs)
diff --git a/dotnet/samples/Concepts/Resources/sales.csv b/dotnet/samples/Concepts/Resources/sales.csv
new file mode 100644
index 000000000000..4a355d11bf83
--- /dev/null
+++ b/dotnet/samples/Concepts/Resources/sales.csv
@@ -0,0 +1,701 @@
+Segment,Country,Product,Units Sold,Sale Price,Gross Sales,Discounts,Sales,COGS,Profit,Date,Month Number,Month Name,Year
+Government,Canada,Carretera,1618.5,20.00,32370.00,0.00,32370.00,16185.00,16185.00,1/1/2014,1,January,2014
+Government,Germany,Carretera,1321,20.00,26420.00,0.00,26420.00,13210.00,13210.00,1/1/2014,1,January,2014
+Midmarket,France,Carretera,2178,15.00,32670.00,0.00,32670.00,21780.00,10890.00,6/1/2014,6,June,2014
+Midmarket,Germany,Carretera,888,15.00,13320.00,0.00,13320.00,8880.00,4440.00,6/1/2014,6,June,2014
+Midmarket,Mexico,Carretera,2470,15.00,37050.00,0.00,37050.00,24700.00,12350.00,6/1/2014,6,June,2014
+Government,Germany,Carretera,1513,350.00,529550.00,0.00,529550.00,393380.00,136170.00,12/1/2014,12,December,2014
+Midmarket,Germany,Montana,921,15.00,13815.00,0.00,13815.00,9210.00,4605.00,3/1/2014,3,March,2014
+Channel Partners,Canada,Montana,2518,12.00,30216.00,0.00,30216.00,7554.00,22662.00,6/1/2014,6,June,2014
+Government,France,Montana,1899,20.00,37980.00,0.00,37980.00,18990.00,18990.00,6/1/2014,6,June,2014
+Channel Partners,Germany,Montana,1545,12.00,18540.00,0.00,18540.00,4635.00,13905.00,6/1/2014,6,June,2014
+Midmarket,Mexico,Montana,2470,15.00,37050.00,0.00,37050.00,24700.00,12350.00,6/1/2014,6,June,2014
+Enterprise,Canada,Montana,2665.5,125.00,333187.50,0.00,333187.50,319860.00,13327.50,7/1/2014,7,July,2014
+Small Business,Mexico,Montana,958,300.00,287400.00,0.00,287400.00,239500.00,47900.00,8/1/2014,8,August,2014
+Government,Germany,Montana,2146,7.00,15022.00,0.00,15022.00,10730.00,4292.00,9/1/2014,9,September,2014
+Enterprise,Canada,Montana,345,125.00,43125.00,0.00,43125.00,41400.00,1725.00,10/1/2013,10,October,2013
+Midmarket,United States of America,Montana,615,15.00,9225.00,0.00,9225.00,6150.00,3075.00,12/1/2014,12,December,2014
+Government,Canada,Paseo,292,20.00,5840.00,0.00,5840.00,2920.00,2920.00,2/1/2014,2,February,2014
+Midmarket,Mexico,Paseo,974,15.00,14610.00,0.00,14610.00,9740.00,4870.00,2/1/2014,2,February,2014
+Channel Partners,Canada,Paseo,2518,12.00,30216.00,0.00,30216.00,7554.00,22662.00,6/1/2014,6,June,2014
+Government,Germany,Paseo,1006,350.00,352100.00,0.00,352100.00,261560.00,90540.00,6/1/2014,6,June,2014
+Channel Partners,Germany,Paseo,367,12.00,4404.00,0.00,4404.00,1101.00,3303.00,7/1/2014,7,July,2014
+Government,Mexico,Paseo,883,7.00,6181.00,0.00,6181.00,4415.00,1766.00,8/1/2014,8,August,2014
+Midmarket,France,Paseo,549,15.00,8235.00,0.00,8235.00,5490.00,2745.00,9/1/2013,9,September,2013
+Small Business,Mexico,Paseo,788,300.00,236400.00,0.00,236400.00,197000.00,39400.00,9/1/2013,9,September,2013
+Midmarket,Mexico,Paseo,2472,15.00,37080.00,0.00,37080.00,24720.00,12360.00,9/1/2014,9,September,2014
+Government,United States of America,Paseo,1143,7.00,8001.00,0.00,8001.00,5715.00,2286.00,10/1/2014,10,October,2014
+Government,Canada,Paseo,1725,350.00,603750.00,0.00,603750.00,448500.00,155250.00,11/1/2013,11,November,2013
+Channel Partners,United States of America,Paseo,912,12.00,10944.00,0.00,10944.00,2736.00,8208.00,11/1/2013,11,November,2013
+Midmarket,Canada,Paseo,2152,15.00,32280.00,0.00,32280.00,21520.00,10760.00,12/1/2013,12,December,2013
+Government,Canada,Paseo,1817,20.00,36340.00,0.00,36340.00,18170.00,18170.00,12/1/2014,12,December,2014
+Government,Germany,Paseo,1513,350.00,529550.00,0.00,529550.00,393380.00,136170.00,12/1/2014,12,December,2014
+Government,Mexico,Velo,1493,7.00,10451.00,0.00,10451.00,7465.00,2986.00,1/1/2014,1,January,2014
+Enterprise,France,Velo,1804,125.00,225500.00,0.00,225500.00,216480.00,9020.00,2/1/2014,2,February,2014
+Channel Partners,Germany,Velo,2161,12.00,25932.00,0.00,25932.00,6483.00,19449.00,3/1/2014,3,March,2014
+Government,Germany,Velo,1006,350.00,352100.00,0.00,352100.00,261560.00,90540.00,6/1/2014,6,June,2014
+Channel Partners,Germany,Velo,1545,12.00,18540.00,0.00,18540.00,4635.00,13905.00,6/1/2014,6,June,2014
+Enterprise,United States of America,Velo,2821,125.00,352625.00,0.00,352625.00,338520.00,14105.00,8/1/2014,8,August,2014
+Enterprise,Canada,Velo,345,125.00,43125.00,0.00,43125.00,41400.00,1725.00,10/1/2013,10,October,2013
+Small Business,Canada,VTT,2001,300.00,600300.00,0.00,600300.00,500250.00,100050.00,2/1/2014,2,February,2014
+Channel Partners,Germany,VTT,2838,12.00,34056.00,0.00,34056.00,8514.00,25542.00,4/1/2014,4,April,2014
+Midmarket,France,VTT,2178,15.00,32670.00,0.00,32670.00,21780.00,10890.00,6/1/2014,6,June,2014
+Midmarket,Germany,VTT,888,15.00,13320.00,0.00,13320.00,8880.00,4440.00,6/1/2014,6,June,2014
+Government,France,VTT,1527,350.00,534450.00,0.00,534450.00,397020.00,137430.00,9/1/2013,9,September,2013
+Small Business,France,VTT,2151,300.00,645300.00,0.00,645300.00,537750.00,107550.00,9/1/2014,9,September,2014
+Government,Canada,VTT,1817,20.00,36340.00,0.00,36340.00,18170.00,18170.00,12/1/2014,12,December,2014
+Government,France,Amarilla,2750,350.00,962500.00,0.00,962500.00,715000.00,247500.00,2/1/2014,2,February,2014
+Channel Partners,United States of America,Amarilla,1953,12.00,23436.00,0.00,23436.00,5859.00,17577.00,4/1/2014,4,April,2014
+Enterprise,Germany,Amarilla,4219.5,125.00,527437.50,0.00,527437.50,506340.00,21097.50,4/1/2014,4,April,2014
+Government,France,Amarilla,1899,20.00,37980.00,0.00,37980.00,18990.00,18990.00,6/1/2014,6,June,2014
+Government,Germany,Amarilla,1686,7.00,11802.00,0.00,11802.00,8430.00,3372.00,7/1/2014,7,July,2014
+Channel Partners,United States of America,Amarilla,2141,12.00,25692.00,0.00,25692.00,6423.00,19269.00,8/1/2014,8,August,2014
+Government,United States of America,Amarilla,1143,7.00,8001.00,0.00,8001.00,5715.00,2286.00,10/1/2014,10,October,2014
+Midmarket,United States of America,Amarilla,615,15.00,9225.00,0.00,9225.00,6150.00,3075.00,12/1/2014,12,December,2014
+Government,France,Paseo,3945,7.00,27615.00,276.15,27338.85,19725.00,7613.85,1/1/2014,1,January,2014
+Midmarket,France,Paseo,2296,15.00,34440.00,344.40,34095.60,22960.00,11135.60,2/1/2014,2,February,2014
+Government,France,Paseo,1030,7.00,7210.00,72.10,7137.90,5150.00,1987.90,5/1/2014,5,May,2014
+Government,France,Velo,639,7.00,4473.00,44.73,4428.27,3195.00,1233.27,11/1/2014,11,November,2014
+Government,Canada,VTT,1326,7.00,9282.00,92.82,9189.18,6630.00,2559.18,3/1/2014,3,March,2014
+Channel Partners,United States of America,Carretera,1858,12.00,22296.00,222.96,22073.04,5574.00,16499.04,2/1/2014,2,February,2014
+Government,Mexico,Carretera,1210,350.00,423500.00,4235.00,419265.00,314600.00,104665.00,3/1/2014,3,March,2014
+Government,United States of America,Carretera,2529,7.00,17703.00,177.03,17525.97,12645.00,4880.97,7/1/2014,7,July,2014
+Channel Partners,Canada,Carretera,1445,12.00,17340.00,173.40,17166.60,4335.00,12831.60,9/1/2014,9,September,2014
+Enterprise,United States of America,Carretera,330,125.00,41250.00,412.50,40837.50,39600.00,1237.50,9/1/2013,9,September,2013
+Channel Partners,France,Carretera,2671,12.00,32052.00,320.52,31731.48,8013.00,23718.48,9/1/2014,9,September,2014
+Channel Partners,Germany,Carretera,766,12.00,9192.00,91.92,9100.08,2298.00,6802.08,10/1/2013,10,October,2013
+Small Business,Mexico,Carretera,494,300.00,148200.00,1482.00,146718.00,123500.00,23218.00,10/1/2013,10,October,2013
+Government,Mexico,Carretera,1397,350.00,488950.00,4889.50,484060.50,363220.00,120840.50,10/1/2014,10,October,2014
+Government,France,Carretera,2155,350.00,754250.00,7542.50,746707.50,560300.00,186407.50,12/1/2014,12,December,2014
+Midmarket,Mexico,Montana,2214,15.00,33210.00,332.10,32877.90,22140.00,10737.90,3/1/2014,3,March,2014
+Small Business,United States of America,Montana,2301,300.00,690300.00,6903.00,683397.00,575250.00,108147.00,4/1/2014,4,April,2014
+Government,France,Montana,1375.5,20.00,27510.00,275.10,27234.90,13755.00,13479.90,7/1/2014,7,July,2014
+Government,Canada,Montana,1830,7.00,12810.00,128.10,12681.90,9150.00,3531.90,8/1/2014,8,August,2014
+Small Business,United States of America,Montana,2498,300.00,749400.00,7494.00,741906.00,624500.00,117406.00,9/1/2013,9,September,2013
+Enterprise,United States of America,Montana,663,125.00,82875.00,828.75,82046.25,79560.00,2486.25,10/1/2013,10,October,2013
+Midmarket,United States of America,Paseo,1514,15.00,22710.00,227.10,22482.90,15140.00,7342.90,2/1/2014,2,February,2014
+Government,United States of America,Paseo,4492.5,7.00,31447.50,314.48,31133.03,22462.50,8670.53,4/1/2014,4,April,2014
+Enterprise,United States of America,Paseo,727,125.00,90875.00,908.75,89966.25,87240.00,2726.25,6/1/2014,6,June,2014
+Enterprise,France,Paseo,787,125.00,98375.00,983.75,97391.25,94440.00,2951.25,6/1/2014,6,June,2014
+Enterprise,Mexico,Paseo,1823,125.00,227875.00,2278.75,225596.25,218760.00,6836.25,7/1/2014,7,July,2014
+Midmarket,Germany,Paseo,747,15.00,11205.00,112.05,11092.95,7470.00,3622.95,9/1/2014,9,September,2014
+Channel Partners,Germany,Paseo,766,12.00,9192.00,91.92,9100.08,2298.00,6802.08,10/1/2013,10,October,2013
+Small Business,United States of America,Paseo,2905,300.00,871500.00,8715.00,862785.00,726250.00,136535.00,11/1/2014,11,November,2014
+Government,France,Paseo,2155,350.00,754250.00,7542.50,746707.50,560300.00,186407.50,12/1/2014,12,December,2014
+Government,France,Velo,3864,20.00,77280.00,772.80,76507.20,38640.00,37867.20,4/1/2014,4,April,2014
+Government,Mexico,Velo,362,7.00,2534.00,25.34,2508.66,1810.00,698.66,5/1/2014,5,May,2014
+Enterprise,Canada,Velo,923,125.00,115375.00,1153.75,114221.25,110760.00,3461.25,8/1/2014,8,August,2014
+Enterprise,United States of America,Velo,663,125.00,82875.00,828.75,82046.25,79560.00,2486.25,10/1/2013,10,October,2013
+Government,Canada,Velo,2092,7.00,14644.00,146.44,14497.56,10460.00,4037.56,11/1/2013,11,November,2013
+Government,Germany,VTT,263,7.00,1841.00,18.41,1822.59,1315.00,507.59,3/1/2014,3,March,2014
+Government,Canada,VTT,943.5,350.00,330225.00,3302.25,326922.75,245310.00,81612.75,4/1/2014,4,April,2014
+Enterprise,United States of America,VTT,727,125.00,90875.00,908.75,89966.25,87240.00,2726.25,6/1/2014,6,June,2014
+Enterprise,France,VTT,787,125.00,98375.00,983.75,97391.25,94440.00,2951.25,6/1/2014,6,June,2014
+Small Business,Germany,VTT,986,300.00,295800.00,2958.00,292842.00,246500.00,46342.00,9/1/2014,9,September,2014
+Small Business,Mexico,VTT,494,300.00,148200.00,1482.00,146718.00,123500.00,23218.00,10/1/2013,10,October,2013
+Government,Mexico,VTT,1397,350.00,488950.00,4889.50,484060.50,363220.00,120840.50,10/1/2014,10,October,2014
+Enterprise,France,VTT,1744,125.00,218000.00,2180.00,215820.00,209280.00,6540.00,11/1/2014,11,November,2014
+Channel Partners,United States of America,Amarilla,1989,12.00,23868.00,238.68,23629.32,5967.00,17662.32,9/1/2013,9,September,2013
+Midmarket,France,Amarilla,321,15.00,4815.00,48.15,4766.85,3210.00,1556.85,11/1/2013,11,November,2013
+Enterprise,Canada,Carretera,742.5,125.00,92812.50,1856.25,90956.25,89100.00,1856.25,4/1/2014,4,April,2014
+Channel Partners,Canada,Carretera,1295,12.00,15540.00,310.80,15229.20,3885.00,11344.20,10/1/2014,10,October,2014
+Small Business,Germany,Carretera,214,300.00,64200.00,1284.00,62916.00,53500.00,9416.00,10/1/2013,10,October,2013
+Government,France,Carretera,2145,7.00,15015.00,300.30,14714.70,10725.00,3989.70,11/1/2013,11,November,2013
+Government,Canada,Carretera,2852,350.00,998200.00,19964.00,978236.00,741520.00,236716.00,12/1/2014,12,December,2014
+Channel Partners,United States of America,Montana,1142,12.00,13704.00,274.08,13429.92,3426.00,10003.92,6/1/2014,6,June,2014
+Government,United States of America,Montana,1566,20.00,31320.00,626.40,30693.60,15660.00,15033.60,10/1/2014,10,October,2014
+Channel Partners,Mexico,Montana,690,12.00,8280.00,165.60,8114.40,2070.00,6044.40,11/1/2014,11,November,2014
+Enterprise,Mexico,Montana,1660,125.00,207500.00,4150.00,203350.00,199200.00,4150.00,11/1/2013,11,November,2013
+Midmarket,Canada,Paseo,2363,15.00,35445.00,708.90,34736.10,23630.00,11106.10,2/1/2014,2,February,2014
+Small Business,France,Paseo,918,300.00,275400.00,5508.00,269892.00,229500.00,40392.00,5/1/2014,5,May,2014
+Small Business,Germany,Paseo,1728,300.00,518400.00,10368.00,508032.00,432000.00,76032.00,5/1/2014,5,May,2014
+Channel Partners,United States of America,Paseo,1142,12.00,13704.00,274.08,13429.92,3426.00,10003.92,6/1/2014,6,June,2014
+Enterprise,Mexico,Paseo,662,125.00,82750.00,1655.00,81095.00,79440.00,1655.00,6/1/2014,6,June,2014
+Channel Partners,Canada,Paseo,1295,12.00,15540.00,310.80,15229.20,3885.00,11344.20,10/1/2014,10,October,2014
+Enterprise,Germany,Paseo,809,125.00,101125.00,2022.50,99102.50,97080.00,2022.50,10/1/2013,10,October,2013
+Enterprise,Mexico,Paseo,2145,125.00,268125.00,5362.50,262762.50,257400.00,5362.50,10/1/2013,10,October,2013
+Channel Partners,France,Paseo,1785,12.00,21420.00,428.40,20991.60,5355.00,15636.60,11/1/2013,11,November,2013
+Small Business,Canada,Paseo,1916,300.00,574800.00,11496.00,563304.00,479000.00,84304.00,12/1/2014,12,December,2014
+Government,Canada,Paseo,2852,350.00,998200.00,19964.00,978236.00,741520.00,236716.00,12/1/2014,12,December,2014
+Enterprise,Canada,Paseo,2729,125.00,341125.00,6822.50,334302.50,327480.00,6822.50,12/1/2014,12,December,2014
+Midmarket,United States of America,Paseo,1925,15.00,28875.00,577.50,28297.50,19250.00,9047.50,12/1/2013,12,December,2013
+Government,United States of America,Paseo,2013,7.00,14091.00,281.82,13809.18,10065.00,3744.18,12/1/2013,12,December,2013
+Channel Partners,France,Paseo,1055,12.00,12660.00,253.20,12406.80,3165.00,9241.80,12/1/2014,12,December,2014
+Channel Partners,Mexico,Paseo,1084,12.00,13008.00,260.16,12747.84,3252.00,9495.84,12/1/2014,12,December,2014
+Government,United States of America,Velo,1566,20.00,31320.00,626.40,30693.60,15660.00,15033.60,10/1/2014,10,October,2014
+Government,Germany,Velo,2966,350.00,1038100.00,20762.00,1017338.00,771160.00,246178.00,10/1/2013,10,October,2013
+Government,Germany,Velo,2877,350.00,1006950.00,20139.00,986811.00,748020.00,238791.00,10/1/2014,10,October,2014
+Enterprise,Germany,Velo,809,125.00,101125.00,2022.50,99102.50,97080.00,2022.50,10/1/2013,10,October,2013
+Enterprise,Mexico,Velo,2145,125.00,268125.00,5362.50,262762.50,257400.00,5362.50,10/1/2013,10,October,2013
+Channel Partners,France,Velo,1055,12.00,12660.00,253.20,12406.80,3165.00,9241.80,12/1/2014,12,December,2014
+Government,Mexico,Velo,544,20.00,10880.00,217.60,10662.40,5440.00,5222.40,12/1/2013,12,December,2013
+Channel Partners,Mexico,Velo,1084,12.00,13008.00,260.16,12747.84,3252.00,9495.84,12/1/2014,12,December,2014
+Enterprise,Mexico,VTT,662,125.00,82750.00,1655.00,81095.00,79440.00,1655.00,6/1/2014,6,June,2014
+Small Business,Germany,VTT,214,300.00,64200.00,1284.00,62916.00,53500.00,9416.00,10/1/2013,10,October,2013
+Government,Germany,VTT,2877,350.00,1006950.00,20139.00,986811.00,748020.00,238791.00,10/1/2014,10,October,2014
+Enterprise,Canada,VTT,2729,125.00,341125.00,6822.50,334302.50,327480.00,6822.50,12/1/2014,12,December,2014
+Government,United States of America,VTT,266,350.00,93100.00,1862.00,91238.00,69160.00,22078.00,12/1/2013,12,December,2013
+Government,Mexico,VTT,1940,350.00,679000.00,13580.00,665420.00,504400.00,161020.00,12/1/2013,12,December,2013
+Small Business,Germany,Amarilla,259,300.00,77700.00,1554.00,76146.00,64750.00,11396.00,3/1/2014,3,March,2014
+Small Business,Mexico,Amarilla,1101,300.00,330300.00,6606.00,323694.00,275250.00,48444.00,3/1/2014,3,March,2014
+Enterprise,Germany,Amarilla,2276,125.00,284500.00,5690.00,278810.00,273120.00,5690.00,5/1/2014,5,May,2014
+Government,Germany,Amarilla,2966,350.00,1038100.00,20762.00,1017338.00,771160.00,246178.00,10/1/2013,10,October,2013
+Government,United States of America,Amarilla,1236,20.00,24720.00,494.40,24225.60,12360.00,11865.60,11/1/2014,11,November,2014
+Government,France,Amarilla,941,20.00,18820.00,376.40,18443.60,9410.00,9033.60,11/1/2014,11,November,2014
+Small Business,Canada,Amarilla,1916,300.00,574800.00,11496.00,563304.00,479000.00,84304.00,12/1/2014,12,December,2014
+Enterprise,France,Carretera,4243.5,125.00,530437.50,15913.13,514524.38,509220.00,5304.38,4/1/2014,4,April,2014
+Government,Germany,Carretera,2580,20.00,51600.00,1548.00,50052.00,25800.00,24252.00,4/1/2014,4,April,2014
+Small Business,Germany,Carretera,689,300.00,206700.00,6201.00,200499.00,172250.00,28249.00,6/1/2014,6,June,2014
+Channel Partners,United States of America,Carretera,1947,12.00,23364.00,700.92,22663.08,5841.00,16822.08,9/1/2014,9,September,2014
+Channel Partners,Canada,Carretera,908,12.00,10896.00,326.88,10569.12,2724.00,7845.12,12/1/2013,12,December,2013
+Government,Germany,Montana,1958,7.00,13706.00,411.18,13294.82,9790.00,3504.82,2/1/2014,2,February,2014
+Channel Partners,France,Montana,1901,12.00,22812.00,684.36,22127.64,5703.00,16424.64,6/1/2014,6,June,2014
+Government,France,Montana,544,7.00,3808.00,114.24,3693.76,2720.00,973.76,9/1/2014,9,September,2014
+Government,Germany,Montana,1797,350.00,628950.00,18868.50,610081.50,467220.00,142861.50,9/1/2013,9,September,2013
+Enterprise,France,Montana,1287,125.00,160875.00,4826.25,156048.75,154440.00,1608.75,12/1/2014,12,December,2014
+Enterprise,Germany,Montana,1706,125.00,213250.00,6397.50,206852.50,204720.00,2132.50,12/1/2014,12,December,2014
+Small Business,France,Paseo,2434.5,300.00,730350.00,21910.50,708439.50,608625.00,99814.50,1/1/2014,1,January,2014
+Enterprise,Canada,Paseo,1774,125.00,221750.00,6652.50,215097.50,212880.00,2217.50,3/1/2014,3,March,2014
+Channel Partners,France,Paseo,1901,12.00,22812.00,684.36,22127.64,5703.00,16424.64,6/1/2014,6,June,2014
+Small Business,Germany,Paseo,689,300.00,206700.00,6201.00,200499.00,172250.00,28249.00,6/1/2014,6,June,2014
+Enterprise,Germany,Paseo,1570,125.00,196250.00,5887.50,190362.50,188400.00,1962.50,6/1/2014,6,June,2014
+Channel Partners,United States of America,Paseo,1369.5,12.00,16434.00,493.02,15940.98,4108.50,11832.48,7/1/2014,7,July,2014
+Enterprise,Canada,Paseo,2009,125.00,251125.00,7533.75,243591.25,241080.00,2511.25,10/1/2014,10,October,2014
+Midmarket,Germany,Paseo,1945,15.00,29175.00,875.25,28299.75,19450.00,8849.75,10/1/2013,10,October,2013
+Enterprise,France,Paseo,1287,125.00,160875.00,4826.25,156048.75,154440.00,1608.75,12/1/2014,12,December,2014
+Enterprise,Germany,Paseo,1706,125.00,213250.00,6397.50,206852.50,204720.00,2132.50,12/1/2014,12,December,2014
+Enterprise,Canada,Velo,2009,125.00,251125.00,7533.75,243591.25,241080.00,2511.25,10/1/2014,10,October,2014
+Small Business,United States of America,VTT,2844,300.00,853200.00,25596.00,827604.00,711000.00,116604.00,2/1/2014,2,February,2014
+Channel Partners,Mexico,VTT,1916,12.00,22992.00,689.76,22302.24,5748.00,16554.24,4/1/2014,4,April,2014
+Enterprise,Germany,VTT,1570,125.00,196250.00,5887.50,190362.50,188400.00,1962.50,6/1/2014,6,June,2014
+Small Business,Canada,VTT,1874,300.00,562200.00,16866.00,545334.00,468500.00,76834.00,8/1/2014,8,August,2014
+Government,Mexico,VTT,1642,350.00,574700.00,17241.00,557459.00,426920.00,130539.00,8/1/2014,8,August,2014
+Midmarket,Germany,VTT,1945,15.00,29175.00,875.25,28299.75,19450.00,8849.75,10/1/2013,10,October,2013
+Government,Canada,Carretera,831,20.00,16620.00,498.60,16121.40,8310.00,7811.40,5/1/2014,5,May,2014
+Government,Mexico,Paseo,1760,7.00,12320.00,369.60,11950.40,8800.00,3150.40,9/1/2013,9,September,2013
+Government,Canada,Velo,3850.5,20.00,77010.00,2310.30,74699.70,38505.00,36194.70,4/1/2014,4,April,2014
+Channel Partners,Germany,VTT,2479,12.00,29748.00,892.44,28855.56,7437.00,21418.56,1/1/2014,1,January,2014
+Midmarket,Mexico,Montana,2031,15.00,30465.00,1218.60,29246.40,20310.00,8936.40,10/1/2014,10,October,2014
+Midmarket,Mexico,Paseo,2031,15.00,30465.00,1218.60,29246.40,20310.00,8936.40,10/1/2014,10,October,2014
+Midmarket,France,Paseo,2261,15.00,33915.00,1356.60,32558.40,22610.00,9948.40,12/1/2013,12,December,2013
+Government,United States of America,Velo,736,20.00,14720.00,588.80,14131.20,7360.00,6771.20,9/1/2013,9,September,2013
+Government,Canada,Carretera,2851,7.00,19957.00,798.28,19158.72,14255.00,4903.72,10/1/2013,10,October,2013
+Small Business,Germany,Carretera,2021,300.00,606300.00,24252.00,582048.00,505250.00,76798.00,10/1/2014,10,October,2014
+Government,United States of America,Carretera,274,350.00,95900.00,3836.00,92064.00,71240.00,20824.00,12/1/2014,12,December,2014
+Midmarket,Canada,Montana,1967,15.00,29505.00,1180.20,28324.80,19670.00,8654.80,3/1/2014,3,March,2014
+Small Business,Germany,Montana,1859,300.00,557700.00,22308.00,535392.00,464750.00,70642.00,8/1/2014,8,August,2014
+Government,Canada,Montana,2851,7.00,19957.00,798.28,19158.72,14255.00,4903.72,10/1/2013,10,October,2013
+Small Business,Germany,Montana,2021,300.00,606300.00,24252.00,582048.00,505250.00,76798.00,10/1/2014,10,October,2014
+Enterprise,Mexico,Montana,1138,125.00,142250.00,5690.00,136560.00,136560.00,0.00,12/1/2014,12,December,2014
+Government,Canada,Paseo,4251,7.00,29757.00,1190.28,28566.72,21255.00,7311.72,1/1/2014,1,January,2014
+Enterprise,Germany,Paseo,795,125.00,99375.00,3975.00,95400.00,95400.00,0.00,3/1/2014,3,March,2014
+Small Business,Germany,Paseo,1414.5,300.00,424350.00,16974.00,407376.00,353625.00,53751.00,4/1/2014,4,April,2014
+Small Business,United States of America,Paseo,2918,300.00,875400.00,35016.00,840384.00,729500.00,110884.00,5/1/2014,5,May,2014
+Government,United States of America,Paseo,3450,350.00,1207500.00,48300.00,1159200.00,897000.00,262200.00,7/1/2014,7,July,2014
+Enterprise,France,Paseo,2988,125.00,373500.00,14940.00,358560.00,358560.00,0.00,7/1/2014,7,July,2014
+Midmarket,Canada,Paseo,218,15.00,3270.00,130.80,3139.20,2180.00,959.20,9/1/2014,9,September,2014
+Government,Canada,Paseo,2074,20.00,41480.00,1659.20,39820.80,20740.00,19080.80,9/1/2014,9,September,2014
+Government,United States of America,Paseo,1056,20.00,21120.00,844.80,20275.20,10560.00,9715.20,9/1/2014,9,September,2014
+Midmarket,United States of America,Paseo,671,15.00,10065.00,402.60,9662.40,6710.00,2952.40,10/1/2013,10,October,2013
+Midmarket,Mexico,Paseo,1514,15.00,22710.00,908.40,21801.60,15140.00,6661.60,10/1/2013,10,October,2013
+Government,United States of America,Paseo,274,350.00,95900.00,3836.00,92064.00,71240.00,20824.00,12/1/2014,12,December,2014
+Enterprise,Mexico,Paseo,1138,125.00,142250.00,5690.00,136560.00,136560.00,0.00,12/1/2014,12,December,2014
+Channel Partners,United States of America,Velo,1465,12.00,17580.00,703.20,16876.80,4395.00,12481.80,3/1/2014,3,March,2014
+Government,Canada,Velo,2646,20.00,52920.00,2116.80,50803.20,26460.00,24343.20,9/1/2013,9,September,2013
+Government,France,Velo,2177,350.00,761950.00,30478.00,731472.00,566020.00,165452.00,10/1/2014,10,October,2014
+Channel Partners,France,VTT,866,12.00,10392.00,415.68,9976.32,2598.00,7378.32,5/1/2014,5,May,2014
+Government,United States of America,VTT,349,350.00,122150.00,4886.00,117264.00,90740.00,26524.00,9/1/2013,9,September,2013
+Government,France,VTT,2177,350.00,761950.00,30478.00,731472.00,566020.00,165452.00,10/1/2014,10,October,2014
+Midmarket,Mexico,VTT,1514,15.00,22710.00,908.40,21801.60,15140.00,6661.60,10/1/2013,10,October,2013
+Government,Mexico,Amarilla,1865,350.00,652750.00,26110.00,626640.00,484900.00,141740.00,2/1/2014,2,February,2014
+Enterprise,Mexico,Amarilla,1074,125.00,134250.00,5370.00,128880.00,128880.00,0.00,4/1/2014,4,April,2014
+Government,Germany,Amarilla,1907,350.00,667450.00,26698.00,640752.00,495820.00,144932.00,9/1/2014,9,September,2014
+Midmarket,United States of America,Amarilla,671,15.00,10065.00,402.60,9662.40,6710.00,2952.40,10/1/2013,10,October,2013
+Government,Canada,Amarilla,1778,350.00,622300.00,24892.00,597408.00,462280.00,135128.00,12/1/2013,12,December,2013
+Government,Germany,Montana,1159,7.00,8113.00,405.65,7707.35,5795.00,1912.35,10/1/2013,10,October,2013
+Government,Germany,Paseo,1372,7.00,9604.00,480.20,9123.80,6860.00,2263.80,1/1/2014,1,January,2014
+Government,Canada,Paseo,2349,7.00,16443.00,822.15,15620.85,11745.00,3875.85,9/1/2013,9,September,2013
+Government,Mexico,Paseo,2689,7.00,18823.00,941.15,17881.85,13445.00,4436.85,10/1/2014,10,October,2014
+Channel Partners,Canada,Paseo,2431,12.00,29172.00,1458.60,27713.40,7293.00,20420.40,12/1/2014,12,December,2014
+Channel Partners,Canada,Velo,2431,12.00,29172.00,1458.60,27713.40,7293.00,20420.40,12/1/2014,12,December,2014
+Government,Mexico,VTT,2689,7.00,18823.00,941.15,17881.85,13445.00,4436.85,10/1/2014,10,October,2014
+Government,Mexico,Amarilla,1683,7.00,11781.00,589.05,11191.95,8415.00,2776.95,7/1/2014,7,July,2014
+Channel Partners,Mexico,Amarilla,1123,12.00,13476.00,673.80,12802.20,3369.00,9433.20,8/1/2014,8,August,2014
+Government,Germany,Amarilla,1159,7.00,8113.00,405.65,7707.35,5795.00,1912.35,10/1/2013,10,October,2013
+Channel Partners,France,Carretera,1865,12.00,22380.00,1119.00,21261.00,5595.00,15666.00,2/1/2014,2,February,2014
+Channel Partners,Germany,Carretera,1116,12.00,13392.00,669.60,12722.40,3348.00,9374.40,2/1/2014,2,February,2014
+Government,France,Carretera,1563,20.00,31260.00,1563.00,29697.00,15630.00,14067.00,5/1/2014,5,May,2014
+Small Business,United States of America,Carretera,991,300.00,297300.00,14865.00,282435.00,247750.00,34685.00,6/1/2014,6,June,2014
+Government,Germany,Carretera,1016,7.00,7112.00,355.60,6756.40,5080.00,1676.40,11/1/2013,11,November,2013
+Midmarket,Mexico,Carretera,2791,15.00,41865.00,2093.25,39771.75,27910.00,11861.75,11/1/2014,11,November,2014
+Government,United States of America,Carretera,570,7.00,3990.00,199.50,3790.50,2850.00,940.50,12/1/2014,12,December,2014
+Government,France,Carretera,2487,7.00,17409.00,870.45,16538.55,12435.00,4103.55,12/1/2014,12,December,2014
+Government,France,Montana,1384.5,350.00,484575.00,24228.75,460346.25,359970.00,100376.25,1/1/2014,1,January,2014
+Enterprise,United States of America,Montana,3627,125.00,453375.00,22668.75,430706.25,435240.00,-4533.75,7/1/2014,7,July,2014
+Government,Mexico,Montana,720,350.00,252000.00,12600.00,239400.00,187200.00,52200.00,9/1/2013,9,September,2013
+Channel Partners,Germany,Montana,2342,12.00,28104.00,1405.20,26698.80,7026.00,19672.80,11/1/2014,11,November,2014
+Small Business,Mexico,Montana,1100,300.00,330000.00,16500.00,313500.00,275000.00,38500.00,12/1/2013,12,December,2013
+Government,France,Paseo,1303,20.00,26060.00,1303.00,24757.00,13030.00,11727.00,2/1/2014,2,February,2014
+Enterprise,United States of America,Paseo,2992,125.00,374000.00,18700.00,355300.00,359040.00,-3740.00,3/1/2014,3,March,2014
+Enterprise,France,Paseo,2385,125.00,298125.00,14906.25,283218.75,286200.00,-2981.25,3/1/2014,3,March,2014
+Small Business,Mexico,Paseo,1607,300.00,482100.00,24105.00,457995.00,401750.00,56245.00,4/1/2014,4,April,2014
+Government,United States of America,Paseo,2327,7.00,16289.00,814.45,15474.55,11635.00,3839.55,5/1/2014,5,May,2014
+Small Business,United States of America,Paseo,991,300.00,297300.00,14865.00,282435.00,247750.00,34685.00,6/1/2014,6,June,2014
+Government,United States of America,Paseo,602,350.00,210700.00,10535.00,200165.00,156520.00,43645.00,6/1/2014,6,June,2014
+Midmarket,France,Paseo,2620,15.00,39300.00,1965.00,37335.00,26200.00,11135.00,9/1/2014,9,September,2014
+Government,Canada,Paseo,1228,350.00,429800.00,21490.00,408310.00,319280.00,89030.00,10/1/2013,10,October,2013
+Government,Canada,Paseo,1389,20.00,27780.00,1389.00,26391.00,13890.00,12501.00,10/1/2013,10,October,2013
+Enterprise,United States of America,Paseo,861,125.00,107625.00,5381.25,102243.75,103320.00,-1076.25,10/1/2014,10,October,2014
+Enterprise,France,Paseo,704,125.00,88000.00,4400.00,83600.00,84480.00,-880.00,10/1/2013,10,October,2013
+Government,Canada,Paseo,1802,20.00,36040.00,1802.00,34238.00,18020.00,16218.00,12/1/2013,12,December,2013
+Government,United States of America,Paseo,2663,20.00,53260.00,2663.00,50597.00,26630.00,23967.00,12/1/2014,12,December,2014
+Government,France,Paseo,2136,7.00,14952.00,747.60,14204.40,10680.00,3524.40,12/1/2013,12,December,2013
+Midmarket,Germany,Paseo,2116,15.00,31740.00,1587.00,30153.00,21160.00,8993.00,12/1/2013,12,December,2013
+Midmarket,United States of America,Velo,555,15.00,8325.00,416.25,7908.75,5550.00,2358.75,1/1/2014,1,January,2014
+Midmarket,Mexico,Velo,2861,15.00,42915.00,2145.75,40769.25,28610.00,12159.25,1/1/2014,1,January,2014
+Enterprise,Germany,Velo,807,125.00,100875.00,5043.75,95831.25,96840.00,-1008.75,2/1/2014,2,February,2014
+Government,United States of America,Velo,602,350.00,210700.00,10535.00,200165.00,156520.00,43645.00,6/1/2014,6,June,2014
+Government,United States of America,Velo,2832,20.00,56640.00,2832.00,53808.00,28320.00,25488.00,8/1/2014,8,August,2014
+Government,France,Velo,1579,20.00,31580.00,1579.00,30001.00,15790.00,14211.00,8/1/2014,8,August,2014
+Enterprise,United States of America,Velo,861,125.00,107625.00,5381.25,102243.75,103320.00,-1076.25,10/1/2014,10,October,2014
+Enterprise,France,Velo,704,125.00,88000.00,4400.00,83600.00,84480.00,-880.00,10/1/2013,10,October,2013
+Government,France,Velo,1033,20.00,20660.00,1033.00,19627.00,10330.00,9297.00,12/1/2013,12,December,2013
+Small Business,Germany,Velo,1250,300.00,375000.00,18750.00,356250.00,312500.00,43750.00,12/1/2014,12,December,2014
+Government,Canada,VTT,1389,20.00,27780.00,1389.00,26391.00,13890.00,12501.00,10/1/2013,10,October,2013
+Government,United States of America,VTT,1265,20.00,25300.00,1265.00,24035.00,12650.00,11385.00,11/1/2013,11,November,2013
+Government,Germany,VTT,2297,20.00,45940.00,2297.00,43643.00,22970.00,20673.00,11/1/2013,11,November,2013
+Government,United States of America,VTT,2663,20.00,53260.00,2663.00,50597.00,26630.00,23967.00,12/1/2014,12,December,2014
+Government,United States of America,VTT,570,7.00,3990.00,199.50,3790.50,2850.00,940.50,12/1/2014,12,December,2014
+Government,France,VTT,2487,7.00,17409.00,870.45,16538.55,12435.00,4103.55,12/1/2014,12,December,2014
+Government,Germany,Amarilla,1350,350.00,472500.00,23625.00,448875.00,351000.00,97875.00,2/1/2014,2,February,2014
+Government,Canada,Amarilla,552,350.00,193200.00,9660.00,183540.00,143520.00,40020.00,8/1/2014,8,August,2014
+Government,Canada,Amarilla,1228,350.00,429800.00,21490.00,408310.00,319280.00,89030.00,10/1/2013,10,October,2013
+Small Business,Germany,Amarilla,1250,300.00,375000.00,18750.00,356250.00,312500.00,43750.00,12/1/2014,12,December,2014
+Midmarket,France,Paseo,3801,15.00,57015.00,3420.90,53594.10,38010.00,15584.10,4/1/2014,4,April,2014
+Government,United States of America,Carretera,1117.5,20.00,22350.00,1341.00,21009.00,11175.00,9834.00,1/1/2014,1,January,2014
+Midmarket,Canada,Carretera,2844,15.00,42660.00,2559.60,40100.40,28440.00,11660.40,6/1/2014,6,June,2014
+Channel Partners,Mexico,Carretera,562,12.00,6744.00,404.64,6339.36,1686.00,4653.36,9/1/2014,9,September,2014
+Channel Partners,Canada,Carretera,2299,12.00,27588.00,1655.28,25932.72,6897.00,19035.72,10/1/2013,10,October,2013
+Midmarket,United States of America,Carretera,2030,15.00,30450.00,1827.00,28623.00,20300.00,8323.00,11/1/2014,11,November,2014
+Government,United States of America,Carretera,263,7.00,1841.00,110.46,1730.54,1315.00,415.54,11/1/2013,11,November,2013
+Enterprise,Germany,Carretera,887,125.00,110875.00,6652.50,104222.50,106440.00,-2217.50,12/1/2013,12,December,2013
+Government,Mexico,Montana,980,350.00,343000.00,20580.00,322420.00,254800.00,67620.00,4/1/2014,4,April,2014
+Government,Germany,Montana,1460,350.00,511000.00,30660.00,480340.00,379600.00,100740.00,5/1/2014,5,May,2014
+Government,France,Montana,1403,7.00,9821.00,589.26,9231.74,7015.00,2216.74,10/1/2013,10,October,2013
+Channel Partners,United States of America,Montana,2723,12.00,32676.00,1960.56,30715.44,8169.00,22546.44,11/1/2014,11,November,2014
+Government,France,Paseo,1496,350.00,523600.00,31416.00,492184.00,388960.00,103224.00,6/1/2014,6,June,2014
+Channel Partners,Canada,Paseo,2299,12.00,27588.00,1655.28,25932.72,6897.00,19035.72,10/1/2013,10,October,2013
+Government,United States of America,Paseo,727,350.00,254450.00,15267.00,239183.00,189020.00,50163.00,10/1/2013,10,October,2013
+Enterprise,Canada,Velo,952,125.00,119000.00,7140.00,111860.00,114240.00,-2380.00,2/1/2014,2,February,2014
+Enterprise,United States of America,Velo,2755,125.00,344375.00,20662.50,323712.50,330600.00,-6887.50,2/1/2014,2,February,2014
+Midmarket,Germany,Velo,1530,15.00,22950.00,1377.00,21573.00,15300.00,6273.00,5/1/2014,5,May,2014
+Government,France,Velo,1496,350.00,523600.00,31416.00,492184.00,388960.00,103224.00,6/1/2014,6,June,2014
+Government,Mexico,Velo,1498,7.00,10486.00,629.16,9856.84,7490.00,2366.84,6/1/2014,6,June,2014
+Small Business,France,Velo,1221,300.00,366300.00,21978.00,344322.00,305250.00,39072.00,10/1/2013,10,October,2013
+Government,France,Velo,2076,350.00,726600.00,43596.00,683004.00,539760.00,143244.00,10/1/2013,10,October,2013
+Midmarket,Canada,VTT,2844,15.00,42660.00,2559.60,40100.40,28440.00,11660.40,6/1/2014,6,June,2014
+Government,Mexico,VTT,1498,7.00,10486.00,629.16,9856.84,7490.00,2366.84,6/1/2014,6,June,2014
+Small Business,France,VTT,1221,300.00,366300.00,21978.00,344322.00,305250.00,39072.00,10/1/2013,10,October,2013
+Government,Mexico,VTT,1123,20.00,22460.00,1347.60,21112.40,11230.00,9882.40,11/1/2013,11,November,2013
+Small Business,Canada,VTT,2436,300.00,730800.00,43848.00,686952.00,609000.00,77952.00,12/1/2013,12,December,2013
+Enterprise,France,Amarilla,1987.5,125.00,248437.50,14906.25,233531.25,238500.00,-4968.75,1/1/2014,1,January,2014
+Government,Mexico,Amarilla,1679,350.00,587650.00,35259.00,552391.00,436540.00,115851.00,9/1/2014,9,September,2014
+Government,United States of America,Amarilla,727,350.00,254450.00,15267.00,239183.00,189020.00,50163.00,10/1/2013,10,October,2013
+Government,France,Amarilla,1403,7.00,9821.00,589.26,9231.74,7015.00,2216.74,10/1/2013,10,October,2013
+Government,France,Amarilla,2076,350.00,726600.00,43596.00,683004.00,539760.00,143244.00,10/1/2013,10,October,2013
+Government,France,Montana,1757,20.00,35140.00,2108.40,33031.60,17570.00,15461.60,10/1/2013,10,October,2013
+Midmarket,United States of America,Paseo,2198,15.00,32970.00,1978.20,30991.80,21980.00,9011.80,8/1/2014,8,August,2014
+Midmarket,Germany,Paseo,1743,15.00,26145.00,1568.70,24576.30,17430.00,7146.30,8/1/2014,8,August,2014
+Midmarket,United States of America,Paseo,1153,15.00,17295.00,1037.70,16257.30,11530.00,4727.30,10/1/2014,10,October,2014
+Government,France,Paseo,1757,20.00,35140.00,2108.40,33031.60,17570.00,15461.60,10/1/2013,10,October,2013
+Government,Germany,Velo,1001,20.00,20020.00,1201.20,18818.80,10010.00,8808.80,8/1/2014,8,August,2014
+Government,Mexico,Velo,1333,7.00,9331.00,559.86,8771.14,6665.00,2106.14,11/1/2014,11,November,2014
+Midmarket,United States of America,VTT,1153,15.00,17295.00,1037.70,16257.30,11530.00,4727.30,10/1/2014,10,October,2014
+Channel Partners,Mexico,Carretera,727,12.00,8724.00,610.68,8113.32,2181.00,5932.32,2/1/2014,2,February,2014
+Channel Partners,Canada,Carretera,1884,12.00,22608.00,1582.56,21025.44,5652.00,15373.44,8/1/2014,8,August,2014
+Government,Mexico,Carretera,1834,20.00,36680.00,2567.60,34112.40,18340.00,15772.40,9/1/2013,9,September,2013
+Channel Partners,Mexico,Montana,2340,12.00,28080.00,1965.60,26114.40,7020.00,19094.40,1/1/2014,1,January,2014
+Channel Partners,France,Montana,2342,12.00,28104.00,1967.28,26136.72,7026.00,19110.72,11/1/2014,11,November,2014
+Government,France,Paseo,1031,7.00,7217.00,505.19,6711.81,5155.00,1556.81,9/1/2013,9,September,2013
+Midmarket,Canada,Velo,1262,15.00,18930.00,1325.10,17604.90,12620.00,4984.90,5/1/2014,5,May,2014
+Government,Canada,Velo,1135,7.00,7945.00,556.15,7388.85,5675.00,1713.85,6/1/2014,6,June,2014
+Government,United States of America,Velo,547,7.00,3829.00,268.03,3560.97,2735.00,825.97,11/1/2014,11,November,2014
+Government,Canada,Velo,1582,7.00,11074.00,775.18,10298.82,7910.00,2388.82,12/1/2014,12,December,2014
+Channel Partners,France,VTT,1738.5,12.00,20862.00,1460.34,19401.66,5215.50,14186.16,4/1/2014,4,April,2014
+Channel Partners,Germany,VTT,2215,12.00,26580.00,1860.60,24719.40,6645.00,18074.40,9/1/2013,9,September,2013
+Government,Canada,VTT,1582,7.00,11074.00,775.18,10298.82,7910.00,2388.82,12/1/2014,12,December,2014
+Government,Canada,Amarilla,1135,7.00,7945.00,556.15,7388.85,5675.00,1713.85,6/1/2014,6,June,2014
+Government,United States of America,Carretera,1761,350.00,616350.00,43144.50,573205.50,457860.00,115345.50,3/1/2014,3,March,2014
+Small Business,France,Carretera,448,300.00,134400.00,9408.00,124992.00,112000.00,12992.00,6/1/2014,6,June,2014
+Small Business,France,Carretera,2181,300.00,654300.00,45801.00,608499.00,545250.00,63249.00,10/1/2014,10,October,2014
+Government,France,Montana,1976,20.00,39520.00,2766.40,36753.60,19760.00,16993.60,10/1/2014,10,October,2014
+Small Business,France,Montana,2181,300.00,654300.00,45801.00,608499.00,545250.00,63249.00,10/1/2014,10,October,2014
+Enterprise,Germany,Montana,2500,125.00,312500.00,21875.00,290625.00,300000.00,-9375.00,11/1/2013,11,November,2013
+Small Business,Canada,Paseo,1702,300.00,510600.00,35742.00,474858.00,425500.00,49358.00,5/1/2014,5,May,2014
+Small Business,France,Paseo,448,300.00,134400.00,9408.00,124992.00,112000.00,12992.00,6/1/2014,6,June,2014
+Enterprise,Germany,Paseo,3513,125.00,439125.00,30738.75,408386.25,421560.00,-13173.75,7/1/2014,7,July,2014
+Midmarket,France,Paseo,2101,15.00,31515.00,2206.05,29308.95,21010.00,8298.95,8/1/2014,8,August,2014
+Midmarket,United States of America,Paseo,2931,15.00,43965.00,3077.55,40887.45,29310.00,11577.45,9/1/2013,9,September,2013
+Government,France,Paseo,1535,20.00,30700.00,2149.00,28551.00,15350.00,13201.00,9/1/2014,9,September,2014
+Small Business,Germany,Paseo,1123,300.00,336900.00,23583.00,313317.00,280750.00,32567.00,9/1/2013,9,September,2013
+Small Business,Canada,Paseo,1404,300.00,421200.00,29484.00,391716.00,351000.00,40716.00,11/1/2013,11,November,2013
+Channel Partners,Mexico,Paseo,2763,12.00,33156.00,2320.92,30835.08,8289.00,22546.08,11/1/2013,11,November,2013
+Government,Germany,Paseo,2125,7.00,14875.00,1041.25,13833.75,10625.00,3208.75,12/1/2013,12,December,2013
+Small Business,France,Velo,1659,300.00,497700.00,34839.00,462861.00,414750.00,48111.00,7/1/2014,7,July,2014
+Government,Mexico,Velo,609,20.00,12180.00,852.60,11327.40,6090.00,5237.40,8/1/2014,8,August,2014
+Enterprise,Germany,Velo,2087,125.00,260875.00,18261.25,242613.75,250440.00,-7826.25,9/1/2014,9,September,2014
+Government,France,Velo,1976,20.00,39520.00,2766.40,36753.60,19760.00,16993.60,10/1/2014,10,October,2014
+Government,United States of America,Velo,1421,20.00,28420.00,1989.40,26430.60,14210.00,12220.60,12/1/2013,12,December,2013
+Small Business,United States of America,Velo,1372,300.00,411600.00,28812.00,382788.00,343000.00,39788.00,12/1/2014,12,December,2014
+Government,Germany,Velo,588,20.00,11760.00,823.20,10936.80,5880.00,5056.80,12/1/2013,12,December,2013
+Channel Partners,Canada,VTT,3244.5,12.00,38934.00,2725.38,36208.62,9733.50,26475.12,1/1/2014,1,January,2014
+Small Business,France,VTT,959,300.00,287700.00,20139.00,267561.00,239750.00,27811.00,2/1/2014,2,February,2014
+Small Business,Mexico,VTT,2747,300.00,824100.00,57687.00,766413.00,686750.00,79663.00,2/1/2014,2,February,2014
+Enterprise,Canada,Amarilla,1645,125.00,205625.00,14393.75,191231.25,197400.00,-6168.75,5/1/2014,5,May,2014
+Government,France,Amarilla,2876,350.00,1006600.00,70462.00,936138.00,747760.00,188378.00,9/1/2014,9,September,2014
+Enterprise,Germany,Amarilla,994,125.00,124250.00,8697.50,115552.50,119280.00,-3727.50,9/1/2013,9,September,2013
+Government,Canada,Amarilla,1118,20.00,22360.00,1565.20,20794.80,11180.00,9614.80,11/1/2014,11,November,2014
+Small Business,United States of America,Amarilla,1372,300.00,411600.00,28812.00,382788.00,343000.00,39788.00,12/1/2014,12,December,2014
+Government,Canada,Montana,488,7.00,3416.00,273.28,3142.72,2440.00,702.72,2/1/2014,2,February,2014
+Government,United States of America,Montana,1282,20.00,25640.00,2051.20,23588.80,12820.00,10768.80,6/1/2014,6,June,2014
+Government,Canada,Paseo,257,7.00,1799.00,143.92,1655.08,1285.00,370.08,5/1/2014,5,May,2014
+Government,United States of America,Amarilla,1282,20.00,25640.00,2051.20,23588.80,12820.00,10768.80,6/1/2014,6,June,2014
+Enterprise,Mexico,Carretera,1540,125.00,192500.00,15400.00,177100.00,184800.00,-7700.00,8/1/2014,8,August,2014
+Midmarket,France,Carretera,490,15.00,7350.00,588.00,6762.00,4900.00,1862.00,11/1/2014,11,November,2014
+Government,Mexico,Carretera,1362,350.00,476700.00,38136.00,438564.00,354120.00,84444.00,12/1/2014,12,December,2014
+Midmarket,France,Montana,2501,15.00,37515.00,3001.20,34513.80,25010.00,9503.80,3/1/2014,3,March,2014
+Government,Canada,Montana,708,20.00,14160.00,1132.80,13027.20,7080.00,5947.20,6/1/2014,6,June,2014
+Government,Germany,Montana,645,20.00,12900.00,1032.00,11868.00,6450.00,5418.00,7/1/2014,7,July,2014
+Small Business,France,Montana,1562,300.00,468600.00,37488.00,431112.00,390500.00,40612.00,8/1/2014,8,August,2014
+Small Business,Canada,Montana,1283,300.00,384900.00,30792.00,354108.00,320750.00,33358.00,9/1/2013,9,September,2013
+Midmarket,Germany,Montana,711,15.00,10665.00,853.20,9811.80,7110.00,2701.80,12/1/2014,12,December,2014
+Enterprise,Mexico,Paseo,1114,125.00,139250.00,11140.00,128110.00,133680.00,-5570.00,3/1/2014,3,March,2014
+Government,Germany,Paseo,1259,7.00,8813.00,705.04,8107.96,6295.00,1812.96,4/1/2014,4,April,2014
+Government,Germany,Paseo,1095,7.00,7665.00,613.20,7051.80,5475.00,1576.80,5/1/2014,5,May,2014
+Government,Germany,Paseo,1366,20.00,27320.00,2185.60,25134.40,13660.00,11474.40,6/1/2014,6,June,2014
+Small Business,Mexico,Paseo,2460,300.00,738000.00,59040.00,678960.00,615000.00,63960.00,6/1/2014,6,June,2014
+Government,United States of America,Paseo,678,7.00,4746.00,379.68,4366.32,3390.00,976.32,8/1/2014,8,August,2014
+Government,Germany,Paseo,1598,7.00,11186.00,894.88,10291.12,7990.00,2301.12,8/1/2014,8,August,2014
+Government,Germany,Paseo,2409,7.00,16863.00,1349.04,15513.96,12045.00,3468.96,9/1/2013,9,September,2013
+Government,Germany,Paseo,1934,20.00,38680.00,3094.40,35585.60,19340.00,16245.60,9/1/2014,9,September,2014
+Government,Mexico,Paseo,2993,20.00,59860.00,4788.80,55071.20,29930.00,25141.20,9/1/2014,9,September,2014
+Government,Germany,Paseo,2146,350.00,751100.00,60088.00,691012.00,557960.00,133052.00,11/1/2013,11,November,2013
+Government,Mexico,Paseo,1946,7.00,13622.00,1089.76,12532.24,9730.00,2802.24,12/1/2013,12,December,2013
+Government,Mexico,Paseo,1362,350.00,476700.00,38136.00,438564.00,354120.00,84444.00,12/1/2014,12,December,2014
+Channel Partners,Canada,Velo,598,12.00,7176.00,574.08,6601.92,1794.00,4807.92,3/1/2014,3,March,2014
+Government,United States of America,Velo,2907,7.00,20349.00,1627.92,18721.08,14535.00,4186.08,6/1/2014,6,June,2014
+Government,Germany,Velo,2338,7.00,16366.00,1309.28,15056.72,11690.00,3366.72,6/1/2014,6,June,2014
+Small Business,France,Velo,386,300.00,115800.00,9264.00,106536.00,96500.00,10036.00,11/1/2013,11,November,2013
+Small Business,Mexico,Velo,635,300.00,190500.00,15240.00,175260.00,158750.00,16510.00,12/1/2014,12,December,2014
+Government,France,VTT,574.5,350.00,201075.00,16086.00,184989.00,149370.00,35619.00,4/1/2014,4,April,2014
+Government,Germany,VTT,2338,7.00,16366.00,1309.28,15056.72,11690.00,3366.72,6/1/2014,6,June,2014
+Government,France,VTT,381,350.00,133350.00,10668.00,122682.00,99060.00,23622.00,8/1/2014,8,August,2014
+Government,Germany,VTT,422,350.00,147700.00,11816.00,135884.00,109720.00,26164.00,8/1/2014,8,August,2014
+Small Business,Canada,VTT,2134,300.00,640200.00,51216.00,588984.00,533500.00,55484.00,9/1/2014,9,September,2014
+Small Business,United States of America,VTT,808,300.00,242400.00,19392.00,223008.00,202000.00,21008.00,12/1/2013,12,December,2013
+Government,Canada,Amarilla,708,20.00,14160.00,1132.80,13027.20,7080.00,5947.20,6/1/2014,6,June,2014
+Government,United States of America,Amarilla,2907,7.00,20349.00,1627.92,18721.08,14535.00,4186.08,6/1/2014,6,June,2014
+Government,Germany,Amarilla,1366,20.00,27320.00,2185.60,25134.40,13660.00,11474.40,6/1/2014,6,June,2014
+Small Business,Mexico,Amarilla,2460,300.00,738000.00,59040.00,678960.00,615000.00,63960.00,6/1/2014,6,June,2014
+Government,Germany,Amarilla,1520,20.00,30400.00,2432.00,27968.00,15200.00,12768.00,11/1/2014,11,November,2014
+Midmarket,Germany,Amarilla,711,15.00,10665.00,853.20,9811.80,7110.00,2701.80,12/1/2014,12,December,2014
+Channel Partners,Mexico,Amarilla,1375,12.00,16500.00,1320.00,15180.00,4125.00,11055.00,12/1/2013,12,December,2013
+Small Business,Mexico,Amarilla,635,300.00,190500.00,15240.00,175260.00,158750.00,16510.00,12/1/2014,12,December,2014
+Government,United States of America,VTT,436.5,20.00,8730.00,698.40,8031.60,4365.00,3666.60,7/1/2014,7,July,2014
+Small Business,Canada,Carretera,1094,300.00,328200.00,29538.00,298662.00,273500.00,25162.00,6/1/2014,6,June,2014
+Channel Partners,Mexico,Carretera,367,12.00,4404.00,396.36,4007.64,1101.00,2906.64,10/1/2013,10,October,2013
+Small Business,Canada,Montana,3802.5,300.00,1140750.00,102667.50,1038082.50,950625.00,87457.50,4/1/2014,4,April,2014
+Government,France,Montana,1666,350.00,583100.00,52479.00,530621.00,433160.00,97461.00,5/1/2014,5,May,2014
+Small Business,France,Montana,322,300.00,96600.00,8694.00,87906.00,80500.00,7406.00,9/1/2013,9,September,2013
+Channel Partners,Canada,Montana,2321,12.00,27852.00,2506.68,25345.32,6963.00,18382.32,11/1/2014,11,November,2014
+Enterprise,France,Montana,1857,125.00,232125.00,20891.25,211233.75,222840.00,-11606.25,11/1/2013,11,November,2013
+Government,Canada,Montana,1611,7.00,11277.00,1014.93,10262.07,8055.00,2207.07,12/1/2013,12,December,2013
+Enterprise,United States of America,Montana,2797,125.00,349625.00,31466.25,318158.75,335640.00,-17481.25,12/1/2014,12,December,2014
+Small Business,Germany,Montana,334,300.00,100200.00,9018.00,91182.00,83500.00,7682.00,12/1/2013,12,December,2013
+Small Business,Mexico,Paseo,2565,300.00,769500.00,69255.00,700245.00,641250.00,58995.00,1/1/2014,1,January,2014
+Government,Mexico,Paseo,2417,350.00,845950.00,76135.50,769814.50,628420.00,141394.50,1/1/2014,1,January,2014
+Midmarket,United States of America,Paseo,3675,15.00,55125.00,4961.25,50163.75,36750.00,13413.75,4/1/2014,4,April,2014
+Small Business,Canada,Paseo,1094,300.00,328200.00,29538.00,298662.00,273500.00,25162.00,6/1/2014,6,June,2014
+Midmarket,France,Paseo,1227,15.00,18405.00,1656.45,16748.55,12270.00,4478.55,10/1/2014,10,October,2014
+Channel Partners,Mexico,Paseo,367,12.00,4404.00,396.36,4007.64,1101.00,2906.64,10/1/2013,10,October,2013
+Small Business,France,Paseo,1324,300.00,397200.00,35748.00,361452.00,331000.00,30452.00,11/1/2014,11,November,2014
+Channel Partners,Germany,Paseo,1775,12.00,21300.00,1917.00,19383.00,5325.00,14058.00,11/1/2013,11,November,2013
+Enterprise,United States of America,Paseo,2797,125.00,349625.00,31466.25,318158.75,335640.00,-17481.25,12/1/2014,12,December,2014
+Midmarket,Mexico,Velo,245,15.00,3675.00,330.75,3344.25,2450.00,894.25,5/1/2014,5,May,2014
+Small Business,Canada,Velo,3793.5,300.00,1138050.00,102424.50,1035625.50,948375.00,87250.50,7/1/2014,7,July,2014
+Government,Germany,Velo,1307,350.00,457450.00,41170.50,416279.50,339820.00,76459.50,7/1/2014,7,July,2014
+Enterprise,Canada,Velo,567,125.00,70875.00,6378.75,64496.25,68040.00,-3543.75,9/1/2014,9,September,2014
+Enterprise,Mexico,Velo,2110,125.00,263750.00,23737.50,240012.50,253200.00,-13187.50,9/1/2014,9,September,2014
+Government,Canada,Velo,1269,350.00,444150.00,39973.50,404176.50,329940.00,74236.50,10/1/2014,10,October,2014
+Channel Partners,United States of America,VTT,1956,12.00,23472.00,2112.48,21359.52,5868.00,15491.52,1/1/2014,1,January,2014
+Small Business,Germany,VTT,2659,300.00,797700.00,71793.00,725907.00,664750.00,61157.00,2/1/2014,2,February,2014
+Government,United States of America,VTT,1351.5,350.00,473025.00,42572.25,430452.75,351390.00,79062.75,4/1/2014,4,April,2014
+Channel Partners,Germany,VTT,880,12.00,10560.00,950.40,9609.60,2640.00,6969.60,5/1/2014,5,May,2014
+Small Business,United States of America,VTT,1867,300.00,560100.00,50409.00,509691.00,466750.00,42941.00,9/1/2014,9,September,2014
+Channel Partners,France,VTT,2234,12.00,26808.00,2412.72,24395.28,6702.00,17693.28,9/1/2013,9,September,2013
+Midmarket,France,VTT,1227,15.00,18405.00,1656.45,16748.55,12270.00,4478.55,10/1/2014,10,October,2014
+Enterprise,Mexico,VTT,877,125.00,109625.00,9866.25,99758.75,105240.00,-5481.25,11/1/2014,11,November,2014
+Government,United States of America,Amarilla,2071,350.00,724850.00,65236.50,659613.50,538460.00,121153.50,9/1/2014,9,September,2014
+Government,Canada,Amarilla,1269,350.00,444150.00,39973.50,404176.50,329940.00,74236.50,10/1/2014,10,October,2014
+Midmarket,Germany,Amarilla,970,15.00,14550.00,1309.50,13240.50,9700.00,3540.50,11/1/2013,11,November,2013
+Government,Mexico,Amarilla,1694,20.00,33880.00,3049.20,30830.80,16940.00,13890.80,11/1/2014,11,November,2014
+Government,Germany,Carretera,663,20.00,13260.00,1193.40,12066.60,6630.00,5436.60,5/1/2014,5,May,2014
+Government,Canada,Carretera,819,7.00,5733.00,515.97,5217.03,4095.00,1122.03,7/1/2014,7,July,2014
+Channel Partners,Germany,Carretera,1580,12.00,18960.00,1706.40,17253.60,4740.00,12513.60,9/1/2014,9,September,2014
+Government,Mexico,Carretera,521,7.00,3647.00,328.23,3318.77,2605.00,713.77,12/1/2014,12,December,2014
+Government,United States of America,Paseo,973,20.00,19460.00,1751.40,17708.60,9730.00,7978.60,3/1/2014,3,March,2014
+Government,Mexico,Paseo,1038,20.00,20760.00,1868.40,18891.60,10380.00,8511.60,6/1/2014,6,June,2014
+Government,Germany,Paseo,360,7.00,2520.00,226.80,2293.20,1800.00,493.20,10/1/2014,10,October,2014
+Channel Partners,France,Velo,1967,12.00,23604.00,2124.36,21479.64,5901.00,15578.64,3/1/2014,3,March,2014
+Midmarket,Mexico,Velo,2628,15.00,39420.00,3547.80,35872.20,26280.00,9592.20,4/1/2014,4,April,2014
+Government,Germany,VTT,360,7.00,2520.00,226.80,2293.20,1800.00,493.20,10/1/2014,10,October,2014
+Government,France,VTT,2682,20.00,53640.00,4827.60,48812.40,26820.00,21992.40,11/1/2013,11,November,2013
+Government,Mexico,VTT,521,7.00,3647.00,328.23,3318.77,2605.00,713.77,12/1/2014,12,December,2014
+Government,Mexico,Amarilla,1038,20.00,20760.00,1868.40,18891.60,10380.00,8511.60,6/1/2014,6,June,2014
+Midmarket,Canada,Amarilla,1630.5,15.00,24457.50,2201.18,22256.33,16305.00,5951.33,7/1/2014,7,July,2014
+Channel Partners,France,Amarilla,306,12.00,3672.00,330.48,3341.52,918.00,2423.52,12/1/2013,12,December,2013
+Channel Partners,United States of America,Carretera,386,12.00,4632.00,463.20,4168.80,1158.00,3010.80,10/1/2013,10,October,2013
+Government,United States of America,Montana,2328,7.00,16296.00,1629.60,14666.40,11640.00,3026.40,9/1/2014,9,September,2014
+Channel Partners,United States of America,Paseo,386,12.00,4632.00,463.20,4168.80,1158.00,3010.80,10/1/2013,10,October,2013
+Enterprise,United States of America,Carretera,3445.5,125.00,430687.50,43068.75,387618.75,413460.00,-25841.25,4/1/2014,4,April,2014
+Enterprise,France,Carretera,1482,125.00,185250.00,18525.00,166725.00,177840.00,-11115.00,12/1/2013,12,December,2013
+Government,United States of America,Montana,2313,350.00,809550.00,80955.00,728595.00,601380.00,127215.00,5/1/2014,5,May,2014
+Enterprise,United States of America,Montana,1804,125.00,225500.00,22550.00,202950.00,216480.00,-13530.00,11/1/2013,11,November,2013
+Midmarket,France,Montana,2072,15.00,31080.00,3108.00,27972.00,20720.00,7252.00,12/1/2014,12,December,2014
+Government,France,Paseo,1954,20.00,39080.00,3908.00,35172.00,19540.00,15632.00,3/1/2014,3,March,2014
+Small Business,Mexico,Paseo,591,300.00,177300.00,17730.00,159570.00,147750.00,11820.00,5/1/2014,5,May,2014
+Midmarket,France,Paseo,2167,15.00,32505.00,3250.50,29254.50,21670.00,7584.50,10/1/2013,10,October,2013
+Government,Germany,Paseo,241,20.00,4820.00,482.00,4338.00,2410.00,1928.00,10/1/2014,10,October,2014
+Midmarket,Germany,Velo,681,15.00,10215.00,1021.50,9193.50,6810.00,2383.50,1/1/2014,1,January,2014
+Midmarket,Germany,Velo,510,15.00,7650.00,765.00,6885.00,5100.00,1785.00,4/1/2014,4,April,2014
+Midmarket,United States of America,Velo,790,15.00,11850.00,1185.00,10665.00,7900.00,2765.00,5/1/2014,5,May,2014
+Government,France,Velo,639,350.00,223650.00,22365.00,201285.00,166140.00,35145.00,7/1/2014,7,July,2014
+Enterprise,United States of America,Velo,1596,125.00,199500.00,19950.00,179550.00,191520.00,-11970.00,9/1/2014,9,September,2014
+Small Business,United States of America,Velo,2294,300.00,688200.00,68820.00,619380.00,573500.00,45880.00,10/1/2013,10,October,2013
+Government,Germany,Velo,241,20.00,4820.00,482.00,4338.00,2410.00,1928.00,10/1/2014,10,October,2014
+Government,Germany,Velo,2665,7.00,18655.00,1865.50,16789.50,13325.00,3464.50,11/1/2014,11,November,2014
+Enterprise,Canada,Velo,1916,125.00,239500.00,23950.00,215550.00,229920.00,-14370.00,12/1/2013,12,December,2013
+Small Business,France,Velo,853,300.00,255900.00,25590.00,230310.00,213250.00,17060.00,12/1/2014,12,December,2014
+Enterprise,Mexico,VTT,341,125.00,42625.00,4262.50,38362.50,40920.00,-2557.50,5/1/2014,5,May,2014
+Midmarket,Mexico,VTT,641,15.00,9615.00,961.50,8653.50,6410.00,2243.50,7/1/2014,7,July,2014
+Government,United States of America,VTT,2807,350.00,982450.00,98245.00,884205.00,729820.00,154385.00,8/1/2014,8,August,2014
+Small Business,Mexico,VTT,432,300.00,129600.00,12960.00,116640.00,108000.00,8640.00,9/1/2014,9,September,2014
+Small Business,United States of America,VTT,2294,300.00,688200.00,68820.00,619380.00,573500.00,45880.00,10/1/2013,10,October,2013
+Midmarket,France,VTT,2167,15.00,32505.00,3250.50,29254.50,21670.00,7584.50,10/1/2013,10,October,2013
+Enterprise,Canada,VTT,2529,125.00,316125.00,31612.50,284512.50,303480.00,-18967.50,11/1/2014,11,November,2014
+Government,Germany,VTT,1870,350.00,654500.00,65450.00,589050.00,486200.00,102850.00,12/1/2013,12,December,2013
+Enterprise,United States of America,Amarilla,579,125.00,72375.00,7237.50,65137.50,69480.00,-4342.50,1/1/2014,1,January,2014
+Government,Canada,Amarilla,2240,350.00,784000.00,78400.00,705600.00,582400.00,123200.00,2/1/2014,2,February,2014
+Small Business,United States of America,Amarilla,2993,300.00,897900.00,89790.00,808110.00,748250.00,59860.00,3/1/2014,3,March,2014
+Channel Partners,Canada,Amarilla,3520.5,12.00,42246.00,4224.60,38021.40,10561.50,27459.90,4/1/2014,4,April,2014
+Government,Mexico,Amarilla,2039,20.00,40780.00,4078.00,36702.00,20390.00,16312.00,5/1/2014,5,May,2014
+Channel Partners,Germany,Amarilla,2574,12.00,30888.00,3088.80,27799.20,7722.00,20077.20,8/1/2014,8,August,2014
+Government,Canada,Amarilla,707,350.00,247450.00,24745.00,222705.00,183820.00,38885.00,9/1/2014,9,September,2014
+Midmarket,France,Amarilla,2072,15.00,31080.00,3108.00,27972.00,20720.00,7252.00,12/1/2014,12,December,2014
+Small Business,France,Amarilla,853,300.00,255900.00,25590.00,230310.00,213250.00,17060.00,12/1/2014,12,December,2014
+Channel Partners,France,Carretera,1198,12.00,14376.00,1581.36,12794.64,3594.00,9200.64,10/1/2013,10,October,2013
+Government,France,Paseo,2532,7.00,17724.00,1949.64,15774.36,12660.00,3114.36,4/1/2014,4,April,2014
+Channel Partners,France,Paseo,1198,12.00,14376.00,1581.36,12794.64,3594.00,9200.64,10/1/2013,10,October,2013
+Midmarket,Canada,Velo,384,15.00,5760.00,633.60,5126.40,3840.00,1286.40,1/1/2014,1,January,2014
+Channel Partners,Germany,Velo,472,12.00,5664.00,623.04,5040.96,1416.00,3624.96,10/1/2014,10,October,2014
+Government,United States of America,VTT,1579,7.00,11053.00,1215.83,9837.17,7895.00,1942.17,3/1/2014,3,March,2014
+Channel Partners,Mexico,VTT,1005,12.00,12060.00,1326.60,10733.40,3015.00,7718.40,9/1/2013,9,September,2013
+Midmarket,United States of America,Amarilla,3199.5,15.00,47992.50,5279.18,42713.33,31995.00,10718.33,7/1/2014,7,July,2014
+Channel Partners,Germany,Amarilla,472,12.00,5664.00,623.04,5040.96,1416.00,3624.96,10/1/2014,10,October,2014
+Channel Partners,Canada,Carretera,1937,12.00,23244.00,2556.84,20687.16,5811.00,14876.16,2/1/2014,2,February,2014
+Government,Germany,Carretera,792,350.00,277200.00,30492.00,246708.00,205920.00,40788.00,3/1/2014,3,March,2014
+Small Business,Germany,Carretera,2811,300.00,843300.00,92763.00,750537.00,702750.00,47787.00,7/1/2014,7,July,2014
+Enterprise,France,Carretera,2441,125.00,305125.00,33563.75,271561.25,292920.00,-21358.75,10/1/2014,10,October,2014
+Midmarket,Canada,Carretera,1560,15.00,23400.00,2574.00,20826.00,15600.00,5226.00,11/1/2013,11,November,2013
+Government,Mexico,Carretera,2706,7.00,18942.00,2083.62,16858.38,13530.00,3328.38,11/1/2013,11,November,2013
+Government,Germany,Montana,766,350.00,268100.00,29491.00,238609.00,199160.00,39449.00,1/1/2014,1,January,2014
+Government,Germany,Montana,2992,20.00,59840.00,6582.40,53257.60,29920.00,23337.60,10/1/2013,10,October,2013
+Midmarket,Mexico,Montana,2157,15.00,32355.00,3559.05,28795.95,21570.00,7225.95,12/1/2014,12,December,2014
+Small Business,Canada,Paseo,873,300.00,261900.00,28809.00,233091.00,218250.00,14841.00,1/1/2014,1,January,2014
+Government,Mexico,Paseo,1122,20.00,22440.00,2468.40,19971.60,11220.00,8751.60,3/1/2014,3,March,2014
+Government,Canada,Paseo,2104.5,350.00,736575.00,81023.25,655551.75,547170.00,108381.75,7/1/2014,7,July,2014
+Channel Partners,Canada,Paseo,4026,12.00,48312.00,5314.32,42997.68,12078.00,30919.68,7/1/2014,7,July,2014
+Channel Partners,France,Paseo,2425.5,12.00,29106.00,3201.66,25904.34,7276.50,18627.84,7/1/2014,7,July,2014
+Government,Canada,Paseo,2394,20.00,47880.00,5266.80,42613.20,23940.00,18673.20,8/1/2014,8,August,2014
+Midmarket,Mexico,Paseo,1984,15.00,29760.00,3273.60,26486.40,19840.00,6646.40,8/1/2014,8,August,2014
+Enterprise,France,Paseo,2441,125.00,305125.00,33563.75,271561.25,292920.00,-21358.75,10/1/2014,10,October,2014
+Government,Germany,Paseo,2992,20.00,59840.00,6582.40,53257.60,29920.00,23337.60,10/1/2013,10,October,2013
+Small Business,Canada,Paseo,1366,300.00,409800.00,45078.00,364722.00,341500.00,23222.00,11/1/2014,11,November,2014
+Government,France,Velo,2805,20.00,56100.00,6171.00,49929.00,28050.00,21879.00,9/1/2013,9,September,2013
+Midmarket,Mexico,Velo,655,15.00,9825.00,1080.75,8744.25,6550.00,2194.25,9/1/2013,9,September,2013
+Government,Mexico,Velo,344,350.00,120400.00,13244.00,107156.00,89440.00,17716.00,10/1/2013,10,October,2013
+Government,Canada,Velo,1808,7.00,12656.00,1392.16,11263.84,9040.00,2223.84,11/1/2014,11,November,2014
+Channel Partners,France,VTT,1734,12.00,20808.00,2288.88,18519.12,5202.00,13317.12,1/1/2014,1,January,2014
+Enterprise,Mexico,VTT,554,125.00,69250.00,7617.50,61632.50,66480.00,-4847.50,1/1/2014,1,January,2014
+Government,Canada,VTT,2935,20.00,58700.00,6457.00,52243.00,29350.00,22893.00,11/1/2013,11,November,2013
+Enterprise,Germany,Amarilla,3165,125.00,395625.00,43518.75,352106.25,379800.00,-27693.75,1/1/2014,1,January,2014
+Government,Mexico,Amarilla,2629,20.00,52580.00,5783.80,46796.20,26290.00,20506.20,1/1/2014,1,January,2014
+Enterprise,France,Amarilla,1433,125.00,179125.00,19703.75,159421.25,171960.00,-12538.75,5/1/2014,5,May,2014
+Enterprise,Mexico,Amarilla,947,125.00,118375.00,13021.25,105353.75,113640.00,-8286.25,9/1/2013,9,September,2013
+Government,Mexico,Amarilla,344,350.00,120400.00,13244.00,107156.00,89440.00,17716.00,10/1/2013,10,October,2013
+Midmarket,Mexico,Amarilla,2157,15.00,32355.00,3559.05,28795.95,21570.00,7225.95,12/1/2014,12,December,2014
+Government,United States of America,Paseo,380,7.00,2660.00,292.60,2367.40,1900.00,467.40,9/1/2013,9,September,2013
+Government,Mexico,Carretera,886,350.00,310100.00,37212.00,272888.00,230360.00,42528.00,6/1/2014,6,June,2014
+Enterprise,Canada,Carretera,2416,125.00,302000.00,36240.00,265760.00,289920.00,-24160.00,9/1/2013,9,September,2013
+Enterprise,Mexico,Carretera,2156,125.00,269500.00,32340.00,237160.00,258720.00,-21560.00,10/1/2014,10,October,2014
+Midmarket,Canada,Carretera,2689,15.00,40335.00,4840.20,35494.80,26890.00,8604.80,11/1/2014,11,November,2014
+Midmarket,United States of America,Montana,677,15.00,10155.00,1218.60,8936.40,6770.00,2166.40,3/1/2014,3,March,2014
+Small Business,France,Montana,1773,300.00,531900.00,63828.00,468072.00,443250.00,24822.00,4/1/2014,4,April,2014
+Government,Mexico,Montana,2420,7.00,16940.00,2032.80,14907.20,12100.00,2807.20,9/1/2014,9,September,2014
+Government,Canada,Montana,2734,7.00,19138.00,2296.56,16841.44,13670.00,3171.44,10/1/2014,10,October,2014
+Government,Mexico,Montana,1715,20.00,34300.00,4116.00,30184.00,17150.00,13034.00,10/1/2013,10,October,2013
+Small Business,France,Montana,1186,300.00,355800.00,42696.00,313104.00,296500.00,16604.00,12/1/2013,12,December,2013
+Small Business,United States of America,Paseo,3495,300.00,1048500.00,125820.00,922680.00,873750.00,48930.00,1/1/2014,1,January,2014
+Government,Mexico,Paseo,886,350.00,310100.00,37212.00,272888.00,230360.00,42528.00,6/1/2014,6,June,2014
+Enterprise,Mexico,Paseo,2156,125.00,269500.00,32340.00,237160.00,258720.00,-21560.00,10/1/2014,10,October,2014
+Government,Mexico,Paseo,905,20.00,18100.00,2172.00,15928.00,9050.00,6878.00,10/1/2014,10,October,2014
+Government,Mexico,Paseo,1715,20.00,34300.00,4116.00,30184.00,17150.00,13034.00,10/1/2013,10,October,2013
+Government,France,Paseo,1594,350.00,557900.00,66948.00,490952.00,414440.00,76512.00,11/1/2014,11,November,2014
+Small Business,Germany,Paseo,1359,300.00,407700.00,48924.00,358776.00,339750.00,19026.00,11/1/2014,11,November,2014
+Small Business,Mexico,Paseo,2150,300.00,645000.00,77400.00,567600.00,537500.00,30100.00,11/1/2014,11,November,2014
+Government,Mexico,Paseo,1197,350.00,418950.00,50274.00,368676.00,311220.00,57456.00,11/1/2014,11,November,2014
+Midmarket,Mexico,Paseo,380,15.00,5700.00,684.00,5016.00,3800.00,1216.00,12/1/2013,12,December,2013
+Government,Mexico,Paseo,1233,20.00,24660.00,2959.20,21700.80,12330.00,9370.80,12/1/2014,12,December,2014
+Government,Mexico,Velo,1395,350.00,488250.00,58590.00,429660.00,362700.00,66960.00,7/1/2014,7,July,2014
+Government,United States of America,Velo,986,350.00,345100.00,41412.00,303688.00,256360.00,47328.00,10/1/2014,10,October,2014
+Government,Mexico,Velo,905,20.00,18100.00,2172.00,15928.00,9050.00,6878.00,10/1/2014,10,October,2014
+Channel Partners,Canada,VTT,2109,12.00,25308.00,3036.96,22271.04,6327.00,15944.04,5/1/2014,5,May,2014
+Midmarket,France,VTT,3874.5,15.00,58117.50,6974.10,51143.40,38745.00,12398.40,7/1/2014,7,July,2014
+Government,Canada,VTT,623,350.00,218050.00,26166.00,191884.00,161980.00,29904.00,9/1/2013,9,September,2013
+Government,United States of America,VTT,986,350.00,345100.00,41412.00,303688.00,256360.00,47328.00,10/1/2014,10,October,2014
+Enterprise,United States of America,VTT,2387,125.00,298375.00,35805.00,262570.00,286440.00,-23870.00,11/1/2014,11,November,2014
+Government,Mexico,VTT,1233,20.00,24660.00,2959.20,21700.80,12330.00,9370.80,12/1/2014,12,December,2014
+Government,United States of America,Amarilla,270,350.00,94500.00,11340.00,83160.00,70200.00,12960.00,2/1/2014,2,February,2014
+Government,France,Amarilla,3421.5,7.00,23950.50,2874.06,21076.44,17107.50,3968.94,7/1/2014,7,July,2014
+Government,Canada,Amarilla,2734,7.00,19138.00,2296.56,16841.44,13670.00,3171.44,10/1/2014,10,October,2014
+Midmarket,United States of America,Amarilla,2548,15.00,38220.00,4586.40,33633.60,25480.00,8153.60,11/1/2013,11,November,2013
+Government,France,Carretera,2521.5,20.00,50430.00,6051.60,44378.40,25215.00,19163.40,1/1/2014,1,January,2014
+Channel Partners,Mexico,Montana,2661,12.00,31932.00,3831.84,28100.16,7983.00,20117.16,5/1/2014,5,May,2014
+Government,Germany,Paseo,1531,20.00,30620.00,3674.40,26945.60,15310.00,11635.60,12/1/2014,12,December,2014
+Government,France,VTT,1491,7.00,10437.00,1252.44,9184.56,7455.00,1729.56,3/1/2014,3,March,2014
+Government,Germany,VTT,1531,20.00,30620.00,3674.40,26945.60,15310.00,11635.60,12/1/2014,12,December,2014
+Channel Partners,Canada,Amarilla,2761,12.00,33132.00,3975.84,29156.16,8283.00,20873.16,9/1/2013,9,September,2013
+Midmarket,United States of America,Carretera,2567,15.00,38505.00,5005.65,33499.35,25670.00,7829.35,6/1/2014,6,June,2014
+Midmarket,United States of America,VTT,2567,15.00,38505.00,5005.65,33499.35,25670.00,7829.35,6/1/2014,6,June,2014
+Government,Canada,Carretera,923,350.00,323050.00,41996.50,281053.50,239980.00,41073.50,3/1/2014,3,March,2014
+Government,France,Carretera,1790,350.00,626500.00,81445.00,545055.00,465400.00,79655.00,3/1/2014,3,March,2014
+Government,Germany,Carretera,442,20.00,8840.00,1149.20,7690.80,4420.00,3270.80,9/1/2013,9,September,2013
+Government,United States of America,Montana,982.5,350.00,343875.00,44703.75,299171.25,255450.00,43721.25,1/1/2014,1,January,2014
+Government,United States of America,Montana,1298,7.00,9086.00,1181.18,7904.82,6490.00,1414.82,2/1/2014,2,February,2014
+Channel Partners,Mexico,Montana,604,12.00,7248.00,942.24,6305.76,1812.00,4493.76,6/1/2014,6,June,2014
+Government,Mexico,Montana,2255,20.00,45100.00,5863.00,39237.00,22550.00,16687.00,7/1/2014,7,July,2014
+Government,Canada,Montana,1249,20.00,24980.00,3247.40,21732.60,12490.00,9242.60,10/1/2014,10,October,2014
+Government,United States of America,Paseo,1438.5,7.00,10069.50,1309.04,8760.47,7192.50,1567.97,1/1/2014,1,January,2014
+Small Business,Germany,Paseo,807,300.00,242100.00,31473.00,210627.00,201750.00,8877.00,1/1/2014,1,January,2014
+Government,United States of America,Paseo,2641,20.00,52820.00,6866.60,45953.40,26410.00,19543.40,2/1/2014,2,February,2014
+Government,Germany,Paseo,2708,20.00,54160.00,7040.80,47119.20,27080.00,20039.20,2/1/2014,2,February,2014
+Government,Canada,Paseo,2632,350.00,921200.00,119756.00,801444.00,684320.00,117124.00,6/1/2014,6,June,2014
+Enterprise,Canada,Paseo,1583,125.00,197875.00,25723.75,172151.25,189960.00,-17808.75,6/1/2014,6,June,2014
+Channel Partners,Mexico,Paseo,571,12.00,6852.00,890.76,5961.24,1713.00,4248.24,7/1/2014,7,July,2014
+Government,France,Paseo,2696,7.00,18872.00,2453.36,16418.64,13480.00,2938.64,8/1/2014,8,August,2014
+Midmarket,Canada,Paseo,1565,15.00,23475.00,3051.75,20423.25,15650.00,4773.25,10/1/2014,10,October,2014
+Government,Canada,Paseo,1249,20.00,24980.00,3247.40,21732.60,12490.00,9242.60,10/1/2014,10,October,2014
+Government,Germany,Paseo,357,350.00,124950.00,16243.50,108706.50,92820.00,15886.50,11/1/2014,11,November,2014
+Channel Partners,Germany,Paseo,1013,12.00,12156.00,1580.28,10575.72,3039.00,7536.72,12/1/2014,12,December,2014
+Midmarket,France,Velo,3997.5,15.00,59962.50,7795.13,52167.38,39975.00,12192.38,1/1/2014,1,January,2014
+Government,Canada,Velo,2632,350.00,921200.00,119756.00,801444.00,684320.00,117124.00,6/1/2014,6,June,2014
+Government,France,Velo,1190,7.00,8330.00,1082.90,7247.10,5950.00,1297.10,6/1/2014,6,June,2014
+Channel Partners,Mexico,Velo,604,12.00,7248.00,942.24,6305.76,1812.00,4493.76,6/1/2014,6,June,2014
+Midmarket,Germany,Velo,660,15.00,9900.00,1287.00,8613.00,6600.00,2013.00,9/1/2013,9,September,2013
+Channel Partners,Mexico,Velo,410,12.00,4920.00,639.60,4280.40,1230.00,3050.40,10/1/2014,10,October,2014
+Small Business,Mexico,Velo,2605,300.00,781500.00,101595.00,679905.00,651250.00,28655.00,11/1/2013,11,November,2013
+Channel Partners,Germany,Velo,1013,12.00,12156.00,1580.28,10575.72,3039.00,7536.72,12/1/2014,12,December,2014
+Enterprise,Canada,VTT,1583,125.00,197875.00,25723.75,172151.25,189960.00,-17808.75,6/1/2014,6,June,2014
+Midmarket,Canada,VTT,1565,15.00,23475.00,3051.75,20423.25,15650.00,4773.25,10/1/2014,10,October,2014
+Enterprise,Canada,Amarilla,1659,125.00,207375.00,26958.75,180416.25,199080.00,-18663.75,1/1/2014,1,January,2014
+Government,France,Amarilla,1190,7.00,8330.00,1082.90,7247.10,5950.00,1297.10,6/1/2014,6,June,2014
+Channel Partners,Mexico,Amarilla,410,12.00,4920.00,639.60,4280.40,1230.00,3050.40,10/1/2014,10,October,2014
+Channel Partners,Germany,Amarilla,1770,12.00,21240.00,2761.20,18478.80,5310.00,13168.80,12/1/2013,12,December,2013
+Government,Mexico,Carretera,2579,20.00,51580.00,7221.20,44358.80,25790.00,18568.80,4/1/2014,4,April,2014
+Government,United States of America,Carretera,1743,20.00,34860.00,4880.40,29979.60,17430.00,12549.60,5/1/2014,5,May,2014
+Government,United States of America,Carretera,2996,7.00,20972.00,2936.08,18035.92,14980.00,3055.92,10/1/2013,10,October,2013
+Government,Germany,Carretera,280,7.00,1960.00,274.40,1685.60,1400.00,285.60,12/1/2014,12,December,2014
+Government,France,Montana,293,7.00,2051.00,287.14,1763.86,1465.00,298.86,2/1/2014,2,February,2014
+Government,United States of America,Montana,2996,7.00,20972.00,2936.08,18035.92,14980.00,3055.92,10/1/2013,10,October,2013
+Midmarket,Germany,Paseo,278,15.00,4170.00,583.80,3586.20,2780.00,806.20,2/1/2014,2,February,2014
+Government,Canada,Paseo,2428,20.00,48560.00,6798.40,41761.60,24280.00,17481.60,3/1/2014,3,March,2014
+Midmarket,United States of America,Paseo,1767,15.00,26505.00,3710.70,22794.30,17670.00,5124.30,9/1/2014,9,September,2014
+Channel Partners,France,Paseo,1393,12.00,16716.00,2340.24,14375.76,4179.00,10196.76,10/1/2014,10,October,2014
+Government,Germany,VTT,280,7.00,1960.00,274.40,1685.60,1400.00,285.60,12/1/2014,12,December,2014
+Channel Partners,France,Amarilla,1393,12.00,16716.00,2340.24,14375.76,4179.00,10196.76,10/1/2014,10,October,2014
+Channel Partners,United States of America,Amarilla,2015,12.00,24180.00,3385.20,20794.80,6045.00,14749.80,12/1/2013,12,December,2013
+Small Business,Mexico,Carretera,801,300.00,240300.00,33642.00,206658.00,200250.00,6408.00,7/1/2014,7,July,2014
+Enterprise,France,Carretera,1023,125.00,127875.00,17902.50,109972.50,122760.00,-12787.50,9/1/2013,9,September,2013
+Small Business,Canada,Carretera,1496,300.00,448800.00,62832.00,385968.00,374000.00,11968.00,10/1/2014,10,October,2014
+Small Business,United States of America,Carretera,1010,300.00,303000.00,42420.00,260580.00,252500.00,8080.00,10/1/2014,10,October,2014
+Midmarket,Germany,Carretera,1513,15.00,22695.00,3177.30,19517.70,15130.00,4387.70,11/1/2014,11,November,2014
+Midmarket,Canada,Carretera,2300,15.00,34500.00,4830.00,29670.00,23000.00,6670.00,12/1/2014,12,December,2014
+Enterprise,Mexico,Carretera,2821,125.00,352625.00,49367.50,303257.50,338520.00,-35262.50,12/1/2013,12,December,2013
+Government,Canada,Montana,2227.5,350.00,779625.00,109147.50,670477.50,579150.00,91327.50,1/1/2014,1,January,2014
+Government,Germany,Montana,1199,350.00,419650.00,58751.00,360899.00,311740.00,49159.00,4/1/2014,4,April,2014
+Government,Canada,Montana,200,350.00,70000.00,9800.00,60200.00,52000.00,8200.00,5/1/2014,5,May,2014
+Government,Canada,Montana,388,7.00,2716.00,380.24,2335.76,1940.00,395.76,9/1/2014,9,September,2014
+Government,Mexico,Montana,1727,7.00,12089.00,1692.46,10396.54,8635.00,1761.54,10/1/2013,10,October,2013
+Midmarket,Canada,Montana,2300,15.00,34500.00,4830.00,29670.00,23000.00,6670.00,12/1/2014,12,December,2014
+Government,Mexico,Paseo,260,20.00,5200.00,728.00,4472.00,2600.00,1872.00,2/1/2014,2,February,2014
+Midmarket,Canada,Paseo,2470,15.00,37050.00,5187.00,31863.00,24700.00,7163.00,9/1/2013,9,September,2013
+Midmarket,Canada,Paseo,1743,15.00,26145.00,3660.30,22484.70,17430.00,5054.70,10/1/2013,10,October,2013
+Channel Partners,United States of America,Paseo,2914,12.00,34968.00,4895.52,30072.48,8742.00,21330.48,10/1/2014,10,October,2014
+Government,France,Paseo,1731,7.00,12117.00,1696.38,10420.62,8655.00,1765.62,10/1/2014,10,October,2014
+Government,Canada,Paseo,700,350.00,245000.00,34300.00,210700.00,182000.00,28700.00,11/1/2014,11,November,2014
+Channel Partners,Canada,Paseo,2222,12.00,26664.00,3732.96,22931.04,6666.00,16265.04,11/1/2013,11,November,2013
+Government,United States of America,Paseo,1177,350.00,411950.00,57673.00,354277.00,306020.00,48257.00,11/1/2014,11,November,2014
+Government,France,Paseo,1922,350.00,672700.00,94178.00,578522.00,499720.00,78802.00,11/1/2013,11,November,2013
+Enterprise,Mexico,Velo,1575,125.00,196875.00,27562.50,169312.50,189000.00,-19687.50,2/1/2014,2,February,2014
+Government,United States of America,Velo,606,20.00,12120.00,1696.80,10423.20,6060.00,4363.20,4/1/2014,4,April,2014
+Small Business,United States of America,Velo,2460,300.00,738000.00,103320.00,634680.00,615000.00,19680.00,7/1/2014,7,July,2014
+Small Business,Canada,Velo,269,300.00,80700.00,11298.00,69402.00,67250.00,2152.00,10/1/2013,10,October,2013
+Small Business,Germany,Velo,2536,300.00,760800.00,106512.00,654288.00,634000.00,20288.00,11/1/2013,11,November,2013
+Government,Mexico,VTT,2903,7.00,20321.00,2844.94,17476.06,14515.00,2961.06,3/1/2014,3,March,2014
+Small Business,United States of America,VTT,2541,300.00,762300.00,106722.00,655578.00,635250.00,20328.00,8/1/2014,8,August,2014
+Small Business,Canada,VTT,269,300.00,80700.00,11298.00,69402.00,67250.00,2152.00,10/1/2013,10,October,2013
+Small Business,Canada,VTT,1496,300.00,448800.00,62832.00,385968.00,374000.00,11968.00,10/1/2014,10,October,2014
+Small Business,United States of America,VTT,1010,300.00,303000.00,42420.00,260580.00,252500.00,8080.00,10/1/2014,10,October,2014
+Government,France,VTT,1281,350.00,448350.00,62769.00,385581.00,333060.00,52521.00,12/1/2013,12,December,2013
+Small Business,Canada,Amarilla,888,300.00,266400.00,37296.00,229104.00,222000.00,7104.00,3/1/2014,3,March,2014
+Enterprise,United States of America,Amarilla,2844,125.00,355500.00,49770.00,305730.00,341280.00,-35550.00,5/1/2014,5,May,2014
+Channel Partners,France,Amarilla,2475,12.00,29700.00,4158.00,25542.00,7425.00,18117.00,8/1/2014,8,August,2014
+Midmarket,Canada,Amarilla,1743,15.00,26145.00,3660.30,22484.70,17430.00,5054.70,10/1/2013,10,October,2013
+Channel Partners,United States of America,Amarilla,2914,12.00,34968.00,4895.52,30072.48,8742.00,21330.48,10/1/2014,10,October,2014
+Government,France,Amarilla,1731,7.00,12117.00,1696.38,10420.62,8655.00,1765.62,10/1/2014,10,October,2014
+Government,Mexico,Amarilla,1727,7.00,12089.00,1692.46,10396.54,8635.00,1761.54,10/1/2013,10,October,2013
+Midmarket,Mexico,Amarilla,1870,15.00,28050.00,3927.00,24123.00,18700.00,5423.00,11/1/2013,11,November,2013
+Enterprise,France,Carretera,1174,125.00,146750.00,22012.50,124737.50,140880.00,-16142.50,8/1/2014,8,August,2014
+Enterprise,Germany,Carretera,2767,125.00,345875.00,51881.25,293993.75,332040.00,-38046.25,8/1/2014,8,August,2014
+Enterprise,Germany,Carretera,1085,125.00,135625.00,20343.75,115281.25,130200.00,-14918.75,10/1/2014,10,October,2014
+Small Business,Mexico,Montana,546,300.00,163800.00,24570.00,139230.00,136500.00,2730.00,10/1/2014,10,October,2014
+Government,Germany,Paseo,1158,20.00,23160.00,3474.00,19686.00,11580.00,8106.00,3/1/2014,3,March,2014
+Midmarket,Canada,Paseo,1614,15.00,24210.00,3631.50,20578.50,16140.00,4438.50,4/1/2014,4,April,2014
+Government,Mexico,Paseo,2535,7.00,17745.00,2661.75,15083.25,12675.00,2408.25,4/1/2014,4,April,2014
+Government,Mexico,Paseo,2851,350.00,997850.00,149677.50,848172.50,741260.00,106912.50,5/1/2014,5,May,2014
+Midmarket,Canada,Paseo,2559,15.00,38385.00,5757.75,32627.25,25590.00,7037.25,8/1/2014,8,August,2014
+Government,United States of America,Paseo,267,20.00,5340.00,801.00,4539.00,2670.00,1869.00,10/1/2013,10,October,2013
+Enterprise,Germany,Paseo,1085,125.00,135625.00,20343.75,115281.25,130200.00,-14918.75,10/1/2014,10,October,2014
+Midmarket,Germany,Paseo,1175,15.00,17625.00,2643.75,14981.25,11750.00,3231.25,10/1/2014,10,October,2014
+Government,United States of America,Paseo,2007,350.00,702450.00,105367.50,597082.50,521820.00,75262.50,11/1/2013,11,November,2013
+Government,Mexico,Paseo,2151,350.00,752850.00,112927.50,639922.50,559260.00,80662.50,11/1/2013,11,November,2013
+Channel Partners,United States of America,Paseo,914,12.00,10968.00,1645.20,9322.80,2742.00,6580.80,12/1/2014,12,December,2014
+Government,France,Paseo,293,20.00,5860.00,879.00,4981.00,2930.00,2051.00,12/1/2014,12,December,2014
+Channel Partners,Mexico,Velo,500,12.00,6000.00,900.00,5100.00,1500.00,3600.00,3/1/2014,3,March,2014
+Midmarket,France,Velo,2826,15.00,42390.00,6358.50,36031.50,28260.00,7771.50,5/1/2014,5,May,2014
+Enterprise,France,Velo,663,125.00,82875.00,12431.25,70443.75,79560.00,-9116.25,9/1/2014,9,September,2014
+Small Business,United States of America,Velo,2574,300.00,772200.00,115830.00,656370.00,643500.00,12870.00,11/1/2013,11,November,2013
+Enterprise,United States of America,Velo,2438,125.00,304750.00,45712.50,259037.50,292560.00,-33522.50,12/1/2013,12,December,2013
+Channel Partners,United States of America,Velo,914,12.00,10968.00,1645.20,9322.80,2742.00,6580.80,12/1/2014,12,December,2014
+Government,Canada,VTT,865.5,20.00,17310.00,2596.50,14713.50,8655.00,6058.50,7/1/2014,7,July,2014
+Midmarket,Germany,VTT,492,15.00,7380.00,1107.00,6273.00,4920.00,1353.00,7/1/2014,7,July,2014
+Government,United States of America,VTT,267,20.00,5340.00,801.00,4539.00,2670.00,1869.00,10/1/2013,10,October,2013
+Midmarket,Germany,VTT,1175,15.00,17625.00,2643.75,14981.25,11750.00,3231.25,10/1/2014,10,October,2014
+Enterprise,Canada,VTT,2954,125.00,369250.00,55387.50,313862.50,354480.00,-40617.50,11/1/2013,11,November,2013
+Enterprise,Germany,VTT,552,125.00,69000.00,10350.00,58650.00,66240.00,-7590.00,11/1/2014,11,November,2014
+Government,France,VTT,293,20.00,5860.00,879.00,4981.00,2930.00,2051.00,12/1/2014,12,December,2014
+Small Business,France,Amarilla,2475,300.00,742500.00,111375.00,631125.00,618750.00,12375.00,3/1/2014,3,March,2014
+Small Business,Mexico,Amarilla,546,300.00,163800.00,24570.00,139230.00,136500.00,2730.00,10/1/2014,10,October,2014
+Government,Mexico,Montana,1368,7.00,9576.00,1436.40,8139.60,6840.00,1299.60,2/1/2014,2,February,2014
+Government,Canada,Paseo,723,7.00,5061.00,759.15,4301.85,3615.00,686.85,4/1/2014,4,April,2014
+Channel Partners,United States of America,VTT,1806,12.00,21672.00,3250.80,18421.20,5418.00,13003.20,5/1/2014,5,May,2014
diff --git a/dotnet/samples/Demos/CodeInterpreterPlugin/README.md b/dotnet/samples/Demos/CodeInterpreterPlugin/README.md
index a1e6a007f728..084fed1b41e4 100644
--- a/dotnet/samples/Demos/CodeInterpreterPlugin/README.md
+++ b/dotnet/samples/Demos/CodeInterpreterPlugin/README.md
@@ -31,3 +31,21 @@ OpenAI__ChatModelId
# Azure Container Apps
AzureContainerApps__Endpoint
```
+
+### Usage Example
+
+User: Upload the file c:\temp\code-interpreter\test-file.txt
+
+Assistant: The file test-file.txt has been successfully uploaded.
+
+User: How many files have I uploaded?
+
+Assistant: You have uploaded 1 file.
+
+User: Show me the contents of this file
+
+Assistant: The contents of the file "test-file.txt" are as follows:
+
+```text
+the contents of the file
+```
\ No newline at end of file
diff --git a/dotnet/samples/Demos/FunctionInvocationApproval/README.md b/dotnet/samples/Demos/FunctionInvocationApproval/README.md
new file mode 100644
index 000000000000..99ff202e45fd
--- /dev/null
+++ b/dotnet/samples/Demos/FunctionInvocationApproval/README.md
@@ -0,0 +1,44 @@
+# Function Invocation Approval
+
+This console application shows how to use a function invocation filter (`IFunctionInvocationFilter`) to invoke a Kernel Function only if the operation was approved.
+If the invocation is rejected, the result will contain the reason why, so the LLM can respond appropriately.
+
+The application uses a sample plugin that builds software by following these development stages: requirements gathering, design, implementation, testing, and deployment.
+
+Each step can be approved or rejected. Based on that, the LLM will decide how to proceed.
+
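+Below is a minimal sketch of what such a filter could look like. It is illustrative only: the `ConsoleApprovalFilter` class name and the console prompt are hypothetical, not the demo's actual implementation, but it is built on the real `IFunctionInvocationFilter` interface and `FunctionInvocationContext` members.
+
+```csharp
+using System;
+using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
+
+// Hypothetical filter that asks for console approval before every function invocation.
+public sealed class ConsoleApprovalFilter : IFunctionInvocationFilter
+{
+    public async Task OnFunctionInvocationAsync(
+        FunctionInvocationContext context,
+        Func<FunctionInvocationContext, Task> next)
+    {
+        Console.Write($"Approve invocation of '{context.Function.PluginName}.{context.Function.Name}'? (y/n): ");
+
+        if (string.Equals(Console.ReadLine()?.Trim(), "y", StringComparison.OrdinalIgnoreCase))
+        {
+            // Approved: continue the invocation pipeline.
+            await next(context);
+        }
+        else
+        {
+            // Rejected: short-circuit and surface the reason so the LLM can respond appropriately.
+            context.Result = new FunctionResult(context.Result, "Operation was rejected by the user.");
+        }
+    }
+}
+```
+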
+## Configuring Secrets
+
+The example requires credentials to access OpenAI or Azure OpenAI.
+
+If you have set up those credentials as secrets within Secret Manager or through environment variables for other samples from the solution in which this project is found, they will be re-used.
+
+### To set your secrets with Secret Manager:
+
+```
+cd dotnet/samples/Demos/FunctionInvocationApproval
+
+dotnet user-secrets init
+
+dotnet user-secrets set "OpenAI:ChatModelId" "..."
+dotnet user-secrets set "OpenAI:ApiKey" "..."
+
+dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "..."
+dotnet user-secrets set "AzureOpenAI:Endpoint" "https://... .openai.azure.com/"
+dotnet user-secrets set "AzureOpenAI:ApiKey" "..."
+```
+
+### To set your secrets with environment variables
+
+Use these names:
+
+```
+# OpenAI
+OpenAI__ChatModelId
+OpenAI__ApiKey
+
+# Azure OpenAI
+AzureOpenAI__ChatDeploymentName
+AzureOpenAI__Endpoint
+AzureOpenAI__ApiKey
+```
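+
+As a rough sketch (assuming the standard `Microsoft.Extensions.Configuration` packages and a hypothetical `ConsoleApprovalFilter` like the one sketched above), the application can then read these settings and register the filter while building the kernel:
+
+```csharp
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.SemanticKernel;
+
+// Read settings from Secret Manager and environment variables
+// (the environment variable provider maps "__" to ":").
+IConfiguration config = new ConfigurationBuilder()
+    .AddUserSecrets<Program>()
+    .AddEnvironmentVariables()
+    .Build();
+
+IKernelBuilder builder = Kernel.CreateBuilder();
+builder.AddOpenAIChatCompletion(
+    modelId: config["OpenAI:ChatModelId"]!,
+    apiKey: config["OpenAI:ApiKey"]!);
+
+// Register the approval filter so it runs for every function invocation.
+builder.Services.AddSingleton<IFunctionInvocationFilter, ConsoleApprovalFilter>();
+
+Kernel kernel = builder.Build();
+```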
diff --git a/dotnet/samples/Demos/HomeAutomation/README.md b/dotnet/samples/Demos/HomeAutomation/README.md
index 09907e5363e5..aa5c33cec248 100644
--- a/dotnet/samples/Demos/HomeAutomation/README.md
+++ b/dotnet/samples/Demos/HomeAutomation/README.md
@@ -12,7 +12,7 @@ If you have set up those credentials as secrets within Secret Manager or through
### To set your secrets with Secret Manager:
```
-cd dotnet/samples/HouseAutomation
+cd dotnet/samples/Demos/HomeAutomation
dotnet user-secrets init
diff --git a/dotnet/samples/GettingStartedWithAgents/README.md b/dotnet/samples/GettingStartedWithAgents/README.md
index 4cbca4f8e5d5..39952506548c 100644
--- a/dotnet/samples/GettingStartedWithAgents/README.md
+++ b/dotnet/samples/GettingStartedWithAgents/README.md
@@ -22,7 +22,7 @@ Example|Description
[Step1_Agent](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step1_Agent.cs)|How to create and use an agent.
[Step2_Plugins](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step2_Plugins.cs)|How to associate plug-ins with an agent.
[Step3_Chat](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step3_Chat.cs)|How to create a conversation between agents.
-[Step4_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/Step4_KernelFunctionStrategies/Step1_Agent.cs)|How to utilize a `KernelFunction` as a _chat strategy_.
+[Step4_KernelFunctionStrategies](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step4_KernelFunctionStrategies.cs)|How to utilize a `KernelFunction` as a _chat strategy_.
[Step5_JsonResult](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step5_JsonResult.cs)|How to have an agent produce JSON.
[Step6_DependencyInjection](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step6_DependencyInjection.cs)|How to define dependency injection patterns for agents.
[Step7_OpenAIAssistant](https://github.com/microsoft/semantic-kernel/blob/main/dotnet/samples/GettingStartedWithAgents/Step7_OpenAIAssistant.cs)|How to create an Open AI Assistant agent.
diff --git a/dotnet/src/Agents/Abstractions/AgentChat.cs b/dotnet/src/Agents/Abstractions/AgentChat.cs
index 2ab5e75a276c..26b51928c362 100644
--- a/dotnet/src/Agents/Abstractions/AgentChat.cs
+++ b/dotnet/src/Agents/Abstractions/AgentChat.cs
@@ -223,8 +223,8 @@ protected async IAsyncEnumerable InvokeAgentAsync(
this.History.Add(message);
messages.Add(message);
- // Don't expose internal messages to caller.
- if (message.Role == AuthorRole.Tool || message.Items.All(i => i is FunctionCallContent))
+ // Don't expose function-call and function-result messages to caller.
+ if (message.Items.All(i => i is FunctionCallContent || i is FunctionResultContent))
{
continue;
}
@@ -239,7 +239,7 @@ protected async IAsyncEnumerable InvokeAgentAsync(
this._agentChannels
.Where(kvp => kvp.Value != channel)
.Select(kvp => new ChannelReference(kvp.Value, kvp.Key));
- this._broadcastQueue.Enqueue(channelRefs, messages);
+ this._broadcastQueue.Enqueue(channelRefs, messages.Where(m => m.Role != AuthorRole.Tool).ToArray());
this.Logger.LogInformation("[{MethodName}] Invoked agent {AgentType}: {AgentId}", nameof(InvokeAgentAsync), agent.GetType(), agent.Id);
}
diff --git a/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs b/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs
index 4b1752f88462..843327d77f6a 100644
--- a/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs
+++ b/dotnet/src/Agents/Core/Chat/TerminationStrategy.cs
@@ -49,7 +49,7 @@ public abstract class TerminationStrategy
///
/// Evaluate the input message and determine if the chat has met its completion criteria.
///
- /// The agent actively interacting with the nexus.
+ /// The agent actively interacting with the chat.
/// The most recent message
/// The to monitor for cancellation requests. The default is .
/// True to terminate chat loop.
diff --git a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
index 380bd5877e85..222ea5c5be88 100644
--- a/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
+++ b/dotnet/src/Agents/OpenAI/Agents.OpenAI.csproj
@@ -24,6 +24,7 @@
+
diff --git a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
index 742aa874a301..9665fb680498 100644
--- a/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
+++ b/dotnet/src/Agents/OpenAI/Extensions/KernelFunctionExtensions.cs
@@ -15,7 +15,7 @@ internal static class KernelFunctionExtensions
/// <param name="pluginName">The plugin name</param>
/// <param name="delimiter">The delimiter character</param>
/// <returns>An OpenAI tool definition</returns>
- public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName, char delimiter)
+ public static FunctionToolDefinition ToToolDefinition(this KernelFunction function, string pluginName, string delimiter)
{
var metadata = function.Metadata;
if (metadata.Parameters.Count > 0)
@@ -47,10 +47,10 @@ public static FunctionToolDefinition ToToolDefinition(this KernelFunction functi
required,
};
- return new FunctionToolDefinition(function.GetQualifiedName(pluginName, delimiter), function.Description, BinaryData.FromObjectAsJson(spec));
+ return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName, delimiter), function.Description, BinaryData.FromObjectAsJson(spec));
}
- return new FunctionToolDefinition(function.GetQualifiedName(pluginName, delimiter), function.Description);
+ return new FunctionToolDefinition(FunctionName.ToFullyQualifiedName(function.Name, pluginName, delimiter), function.Description);
}
private static string ConvertType(Type? type)
@@ -86,12 +86,4 @@ TypeCode.Int64 or TypeCode.UInt64 or
_ => "object",
};
}
-
- ///
- /// Produce a fully qualified toolname.
- ///
- public static string GetQualifiedName(this KernelFunction function, string pluginName, char delimiter)
- {
- return $"{pluginName}{delimiter}{function.Name}";
- }
}
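For reference, a rough sketch of how the string-delimited qualified names above round-trip, assuming `FunctionName.ToFullyQualifiedName` and `FunctionName.Parse` behave as they are used in this change (the plugin and function names are placeholders):

    using Microsoft.SemanticKernel;

    const string Delimiter = "-";

    // Build the tool name the assistant sees: "WeatherPlugin-GetWeather".
    string qualified = FunctionName.ToFullyQualifiedName("GetWeather", "WeatherPlugin", Delimiter);

    // Recover the plugin/function pair when the tool call comes back.
    FunctionName parsed = FunctionName.Parse(qualified, Delimiter);
    // parsed.PluginName == "WeatherPlugin", parsed.Name == "GetWeather"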
diff --git a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
index cd8e2880b669..0d8b20b5b931 100644
--- a/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
+++ b/dotnet/src/Agents/OpenAI/OpenAIAssistantChannel.cs
@@ -19,7 +19,7 @@ namespace Microsoft.SemanticKernel.Agents.OpenAI;
internal sealed class OpenAIAssistantChannel(AssistantsClient client, string threadId, OpenAIAssistantConfiguration.PollingConfiguration pollingConfiguration)
: AgentChannel<OpenAIAssistantAgent>
{
- private const char FunctionDelimiter = '-';
+ private const string FunctionDelimiter = "-";
private static readonly HashSet<RunStatus> s_pollingStatuses =
[
@@ -94,7 +94,8 @@ protected override async IAsyncEnumerable<ChatMessageContent> InvokeAsync(
this.Logger.LogInformation("[{MethodName}] Created run: {RunId}", nameof(InvokeAsync), run.Id);
// Evaluate status and process steps and messages, as encountered.
- HashSet<string> processedMessageIds = [];
+ HashSet<string> processedStepIds = [];
+ Dictionary<string, FunctionCallContent> functionSteps = [];
do
{
@@ -113,65 +114,106 @@ protected override async IAsyncEnumerable<ChatMessageContent> InvokeAsync(
this.Logger.LogDebug("[{MethodName}] Processing run steps: {RunId}", nameof(InvokeAsync), run.Id);
// Execute functions in parallel and post results at once.
- var tasks = steps.Data.SelectMany(step => ExecuteStep(agent, step, cancellationToken)).ToArray();
- if (tasks.Length > 0)
+ FunctionCallContent[] activeFunctionSteps = steps.Data.SelectMany(step => ParseFunctionStep(agent, step)).ToArray();
+ if (activeFunctionSteps.Length > 0)
{
- ToolOutput[]? results = await Task.WhenAll(tasks).ConfigureAwait(false);
+ // Emit function-call content
+ yield return GenerateFunctionCallContent(agent.GetName(), activeFunctionSteps);
- await this._client.SubmitToolOutputsToRunAsync(run, results, cancellationToken).ConfigureAwait(false);
+ // Invoke functions for each tool-step
+ IEnumerable<Task<FunctionResultContent>> functionResultTasks = ExecuteFunctionSteps(agent, activeFunctionSteps, cancellationToken);
+
+ // Block for function results
+ FunctionResultContent[] functionResults = await Task.WhenAll(functionResultTasks).ConfigureAwait(false);
+
+ // Process tool output
+ ToolOutput[] toolOutputs = GenerateToolOutputs(functionResults);
+
+ await this._client.SubmitToolOutputsToRunAsync(run, toolOutputs, cancellationToken).ConfigureAwait(false);
}
if (this.Logger.IsEnabled(LogLevel.Information)) // Avoid boxing if not enabled
{
- this.Logger.LogInformation("[{MethodName}] Processed #{MessageCount} run steps: {RunId}", nameof(InvokeAsync), tasks.Length, run.Id);
+ this.Logger.LogInformation("[{MethodName}] Processed #{MessageCount} run steps: {RunId}", nameof(InvokeAsync), activeFunctionSteps.Length, run.Id);
}
}
// Enumerate completed messages
this.Logger.LogDebug("[{MethodName}] Processing run messages: {RunId}", nameof(InvokeAsync), run.Id);
- IEnumerable<RunStepMessageCreationDetails> messageDetails =
+ IEnumerable completedStepsToProcess =
steps
- .OrderBy(s => s.CompletedAt)
- .Select(s => s.StepDetails)
- .OfType<RunStepMessageCreationDetails>()
- .Where(d => !processedMessageIds.Contains(d.MessageCreation.MessageId));
+ .Where(s => s.CompletedAt.HasValue && !processedStepIds.Contains(s.Id))
+ .OrderBy(s => s.CreatedAt);
int messageCount = 0;
- foreach (RunStepMessageCreationDetails detail in messageDetails)
+ foreach (RunStep completedStep in completedStepsToProcess)
{
- ++messageCount;
-
- // Retrieve the message
- ThreadMessage? message = await this.RetrieveMessageAsync(detail, cancellationToken).ConfigureAwait(false);
-
- if (message is not null)
+ if (completedStep.Type.Equals(RunStepType.ToolCalls))
{
- AuthorRole role = new(message.Role.ToString());
+ RunStepToolCallDetails toolCallDetails = (RunStepToolCallDetails)completedStep.StepDetails;
- foreach (MessageContent itemContent in message.ContentItems)
+ foreach (RunStepToolCall toolCall in toolCallDetails.ToolCalls)
{
ChatMessageContent? content = null;
- // Process text content
- if (itemContent is MessageTextContent contentMessage)
+ // Process code-interpreter content
+ if (toolCall is RunStepCodeInterpreterToolCall toolCodeInterpreter)
{
- content = GenerateTextMessageContent(agent.GetName(), role, contentMessage);
+ content = GenerateCodeInterpreterContent(agent.GetName(), toolCodeInterpreter);
}
- // Process image content
- else if (itemContent is MessageImageFileContent contentImage)
+ // Process function result content
+ else if (toolCall is RunStepFunctionToolCall toolFunction)
{
- content = GenerateImageFileContent(agent.GetName(), role, contentImage);
+ FunctionCallContent functionStep = functionSteps[toolFunction.Id]; // Function step always captured on invocation
+ content = GenerateFunctionResultContent(agent.GetName(), functionStep, toolFunction.Output);
}
if (content is not null)
{
+ ++messageCount;
+
yield return content;
}
}
}
+ else if (completedStep.Type.Equals(RunStepType.MessageCreation))
+ {
+ RunStepMessageCreationDetails messageCreationDetails = (RunStepMessageCreationDetails)completedStep.StepDetails;
- processedMessageIds.Add(detail.MessageCreation.MessageId);
+ // Retrieve the message
+ ThreadMessage? message = await this.RetrieveMessageAsync(messageCreationDetails, cancellationToken).ConfigureAwait(false);
+
+ if (message is not null)
+ {
+ AuthorRole role = new(message.Role.ToString());
+
+ foreach (MessageContent itemContent in message.ContentItems)
+ {
+ ChatMessageContent? content = null;
+
+ // Process text content
+ if (itemContent is MessageTextContent contentMessage)
+ {
+ content = GenerateTextMessageContent(agent.GetName(), role, contentMessage);
+ }
+ // Process image content
+ else if (itemContent is MessageImageFileContent contentImage)
+ {
+ content = GenerateImageFileContent(agent.GetName(), role, contentImage);
+ }
+
+ if (content is not null)
+ {
+ ++messageCount;
+
+ yield return content;
+ }
+ }
+ }
+ }
+
+ processedStepIds.Add(completedStep.Id);
}
if (this.Logger.IsEnabled(LogLevel.Information)) // Avoid boxing if not enabled
@@ -213,6 +255,34 @@ async Task> PollRunStatusAsync()
return await this._client.GetRunStepsAsync(run, cancellationToken: cancellationToken).ConfigureAwait(false);
}
+
+ // Local function to capture kernel function state for further processing (participates in method closure).
+ IEnumerable<FunctionCallContent> ParseFunctionStep(OpenAIAssistantAgent agent, RunStep step)
+ {
+ if (step.Status == RunStepStatus.InProgress && step.StepDetails is RunStepToolCallDetails callDetails)
+ {
+ foreach (RunStepFunctionToolCall toolCall in callDetails.ToolCalls.OfType<RunStepFunctionToolCall>())
+ {
+ var nameParts = FunctionName.Parse(toolCall.Name, FunctionDelimiter);
+
+ KernelArguments functionArguments = [];
+ if (!string.IsNullOrWhiteSpace(toolCall.Arguments))
+ {
+ Dictionary<string, object> arguments = JsonSerializer.Deserialize<Dictionary<string, object>>(toolCall.Arguments)!;
+ foreach (var argumentKvp in arguments)
+ {
+ functionArguments[argumentKvp.Key] = argumentKvp.Value.ToString();
+ }
+ }
+
+ var content = new FunctionCallContent(nameParts.Name, nameParts.PluginName, toolCall.Id, functionArguments);
+
+ functionSteps.Add(toolCall.Id, content);
+
+ yield return content;
+ }
+ }
+ }
}
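As a self-contained sketch of the argument handling in ParseFunctionStep above (the JSON payload is invented for illustration):

    using System.Collections.Generic;
    using System.Text.Json;
    using Microsoft.SemanticKernel;

    // The tool call's JSON arguments are flattened into KernelArguments,
    // with each value stored as its string representation.
    string argumentsJson = "{\"city\":\"Seattle\",\"unit\":\"celsius\"}";

    KernelArguments functionArguments = [];
    Dictionary<string, object> arguments = JsonSerializer.Deserialize<Dictionary<string, object>>(argumentsJson)!;
    foreach (var argumentKvp in arguments)
    {
        functionArguments[argumentKvp.Key] = argumentKvp.Value.ToString();
    }
    // functionArguments["city"] == "Seattle"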
///
@@ -324,48 +394,79 @@ private static ChatMessageContent GenerateImageFileContent(string agentName, Aut
return messageContent;
}
- private static IEnumerable<Task<ToolOutput>> ExecuteStep(OpenAIAssistantAgent agent, RunStep step, CancellationToken cancellationToken)
+ private static ChatMessageContent GenerateCodeInterpreterContent(string agentName, RunStepCodeInterpreterToolCall contentCodeInterpreter)
{
- // Process all of the steps that require action
- if (step.Status == RunStepStatus.InProgress && step.StepDetails is RunStepToolCallDetails callDetails)
- {
- foreach (RunStepFunctionToolCall toolCall in callDetails.ToolCalls.OfType<RunStepFunctionToolCall>())
+ return
+ new ChatMessageContent(
+ AuthorRole.Tool,
+ [
+ new TextContent(contentCodeInterpreter.Input)
+ ])
{
- // Run function
- yield return ProcessFunctionStepAsync(toolCall.Id, toolCall);
- }
- }
+ AuthorName = agentName,
+ };
+ }
- // Local function for processing the run-step (participates in method closure).
- async Task<ToolOutput> ProcessFunctionStepAsync(string callId, RunStepFunctionToolCall functionDetails)
+ private static ChatMessageContent GenerateFunctionCallContent(string agentName, FunctionCallContent[] functionSteps)
+ {
+ ChatMessageContent functionCallContent = new(AuthorRole.Tool, content: null)
{
- object result = await InvokeFunctionCallAsync().ConfigureAwait(false);
- if (result is not string toolResult)
- {
- toolResult = JsonSerializer.Serialize(result);
- }
+ AuthorName = agentName
+ };
- return new ToolOutput(callId, toolResult!);
+ functionCallContent.Items.AddRange(functionSteps);
- async Task
-
-
+
+
@@ -21,12 +21,12 @@
-
+
-
-
+
+
diff --git a/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs b/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs
index 1a3d20ed187c..7482dc723518 100644
--- a/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Core/ClientBase.cs
@@ -14,7 +14,7 @@ namespace Microsoft.SemanticKernel.Connectors.Google.Core;
internal abstract class ClientBase
{
- private readonly Func<Task<string>>? _bearerTokenProvider;
+ private readonly Func<ValueTask<string>>? _bearerTokenProvider;
protected ILogger Logger { get; }
@@ -23,7 +23,7 @@ internal abstract class ClientBase
protected ClientBase(
HttpClient httpClient,
ILogger? logger,
- Func<Task<string>> bearerTokenProvider)
+ Func<ValueTask<string>> bearerTokenProvider)
: this(httpClient, logger)
{
Verify.NotNull(bearerTokenProvider);
diff --git a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiChatCompletionClient.cs b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiChatCompletionClient.cs
index 087a1c2bf2f8..e52b5f4e6bd6 100644
--- a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiChatCompletionClient.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiChatCompletionClient.cs
@@ -125,7 +125,7 @@ public GeminiChatCompletionClient(
public GeminiChatCompletionClient(
HttpClient httpClient,
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion,
diff --git a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiTokenCounterClient.cs b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiTokenCounterClient.cs
index f382ded93357..8616c8a88dd5 100644
--- a/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiTokenCounterClient.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Core/Gemini/Clients/GeminiTokenCounterClient.cs
@@ -57,7 +57,7 @@ public GeminiTokenCounterClient(
public GeminiTokenCounterClient(
HttpClient httpClient,
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion,
diff --git a/dotnet/src/Connectors/Connectors.Google/Core/VertexAI/VertexAIEmbeddingClient.cs b/dotnet/src/Connectors/Connectors.Google/Core/VertexAI/VertexAIEmbeddingClient.cs
index 6b00fd70b43b..62525f4ef67b 100644
--- a/dotnet/src/Connectors/Connectors.Google/Core/VertexAI/VertexAIEmbeddingClient.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Core/VertexAI/VertexAIEmbeddingClient.cs
@@ -31,7 +31,7 @@ internal sealed class VertexAIEmbeddingClient : ClientBase
public VertexAIEmbeddingClient(
HttpClient httpClient,
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion,
diff --git a/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIKernelBuilderExtensions.cs
index e8432e1c1c4c..f87da9cbc56e 100644
--- a/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIKernelBuilderExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIKernelBuilderExtensions.cs
@@ -37,7 +37,7 @@ public static class VertexAIKernelBuilderExtensions
public static IKernelBuilder AddVertexAIGeminiChatCompletion(
this IKernelBuilder builder,
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion = VertexAIVersion.V1,
@@ -122,7 +122,7 @@ public static IKernelBuilder AddVertexAIGeminiChatCompletion(
public static IKernelBuilder AddVertexAIEmbeddingGeneration(
this IKernelBuilder builder,
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion = VertexAIVersion.V1,
diff --git a/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIMemoryBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIMemoryBuilderExtensions.cs
index bdb37008726e..10d7264dc26e 100644
--- a/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIMemoryBuilderExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIMemoryBuilderExtensions.cs
@@ -33,7 +33,7 @@ public static class VertexAIMemoryBuilderExtensions
public static MemoryBuilder WithVertexAITextEmbeddingGeneration(
this MemoryBuilder builder,
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion = VertexAIVersion.V1,
diff --git a/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIServiceCollectionExtensions.cs
index 0ccfeb7deda9..c60aa979477f 100644
--- a/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIServiceCollectionExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Extensions/VertexAIServiceCollectionExtensions.cs
@@ -35,7 +35,7 @@ public static class VertexAIServiceCollectionExtensions
public static IServiceCollection AddVertexAIGeminiChatCompletion(
this IServiceCollection services,
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion = VertexAIVersion.V1,
@@ -116,7 +116,7 @@ public static IServiceCollection AddVertexAIGeminiChatCompletion(
public static IServiceCollection AddVertexAIEmbeddingGeneration(
this IServiceCollection services,
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion = VertexAIVersion.V1,
diff --git a/dotnet/src/Connectors/Connectors.Google/Services/VertexAIGeminiChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Google/Services/VertexAIGeminiChatCompletionService.cs
index 4ca2ed9f1bd4..38db5f410314 100644
--- a/dotnet/src/Connectors/Connectors.Google/Services/VertexAIGeminiChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Services/VertexAIGeminiChatCompletionService.cs
@@ -39,7 +39,7 @@ public VertexAIGeminiChatCompletionService(
VertexAIVersion apiVersion = VertexAIVersion.V1,
HttpClient? httpClient = null,
ILoggerFactory? loggerFactory = null)
- : this(modelId, () => Task.FromResult(bearerKey), location, projectId, apiVersion, httpClient, loggerFactory)
+ : this(modelId, () => new ValueTask<string>(bearerKey), location, projectId, apiVersion, httpClient, loggerFactory)
{
Verify.NotNullOrWhiteSpace(bearerKey);
}
@@ -61,7 +61,7 @@ public VertexAIGeminiChatCompletionService(
///
public VertexAIGeminiChatCompletionService(
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion = VertexAIVersion.V1,
diff --git a/dotnet/src/Connectors/Connectors.Google/Services/VertexAITextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.Google/Services/VertexAITextEmbeddingGenerationService.cs
index 92389dc00cdb..a9f9b55e06a9 100644
--- a/dotnet/src/Connectors/Connectors.Google/Services/VertexAITextEmbeddingGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.Google/Services/VertexAITextEmbeddingGenerationService.cs
@@ -39,7 +39,7 @@ public VertexAITextEmbeddingGenerationService(
VertexAIVersion apiVersion = VertexAIVersion.V1,
HttpClient? httpClient = null,
ILoggerFactory? loggerFactory = null)
- : this(modelId, () => Task.FromResult(bearerKey), location, projectId, apiVersion, httpClient, loggerFactory)
+ : this(modelId, () => new ValueTask<string>(bearerKey), location, projectId, apiVersion, httpClient, loggerFactory)
{
Verify.NotNullOrWhiteSpace(bearerKey);
}
@@ -61,7 +61,7 @@ public VertexAITextEmbeddingGenerationService(
///
public VertexAITextEmbeddingGenerationService(
string modelId,
- Func<Task<string>> bearerTokenProvider,
+ Func<ValueTask<string>> bearerTokenProvider,
string location,
string projectId,
VertexAIVersion apiVersion = VertexAIVersion.V1,
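A hedged usage sketch of the new Func<ValueTask<string>> token provider shown above (the model, location, project id and token are placeholders, and the experimental diagnostic id is assumed):

    #pragma warning disable SKEXP0070 // assumed experimental id for the Google connector

    using System;
    using System.Threading.Tasks;
    using Microsoft.SemanticKernel;

    // The provider now returns a ValueTask<string> instead of a Task<string>.
    Func<ValueTask<string>> bearerTokenProvider =
        () => new ValueTask<string>("your-gcloud-access-token");

    IKernelBuilder builder = Kernel.CreateBuilder();
    builder.AddVertexAIGeminiChatCompletion(
        modelId: "gemini-1.5-pro",
        bearerTokenProvider: bearerTokenProvider,
        location: "us-central1",
        projectId: "your-project-id");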
diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/Core/HuggingFaceMessageApiClient.cs b/dotnet/src/Connectors/Connectors.HuggingFace/Core/HuggingFaceMessageApiClient.cs
index 66bd8cdbf365..468f24490edb 100644
--- a/dotnet/src/Connectors/Connectors.HuggingFace/Core/HuggingFaceMessageApiClient.cs
+++ b/dotnet/src/Connectors/Connectors.HuggingFace/Core/HuggingFaceMessageApiClient.cs
@@ -85,9 +85,8 @@ internal async IAsyncEnumerable StreamCompleteChatM
var endpoint = this.GetChatGenerationEndpoint();
var huggingFaceExecutionSettings = HuggingFacePromptExecutionSettings.FromExecutionSettings(executionSettings);
- huggingFaceExecutionSettings.ModelId ??= this._clientCore.ModelId;
- var request = this.CreateChatRequest(chatHistory, huggingFaceExecutionSettings);
+ var request = this.CreateChatRequest(chatHistory, huggingFaceExecutionSettings, modelId);
request.Stream = true;
using var activity = ModelDiagnostics.StartCompletionActivity(endpoint, modelId, this._clientCore.ModelProvider, chatHistory, huggingFaceExecutionSettings);
@@ -149,8 +148,7 @@ internal async Task<IReadOnlyList<ChatMessageContent>> CompleteChatMessageAsync(
var endpoint = this.GetChatGenerationEndpoint();
var huggingFaceExecutionSettings = HuggingFacePromptExecutionSettings.FromExecutionSettings(executionSettings);
- huggingFaceExecutionSettings.ModelId ??= this._clientCore.ModelId;
- var request = this.CreateChatRequest(chatHistory, huggingFaceExecutionSettings);
+ var request = this.CreateChatRequest(chatHistory, huggingFaceExecutionSettings, modelId);
using var activity = ModelDiagnostics.StartCompletionActivity(endpoint, modelId, this._clientCore.ModelProvider, chatHistory, huggingFaceExecutionSettings);
using var httpRequestMessage = this._clientCore.CreatePost(request, endpoint, this._clientCore.ApiKey);
@@ -276,7 +274,8 @@ private async IAsyncEnumerable ProcessChatResponseS
private ChatCompletionRequest CreateChatRequest(
ChatHistory chatHistory,
- HuggingFacePromptExecutionSettings huggingFaceExecutionSettings)
+ HuggingFacePromptExecutionSettings huggingFaceExecutionSettings,
+ string modelId)
{
HuggingFaceClient.ValidateMaxTokens(huggingFaceExecutionSettings.MaxTokens);
@@ -287,7 +286,7 @@ private ChatCompletionRequest CreateChatRequest(
JsonSerializer.Serialize(huggingFaceExecutionSettings));
}
- var request = ChatCompletionRequest.FromChatHistoryAndExecutionSettings(chatHistory, huggingFaceExecutionSettings);
+ var request = ChatCompletionRequest.FromChatHistoryAndExecutionSettings(chatHistory, huggingFaceExecutionSettings, modelId);
return request;
}
diff --git a/dotnet/src/Connectors/Connectors.HuggingFace/Core/Models/ChatCompletionRequest.cs b/dotnet/src/Connectors/Connectors.HuggingFace/Core/Models/ChatCompletionRequest.cs
index e3f930fecfb9..886e13f18bda 100644
--- a/dotnet/src/Connectors/Connectors.HuggingFace/Core/Models/ChatCompletionRequest.cs
+++ b/dotnet/src/Connectors/Connectors.HuggingFace/Core/Models/ChatCompletionRequest.cs
@@ -102,8 +102,9 @@ internal sealed class ChatCompletionRequest
///
/// Chat history to be used for the request.
/// Execution settings to be used for the request.
- /// TexGenerationtRequest object.
- internal static ChatCompletionRequest FromChatHistoryAndExecutionSettings(ChatHistory chatHistory, HuggingFacePromptExecutionSettings executionSettings)
+ /// Model id to use if value in prompt execution settings is not set.
+ /// TextGenerationRequest object.
+ internal static ChatCompletionRequest FromChatHistoryAndExecutionSettings(ChatHistory chatHistory, HuggingFacePromptExecutionSettings executionSettings, string modelId)
{
return new ChatCompletionRequest
{
@@ -118,7 +119,7 @@ internal static ChatCompletionRequest FromChatHistoryAndExecutionSettings(ChatHi
Temperature = executionSettings.Temperature,
Stop = executionSettings.Stop,
MaxTokens = executionSettings.MaxTokens,
- Model = executionSettings.ModelId ?? TextGenerationInferenceDefaultModel,
+ Model = executionSettings.ModelId ?? modelId ?? TextGenerationInferenceDefaultModel,
TopP = executionSettings.TopP,
TopLogProbs = executionSettings.TopLogProbs
};
diff --git a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs
index 70d6210fc355..d9d5b67ee4af 100644
--- a/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStore.cs
@@ -2,6 +2,7 @@
using System;
using System.Collections.Generic;
+using System.Collections.ObjectModel;
using System.Diagnostics;
using System.Linq;
using System.Runtime.CompilerServices;
@@ -22,11 +23,62 @@ namespace Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL;
///
public class AzureCosmosDBNoSQLMemoryStore : IMemoryStore, IDisposable
{
+ private const string EmbeddingPath = "/embedding";
+
private readonly CosmosClient _cosmosClient;
private readonly VectorEmbeddingPolicy _vectorEmbeddingPolicy;
private readonly IndexingPolicy _indexingPolicy;
private readonly string _databaseName;
+ ///
+ /// Initiates a AzureCosmosDBNoSQLMemoryStore instance using a Azure Cosmos DB connection string
+ /// and other properties required for vector search.
+ ///
+ /// Connection string required to connect to Azure Cosmos DB.
+ /// The database name to connect to.
+ /// The number of dimensions the embedding vectors to be stored.
+ /// The data type of the embedding vectors to be stored.
+ /// The type of index to use for the embedding vectors to be stored.
+ /// The application name to use in requests.
+ public AzureCosmosDBNoSQLMemoryStore(
+ string connectionString,
+ string databaseName,
+ ulong dimensions,
+ VectorDataType vectorDataType,
+ VectorIndexType vectorIndexType,
+ string? applicationName = null)
+ : this(
+ new CosmosClient(
+ connectionString,
+ new CosmosClientOptions
+ {
+ ApplicationName = applicationName ?? HttpHeaderConstant.Values.UserAgent,
+ Serializer = new CosmosSystemTextJsonSerializer(JsonSerializerOptions.Default),
+ }),
+ databaseName,
+ new VectorEmbeddingPolicy(
+ [
+ new Embedding
+ {
+ DataType = vectorDataType,
+ Dimensions = dimensions,
+ DistanceFunction = DistanceFunction.Cosine,
+ Path = EmbeddingPath,
+ }
+ ]),
+ new IndexingPolicy
+ {
+ VectorIndexes = new Collection<VectorIndexPath> {
+ new()
+ {
+ Path = EmbeddingPath,
+ Type = vectorIndexType,
+ },
+ },
+ })
+ {
+ }
+
///
/// Initiates a AzureCosmosDBNoSQLMemoryStore instance using a Azure Cosmos DB connection string
/// and other properties required for vector search.
@@ -71,14 +123,29 @@ public AzureCosmosDBNoSQLMemoryStore(
VectorEmbeddingPolicy vectorEmbeddingPolicy,
IndexingPolicy indexingPolicy)
{
- if (!vectorEmbeddingPolicy.Embeddings.Any(e => e.Path == "/embedding"))
+ var embedding = vectorEmbeddingPolicy.Embeddings.FirstOrDefault(e => e.Path == EmbeddingPath);
+ if (embedding is null)
{
throw new InvalidOperationException($"""
In order for {nameof(GetNearestMatchAsync)} to function, {nameof(vectorEmbeddingPolicy)} should
- contain an embedding path at /embedding. It's also recommended to include a that path in the
+ contain an embedding path at {EmbeddingPath}. It's also recommended to include that path in the
{nameof(indexingPolicy)} to improve performance and reduce cost for searches.
""");
}
+ else if (embedding.DistanceFunction != DistanceFunction.Cosine)
+ {
+ throw new InvalidOperationException($"""
+ In order for {nameof(GetNearestMatchAsync)} to reliably return relevance information, the {nameof(DistanceFunction)} should
+ be specified as {nameof(DistanceFunction)}.{nameof(DistanceFunction.Cosine)}.
+ """);
+ }
+ else if (embedding.DataType != VectorDataType.Float16 && embedding.DataType != VectorDataType.Float32)
+ {
+ throw new NotSupportedException($"""
+ Only {nameof(VectorDataType)}.{nameof(VectorDataType.Float16)} and {nameof(VectorDataType)}.{nameof(VectorDataType.Float32)}
+ are supported.
+ """);
+ }
this._cosmosClient = cosmosClient;
this._databaseName = databaseName;
this._vectorEmbeddingPolicy = vectorEmbeddingPolicy;
@@ -164,6 +231,12 @@ public async Task UpsertAsync(
MemoryRecord record,
CancellationToken cancellationToken = default)
{
+ // In some cases we're expected to generate the key to use. Do so if one isn't provided.
+ if (string.IsNullOrEmpty(record.Key))
+ {
+ record.Key = Guid.NewGuid().ToString();
+ }
+
var result = await this._cosmosClient
.GetDatabase(this._databaseName)
.GetContainer(collectionName)
@@ -193,6 +266,7 @@ public async IAsyncEnumerable<string> UpsertBatchAsync(
bool withEmbedding = false,
CancellationToken cancellationToken = default)
{
+ // TODO: Consider using a query when `withEmbedding` is false to avoid passing it over the wire.
var result = await this._cosmosClient
.GetDatabase(this._databaseName)
.GetContainer(collectionName)
@@ -330,9 +404,10 @@ ORDER BY VectorDistance(x.embedding, @embedding)
{
foreach (var memoryRecord in await feedIterator.ReadNextAsync(cancellationToken).ConfigureAwait(false))
{
- if (memoryRecord.SimilarityScore >= minRelevanceScore)
+ var relevanceScore = (memoryRecord.SimilarityScore + 1) / 2;
+ if (relevanceScore >= minRelevanceScore)
{
- yield return (memoryRecord, memoryRecord.SimilarityScore);
+ yield return (memoryRecord, relevanceScore);
}
}
}
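A small sketch of the relevance-score normalization applied above: cosine similarity from the nearest-match query lies in [-1, 1], and the store now maps it onto the [0, 1] range that minRelevanceScore expects.

    // Map cosine similarity (-1..1) to relevance (0..1), as done in the query loop above.
    static double ToRelevanceScore(double cosineSimilarity) => (cosineSimilarity + 1) / 2;

    // ToRelevanceScore(1.0)  == 1.0   (same direction)
    // ToRelevanceScore(0.0)  == 0.5   (orthogonal)
    // ToRelevanceScore(-1.0) == 0.0   (opposite direction)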
diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs
index 84e844800e84..aee0735507c5 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/Database.cs
@@ -56,30 +56,13 @@ public async Task CreateCollectionAsync(SqliteConnection conn, string collection
await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
}
- public async Task UpdateAsync(SqliteConnection conn,
+ public async Task UpsertAsync(SqliteConnection conn,
string collection, string key, string? metadata, string? embedding, string? timestamp, CancellationToken cancellationToken = default)
{
using SqliteCommand cmd = conn.CreateCommand();
cmd.CommandText = $@"
- UPDATE {TableName}
- SET metadata=@metadata, embedding=@embedding, timestamp=@timestamp
- WHERE collection=@collection
- AND key=@key ";
- cmd.Parameters.AddWithValue("@collection", collection);
- cmd.Parameters.AddWithValue("@key", key);
- cmd.Parameters.AddWithValue("@metadata", metadata ?? string.Empty);
- cmd.Parameters.AddWithValue("@embedding", embedding ?? string.Empty);
- cmd.Parameters.AddWithValue("@timestamp", timestamp ?? string.Empty);
- await cmd.ExecuteNonQueryAsync(cancellationToken).ConfigureAwait(false);
- }
-
- public async Task InsertOrIgnoreAsync(SqliteConnection conn,
- string collection, string key, string? metadata, string? embedding, string? timestamp, CancellationToken cancellationToken = default)
- {
- using SqliteCommand cmd = conn.CreateCommand();
- cmd.CommandText = $@"
- INSERT OR IGNORE INTO {TableName}(collection, key, metadata, embedding, timestamp)
- VALUES(@collection, @key, @metadata, @embedding, @timestamp); ";
+ INSERT OR REPLACE INTO {TableName}(collection, key, metadata, embedding, timestamp)
+ VALUES(@collection, @key, @metadata, @embedding, @timestamp);";
cmd.Parameters.AddWithValue("@collection", collection);
cmd.Parameters.AddWithValue("@key", key);
cmd.Parameters.AddWithValue("@metadata", metadata ?? string.Empty);
diff --git a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs
index bdceb8884885..1dbe176146ce 100644
--- a/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs
+++ b/dotnet/src/Connectors/Connectors.Memory.Sqlite/SqliteMemoryStore.cs
@@ -246,18 +246,8 @@ private async Task InternalUpsertAsync(SqliteConnection connection, stri
{
record.Key = record.Metadata.Id;
- // Update
- await this._dbConnector.UpdateAsync(
- conn: connection,
- collection: collectionName,
- key: record.Key,
- metadata: record.GetSerializedMetadata(),
- embedding: JsonSerializer.Serialize(record.Embedding, JsonOptionsCache.Default),
- timestamp: ToTimestampString(record.Timestamp),
- cancellationToken: cancellationToken).ConfigureAwait(false);
-
- // Insert if entry does not exists
- await this._dbConnector.InsertOrIgnoreAsync(
+ // Insert or replace
+ await this._dbConnector.UpsertAsync(
conn: connection,
collection: collectionName,
key: record.Key,
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
index 2a0cf135adaa..8059077d8bf4 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/AzureSdk/ClientCore.cs
@@ -561,33 +561,6 @@ internal async Task> GetChatMessageContentsAsy
return [chat.Last()];
}
-
- static void AddResponseMessage(ChatCompletionsOptions chatOptions, ChatHistory chat, string? result, string? errorMessage, ChatCompletionsToolCall toolCall, ILogger logger)
- {
- // Log any error
- if (errorMessage is not null && logger.IsEnabled(LogLevel.Debug))
- {
- Debug.Assert(result is null);
- logger.LogDebug("Failed to handle tool request ({ToolId}). {Error}", toolCall.Id, errorMessage);
- }
-
- // Add the tool response message to the chat options
- result ??= errorMessage ?? string.Empty;
- chatOptions.Messages.Add(new ChatRequestToolMessage(result, toolCall.Id));
-
- // Add the tool response message to the chat history.
- var message = new ChatMessageContent(role: AuthorRole.Tool, content: result, metadata: new Dictionary<string, object?> { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } });
-
- if (toolCall is ChatCompletionsFunctionToolCall functionCall)
- {
- // Add an item of type FunctionResultContent to the ChatMessageContent.Items collection in addition to the function result stored as a string in the ChatMessageContent.Content property.
- // This will enable migration to the new function calling model and facilitate the deprecation of the current one in the future.
- var functionName = FunctionName.Parse(functionCall.Name, OpenAIFunction.NameSeparator);
- message.Items.Add(new FunctionResultContent(functionName.Name, functionName.PluginName, functionCall.Id, result));
- }
-
- chat.Add(message);
- }
}
// Update tool use information for the next go-around based on having completed another iteration.
@@ -721,6 +694,16 @@ internal async IAsyncEnumerable GetStreamingC
}
var openAIStreamingChatMessageContent = new OpenAIStreamingChatMessageContent(update, update.ChoiceIndex ?? 0, this.DeploymentOrModelName, metadata) { AuthorName = streamedName };
+
+ if (update.ToolCallUpdate is StreamingFunctionToolCallUpdate functionCallUpdate)
+ {
+ openAIStreamingChatMessageContent.Items.Add(new StreamingFunctionCallUpdateContent(
+ callId: functionCallUpdate.Id,
+ name: functionCallUpdate.Name,
+ arguments: functionCallUpdate.ArgumentsUpdate,
+ functionCallIndex: functionCallUpdate.ToolCallIndex));
+ }
+
streamedContents?.Add(openAIStreamingChatMessageContent);
yield return openAIStreamingChatMessageContent;
}
@@ -728,12 +711,13 @@ internal async IAsyncEnumerable GetStreamingC
// Translate all entries into ChatCompletionsFunctionToolCall instances.
toolCalls = OpenAIFunctionToolCall.ConvertToolCallUpdatesToChatCompletionsFunctionToolCalls(
ref toolCallIdsByIndex, ref functionNamesByIndex, ref functionArgumentBuildersByIndex);
+
// Translate all entries into FunctionCallContent instances for diagnostics purposes.
- functionCallContents = ModelDiagnostics.IsSensitiveEventsEnabled() ? toolCalls.Select(this.GetFunctionCallContent).ToArray() : null;
+ functionCallContents = this.GetFunctionCallContents(toolCalls).ToArray();
}
finally
{
- activity?.EndStreaming(streamedContents, functionCallContents);
+ activity?.EndStreaming(streamedContents, ModelDiagnostics.IsSensitiveEventsEnabled() ? functionCallContents : null);
await responseEnumerator.DisposeAsync();
}
}
@@ -764,17 +748,7 @@ internal async IAsyncEnumerable GetStreamingC
// Add the original assistant message to the chatOptions; this is required for the service
// to understand the tool call responses.
chatOptions.Messages.Add(GetRequestMessage(streamedRole ?? default, content, streamedName, toolCalls));
- // Add the result message to the caller's chat history
- var newChatMessageContent = new OpenAIChatMessageContent(streamedRole ?? default, content, this.DeploymentOrModelName, toolCalls, metadata)
- {
- AuthorName = streamedName
- };
- // Add the tool call messages to the new chat message content for diagnostics purposes.
- foreach (var functionCall in functionCallContents ?? [])
- {
- newChatMessageContent.Items.Add(functionCall);
- }
- chat.Add(newChatMessageContent);
+ chat.Add(this.GetChatMessage(streamedRole ?? default, content, toolCalls, functionCallContents, metadata, streamedName));
// Respond to each tooling request.
for (int toolCallIndex = 0; toolCallIndex < toolCalls.Length; toolCallIndex++)
@@ -784,7 +758,7 @@ internal async IAsyncEnumerable GetStreamingC
// We currently only know about function tool calls. If it's anything else, we'll respond with an error.
if (string.IsNullOrEmpty(toolCall.Name))
{
- AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, "Error: Tool call was not a function call.", this.Logger);
+ AddResponseMessage(chatOptions, chat, result: null, "Error: Tool call was not a function call.", toolCall, this.Logger);
continue;
}
@@ -796,7 +770,7 @@ internal async IAsyncEnumerable GetStreamingC
}
catch (JsonException)
{
- AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, "Error: Function call arguments were invalid JSON.", this.Logger);
+ AddResponseMessage(chatOptions, chat, result: null, "Error: Function call arguments were invalid JSON.", toolCall, this.Logger);
continue;
}
@@ -806,14 +780,14 @@ internal async IAsyncEnumerable GetStreamingC
if (chatExecutionSettings.ToolCallBehavior?.AllowAnyRequestedKernelFunction is not true &&
!IsRequestableTool(chatOptions, openAIFunctionToolCall))
{
- AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, "Error: Function call request for a function that wasn't defined.", this.Logger);
+ AddResponseMessage(chatOptions, chat, result: null, "Error: Function call request for a function that wasn't defined.", toolCall, this.Logger);
continue;
}
// Find the function in the kernel and populate the arguments.
if (!kernel!.Plugins.TryGetFunctionAndArguments(openAIFunctionToolCall, out KernelFunction? function, out KernelArguments? functionArgs))
{
- AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, "Error: Requested function could not be found.", this.Logger);
+ AddResponseMessage(chatOptions, chat, result: null, "Error: Requested function could not be found.", toolCall, this.Logger);
continue;
}
@@ -848,7 +822,7 @@ internal async IAsyncEnumerable GetStreamingC
catch (Exception e)
#pragma warning restore CA1031 // Do not catch general exception types
{
- AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, result: null, $"Error: Exception while invoking function. {e.Message}", this.Logger);
+ AddResponseMessage(chatOptions, chat, result: null, $"Error: Exception while invoking function. {e.Message}", toolCall, this.Logger);
continue;
}
finally
@@ -862,7 +836,7 @@ internal async IAsyncEnumerable GetStreamingC
object functionResultValue = functionResult.GetValue() ?? string.Empty;
var stringResult = ProcessFunctionResult(functionResultValue, chatExecutionSettings.ToolCallBehavior);
- AddResponseMessage(chatOptions, chat, streamedRole, toolCall, metadata, stringResult, errorMessage: null, this.Logger);
+ AddResponseMessage(chatOptions, chat, stringResult, errorMessage: null, toolCall, this.Logger);
// If filter requested termination, returning latest function result and breaking request iteration loop.
if (invocationContext.Terminate)
@@ -877,22 +851,6 @@ internal async IAsyncEnumerable GetStreamingC
yield return new OpenAIStreamingChatMessageContent(lastChatMessage.Role, lastChatMessage.Content);
yield break;
}
-
- static void AddResponseMessage(
- ChatCompletionsOptions chatOptions, ChatHistory chat, ChatRole? streamedRole, ChatCompletionsToolCall tool, IReadOnlyDictionary? metadata,
- string? result, string? errorMessage, ILogger logger)
- {
- if (errorMessage is not null && logger.IsEnabled(LogLevel.Debug))
- {
- Debug.Assert(result is null);
- logger.LogDebug("Failed to handle tool request ({ToolId}). {Error}", tool.Id, errorMessage);
- }
-
- // Add the tool response message to both the chat options and to the chat history.
- result ??= errorMessage ?? string.Empty;
- chatOptions.Messages.Add(new ChatRequestToolMessage(result, tool.Id));
- chat.AddMessage(AuthorRole.Tool, result, metadata: new Dictionary<string, object?> { { OpenAIChatMessageContent.ToolIdProperty, tool.Id } });
- }
}
// Update tool use information for the next go-around based on having completed another iteration.
@@ -1391,58 +1349,106 @@ private OpenAIChatMessageContent GetChatMessage(ChatChoice chatChoice, ChatCompl
{
var message = new OpenAIChatMessageContent(chatChoice.Message, this.DeploymentOrModelName, GetChatChoiceMetadata(responseData, chatChoice));
- foreach (var toolCall in chatChoice.Message.ToolCalls)
+ message.Items.AddRange(this.GetFunctionCallContents(chatChoice.Message.ToolCalls));
+
+ return message;
+ }
+
+ private OpenAIChatMessageContent GetChatMessage(ChatRole chatRole, string content, ChatCompletionsFunctionToolCall[] toolCalls, FunctionCallContent[]? functionCalls, IReadOnlyDictionary<string, object?>? metadata, string? authorName)
+ {
+ var message = new OpenAIChatMessageContent(chatRole, content, this.DeploymentOrModelName, toolCalls, metadata)
{
- // Adding items of 'FunctionCallContent' type to the 'Items' collection even though the function calls are available via the 'ToolCalls' property.
- // This allows consumers to work with functions in an LLM-agnostic way.
- if (toolCall is ChatCompletionsFunctionToolCall functionToolCall)
- {
- var functionCallContent = this.GetFunctionCallContent(functionToolCall);
- message.Items.Add(functionCallContent);
- }
+ AuthorName = authorName,
+ };
+
+ if (functionCalls is not null)
+ {
+ message.Items.AddRange(functionCalls);
}
return message;
}
- private FunctionCallContent GetFunctionCallContent(ChatCompletionsFunctionToolCall toolCall)
+ private IEnumerable<FunctionCallContent> GetFunctionCallContents(IEnumerable<ChatCompletionsToolCall> toolCalls)
{
- KernelArguments? arguments = null;
- Exception? exception = null;
- try
+ List<FunctionCallContent>? result = null;
+
+ foreach (var toolCall in toolCalls)
{
- arguments = JsonSerializer.Deserialize<KernelArguments>(toolCall.Arguments);
- if (arguments is not null)
+ // Adding items of 'FunctionCallContent' type to the 'Items' collection even though the function calls are available via the 'ToolCalls' property.
+ // This allows consumers to work with functions in an LLM-agnostic way.
+ if (toolCall is ChatCompletionsFunctionToolCall functionToolCall)
{
- // Iterate over copy of the names to avoid mutating the dictionary while enumerating it
- var names = arguments.Names.ToArray();
- foreach (var name in names)
+ Exception? exception = null;
+ KernelArguments? arguments = null;
+ try
{
- arguments[name] = arguments[name]?.ToString();
+ arguments = JsonSerializer.Deserialize<KernelArguments>(functionToolCall.Arguments);
+ if (arguments is not null)
+ {
+ // Iterate over copy of the names to avoid mutating the dictionary while enumerating it
+ var names = arguments.Names.ToArray();
+ foreach (var name in names)
+ {
+ arguments[name] = arguments[name]?.ToString();
+ }
+ }
+ }
+ catch (JsonException ex)
+ {
+ exception = new KernelException("Error: Function call arguments were invalid JSON.", ex);
+
+ if (this.Logger.IsEnabled(LogLevel.Debug))
+ {
+ this.Logger.LogDebug(ex, "Failed to deserialize function arguments ({FunctionName}/{FunctionId}).", functionToolCall.Name, functionToolCall.Id);
+ }
}
+
+ var functionName = FunctionName.Parse(functionToolCall.Name, OpenAIFunction.NameSeparator);
+
+ var functionCallContent = new FunctionCallContent(
+ functionName: functionName.Name,
+ pluginName: functionName.PluginName,
+ id: functionToolCall.Id,
+ arguments: arguments)
+ {
+ InnerContent = functionToolCall,
+ Exception = exception
+ };
+
+ result ??= [];
+ result.Add(functionCallContent);
}
}
- catch (JsonException ex)
- {
- exception = new KernelException("Error: Function call arguments were invalid JSON.", ex);
- if (this.Logger.IsEnabled(LogLevel.Debug))
- {
- this.Logger.LogDebug(ex, "Failed to deserialize function arguments ({FunctionName}/{FunctionId}).", toolCall.Name, toolCall.Id);
- }
+ return result ?? Enumerable.Empty<FunctionCallContent>();
+ }
+
+ private static void AddResponseMessage(ChatCompletionsOptions chatOptions, ChatHistory chat, string? result, string? errorMessage, ChatCompletionsToolCall toolCall, ILogger logger)
+ {
+ // Log any error
+ if (errorMessage is not null && logger.IsEnabled(LogLevel.Debug))
+ {
+ Debug.Assert(result is null);
+ logger.LogDebug("Failed to handle tool request ({ToolId}). {Error}", toolCall.Id, errorMessage);
}
- var functionName = FunctionName.Parse(toolCall.Name, OpenAIFunction.NameSeparator);
+ // Add the tool response message to the chat options
+ result ??= errorMessage ?? string.Empty;
+ chatOptions.Messages.Add(new ChatRequestToolMessage(result, toolCall.Id));
- return new FunctionCallContent(
- functionName: functionName.Name,
- pluginName: functionName.PluginName,
- id: toolCall.Id,
- arguments: arguments)
+ // Add the tool response message to the chat history.
+ var message = new ChatMessageContent(role: AuthorRole.Tool, content: result, metadata: new Dictionary<string, object?> { { OpenAIChatMessageContent.ToolIdProperty, toolCall.Id } });
+
+ if (toolCall is ChatCompletionsFunctionToolCall functionCall)
{
- InnerContent = toolCall,
- Exception = exception
- };
+ // Add an item of type FunctionResultContent to the ChatMessageContent.Items collection in addition to the function result stored as a string in the ChatMessageContent.Content property.
+ // This will enable migration to the new function calling model and facilitate the deprecation of the current one in the future.
+ var functionName = FunctionName.Parse(functionCall.Name, OpenAIFunction.NameSeparator);
+ message.Items.Add(new FunctionResultContent(functionName.Name, functionName.PluginName, functionCall.Id, result));
+ }
+
+ chat.Add(message);
}
private static void ValidateMaxTokens(int? maxTokens)
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml b/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml
index 1dd99a9223a4..3477ed220ea0 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml
+++ b/dotnet/src/Connectors/Connectors.OpenAI/CompatibilitySuppressions.xml
@@ -1,6 +1,20 @@
+
+ CP0002
+ F:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose.Assistants
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0002
+ F:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose.FineTune
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+ CP0002M:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFileService.GetFileContent(System.String,System.Threading.CancellationToken)
@@ -8,6 +22,41 @@
lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dlltrue
+
+ CP0002
+ M:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAITextToImageService.#ctor(System.String,System.String,System.Net.Http.HttpClient,Microsoft.Extensions.Logging.ILoggerFactory)
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0002
+ M:Microsoft.SemanticKernel.OpenAIServiceCollectionExtensions.AddOpenAITextToImage(Microsoft.Extensions.DependencyInjection.IServiceCollection,System.String,System.String,System.String)
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0002
+ M:Microsoft.SemanticKernel.OpenAIServiceCollectionExtensions.AddOpenAITextToImage(Microsoft.SemanticKernel.IKernelBuilder,System.String,System.String,System.String,System.Net.Http.HttpClient)
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0002
+ F:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose.Assistants
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0002
+ F:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose.FineTune
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+ CP0002M:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFileService.GetFileContent(System.String,System.Threading.CancellationToken)
@@ -15,4 +64,53 @@
lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dlltrue
+
+ CP0002
+ M:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAITextToImageService.#ctor(System.String,System.String,System.Net.Http.HttpClient,Microsoft.Extensions.Logging.ILoggerFactory)
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0002
+ M:Microsoft.SemanticKernel.OpenAIServiceCollectionExtensions.AddOpenAITextToImage(Microsoft.Extensions.DependencyInjection.IServiceCollection,System.String,System.String,System.String)
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0002
+ M:Microsoft.SemanticKernel.OpenAIServiceCollectionExtensions.AddOpenAITextToImage(Microsoft.SemanticKernel.IKernelBuilder,System.String,System.String,System.String,System.Net.Http.HttpClient)
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0007
+ T:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0007
+ T:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0008
+ T:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/net8.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
+
+ CP0008
+ T:Microsoft.SemanticKernel.Connectors.OpenAI.OpenAIFilePurpose
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ lib/netstandard2.0/Microsoft.SemanticKernel.Connectors.OpenAI.dll
+ true
+
\ No newline at end of file
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs b/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs
index a01b2d08fa8d..8d87720fa89f 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFilePurpose.cs
@@ -1,22 +1,99 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Diagnostics.CodeAnalysis;
namespace Microsoft.SemanticKernel.Connectors.OpenAI;
///
-/// Defines the purpose associated with the uploaded file.
+/// Defines the purpose associated with the uploaded file:
+/// https://platform.openai.com/docs/api-reference/files/object#files/object-purpose
///
[Experimental("SKEXP0010")]
-public enum OpenAIFilePurpose
+public readonly struct OpenAIFilePurpose : IEquatable<OpenAIFilePurpose>
{
///
- /// File to be used by assistants for model processing.
+ /// File to be used by assistants as input.
///
- Assistants,
+ public static OpenAIFilePurpose Assistants { get; } = new("assistants");
///
- /// File to be used by fine-tuning jobs.
+ /// File produced as assistants output.
///
- FineTune,
+ public static OpenAIFilePurpose AssistantsOutput { get; } = new("assistants_output");
+
+ ///
+ /// Files uploaded as a batch of API requests
+ ///
+ public static OpenAIFilePurpose Batch { get; } = new("batch");
+
+ ///
+ /// File produced as result of a file included as a batch request.
+ ///
+ public static OpenAIFilePurpose BatchOutput { get; } = new("batch_output");
+
+ ///
+ /// File to be used as input to fine-tune a model.
+ ///
+ public static OpenAIFilePurpose FineTune { get; } = new("fine-tune");
+
+ ///
+ /// File produced as result of fine-tuning a model.
+ ///
+ public static OpenAIFilePurpose FineTuneResults { get; } = new("fine-tune-results");
+
+ ///
+ /// File to be used for Assistants image file inputs.
+ ///
+ public static OpenAIFilePurpose Vision { get; } = new("vision");
+
+ ///
+ /// Gets the label associated with this .
+ ///
+ public string Label { get; }
+
+ ///
+ /// Creates a new instance with the provided label.
+ ///
+ /// The label to associate with this .
+ public OpenAIFilePurpose(string label)
+ {
+ Verify.NotNullOrWhiteSpace(label, nameof(label));
+ this.Label = label!;
+ }
+
+ ///
+ /// Returns a value indicating whether two instances are equivalent, as determined by a
+ /// case-insensitive comparison of their labels.
+ ///
+ /// the first instance to compare
+ /// the second instance to compare
+ /// true if left and right are both null or have equivalent labels; false otherwise
+ public static bool operator ==(OpenAIFilePurpose left, OpenAIFilePurpose right)
+ => left.Equals(right);
+
+ ///
+ /// Returns a value indicating whether two instances are not equivalent, as determined by a
+ /// case-insensitive comparison of their labels.
+ ///
+ /// the first instance to compare
+ /// the second instance to compare
+ /// false if left and right are both null or have equivalent labels; true otherwise
+ public static bool operator !=(OpenAIFilePurpose left, OpenAIFilePurpose right)
+ => !(left == right);
+
+ ///
+ public override bool Equals([NotNullWhen(true)] object? obj)
+ => obj is OpenAIFilePurpose otherPurpose && this == otherPurpose;
+
+ ///
+ public bool Equals(OpenAIFilePurpose other)
+ => string.Equals(this.Label, other.Label, StringComparison.OrdinalIgnoreCase);
+
+ ///
+ public override int GetHashCode()
+ => StringComparer.OrdinalIgnoreCase.GetHashCode(this.Label);
+
+ ///
+ public override string ToString() => this.Label;
}
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs b/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs
index cc61734f44c8..690954448eea 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/Files/OpenAIFileService.cs
@@ -112,7 +112,8 @@ public async Task DeleteFileAsync(string id, CancellationToken cancellationToken
public async Task GetFileContentAsync(string id, CancellationToken cancellationToken = default)
{
Verify.NotNull(id, nameof(id));
- var (stream, mimetype) = await this.StreamGetRequestAsync($"{this._serviceUri}/{id}/content", cancellationToken).ConfigureAwait(false);
+ var contentUri = $"{this._serviceUri}/{id}/content";
+ var (stream, mimetype) = await this.StreamGetRequestAsync(contentUri, cancellationToken).ConfigureAwait(false);
using (stream)
{
@@ -123,7 +124,12 @@ public async Task GetFileContentAsync(string id, CancellationToke
#else
await stream.CopyToAsync(memoryStream, cancellationToken).ConfigureAwait(false);
#endif
- return new BinaryContent(memoryStream.ToArray(), mimetype);
+ return
+ new(memoryStream.ToArray(), mimetype)
+ {
+ Metadata = new Dictionary<string, object?>() { { "id", id } },
+ Uri = new Uri(contentUri),
+ };
}
}
@@ -147,9 +153,19 @@ public async Task GetFileAsync(string id, CancellationToken
///
/// The to monitor for cancellation requests. The default is .
/// The metadata of all uploaded files.
- public async Task<IEnumerable<OpenAIFileReference>> GetFilesAsync(CancellationToken cancellationToken = default)
+ public Task<IEnumerable<OpenAIFileReference>> GetFilesAsync(CancellationToken cancellationToken = default)
+ => this.GetFilesAsync(null, cancellationToken);
+
+ ///
+ /// Retrieve metadata for previously uploaded files
+ ///
+ /// The purpose of the files by which to filter.
+ /// The to monitor for cancellation requests. The default is .
+ /// The metadata of all uploaded files.
+ public async Task<IEnumerable<OpenAIFileReference>> GetFilesAsync(OpenAIFilePurpose? filePurpose, CancellationToken cancellationToken = default)
{
- var result = await this.ExecuteGetRequestAsync<FileInfoList>(this._serviceUri.ToString(), cancellationToken).ConfigureAwait(false);
+ var serviceUri = filePurpose.HasValue && !string.IsNullOrEmpty(filePurpose.Value.Label) ? $"{this._serviceUri}?purpose={filePurpose}" : this._serviceUri.ToString();
+ var result = await this.ExecuteGetRequestAsync<FileInfoList>(serviceUri, cancellationToken).ConfigureAwait(false);
return result.Data.Select(this.ConvertFileReference).ToArray();
}
@@ -167,7 +183,7 @@ public async Task UploadContentAsync(BinaryContent fileCont
Verify.NotNull(fileContent.Data, nameof(fileContent.Data));
using var formData = new MultipartFormDataContent();
- using var contentPurpose = new StringContent(this.ConvertPurpose(settings.Purpose));
+ using var contentPurpose = new StringContent(settings.Purpose.Label);
using var contentFile = new ByteArrayContent(fileContent.Data.Value.ToArray());
formData.Add(contentPurpose, "purpose");
formData.Add(contentFile, "file", settings.FileName);
@@ -281,26 +297,10 @@ private OpenAIFileReference ConvertFileReference(FileInfo result)
FileName = result.FileName,
CreatedTimestamp = DateTimeOffset.FromUnixTimeSeconds(result.CreatedAt).UtcDateTime,
SizeInBytes = result.Bytes ?? 0,
- Purpose = this.ConvertPurpose(result.Purpose),
+ Purpose = new(result.Purpose),
};
}
- private OpenAIFilePurpose ConvertPurpose(string purpose) =>
- purpose.ToUpperInvariant() switch
- {
- "ASSISTANTS" => OpenAIFilePurpose.Assistants,
- "FINE-TUNE" => OpenAIFilePurpose.FineTune,
- _ => throw new KernelException($"Unknown {nameof(OpenAIFilePurpose)}: {purpose}."),
- };
-
- private string ConvertPurpose(OpenAIFilePurpose purpose) =>
- purpose switch
- {
- OpenAIFilePurpose.Assistants => "assistants",
- OpenAIFilePurpose.FineTune => "fine-tune",
- _ => throw new KernelException($"Unknown {nameof(OpenAIFilePurpose)}: {purpose}."),
- };
-
private sealed class FileInfoList
{
[JsonPropertyName("data")]
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs
index b9d8b861dbc7..80cc60944965 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/OpenAIServiceCollectionExtensions.cs
@@ -1309,6 +1309,7 @@ public static IServiceCollection AddAzureOpenAITextToImage(
/// The instance to augment.
/// OpenAI API key, see https://platform.openai.com/account/api-keys
/// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.
+ /// The model to use for image generation.
/// A local identifier for the given AI service
/// The HttpClient to use with this service.
/// The same instance as .
@@ -1317,6 +1318,7 @@ public static IKernelBuilder AddOpenAITextToImage(
this IKernelBuilder builder,
string apiKey,
string? orgId = null,
+ string? modelId = null,
string? serviceId = null,
HttpClient? httpClient = null)
{
@@ -1327,6 +1329,7 @@ public static IKernelBuilder AddOpenAITextToImage(
new OpenAITextToImageService(
apiKey,
orgId,
+ modelId,
HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
serviceProvider.GetService()));
@@ -1339,12 +1342,14 @@ public static IKernelBuilder AddOpenAITextToImage(
/// The instance to augment.
/// OpenAI API key, see https://platform.openai.com/account/api-keys
/// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.
+ /// The model to use for image generation.
/// A local identifier for the given AI service
/// The same instance as .
[Experimental("SKEXP0010")]
public static IServiceCollection AddOpenAITextToImage(this IServiceCollection services,
string apiKey,
string? orgId = null,
+ string? modelId = null,
string? serviceId = null)
{
Verify.NotNull(services);
@@ -1354,6 +1359,7 @@ public static IServiceCollection AddOpenAITextToImage(this IServiceCollection se
new OpenAITextToImageService(
apiKey,
orgId,
+ modelId,
HttpClientProvider.GetHttpClient(serviceProvider),
serviceProvider.GetService()));
}
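Reviewer note: a short sketch, under the assumption of placeholder credentials and the "dall-e-3" model name, of how the new optional `modelId` parameter flows through the builder extension above; the registration is gated by SKEXP0010.

```csharp
// Hedged sketch: modelId is now forwarded to OpenAITextToImageService via the builder extension.
#pragma warning disable SKEXP0010 // text-to-image connectors are experimental
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.TextToImage;

var kernel = Kernel.CreateBuilder()
    .AddOpenAITextToImage(apiKey: "sk-...", modelId: "dall-e-3") // placeholder key and model
    .Build();

var imageService = kernel.GetRequiredService<ITextToImageService>();

// 1024x1024 is one of the supported square sizes checked by the service.
var imageUrl = await imageService.GenerateImageAsync("A lighthouse at sunrise.", 1024, 1024);
```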
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs
index 08dad90554c8..335fe8cad5ee 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/OpenAITextToImageService.cs
@@ -8,6 +8,7 @@
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
+using Microsoft.SemanticKernel.Services;
using Microsoft.SemanticKernel.TextToImage;
namespace Microsoft.SemanticKernel.Connectors.OpenAI;
@@ -35,25 +36,37 @@ public sealed class OpenAITextToImageService : ITextToImageService
///
private readonly string _authorizationHeaderValue;
+ ///
+ /// The model to use for image generation.
+ ///
+ private readonly string? _modelId;
+
///
/// Initializes a new instance of the class.
///
/// OpenAI API key, see https://platform.openai.com/account/api-keys
/// OpenAI organization id. This is usually optional unless your account belongs to multiple organizations.
+ /// The model to use for image generation.
/// Custom for HTTP requests.
/// The to use for logging. If null, no logging will be performed.
public OpenAITextToImageService(
string apiKey,
string? organization = null,
+ string? modelId = null,
HttpClient? httpClient = null,
ILoggerFactory? loggerFactory = null)
{
Verify.NotNullOrWhiteSpace(apiKey);
this._authorizationHeaderValue = $"Bearer {apiKey}";
this._organizationHeaderValue = organization;
+ this._modelId = modelId;
this._core = new(httpClient, loggerFactory?.CreateLogger(this.GetType()));
this._core.AddAttribute(OpenAIClientCore.OrganizationKey, organization);
+ if (modelId is not null)
+ {
+ this._core.AddAttribute(AIServiceExtensions.ModelIdKey, modelId);
+ }
this._core.RequestCreated += (_, request) =>
{
@@ -77,10 +90,11 @@ public Task GenerateImageAsync(string description, int width, int height
throw new ArgumentOutOfRangeException(nameof(width), width, "OpenAI can generate only square images of size 256x256, 512x512, or 1024x1024.");
}
- return this.GenerateImageAsync(description, width, height, "url", x => x.Url, cancellationToken);
+ return this.GenerateImageAsync(this._modelId, description, width, height, "url", x => x.Url, cancellationToken);
}
private async Task GenerateImageAsync(
+ string? model,
string description,
int width, int height,
string format, Func extractResponse,
@@ -90,6 +104,7 @@ private async Task GenerateImageAsync(
var requestBody = JsonSerializer.Serialize(new TextToImageRequest
{
+ Model = model,
Prompt = description,
Size = $"{width}x{height}",
Count = 1,
diff --git a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs
index b5988a91cda4..70b5ac5418ee 100644
--- a/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs
+++ b/dotnet/src/Connectors/Connectors.OpenAI/TextToImage/TextToImageRequest.cs
@@ -9,31 +9,34 @@ namespace Microsoft.SemanticKernel.Connectors.OpenAI;
///
internal sealed class TextToImageRequest
{
+ ///
+ /// Model to use for image generation
+ ///
+ [JsonPropertyName("model")]
+ [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+ public string? Model { get; set; }
+
///
/// Image prompt
///
[JsonPropertyName("prompt")]
- [JsonPropertyOrder(1)]
public string Prompt { get; set; } = string.Empty;
///
/// Image size
///
[JsonPropertyName("size")]
- [JsonPropertyOrder(2)]
public string Size { get; set; } = "256x256";
///
/// How many images to generate
///
[JsonPropertyName("n")]
- [JsonPropertyOrder(3)]
public int Count { get; set; } = 1;
///
/// Image format, "url" or "b64_json"
///
[JsonPropertyName("response_format")]
- [JsonPropertyOrder(4)]
public string Format { get; set; } = "url";
}
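Reviewer note: the request change above relies on `JsonIgnoreCondition.WhenWritingNull`. The standalone sketch below uses an analogous class (since `TextToImageRequest` is internal) to show that the `model` field is simply absent when no model id was supplied, so existing payloads are unchanged.

```csharp
// Standalone illustration (not SK code): WhenWritingNull omits "model" when the property is null.
using System;
using System.Text.Json;
using System.Text.Json.Serialization;

Console.WriteLine(JsonSerializer.Serialize(new ImageRequestSketch { Prompt = "a cat" }));
// {"prompt":"a cat"}

Console.WriteLine(JsonSerializer.Serialize(new ImageRequestSketch { Model = "dall-e-3", Prompt = "a cat" }));
// {"model":"dall-e-3","prompt":"a cat"}

internal sealed class ImageRequestSketch // analogous to the internal TextToImageRequest
{
    [JsonPropertyName("model")]
    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
    public string? Model { get; set; }

    [JsonPropertyName("prompt")]
    public string Prompt { get; set; } = string.Empty;
}
```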
diff --git a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs
index 46334a06fb48..1f31ec076edd 100644
--- a/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs
+++ b/dotnet/src/Connectors/Connectors.UnitTests/OpenAI/TextToImage/OpenAITextToImageServiceTests.cs
@@ -40,6 +40,7 @@ public void ConstructorWorksCorrectly(bool includeLoggerFactory)
// Assert
Assert.NotNull(service);
Assert.Equal("organization", service.Attributes["Organization"]);
+ Assert.False(service.Attributes.ContainsKey("ModelId"));
}
[Theory]
@@ -51,7 +52,8 @@ public void ConstructorWorksCorrectly(bool includeLoggerFactory)
public async Task GenerateImageWorksCorrectlyAsync(int width, int height, bool expectedException)
{
// Arrange
- var service = new OpenAITextToImageService("api-key", "organization", this._httpClient);
+ var service = new OpenAITextToImageService("api-key", "organization", "dall-e-3", this._httpClient);
+ Assert.Equal("dall-e-3", service.Attributes["ModelId"]);
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
{
Content = new StringContent("""
diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/README.md b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/README.md
index 90bd07b0bc06..fef9a8dae35b 100644
--- a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/README.md
+++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/README.md
@@ -3,7 +3,7 @@
## Requirements
1. **Azure OpenAI**: go to the [Azure OpenAI Quickstart](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart)
- and deploy an instance of Azure OpenAI, deploy a model like "text-davinci-003" find your Endpoint and API key.
+ and deploy an instance of Azure OpenAI, deploy a model like "gpt-35-turbo-instruct", and find your Endpoint and API key.
2. **OpenAI**: go to [OpenAI](https://platform.openai.com) to register and procure your API key.
3. **Azure Bing Web Search API**: go to [Bing Web Search API](https://www.microsoft.com/en-us/bing/apis/bing-web-search-api)
and select `Try Now` to get started.
@@ -25,13 +25,13 @@ To set your secrets with Secret Manager:
cd dotnet/src/IntegrationTests
dotnet user-secrets init
-dotnet user-secrets set "OpenAI:ServiceId" "text-davinci-003"
-dotnet user-secrets set "OpenAI:ModelId" "text-davinci-003"
+dotnet user-secrets set "OpenAI:ServiceId" "gpt-3.5-turbo-instruct"
+dotnet user-secrets set "OpenAI:ModelId" "gpt-3.5-turbo-instruct"
dotnet user-secrets set "OpenAI:ChatModelId" "gpt-4"
dotnet user-secrets set "OpenAI:ApiKey" "..."
-dotnet user-secrets set "AzureOpenAI:ServiceId" "azure-text-davinci-003"
-dotnet user-secrets set "AzureOpenAI:DeploymentName" "text-davinci-003"
+dotnet user-secrets set "AzureOpenAI:ServiceId" "azure-gpt-35-turbo-instruct"
+dotnet user-secrets set "AzureOpenAI:DeploymentName" "gpt-35-turbo-instruct"
dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "gpt-4"
dotnet user-secrets set "AzureOpenAI:Endpoint" "https://contoso.openai.azure.com/"
dotnet user-secrets set "AzureOpenAI:ApiKey" "..."
@@ -56,14 +56,14 @@ For example:
```json
{
"OpenAI": {
- "ServiceId": "text-davinci-003",
- "ModelId": "text-davinci-003",
+ "ServiceId": "gpt-3.5-turbo-instruct",
+ "ModelId": "gpt-3.5-turbo-instruct",
"ChatModelId": "gpt-4",
"ApiKey": "sk-...."
},
"AzureOpenAI": {
- "ServiceId": "azure-text-davinci-003",
- "DeploymentName": "text-davinci-003",
+ "ServiceId": "gpt-35-turbo-instruct",
+ "DeploymentName": "gpt-35-turbo-instruct",
"ChatDeploymentName": "gpt-4",
"Endpoint": "https://contoso.openai.azure.com/",
"ApiKey": "...."
@@ -95,7 +95,7 @@ When setting environment variables, use a double underscore (i.e. "\_\_") to del
```bash
export OpenAI__ApiKey="sk-...."
export AzureOpenAI__ApiKey="...."
- export AzureOpenAI__DeploymentName="azure-text-davinci-003"
+ export AzureOpenAI__DeploymentName="gpt-35-turbo-instruct"
export AzureOpenAI__ChatDeploymentName="gpt-4"
export AzureOpenAIEmbeddings__DeploymentName="azure-text-embedding-ada-002"
export AzureOpenAI__Endpoint="https://contoso.openai.azure.com/"
@@ -107,7 +107,7 @@ When setting environment variables, use a double underscore (i.e. "\_\_") to del
```ps
$env:OpenAI__ApiKey = "sk-...."
$env:AzureOpenAI__ApiKey = "...."
- $env:AzureOpenAI__DeploymentName = "azure-text-davinci-003"
+ $env:AzureOpenAI__DeploymentName = "gpt-35-turbo-instruct"
$env:AzureOpenAI__ChatDeploymentName = "gpt-4"
$env:AzureOpenAIEmbeddings__DeploymentName = "azure-text-embedding-ada-002"
$env:AzureOpenAI__Endpoint = "https://contoso.openai.azure.com/"
diff --git a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/testsettings.json b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/testsettings.json
index 2b5e41c5cbd7..e2ce917f9732 100644
--- a/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/testsettings.json
+++ b/dotnet/src/Experimental/Orchestration.Flow.IntegrationTests/testsettings.json
@@ -1,12 +1,12 @@
{
"OpenAI": {
- "ServiceId": "text-davinci-003",
- "ModelId": "text-davinci-003",
+ "ServiceId": "gpt-3.5-turbo-instruct",
+ "ModelId": "gpt-3.5-turbo-instruct",
"ApiKey": ""
},
"AzureOpenAI": {
- "ServiceId": "azure-text-davinci-003",
- "DeploymentName": "text-davinci-003",
+ "ServiceId": "azure-gpt-35-turbo-instruct",
+ "DeploymentName": "gpt-35-turbo-instruct",
"ChatDeploymentName": "gpt-4",
"Endpoint": "",
"ApiKey": ""
diff --git a/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs b/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs
index d86c1681b96e..67abae8ef61c 100644
--- a/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs
+++ b/dotnet/src/Experimental/Orchestration.Flow/FlowOrchestrator.cs
@@ -73,6 +73,6 @@ public async Task ExecuteFlowAsync(
}
var executor = new FlowExecutor(this._kernelBuilder, this._flowStatusProvider, this._globalPluginCollection, this._config);
- return await executor.ExecuteFlowAsync(flow, sessionId, input, kernelArguments ?? new KernelArguments(null)).ConfigureAwait(false);
+ return await executor.ExecuteFlowAsync(flow, sessionId, input, kernelArguments ?? new KernelArguments()).ConfigureAwait(false);
}
}
diff --git a/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs
index 9f9b599ef9b6..9cb98b446e68 100644
--- a/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs
+++ b/dotnet/src/Extensions/PromptTemplates.Handlebars/Helpers/KernelHelpers/KernelFunctionHelpers.cs
@@ -226,7 +226,7 @@ private static void ProcessPositionalArguments(KernelFunctionMetadata functionMe
// Deserialize any JSON content or return the content as a string
if (restApiOperationResponse.ContentType?.IndexOf("application/json", StringComparison.OrdinalIgnoreCase) >= 0)
{
- var parsedJson = JsonValue.Parse(restApiOperationResponse.Content.ToString() ?? string.Empty);
+ var parsedJson = JsonValue.Parse(restApiOperationResponse.Content?.ToString() ?? string.Empty);
return KernelHelpersUtils.DeserializeJsonNode(parsedJson);
}
diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs
index 3bcb963571b7..98126638dc62 100644
--- a/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/OpenApiKernelExtensions.cs
@@ -280,7 +280,7 @@ internal static KernelFunction CreateRestApiFunction(
var logger = loggerFactory?.CreateLogger(typeof(OpenApiKernelExtensions)) ?? NullLogger.Instance;
- async Task ExecuteAsync(KernelArguments variables, CancellationToken cancellationToken)
+ async Task ExecuteAsync(Kernel kernel, KernelFunction function, KernelArguments variables, CancellationToken cancellationToken)
{
try
{
@@ -314,6 +314,9 @@ async Task ExecuteAsync(KernelArguments variables, Can
var options = new RestApiOperationRunOptions
{
+ Kernel = kernel,
+ KernelFunction = function,
+ KernelArguments = arguments,
ServerUrlOverride = executionParameters?.ServerUrlOverride,
ApiHostUrl = documentUri is not null ? new Uri(documentUri.GetLeftPart(UriPartial.Authority)) : null
};
@@ -364,12 +367,12 @@ async Task ExecuteAsync(KernelArguments variables, Can
}
///
- /// Converts operation id to valid SK Function name.
+ /// Converts operation id to valid <see cref="KernelFunction"/> name.
/// A function name can contain only ASCII letters, digits, and underscores.
///
/// The operation id.
/// The logger.
- /// Valid SK Function name.
+ /// Valid KernelFunction name.
private static string ConvertOperationIdToValidFunctionName(string operationId, ILogger logger)
{
try
@@ -380,7 +383,7 @@ private static string ConvertOperationIdToValidFunctionName(string operationId,
catch (ArgumentException)
{
// The exception indicates that the operationId is not a valid function name.
- // To comply with the SK Function name requirements, it needs to be converted or sanitized.
+ // To comply with the KernelFunction name requirements, it needs to be converted or sanitized.
// Therefore, it should not be re-thrown, but rather swallowed to allow the conversion below.
}
diff --git a/dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationResponseExtensions.cs b/dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationResponseExtensions.cs
index 46f694b2afb4..c377f5e6f1a7 100644
--- a/dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationResponseExtensions.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/Extensions/RestApiOperationResponseExtensions.cs
@@ -33,7 +33,7 @@ public static bool IsValid(this RestApiOperationResponse response)
return true;
}
- return response.ContentType switch
+ return response.ContentType! switch
{
var ct when ct.StartsWith("application/json", StringComparison.OrdinalIgnoreCase) => ValidateJson(response),
var ct when ct.StartsWith("application/xml", StringComparison.OrdinalIgnoreCase) => ValidateXml(response),
@@ -47,7 +47,7 @@ private static bool ValidateJson(RestApiOperationResponse response)
try
{
var jsonSchema = JsonSchema.FromText(JsonSerializer.Serialize(response.ExpectedSchema));
- using var contentDoc = JsonDocument.Parse(response.Content.ToString() ?? "");
+ using var contentDoc = JsonDocument.Parse(response.Content?.ToString() ?? string.Empty);
var result = jsonSchema.Evaluate(contentDoc);
return result.IsValid;
}
diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs
index 36c2f58cca1a..af65b1c59825 100644
--- a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperation.cs
@@ -5,6 +5,7 @@
using System.Linq;
using System.Net.Http;
using System.Text.Json.Nodes;
+using System.Web;
namespace Microsoft.SemanticKernel.Plugins.OpenApi;
@@ -238,7 +239,7 @@ private string BuildPath(string pathTemplate, IDictionary argum
var node = OpenApiTypeConverter.Convert(parameter.Name, parameter.Type, argument);
// Serializing the parameter and adding it to the path.
- pathTemplate = pathTemplate.Replace($"{{{parameter.Name}}}", node.ToString().Trim('"'));
+ pathTemplate = pathTemplate.Replace($"{{{parameter.Name}}}", HttpUtility.UrlEncode(serializer.Invoke(parameter, node)));
}
return pathTemplate;
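Reviewer note: the `BuildPath` change above now URL-encodes the serialized argument before substituting it into the path template. A standalone sketch of the concern it addresses (the template and value are hypothetical):

```csharp
// Standalone sketch (not SK code): reserved characters in a path argument would otherwise
// change the URL structure, so the serialized value is URL-encoded before substitution.
using System;
using System.Web;

string pathTemplate = "/resources/{id}/items";
string argumentValue = "foo/bar:baz"; // hypothetical argument containing reserved characters

string path = pathTemplate.Replace("{id}", HttpUtility.UrlEncode(argumentValue));

Console.WriteLine(path); // /resources/foo%2fbar%3abaz/items
```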
diff --git a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationRunOptions.cs b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationRunOptions.cs
index bf716e8f371c..1462145b9ea3 100644
--- a/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationRunOptions.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/Model/RestApiOperationRunOptions.cs
@@ -18,4 +18,19 @@ internal sealed class RestApiOperationRunOptions
/// The URL of REST API host.
///
public Uri? ApiHostUrl { get; set; }
+
+ ///
+ /// The Kernel instance used for the operation run.
+ ///
+ public Kernel? Kernel { get; set; }
+
+ ///
+ /// The Kernel function whose invocation triggered the operation run.
+ ///
+ public KernelFunction? KernelFunction { get; set; }
+
+ ///
+ /// The Kernel arguments associated with the operation run.
+ ///
+ public KernelArguments? KernelArguments { get; set; }
}
diff --git a/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelFunctionContext.cs b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelFunctionContext.cs
new file mode 100644
index 000000000000..b40b65bdd57c
--- /dev/null
+++ b/dotnet/src/Functions/Functions.OpenApi/OpenApiKernelFunctionContext.cs
@@ -0,0 +1,48 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Net.Http;
+
+namespace Microsoft.SemanticKernel.Plugins.OpenApi;
+
+///
+/// Class with data related to an Open API invocation.
+///
+public sealed class OpenApiKernelFunctionContext
+{
+ ///
+ /// Key to access the <see cref="OpenApiKernelFunctionContext"/> in the <see cref="HttpRequestMessage"/>.
+ ///
+#if NET5_0_OR_GREATER
+ public static readonly HttpRequestOptionsKey KernelFunctionContextKey = new("KernelFunctionContext");
+#else
+ public static readonly string KernelFunctionContextKey = "KernelFunctionContext";
+#endif
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The <see cref="Kernel"/> associated with this context.
+ /// The <see cref="KernelFunction"/> associated with this context.
+ /// The <see cref="KernelArguments"/> associated with this context.
+ internal OpenApiKernelFunctionContext(Kernel? kernel, KernelFunction? function, KernelArguments? arguments)
+ {
+ this.Kernel = kernel;
+ this.Function = function;
+ this.Arguments = arguments;
+ }
+
+ ///
+ /// Gets the <see cref="Kernel"/>.
+ ///
+ public Kernel? Kernel { get; }
+
+ ///
+ /// Gets the <see cref="KernelFunction"/>.
+ ///
+ public KernelFunction? Function { get; }
+
+ ///
+ /// Gets the <see cref="KernelArguments"/>.
+ ///
+ public KernelArguments? Arguments { get; }
+}
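Reviewer note: a hedged sketch of how a caller-supplied handler might consume the new context. Only `OpenApiKernelFunctionContext` and its key come from the change above; the handler class itself is hypothetical, and the runner change that attaches the context appears further below in this diff.

```csharp
// Hypothetical delegating handler that reads the context RestApiOperationRunner now attaches
// to each outgoing request. Targets .NET 5+; on older frameworks the same value is exposed
// through HttpRequestMessage.Properties under the same key name.
using System;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.SemanticKernel.Plugins.OpenApi;

public sealed class FunctionContextLoggingHandler : DelegatingHandler
{
    protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
    {
        if (request.Options.TryGetValue(OpenApiKernelFunctionContext.KernelFunctionContextKey, out var context))
        {
            // The invoking KernelFunction (and its arguments) are now observable per request.
            Console.WriteLine($"OpenAPI call triggered by function '{context?.Function?.Name}'.");
        }

        return base.SendAsync(request, cancellationToken);
    }
}
```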
diff --git a/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs
index 6f541b9dc55d..b7bc593c76b2 100644
--- a/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/RestApiOperationRunner.cs
@@ -142,7 +142,7 @@ public Task RunAsync(
var operationPayload = this.BuildOperationPayload(operation, arguments);
- return this.SendAsync(url, operation.Method, headers, operationPayload.Payload, operationPayload.Content, operation.Responses.ToDictionary(item => item.Key, item => item.Value.Schema), cancellationToken);
+ return this.SendAsync(url, operation.Method, headers, operationPayload.Payload, operationPayload.Content, operation.Responses.ToDictionary(item => item.Key, item => item.Value.Schema), options, cancellationToken);
}
#region private
@@ -156,6 +156,7 @@ public Task RunAsync(
/// HTTP request payload.
/// HTTP request content.
/// The dictionary of expected response schemas.
+ /// Options for REST API operation run.
/// The cancellation token.
/// Response content and content type
private async Task SendAsync(
@@ -165,10 +166,17 @@ private async Task SendAsync(
object? payload = null,
HttpContent? requestContent = null,
IDictionary? expectedSchemas = null,
+ RestApiOperationRunOptions? options = null,
CancellationToken cancellationToken = default)
{
using var requestMessage = new HttpRequestMessage(method, url);
+#if NET5_0_OR_GREATER
+ requestMessage.Options.Set(OpenApiKernelFunctionContext.KernelFunctionContextKey, new OpenApiKernelFunctionContext(options?.Kernel, options?.KernelFunction, options?.KernelArguments));
+#else
+ requestMessage.Properties.Add(OpenApiKernelFunctionContext.KernelFunctionContextKey, new OpenApiKernelFunctionContext(options?.Kernel, options?.KernelFunction, options?.KernelArguments));
+#endif
+
await this._authCallback(requestMessage, cancellationToken).ConfigureAwait(false);
if (requestContent is not null)
@@ -193,7 +201,7 @@ private async Task SendAsync(
{
using var responseMessage = await this._httpClient.SendWithSuccessCheckAsync(requestMessage, cancellationToken).ConfigureAwait(false);
- var response = await SerializeResponseContentAsync(requestMessage, payload, responseMessage.Content).ConfigureAwait(false);
+ var response = await SerializeResponseContentAsync(requestMessage, payload, responseMessage).ConfigureAwait(false);
response.ExpectedSchema ??= GetExpectedSchema(expectedSchemas, responseMessage.StatusCode);
@@ -228,11 +236,21 @@ private async Task SendAsync(
///
/// The HttpRequestMessage associated with the HTTP request.
/// The payload sent in the HTTP request.
- /// The HttpContent object containing the response content to be serialized.
+ /// The HttpResponseMessage object containing the response content to be serialized.
/// The serialized content.
- private static async Task SerializeResponseContentAsync(HttpRequestMessage request, object? payload, HttpContent content)
+ private static async Task SerializeResponseContentAsync(HttpRequestMessage request, object? payload, HttpResponseMessage responseMessage)
{
- var contentType = content.Headers.ContentType;
+ if (responseMessage.StatusCode == HttpStatusCode.NoContent)
+ {
+ return new RestApiOperationResponse(null, null)
+ {
+ RequestMethod = request.Method.Method,
+ RequestUri = request.RequestUri,
+ RequestPayload = payload,
+ };
+ }
+
+ var contentType = responseMessage.Content.Headers.ContentType;
var mediaType = contentType?.MediaType ?? throw new KernelException("No media type available.");
@@ -256,7 +274,7 @@ private static async Task SerializeResponseContentAsyn
}
// Serialize response content and return it
- var serializedContent = await serializer.Invoke(content).ConfigureAwait(false);
+ var serializedContent = await serializer.Invoke(responseMessage.Content).ConfigureAwait(false);
return new RestApiOperationResponse(serializedContent, contentType!.ToString())
{
diff --git a/dotnet/src/Functions/Functions.OpenApi/Serialization/FormStyleParameterSerializer.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/FormStyleParameterSerializer.cs
index 0f985f3d8197..917f94750a29 100644
--- a/dotnet/src/Functions/Functions.OpenApi/Serialization/FormStyleParameterSerializer.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/FormStyleParameterSerializer.cs
@@ -24,7 +24,8 @@ public static string Serialize(RestApiOperationParameter parameter, JsonNode arg
Verify.NotNull(parameter);
Verify.NotNull(argument);
- if (parameter.Style != RestApiOperationParameterStyle.Form)
+ var style = parameter.Style ?? RestApiOperationParameterStyle.Form;
+ if (style != RestApiOperationParameterStyle.Form)
{
throw new NotSupportedException($"Unsupported Rest API operation parameter style '{parameter.Style}' for parameter '{parameter.Name}'");
}
@@ -35,7 +36,13 @@ public static string Serialize(RestApiOperationParameter parameter, JsonNode arg
return SerializeArrayParameter(parameter, argument);
}
- // Handling parameters of primitive and removing extra quotes added by the JsonValue for string values.
+ // Handling parameters where the underlying value is already a string.
+ if (argument is JsonValue jsonValue && jsonValue.TryGetValue(out string? value))
+ {
+ return $"{parameter.Name}={HttpUtility.UrlEncode(value)}";
+ }
+
+ // Handling parameters of any arbitrary type by using JSON format without enclosing quotes.
return $"{parameter.Name}={HttpUtility.UrlEncode(argument.ToString().Trim('"'))}";
}
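Reviewer note: the serializer changes above and below special-case string-backed `JsonValue` instances, pulling the raw string out and URL-encoding it directly rather than rendering it through its JSON representation. A standalone sketch of that branch (the sample value mirrors the unit test data):

```csharp
// Standalone sketch (not SK code) of the new string-value branch in the form-style serializer.
using System;
using System.Text.Json.Nodes;
using System.Web;

JsonNode argument = JsonValue.Create("2024-01-01T00:00:00+00:00")!;

if (argument is JsonValue jsonValue && jsonValue.TryGetValue(out string? value))
{
    // Prints: id=2024-01-01T00%3a00%3a00%2b00%3a00 ('+' and ':' are percent-encoded)
    Console.WriteLine($"id={HttpUtility.UrlEncode(value)}");
}
```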
diff --git a/dotnet/src/Functions/Functions.OpenApi/Serialization/SimpleStyleParameterSerializer.cs b/dotnet/src/Functions/Functions.OpenApi/Serialization/SimpleStyleParameterSerializer.cs
index 98da72fbdb76..9104ed9635dc 100644
--- a/dotnet/src/Functions/Functions.OpenApi/Serialization/SimpleStyleParameterSerializer.cs
+++ b/dotnet/src/Functions/Functions.OpenApi/Serialization/SimpleStyleParameterSerializer.cs
@@ -23,7 +23,8 @@ public static string Serialize(RestApiOperationParameter parameter, JsonNode arg
Verify.NotNull(parameter);
Verify.NotNull(argument);
- if (parameter.Style != RestApiOperationParameterStyle.Simple)
+ var style = parameter.Style ?? RestApiOperationParameterStyle.Simple;
+ if (style != RestApiOperationParameterStyle.Simple)
{
throw new NotSupportedException($"Unsupported Rest API operation parameter style '{parameter.Style}' for parameter '{parameter.Name}'");
}
@@ -34,7 +35,13 @@ public static string Serialize(RestApiOperationParameter parameter, JsonNode arg
return SerializeArrayParameter(parameter, argument);
}
- // Handling parameters of primitive and removing extra quotes added by the JsonValue for string values.
+ // Handling parameters where the underlying value is already a string.
+ if (argument is JsonValue jsonValue && jsonValue.TryGetValue(out string? value))
+ {
+ return value;
+ }
+
+ // Handling parameters of any arbitrary type by using JSON format without enclosing quotes.
return argument.ToString().Trim('"');
}
diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs
index c48f551c36f4..b836ec18ed80 100644
--- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs
+++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationRunnerTests.cs
@@ -1103,6 +1103,109 @@ public async Task ItShouldReturnRequestUriAndContentAsync()
Assert.Equal("{\"name\":\"fake-name-value\",\"attributes\":{\"enabled\":true}}", ((JsonObject)result.RequestPayload).ToJsonString());
}
+ [Fact]
+ public async Task ItShouldHandleNoContentAsync()
+ {
+ // Arrange
+ this._httpMessageHandlerStub!.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.NoContent);
+
+ List payloadProperties =
+ [
+ new("name", "string", true, []),
+ new("attributes", "object", false,
+ [
+ new("enabled", "boolean", false, []),
+ ])
+ ];
+
+ var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties);
+
+ var operation = new RestApiOperation(
+ "fake-id",
+ new Uri("https://fake-random-test-host"),
+ "fake-path",
+ HttpMethod.Post,
+ "fake-description",
+ [],
+ payload
+ );
+
+ var arguments = new KernelArguments
+ {
+ { "name", "fake-name-value" },
+ { "enabled", true }
+ };
+
+ var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: true);
+
+ // Act
+ var result = await sut.RunAsync(operation, arguments);
+
+ // Assert
+ Assert.NotNull(result.RequestMethod);
+ Assert.Equal(HttpMethod.Post.Method, result.RequestMethod);
+ Assert.NotNull(result.RequestUri);
+ Assert.Equal("https://fake-random-test-host/fake-path", result.RequestUri.AbsoluteUri);
+ Assert.NotNull(result.RequestPayload);
+ Assert.IsType(result.RequestPayload);
+ Assert.Equal("{\"name\":\"fake-name-value\",\"attributes\":{\"enabled\":true}}", ((JsonObject)result.RequestPayload).ToJsonString());
+ }
+
+ [Fact]
+ public async Task ItShouldSetHttpRequestMessageOptionsAsync()
+ {
+ // Arrange
+ this._httpMessageHandlerStub.ResponseToReturn.Content = new StringContent("fake-content", Encoding.UTF8, MediaTypeNames.Application.Json);
+
+ List payloadProperties =
+ [
+ new("name", "string", true, []),
+ new("attributes", "object", false,
+ [
+ new("enabled", "boolean", false, []),
+ ])
+ ];
+
+ var payload = new RestApiOperationPayload(MediaTypeNames.Application.Json, payloadProperties);
+
+ var operation = new RestApiOperation(
+ "fake-id",
+ new Uri("https://fake-random-test-host"),
+ "fake-path",
+ HttpMethod.Post,
+ "fake-description",
+ [],
+ payload
+ );
+
+ var arguments = new KernelArguments
+ {
+ { "name", "fake-name-value" },
+ { "enabled", true }
+ };
+
+ var options = new RestApiOperationRunOptions()
+ {
+ Kernel = new(),
+ KernelFunction = KernelFunctionFactory.CreateFromMethod(() => false),
+ KernelArguments = arguments,
+ };
+
+ var sut = new RestApiOperationRunner(this._httpClient, this._authenticationHandlerMock.Object, enableDynamicPayload: true);
+
+ // Act
+ var result = await sut.RunAsync(operation, arguments, options);
+
+ // Assert
+ var requestMessage = this._httpMessageHandlerStub.RequestMessage;
+ Assert.NotNull(requestMessage);
+ Assert.True(requestMessage.Options.TryGetValue(OpenApiKernelFunctionContext.KernelFunctionContextKey, out var kernelFunctionContext));
+ Assert.NotNull(kernelFunctionContext);
+ Assert.Equal(options.Kernel, kernelFunctionContext.Kernel);
+ Assert.Equal(options.KernelFunction, kernelFunctionContext.Function);
+ Assert.Equal(options.KernelArguments, kernelFunctionContext.Arguments);
+ }
+
public class SchemaTestData : IEnumerable
{
public IEnumerator GetEnumerator()
@@ -1185,15 +1288,17 @@ public void Dispose()
private sealed class HttpMessageHandlerStub : DelegatingHandler
{
- public HttpRequestHeaders? RequestHeaders { get; private set; }
+ public HttpRequestHeaders? RequestHeaders => this.RequestMessage?.Headers;
- public HttpContentHeaders? ContentHeaders { get; private set; }
+ public HttpContentHeaders? ContentHeaders => this.RequestMessage?.Content?.Headers;
public byte[]? RequestContent { get; private set; }
- public Uri? RequestUri { get; private set; }
+ public Uri? RequestUri => this.RequestMessage?.RequestUri;
+
+ public HttpMethod? Method => this.RequestMessage?.Method;
- public HttpMethod? Method { get; private set; }
+ public HttpRequestMessage? RequestMessage { get; private set; }
public HttpResponseMessage ResponseToReturn { get; set; }
@@ -1207,11 +1312,8 @@ public HttpMessageHandlerStub()
protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
- this.Method = request.Method;
- this.RequestUri = request.RequestUri;
- this.RequestHeaders = request.Headers;
+ this.RequestMessage = request;
this.RequestContent = request.Content is null ? null : await request.Content.ReadAsByteArrayAsync(cancellationToken);
- this.ContentHeaders = request.Content?.Headers;
return await Task.FromResult(this.ResponseToReturn);
}
diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs
index b4d7b17469e2..c9f082b329a3 100644
--- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs
+++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/RestApiOperationTests.cs
@@ -64,7 +64,7 @@ public void ItShouldUseHostUrlOverrideIfProvided()
}
[Fact]
- public void ItShouldReplacePathParametersByValuesFromArguments()
+ public void ItShouldBuildOperationUrlWithPathParametersFromArguments()
{
// Arrange
var parameters = new List {
@@ -106,6 +106,49 @@ public void ItShouldReplacePathParametersByValuesFromArguments()
Assert.Equal("https://fake-random-test-host/v1/34/other_fake_path_section", url.OriginalString);
}
+ [Fact]
+ public void ItShouldBuildOperationUrlWithEncodedArguments()
+ {
+ // Arrange
+ var parameters = new List {
+ new(
+ name: "p1",
+ type: "string",
+ isRequired: true,
+ expand: false,
+ location: RestApiOperationParameterLocation.Path,
+ style: RestApiOperationParameterStyle.Simple),
+ new(
+ name: "p2",
+ type: "string",
+ isRequired: true,
+ expand: false,
+ location: RestApiOperationParameterLocation.Path,
+ style: RestApiOperationParameterStyle.Simple)
+ };
+
+ var sut = new RestApiOperation(
+ "fake_id",
+ new Uri("https://fake-random-test-host"),
+ "/{p1}/{p2}/other_fake_path_section",
+ HttpMethod.Get,
+ "fake_description",
+ parameters
+ );
+
+ var arguments = new Dictionary
+ {
+ { "p1", "foo:bar" },
+ { "p2", "foo/bar" }
+ };
+
+ // Act
+ var url = sut.BuildOperationUrl(arguments);
+
+ // Assert
+ Assert.Equal("https://fake-random-test-host/foo%3abar/foo%2fbar/other_fake_path_section", url.OriginalString);
+ }
+
[Fact]
public void ShouldBuildResourceUrlWithoutQueryString()
{
@@ -148,6 +191,112 @@ public void ShouldBuildResourceUrlWithoutQueryString()
Assert.Equal($"{fakeHostUrlOverride}/fake-path-value/", url.OriginalString);
}
+ [Fact]
+ public void ItShouldBuildQueryString()
+ {
+ // Arrange
+ var parameters = new List {
+ new(
+ name: "since_create_time",
+ type: "string",
+ isRequired: false,
+ expand: false,
+ location: RestApiOperationParameterLocation.Query),
+ new(
+ name: "before_create_time",
+ type: "string",
+ isRequired: false,
+ expand: false,
+ location: RestApiOperationParameterLocation.Query),
+ };
+
+ var sut = new RestApiOperation(
+ "fake_id",
+ new Uri("https://fake-random-test-host"),
+ "fake-path/",
+ HttpMethod.Get,
+ "fake_description",
+ parameters);
+
+ var arguments = new Dictionary
+ {
+ { "since_create_time", "2024-01-01T00:00:00+00:00" },
+ { "before_create_time", "2024-05-01T00:00:00+00:00" },
+ };
+
+ // Act
+ var queryString = sut.BuildQueryString(arguments);
+
+ // Assert
+ Assert.Equal("since_create_time=2024-01-01T00%3A00%3A00%2B00%3A00&before_create_time=2024-05-01T00%3A00%3A00%2B00%3A00", queryString, ignoreCase: true);
+ }
+
+ [Fact]
+ public void ItShouldBuildQueryStringWithQuotes()
+ {
+ // Arrange
+ var parameters = new List {
+ new(
+ name: "has_quotes",
+ type: "string",
+ isRequired: false,
+ expand: false,
+ location: RestApiOperationParameterLocation.Query)
+ };
+
+ var sut = new RestApiOperation(
+ "fake_id",
+ new Uri("https://fake-random-test-host"),
+ "fake-path/",
+ HttpMethod.Get,
+ "fake_description",
+ parameters);
+
+ var arguments = new Dictionary
+ {
+ { "has_quotes", "\"Quoted Value\"" },
+ };
+
+ // Act
+ var queryString = sut.BuildQueryString(arguments);
+
+ // Assert
+ Assert.Equal("has_quotes=%22Quoted+Value%22", queryString, ignoreCase: true);
+ }
+
+ [Fact]
+ public void ItShouldBuildQueryStringForArray()
+ {
+ // Arrange
+ var parameters = new List {
+ new(
+ name: "times",
+ type: "array",
+ isRequired: false,
+ expand: false,
+ location: RestApiOperationParameterLocation.Query),
+ };
+
+ var sut = new RestApiOperation(
+ "fake_id",
+ new Uri("https://fake-random-test-host"),
+ "fake-path/",
+ HttpMethod.Get,
+ "fake_description",
+ parameters);
+
+ var arguments = new Dictionary
+ {
+ { "times", new string[] { "2024-01-01T00:00:00+00:00", "2024-05-01T00:00:00+00:00" } },
+ };
+
+ // Act
+ var queryString = sut.BuildQueryString(arguments);
+
+ // Assert
+ Assert.Equal("times=2024-01-01T00%3A00%3A00%2B00%3A00,2024-05-01T00%3A00%3A00%2B00%3A00", queryString, ignoreCase: true);
+ }
+
[Fact]
public void ItShouldRenderHeaderValuesFromArguments()
{
diff --git a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/FormStyleParametersSerializerTests.cs b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/FormStyleParametersSerializerTests.cs
index 852a88c79b78..3d27259b0936 100644
--- a/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/FormStyleParametersSerializerTests.cs
+++ b/dotnet/src/Functions/Functions.UnitTests/OpenApi/Serialization/FormStyleParametersSerializerTests.cs
@@ -75,7 +75,7 @@ public void ItShouldCreateParameterForPrimitiveValue()
}
[Fact]
- public void ItShouldCreateParameterForStringValue()
+ public void ItShouldCreateParameterForDateTimeValue()
{
// Arrange
var parameter = new RestApiOperationParameter(
@@ -95,6 +95,28 @@ public void ItShouldCreateParameterForStringValue()
Assert.Equal("id=2023-12-06T11%3a53%3a36Z", result);
}
+ [Theory]
+ [InlineData("2024-01-01T00:00:00+00:00", "2024-01-01T00%3a00%3a00%2b00%3a00")]
+ public void ItShouldCreateParameterForStringValue(string value, string encodedValue)
+ {
+ // Arrange
+ var parameter = new RestApiOperationParameter(
+ name: "id",
+ type: "string",
+ isRequired: true,
+ expand: false,
+ location: RestApiOperationParameterLocation.Query,
+ style: RestApiOperationParameterStyle.Form);
+
+ // Act
+ var result = FormStyleParameterSerializer.Serialize(parameter, JsonValue.Create(value));
+
+ // Assert
+ Assert.NotNull(result);
+
+ Assert.Equal($"id={encodedValue}", result);
+ }
+
[Theory]
[InlineData(":", "%3a")]
[InlineData("/", "%2f")]
diff --git a/dotnet/src/IntegrationTests/Connectors/HuggingFace/ChatCompletion/HuggingFaceChatCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/HuggingFace/ChatCompletion/HuggingFaceChatCompletionTests.cs
new file mode 100644
index 000000000000..cca6f6703fcb
--- /dev/null
+++ b/dotnet/src/IntegrationTests/Connectors/HuggingFace/ChatCompletion/HuggingFaceChatCompletionTests.cs
@@ -0,0 +1,137 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Text;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.Connectors.HuggingFace;
+using Xunit;
+
+namespace SemanticKernel.IntegrationTests.Connectors.HuggingFace.ChatCompletion;
+
+///
+/// Integration tests for .
+///
+///
+/// Instructions for setting up a Text Generation Inference (TGI) endpoint, see: https://huggingface.co/blog/tgi-messages-api
+///
+public sealed class HuggingFaceChatCompletionTests
+{
+ private const string Endpoint = "https://.endpoints.huggingface.cloud/v1/";
+ private const string Model = "tgi";
+
+ private readonly IConfigurationRoot _configuration;
+
+ public HuggingFaceChatCompletionTests()
+ {
+ // Load configuration
+ this._configuration = new ConfigurationBuilder()
+ .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true)
+ .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+ .AddEnvironmentVariables()
+ .AddUserSecrets()
+ .Build();
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task GetChatMessageContentsAsync()
+ {
+ // Arrange
+ var chatHistory = new ChatHistory
+ {
+ new ChatMessageContent(AuthorRole.System, "Use C# 12 features."),
+ new ChatMessageContent(AuthorRole.User, "Write a C# Hello world?")
+ };
+ var huggingFaceRemote = new HuggingFaceChatCompletionService(Model, endpoint: new Uri(Endpoint), apiKey: this.GetApiKey());
+
+ // Act
+ var response = await huggingFaceRemote.GetChatMessageContentsAsync(chatHistory, new HuggingFacePromptExecutionSettings() { MaxNewTokens = 50 });
+
+ // Assert
+ Assert.NotNull(response);
+ Assert.Single(response);
+ Assert.True(response[0].Content?.Length > 0);
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task GetStreamingChatMessageContentsAsync()
+ {
+ // Arrange
+ var chatHistory = new ChatHistory
+ {
+ new ChatMessageContent(AuthorRole.System, "Use C# 12 features."),
+ new ChatMessageContent(AuthorRole.User, "Write a C# Hello world?")
+ };
+ var huggingFaceRemote = new HuggingFaceChatCompletionService(Model, endpoint: new Uri(Endpoint), apiKey: this.GetApiKey());
+
+ // Act
+ var response = new StringBuilder();
+ await foreach (var update in huggingFaceRemote.GetStreamingChatMessageContentsAsync(chatHistory, new HuggingFacePromptExecutionSettings() { MaxNewTokens = 50 }))
+ {
+ if (update.Content is { Length: > 0 })
+ {
+ response.Append(update.Content);
+ }
+ }
+
+ // Assert
+ Assert.NotNull(response);
+ Assert.True(response.Length > 0);
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task InvokeKernelFunctionAsync()
+ {
+ // Arrange
+ Kernel kernel = Kernel.CreateBuilder()
+ .AddHuggingFaceChatCompletion(Model, endpoint: new Uri(Endpoint), apiKey: this.GetApiKey())
+ .Build();
+
+ var kernelFunction = kernel.CreateFunctionFromPrompt("Write a C# Hello world", new HuggingFacePromptExecutionSettings
+ {
+ MaxNewTokens = 50,
+ });
+
+ // Act
+ var response = await kernel.InvokeAsync(kernelFunction);
+
+ // Assert
+ Assert.NotNull(response);
+ Assert.True(response.ToString().Length > 0);
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task InvokeKernelFunctionStreamingAsync()
+ {
+ // Arrange
+ Kernel kernel = Kernel.CreateBuilder()
+ .AddHuggingFaceChatCompletion(Model, endpoint: new Uri(Endpoint), apiKey: this.GetApiKey())
+ .Build();
+
+ var kernelFunction = kernel.CreateFunctionFromPrompt("Write a C# Hello world", new HuggingFacePromptExecutionSettings
+ {
+ MaxNewTokens = 50,
+ });
+
+ // Act
+ var response = new StringBuilder();
+ await foreach (var update in kernel.InvokeStreamingAsync(kernelFunction))
+ {
+ if (update.ToString() is { Length: > 0 })
+ {
+ response.Append(update.ToString());
+ }
+ }
+
+ // Assert
+ Assert.NotNull(response);
+ Assert.True(response.ToString().Length > 0);
+ }
+
+ private string GetApiKey()
+ {
+ return this._configuration.GetSection("HuggingFace:ApiKey").Get()!;
+ }
+}
diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTests.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTests.cs
index 0e8aee320856..e75116e34893 100644
--- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTests.cs
@@ -1,9 +1,14 @@
// Copyright (c) Microsoft. All rights reserved.
using System;
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
using System.Linq;
+using System.Threading;
using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.AzureCosmosDBNoSQL;
+using Microsoft.SemanticKernel.Embeddings;
using Microsoft.SemanticKernel.Memory;
using MongoDB.Driver;
using Xunit;
@@ -117,6 +122,54 @@ public async Task ItCanGetNearestMatchesAsync(int limit, bool withEmbeddings)
await memoryStore.DeleteCollectionAsync(collectionName);
}
+ [Theory(Skip = SkipReason)]
+ [InlineData(true)]
+ [InlineData(false)]
+ public async Task ItCanSaveReferenceGetTextAndSearchTextAsync(bool withEmbedding)
+ {
+ var collectionName = this._fixture.CollectionName;
+ var memoryStore = this._fixture.MemoryStore;
+ var textMemory = new SemanticTextMemory(memoryStore, new MockTextEmbeddingGenerationService());
+ var textToStore = "SampleText";
+ var id = "MyExternalId";
+ var source = "MyExternalSource";
+ var refId = await textMemory.SaveReferenceAsync(collectionName, textToStore, id, source);
+ Assert.NotNull(refId);
+
+ var expectedQueryResult = new MemoryQueryResult(
+ new MemoryRecordMetadata(isReference: true, id, text: "", description: "", source, additionalMetadata: ""),
+ 1.0,
+ withEmbedding ? DataHelper.VectorSearchTestEmbedding : null);
+
+ var queryResult = await textMemory.GetAsync(collectionName, refId, withEmbedding);
+ AssertQueryResultEqual(expectedQueryResult, queryResult, withEmbedding);
+
+ var searchResults = await textMemory.SearchAsync(collectionName, textToStore, withEmbeddings: withEmbedding).ToListAsync();
+ Assert.Equal(1, searchResults?.Count);
+ AssertQueryResultEqual(expectedQueryResult, searchResults?[0], compareEmbeddings: true);
+
+ await textMemory.RemoveAsync(collectionName, refId);
+ }
+
+ private static void AssertQueryResultEqual(MemoryQueryResult expected, MemoryQueryResult? actual, bool compareEmbeddings)
+ {
+ Assert.NotNull(actual);
+ Assert.Equal(expected.Relevance, actual.Relevance);
+ Assert.Equal(expected.Metadata.Id, actual.Metadata.Id);
+ Assert.Equal(expected.Metadata.Text, actual.Metadata.Text);
+ Assert.Equal(expected.Metadata.Description, actual.Metadata.Description);
+ Assert.Equal(expected.Metadata.ExternalSourceName, actual.Metadata.ExternalSourceName);
+ Assert.Equal(expected.Metadata.AdditionalMetadata, actual.Metadata.AdditionalMetadata);
+ Assert.Equal(expected.Metadata.IsReference, actual.Metadata.IsReference);
+
+ if (compareEmbeddings)
+ {
+ Assert.NotNull(expected.Embedding);
+ Assert.NotNull(actual.Embedding);
+ Assert.Equal(expected.Embedding.Value.Span, actual.Embedding.Value.Span);
+ }
+ }
+
private static void AssertMemoryRecordEqual(
MemoryRecord expectedRecord,
MemoryRecord actualRecord,
@@ -147,4 +200,15 @@ private static void AssertMemoryRecordEqual(
Assert.True(actualRecord.Embedding.Span.IsEmpty);
}
}
+
+ private sealed class MockTextEmbeddingGenerationService : ITextEmbeddingGenerationService
+ {
+ public IReadOnlyDictionary Attributes { get; } = ReadOnlyDictionary.Empty;
+
+ public Task>> GenerateEmbeddingsAsync(IList data, Kernel? kernel = null, CancellationToken cancellationToken = default)
+ {
+ IList> result = new List> { DataHelper.VectorSearchTestEmbedding };
+ return Task.FromResult(result);
+ }
+ }
}
diff --git a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs
index 93cbea170f40..1df46166e63f 100644
--- a/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Memory/AzureCosmosDBNoSQL/AzureCosmosDBNoSQLMemoryStoreTestsFixture.cs
@@ -1,7 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
using System;
-using System.Collections.ObjectModel;
using System.Threading.Tasks;
using Microsoft.Azure.Cosmos;
using Microsoft.Extensions.Configuration;
@@ -35,28 +34,9 @@ public AzureCosmosDBNoSQLMemoryStoreTestsFixture()
this.MemoryStore = new AzureCosmosDBNoSQLMemoryStore(
connectionString,
this.DatabaseName,
- new VectorEmbeddingPolicy(
- new Collection
- {
- new()
- {
- DataType = VectorDataType.Float32,
- Dimensions = 3,
- DistanceFunction = DistanceFunction.Cosine,
- Path = "/embedding"
- }
- }),
- new()
- {
- VectorIndexes = new Collection {
- new()
- {
- Path = "/embedding",
- Type = VectorIndexType.Flat,
- },
- },
- }
- );
+ dimensions: 3,
+ VectorDataType.Float32,
+ VectorIndexType.Flat);
}
public Task InitializeAsync()
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs
index a2285a1c4dd5..03cd3429d4b0 100644
--- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAICompletionTests.cs
@@ -397,6 +397,38 @@ public async Task AzureOpenAIInvokePromptTestAsync()
// Assert
Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase);
+ Assert.NotNull(actual.Metadata);
+ }
+
+ [Fact]
+ public async Task AzureOpenAIInvokePromptWithMultipleResultsTestAsync()
+ {
+ // Arrange
+ this._kernelBuilder.Services.AddSingleton(this._logger);
+ var builder = this._kernelBuilder;
+ this.ConfigureAzureOpenAIChatAsText(builder);
+ Kernel target = builder.Build();
+
+ var prompt = "Where is the most famous fish market in Seattle, Washington, USA?";
+
+ var executionSettings = new OpenAIPromptExecutionSettings() { MaxTokens = 150, ResultsPerPrompt = 3 };
+
+ // Act
+ FunctionResult actual = await target.InvokePromptAsync(prompt, new(executionSettings));
+
+ // Assert
+ Assert.Null(actual.Metadata);
+
+ var chatMessageContents = actual.GetValue>();
+
+ Assert.NotNull(chatMessageContents);
+ Assert.Equal(executionSettings.ResultsPerPrompt, chatMessageContents.Count);
+
+ foreach (var chatMessageContent in chatMessageContents)
+ {
+ Assert.NotNull(chatMessageContent.Metadata);
+ Assert.Contains("Pike Place", chatMessageContent.Content, StringComparison.OrdinalIgnoreCase);
+ }
}
[Fact]
@@ -434,7 +466,7 @@ public async Task MultipleServiceLoadPromptConfigTestAsync()
{
"name": "FishMarket2",
"execution_settings": {
- "azure-text-davinci-003": {
+ "azure-gpt-35-turbo-instruct": {
"max_tokens": 256
}
}
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs
new file mode 100644
index 000000000000..30b0c3d1115b
--- /dev/null
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIFileServiceTests.cs
@@ -0,0 +1,156 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.Connectors.OpenAI;
+using SemanticKernel.IntegrationTests.TestSettings;
+using Xunit;
+using Xunit.Abstractions;
+
+namespace SemanticKernel.IntegrationTests.Connectors.OpenAI;
+
+#pragma warning disable xUnit1004 // Contains test methods used in manual verification. Disable warning for this file only.
+
+public sealed class OpenAIFileServiceTests(ITestOutputHelper output) : IDisposable
+{
+ private readonly IConfigurationRoot _configuration = new ConfigurationBuilder()
+ .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true)
+ .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+ .AddEnvironmentVariables()
+ .AddUserSecrets()
+ .Build();
+
+ [Theory(Skip = "OpenAI will often throttle requests. This test is for manual verification.")]
+ [InlineData("test_image_001.jpg", "image/jpeg")]
+ [InlineData("test_content.txt", "text/plain")]
+ public async Task OpenAIFileServiceLifecycleAsync(string fileName, string mimeType)
+ {
+ // Arrange
+ OpenAIFileService fileService = this.CreateOpenAIFileService();
+
+ // Act & Assert
+ await this.VerifyFileServiceLifecycleAsync(fileService, fileName, mimeType);
+ }
+
+ [Theory]
+ [InlineData("test_image_001.jpg", "image/jpeg")]
+ [InlineData("test_content.txt", "text/plain")]
+ public async Task AzureOpenAIFileServiceLifecycleAsync(string fileName, string mimeType)
+ {
+ // Arrange
+ OpenAIFileService fileService = this.CreateOpenAIFileService();
+
+ // Act & Assert
+ await this.VerifyFileServiceLifecycleAsync(fileService, fileName, mimeType);
+ }
+
+ private async Task VerifyFileServiceLifecycleAsync(OpenAIFileService fileService, string fileName, string mimeType)
+ {
+ // Setup file content
+ await using FileStream fileStream = File.OpenRead($"./TestData/{fileName}");
+ BinaryData sourceData = await BinaryData.FromStreamAsync(fileStream);
+ BinaryContent sourceContent = new(sourceData.ToArray(), mimeType);
+
+ // Upload file with unsupported purpose (failure case)
+ await Assert.ThrowsAsync(() => fileService.UploadContentAsync(sourceContent, new(fileName, OpenAIFilePurpose.AssistantsOutput)));
+
+ // Upload file with wacky purpose (failure case)
+ await Assert.ThrowsAsync(() => fileService.UploadContentAsync(sourceContent, new(fileName, new OpenAIFilePurpose("pretend"))));
+
+ // Upload file
+ OpenAIFileReference fileReference = await fileService.UploadContentAsync(sourceContent, new(fileName, OpenAIFilePurpose.FineTune));
+ try
+ {
+ AssertFileReferenceEquals(fileReference, fileName, sourceData.Length, OpenAIFilePurpose.FineTune);
+
+ // Retrieve files by different purpose
+ Dictionary fileMap = await GetFilesAsync(fileService, OpenAIFilePurpose.Assistants);
+ Assert.DoesNotContain(fileReference.Id, fileMap.Keys);
+
+ // Retrieve files by wacky purpose (failure case)
+ await Assert.ThrowsAsync(() => GetFilesAsync(fileService, new OpenAIFilePurpose("pretend")));
+
+ // Retrieve files by expected purpose
+ fileMap = await GetFilesAsync(fileService, OpenAIFilePurpose.FineTune);
+ Assert.Contains(fileReference.Id, fileMap.Keys);
+ AssertFileReferenceEquals(fileMap[fileReference.Id], fileName, sourceData.Length, OpenAIFilePurpose.FineTune);
+
+ // Retrieve files by no specific purpose
+ fileMap = await GetFilesAsync(fileService);
+ Assert.Contains(fileReference.Id, fileMap.Keys);
+ AssertFileReferenceEquals(fileMap[fileReference.Id], fileName, sourceData.Length, OpenAIFilePurpose.FineTune);
+
+ // Retrieve file by id
+ OpenAIFileReference file = await fileService.GetFileAsync(fileReference.Id);
+ AssertFileReferenceEquals(file, fileName, sourceData.Length, OpenAIFilePurpose.FineTune);
+
+ // Retrieve file content
+ BinaryContent retrievedContent = await fileService.GetFileContentAsync(fileReference.Id);
+ Assert.NotNull(retrievedContent.Data);
+ Assert.NotNull(retrievedContent.Uri);
+ Assert.NotNull(retrievedContent.Metadata);
+ Assert.Equal(fileReference.Id, retrievedContent.Metadata["id"]);
+ Assert.Equal(sourceContent.Data!.Value.Length, retrievedContent.Data.Value.Length);
+ }
+ finally
+ {
+ // Delete file
+ await fileService.DeleteFileAsync(fileReference.Id);
+ }
+ }
+
+ private static void AssertFileReferenceEquals(OpenAIFileReference fileReference, string expectedFileName, int expectedSize, OpenAIFilePurpose expectedPurpose)
+ {
+ Assert.Equal(expectedFileName, fileReference.FileName);
+ Assert.Equal(expectedPurpose, fileReference.Purpose);
+ Assert.Equal(expectedSize, fileReference.SizeInBytes);
+ }
+
+ private static async Task> GetFilesAsync(OpenAIFileService fileService, OpenAIFilePurpose? purpose = null)
+ {
+ IEnumerable files = await fileService.GetFilesAsync(purpose);
+ Dictionary fileIds = files.DistinctBy(f => f.Id).ToDictionary(f => f.Id);
+ return fileIds;
+ }
+
+ #region internals
+
+ private readonly XunitLogger _logger = new(output);
+ private readonly RedirectOutput _testOutputHelper = new(output);
+
+ public void Dispose()
+ {
+ this._logger.Dispose();
+ this._testOutputHelper.Dispose();
+ }
+
+ private OpenAIFileService CreateOpenAIFileService()
+ {
+ var openAIConfiguration = this._configuration.GetSection("OpenAI").Get();
+
+ Assert.NotNull(openAIConfiguration);
+ Assert.NotNull(openAIConfiguration.ApiKey);
+ Assert.NotNull(openAIConfiguration.ServiceId);
+
+ return new(openAIConfiguration.ApiKey, openAIConfiguration.ServiceId, loggerFactory: this._logger);
+ }
+
+ private OpenAIFileService CreateAzureOpenAIFileService()
+ {
+ var azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAI").Get();
+
+ Assert.NotNull(azureOpenAIConfiguration);
+ Assert.NotNull(azureOpenAIConfiguration.Endpoint);
+ Assert.NotNull(azureOpenAIConfiguration.ApiKey);
+ Assert.NotNull(azureOpenAIConfiguration.ServiceId);
+
+ return new(new Uri(azureOpenAIConfiguration.Endpoint), azureOpenAIConfiguration.ApiKey, azureOpenAIConfiguration.ServiceId, loggerFactory: this._logger);
+ }
+
+ #endregion
+}
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs
new file mode 100644
index 000000000000..e133f91ee547
--- /dev/null
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAITextToImageTests.cs
@@ -0,0 +1,85 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading.Tasks;
+using Microsoft.Extensions.Configuration;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.TextToImage;
+using SemanticKernel.IntegrationTests.TestSettings;
+using Xunit;
+
+namespace SemanticKernel.IntegrationTests.Connectors.OpenAI;
+public sealed class OpenAITextToImageTests
+{
+ private readonly IConfigurationRoot _configuration = new ConfigurationBuilder()
+ .AddJsonFile(path: "testsettings.json", optional: false, reloadOnChange: true)
+ .AddJsonFile(path: "testsettings.development.json", optional: true, reloadOnChange: true)
+ .AddEnvironmentVariables()
+ .AddUserSecrets()
+ .Build();
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task OpenAITextToImageTestAsync()
+ {
+ // Arrange
+ OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAITextToImage").Get();
+ Assert.NotNull(openAIConfiguration);
+
+ var kernel = Kernel.CreateBuilder()
+ .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey)
+ .Build();
+
+ var service = kernel.GetRequiredService();
+
+ // Act
+ var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 512, 512);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.NotEmpty(result);
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task OpenAITextToImageByModelTestAsync()
+ {
+ // Arrange
+ OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("OpenAITextToImage").Get<OpenAIConfiguration>();
+ Assert.NotNull(openAIConfiguration);
+
+ var kernel = Kernel.CreateBuilder()
+ .AddOpenAITextToImage(apiKey: openAIConfiguration.ApiKey, modelId: openAIConfiguration.ModelId)
+ .Build();
+
+ var service = kernel.GetRequiredService<ITextToImageService>();
+
+ // Act
+ var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 1024, 1024);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.NotEmpty(result);
+ }
+
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task AzureOpenAITextToImageTestAsync()
+ {
+ // Arrange
+ AzureOpenAIConfiguration? azureOpenAIConfiguration = this._configuration.GetSection("AzureOpenAITextToImage").Get<AzureOpenAIConfiguration>();
+ Assert.NotNull(azureOpenAIConfiguration);
+
+ var kernel = Kernel.CreateBuilder()
+ .AddAzureOpenAITextToImage(
+ azureOpenAIConfiguration.DeploymentName,
+ azureOpenAIConfiguration.Endpoint,
+ azureOpenAIConfiguration.ApiKey)
+ .Build();
+
+ var service = kernel.GetRequiredService<ITextToImageService>();
+
+ // Act
+ var result = await service.GenerateImageAsync("The sun rises in the east and sets in the west.", 1024, 1024);
+
+ // Assert
+ Assert.NotNull(result);
+ Assert.NotEmpty(result);
+ }
+}
diff --git a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs
index ebfcccd31472..049287fbbc14 100644
--- a/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/OpenAI/OpenAIToolsTests.cs
@@ -460,6 +460,253 @@ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFu
Assert.NotNull(getWeatherForCityFunctionCallResult.Result);
}
+ [Fact]
+ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForManualFunctionCallingForStreamingAsync()
+ {
+ // Arrange
+ var kernel = this.InitializeKernel(importHelperPlugin: true);
+
+ var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+
+ var sut = kernel.GetRequiredService<IChatCompletionService>();
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");
+
+ string? result = null;
+
+ // Act
+ while (true)
+ {
+ AuthorRole? authorRole = null;
+ var fccBuilder = new FunctionCallContentBuilder();
+ var textContent = new StringBuilder();
+
+ await foreach (var streamingContent in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
+ {
+ textContent.Append(streamingContent.Content);
+ authorRole ??= streamingContent.Role;
+ fccBuilder.Append(streamingContent);
+ }
+
+ var functionCalls = fccBuilder.Build();
+ if (functionCalls.Any())
+ {
+ var fcContent = new ChatMessageContent(role: authorRole ?? default, content: null);
+ chatHistory.Add(fcContent);
+
+ // Iterating over the requested function calls and invoking them
+ foreach (var functionCall in functionCalls)
+ {
+ fcContent.Items.Add(functionCall);
+
+ var functionResult = await functionCall.InvokeAsync(kernel);
+
+ chatHistory.Add(functionResult.ToChatMessage());
+ }
+
+ continue;
+ }
+
+ result = textContent.ToString();
+ break;
+ }
+
+ // Assert
+ Assert.Contains("rain", result, StringComparison.InvariantCultureIgnoreCase);
+ }
+
+ [Fact]
+ public async Task ConnectorAgnosticFunctionCallingModelClassesCanBeUsedForAutoFunctionCallingForStreamingAsync()
+ {
+ // Arrange
+ var kernel = this.InitializeKernel(importHelperPlugin: true);
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");
+
+ var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.AutoInvokeKernelFunctions };
+
+ var sut = kernel.GetRequiredService<IChatCompletionService>();
+
+ var result = new StringBuilder();
+
+ // Act
+ await foreach (var contentUpdate in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
+ {
+ result.Append(contentUpdate.Content);
+ }
+
+ // Assert
+ Assert.Equal(5, chatHistory.Count);
+
+ var userMessage = chatHistory[0];
+ Assert.Equal(AuthorRole.User, userMessage.Role);
+
+ // LLM requested the current time.
+ var getCurrentTimeFunctionCallRequestMessage = chatHistory[1];
+ Assert.Equal(AuthorRole.Assistant, getCurrentTimeFunctionCallRequestMessage.Role);
+
+ var getCurrentTimeFunctionCallRequest = getCurrentTimeFunctionCallRequestMessage.Items.OfType<FunctionCallContent>().Single();
+ Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallRequest.FunctionName);
+ Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallRequest.PluginName);
+ Assert.NotNull(getCurrentTimeFunctionCallRequest.Id);
+
+ // Connector invoked the GetCurrentUtcTime function and added result to chat history.
+ var getCurrentTimeFunctionCallResultMessage = chatHistory[2];
+ Assert.Equal(AuthorRole.Tool, getCurrentTimeFunctionCallResultMessage.Role);
+ Assert.Single(getCurrentTimeFunctionCallResultMessage.Items.OfType<TextContent>()); // Current function calling model adds TextContent item representing the result of the function call.
+
+ var getCurrentTimeFunctionCallResult = getCurrentTimeFunctionCallResultMessage.Items.OfType<FunctionResultContent>().Single();
+ Assert.Equal("GetCurrentUtcTime", getCurrentTimeFunctionCallResult.FunctionName);
+ Assert.Equal("HelperFunctions", getCurrentTimeFunctionCallResult.PluginName);
+ Assert.Equal(getCurrentTimeFunctionCallRequest.Id, getCurrentTimeFunctionCallResult.CallId);
+ Assert.NotNull(getCurrentTimeFunctionCallResult.Result);
+
+ // LLM requested the weather for Boston.
+ var getWeatherForCityFunctionCallRequestMessage = chatHistory[3];
+ Assert.Equal(AuthorRole.Assistant, getWeatherForCityFunctionCallRequestMessage.Role);
+
+ var getWeatherForCityFunctionCallRequest = getWeatherForCityFunctionCallRequestMessage.Items.OfType<FunctionCallContent>().Single();
+ Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallRequest.FunctionName);
+ Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallRequest.PluginName);
+ Assert.NotNull(getWeatherForCityFunctionCallRequest.Id);
+
+ // Connector invoked the Get_Weather_For_City function and added result to chat history.
+ var getWeatherForCityFunctionCallResultMessage = chatHistory[4];
+ Assert.Equal(AuthorRole.Tool, getWeatherForCityFunctionCallResultMessage.Role);
+ Assert.Single(getWeatherForCityFunctionCallResultMessage.Items.OfType<TextContent>()); // Current function calling model adds TextContent item representing the result of the function call.
+
+ var getWeatherForCityFunctionCallResult = getWeatherForCityFunctionCallResultMessage.Items.OfType<FunctionResultContent>().Single();
+ Assert.Equal("Get_Weather_For_City", getWeatherForCityFunctionCallResult.FunctionName);
+ Assert.Equal("HelperFunctions", getWeatherForCityFunctionCallResult.PluginName);
+ Assert.Equal(getWeatherForCityFunctionCallRequest.Id, getWeatherForCityFunctionCallResult.CallId);
+ Assert.NotNull(getWeatherForCityFunctionCallResult.Result);
+ }
+
+ [Fact(Skip = "The test is temporarily disabled until a more stable solution is found.")]
+ public async Task ConnectorAgnosticFunctionCallingModelClassesCanPassFunctionExceptionToConnectorForStreamingAsync()
+ {
+ // Arrange
+ var kernel = this.InitializeKernel(importHelperPlugin: true);
+
+ var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+
+ var sut = kernel.GetRequiredService<IChatCompletionService>();
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddSystemMessage("If you are unable to answer the question for whatever reason, please add the 'error' keyword to the response.");
+ chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");
+
+ string? result = null;
+
+ // Act
+ while (true)
+ {
+ AuthorRole? authorRole = null;
+ var fccBuilder = new FunctionCallContentBuilder();
+ var textContent = new StringBuilder();
+
+ await foreach (var streamingContent in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
+ {
+ textContent.Append(streamingContent.Content);
+ authorRole ??= streamingContent.Role;
+ fccBuilder.Append(streamingContent);
+ }
+
+ var functionCalls = fccBuilder.Build();
+ if (functionCalls.Any())
+ {
+ var fcContent = new ChatMessageContent(role: authorRole ?? default, content: null);
+ chatHistory.Add(fcContent);
+
+ // Iterating over the requested function calls and invoking them
+ foreach (var functionCall in functionCalls)
+ {
+ fcContent.Items.Add(functionCall);
+
+ // Simulating an exception
+ var exception = new OperationCanceledException("The operation was canceled due to timeout.");
+
+ chatHistory.Add(new FunctionResultContent(functionCall, exception).ToChatMessage());
+ }
+
+ continue;
+ }
+
+ result = textContent.ToString();
+ break;
+ }
+
+ // Assert
+ Assert.Contains("error", result, StringComparison.InvariantCultureIgnoreCase);
+ }
+
+ [Fact]
+ public async Task ConnectorAgnosticFunctionCallingModelClassesSupportSimulatedFunctionCallsForStreamingAsync()
+ {
+ // Arrange
+ var kernel = this.InitializeKernel(importHelperPlugin: true);
+
+ var settings = new OpenAIPromptExecutionSettings() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
+
+ var sut = kernel.GetRequiredService<IChatCompletionService>();
+
+ var chatHistory = new ChatHistory();
+ chatHistory.AddSystemMessage("if there's a tornado warning, please add the 'tornado' keyword to the response.");
+ chatHistory.AddUserMessage("Given the current time of day and weather, what is the likely color of the sky in Boston?");
+
+ string? result = null;
+
+ // Act
+ while (true)
+ {
+ AuthorRole? authorRole = null;
+ var fccBuilder = new FunctionCallContentBuilder();
+ var textContent = new StringBuilder();
+
+ await foreach (var streamingContent in sut.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
+ {
+ textContent.Append(streamingContent.Content);
+ authorRole ??= streamingContent.Role;
+ fccBuilder.Append(streamingContent);
+ }
+
+ var functionCalls = fccBuilder.Build();
+ if (functionCalls.Any())
+ {
+ var fcContent = new ChatMessageContent(role: authorRole ?? default, content: null);
+ chatHistory.Add(fcContent);
+
+ // Iterating over the requested function calls and invoking them
+ foreach (var functionCall in functionCalls)
+ {
+ fcContent.Items.Add(functionCall);
+
+ var functionResult = await functionCall.InvokeAsync(kernel);
+
+ chatHistory.Add(functionResult.ToChatMessage());
+ }
+
+ // Adding a simulated function call to the connector response message
+ var simulatedFunctionCall = new FunctionCallContent("weather-alert", id: "call_123");
+ fcContent.Items.Add(simulatedFunctionCall);
+
+ // Adding a simulated function result to chat history
+ var simulatedFunctionResult = "A Tornado Watch has been issued, with potential for severe thunderstorms causing unusual sky colors like green, yellow, or dark gray. Stay informed and follow safety instructions from authorities.";
+ chatHistory.Add(new FunctionResultContent(simulatedFunctionCall, simulatedFunctionResult).ToChatMessage());
+
+ continue;
+ }
+
+ result = textContent.ToString();
+ break;
+ }
+
+ // Assert
+ Assert.Contains("tornado", result, StringComparison.InvariantCultureIgnoreCase);
+ }
+
private Kernel InitializeKernel(bool importHelperPlugin = false)
{
OpenAIConfiguration? openAIConfiguration = this._configuration.GetSection("Planners:OpenAI").Get<OpenAIConfiguration>();
diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs
index cae56a022f7b..87fb3e1c888d 100644
--- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs
+++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithComplexObjectsTest.cs
@@ -1,9 +1,9 @@
// Copyright (c) Microsoft. All rights reserved.
-using Microsoft.SemanticKernel;
using System.IO;
using System.Text.Json.Nodes;
using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
using Xunit;
namespace SemanticKernel.IntegrationTests.CrossLanguage;
diff --git a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs
index 9fad909d790a..12d7166e0bb5 100644
--- a/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs
+++ b/dotnet/src/IntegrationTests/CrossLanguage/PromptWithHelperFunctionsTest.cs
@@ -1,10 +1,10 @@
// Copyright (c) Microsoft. All rights reserved.
-using Microsoft.SemanticKernel;
using System;
using System.IO;
using System.Text.Json.Nodes;
using System.Threading.Tasks;
+using Microsoft.SemanticKernel;
using Xunit;
namespace SemanticKernel.IntegrationTests.CrossLanguage;
diff --git a/dotnet/src/IntegrationTests/Plugins/RepairServiceTests.cs b/dotnet/src/IntegrationTests/Plugins/RepairServiceTests.cs
index f5da4448ef02..9d8610806d8c 100644
--- a/dotnet/src/IntegrationTests/Plugins/RepairServiceTests.cs
+++ b/dotnet/src/IntegrationTests/Plugins/RepairServiceTests.cs
@@ -2,6 +2,7 @@
using System.Net.Http;
using System.Text.Json;
using System.Text.Json.Serialization;
+using System.Threading;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Plugins.OpenApi;
@@ -106,6 +107,59 @@ public async Task HttpOperationExceptionIncludeRequestInfoAsync()
}
}
+ [Fact(Skip = "This test is for manual verification.")]
+ public async Task UseDelegatingHandlerAsync()
+ {
+ // Arrange
+ var kernel = new Kernel();
+ using var stream = System.IO.File.OpenRead("Plugins/repair-service.json");
+
+ using var httpHandler = new HttpClientHandler();
+ using var customHandler = new CustomHandler(httpHandler);
+ using HttpClient httpClient = new(customHandler);
+
+ var plugin = await kernel.ImportPluginFromOpenApiAsync(
+ "RepairService",
+ stream,
+ new OpenApiFunctionExecutionParameters(httpClient) { IgnoreNonCompliantErrors = true, EnableDynamicPayload = false });
+
+ // List All Repairs
+ var result = await plugin["listRepairs"].InvokeAsync(kernel);
+
+ Assert.NotNull(result);
+ var repairs = JsonSerializer.Deserialize<Repair[]>(result.ToString());
+ Assert.True(repairs?.Length > 0);
+ var count = repairs?.Length ?? 0;
+
+ // Create Repair - oil change
+ var arguments = new KernelArguments
+ {
+ ["payload"] = """{ "title": "Engine oil change", "description": "Need to drain the old engine oil and replace it with fresh oil.", "assignedTo": "", "date": "", "image": "" }"""
+ };
+ result = await plugin["createRepair"].InvokeAsync(kernel, arguments);
+
+ Assert.NotNull(result);
+ Assert.Equal("New repair created", result.ToString());
+
+ // Create Repair - brake pads change
+ arguments = new KernelArguments
+ {
+ ["payload"] = """{ "title": "Brake pads change", "description": "Need to replace the brake pads on all wheels.", "assignedTo": "", "date": "", "image": "" }"""
+ };
+ result = await plugin["createRepair"].InvokeAsync(kernel, arguments);
+
+ Assert.NotNull(result);
+ Assert.Equal("New repair created", result.ToString());
+
+ // List All Repairs
+ result = await plugin["listRepairs"].InvokeAsync(kernel);
+
+ Assert.NotNull(result);
+ repairs = JsonSerializer.Deserialize<Repair[]>(result.ToString());
+ Assert.True(repairs?.Length > 0);
+ Assert.Equal(count + 2, repairs?.Length);
+ }
+
public class Repair
{
[JsonPropertyName("id")]
@@ -126,4 +180,22 @@ public class Repair
[JsonPropertyName("image")]
public string? Image { get; set; }
}
+
+ private sealed class CustomHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler)
+ {
+ protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
+ {
+#if NET5_0_OR_GREATER
+ request.Options.TryGetValue(OpenApiKernelFunctionContext.KernelFunctionContextKey, out var context);
+#else
+ request.Properties.TryGetValue(OpenApiKernelFunctionContext.KernelFunctionContextKey, out var context);
+#endif
+
+ // Modify the HttpRequestMessage
+ request.Headers.Add("Kernel-Function-Name", context?.Function?.Name);
+
+ // Call the next handler in the pipeline
+ return await base.SendAsync(request, cancellationToken);
+ }
+ }
}
diff --git a/dotnet/src/IntegrationTests/README.md b/dotnet/src/IntegrationTests/README.md
index 4a16b6018543..bc2234acda64 100644
--- a/dotnet/src/IntegrationTests/README.md
+++ b/dotnet/src/IntegrationTests/README.md
@@ -3,7 +3,7 @@
## Requirements
1. **Azure OpenAI**: go to the [Azure OpenAI Quickstart](https://learn.microsoft.com/en-us/azure/cognitive-services/openai/quickstart)
- and deploy an instance of Azure OpenAI, deploy a model like "text-davinci-003" find your Endpoint and API key.
+ and deploy an instance of Azure OpenAI, deploy a model like "gpt-35-turbo-instruct", and find your Endpoint and API key.
2. **OpenAI**: go to [OpenAI](https://platform.openai.com) to register and procure your API key.
3. **HuggingFace API key**: see https://huggingface.co/docs/huggingface_hub/guides/inference for details.
4. **Azure Bing Web Search API**: go to [Bing Web Search API](https://www.microsoft.com/en-us/bing/apis/bing-web-search-api)
@@ -29,13 +29,18 @@ To set your secrets with Secret Manager:
cd dotnet/src/IntegrationTests
dotnet user-secrets init
-dotnet user-secrets set "OpenAI:ServiceId" "text-davinci-003"
-dotnet user-secrets set "OpenAI:ModelId" "text-davinci-003"
+
+dotnet user-secrets set "OpenAI:ServiceId" "gpt-3.5-turbo-instruct"
+dotnet user-secrets set "OpenAI:ModelId" "gpt-3.5-turbo-instruct"
dotnet user-secrets set "OpenAI:ChatModelId" "gpt-4"
dotnet user-secrets set "OpenAI:ApiKey" "..."
-dotnet user-secrets set "AzureOpenAI:ServiceId" "azure-text-davinci-003"
-dotnet user-secrets set "AzureOpenAI:DeploymentName" "text-davinci-003"
+dotnet user-secrets set "OpenAITextToImage:ServiceId" "dall-e-3"
+dotnet user-secrets set "OpenAITextToImage:ModelId" "dall-e-3"
+dotnet user-secrets set "OpenAITextToImage:ApiKey" "..."
+
+dotnet user-secrets set "AzureOpenAI:ServiceId" "azure-gpt-35-turbo-instruct"
+dotnet user-secrets set "AzureOpenAI:DeploymentName" "gpt-35-turbo-instruct"
dotnet user-secrets set "AzureOpenAI:ChatDeploymentName" "gpt-4"
dotnet user-secrets set "AzureOpenAI:Endpoint" "https://contoso.openai.azure.com/"
dotnet user-secrets set "AzureOpenAI:ApiKey" "..."
@@ -45,14 +50,21 @@ dotnet user-secrets set "AzureOpenAIEmbeddings:DeploymentName" "text-embedding-a
dotnet user-secrets set "AzureOpenAIEmbeddings:Endpoint" "https://contoso.openai.azure.com/"
dotnet user-secrets set "AzureOpenAIEmbeddings:ApiKey" "..."
+dotnet user-secrets set "AzureOpenAIAudioToText:ServiceId" "azure-audio-to-text"
dotnet user-secrets set "AzureOpenAIAudioToText:DeploymentName" "whisper-1"
dotnet user-secrets set "AzureOpenAIAudioToText:Endpoint" "https://contoso.openai.azure.com/"
dotnet user-secrets set "AzureOpenAIAudioToText:ApiKey" "..."
+dotnet user-secrets set "AzureOpenAITextToAudio:ServiceId" "azure-text-to-audio"
dotnet user-secrets set "AzureOpenAITextToAudio:DeploymentName" "tts-1"
dotnet user-secrets set "AzureOpenAITextToAudio:Endpoint" "https://contoso.openai.azure.com/"
dotnet user-secrets set "AzureOpenAITextToAudio:ApiKey" "..."
+dotnet user-secrets set "AzureOpenAITextToImage:ServiceId" "azure-text-to-image"
+dotnet user-secrets set "AzureOpenAITextToImage:DeploymentName" "dall-e-3"
+dotnet user-secrets set "AzureOpenAITextToImage:Endpoint" "https://contoso.openai.azure.com/"
+dotnet user-secrets set "AzureOpenAITextToImage:ApiKey" "..."
+
dotnet user-secrets set "MistralAI:ChatModel" "mistral-large-latest"
dotnet user-secrets set "MistralAI:EmbeddingModel" "mistral-embed"
dotnet user-secrets set "MistralAI:ApiKey" "..."
@@ -82,14 +94,14 @@ For example:
```json
{
"OpenAI": {
- "ServiceId": "text-davinci-003",
- "ModelId": "text-davinci-003",
+ "ServiceId": "gpt-3.5-turbo-instruct",
+ "ModelId": "gpt-3.5-turbo-instruct",
"ChatModelId": "gpt-4",
"ApiKey": "sk-...."
},
"AzureOpenAI": {
- "ServiceId": "azure-text-davinci-003",
- "DeploymentName": "text-davinci-003",
+ "ServiceId": "azure-gpt-35-turbo-instruct",
+ "DeploymentName": "gpt-35-turbo-instruct",
"ChatDeploymentName": "gpt-4",
"Endpoint": "https://contoso.openai.azure.com/",
"ApiKey": "...."
@@ -127,7 +139,7 @@ When setting environment variables, use a double underscore (i.e. "\_\_") to del
```bash
export OpenAI__ApiKey="sk-...."
export AzureOpenAI__ApiKey="...."
- export AzureOpenAI__DeploymentName="azure-text-davinci-003"
+ export AzureOpenAI__DeploymentName="gpt-35-turbo-instruct"
export AzureOpenAI__ChatDeploymentName="gpt-4"
export AzureOpenAIEmbeddings__DeploymentName="azure-text-embedding-ada-002"
export AzureOpenAI__Endpoint="https://contoso.openai.azure.com/"
@@ -141,7 +153,7 @@ When setting environment variables, use a double underscore (i.e. "\_\_") to del
```ps
$env:OpenAI__ApiKey = "sk-...."
$env:AzureOpenAI__ApiKey = "...."
- $env:AzureOpenAI__DeploymentName = "azure-text-davinci-003"
+ $env:AzureOpenAI__DeploymentName = "gpt-35-turbo-instruct"
$env:AzureOpenAI__ChatDeploymentName = "gpt-4"
$env:AzureOpenAIEmbeddings__DeploymentName = "azure-text-embedding-ada-002"
$env:AzureOpenAI__Endpoint = "https://contoso.openai.azure.com/"
diff --git a/dotnet/src/IntegrationTests/TestData/test_content.txt b/dotnet/src/IntegrationTests/TestData/test_content.txt
new file mode 100644
index 000000000000..447ce0649e56
--- /dev/null
+++ b/dotnet/src/IntegrationTests/TestData/test_content.txt
@@ -0,0 +1,9 @@
+Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Amet dictum sit amet justo donec enim diam vulputate ut. Nibh ipsum consequat nisl vel pretium lectus. Urna nec tincidunt praesent semper feugiat. Tristique nulla aliquet enim tortor. Ut morbi tincidunt augue interdum velit euismod in pellentesque massa. Ullamcorper morbi tincidunt ornare massa eget egestas purus viverra. Commodo ullamcorper a lacus vestibulum sed arcu non. Volutpat ac tincidunt vitae semper quis lectus nulla. Sem nulla pharetra diam sit amet nisl. Viverra aliquet eget sit amet tellus cras adipiscing enim eu.
+
+Morbi blandit cursus risus at ultrices mi tempus. Sagittis orci a scelerisque purus. Iaculis nunc sed augue lacus viverra. Accumsan sit amet nulla facilisi morbi tempus iaculis. Nisl rhoncus mattis rhoncus urna neque. Commodo odio aenean sed adipiscing diam donec adipiscing tristique. Tristique senectus et netus et malesuada fames. Nascetur ridiculus mus mauris vitae ultricies leo integer. Ut sem viverra aliquet eget. Sed egestas egestas fringilla phasellus faucibus scelerisque.
+
+In tellus integer feugiat scelerisque varius morbi. Vitae proin sagittis nisl rhoncus mattis rhoncus urna neque. Cum sociis natoque penatibus et magnis dis. Iaculis at erat pellentesque adipiscing commodo elit at imperdiet dui. Praesent semper feugiat nibh sed pulvinar proin gravida hendrerit lectus. Consectetur a erat nam at lectus urna. Hac habitasse platea dictumst vestibulum rhoncus est pellentesque elit. Aliquam vestibulum morbi blandit cursus risus at ultrices. Eu non diam phasellus vestibulum lorem sed. Risus pretium quam vulputate dignissim suspendisse in est. Elit scelerisque mauris pellentesque pulvinar pellentesque habitant morbi. At varius vel pharetra vel turpis nunc eget. Aliquam malesuada bibendum arcu vitae. At consectetur lorem donec massa. Mi sit amet mauris commodo. Maecenas volutpat blandit aliquam etiam erat velit. Nullam ac tortor vitae purus faucibus ornare suspendisse.
+
+Facilisi nullam vehicula ipsum a arcu cursus vitae. Commodo sed egestas egestas fringilla phasellus. Lacus luctus accumsan tortor posuere ac ut consequat. Adipiscing commodo elit at imperdiet dui accumsan sit. Non tellus orci ac auctor augue. Viverra aliquet eget sit amet tellus. Luctus venenatis lectus magna fringilla urna porttitor rhoncus dolor. Mattis enim ut tellus elementum. Nunc sed id semper risus. At augue eget arcu dictum.
+
+Ullamcorper a lacus vestibulum sed arcu non. Vitae tortor condimentum lacinia quis vel. Dui faucibus in ornare quam viverra. Vel pharetra vel turpis nunc eget. In egestas erat imperdiet sed euismod nisi porta lorem mollis. Lacus vestibulum sed arcu non odio euismod lacinia at quis. Augue mauris augue neque gravida in. Ornare quam viverra orci sagittis. Lacus suspendisse faucibus interdum posuere lorem ipsum. Arcu vitae elementum curabitur vitae nunc sed velit dignissim. Diam quam nulla porttitor massa id neque. Gravida dictum fusce ut placerat orci nulla pellentesque. Mus mauris vitae ultricies leo integer malesuada nunc vel risus. Donec pretium vulputate sapien nec sagittis aliquam. Velit egestas dui id ornare. Sed elementum tempus egestas sed sed risus pretium quam vulputate.
\ No newline at end of file
diff --git a/dotnet/src/IntegrationTests/prompts/GenerateStory.yaml b/dotnet/src/IntegrationTests/prompts/GenerateStory.yaml
index fc5ecd88f34e..d3612b594d59 100644
--- a/dotnet/src/IntegrationTests/prompts/GenerateStory.yaml
+++ b/dotnet/src/IntegrationTests/prompts/GenerateStory.yaml
@@ -1,6 +1,6 @@
name: GenerateStory
template: |
- Tell a story about {{$topic}} that is {{$length}} sentences long.
+ Tell a story about {{$topic}} that is {{$length}} sentences long. Include {{$topic}} words in response.
template_format: semantic-kernel
description: A function that generates a story about a topic.
input_variables:
diff --git a/dotnet/src/IntegrationTests/prompts/GenerateStoryHandlebars.yaml b/dotnet/src/IntegrationTests/prompts/GenerateStoryHandlebars.yaml
index b1cb891fb706..891a33f30747 100644
--- a/dotnet/src/IntegrationTests/prompts/GenerateStoryHandlebars.yaml
+++ b/dotnet/src/IntegrationTests/prompts/GenerateStoryHandlebars.yaml
@@ -1,6 +1,6 @@
name: GenerateStory
template: |
- Tell a story about {{topic}} that is {{length}} sentences long.
+ Tell a story about {{topic}} that is {{length}} sentences long. Include {{topic}} words in response.
template_format: handlebars
description: A function that generates a story about a topic.
input_variables:
diff --git a/dotnet/src/IntegrationTests/testsettings.json b/dotnet/src/IntegrationTests/testsettings.json
index 353b97a32ec7..39ec5c4d3b1c 100644
--- a/dotnet/src/IntegrationTests/testsettings.json
+++ b/dotnet/src/IntegrationTests/testsettings.json
@@ -1,12 +1,12 @@
{
"OpenAI": {
- "ServiceId": "text-davinci-003",
- "ModelId": "text-davinci-003",
+ "ServiceId": "gpt-3.5-turbo-instruct",
+ "ModelId": "gpt-3.5-turbo-instruct",
"ApiKey": ""
},
"AzureOpenAI": {
- "ServiceId": "azure-text-davinci-003",
- "DeploymentName": "text-davinci-003",
+ "ServiceId": "azure-gpt-35-turbo-instruct",
+ "DeploymentName": "gpt-35-turbo-instruct",
"ChatDeploymentName": "gpt-4",
"Endpoint": "",
"ApiKey": ""
diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs
index dadf49c15d27..8e65d7dcd88a 100644
--- a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs
+++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs
@@ -89,17 +89,16 @@ public void WriteLine(string? format, params object?[] args)
///
/// This method can be substituted by Console.WriteLine when used in Console apps.
///
+ /// <param name="message">The message</param>
public void WriteLine(string? message)
- => this.Output.WriteLine(message);
+ => this.Output.WriteLine(message ?? string.Empty);
///
/// Current interface ITestOutputHelper does not have a Write method. This extension method adds it to make it analogous to Console.Write when used in Console apps.
///
/// Target object to write
public void Write(object? target = null)
- {
- this.Output.WriteLine(target ?? string.Empty);
- }
+ => this.Output.WriteLine(target ?? string.Empty);
protected sealed class LoggingHandler(HttpMessageHandler innerHandler, ITestOutputHelper output) : DelegatingHandler(innerHandler)
{
diff --git a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs
index 3b53a9e5bda2..3425d187e4fd 100644
--- a/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs
+++ b/dotnet/src/InternalUtilities/src/Diagnostics/ModelDiagnostics.cs
@@ -325,13 +325,13 @@ private static void SetCompletionResponse(
int? promptTokens,
int? completionTokens)
{
- if (!IsModelDiagnosticsEnabled())
+ if (!IsModelDiagnosticsEnabled() || choices.Count == 0)
{
return;
}
// Assuming all metadata is in the last chunk of the choice
- switch (choices.FirstOrDefault().Value.FirstOrDefault())
+ switch (choices.FirstOrDefault().Value?.FirstOrDefault())
{
case StreamingTextContent:
var textCompletions = choices.Select(choiceContents =>
diff --git a/dotnet/src/Plugins/Plugins.Core/CodeInterpreter/SessionsPythonPlugin.cs b/dotnet/src/Plugins/Plugins.Core/CodeInterpreter/SessionsPythonPlugin.cs
index e61b5ec2c5b4..6c92763f3fe4 100644
--- a/dotnet/src/Plugins/Plugins.Core/CodeInterpreter/SessionsPythonPlugin.cs
+++ b/dotnet/src/Plugins/Plugins.Core/CodeInterpreter/SessionsPythonPlugin.cs
@@ -21,7 +21,7 @@ namespace Microsoft.SemanticKernel.Plugins.Core.CodeInterpreter;
public partial class SessionsPythonPlugin
{
private static readonly string s_assemblyVersion = typeof(Kernel).Assembly.GetName().Version!.ToString();
-
+ private const string ApiVersion = "2024-02-02-preview";
private readonly Uri _poolManagementEndpoint;
private readonly SessionsPythonSettings _settings;
private readonly Func<Task<string>>? _authTokenProvider;
@@ -97,7 +97,7 @@ public async Task ExecuteCodeAsync([Description("The valid Python code t
await this.AddHeadersAsync(httpClient).ConfigureAwait(false);
- using var request = new HttpRequestMessage(HttpMethod.Post, this._poolManagementEndpoint + "python/execute")
+ using var request = new HttpRequestMessage(HttpMethod.Post, this._poolManagementEndpoint + $"python/execute?api-version={ApiVersion}")
{
Content = new StringContent(JsonSerializer.Serialize(requestBody), Encoding.UTF8, "application/json")
};
@@ -155,7 +155,7 @@ public async Task UploadFileAsync(
await this.AddHeadersAsync(httpClient).ConfigureAwait(false);
using var fileContent = new ByteArrayContent(File.ReadAllBytes(localFilePath));
- using var request = new HttpRequestMessage(HttpMethod.Post, $"{this._poolManagementEndpoint}python/uploadFile?identifier={this._settings.SessionId}")
+ using var request = new HttpRequestMessage(HttpMethod.Post, $"{this._poolManagementEndpoint}python/uploadFile?identifier={this._settings.SessionId}&api-version={ApiVersion}")
{
Content = new MultipartFormDataContent
{
@@ -194,7 +194,7 @@ public async Task DownloadFileAsync(
using var httpClient = this._httpClientFactory.CreateClient();
await this.AddHeadersAsync(httpClient).ConfigureAwait(false);
- var response = await httpClient.GetAsync(new Uri($"{this._poolManagementEndpoint}python/downloadFile?identifier={this._settings.SessionId}&filename={remoteFilePath}")).ConfigureAwait(false);
+ var response = await httpClient.GetAsync(new Uri($"{this._poolManagementEndpoint}python/downloadFile?identifier={this._settings.SessionId}&filename={remoteFilePath}&api-version={ApiVersion}")).ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
var errorBody = await response.Content.ReadAsStringAsync().ConfigureAwait(false);
@@ -230,7 +230,7 @@ public async Task> ListFilesAsync()
using var httpClient = this._httpClientFactory.CreateClient();
await this.AddHeadersAsync(httpClient).ConfigureAwait(false);
- var response = await httpClient.GetAsync(new Uri($"{this._poolManagementEndpoint}python/files?identifier={this._settings.SessionId}")).ConfigureAwait(false);
+ var response = await httpClient.GetAsync(new Uri($"{this._poolManagementEndpoint}python/files?identifier={this._settings.SessionId}&api-version={ApiVersion}")).ConfigureAwait(false);
if (!response.IsSuccessStatusCode)
{
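Reviewer note: purely as an illustration of the change above, the snippet below shows how the new ApiVersion constant is appended to every pool-management route; the endpoint and session id are placeholders, not values from this diff.

```csharp
using System;

const string apiVersion = "2024-02-02-preview";
var poolManagementEndpoint = new Uri("https://sessions-pool.example.io/"); // placeholder endpoint
var sessionId = Guid.NewGuid().ToString();

// Each plugin operation now carries the api-version query parameter.
Console.WriteLine($"{poolManagementEndpoint}python/execute?api-version={apiVersion}");
Console.WriteLine($"{poolManagementEndpoint}python/files?identifier={sessionId}&api-version={apiVersion}");
```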
diff --git a/dotnet/src/Plugins/Plugins.Core/PromptFunctionConstants.cs b/dotnet/src/Plugins/Plugins.Core/PromptFunctionConstants.cs
index 34b90cc9bb90..03bbd9783299 100644
--- a/dotnet/src/Plugins/Plugins.Core/PromptFunctionConstants.cs
+++ b/dotnet/src/Plugins/Plugins.Core/PromptFunctionConstants.cs
@@ -10,7 +10,7 @@ internal static class PromptFunctionConstants
END CONTENT TO SUMMARIZE.
-Summarize the conversation in 'CONTENT TO SUMMARIZE', identifying main points of discussion and any conclusions that were reached.
+Summarize the conversation in 'CONTENT TO SUMMARIZE', identifying main points of discussion and any conclusions that were reached, in the language that best fits the content.
Do not incorporate other general knowledge.
Summary is in plain text, in complete sentences, with no markup or tags.
@@ -19,10 +19,14 @@ Do not incorporate other general knowledge.
internal const string GetConversationActionItemsDefinition =
"""
- You are an action item extractor. You will be given chat history and need to make note of action items mentioned in the chat.
+ You are an action item extractor. You will be given chat history or content and need to make note of action items mentioned.
Extract action items from the content if there are any. If there are no action, return nothing. If a single field is missing, use an empty string.
Return the action items in json.
+ Guidelines:
+ Action items are specific tasks or requests that someone needs to complete.
+ Routine statements or general comments about habits or preferences should not be considered action items.
+
Possible statuses for action items are: Open, Closed, In Progress.
EXAMPLE INPUT WITH ACTION ITEMS:
@@ -43,6 +47,23 @@ Return the action items in json.
]
}
+ EXAMPLE INPUT WITH IMPLIED ACTION ITEMS:
+
+ I need a list of vegan breakfast recipes. Can you get that for me?
+
+ EXAMPLE OUTPUT:
+ {
+ "actionItems": [
+ {
+ "owner": "",
+ "actionItem": "Give a list of breakfast recipes that are vegan friendly",
+ "dueDate": "",
+ "status": "Open",
+ "notes": ""
+ }
+ ]
+ }
+
EXAMPLE INPUT WITHOUT ACTION ITEMS:
John Doe said: "Hey I'm going to the store, do you need anything?"
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs
index 7c572509056c..05f473b1b792 100644
--- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/AuthorRole.cs
@@ -32,7 +32,7 @@ namespace Microsoft.SemanticKernel.ChatCompletion;
public static AuthorRole Tool { get; } = new("tool");
///
- /// Gets the label associated with this AuthorRole.
+ /// Gets the label associated with this <see cref="AuthorRole"/>.
///
///
/// The label is what will be serialized into the "role" message field of the Chat Message format.
@@ -40,9 +40,9 @@ namespace Microsoft.SemanticKernel.ChatCompletion;
public string Label { get; }
///
- /// Creates a new AuthorRole instance with the provided label.
+ /// Creates a new <see cref="AuthorRole"/> instance with the provided label.
///
- /// The label to associate with this AuthorRole.
+ /// The label to associate with this <see cref="AuthorRole"/>.
[JsonConstructor]
public AuthorRole(string label)
{
@@ -51,21 +51,21 @@ public AuthorRole(string label)
}
///
- /// Returns a value indicating whether two AuthorRole instances are equivalent, as determined by a
+ /// Returns a value indicating whether two <see cref="AuthorRole"/> instances are equivalent, as determined by a
/// case-insensitive comparison of their labels.
///
- /// the first AuthorRole instance to compare
- /// the second AuthorRole instance to compare
+ /// the first <see cref="AuthorRole"/> instance to compare
+ /// the second <see cref="AuthorRole"/> instance to compare
/// true if left and right are both null or have equivalent labels; false otherwise
public static bool operator ==(AuthorRole left, AuthorRole right)
=> left.Equals(right);
///
- /// Returns a value indicating whether two AuthorRole instances are not equivalent, as determined by a
+ /// Returns a value indicating whether two <see cref="AuthorRole"/> instances are not equivalent, as determined by a
/// case-insensitive comparison of their labels.
///
- /// the first AuthorRole instance to compare
- /// the second AuthorRole instance to compare
+ /// the first <see cref="AuthorRole"/> instance to compare
+ /// the second <see cref="AuthorRole"/> instance to compare
/// false if left and right are both null or have equivalent labels; true otherwise
public static bool operator !=(AuthorRole left, AuthorRole right)
=> !(left == right);
@@ -80,8 +80,8 @@ public bool Equals(AuthorRole other)
///
public override int GetHashCode()
- => StringComparer.OrdinalIgnoreCase.GetHashCode(this.Label ?? string.Empty);
+ => StringComparer.OrdinalIgnoreCase.GetHashCode(this.Label);
///
- public override string ToString() => this.Label ?? string.Empty;
+ public override string ToString() => this.Label;
}
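Reviewer note: a small sketch of the equality semantics documented above; labels compare case-insensitively while ToString() now returns the label as provided.

```csharp
using System;
using Microsoft.SemanticKernel.ChatCompletion;

// A role deserialized with different casing still matches the built-in value.
var fromWire = new AuthorRole("ASSISTANT");

Console.WriteLine(fromWire == AuthorRole.Assistant); // True
Console.WriteLine(fromWire);                         // ASSISTANT
```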
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs
index 102faca62de8..a452d979c4f5 100644
--- a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/ChatCompletionServiceExtensions.cs
@@ -53,7 +53,7 @@ public static Task> GetChatMessageContentsAsyn
/// The AI execution settings (optional).
/// The containing services, plugins, and other state for use throughout the operation.
/// The to monitor for cancellation requests. The default is .
- /// List of different chat results generated by the remote model
+ /// Single chat message content generated by the remote model.
public static async Task GetChatMessageContentAsync(
this IChatCompletionService chatCompletionService,
string prompt,
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/StreamingKernelContentItemCollection.cs b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/StreamingKernelContentItemCollection.cs
new file mode 100644
index 000000000000..d3dbac4f919d
--- /dev/null
+++ b/dotnet/src/SemanticKernel.Abstractions/AI/ChatCompletion/StreamingKernelContentItemCollection.cs
@@ -0,0 +1,142 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+
+namespace Microsoft.SemanticKernel.ChatCompletion;
+
+#pragma warning disable CA1033 // Interface methods should be callable by child types
+
+///
+/// Contains collection of streaming kernel content items of type <see cref="StreamingKernelContent"/>.
+///
+[Experimental("SKEXP0001")]
+public sealed class StreamingKernelContentItemCollection : IList<StreamingKernelContent>, IReadOnlyList<StreamingKernelContent>
+{
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ public StreamingKernelContentItemCollection()
+ {
+ this._items = [];
+ }
+
+ ///
+ /// Gets or sets the content item at the specified index in the collection.
+ ///
+ /// The index of the content item to get or set.
+ /// The content item at the specified index.
+ /// is null.
+ /// The was not valid for this collection.
+ public StreamingKernelContent this[int index]
+ {
+ get => this._items[index];
+ set
+ {
+ Verify.NotNull(value);
+ this._items[index] = value;
+ }
+ }
+
+ ///
+ /// Gets the number of content items in the collection.
+ ///
+ public int Count => this._items.Count;
+
+ ///
+ /// Adds a content item to the collection.
+ ///
+ /// The content item to add.
+ /// is null.
+ public void Add(StreamingKernelContent item)
+ {
+ Verify.NotNull(item);
+ this._items.Add(item);
+ }
+
+ ///
+ /// Removes all content items from the collection.
+ ///
+ public void Clear() => this._items.Clear();
+
+ ///
+ /// Determines whether a content item is in the collection.
+ ///
+ /// The content item to locate.
+ /// True if the content item is found in the collection; otherwise, false.
+ /// is null.
+ public bool Contains(StreamingKernelContent item)
+ {
+ Verify.NotNull(item);
+ return this._items.Contains(item);
+ }
+
+ ///
+ /// Copies all of the content items in the collection to an array, starting at the specified destination array index.
+ ///
+ /// The destination array into which the content items should be copied.
+ /// The zero-based index into at which copying should begin.
+ /// is null.
+ /// The number of content items in the collection is greater than the available space from to the end of .
+ /// is less than 0.
+ public void CopyTo(StreamingKernelContent[] array, int arrayIndex) => this._items.CopyTo(array, arrayIndex);
+
+ ///
+ /// Searches for the specified content item and returns the index of the first occurrence.
+ ///
+ /// The content item to locate.
+ /// The index of the first found occurrence of the specified content item; -1 if the content item could not be found.
+ /// is null.
+ public int IndexOf(StreamingKernelContent item)
+ {
+ Verify.NotNull(item);
+ return this._items.IndexOf(item);
+ }
+
+ ///
+ /// Inserts a content item into the collection at the specified index.
+ ///
+ /// The index at which the content item should be inserted.
+ /// The content item to insert.
+ /// is null.
+ public void Insert(int index, StreamingKernelContent item)
+ {
+ Verify.NotNull(item);
+ this._items.Insert(index, item);
+ }
+
+ ///
+ /// Removes the first occurrence of the specified content item from the collection.
+ ///
+ /// The content item to remove from the collection.
+ /// True if the item was successfully removed; false if it wasn't located in the collection.
+ /// is null.
+ public bool Remove(StreamingKernelContent item)
+ {
+ Verify.NotNull(item);
+ return this._items.Remove(item);
+ }
+
+ ///
+ /// Removes the content item at the specified index from the collection.
+ ///
+ /// The index of the content item to remove.
+ public void RemoveAt(int index) => this._items.RemoveAt(index);
+
+ ///
+ bool ICollection<StreamingKernelContent>.IsReadOnly => false;
+
+ ///
+ IEnumerator<StreamingKernelContent> IEnumerable<StreamingKernelContent>.GetEnumerator() => this._items.GetEnumerator();
+
+ ///
+ IEnumerator IEnumerable.GetEnumerator() => this._items.GetEnumerator();
+
+ #region private
+
+ private readonly List<StreamingKernelContent> _items;
+
+ #endregion
+}
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs
index bce11b356e0f..f10ccaa3ff39 100644
--- a/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/AI/PromptExecutionSettings.cs
@@ -3,6 +3,7 @@
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
+using System.Diagnostics.CodeAnalysis;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.TextGeneration;
@@ -27,6 +28,27 @@ public class PromptExecutionSettings
///
public static string DefaultServiceId => "default";
+ ///
+ /// Service identifier.
+ /// This identifies the service these settings are configured for e.g., azure_openai_eastus, openai, ollama, huggingface, etc.
+ ///
+ ///
+ /// When provided, this service identifier will be the key in a dictionary collection of execution settings for both <see cref="KernelArguments.ExecutionSettings"/> and <see cref="PromptTemplateConfig.ExecutionSettings"/>.
+ /// If not provided the service identifier will be the default value in <see cref="DefaultServiceId"/>.
+ ///
+ [Experimental("SKEXP0001")]
+ [JsonPropertyName("service_id")]
+ public string? ServiceId
+ {
+ get => this._serviceId;
+
+ set
+ {
+ this.ThrowIfFrozen();
+ this._serviceId = value;
+ }
+ }
+
///
/// Model identifier.
/// This identifies the AI model these settings are configured for e.g., gpt-4, gpt-3.5-turbo
@@ -93,6 +115,7 @@ public virtual PromptExecutionSettings Clone()
return new()
{
ModelId = this.ModelId,
+ ServiceId = this.ServiceId,
ExtensionData = this.ExtensionData is not null ? new Dictionary<string, object>(this.ExtensionData) : null
};
}
@@ -113,6 +136,7 @@ protected void ThrowIfFrozen()
private string? _modelId;
private IDictionary<string, object>? _extensionData;
+ private string? _serviceId;
#endregion
}
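Reviewer note: a minimal sketch of the new, SKEXP0001-flagged ServiceId property; the identifier value is illustrative only.

```csharp
using System;
using Microsoft.SemanticKernel;

// ServiceId is experimental, so a #pragma warning disable SKEXP0001 may be required.
var settings = new PromptExecutionSettings
{
    ModelId = "gpt-4",
    ServiceId = "azure_openai_eastus" // illustrative service identifier
};

// Clone() now carries ServiceId across, alongside ModelId and ExtensionData.
PromptExecutionSettings copy = settings.Clone();
Console.WriteLine(copy.ServiceId); // azure_openai_eastus
```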
diff --git a/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/TextGenerationExtensions.cs b/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/TextGenerationExtensions.cs
index bf955ff2ebc1..0f044184e1bc 100644
--- a/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/TextGenerationExtensions.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/AI/TextGeneration/TextGenerationExtensions.cs
@@ -23,7 +23,7 @@ public static class TextGenerationExtensions
/// The AI execution settings (optional).
/// The containing services, plugins, and other state for use throughout the operation.
/// The to monitor for cancellation requests. The default is .
- /// List of different text results generated by the remote model
+ /// Single text content generated by the remote model.
public static async Task GetTextContentAsync(
this ITextGenerationService textGenerationService,
string prompt,
@@ -34,7 +34,7 @@ public static async Task GetTextContentAsync(
.Single();
///
- /// Get a single text generation result for the standardized prompt and settings.
+ /// Get text generation results for the standardized prompt and settings.
///
/// Text generation service
/// The standardized prompt input.
@@ -42,7 +42,7 @@ public static async Task GetTextContentAsync(
/// The containing services, plugins, and other state for use throughout the operation.
/// The to monitor for cancellation requests. The default is .
/// List of different text results generated by the remote model
- internal static async Task<TextContent> GetTextContentWithDefaultParserAsync(
+ internal static async Task<IReadOnlyList<TextContent>> GetTextContentsWithDefaultParserAsync(
this ITextGenerationService textGenerationService,
string prompt,
PromptExecutionSettings? executionSettings = null,
@@ -52,12 +52,14 @@ internal static async Task GetTextContentWithDefaultParserAsync(
if (textGenerationService is IChatCompletionService chatCompletion
&& ChatPromptParser.TryParse(prompt, out var chatHistory))
{
- var chatMessage = await chatCompletion.GetChatMessageContentAsync(chatHistory, executionSettings, kernel, cancellationToken).ConfigureAwait(false);
- return new TextContent(chatMessage.Content, chatMessage.ModelId, chatMessage.InnerContent, chatMessage.Encoding, chatMessage.Metadata);
+ var chatMessages = await chatCompletion.GetChatMessageContentsAsync(chatHistory, executionSettings, kernel, cancellationToken).ConfigureAwait(false);
+ return chatMessages
+ .Select(chatMessage => new TextContent(chatMessage.Content, chatMessage.ModelId, chatMessage.InnerContent, chatMessage.Encoding, chatMessage.Metadata))
+ .ToArray();
}
// When using against text generations, the prompt will be used as is.
- return await textGenerationService.GetTextContentAsync(prompt, executionSettings, kernel, cancellationToken).ConfigureAwait(false);
+ return await textGenerationService.GetTextContentsAsync(prompt, executionSettings, kernel, cancellationToken).ConfigureAwait(false);
}
///
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs
index d9c31c50982c..24ff3cf19438 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Contents/ChatMessageContent.cs
@@ -41,18 +41,12 @@ public string? Content
}
set
{
- if (value is null)
- {
- return;
- }
-
var textContent = this.Items.OfType<TextContent>().FirstOrDefault();
if (textContent is not null)
{
textContent.Text = value;
- textContent.Encoding = this.Encoding;
}
- else
+ else if (value is not null)
{
this.Items.Add(new TextContent(
text: value,
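Reviewer note: a short sketch of the revised Content setter behavior above. Assigning null now clears the text of an existing TextContent item instead of being ignored, while a message without a text item only gains one for non-null values.

```csharp
using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

var message = new ChatMessageContent(AuthorRole.Assistant, "Hello");
message.Content = null;                      // clears the existing TextContent's text
Console.WriteLine(message.Content is null);  // True

var empty = new ChatMessageContent(AuthorRole.Assistant, content: null);
empty.Content = null;                        // no TextContent item is added for a null value
Console.WriteLine(empty.Items.Count);        // 0
```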
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/FunctionCallContentBuilder.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/FunctionCallContentBuilder.cs
new file mode 100644
index 000000000000..42abeab610f5
--- /dev/null
+++ b/dotnet/src/SemanticKernel.Abstractions/Contents/FunctionCallContentBuilder.cs
@@ -0,0 +1,161 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System;
+using System.Collections.Generic;
+using System.Diagnostics.CodeAnalysis;
+using System.Linq;
+using System.Text;
+using System.Text.Json;
+
+namespace Microsoft.SemanticKernel;
+
+///
+/// A builder class for creating <see cref="FunctionCallContent"/> objects from incremental function call updates represented by <see cref="StreamingFunctionCallUpdateContent"/>.
+///
+[Experimental("SKEXP0001")]
+public sealed class FunctionCallContentBuilder
+{
+ private Dictionary<int, string>? _functionCallIdsByIndex = null;
+ private Dictionary<int, string>? _functionNamesByIndex = null;
+ private Dictionary<int, StringBuilder>? _functionArgumentBuildersByIndex = null;
+
+ ///
+ /// Extracts function call updates from the content and track them for later building.
+ ///
+ /// The content to extract function call updates from.
+ public void Append(StreamingChatMessageContent content)
+ {
+ var streamingFunctionCallUpdates = content.Items.OfType<StreamingFunctionCallUpdateContent>();
+
+ foreach (var update in streamingFunctionCallUpdates)
+ {
+ TrackStreamingFunctionCallUpdate(update,
+ ref this._functionCallIdsByIndex,
+ ref this._functionNamesByIndex,
+ ref this._functionArgumentBuildersByIndex);
+ }
+ }
+
+ ///
+ /// Builds a list of <see cref="FunctionCallContent"/> out of function call updates tracked by the <see cref="Append"/> method.
+ ///
+ /// A list of objects.
+ public IReadOnlyList<FunctionCallContent> Build()
+ {
+ FunctionCallContent[]? functionCalls = null;
+
+ if (this._functionCallIdsByIndex is { Count: > 0 })
+ {
+ functionCalls = new FunctionCallContent[this._functionCallIdsByIndex.Count];
+
+ for (int i = 0; i < this._functionCallIdsByIndex.Count; i++)
+ {
+ KeyValuePair<int, string> functionCallIndexAndId = this._functionCallIdsByIndex.ElementAt(i);
+
+ string? pluginName = null;
+ string functionName = string.Empty;
+
+ if (this._functionNamesByIndex?.TryGetValue(functionCallIndexAndId.Key, out string? fqn) ?? false)
+ {
+ var functionFullyQualifiedName = Microsoft.SemanticKernel.FunctionName.Parse(fqn);
+ pluginName = functionFullyQualifiedName.PluginName;
+ functionName = functionFullyQualifiedName.Name;
+ }
+
+ (KernelArguments? arguments, Exception? exception) = this.GetFunctionArguments(functionCallIndexAndId.Key);
+
+ functionCalls[i] = new FunctionCallContent(
+ functionName: functionName,
+ pluginName: pluginName,
+ id: functionCallIndexAndId.Value,
+ arguments)
+ {
+ Exception = exception
+ };
+ }
+ }
+
+ return functionCalls ?? [];
+ }
+
+ ///
+ /// Gets function arguments for a given function call index.
+ ///
+ /// The function call index to get the function arguments for.
+ /// A tuple containing the KernelArguments and an Exception if any.
+ private (KernelArguments? Arguments, Exception? Exception) GetFunctionArguments(int functionCallIndex)
+ {
+ if (this._functionArgumentBuildersByIndex is null ||
+ !this._functionArgumentBuildersByIndex.TryGetValue(functionCallIndex, out StringBuilder? functionArgumentsBuilder))
+ {
+ return (null, null);
+ }
+
+ var argumentsString = functionArgumentsBuilder.ToString();
+ if (string.IsNullOrEmpty(argumentsString))
+ {
+ return (null, null);
+ }
+
+ Exception? exception = null;
+ KernelArguments? arguments = null;
+ try
+ {
+ arguments = JsonSerializer.Deserialize<KernelArguments>(argumentsString);
+ if (arguments is { Count: > 0 })
+ {
+ var names = arguments.Names.ToArray();
+ foreach (var name in names)
+ {
+ arguments[name] = arguments[name]?.ToString();
+ }
+ }
+ }
+ catch (JsonException ex)
+ {
+ exception = new KernelException("Error: Function call arguments were invalid JSON.", ex);
+ }
+
+ return (arguments, exception);
+ }
+
+ ///
+ /// Tracks streaming function call update contents.
+ ///
+ /// The streaming function call update content to track.
+ /// The dictionary of function call IDs by function call index.
+ /// The dictionary of function names by function call index.
+ /// The dictionary of function argument builders by function call index.
+ private static void TrackStreamingFunctionCallUpdate(StreamingFunctionCallUpdateContent update, ref Dictionary<int, string>? functionCallIdsByIndex, ref Dictionary<int, string>? functionNamesByIndex, ref Dictionary<int, StringBuilder>? functionArgumentBuildersByIndex)
+ {
+ if (update is null)
+ {
+ // Nothing to track.
+ return;
+ }
+
+ // If we have a call id, ensure the index is being tracked. Even if it's not a function update,
+ // we want to keep track of it so we can send back an error.
+ if (update.CallId is string id)
+ {
+ (functionCallIdsByIndex ??= [])[update.FunctionCallIndex] = id;
+ }
+
+ // Ensure we're tracking the function's name.
+ if (update.Name is string name)
+ {
+ (functionNamesByIndex ??= [])[update.FunctionCallIndex] = name;
+ }
+
+ // Ensure we're tracking the function's arguments.
+ if (update.Arguments is string argumentsUpdate)
+ {
+ if (!(functionArgumentBuildersByIndex ??= []).TryGetValue(update.FunctionCallIndex, out StringBuilder? arguments))
+ {
+ functionArgumentBuildersByIndex[update.FunctionCallIndex] = arguments = new();
+ }
+
+ arguments.Append(argumentsUpdate);
+ }
+ }
+}
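Reviewer note: a condensed sketch of the consumption pattern this builder enables, mirroring the streaming tool-call tests earlier in this diff. It assumes a kernel with plugins and an IChatCompletionService already configured; the method name is illustrative and the experimental SKEXP0001 surface may need a warning suppression.

```csharp
using System.Text;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.OpenAI;

async Task<string> ChatWithManualToolCallsAsync(Kernel kernel, ChatHistory chatHistory)
{
    var chatService = kernel.GetRequiredService<IChatCompletionService>();
    var settings = new OpenAIPromptExecutionSettings { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };

    while (true)
    {
        AuthorRole? role = null;
        var text = new StringBuilder();
        var builder = new FunctionCallContentBuilder();

        await foreach (var update in chatService.GetStreamingChatMessageContentsAsync(chatHistory, settings, kernel))
        {
            role ??= update.Role;
            text.Append(update.Content);
            builder.Append(update);                      // accumulates StreamingFunctionCallUpdateContent items
        }

        var functionCalls = builder.Build();
        if (functionCalls.Count == 0)
        {
            return text.ToString();                      // no further tool calls; the model answered in text
        }

        var requestMessage = new ChatMessageContent(role ?? AuthorRole.Assistant, content: null);
        chatHistory.Add(requestMessage);

        foreach (var call in functionCalls)
        {
            requestMessage.Items.Add(call);              // echo the request into the history
            var result = await call.InvokeAsync(kernel); // run the kernel function
            chatHistory.Add(result.ToChatMessage());     // feed the result back to the model
        }
    }
}
```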
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs
index 5a14e6cb56d7..5cc7afb582ed 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingChatMessageContent.cs
@@ -2,6 +2,7 @@
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
+using System.Linq;
using System.Text;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel.ChatCompletion;
@@ -17,9 +18,46 @@ namespace Microsoft.SemanticKernel;
public class StreamingChatMessageContent : StreamingKernelContent
{
///
- /// Text associated to the message payload
+ /// A convenience property to get or set the text of the first item in the <see cref="Items"/> collection of <see cref="StreamingTextContent"/> type.
///
- public string? Content { get; set; }
+ public string? Content
+ {
+ get
+ {
+ var textContent = this.Items.OfType<StreamingTextContent>().FirstOrDefault();
+ return textContent?.Text;
+ }
+ set
+ {
+ var textContent = this.Items.OfType<StreamingTextContent>().FirstOrDefault();
+ if (textContent is not null)
+ {
+ textContent.Text = value;
+ }
+ else if (value is not null)
+ {
+ this.Items.Add(new StreamingTextContent(
+ text: value,
+ choiceIndex: this.ChoiceIndex,
+ modelId: this.ModelId,
+ innerContent: this.InnerContent,
+ encoding: this.Encoding,
+ metadata: this.Metadata
+ ));
+ }
+ }
+ }
+
+ ///
+ /// Chat message content items.
+ ///
+ [JsonIgnore]
+ [Experimental("SKEXP0001")]
+ public StreamingKernelContentItemCollection Items
+ {
+ get => this._items ??= [];
+ set => this._items = value;
+ }
///
/// Name of the author of the message
@@ -34,10 +72,32 @@ public class StreamingChatMessageContent : StreamingKernelContent
public AuthorRole? Role { get; set; }
///
- /// The encoding of the text content.
+ /// A convenience property to get or set the encoding of the first item in the <see cref="Items"/> collection of <see cref="StreamingTextContent"/> type.
///
[JsonIgnore]
- public Encoding Encoding { get; set; }
+ public Encoding Encoding
+ {
+ get
+ {
+ var textContent = this.Items.OfType<StreamingTextContent>().FirstOrDefault();
+ if (textContent is not null)
+ {
+ return textContent.Encoding;
+ }
+
+ return this._encoding;
+ }
+ set
+ {
+ this._encoding = value;
+
+ var textContent = this.Items.OfType<StreamingTextContent>().FirstOrDefault();
+ if (textContent is not null)
+ {
+ textContent.Encoding = value;
+ }
+ }
+ }
///
/// Initializes a new instance of the class.
@@ -55,7 +115,7 @@ public StreamingChatMessageContent(AuthorRole? role, string? content, object? in
{
this.Role = role;
this.Content = content;
- this.Encoding = encoding ?? Encoding.UTF8;
+ this._encoding = encoding ?? Encoding.UTF8;
}
///
@@ -63,4 +123,7 @@ public StreamingChatMessageContent(AuthorRole? role, string? content, object? in
///
public override byte[] ToByteArray() => this.Encoding.GetBytes(this.ToString());
+
+ private StreamingKernelContentItemCollection? _items;
+ private Encoding _encoding;
}
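Reviewer note: a brief sketch of how the new Items collection and the Content convenience property above interact; values are illustrative and the Items surface is SKEXP0001-flagged.

```csharp
using System;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

// The constructor routes the text into a StreamingTextContent item.
var chunk = new StreamingChatMessageContent(AuthorRole.Assistant, "partial ");
Console.WriteLine(chunk.Items.Count);   // 1

// Additional item types, such as function call updates, sit alongside the text.
chunk.Items.Add(new StreamingFunctionCallUpdateContent(callId: "call_1", name: "HelperFunctions-GetCurrentUtcTime"));
Console.WriteLine(chunk.Items.Count);   // 2
Console.WriteLine(chunk.Content);       // "partial "
```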
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingFunctionCallUpdateContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingFunctionCallUpdateContent.cs
new file mode 100644
index 000000000000..3d186681f481
--- /dev/null
+++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingFunctionCallUpdateContent.cs
@@ -0,0 +1,60 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Diagnostics.CodeAnalysis;
+using System.Text;
+
+namespace Microsoft.SemanticKernel;
+
+///
+/// Represents a function streaming call requested by LLM.
+///
+[Experimental("SKEXP0001")]
+public class StreamingFunctionCallUpdateContent : StreamingKernelContent
+{
+ ///
+ /// The function call ID.
+ ///
+ public string? CallId { get; init; }
+
+ ///
+ /// The function name.
+ ///
+ public string? Name { get; init; }
+
+ ///
+ /// The function arguments that can come as full or partial.
+ ///
+ public string? Arguments { get; init; }
+
+ ///
+ /// The function call index.
+ ///
+ public int FunctionCallIndex { get; init; }
+
+ ///
+ /// Creates a new instance of the class.
+ ///
+ /// The function call ID.
+ /// The function name.
+ /// The function original arguments.
+ /// The function call index.
+ public StreamingFunctionCallUpdateContent(string? callId = null, string? name = null, string? arguments = null, int functionCallIndex = 0)
+ {
+ this.CallId = callId;
+ this.Name = name;
+ this.Arguments = arguments;
+ this.FunctionCallIndex = functionCallIndex;
+ }
+
+ ///
+ public override string ToString()
+ {
+ return nameof(StreamingFunctionCallUpdateContent);
+ }
+
+ ///
+ public override byte[] ToByteArray()
+ {
+ return Encoding.UTF8.GetBytes(this.ToString());
+ }
+}
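A minimal sketch of constructing the new update content (experimental, SKEXP0001), assuming only the type introduced above; the id and name values are illustrative:

using Microsoft.SemanticKernel;

var update = new StreamingFunctionCallUpdateContent(
    callId: "call_123",                  // illustrative call id
    name: "WeatherUtils-GetTemperature", // plugin-function name
    arguments: "{\"city\":",             // arguments may arrive as partial fragments
    functionCallIndex: 0);

// ToString() returns the type name; ToByteArray() is its UTF-8 encoding.
byte[] bytes = update.ToByteArray();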
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingKernelContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingKernelContent.cs
index 0285eafe92c1..59231bf3c4f9 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingKernelContent.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingKernelContent.cs
@@ -13,7 +13,7 @@ public abstract class StreamingKernelContent
///
/// In a scenario of multiple choices per request, this represents zero-based index of the choice in the streaming sequence
///
- public int ChoiceIndex { get; }
+ public int ChoiceIndex { get; set; }
///
/// The inner content representation. Use this to bypass the current abstraction.
@@ -22,17 +22,17 @@ public abstract class StreamingKernelContent
/// The usage of this property is considered "unsafe". Use it only if strictly necessary.
///
[JsonIgnore]
- public object? InnerContent { get; }
+ public object? InnerContent { get; set; }
///
/// The model ID used to generate the content.
///
- public string? ModelId { get; }
+ public string? ModelId { get; set; }
///
/// The metadata associated with the content.
///
- public IReadOnlyDictionary<string, object?>? Metadata { get; }
+ public IReadOnlyDictionary<string, object?>? Metadata { get; set; }
///
/// Abstract string representation of the chunk in a way it could compose/append with previous chunks.
@@ -52,6 +52,13 @@ public abstract class StreamingKernelContent
/// Byte array representation of the chunk
public abstract byte[] ToByteArray();
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ protected StreamingKernelContent()
+ {
+ }
+
///
/// Initializes a new instance of the class.
///
diff --git a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingTextContent.cs b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingTextContent.cs
index f13d03820c60..766656be5a3b 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingTextContent.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Contents/StreamingTextContent.cs
@@ -15,7 +15,7 @@ public class StreamingTextContent : StreamingKernelContent
///
/// Text associated to the update
///
- public string? Text { get; }
+ public string? Text { get; set; }
///
/// The encoding of the text content.
diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs
index d7776f83f24a..eda736b3f583 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Functions/KernelArguments.cs
@@ -21,6 +21,7 @@ public sealed class KernelArguments : IDictionary, IReadOnlyDic
{
/// Dictionary of name/values for all the arguments in the instance.
private readonly Dictionary<string, object?> _arguments;
+ private IReadOnlyDictionary<string, PromptExecutionSettings>? _executionSettings;
///
/// Initializes a new instance of the class with the specified AI execution settings.
@@ -36,12 +37,36 @@ public KernelArguments()
///
/// The prompt execution settings.
public KernelArguments(PromptExecutionSettings? executionSettings)
+ : this(executionSettings is null ? null : [executionSettings])
{
- this._arguments = new(StringComparer.OrdinalIgnoreCase);
+ }
+ ///
+ /// Initializes a new instance of the class with the specified AI execution settings.
+ ///
+ /// The prompt execution settings.
+ public KernelArguments(IEnumerable<PromptExecutionSettings>? executionSettings)
+ {
+ this._arguments = new(StringComparer.OrdinalIgnoreCase);
if (executionSettings is not null)
{
- this.ExecutionSettings = new Dictionary<string, PromptExecutionSettings>() { { PromptExecutionSettings.DefaultServiceId, executionSettings } };
+ var newExecutionSettings = new Dictionary<string, PromptExecutionSettings>();
+ foreach (var settings in executionSettings)
+ {
+ var targetServiceId = settings.ServiceId ?? PromptExecutionSettings.DefaultServiceId;
+ if (newExecutionSettings.ContainsKey(targetServiceId))
+ {
+ var exceptionMessage = (targetServiceId == PromptExecutionSettings.DefaultServiceId)
+ ? $"Multiple prompt execution settings with the default service id '{PromptExecutionSettings.DefaultServiceId}' or no service id have been provided. Specify a single default prompt execution settings and provide a unique service id for all other instances."
+ : $"Multiple prompt execution settings with the service id '{targetServiceId}' have been provided. Provide a unique service id for all instances.";
+
+ throw new ArgumentException(exceptionMessage, nameof(executionSettings));
+ }
+
+ newExecutionSettings[targetServiceId] = settings;
+ }
+
+ this.ExecutionSettings = newExecutionSettings;
}
}
@@ -65,7 +90,30 @@ public KernelArguments(IDictionary source, Dictionary
/// Gets or sets the prompt execution settings.
///
- public IReadOnlyDictionary<string, PromptExecutionSettings>? ExecutionSettings { get; set; }
+ ///
+ /// The settings dictionary is keyed by the service ID, or <see cref="PromptExecutionSettings.DefaultServiceId"/> for the default execution settings.
+ /// When setting, the service id of each <see cref="PromptExecutionSettings"/> must match the key in the dictionary.
+ ///
+ public IReadOnlyDictionary<string, PromptExecutionSettings>? ExecutionSettings
+ {
+ get => this._executionSettings;
+ set
+ {
+ if (value is { Count: > 0 })
+ {
+ foreach (var kv in value!)
+ {
+ // Ensures that if a service id is specified it needs to match to the current key in the dictionary.
+ if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId) && kv.Key != kv.Value.ServiceId)
+ {
+ throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings));
+ }
+ }
+ }
+
+ this._executionSettings = value;
+ }
+ }
///
/// Gets the number of arguments contained in the .
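A minimal sketch of the new KernelArguments constructor that accepts several execution settings, assuming the changes above; the service ids are illustrative:

using Microsoft.SemanticKernel;

var arguments = new KernelArguments(
[
    new PromptExecutionSettings(),                           // stored under the default service id
    new PromptExecutionSettings { ServiceId = "service-2" }, // stored under "service-2"
])
{
    ["city"] = "Seattle"
};

// Two settings sharing a service id (or both lacking one) would throw ArgumentException instead.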
diff --git a/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponse.cs b/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponse.cs
index 5cfe2d09c850..d76077624557 100644
--- a/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponse.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/Functions/RestApiOperationResponse.cs
@@ -14,12 +14,12 @@ public sealed class RestApiOperationResponse
///
/// Gets the content of the response.
///
- public object Content { get; }
+ public object? Content { get; }
///
/// Gets the content type of the response.
///
- public string ContentType { get; }
+ public string? ContentType { get; }
///
/// The expected schema of the response as advertised in the OpenAPI operation.
@@ -47,7 +47,7 @@ public sealed class RestApiOperationResponse
/// The content of the response.
/// The content type of the response.
/// The schema against which the response body should be validated.
- public RestApiOperationResponse(object content, string contentType, KernelJsonSchema? expectedSchema = null)
+ public RestApiOperationResponse(object? content, string? contentType, KernelJsonSchema? expectedSchema = null)
{
this.Content = content;
this.ContentType = contentType;
diff --git a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs
index 1a55cbbff837..1cce254ec1a8 100644
--- a/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs
+++ b/dotnet/src/SemanticKernel.Abstractions/PromptTemplate/PromptTemplateConfig.cs
@@ -178,6 +178,7 @@ public List InputVariables
///
///
/// The settings dictionary is keyed by the service ID, or <see cref="PromptExecutionSettings.DefaultServiceId"/> for the default execution settings.
+ /// When setting, the service id of each <see cref="PromptExecutionSettings"/> must match the key in the dictionary.
///
[JsonPropertyName("execution_settings")]
public Dictionary<string, PromptExecutionSettings> ExecutionSettings
@@ -186,6 +187,19 @@ public Dictionary ExecutionSettings
set
{
Verify.NotNull(value);
+
+ if (value.Count != 0)
+ {
+ foreach (var kv in value)
+ {
+ // Ensures that if a service id is provided it must match the key in the dictionary.
+ if (!string.IsNullOrWhiteSpace(kv.Value.ServiceId) && kv.Key != kv.Value.ServiceId)
+ {
+ throw new ArgumentException($"Service id '{kv.Value.ServiceId}' must match the key '{kv.Key}'.", nameof(this.ExecutionSettings));
+ }
+ }
+ }
+
this._executionSettings = value;
}
}
@@ -224,7 +238,13 @@ public void AddExecutionSettings(PromptExecutionSettings settings, string? servi
{
Verify.NotNull(settings);
- var key = serviceId ?? PromptExecutionSettings.DefaultServiceId;
+ if (!string.IsNullOrWhiteSpace(serviceId) && !string.IsNullOrWhiteSpace(settings.ServiceId))
+ {
+ throw new ArgumentException($"Service id must not be passed when '{nameof(settings.ServiceId)}' is already provided in execution settings.", nameof(serviceId));
+ }
+
+ var key = serviceId ?? settings.ServiceId ?? PromptExecutionSettings.DefaultServiceId;
+
if (this.ExecutionSettings.ContainsKey(key))
{
throw new ArgumentException($"Execution settings for service id '{key}' already exists.", nameof(serviceId));
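A minimal sketch of the updated AddExecutionSettings key resolution, assuming the PromptTemplateConfig changes above; the template text and service id are illustrative:

using Microsoft.SemanticKernel;

var config = new PromptTemplateConfig("Tell me about {{$city}}");

// The key now falls back to settings.ServiceId before the default service id.
config.AddExecutionSettings(new PromptExecutionSettings { ServiceId = "gpt4-service" });

// Passing an explicit serviceId when settings.ServiceId is already set throws ArgumentException,
// as does registering a second entry for the same key.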
diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs
index 25d384d51351..f6f0a805f4a6 100644
--- a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs
+++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFactory.cs
@@ -4,6 +4,7 @@
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
+using System.Linq;
using System.Reflection;
using Microsoft.Extensions.Logging;
@@ -107,6 +108,37 @@ public static KernelFunction CreateFromPrompt(
string? templateFormat = null,
IPromptTemplateFactory? promptTemplateFactory = null,
ILoggerFactory? loggerFactory = null) =>
+ KernelFunctionFromPrompt.Create(
+ promptTemplate,
+ CreateSettingsDictionary(executionSettings is null ? null : [executionSettings]),
+ functionName,
+ description,
+ templateFormat,
+ promptTemplateFactory,
+ loggerFactory);
+
+ ///
+ /// Creates a <see cref="KernelFunction"/> instance for a prompt specified via a prompt template.
+ ///
+ /// Prompt template for the function.
+ /// Default execution settings to use when invoking this prompt function.
+ /// The name to use for the function. If null, it will default to a randomly generated name.
+ /// The description to use for the function.
+ /// The template format of <paramref name="promptTemplate"/>. This must be provided if <paramref name="promptTemplateFactory"/> is not null.
+ ///
+ /// The <see cref="IPromptTemplateFactory"/> to use when interpreting the <paramref name="promptTemplate"/> into a <see cref="IPromptTemplate"/>.
+ /// If null, a default factory will be used.
+ ///
+ /// The <see cref="ILoggerFactory"/> to use for logging. If null, no logging will be performed.
+ /// The created <see cref="KernelFunction"/> for invoking the prompt.
+ public static KernelFunction CreateFromPrompt(
+ string promptTemplate,
+ IEnumerable? executionSettings,
+ string? functionName = null,
+ string? description = null,
+ string? templateFormat = null,
+ IPromptTemplateFactory? promptTemplateFactory = null,
+ ILoggerFactory? loggerFactory = null) =>
KernelFunctionFromPrompt.Create(promptTemplate, CreateSettingsDictionary(executionSettings), functionName, description, templateFormat, promptTemplateFactory, loggerFactory);
///
@@ -141,10 +173,6 @@ public static KernelFunction CreateFromPrompt(
/// Wraps the specified settings into a dictionary with the default service ID as the key.
///
[return: NotNullIfNotNull(nameof(settings))]
- private static Dictionary<string, PromptExecutionSettings>? CreateSettingsDictionary(PromptExecutionSettings? settings) =>
- settings is null ? null :
- new Dictionary<string, PromptExecutionSettings>(1)
- {
- { PromptExecutionSettings.DefaultServiceId, settings },
- };
+ private static Dictionary<string, PromptExecutionSettings>? CreateSettingsDictionary(IEnumerable<PromptExecutionSettings>? settings) =>
+ settings?.ToDictionary(s => s.ServiceId ?? PromptExecutionSettings.DefaultServiceId, s => s);
}
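A minimal sketch of the new CreateFromPrompt overload that takes a list of execution settings, assuming the factory changes above; the service ids are illustrative:

using Microsoft.SemanticKernel;

KernelFunction summarize = KernelFunctionFactory.CreateFromPrompt(
    "Summarize: {{$input}}",
    executionSettings:
    [
        new PromptExecutionSettings { ServiceId = "fast-service" },
        new PromptExecutionSettings { ServiceId = "quality-service" },
    ],
    functionName: "Summarize");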
diff --git a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs
index 44a799a8c42a..1d357b05679f 100644
--- a/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs
+++ b/dotnet/src/SemanticKernel.Core/Functions/KernelFunctionFromPrompt.cs
@@ -123,38 +123,29 @@ protected override async ValueTask InvokeCoreAsync(
{
this.AddDefaultValues(arguments);
- var result = await this.RenderPromptAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
+ var promptRenderingResult = await this.RenderPromptAsync(kernel, arguments, cancellationToken).ConfigureAwait(false);
#pragma warning disable CS0612 // Events are deprecated
- if (result.RenderedEventArgs?.Cancel is true)
+ if (promptRenderingResult.RenderedEventArgs?.Cancel is true)
{
throw new OperationCanceledException($"A {nameof(Kernel)}.{nameof(Kernel.PromptRendered)} event handler requested cancellation after prompt rendering.");
}
#pragma warning restore CS0612 // Events are deprecated
// Return function result if it was set in prompt filter.
- if (result.FunctionResult is not null)
+ if (promptRenderingResult.FunctionResult is not null)
{
- result.FunctionResult.RenderedPrompt = result.RenderedPrompt;
- return result.FunctionResult;
+ promptRenderingResult.FunctionResult.RenderedPrompt = promptRenderingResult.RenderedPrompt;
+ return promptRenderingResult.FunctionResult;
}
- if (result.AIService is IChatCompletionService chatCompletion)
+ return promptRenderingResult.AIService switch
{
- var chatContent = await chatCompletion.GetChatMessageContentAsync(result.RenderedPrompt, result.ExecutionSettings, kernel, cancellationToken).ConfigureAwait(false);
- this.CaptureUsageDetails(chatContent.ModelId, chatContent.Metadata, this._logger);
- return new FunctionResult(this, chatContent, kernel.Culture, chatContent.Metadata) { RenderedPrompt = result.RenderedPrompt };
- }
-
- if (result.AIService is ITextGenerationService textGeneration)
- {
- var textContent = await textGeneration.GetTextContentWithDefaultParserAsync(result.RenderedPrompt, result.ExecutionSettings, kernel, cancellationToken).ConfigureAwait(false);
- this.CaptureUsageDetails(textContent.ModelId, textContent.Metadata, this._logger);
- return new FunctionResult(this, textContent, kernel.Culture, textContent.Metadata) { RenderedPrompt = result.RenderedPrompt };
- }
-
- // The service selector didn't find an appropriate service. This should only happen with a poorly implemented selector.
- throw new NotSupportedException($"The AI service {result.AIService.GetType()} is not supported. Supported services are {typeof(IChatCompletionService)} and {typeof(ITextGenerationService)}");
+ IChatCompletionService chatCompletion => await this.GetChatCompletionResultAsync(chatCompletion, kernel, promptRenderingResult, cancellationToken).ConfigureAwait(false),
+ ITextGenerationService textGeneration => await this.GetTextGenerationResultAsync(textGeneration, kernel, promptRenderingResult, cancellationToken).ConfigureAwait(false),
+ // The service selector didn't find an appropriate service. This should only happen with a poorly implemented selector.
+ _ => throw new NotSupportedException($"The AI service {promptRenderingResult.AIService.GetType()} is not supported. Supported services are {typeof(IChatCompletionService)} and {typeof(ITextGenerationService)}")
+ };
}
///
@@ -449,5 +440,67 @@ private void CaptureUsageDetails(string? modelId, IReadOnlyDictionary<string, object?>? metadata, ILogger logger)
+
+ private async Task<FunctionResult> GetChatCompletionResultAsync(
+ IChatCompletionService chatCompletion,
+ Kernel kernel,
+ PromptRenderingResult promptRenderingResult,
+ CancellationToken cancellationToken)
+ {
+ var chatContents = await chatCompletion.GetChatMessageContentsAsync(
+ promptRenderingResult.RenderedPrompt,
+ promptRenderingResult.ExecutionSettings,
+ kernel,
+ cancellationToken).ConfigureAwait(false);
+
+ if (chatContents is { Count: 0 })
+ {
+ return new FunctionResult(this, culture: kernel.Culture) { RenderedPrompt = promptRenderingResult.RenderedPrompt };
+ }
+
+ // Usage details are global and duplicated for each chat message content, use first one to get usage information
+ var chatContent = chatContents[0];
+ this.CaptureUsageDetails(chatContent.ModelId, chatContent.Metadata, this._logger);
+
+ // If collection has one element, return single result
+ if (chatContents.Count == 1)
+ {
+ return new FunctionResult(this, chatContent, kernel.Culture, chatContent.Metadata) { RenderedPrompt = promptRenderingResult.RenderedPrompt };
+ }
+
+ // Otherwise, return multiple results
+ return new FunctionResult(this, chatContents, kernel.Culture) { RenderedPrompt = promptRenderingResult.RenderedPrompt };
+ }
+
+ private async Task<FunctionResult> GetTextGenerationResultAsync(
+ ITextGenerationService textGeneration,
+ Kernel kernel,
+ PromptRenderingResult promptRenderingResult,
+ CancellationToken cancellationToken)
+ {
+ var textContents = await textGeneration.GetTextContentsWithDefaultParserAsync(
+ promptRenderingResult.RenderedPrompt,
+ promptRenderingResult.ExecutionSettings,
+ kernel,
+ cancellationToken).ConfigureAwait(false);
+
+ if (textContents is { Count: 0 })
+ {
+ return new FunctionResult(this, culture: kernel.Culture) { RenderedPrompt = promptRenderingResult.RenderedPrompt };
+ }
+
+ // Usage details are global and duplicated for each text content, use first one to get usage information
+ var textContent = textContents[0];
+ this.CaptureUsageDetails(textContent.ModelId, textContent.Metadata, this._logger);
+
+ // If collection has one element, return single result
+ if (textContents.Count == 1)
+ {
+ return new FunctionResult(this, textContent, kernel.Culture, textContent.Metadata) { RenderedPrompt = promptRenderingResult.RenderedPrompt };
+ }
+
+ // Otherwise, return multiple results
+ return new FunctionResult(this, textContents, kernel.Culture) { RenderedPrompt = promptRenderingResult.RenderedPrompt };
+ }
+
#endregion
}
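A minimal sketch of consuming the single- vs. multiple-result behavior introduced above; kernel and function are placeholders for an already configured Kernel and prompt function, and the GetValue calls mirror the unit tests later in this diff:

using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

static async Task ShowResultAsync(Kernel kernel, KernelFunction function)
{
    FunctionResult result = await kernel.InvokeAsync(function);

    // With one completion, the value is the single content item (readable as a string).
    Console.WriteLine(result.GetValue<string>());

    // With several completions, the value is the whole content collection instead, e.g.
    // result.GetValue<IReadOnlyList<ChatMessageContent>>() for a chat completion service.
}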
diff --git a/dotnet/src/SemanticKernel.Core/KernelExtensions.cs b/dotnet/src/SemanticKernel.Core/KernelExtensions.cs
index a05340a64775..6a96395cedea 100644
--- a/dotnet/src/SemanticKernel.Core/KernelExtensions.cs
+++ b/dotnet/src/SemanticKernel.Core/KernelExtensions.cs
@@ -109,6 +109,42 @@ public static KernelFunction CreateFunctionFromPrompt(
kernel.LoggerFactory);
}
+ ///
+ /// Creates a <see cref="KernelFunction"/> instance for a prompt specified via a prompt template.
+ ///
+ /// The <see cref="Kernel"/> containing services, plugins, and other state for use throughout the operation.
+ /// Prompt template for the function.
+ /// List of execution settings to use when invoking this prompt function.
+ /// The name to use for the function. If null, it will default to a randomly generated name.
+ /// The description to use for the function.
+ /// The template format of <paramref name="promptTemplate"/>. This must be provided if <paramref name="promptTemplateFactory"/> is not null.
+ ///
+ /// The <see cref="IPromptTemplateFactory"/> to use when interpreting the <paramref name="promptTemplate"/> into a <see cref="IPromptTemplate"/>.
+ /// If null, a default factory will be used.
+ ///
+ /// The created <see cref="KernelFunction"/> for invoking the prompt.
+ public static KernelFunction CreateFunctionFromPrompt(
+ this Kernel kernel,
+ string promptTemplate,
+ IEnumerable<PromptExecutionSettings>? executionSettings,
+ string? functionName = null,
+ string? description = null,
+ string? templateFormat = null,
+ IPromptTemplateFactory? promptTemplateFactory = null)
+ {
+ Verify.NotNull(kernel);
+ Verify.NotNull(promptTemplate);
+
+ return KernelFunctionFactory.CreateFromPrompt(
+ promptTemplate,
+ executionSettings,
+ functionName,
+ description,
+ templateFormat,
+ promptTemplateFactory,
+ kernel.LoggerFactory);
+ }
+
///
/// Creates a <see cref="KernelFunction"/> instance for a prompt specified via a prompt template configuration.
///
diff --git a/dotnet/src/SemanticKernel.Core/PromptTemplate/EchoPromptTemplate.cs b/dotnet/src/SemanticKernel.Core/PromptTemplate/EchoPromptTemplate.cs
new file mode 100644
index 000000000000..c66b5db31566
--- /dev/null
+++ b/dotnet/src/SemanticKernel.Core/PromptTemplate/EchoPromptTemplate.cs
@@ -0,0 +1,33 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Threading;
+using System.Threading.Tasks;
+
+namespace Microsoft.SemanticKernel;
+
+///
+/// Implementation of <see cref="IPromptTemplate"/> that just returns the prompt template.
+///
+internal sealed class EchoPromptTemplate : IPromptTemplate
+{
+ private readonly PromptTemplateConfig _promptConfig;
+ private readonly Task<string> _renderResult;
+
+ ///
+ /// Constructor for <see cref="EchoPromptTemplate"/>.
+ ///
+ /// Prompt template configuration
+ internal EchoPromptTemplate(PromptTemplateConfig promptConfig)
+ {
+ Verify.NotNull(promptConfig, nameof(promptConfig));
+ Verify.NotNull(promptConfig.Template, nameof(promptConfig.Template));
+
+ this._promptConfig = promptConfig;
+ this._renderResult = Task.FromResult(this._promptConfig.Template);
+ }
+
+ /// <inheritdoc/>
+#pragma warning disable VSTHRD003 // Avoid awaiting foreign Tasks
+ public Task<string> RenderAsync(Kernel kernel, KernelArguments? arguments = null, CancellationToken cancellationToken = default) => this._renderResult;
+#pragma warning restore VSTHRD003 // Avoid awaiting foreign Tasks
+}
diff --git a/dotnet/src/SemanticKernel.Core/PromptTemplate/EchoPromptTemplateFactory.cs b/dotnet/src/SemanticKernel.Core/PromptTemplate/EchoPromptTemplateFactory.cs
new file mode 100644
index 000000000000..fe7697d19547
--- /dev/null
+++ b/dotnet/src/SemanticKernel.Core/PromptTemplate/EchoPromptTemplateFactory.cs
@@ -0,0 +1,24 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Diagnostics.CodeAnalysis;
+
+namespace Microsoft.SemanticKernel;
+
+///
+/// Provides an implementation of <see cref="IPromptTemplateFactory"/> which creates no operation instances of <see cref="IPromptTemplate"/>.
+///
+public sealed class EchoPromptTemplateFactory : IPromptTemplateFactory
+{
+ ///
+ /// Singleton instance of <see cref="EchoPromptTemplateFactory"/>.
+ ///
+ public static EchoPromptTemplateFactory Instance { get; } = new EchoPromptTemplateFactory();
+
+ ///
+ public bool TryCreate(PromptTemplateConfig templateConfig, [NotNullWhen(true)] out IPromptTemplate? result)
+ {
+ result = new EchoPromptTemplate(templateConfig);
+
+ return true;
+ }
+}
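A minimal sketch of the echo template returning the raw template text, assuming the two types above; the prompt text is illustrative:

using System;
using Microsoft.SemanticKernel;

var config = new PromptTemplateConfig("This is my prompt {{$input}}");

if (EchoPromptTemplateFactory.Instance.TryCreate(config, out IPromptTemplate? template))
{
    // No rendering happens, so template variables are left untouched.
    string rendered = await template.RenderAsync(new Kernel());
    Console.WriteLine(rendered); // This is my prompt {{$input}}
}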
diff --git a/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplate.cs b/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplate.cs
index 132e18bc2edb..83abe231becb 100644
--- a/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplate.cs
+++ b/dotnet/src/SemanticKernel.Core/PromptTemplate/KernelPromptTemplate.cs
@@ -27,7 +27,7 @@ namespace Microsoft.SemanticKernel;
internal sealed class KernelPromptTemplate : IPromptTemplate
{
///
- /// Constructor for PromptTemplate.
+ /// Constructor for <see cref="KernelPromptTemplate"/>.
///
/// Prompt template configuration
/// Flag indicating whether to allow potentially dangerous content to be inserted into the prompt
diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/StreamingKernelContentItemCollectionTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/StreamingKernelContentItemCollectionTests.cs
new file mode 100644
index 000000000000..b85c61e01546
--- /dev/null
+++ b/dotnet/src/SemanticKernel.UnitTests/AI/ChatCompletion/StreamingKernelContentItemCollectionTests.cs
@@ -0,0 +1,218 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Collections;
+using System.Collections.Generic;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Xunit;
+
+namespace SemanticKernel.UnitTests.AI.ChatCompletion;
+
+public class StreamingKernelContentItemCollectionTests
+{
+ [Fact]
+ public void ItShouldBeEmptyByDefault()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+
+ // Assert
+ Assert.Empty(collection);
+ }
+
+ [Fact]
+ public void ItShouldBePossibleToAddItemToTheCollection()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ var item = new StreamingTextContent("fake-text");
+
+ // Act
+ collection.Add(item);
+
+ // Assert
+ Assert.Single(collection);
+ Assert.Same(item, collection[0]);
+ }
+
+ [Fact]
+ public void ItShouldBePossibleToAccessItemByIndex()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+
+ var item1 = new StreamingTextContent("fake-text");
+ collection.Add(item1);
+
+ // Act
+ var retrievedItem = collection[0];
+
+ // Assert
+ Assert.Same(item1, retrievedItem);
+ }
+
+ [Fact]
+ public void ItShouldBeEmptyAfterClear()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ collection.Add(new StreamingTextContent("fake-text"));
+
+ // Act
+ collection.Clear();
+
+ // Assert
+ Assert.Empty(collection);
+ }
+
+ [Fact]
+ public void ItShouldContainItemAfterAdd()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ var item = new StreamingTextContent("fake-text");
+
+ // Act
+ collection.Add(item);
+
+ // Assert
+ Assert.Contains(item, collection);
+ }
+
+ [Fact]
+ public void ItShouldCopyItemsToAnArray()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ var item1 = new StreamingTextContent("fake-text1");
+ var item2 = new StreamingTextContent("fake-text2");
+ collection.Add(item1);
+ collection.Add(item2);
+
+ // Act
+ var array = new StreamingKernelContent[2];
+ collection.CopyTo(array, 0);
+
+ // Assert
+ Assert.Equal(new[] { item1, item2 }, array);
+ }
+
+ [Fact]
+ public void ItShouldReturnIndexOfItem()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ var item1 = new StreamingTextContent("fake-text1");
+ var item2 = new StreamingTextContent("fake-text2");
+ collection.Add(item1);
+ collection.Add(item2);
+
+ // Act
+ var index = collection.IndexOf(item2);
+
+ // Assert
+ Assert.Equal(1, index);
+ }
+
+ [Fact]
+ public void ItShouldInsertItemIntoCollection()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ var item1 = new StreamingTextContent("fake-text1");
+ var item2 = new StreamingTextContent("fake-text2");
+ collection.Add(item1);
+
+ // Act
+ collection.Insert(0, item2);
+
+ // Assert
+ Assert.Equal(new[] { item2, item1 }, collection);
+ }
+
+ [Fact]
+ public void ItShouldRemoveItemFromCollection()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ var item1 = new StreamingTextContent("fake-text1");
+ var item2 = new StreamingTextContent("fake-text2");
+ collection.Add(item1);
+ collection.Add(item2);
+
+ // Act
+ collection.Remove(item1);
+
+ // Assert
+ Assert.Equal(new[] { item2 }, collection);
+ }
+
+ [Fact]
+ public void ItShouldRemoveItemAtSpecifiedIndex()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ var item1 = new StreamingTextContent("fake-text1");
+ var item2 = new StreamingTextContent("fake-text2");
+ collection.Add(item1);
+ collection.Add(item2);
+
+ // Act
+ collection.RemoveAt(0);
+
+ // Assert
+ Assert.Equal(new[] { item2 }, collection);
+ }
+
+ [Fact]
+ public void ItIsNotReadOnly()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+
+ // Assert
+ Assert.False(((ICollection<StreamingKernelContent>)collection).IsReadOnly);
+ }
+
+ [Fact]
+ public void ItShouldReturnEnumerator()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ var item1 = new StreamingTextContent("fake-text1");
+ var item2 = new StreamingTextContent("fake-text2");
+ collection.Add(item1);
+ collection.Add(item2);
+
+ // Act
+ var enumerator = ((IEnumerable)collection).GetEnumerator();
+
+ // Assert
+ Assert.True(enumerator.MoveNext());
+ Assert.Same(item1, enumerator.Current);
+ Assert.True(enumerator.MoveNext());
+ Assert.Same(item2, enumerator.Current);
+ Assert.False(enumerator.MoveNext());
+ }
+
+ [Fact]
+ public void ItShouldReturnGenericEnumerator()
+ {
+ // Arrange
+ var collection = new StreamingKernelContentItemCollection();
+ var item1 = new StreamingTextContent("fake-text1");
+ var item2 = new StreamingTextContent("fake-text2");
+ collection.Add(item1);
+ collection.Add(item2);
+
+ // Act
+ var enumerator = ((IEnumerable<StreamingKernelContent>)collection).GetEnumerator();
+
+ // Assert
+ Assert.True(enumerator.MoveNext());
+ Assert.Same(item1, enumerator.Current);
+ Assert.True(enumerator.MoveNext());
+ Assert.Same(item2, enumerator.Current);
+ Assert.False(enumerator.MoveNext());
+ }
+}
diff --git a/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs b/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs
index 83257b701112..dd822a091175 100644
--- a/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs
+++ b/dotnet/src/SemanticKernel.UnitTests/AI/PromptExecutionSettingsTests.cs
@@ -14,6 +14,8 @@ public void PromptExecutionSettingsCloneWorksAsExpected()
// Arrange
string configPayload = """
{
+ "model_id": "gpt-3",
+ "service_id": "service-1",
"max_tokens": 60,
"temperature": 0.5,
"top_p": 0.0,
@@ -30,6 +32,36 @@ public void PromptExecutionSettingsCloneWorksAsExpected()
Assert.NotNull(clone);
Assert.Equal(executionSettings.ModelId, clone.ModelId);
Assert.Equivalent(executionSettings.ExtensionData, clone.ExtensionData);
+ Assert.Equal(executionSettings.ServiceId, clone.ServiceId);
+ }
+
+ [Fact]
+ public void PromptExecutionSettingsSerializationWorksAsExpected()
+ {
+ // Arrange
+ string configPayload = """
+ {
+ "model_id": "gpt-3",
+ "service_id": "service-1",
+ "max_tokens": 60,
+ "temperature": 0.5,
+ "top_p": 0.0,
+ "presence_penalty": 0.0,
+ "frequency_penalty": 0.0
+ }
+ """;
+
+ // Act
+ var executionSettings = JsonSerializer.Deserialize<PromptExecutionSettings>(configPayload);
+
+ // Assert
+ Assert.NotNull(executionSettings);
+ Assert.Equal("gpt-3", executionSettings.ModelId);
+ Assert.Equal("service-1", executionSettings.ServiceId);
+ Assert.Equal(60, ((JsonElement)executionSettings.ExtensionData!["max_tokens"]).GetInt32());
+ Assert.Equal(0.5, ((JsonElement)executionSettings.ExtensionData!["temperature"]).GetDouble());
+ Assert.Equal(0.0, ((JsonElement)executionSettings.ExtensionData!["top_p"]).GetDouble());
+ Assert.Equal(0.0, ((JsonElement)executionSettings.ExtensionData!["presence_penalty"]).GetDouble());
}
[Fact]
diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs
index a25376128f2d..fdbd4cae0524 100644
--- a/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs
+++ b/dotnet/src/SemanticKernel.UnitTests/Contents/ChatMessageContentTests.cs
@@ -55,8 +55,10 @@ public void ContentPropertySetterShouldAddTextContentToItemsCollection()
Assert.Contains(sut.Items, item => item is TextContent textContent && textContent.Text == "fake-content");
}
- [Fact]
- public void ContentPropertySetterShouldUpdateContentOfFirstTextContentItem()
+ [Theory]
+ [InlineData(null)]
+ [InlineData("fake-content-1-update")]
+ public void ContentPropertySetterShouldUpdateContentOfFirstTextContentItem(string? content)
{
// Arrange
var items = new ChatMessageContentItemCollection
@@ -68,10 +70,23 @@ public void ContentPropertySetterShouldUpdateContentOfFirstTextContentItem()
var sut = new ChatMessageContent(AuthorRole.User, items: items)
{
- Content = "fake-content-1-update"
+ Content = content
+ };
+
+ Assert.Equal(content, ((TextContent)sut.Items[1]).Text);
+ }
+
+ [Fact]
+ public void ContentPropertySetterShouldNotAddTextContentToItemsCollection()
+ {
+ // Arrange
+ var sut = new ChatMessageContent(AuthorRole.User, content: null)
+ {
+ Content = null
};
- Assert.Equal("fake-content-1-update", ((TextContent)sut.Items[1]).Text);
+ // Assert
+ Assert.Empty(sut.Items);
}
[Fact]
diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/FunctionCallContentBuilderTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/FunctionCallContentBuilderTests.cs
new file mode 100644
index 000000000000..452f4e9fbca7
--- /dev/null
+++ b/dotnet/src/SemanticKernel.UnitTests/Contents/FunctionCallContentBuilderTests.cs
@@ -0,0 +1,138 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Linq;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Xunit;
+
+namespace SemanticKernel.UnitTests.Contents;
+
+public class FunctionCallContentBuilderTests
+{
+ [Fact]
+ public void ItShouldBuildFunctionCallContentForOneFunction()
+ {
+ // Arrange
+ var sut = new FunctionCallContentBuilder();
+
+ // Act
+ var update1 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 1, functionCallIndex: 2, callId: "f_101", name: null, arguments: null);
+ sut.Append(update1);
+
+ var update2 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 1, functionCallIndex: 2, callId: null, name: "WeatherUtils-GetTemperature", arguments: null);
+ sut.Append(update2);
+
+ var update3 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 1, functionCallIndex: 2, callId: null, name: null, arguments: "{\"city\":");
+ sut.Append(update3);
+
+ var update4 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 1, functionCallIndex: 2, callId: null, name: null, arguments: "\"Seattle\"}");
+ sut.Append(update4);
+
+ var functionCalls = sut.Build();
+
+ // Assert
+ var functionCall = Assert.Single(functionCalls);
+
+ Assert.Equal("f_101", functionCall.Id);
+ Assert.Equal("WeatherUtils", functionCall.PluginName);
+ Assert.Equal("GetTemperature", functionCall.FunctionName);
+
+ Assert.NotNull(functionCall.Arguments);
+ Assert.Equal("Seattle", functionCall.Arguments["city"]);
+
+ Assert.Null(functionCall.Exception);
+ }
+
+ [Fact]
+ public void ItShouldBuildFunctionCallContentForManyFunctions()
+ {
+ // Arrange
+ var sut = new FunctionCallContentBuilder();
+
+ // Act
+ var f1_update1 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 0, functionCallIndex: 1, callId: "f_1", name: "WeatherUtils-GetTemperature", arguments: null);
+ sut.Append(f1_update1);
+
+ var f2_update1 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 0, functionCallIndex: 2, callId: null, name: "WeatherUtils-GetHumidity", arguments: null);
+ sut.Append(f2_update1);
+
+ var f2_update2 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 0, functionCallIndex: 2, callId: "f_2", name: null, arguments: null);
+ sut.Append(f2_update2);
+
+ var f1_update2 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 0, functionCallIndex: 1, callId: null, name: null, arguments: "{\"city\":");
+ sut.Append(f1_update2);
+
+ var f2_update3 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 0, functionCallIndex: 2, callId: null, name: null, arguments: "{\"city\":");
+ sut.Append(f2_update3);
+
+ var f1_update3 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 0, functionCallIndex: 1, callId: null, name: null, arguments: "\"Seattle\"}");
+ sut.Append(f1_update3);
+
+ var f2_update4 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 0, functionCallIndex: 2, callId: null, name: null, arguments: "\"Georgia\"}");
+ sut.Append(f2_update4);
+
+ var functionCalls = sut.Build();
+
+ // Assert
+ Assert.Equal(2, functionCalls.Count);
+
+ var functionCall1 = functionCalls.ElementAt(0);
+ Assert.Equal("f_1", functionCall1.Id);
+ Assert.Equal("WeatherUtils", functionCall1.PluginName);
+ Assert.Equal("GetTemperature", functionCall1.FunctionName);
+ Assert.Equal("Seattle", functionCall1.Arguments?["city"]);
+ Assert.Null(functionCall1.Exception);
+
+ var functionCall2 = functionCalls.ElementAt(1);
+ Assert.Equal("f_2", functionCall2.Id);
+ Assert.Equal("WeatherUtils", functionCall2.PluginName);
+ Assert.Equal("GetHumidity", functionCall2.FunctionName);
+ Assert.Equal("Georgia", functionCall2.Arguments?["city"]);
+ Assert.Null(functionCall2.Exception);
+ }
+
+ [Fact]
+ public void ItShouldCaptureArgumentsDeserializationException()
+ {
+ // Arrange
+ var sut = new FunctionCallContentBuilder();
+
+ // Act
+ var update1 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 1, functionCallIndex: 2, callId: "f_101", name: "WeatherUtils-GetTemperature", arguments: null);
+ sut.Append(update1);
+
+ var update2 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 1, functionCallIndex: 2, callId: null, name: null, arguments: "{\"city\":");
+ sut.Append(update2);
+
+ // Invalid JSON - double closing braces - }}
+ var update3 = CreateStreamingContentWithFunctionCallUpdate(choiceIndex: 1, functionCallIndex: 2, callId: null, name: null, arguments: "\"Seattle\"}}");
+ sut.Append(update3);
+
+ var functionCalls = sut.Build();
+
+ // Assert
+ var functionCall = Assert.Single(functionCalls);
+
+ Assert.Equal("f_101", functionCall.Id);
+ Assert.Equal("WeatherUtils", functionCall.PluginName);
+ Assert.Equal("GetTemperature", functionCall.FunctionName);
+ Assert.Null(functionCall.Arguments);
+ Assert.NotNull(functionCall.Exception);
+ }
+
+ private static StreamingChatMessageContent CreateStreamingContentWithFunctionCallUpdate(int choiceIndex, int functionCallIndex, string? callId, string? name, string? arguments)
+ {
+ var content = new StreamingChatMessageContent(AuthorRole.Assistant, null);
+
+ content.Items.Add(new StreamingFunctionCallUpdateContent
+ {
+ ChoiceIndex = choiceIndex,
+ FunctionCallIndex = functionCallIndex,
+ CallId = callId,
+ Name = name,
+ Arguments = arguments,
+ });
+
+ return content;
+ }
+}
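A minimal sketch of how the builder exercised by these tests could be fed from a streaming chat completion (experimental, SKEXP0001); chatService, chatHistory, and kernel are placeholders for already configured instances:

using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.ChatCompletion;

static async Task CollectFunctionCallsAsync(IChatCompletionService chatService, ChatHistory chatHistory, Kernel kernel)
{
    var builder = new FunctionCallContentBuilder();

    await foreach (StreamingChatMessageContent update in
        chatService.GetStreamingChatMessageContentsAsync(chatHistory, kernel: kernel))
    {
        builder.Append(update); // accumulates partial call ids, names, and argument fragments
    }

    foreach (FunctionCallContent call in builder.Build())
    {
        Console.WriteLine($"{call.PluginName}-{call.FunctionName} ({call.Id})");
    }
}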
diff --git a/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingChatMessageContentTests.cs b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingChatMessageContentTests.cs
new file mode 100644
index 000000000000..f7f7c5e43be7
--- /dev/null
+++ b/dotnet/src/SemanticKernel.UnitTests/Contents/StreamingChatMessageContentTests.cs
@@ -0,0 +1,161 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Text;
+using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Xunit;
+
+namespace SemanticKernel.UnitTests.Contents;
+public class StreamingChatMessageContentTests
+{
+ [Fact]
+ public void ConstructorShouldAddTextContentToItemsCollectionIfContentProvided()
+ {
+ // Arrange & act
+ var sut = new StreamingChatMessageContent(AuthorRole.User, "fake-content");
+
+ // Assert
+ Assert.Single(sut.Items);
+
+ Assert.Contains(sut.Items, item => item is StreamingTextContent textContent && textContent.Text == "fake-content");
+ }
+
+ [Fact]
+ public void ConstructorShouldNotAddTextContentToItemsCollectionIfNoContentProvided()
+ {
+ // Arrange & act
+ var sut = new StreamingChatMessageContent(AuthorRole.User, content: null);
+
+ // Assert
+ Assert.Empty(sut.Items);
+ }
+
+ [Fact]
+ public void ContentPropertySetterShouldAddTextContentToItemsCollection()
+ {
+ // Arrange
+ var sut = new StreamingChatMessageContent(AuthorRole.User, content: null)
+ {
+ Content = "fake-content"
+ };
+
+ // Assert
+ Assert.Single(sut.Items);
+
+ Assert.Contains(sut.Items, item => item is StreamingTextContent textContent && textContent.Text == "fake-content");
+ }
+
+ [Fact]
+ public void ContentPropertySetterShouldNotAddTextContentToItemsCollection()
+ {
+ // Arrange
+ var sut = new StreamingChatMessageContent(AuthorRole.User, content: null)
+ {
+ Content = null
+ };
+
+ // Assert
+ Assert.Empty(sut.Items);
+ }
+
+ [Theory]
+ [InlineData(null)]
+ [InlineData("content-update")]
+ public void ContentPropertySetterShouldUpdateContentOfFirstTextContentItem(string? content)
+ {
+ // Arrange
+ var items = new StreamingKernelContentItemCollection
+ {
+ new StreamingTextContent("fake-content-1"),
+ new StreamingTextContent("fake-content-2")
+ };
+
+ var sut = new StreamingChatMessageContent(AuthorRole.User, content: null);
+ sut.Items = items;
+ sut.Content = content;
+
+ Assert.Equal(content, ((StreamingTextContent)sut.Items[0]).Text);
+ }
+
+ [Fact]
+ public void ContentPropertyGetterShouldReturnNullIfThereAreNoTextContentItems()
+ {
+ // Arrange and act
+ var sut = new StreamingChatMessageContent(AuthorRole.User, content: null);
+
+ // Assert
+ Assert.Null(sut.Content);
+ Assert.Equal(string.Empty, sut.ToString());
+ }
+
+ [Fact]
+ public void ContentPropertyGetterShouldReturnContentOfTextContentItem()
+ {
+ // Arrange
+ var sut = new StreamingChatMessageContent(AuthorRole.User, "fake-content");
+
+ // Act and assert
+ Assert.Equal("fake-content", sut.Content);
+ Assert.Equal("fake-content", sut.ToString());
+ }
+
+ [Fact]
+ public void ContentPropertyGetterShouldReturnContentOfTheFirstTextContentItem()
+ {
+ // Arrange
+ var items = new StreamingKernelContentItemCollection
+ {
+ new StreamingTextContent("fake-content-1"),
+ new StreamingTextContent("fake-content-2")
+ };
+
+ var sut = new StreamingChatMessageContent(AuthorRole.User, content: null)
+ {
+ Items = items
+ };
+
+ // Act and assert
+ Assert.Equal("fake-content-1", sut.Content);
+ }
+
+ [Fact]
+ public void ItShouldBePossibleToSetAndGetEncodingEvenIfThereAreNoItems()
+ {
+ // Arrange
+ var sut = new StreamingChatMessageContent(AuthorRole.User, content: null)
+ {
+ Encoding = Encoding.UTF32
+ };
+
+ // Assert
+ Assert.Empty(sut.Items);
+ Assert.Equal(Encoding.UTF32, sut.Encoding);
+ }
+
+ [Fact]
+ public void EncodingPropertySetterShouldUpdateEncodingTextContentItem()
+ {
+ // Arrange
+ var sut = new StreamingChatMessageContent(AuthorRole.User, content: "fake-content")
+ {
+ Encoding = Encoding.UTF32
+ };
+
+ // Assert
+ Assert.Single(sut.Items);
+ Assert.Equal(Encoding.UTF32, ((StreamingTextContent)sut.Items[0]).Encoding);
+ }
+
+ [Fact]
+ public void EncodingPropertyGetterShouldReturnEncodingOfTextContentItem()
+ {
+ // Arrange
+ var sut = new StreamingChatMessageContent(AuthorRole.User, content: "fake-content");
+
+ // Act
+ ((StreamingTextContent)sut.Items[0]).Encoding = Encoding.Latin1;
+
+ // Assert
+ Assert.Equal(Encoding.Latin1, sut.Encoding);
+ }
+}
diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs
index a9d1625e79e7..8899668fd573 100644
--- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs
+++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelArgumentsTests.cs
@@ -1,6 +1,8 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Collections.Generic;
+using System.Linq;
using Microsoft.SemanticKernel;
using Xunit;
@@ -62,6 +64,43 @@ public void ItCanBeCreatedWithBothExecutionSettingsAndArguments()
Assert.Equal("fake-value", argument.Value);
}
+ [Fact]
+ public void ItCanBeCreatedWithMultipleExecutionSettingsAndArguments()
+ {
+ // Arrange
+ var executionSettings1 = new PromptExecutionSettings();
+ var executionSettings2 = new PromptExecutionSettings() { ServiceId = "service-2" };
+ var executionSettings3 = new PromptExecutionSettings() { ServiceId = "service-3" };
+
+ // Act
+ KernelArguments sut = new([executionSettings1, executionSettings2, executionSettings3]) { { "fake-key", "fake-value" } };
+
+ // Assert
+ Assert.Same(executionSettings1, sut.ExecutionSettings?[PromptExecutionSettings.DefaultServiceId]);
+ Assert.Same(executionSettings2, sut.ExecutionSettings?["service-2"]);
+ Assert.Same(executionSettings3, sut.ExecutionSettings?["service-3"]);
+
+ var argument = Assert.Single(sut);
+ Assert.Equal("fake-key", argument.Key);
+ Assert.Equal("fake-value", argument.Value);
+ }
+
+ [Theory]
+ [InlineData(null, null)]
+ [InlineData("default", null)]
+ [InlineData(null, "default")]
+ [InlineData("service1", null, "service1")]
+ [InlineData(null, "service2", "service2")]
+ [InlineData("service1", "service2", "service3", null, "service1")]
+ public void ItCannotBeCreatedWithMultipleExecutionSettingsWithClashingServiceIdOrWithoutServiceIdSet(params string?[] serviceIds)
+ {
+ // Arrange
+ var executionSettingsList = serviceIds?.Select(serviceId => new PromptExecutionSettings() { ServiceId = serviceId }).ToList();
+
+ // Act & Assert
+ Assert.Throws<ArgumentException>(() => new KernelArguments(executionSettingsList) { { "fake-key", "fake-value" } });
+ }
+
[Fact]
public void ItCanPerformCaseInsensitiveSearch()
{
diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs
index ea36d8864d17..57010c640b91 100644
--- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs
+++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelExtensionsTests.cs
@@ -1,6 +1,12 @@
// Copyright (c) Microsoft. All rights reserved.
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Extensions.DependencyInjection;
using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
+using Microsoft.SemanticKernel.TextGeneration;
+using Moq;
using Xunit;
namespace SemanticKernel.UnitTests.Functions;
@@ -65,6 +71,37 @@ public void CreatePluginFromDescriptionAndFunctions()
Assert.True(plugin.Contains("Function2"));
}
+ [Fact]
+ public async Task CreateFunctionFromPromptWithMultipleSettingsUseCorrectServiceAsync()
+ {
+ // Arrange
+ var mockTextGeneration1 = new Mock<ITextGenerationService>();
+ var mockTextGeneration2 = new Mock<IChatCompletionService>();
+ var fakeTextContent = new TextContent("llmResult");
+ var fakeChatContent = new ChatMessageContent(AuthorRole.User, "content");
+
+ mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny<string>(), It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>())).ReturnsAsync([fakeTextContent]);
+ mockTextGeneration2.Setup(c => c.GetChatMessageContentsAsync(It.IsAny<ChatHistory>(), It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>())).ReturnsAsync([fakeChatContent]);
+
+ IKernelBuilder builder = Kernel.CreateBuilder();
+ builder.Services.AddKeyedSingleton<ITextGenerationService>("service1", mockTextGeneration1.Object);
+ builder.Services.AddKeyedSingleton<IChatCompletionService>("service2", mockTextGeneration2.Object);
+ builder.Services.AddKeyedSingleton<ITextGenerationService>("service3", mockTextGeneration1.Object);
+ Kernel kernel = builder.Build();
+
+ KernelFunction function = kernel.CreateFunctionFromPrompt("coolfunction", [
+ new PromptExecutionSettings { ServiceId = "service5" }, // Should ignore this as service5 is not registered
+ new PromptExecutionSettings { ServiceId = "service2" },
+ ]);
+
+ // Act
+ await kernel.InvokeAsync(function);
+
+ // Assert
+ mockTextGeneration1.Verify(a => a.GetTextContentsAsync("coolfunction", It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>()), Times.Never());
+ mockTextGeneration2.Verify(a => a.GetChatMessageContentsAsync(It.IsAny<ChatHistory>(), It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>()), Times.Once());
+ }
+
[Fact]
public void ImportPluginFromFunctions()
{
diff --git a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs
index ae9838e77414..a1080983efc1 100644
--- a/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs
+++ b/dotnet/src/SemanticKernel.UnitTests/Functions/KernelFunctionFromPromptTests.cs
@@ -116,6 +116,60 @@ public async Task ItUsesServiceIdWhenProvidedAsync()
mockTextGeneration2.Verify(a => a.GetTextContentsAsync("template", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never());
}
+ [Fact]
+ public async Task ItUsesServiceIdWhenProvidedInMethodAsync()
+ {
+ // Arrange
+ var mockTextGeneration1 = new Mock<ITextGenerationService>();
+ var mockTextGeneration2 = new Mock<ITextGenerationService>();
+ var fakeTextContent = new TextContent("llmResult");
+
+ mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny<string>(), It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>())).ReturnsAsync([fakeTextContent]);
+ mockTextGeneration2.Setup(c => c.GetTextContentsAsync(It.IsAny<string>(), It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>())).ReturnsAsync([fakeTextContent]);
+
+ IKernelBuilder builder = Kernel.CreateBuilder();
+ builder.Services.AddKeyedSingleton<ITextGenerationService>("service1", mockTextGeneration1.Object);
+ builder.Services.AddKeyedSingleton<ITextGenerationService>("service2", mockTextGeneration2.Object);
+ Kernel kernel = builder.Build();
+
+ var func = kernel.CreateFunctionFromPrompt("my prompt", [new PromptExecutionSettings { ServiceId = "service2" }]);
+
+ // Act
+ await kernel.InvokeAsync(func);
+
+ // Assert
+ mockTextGeneration1.Verify(a => a.GetTextContentsAsync("my prompt", It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>()), Times.Never());
+ mockTextGeneration2.Verify(a => a.GetTextContentsAsync("my prompt", It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>()), Times.Once());
+ }
+
+ [Fact]
+ public async Task ItUsesChatServiceIdWhenProvidedInMethodAsync()
+ {
+ // Arrange
+ var mockTextGeneration1 = new Mock();
+ var mockTextGeneration2 = new Mock();
+ var fakeTextContent = new TextContent("llmResult");
+ var fakeChatContent = new ChatMessageContent(AuthorRole.User, "content");
+
+ mockTextGeneration1.Setup(c => c.GetTextContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync([fakeTextContent]);
+ mockTextGeneration2.Setup(c => c.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())).ReturnsAsync([fakeChatContent]);
+
+ IKernelBuilder builder = Kernel.CreateBuilder();
+ builder.Services.AddKeyedSingleton("service1", mockTextGeneration1.Object);
+ builder.Services.AddKeyedSingleton("service2", mockTextGeneration2.Object);
+ builder.Services.AddKeyedSingleton("service3", mockTextGeneration1.Object);
+ Kernel kernel = builder.Build();
+
+ var func = kernel.CreateFunctionFromPrompt("my prompt", [new PromptExecutionSettings { ServiceId = "service2" }]);
+
+ // Act
+ await kernel.InvokeAsync(func);
+
+ // Assert
+ mockTextGeneration1.Verify(a => a.GetTextContentsAsync("my prompt", It.IsAny(), It.IsAny(), It.IsAny()), Times.Never());
+ mockTextGeneration2.Verify(a => a.GetChatMessageContentsAsync(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny()), Times.Once());
+ }
+
[Fact]
public async Task ItFailsIfInvalidServiceIdIsProvidedAsync()
{
@@ -654,6 +708,197 @@ public async Task ItUsesPromptAsUserMessageAsync(KernelInvocationType invocation
Assert.Equal("Test prompt as user message", messageContent.Content);
}
+ [Theory]
+ [InlineData("semantic-kernel", "This is my prompt {{$input}}")]
+ [InlineData("handlebars", "This is my prompt {{input}}")]
+ public async Task ItUsesPromptWithEchoPromptTemplateFactoryAsync(string templateFormat, string template)
+ {
+ // Arrange
+ var mockTextGeneration = new Mock<ITextGenerationService>();
+ var fakeTextContent = new TextContent(template);
+
+ mockTextGeneration.Setup(c => c.GetTextContentsAsync(It.Is<string>(p => p.Equals(template, StringComparison.Ordinal)), It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>())).ReturnsAsync([fakeTextContent]);
+
+ IKernelBuilder builder = Kernel.CreateBuilder();
+ builder.Services.AddKeyedSingleton<ITextGenerationService>("x", mockTextGeneration.Object);
+ Kernel kernel = builder.Build();
+
+ var promptConfig = new PromptTemplateConfig(template) { TemplateFormat = templateFormat };
+ var func = kernel.CreateFunctionFromPrompt(promptConfig, promptTemplateFactory: new EchoPromptTemplateFactory());
+ var args = new KernelArguments();
+ args["input"] = "Some Input";
+
+ // Act
+ var result = await kernel.InvokeAsync(func, args);
+
+ // Assert
+ mockTextGeneration.Verify(a => a.GetTextContentsAsync(template, It.IsAny<PromptExecutionSettings>(), It.IsAny<Kernel>(), It.IsAny<CancellationToken>()), Times.Once());
+ Assert.Equal(template, result.GetValue<string>());
+ }
+
+ [Fact]
+ public async Task InvokePromptAsyncWithTextGenerationReturnsSingleResultAsync()
+ {
+ // Arrange
+ var expectedTextContent = new TextContent("text", "model-id", metadata: new Dictionary<string, object?> { { "key", "value" } });
+ var mockTextGenerationService = this.GetMockTextGenerationService(textContents: [expectedTextContent]);
+
+ KernelBuilder builder = new();
+ builder.Services.AddTransient<ITextGenerationService>((sp) => mockTextGenerationService.Object);
+ Kernel kernel = builder.Build();
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Prompt");
+
+ // Assert
+ Assert.Equal("text", result.GetValue<string>());
+ Assert.Equal("text", result.GetValue<TextContent>()!.ToString());
+
+ var actualTextContent = result.GetValue<TextContent>();
+
+ Assert.NotNull(actualTextContent);
+ Assert.Equal(result.Metadata, actualTextContent.Metadata);
+
+ Assert.Equal(expectedTextContent.ModelId, actualTextContent.ModelId);
+ Assert.Equal(expectedTextContent.Text, actualTextContent.Text);
+ Assert.Equal(expectedTextContent.Metadata, actualTextContent.Metadata);
+ }
+
+ [Fact]
+ public async Task InvokePromptAsyncWithTextGenerationReturnsMultipleResultsAsync()
+ {
+ // Arrange
+ List<TextContent> expectedTextContents =
+ [
+ new TextContent("text1", "model-id", metadata: new Dictionary<string, object?> { { "key1", "value1" } }),
+ new TextContent("text2", "model-id", metadata: new Dictionary<string, object?> { { "key2", "value2" } }),
+ ];
+
+ var mockTextGenerationService = this.GetMockTextGenerationService(textContents: expectedTextContents);
+
+ KernelBuilder builder = new();
+ builder.Services.AddTransient<ITextGenerationService>((sp) => mockTextGenerationService.Object);
+ Kernel kernel = builder.Build();
+
+ // Act
+ var result = await kernel.InvokePromptAsync("Prompt");
+
+ // Assert
+ Assert.Throws<InvalidCastException>(() => result.GetValue<TextContent>());
+ Assert.Throws<InvalidCastException>(() => result.GetValue<string>());
+
+ var actualTextContents = result.GetValue<IReadOnlyList<TextContent>>();
+
+ Assert.NotNull(actualTextContents);
+ Assert.Null(result.Metadata);
+
+ Assert.Equal(expectedTextContents.Count, actualTextContents.Count);
+
+ for (var i = 0; i < expectedTextContents.Count; i++)
+ {
+ Assert.Equal(expectedTextContents[i].ModelId, actualTextContents[i].ModelId);
+ Assert.Equal(expectedTextContents[i].Text, actualTextContents[i].Text);
+ Assert.Equal(expectedTextContents[i].Metadata, actualTextContents[i].Metadata);
+ }
+ }
+
+ [Fact]
+ public async Task InvokePromptAsyncWithChatCompletionReturnsSingleResultAsync()
+ {
+ // Arrange
+ var expectedChatMessageContent = new ChatMessageContent(AuthorRole.Assistant, "chat-message", "model-id", new Dictionary<string, object?> { { "key", "value" } });
+ var mockChatCompletionService = this.GetMockChatCompletionService(chatMessageContents: [expectedChatMessageContent]);
+
+ KernelBuilder builder = new();
+ builder.Services.AddTransient