.Net: Adds an AI connector for Anthropic #3476
@@ -0,0 +1,29 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Threading.Tasks;
using Microsoft.SemanticKernel;

// Example usage of Anthropic's chat completion service.
// ReSharper disable once InconsistentNaming
public static class Example72_AnthropicChatCompletion
{
    public static async Task RunAsync()
    {
        Console.WriteLine("=== Example with Anthropic Chat Completion ===");

        var cfg = TestConfiguration.Anthropic;

        var kernel = new KernelBuilder()
            .WithAnthropicChatCompletionService(cfg.ModelId, cfg.ApiKey, cfg.ServiceId, true, true)
            .Build();

        var semanticFunction = kernel.CreateFunctionFromPrompt("{{$input}}");
        var ask = "In the classic Star Wars films, who was Luke Skywalker's father?";

        var response = await kernel.RunAsync(ask, semanticFunction);

        Console.WriteLine($"Ask: {ask}");
        Console.WriteLine($"Response: {response}");
    }
}
@@ -0,0 +1,256 @@
// Copyright (c) Microsoft. All rights reserved.

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Runtime.CompilerServices;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel.AI;
using Microsoft.SemanticKernel.AI.ChatCompletion;
using Microsoft.SemanticKernel.AI.TextCompletion;
using Microsoft.SemanticKernel.Http;

namespace Microsoft.SemanticKernel.Connectors.AI.Anthropic;

/// <summary>
/// A chat completion connector for the Anthropic API.
/// </summary>
public class AnthropicChatCompletion : IChatCompletion, ITextCompletion, IDisposable
{
    private const string BaseUrl = "https://api.anthropic.com/v1/complete";
    private const int BufferSize = 4096;

    private readonly ILogger? _log;
    private readonly HttpClient _httpClient;
    private readonly bool _disposeHttpClient;
Reviewer comment: Is there any situation where this needs to be disposed? If it was passed in externally, we mustn't dispose it, and if we created it from the internal factory, we don't need to.
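A minimal sketch of the reviewer's point, assuming the connector never takes ownership of the client's lifetime (this is not part of the PR; the ownership-tracking field simply goes away):

```csharp
// Sketch only: the connector never disposes the HttpClient, so no ownership flag is needed.
// Assumes the caller, or an internal client factory, owns the HttpClient's lifetime.
public AnthropicChatCompletion(string modelId, string apiKey, HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null)
{
    this._httpClient = httpClient ?? new HttpClient();
    this._model = modelId;
    this._apiKey = apiKey;
    this._log = loggerFactory?.CreateLogger<AnthropicChatCompletion>();
}
```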
    private readonly string _model;
    private readonly string _apiKey;
    private bool _disposed = false;

    /// <summary>
    /// Initializes a new instance of the <see cref="AnthropicChatCompletion"/> class.
    /// </summary>
    /// <param name="modelId">The ID of the Anthropic model to use for chat completion.</param>
    /// <param name="apiKey">The API key to use for authentication with the Anthropic API.</param>
    /// <param name="httpClient">The <see cref="HttpClient"/> instance to use for making HTTP requests to the Anthropic API.</param>
    /// <param name="loggerFactory">The <see cref="ILoggerFactory"/> instance to use for logging.</param>
    public AnthropicChatCompletion(string modelId, string apiKey, HttpClient? httpClient = null, ILoggerFactory? loggerFactory = null)
    {
        this._httpClient = httpClient ?? new HttpClient();
        this._disposeHttpClient = httpClient == null;
        this._model = modelId;
        this._apiKey = apiKey;
        this._log = loggerFactory?.CreateLogger<AnthropicChatCompletion>();
    }

    private HttpRequestMessage CreateHttpRequest(AnthropicRequest request)
    {
        var json = JsonSerializer.Serialize(request);
Reviewer comment: It'd be more efficient (skipping an intermediate string) to use SerializeToUtf8Bytes and create a ByteArrayContent.
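A sketch of that suggestion (not part of the PR): serialize straight to UTF-8 bytes and post them as `ByteArrayContent`, skipping the intermediate string. `MediaTypeHeaderValue` comes from `System.Net.Http.Headers`.

```csharp
// Sketch: JsonSerializer.SerializeToUtf8Bytes avoids the intermediate string allocation.
private HttpRequestMessage CreateHttpRequest(AnthropicRequest request)
{
    byte[] utf8Json = JsonSerializer.SerializeToUtf8Bytes(request);

    var content = new ByteArrayContent(utf8Json);
    content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");

    var httpRequest = new HttpRequestMessage(HttpMethod.Post, BaseUrl) { Content = content };
    httpRequest.Headers.Add("x-api-key", this._apiKey);
    return httpRequest;
}
```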
        var content = new StringContent(json, Encoding.UTF8, "application/json");
        var httpRequest = new HttpRequestMessage(HttpMethod.Post, BaseUrl)
        {
            Content = content
        };
        httpRequest.Headers.Add("x-api-key", this._apiKey);
        return httpRequest;
    }

    private async Task<HttpResponseMessage> SendAsync(AnthropicRequest request, CancellationToken cancellationToken)
    {
        using var httpRequest = this.CreateHttpRequest(request);
        try
        {
            return await this._httpClient.SendWithSuccessCheckAsync(httpRequest, cancellationToken).ConfigureAwait(false);
        }
        catch (HttpOperationException e) when (!string.IsNullOrWhiteSpace(e.ResponseContent))
        {
            this._log?.LogError(e, "Error sending request to Anthropic API: {Error}", e.ResponseContent);
            var error = JsonSerializer.Deserialize<AnthropicError>(e.ResponseContent!);
Reviewer comment: What if this throws an exception?
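One way this could be handled (a sketch, not the author's code): catch `JsonException` so a malformed error body falls back to rethrowing the original exception.

```csharp
// Sketch: guard against a non-JSON or unexpected error body.
AnthropicError? error = null;
try
{
    error = JsonSerializer.Deserialize<AnthropicError>(e.ResponseContent!);
}
catch (JsonException)
{
    // Body wasn't the expected error shape; fall through and rethrow the original exception below.
}

if (error == null)
{
    throw;
}

throw new HttpOperationException($"Error sending request to Anthropic API: {error.Error.Type} - {error.Error.Message}", e);
```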
            if (error == null)
            {
                throw;
            }
            throw new HttpOperationException($"Error sending request to Anthropic API: {error.Error.Type} - {error.Error.Message}", e);
        }
    }

    private static string ToPrompt(ChatHistory chat)
    {
        var promptBuilder = new StringBuilder();
        foreach (var message in chat.Where(message => message.Role == AuthorRole.User || message.Role == AuthorRole.Assistant))
Reviewer comment: It'd be more efficient to just do this as an if check at the beginning of the loop body rather than using a Where.
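A sketch of the suggested shape, filtering inside the loop body instead of through LINQ:

```csharp
// Sketch: skip non-user/assistant messages with an explicit check, avoiding the Where iterator allocation.
foreach (var message in chat)
{
    if (message.Role != AuthorRole.User && message.Role != AuthorRole.Assistant)
    {
        continue;
    }

    promptBuilder.AppendLine();
    promptBuilder.AppendLine();
    promptBuilder.Append(message.Role == AuthorRole.User ? "Human: " : "Assistant: ");
    promptBuilder.AppendLine(message.Content);
}
```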
        {
            promptBuilder.AppendLine();
            promptBuilder.AppendLine();
            promptBuilder.Append(message.Role == AuthorRole.User ? "Human: " : "Assistant: ");
            promptBuilder.AppendLine(message.Content);
        }

        if (chat.Count > 0 && chat.Last().Role != AuthorRole.Assistant)
        {
            promptBuilder.AppendLine();
            promptBuilder.AppendLine();
            promptBuilder.Append("Assistant: ");
        }

        return promptBuilder.ToString();
    }

    private static string ToPrompt(string text)
    {
        var promptBuilder = new StringBuilder();
Reviewer comment: This would be better as just string concatenation.
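For reference, a sketch of the concatenation-based overload the reviewer has in mind; the exact newline sequence should match whatever the Anthropic prompt format expects, so treat the separators here as an assumption rather than a spec:

```csharp
// Sketch: the single-text prompt is small and fixed-shape, so plain concatenation is enough.
private static string ToPrompt(string text) =>
    "\n\nHuman: " + text + "\n\nAssistant: ";
```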
        promptBuilder.AppendLine();
        promptBuilder.AppendLine();
        promptBuilder.Append("Human: ");
        promptBuilder.AppendLine(text);
        promptBuilder.AppendLine();
        promptBuilder.AppendLine();
        promptBuilder.Append("Assistant: ");
        return promptBuilder.ToString();
    }

    private async Task<HttpResponseMessage> SendAsync(string prompt, bool stream, AIRequestSettings? requestSettings = null, CancellationToken cancellationToken = default)
    {
        requestSettings ??= new AnthropicRequestSettings();
        if (requestSettings is not AnthropicRequestSettings settings)
Reviewer comment: This should work even if the settings aren't from Anthropic.
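A possible direction (sketch only): convert generic settings instead of rejecting them. `AnthropicRequestSettings.FromRequestSettings` is a hypothetical helper named here for illustration, mirroring the pattern other Semantic Kernel connectors use.

```csharp
// Sketch: accept any AIRequestSettings by converting them, rather than throwing.
// AnthropicRequestSettings.FromRequestSettings is assumed/hypothetical here.
requestSettings ??= new AnthropicRequestSettings();
AnthropicRequestSettings settings = requestSettings as AnthropicRequestSettings
    ?? AnthropicRequestSettings.FromRequestSettings(requestSettings);

var request = new AnthropicRequest(settings, prompt, stream);
return await this.SendAsync(request, cancellationToken).ConfigureAwait(false);
```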
        {
            throw new ArgumentException("Request settings must be an instance of AnthropicRequestSettings",
                nameof(requestSettings));
        }

        var request = new AnthropicRequest(settings, prompt, stream);
        return await this.SendAsync(request, cancellationToken).ConfigureAwait(false);
    }

    private async Task<IEnumerable<ChatResult>> InternalGetCompletionsAsync(string prompt, AIRequestSettings? requestSettings = null,
        CancellationToken cancellationToken = default)
    {
        using var httpResponse = await this.SendAsync(prompt, false, requestSettings, cancellationToken).ConfigureAwait(false);
        var content = await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
        var response = JsonSerializer.Deserialize<AnthropicResponse>(content) ?? throw new HttpOperationException($"Error deserializing response from Anthropic API: {content}");
Reviewer comment: What if Deserialize throws?
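As with the error path above, a sketch of one way to keep a malformed body from surfacing as a raw `JsonException`:

```csharp
// Sketch: map deserialization failures to the same HttpOperationException used for null results.
AnthropicResponse? response;
try
{
    response = JsonSerializer.Deserialize<AnthropicResponse>(content);
}
catch (JsonException ex)
{
    throw new HttpOperationException($"Error deserializing response from Anthropic API: {content}", ex);
}

if (response == null)
{
    throw new HttpOperationException($"Error deserializing response from Anthropic API: {content}");
}
```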
        if (response.StopReason == "max_tokens")
        {
            this._log?.LogWarning("Claude stopped because it reached the max tokens limit");
Reviewer comment: This doesn't seem to qualify as an app-level warning situation.
        }

        return new[] { new ChatResult(response) };
    }

    private async IAsyncEnumerable<ChatResult> InternalGetStreamingCompletionsAsync(string prompt, AIRequestSettings? requestSettings = null,
        [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        using var httpResponse = await this.SendAsync(prompt, true, requestSettings, cancellationToken).ConfigureAwait(false);
        var stream = await httpResponse.Content.ReadAsStreamAsync().ConfigureAwait(false);
        using var reader = new StreamReader(stream);

        var buffer = new char[BufferSize];
Reviewer comment: This could use the array pool.
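A sketch of the array-pool variant (requires `using System.Buffers;`); renting and returning around the read loop keeps the per-call allocation out of the hot path:

```csharp
// Sketch: rent the scratch buffer from the shared pool instead of allocating a new char[4096] per call.
char[] buffer = ArrayPool<char>.Shared.Rent(BufferSize);
try
{
    while (!reader.EndOfStream)
    {
        int readCount = await reader.ReadAsync(buffer, 0, BufferSize).ConfigureAwait(false);
        // ... process the chunk exactly as below ...
    }
}
finally
{
    ArrayPool<char>.Shared.Return(buffer);
}
```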
        while (!reader.EndOfStream)
        {
            var readCount = await reader.ReadAsync(buffer, 0, BufferSize).ConfigureAwait(false);
            var content = new string(buffer, 0, readCount);
            var response = JsonSerializer.Deserialize<AnthropicResponse>(content) ?? throw new HttpOperationException($"Error deserializing response from Anthropic API: {content}");
Reviewer comment: This should use spans rather than allocating new intermediate strings.

Reviewer comment: This doesn't look right. What happens if the read doesn't read the full JSON payload?
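A sketch of a more robust framing, assuming the endpoint streams newline-delimited server-sent events (`data: {...}` lines); reading whole lines means a partially filled buffer never reaches the deserializer:

```csharp
// Sketch: parse complete "data: ..." lines instead of fixed-size character chunks,
// so the JSON payload is always whole before it is deserialized.
// The SSE framing is an assumption about the wire format, not taken from this PR.
while (!reader.EndOfStream)
{
    string? line = await reader.ReadLineAsync().ConfigureAwait(false);
    if (string.IsNullOrWhiteSpace(line) || !line.StartsWith("data:", StringComparison.Ordinal))
    {
        continue; // skip blank keep-alives and non-data fields
    }

    string payload = line.Substring("data:".Length).Trim();
    var response = JsonSerializer.Deserialize<AnthropicResponse>(payload)
        ?? throw new HttpOperationException($"Error deserializing response from Anthropic API: {payload}");

    yield return new ChatResult(response);
}
```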
            yield return new ChatResult(response);
        }
    }

    /// <inheritdoc/>
    public IReadOnlyDictionary<string, string> Attributes => new Dictionary<string, string>();
Reviewer comment: This is allocating a new dictionary on every access.
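A sketch of the fix: cache a single empty dictionary and hand back the same instance on every access.

```csharp
// Sketch: one shared, empty attributes dictionary instead of a fresh allocation per read.
private static readonly IReadOnlyDictionary<string, string> s_attributes = new Dictionary<string, string>();

/// <inheritdoc/>
public IReadOnlyDictionary<string, string> Attributes => s_attributes;
```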
    /// <inheritdoc/>
    public ChatHistory CreateNewChat(string? instructions = null)
    {
        var chat = new ChatHistory();
        if (!string.IsNullOrWhiteSpace(instructions))
        {
            chat.AddSystemMessage(instructions!);
        }
        return chat;
    }

    /// <inheritdoc/>
    public async Task<IReadOnlyList<IChatResult>> GetChatCompletionsAsync(ChatHistory chat, AIRequestSettings? requestSettings = null, CancellationToken cancellationToken = default)
    {
        if (chat == null)
        {
            throw new ArgumentNullException(nameof(chat));
        }

        return (await this.InternalGetCompletionsAsync(ToPrompt(chat), requestSettings, cancellationToken).ConfigureAwait(false))
            .Cast<IChatResult>()
            .ToList();
    }

    /// <inheritdoc/>
    public async IAsyncEnumerable<IChatStreamingResult> GetStreamingChatCompletionsAsync(ChatHistory chat, AIRequestSettings? requestSettings = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        if (chat == null)
        {
            throw new ArgumentNullException(nameof(chat));
        }

        await foreach (var result in this.InternalGetStreamingCompletionsAsync(ToPrompt(chat), requestSettings, cancellationToken).ConfigureAwait(false))
        {
            yield return result;
        }
    }

    /// <summary>
    /// Releases the unmanaged resources used by the AnthropicChatCompletion and optionally releases the managed resources.
    /// </summary>
    /// <param name="disposing">true to release both managed and unmanaged resources; false to release only unmanaged resources.</param>
    protected virtual void Dispose(bool disposing)
    {
        if (!this._disposed)
        {
            if (disposing)
            {
                // Dispose managed resources here
                this._httpClient.Dispose();
            }

            // Dispose unmanaged resources here
        }

        this._disposed = true;
    }

    /// <inheritdoc/>
    public void Dispose()
Reviewer comment: This doesn't need to be disposable.
    {
        this.Dispose(true);
        GC.SuppressFinalize(this);
    }

    /// <inheritdoc/>
    public async Task<IReadOnlyList<ITextResult>> GetCompletionsAsync(string text, AIRequestSettings? requestSettings = null, CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(text))
        {
            throw new ArgumentNullException(nameof(text));
        }

        return (await this.InternalGetCompletionsAsync(ToPrompt(text), requestSettings, cancellationToken).ConfigureAwait(false))
            .Cast<ITextResult>()
            .ToList();
    }

    /// <inheritdoc/>
    public async IAsyncEnumerable<ITextStreamingResult> GetStreamingCompletionsAsync(string text, AIRequestSettings? requestSettings = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
    {
        if (string.IsNullOrWhiteSpace(text))
        {
            throw new ArgumentNullException(nameof(text));
        }

        await foreach (var result in this.InternalGetStreamingCompletionsAsync(ToPrompt(text), requestSettings, cancellationToken).ConfigureAwait(false))
        {
            yield return result;
        }
    }
}
@@ -0,0 +1,35 @@
// Copyright (c) Microsoft. All rights reserved.

using System.Text.Json.Serialization;

namespace Microsoft.SemanticKernel.Connectors.AI.Anthropic;

/// <summary>
/// Details of the error response from the Anthropic API.
/// </summary>
public class AnthropicErrorDetails
Reviewer comment: sealed
{
    /// <summary>
    /// The error type.
    /// </summary>
    [JsonPropertyName("type")]
    public string Type { get; init; } = string.Empty;

    /// <summary>
    /// The error message.
    /// </summary>
    [JsonPropertyName("message")]
    public string Message { get; init; } = string.Empty;
}

/// <summary>
/// Represents an error response from the Anthropic API.
/// </summary>
public class AnthropicError
{
    /// <summary>
    /// The error details.
    /// </summary>
    [JsonPropertyName("error")]
    public AnthropicErrorDetails Error { get; init; } = new AnthropicErrorDetails();
}
Reviewer comment: sealed
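For clarity, a sketch of what both `sealed` suggestions amount to:

```csharp
// Sketch: seal the DTOs since nothing is expected to derive from them.
public sealed class AnthropicErrorDetails { /* members unchanged */ }

public sealed class AnthropicError { /* members unchanged */ }
```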