From 748487f17ff43a802979c8270cbea311ef526703 Mon Sep 17 00:00:00 2001 From: jamesrochabrun Date: Sat, 1 Feb 2025 22:51:50 -0800 Subject: [PATCH 1/2] Adding reasoning content --- .../Public/ResponseModels/Chat/ChatCompletionChunkObject.swift | 3 +++ .../Public/ResponseModels/Chat/ChatCompletionObject.swift | 3 +++ 2 files changed, 6 insertions(+) diff --git a/Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionChunkObject.swift b/Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionChunkObject.swift index 73d8fc1..cd31688 100644 --- a/Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionChunkObject.swift +++ b/Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionChunkObject.swift @@ -45,6 +45,8 @@ public struct ChatCompletionChunkObject: Decodable { /// The contents of the chunk message. public let content: String? + /// The reasoning content generated by the model, if available. + public let reasoningContent: String? /// The tool calls generated by the model, such as function calls. public let toolCalls: [ToolCall]? /// The name and arguments of a function that should be called, as generated by the model. @@ -57,6 +59,7 @@ public struct ChatCompletionChunkObject: Decodable { enum CodingKeys: String, CodingKey { case content + case reasoningContent = "reasoning_content" case toolCalls = "tool_calls" case functionCall = "function_call" case role diff --git a/Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionObject.swift b/Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionObject.swift index 3ab3468..2871146 100644 --- a/Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionObject.swift +++ b/Sources/OpenAI/Public/ResponseModels/Chat/ChatCompletionObject.swift @@ -50,6 +50,8 @@ public struct ChatCompletionObject: Decodable { public let functionCall: FunctionCall? /// The role of the author of this message. public let role: String + /// The reasoning content generated by the model, if available. + public let reasoningContent: String? 
/// Provided by the Vision API. public let finishDetails: FinishDetails? /// The refusal message generated by the model. @@ -86,6 +88,7 @@ public struct ChatCompletionObject: Decodable { case functionCall = "function_call" case role case finishDetails = "finish_details" + case reasoningContent = "reasoning_content" case refusal case audio } From 328ad6306f2129dc9861df9035ddb7fe4d8f0b71 Mon Sep 17 00:00:00 2001 From: jamesrochabrun Date: Sat, 1 Feb 2025 22:59:59 -0800 Subject: [PATCH 2/2] adding reasoning content --- README.md | 66 ++++++++++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 61 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 3b5f19e..87e2c43 100644 --- a/README.md +++ b/README.md @@ -134,7 +134,6 @@ let service = OpenAIServiceFactory.service(apiKey: apiKey, organizationID: ogani That's all you need to begin accessing the full range of OpenAI endpoints. - ### How to get the status code of network errors You may want to build UI around the type of error that the API returns. @@ -3289,19 +3288,76 @@ For more information about the `OpenRouter` api visit its [documentation](https: The [DeepSeek](https://api-docs.deepseek.com/) API uses an API format compatible with OpenAI. By modifying the configuration, you can use SwiftOpenAI to access the DeepSeek API. +Creating the service + ```swift -// Creating the service let apiKey = "your_api_key" let service = OpenAIServiceFactory.service( apiKey: apiKey, overrideBaseURL: "https://api.deepseek.com") +``` -// Making a request +Non-Streaming Example +```swift let prompt = "What is the Manhattan project?" 
-let parameters = ChatCompletionParameters(messages: [.init(role: .user, content: .text(prompt))], model: .custom("deepseek-reasoner")) -let stream = service.startStreamedChat(parameters: parameters) +let parameters = ChatCompletionParameters( + messages: [.init(role: .user, content: .text(prompt))], + model: .custom("deepseek-reasoner") +) + +do { + let result = try await service.chat(parameters: parameters) + + // Access the response content + if let content = result.choices.first?.message.content { + print("Response: \(content)") + } + + // Access reasoning content if available + if let reasoning = result.choices.first?.message.reasoningContent { + print("Reasoning: \(reasoning)") + } +} catch { + print("Error: \(error)") +} +``` + +Streaming Example + +```swift +let prompt = "What is the Manhattan project?" +let parameters = ChatCompletionParameters( + messages: [.init(role: .user, content: .text(prompt))], + model: .custom("deepseek-reasoner") +) + +// Start the stream +do { + let stream = try await service.startStreamedChat(parameters: parameters) + for try await result in stream { + let content = result.choices.first?.delta.content ?? "" + self.message += content + + // Optional: Handle reasoning content if available + if let reasoning = result.choices.first?.delta.reasoningContent { + self.reasoningMessage += reasoning + } + } +} catch APIError.responseUnsuccessful(let description, let statusCode) { + self.errorMessage = "Network error with status code: \(statusCode) and description: \(description)" +} catch { + self.errorMessage = error.localizedDescription +} +``` + +Notes + +- The DeepSeek API is compatible with OpenAI's format but uses different model names +- Use .custom("deepseek-reasoner") to specify the DeepSeek model +- The `reasoningContent` field is optional and specific to DeepSeek's API +- Error handling follows the same pattern as standard OpenAI requests. 
``` For more inofrmation about the `DeepSeek` api visit its [documentation](https://api-docs.deepseek.com).