diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample.xcodeproj/project.pbxproj b/Examples/SwiftOpenAIExample/SwiftOpenAIExample.xcodeproj/project.pbxproj
index f7dcc51..06daa6f 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample.xcodeproj/project.pbxproj
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample.xcodeproj/project.pbxproj
@@ -43,6 +43,10 @@
   7B7239AB2AF6294C00646679 /* URLImageView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7239AA2AF6294C00646679 /* URLImageView.swift */; };
   7B7239AE2AF9FF0000646679 /* ChatFunctionsCallStreamProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7239AD2AF9FF0000646679 /* ChatFunctionsCallStreamProvider.swift */; };
   7B7239B12AF9FF3C00646679 /* ChatFunctionsCalllStreamDemoView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B7239B02AF9FF3C00646679 /* ChatFunctionsCalllStreamDemoView.swift */; };
+  7B99C2E72C0718DE00E701B3 /* FilesPicker.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B99C2E62C0718DE00E701B3 /* FilesPicker.swift */; };
+  7B99C2E92C0718FF00E701B3 /* FileAttachmentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B99C2E82C0718FF00E701B3 /* FileAttachmentView.swift */; };
+  7B99C2EB2C07191200E701B3 /* AttachmentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B99C2EA2C07191200E701B3 /* AttachmentView.swift */; };
+  7B99C2ED2C071B1600E701B3 /* FilesPickerProvider.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7B99C2EC2C071B1600E701B3 /* FilesPickerProvider.swift */; };
   7BA788CD2AE23A48008825D5 /* SwiftOpenAIExampleApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BA788CC2AE23A48008825D5 /* SwiftOpenAIExampleApp.swift */; };
   7BA788CF2AE23A48008825D5 /* ApiKeyIntroView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 7BA788CE2AE23A48008825D5 /* ApiKeyIntroView.swift */; };
   7BA788D12AE23A49008825D5 /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 7BA788D02AE23A49008825D5 /* Assets.xcassets */; };
@@ -115,6 +119,10 @@
   7B7239AA2AF6294C00646679 /* URLImageView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = URLImageView.swift; sourceTree = "<group>"; };
   7B7239AD2AF9FF0000646679 /* ChatFunctionsCallStreamProvider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatFunctionsCallStreamProvider.swift; sourceTree = "<group>"; };
   7B7239B02AF9FF3C00646679 /* ChatFunctionsCalllStreamDemoView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ChatFunctionsCalllStreamDemoView.swift; sourceTree = "<group>"; };
+  7B99C2E62C0718DE00E701B3 /* FilesPicker.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FilesPicker.swift; sourceTree = "<group>"; };
+  7B99C2E82C0718FF00E701B3 /* FileAttachmentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FileAttachmentView.swift; sourceTree = "<group>"; };
+  7B99C2EA2C07191200E701B3 /* AttachmentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AttachmentView.swift; sourceTree = "<group>"; };
+  7B99C2EC2C071B1600E701B3 /* FilesPickerProvider.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FilesPickerProvider.swift; sourceTree = "<group>"; };
   7BA788C92AE23A48008825D5 /* SwiftOpenAIExample.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SwiftOpenAIExample.app; sourceTree = BUILT_PRODUCTS_DIR; };
   7BA788CC2AE23A48008825D5 /* SwiftOpenAIExampleApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SwiftOpenAIExampleApp.swift; sourceTree = "<group>"; };
   7BA788CE2AE23A48008825D5 /* ApiKeyIntroView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ApiKeyIntroView.swift; sourceTree = "<group>"; };
@@ -297,6 +305,17 @@
     path = SharedModels;
     sourceTree = "<group>";
   };
+  7B99C2E52C0718CD00E701B3 /* Files */ = {
+    isa = PBXGroup;
+    children = (
+      7B99C2E62C0718DE00E701B3 /* FilesPicker.swift */,
+      7B99C2E82C0718FF00E701B3 /* FileAttachmentView.swift */,
+      7B99C2EA2C07191200E701B3 /* AttachmentView.swift */,
+      7B99C2EC2C071B1600E701B3 /* FilesPickerProvider.swift */,
+    );
+    path = Files;
+    sourceTree = "<group>";
+  };
   7BA788C02AE23A48008825D5 = {
     isa = PBXGroup;
     children = (
@@ -321,6 +340,7 @@
     isa = PBXGroup;
     children = (
       7BA788CC2AE23A48008825D5 /* SwiftOpenAIExampleApp.swift */,
+      7B99C2E52C0718CD00E701B3 /* Files */,
       7B7239AF2AF9FF1D00646679 /* SharedModels */,
       7B7239A92AF6294200646679 /* SharedUI */,
       7B1268032B08241200400694 /* Assistants */,
@@ -564,6 +584,7 @@
       7B7239AB2AF6294C00646679 /* URLImageView.swift in Sources */,
       7B7239B12AF9FF3C00646679 /* ChatFunctionsCalllStreamDemoView.swift in Sources */,
       7BBE7EAB2B02E8FC0096A693 /* ChatMessageDisplayModel.swift in Sources */,
+      7B99C2E92C0718FF00E701B3 /* FileAttachmentView.swift in Sources */,
       7BBE7EA52B02E8A70096A693 /* Sizes.swift in Sources */,
       7B7239A22AF6260D00646679 /* ChatDisplayMessage.swift in Sources */,
       0DF957862BB543F100DD2013 /* AIProxyIntroView.swift in Sources */,
@@ -572,6 +593,7 @@
       7B436B962AE24A04003CE281 /* OptionsListView.swift in Sources */,
       7BBE7EDE2B03718E0096A693 /* ChatFunctionCallProvider.swift in Sources */,
       7B7239A62AF628F800646679 /* ChatDisplayMessageView.swift in Sources */,
+      7B99C2ED2C071B1600E701B3 /* FilesPickerProvider.swift in Sources */,
       7B7239A02AF625F200646679 /* ChatFluidConversationProvider.swift in Sources */,
       7BA788CF2AE23A48008825D5 /* ApiKeyIntroView.swift in Sources */,
       7BA788CD2AE23A48008825D5 /* SwiftOpenAIExampleApp.swift in Sources */,
@@ -586,9 +608,11 @@
       7B436BAD2AE788FB003CE281 /* FineTuningJobDemoView.swift in Sources */,
       7B436BB02AE79369003CE281 /* FilesDemoView.swift in Sources */,
       7BBE7E912AFCA52A0096A693 /* ChatVisionDemoView.swift in Sources */,
+      7B99C2EB2C07191200E701B3 /* AttachmentView.swift in Sources */,
       7B436BAB2AE788F1003CE281 /* FineTuningJobProvider.swift in Sources */,
       7B7239A42AF6289900646679 /* ChatStreamFluidConversationDemoView.swift in Sources */,
       7BA788FC2AE23B42008825D5 /* AudioDemoView.swift in Sources */,
+      7B99C2E72C0718DE00E701B3 /* FilesPicker.swift in Sources */,
       7B1268072B08247C00400694 /* AssistantConfigurationProvider.swift in Sources */,
       7B436BBE2AE7ABDA003CE281 /* ModelsDemoView.swift in Sources */,
       7B436BA32AE25962003CE281 /* ChatDemoView.swift in Sources */,
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift
index e16c09c..b2bfe43 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationDemoView.swift
@@ -37,7 +37,10 @@ struct AssistantConfigurationDemoView: View {
   @State private var isAvatarLoading = false
   @State private var showAvatarFlow = false
   private let service: OpenAIService
-
+  @State private var fileIDS: [String] = []
+  /// Used mostly to display already uploaded files if any.
+  @State private var filePickerInitialActions: [FilePickerAction] = []
+
   var isCodeInterpreterOn: Binding<Bool> {
     Binding(
       get: {
@@ -72,6 +75,23 @@
     )
   }
 
+  var isFileSearchOn: Binding<Bool> {
+    Binding(
+      get: {
+        let contains =
+          self.parameters.tools.contains { $0.displayToolType == .fileSearch } == true
+        return contains
+      },
+      set: { newValue in
+        if newValue {
+          self.parameters.tools.append(AssistantObject.Tool(type: .fileSearch))
+        } else {
+          self.parameters.tools.removeAll { $0.displayToolType == .fileSearch }
+        }
+      }
+    )
+  }
+
   init(service: OpenAIService) {
     self.service = service
     _provider = State(initialValue: AssistantConfigurationProvider(service: service))
@@ -84,6 +104,7 @@
         inputViews
         capabilities
         footerActions
+        knowledge
       }
       .padding()
     }.sheet(isPresented: $showAvatarFlow) {
@@ -186,11 +207,22 @@
     InputView(title: "Capabilities") {
       VStack(spacing: 16) {
         CheckboxRow(title: "Code interpreter", isChecked: isCodeInterpreterOn)
+        CheckboxRow(title: "File Search", isChecked: isFileSearchOn)
         CheckboxRow(title: "DALL·E Image Generation", isChecked: isDalleToolOn)
       }
     }
     .inputViewStyle(.init(verticalPadding: 16.0))
   }
+
+  // TODO: Add a demo to create a vector store and add files into it.
+  var knowledge: some View {
+    FilesPicker(
+      service: service,
+      sectionTitle: "Knowledge",
+      actionTitle: "Upload files",
+      fileIDS: $fileIDS,
+      actions: $filePickerInitialActions)
+  }
 }
 
 extension Binding where Value == String? {
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift
index 840bb65..4234cde 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Assistants/AssistantConfigurationProvider.swift
@@ -67,4 +67,9 @@ import SwiftOpenAI
       debugPrint("\(error)")
     }
   }
+
+  // TODO: Create a demo for this.
+  func createVStore() async throws {
+    let _ = try await service.createVectorStore(parameters: .init(name: "Personal Data"))
+  }
 }
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/AttachmentView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/AttachmentView.swift
new file mode 100644
index 0000000..40107c7
--- /dev/null
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/AttachmentView.swift
@@ -0,0 +1,51 @@
+//
+//  AttachmentView.swift
+//  SwiftOpenAIExample
+//
+//  Created by James Rochabrun on 5/29/24.
+//
+
+import SwiftUI
+
+struct AttachmentView: View {
+
+  let fileName: String
+  @Binding var actionTrigger: Bool
+  let isLoading: Bool
+
+  var body: some View {
+    HStack(spacing: Sizes.spacingExtraSmall) {
+      HStack {
+        if isLoading == true {
+          ProgressView()
+            .frame(width: 10, height: 10)
+            .padding(.horizontal, Sizes.spacingExtraSmall)
+        } else {
+          Image(systemName: "doc")
+            .resizable()
+            .aspectRatio(contentMode: .fit)
+            .frame(width: 10)
+            .foregroundColor(.secondary)
+        }
+        Text(fileName)
+          .font(.caption2)
+      }
+      Button {
+        actionTrigger = true
+
+      } label: {
+        Image(systemName: "xmark.circle.fill")
+      }
+      .disabled(isLoading)
+    }
+    .padding(.leading, Sizes.spacingMedium)
+    .background(
+      RoundedRectangle(cornerRadius: 8)
+        .stroke(.gray.opacity(0.5), lineWidth: 0.5)
+    )
+  }
+}
+
+#Preview {
+  AttachmentView(fileName: "Mydocument.pdf", actionTrigger: .constant(true), isLoading: true)
+}
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift
new file mode 100644
index 0000000..a525e69
--- /dev/null
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FileAttachmentView.swift
@@ -0,0 +1,118 @@
+//
+//  FileAttachmentView.swift
+//  SwiftOpenAIExample
+//
+//  Created by James Rochabrun on 5/29/24.
+//
+
+import SwiftUI
+import SwiftOpenAI
+
+struct FileAttachmentView: View {
+
+  init(
+    service: OpenAIService,
+    action: FilePickerAction,
+    fileUploadedCompletion: @escaping (_ file: FileObject) -> Void,
+    fileDeletedCompletion: @escaping (_ parameters: FilePickerAction, _ id: String) -> Void)
+  {
+    self.fileProvider = FilesPickerProvider(service: service)
+    self.action = action
+    self.fileUploadedCompletion = fileUploadedCompletion
+    self.fileDeletedCompletion = fileDeletedCompletion
+  }
+
+  func newUploadedFileView(
+    parameters: FileParameters)
+    -> some View
+  {
+    AttachmentView(fileName: fileObject?.filename ?? parameters.fileName, actionTrigger: $deleted, isLoading: fileObject == nil || deleted)
+      .disabled(fileObject == nil)
+      .opacity(fileObject == nil ? 0.3 : 1)
+      .onFirstAppear {
+        Task {
+          fileObject = try await fileProvider.uploadFile(parameters: parameters)
+        }
+      }
+      .onChange(of: fileObject) { oldValue, newValue in
+        if oldValue != newValue, let newValue {
+          fileUploadedCompletion(newValue)
+        }
+      }
+  }
+
+  func previousUploadedFileView(
+    id: String)
+    -> some View
+  {
+    AttachmentView(fileName: fileObject?.filename ?? "Document", actionTrigger: $deleted, isLoading: fileObject == nil || deleted)
+      .onFirstAppear {
+        Task {
+          fileObject = try await fileProvider.retrieveFileWith(id: id)
+        }
+      }
+  }
+
+  var body: some View {
+    Group {
+      switch action {
+      case .request(let parameters):
+        newUploadedFileView(parameters: parameters)
+      case .retrieveAndDisplay(let id):
+        previousUploadedFileView(id: id)
+      }
+    }
+    .onChange(of: deleted) { oldValue, newValue in
+      if oldValue != newValue, newValue {
+        Task {
+          if let fileObject {
+            fileDeleteStatus = try await fileProvider.deleteFileWith(id: fileObject.id)
+          }
+        }
+      }
+    }
+    .onChange(of: fileDeleteStatus) { oldValue, newValue in
+      if oldValue != newValue, let newValue, newValue.deleted {
+        fileDeletedCompletion(action, newValue.id)
+      }
+    }
+  }
+
+  // MARK: Private
+
+  private let fileProvider: FilesPickerProvider
+  private let fileUploadedCompletion: (_ file: FileObject) -> Void
+  private let fileDeletedCompletion: (_ action: FilePickerAction, _ id: String) -> Void
+  private let action: FilePickerAction
+  @State private var fileObject: FileObject?
+  @State private var fileDeleteStatus: DeletionStatus?
+  @State private var deleted: Bool = false
+}
+
+
+private struct OnFirstAppear: ViewModifier {
+  let perform: () -> Void
+
+  @State private var firstTime = true
+
+  func body(content: Content) -> some View {
+    content.onAppear {
+      if firstTime {
+        firstTime = false
+        perform()
+      }
+    }
+  }
+}
+
+extension View {
+  func onFirstAppear(perform: @escaping () -> Void) -> some View {
+    modifier(OnFirstAppear(perform: perform))
+  }
+}
+
+extension DeletionStatus: Equatable {
+  public static func == (lhs: DeletionStatus, rhs: DeletionStatus) -> Bool {
+    lhs.id == rhs.id
+  }
+}
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPicker.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPicker.swift
new file mode 100644
index 0000000..5c266fa
--- /dev/null
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPicker.swift
@@ -0,0 +1,144 @@
+//
+//  FilesPicker.swift
+//  SwiftOpenAIExample
+//
+//  Created by James Rochabrun on 5/29/24.
+//
+
+import SwiftUI
+import SwiftOpenAI
+
+extension FileObject: Equatable {
+  public static func == (lhs: FileObject, rhs: FileObject) -> Bool {
+    lhs.id == rhs.id
+  }
+}
+
+extension FileParameters: Equatable, Identifiable {
+  public static func == (lhs: FileParameters, rhs: FileParameters) -> Bool {
+    lhs.file == rhs.file &&
+    lhs.fileName == rhs.fileName &&
+    lhs.purpose == rhs.purpose
+  }
+
+  public var id: String {
+    fileName
+  }
+}
+
+
+// MARK: FilePickerAction
+
+enum FilePickerAction: Identifiable, Equatable {
+
+  case request(FileParameters)
+  case retrieveAndDisplay(id: String)
+
+  var id: String {
+    switch self {
+    case .request(let fileParameters): return fileParameters.id
+    case .retrieveAndDisplay(let id): return id
+    }
+  }
+}
+
+// MARK: FilesPicker
+
+struct FilesPicker: View {
+
+  @State private var presentImporter = false
+  @Binding private var actions: [FilePickerAction]
+  @Binding private var fileIDS: [String]
+  private let service: OpenAIService
+  private let sectionTitle: String?
+  private let actionTitle: String
+
+  init(
+    service: OpenAIService,
+    sectionTitle: String? = nil,
+    actionTitle: String,
+    fileIDS: Binding<[String]>,
+    actions: Binding<[FilePickerAction]>)
+  {
+    self.service = service
+    self.sectionTitle = sectionTitle
+    self.actionTitle = actionTitle
+    _fileIDS = fileIDS
+    _actions = actions
+  }
+
+  var body: some View {
+    VStack(alignment: .leading) {
+      Group {
+        if let sectionTitle {
+          VStack {
+            Text(sectionTitle)
+            Button {
+              presentImporter = true
+            } label: {
+              Text(actionTitle)
+            }
+          }
+
+        } else {
+          Button {
+            presentImporter = true
+          } label: {
+            Text(actionTitle)
+          }
+        }
+      }
+      .fileImporter(
+        isPresented: $presentImporter,
+        allowedContentTypes: [.pdf, .text, .mp3, .mpeg],
+        allowsMultipleSelection: true) { result in
+          switch result {
+          case .success(let files):
+            files.forEach { file in
+              // Gain access to the security-scoped resource.
+              let gotAccess = file.startAccessingSecurityScopedResource()
+              guard gotAccess else { return }
+              if
+                let data = try? Data(contentsOf: file.absoluteURL) {
+                let parameter = FileParameters(fileName: file.lastPathComponent, file: data, purpose: "assistants")
+                self.actions.append(.request(parameter))
+              }
+              file.stopAccessingSecurityScopedResource()
+            }
+          case .failure(let error):
+            print(error)
+          }
+        }
+      ForEach(actions, id: \.id) { action in
+        FileAttachmentView(
+          service: service,
+          action: action) { fileResponse in
+            fileIDS.append(fileResponse.id)
+          } fileDeletedCompletion: { actionToDelete, deletedFileID in
+            /// Remove the deleted file ID from the network request payload.
+            fileIDS.removeAll(where: { id in
+              id == deletedFileID
+            })
+            /// Update the UI.
+            actions.removeAll { action in
+              actionToDelete.id == action.id
+            }
+          }
+      }
+    }
+    .frame(maxWidth: .infinity, alignment: .leading)
+  }
+}
+
+
+#Preview {
+  FilesPicker(service: OpenAIServiceFactory.service(apiKey: ""), sectionTitle: "Knowledge", actionTitle: "Upload File", fileIDS: .constant(["s"]), actions: .constant(
+    [.retrieveAndDisplay(id: "id1"),
+     .retrieveAndDisplay(id: "id2"),
+     .retrieveAndDisplay(id: "id3"),
+     .retrieveAndDisplay(id: "id4"),
+     .retrieveAndDisplay(id: "id5"),
+     .retrieveAndDisplay(id: "id6")]
+  ))
+  .padding()
+}
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift
new file mode 100644
index 0000000..6c95b52
--- /dev/null
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Files/FilesPickerProvider.swift
@@ -0,0 +1,56 @@
+//
+//  FilesPickerProvider.swift
+//  SwiftOpenAIExample
+//
+//  Created by James Rochabrun on 5/29/24.
+//
+
+import SwiftOpenAI
+import SwiftUI
+
+final class FilesPickerProvider {
+
+  private let service: OpenAIService
+
+  var files: [FileObject] = []
+  var uploadedFile: FileObject? = nil
+  var deletedStatus: DeletionStatus? = nil
+  var retrievedFile: FileObject? = nil
+  var fileContent: [[String: Any]] = []
+
+  init(service: OpenAIService) {
+    self.service = service
+  }
+
+  func listFiles() async throws {
+    files = try await service.listFiles().data
+  }
+
+  func uploadFile(
+    parameters: FileParameters)
+    async throws -> FileObject?
+  {
+    try await service.uploadFile(parameters: parameters)
+  }
+
+  func deleteFileWith(
+    id: String)
+    async throws -> DeletionStatus?
+  {
+    try await service.deleteFileWith(id: id)
+  }
+
+  func retrieveFileWith(
+    id: String)
+    async throws -> FileObject?
+  {
+    try await service.retrieveFileWith(id: id)
+  }
+
+  func retrieveContentForFileWith(
+    id: String)
+    async throws
+  {
+    fileContent = try await service.retrieveContentForFileWith(id: id)
+  }
+}
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift
index 5242864..b0c9725 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/SharedUI/ChatDisplayMessageView.swift
@@ -27,8 +27,8 @@ struct ChatDisplayMessageView: View {
       }.first ?? ""
 
       let urls = content.compactMap { contentItem -> URL? in
-        if case .imageUrl(let url) = contentItem {
-          return url
+        if case .imageUrl(let imageDetail) = contentItem {
+          return imageDetail.url
         } else {
           return nil
         }
diff --git a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift
index 6407f46..33ecd0a 100644
--- a/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift
+++ b/Examples/SwiftOpenAIExample/SwiftOpenAIExample/Vision/ChatVisionDemoView.swift
@@ -85,12 +85,12 @@ struct ChatVisionDemoView: View {
             /// Make the request
             let content: [ChatCompletionParameters.Message.ContentType.MessageContent] = [
               .text(prompt)
-            ] + selectedImageURLS.map { .imageUrl($0) }
+            ] + selectedImageURLS.map { .imageUrl(.init(url: $0)) }
 
             resetInput()
             try await chatProvider.startStreamedChat(parameters: .init(
               messages: [.init(role: .user, content: .contentArray(content ))],
-              model: .gpt4VisionPreview, maxTokens: 300), content: content)
+              model: .gpt4o, maxTokens: 300), content: content)
          }
       } label: {
          Image(systemName: "paperplane")
diff --git a/README.md b/README.md
index 0c43cce..fd218af 100644
--- a/README.md
+++ b/README.md
@@ -422,10 +422,32 @@ public struct ChatCompletionParameters: Encodable {
           try container.encode(contentArray)
         }
       }
-
-      public enum MessageContent: Encodable {
+      public enum MessageContent: Encodable, Equatable, Hashable {
+
         case text(String)
-        case imageUrl(URL)
+        case imageUrl(ImageDetail)
+
+        public struct ImageDetail: Encodable, Equatable, Hashable {
+
+          public let url: URL
+          public let detail: String?
+
+          enum CodingKeys: String, CodingKey {
+            case url
+            case detail
+          }
+
+          public func encode(to encoder: Encoder) throws {
+            var container = encoder.container(keyedBy: CodingKeys.self)
+            try container.encode(url, forKey: .url)
+            try container.encode(detail, forKey: .detail)
+          }
+
+          public init(url: URL, detail: String? = nil) {
+            self.url = url
+            self.detail = detail
+          }
+        }
 
         enum CodingKeys: String, CodingKey {
           case type
@@ -439,12 +461,32 @@ public struct ChatCompletionParameters: Encodable {
           case .text(let text):
             try container.encode("text", forKey: .type)
             try container.encode(text, forKey: .text)
-          case .imageUrl(let url):
+          case .imageUrl(let imageDetail):
             try container.encode("image_url", forKey: .type)
-            try container.encode(url, forKey: .imageUrl)
+            try container.encode(imageDetail, forKey: .imageUrl)
           }
         }
-      }
+
+        public func hash(into hasher: inout Hasher) {
+          switch self {
+          case .text(let string):
+            hasher.combine(string)
+          case .imageUrl(let imageDetail):
+            hasher.combine(imageDetail)
+          }
+        }
+
+        public static func ==(lhs: MessageContent, rhs: MessageContent) -> Bool {
+          switch (lhs, rhs) {
+          case let (.text(a), .text(b)):
+            return a == b
+          case let (.imageUrl(a), .imageUrl(b)):
+            return a == b
+          default:
+            return false
+          }
+        }
+      }
     }
 
 public enum Role: String {
@@ -1048,7 +1090,7 @@ Usage
 ```swift
 let imageURL = "https://upload.wikimedia.org/wikipedia/commons/thumb/d/dd/Gfp-wisconsin-madison-the-nature-boardwalk.jpg/2560px-Gfp-wisconsin-madison-the-nature-boardwalk.jpg"
 let prompt = "What is this?"
-let messageContent: [ChatCompletionParameters.Message.ContentType.MessageContent] = [.text(prompt), .imageUrl(imageURL)] // Users can add as many `.imageUrl` instances to the service.
+let messageContent: [ChatCompletionParameters.Message.ContentType.MessageContent] = [.text(prompt), .imageUrl(.init(url: URL(string: imageURL)!))] // Users can add as many `.imageUrl` instances as needed.
 let parameters = ChatCompletionParameters(messages: [.init(role: .user, content: .contentArray(messageContent))], model: .gpt4o)
 let chatCompletionObject = try await service.startStreamedChat(parameters: parameters)
 ```
diff --git a/Sources/OpenAI/Azure/DefaultOpenAIAzureService.swift b/Sources/OpenAI/Azure/DefaultOpenAIAzureService.swift
index 31e79ad..57cecab 100644
--- a/Sources/OpenAI/Azure/DefaultOpenAIAzureService.swift
+++ b/Sources/OpenAI/Azure/DefaultOpenAIAzureService.swift
@@ -637,7 +637,7 @@ final public class DefaultOpenAIAzureService: OpenAIService {
       let request = try AzureOpenAIAPI.vectorStoreFile(.create(vectorStoreID: vectorStoreID)).request(
          apiKey: apiKey,
          organizationID: nil,
-         method: .post, 
+         method: .post,
          params: parameters,
          queryItems: initialQueryItems,
          betaHeaderField: Self.assistantsBetaV2,
diff --git a/Sources/OpenAI/Public/Parameters/Chat/ChatCompletionParameters.swift b/Sources/OpenAI/Public/Parameters/Chat/ChatCompletionParameters.swift
index ad54d1e..cbf8177 100644
--- a/Sources/OpenAI/Public/Parameters/Chat/ChatCompletionParameters.swift
+++ b/Sources/OpenAI/Public/Parameters/Chat/ChatCompletionParameters.swift
@@ -100,7 +100,29 @@ public struct ChatCompletionParameters: Encodable {
       public enum MessageContent: Encodable, Equatable, Hashable {
 
         case text(String)
-        case imageUrl(URL)
+        case imageUrl(ImageDetail)
+
+        public struct ImageDetail: Encodable, Equatable, Hashable {
+
+          public let url: URL
+          public let detail: String?
+
+          enum CodingKeys: String, CodingKey {
+            case url
+            case detail
+          }
+
+          public func encode(to encoder: Encoder) throws {
+            var container = encoder.container(keyedBy: CodingKeys.self)
+            try container.encode(url, forKey: .url)
+            try container.encode(detail, forKey: .detail)
+          }
+
+          public init(url: URL, detail: String? = nil) {
+            self.url = url
+            self.detail = detail
+          }
+        }
 
         enum CodingKeys: String, CodingKey {
           case type
@@ -114,30 +136,30 @@ public struct ChatCompletionParameters: Encodable {
           case .text(let text):
             try container.encode("text", forKey: .type)
             try container.encode(text, forKey: .text)
-          case .imageUrl(let url):
+          case .imageUrl(let imageDetail):
             try container.encode("image_url", forKey: .type)
-            try container.encode(url, forKey: .imageUrl)
+            try container.encode(imageDetail, forKey: .imageUrl)
           }
         }
 
         public func hash(into hasher: inout Hasher) {
-            switch self {
-            case .text(let string):
-               hasher.combine(string)
-            case .imageUrl(let url):
-               hasher.combine(url)
-            }
+          switch self {
+          case .text(let string):
+            hasher.combine(string)
+          case .imageUrl(let imageDetail):
+            hasher.combine(imageDetail)
+          }
         }
 
         public static func ==(lhs: MessageContent, rhs: MessageContent) -> Bool {
-            switch (lhs, rhs) {
-            case let (.text(a), .text(b)):
-               return a == b
-            case let (.imageUrl(a), .imageUrl(b)):
-               return a == b
-            default:
-               return false
-            }
+          switch (lhs, rhs) {
+          case let (.text(a), .text(b)):
+            return a == b
+          case let (.imageUrl(a), .imageUrl(b)):
+            return a == b
+          default:
+            return false
+          }
         }
       }
     }