diff --git a/Demo/DemoChat/Sources/ChatStore.swift b/Demo/DemoChat/Sources/ChatStore.swift index 51ee6b11..23e6bbb1 100644 --- a/Demo/DemoChat/Sources/ChatStore.swift +++ b/Demo/DemoChat/Sources/ChatStore.swift @@ -54,7 +54,7 @@ public final class ChatStore: ObservableObject { @MainActor func sendMessage( - _ message: Message, + _ message: MessageModel, conversationId: Conversation.ID, model: Model ) async { @@ -103,7 +103,7 @@ public final class ChatStore: ObservableObject { query: ChatQuery( model: model, messages: conversation.messages.map { message in - Chat(role: message.role, content: message.content) + Message(role: message.role, content: message.content) }, functions: functions ) @@ -128,7 +128,7 @@ public final class ChatStore: ObservableObject { finishReason == "function_call" { messageText += "Function call: name=\(functionCallName) arguments=\(functionCallArguments)" } - let message = Message( + let message = MessageModel( id: partialChatResult.id, role: choice.delta.role ?? .assistant, content: messageText, @@ -137,7 +137,7 @@ public final class ChatStore: ObservableObject { if let existingMessageIndex = existingMessages.firstIndex(where: { $0.id == partialChatResult.id }) { // Meld into previous message let previousMessage = existingMessages[existingMessageIndex] - let combinedMessage = Message( + let combinedMessage = MessageModel( id: message.id, // id stays the same for different deltas role: message.role, content: previousMessage.content + message.content, diff --git a/Demo/DemoChat/Sources/MiscStore.swift b/Demo/DemoChat/Sources/MiscStore.swift index e2e732e5..ec29b3a8 100644 --- a/Demo/DemoChat/Sources/MiscStore.swift +++ b/Demo/DemoChat/Sources/MiscStore.swift @@ -38,13 +38,13 @@ public final class MiscStore: ObservableObject { @Published var moderationConversationError: Error? 
@MainActor - func sendModerationMessage(_ message: Message) async { + func sendModerationMessage(_ message: MessageModel) async { moderationConversation.messages.append(message) await completeModerationChat(message: message) } @MainActor - func completeModerationChat(message: Message) async { + func completeModerationChat(message: MessageModel) async { moderationConversationError = nil @@ -75,7 +75,7 @@ public final class MiscStore: ObservableObject { \(circleEmoji(for: result.categories.violenceGraphic)) Violence/Graphic """ - let message = Message( + let message = MessageModel( id: response.id, role: .assistant, content: content, diff --git a/Demo/DemoChat/Sources/Models/Conversation.swift b/Demo/DemoChat/Sources/Models/Conversation.swift index 7d6f82b8..5022aa00 100644 --- a/Demo/DemoChat/Sources/Models/Conversation.swift +++ b/Demo/DemoChat/Sources/Models/Conversation.swift @@ -8,7 +8,7 @@ import Foundation struct Conversation { - init(id: String, messages: [Message] = []) { + init(id: String, messages: [MessageModel] = []) { self.id = id self.messages = messages } @@ -16,7 +16,7 @@ struct Conversation { typealias ID = String let id: String - var messages: [Message] + var messages: [MessageModel] } extension Conversation: Equatable, Identifiable {} diff --git a/Demo/DemoChat/Sources/Models/Message.swift b/Demo/DemoChat/Sources/Models/Message.swift index afea9099..3428b663 100644 --- a/Demo/DemoChat/Sources/Models/Message.swift +++ b/Demo/DemoChat/Sources/Models/Message.swift @@ -8,11 +8,11 @@ import Foundation import OpenAI -struct Message { +struct MessageModel { var id: String - var role: Chat.Role + var role: Message.Role var content: String var createdAt: Date } -extension Message: Equatable, Codable, Hashable, Identifiable {} +extension MessageModel: Equatable, Codable, Hashable, Identifiable {} diff --git a/Demo/DemoChat/Sources/UI/ChatView.swift b/Demo/DemoChat/Sources/UI/ChatView.swift index 1b872c21..5fbfa20a 100644 --- 
a/Demo/DemoChat/Sources/UI/ChatView.swift +++ b/Demo/DemoChat/Sources/UI/ChatView.swift @@ -49,7 +49,7 @@ public struct ChatView: View { sendMessage: { message, selectedModel in Task { await store.sendMessage( - Message( + MessageModel( id: idProvider(), role: .user, content: message, diff --git a/Demo/DemoChat/Sources/UI/DetailView.swift b/Demo/DemoChat/Sources/UI/DetailView.swift index 9e2a07e9..17d63801 100644 --- a/Demo/DemoChat/Sources/UI/DetailView.swift +++ b/Demo/DemoChat/Sources/UI/DetailView.swift @@ -199,7 +199,7 @@ struct DetailView: View { } struct ChatBubble: View { - let message: Message + let message: MessageModel private var assistantBackgroundColor: Color { #if os(iOS) @@ -264,10 +264,10 @@ struct DetailView_Previews: PreviewProvider { conversation: Conversation( id: "1", messages: [ - Message(id: "1", role: .assistant, content: "Hello, how can I help you today?", createdAt: Date(timeIntervalSinceReferenceDate: 0)), - Message(id: "2", role: .user, content: "I need help with my subscription.", createdAt: Date(timeIntervalSinceReferenceDate: 100)), - Message(id: "3", role: .assistant, content: "Sure, what seems to be the problem with your subscription?", createdAt: Date(timeIntervalSinceReferenceDate: 200)), - Message(id: "4", role: .function, content: + MessageModel(id: "1", role: .assistant, content: "Hello, how can I help you today?", createdAt: Date(timeIntervalSinceReferenceDate: 0)), + MessageModel(id: "2", role: .user, content: "I need help with my subscription.", createdAt: Date(timeIntervalSinceReferenceDate: 100)), + MessageModel(id: "3", role: .assistant, content: "Sure, what seems to be the problem with your subscription?", createdAt: Date(timeIntervalSinceReferenceDate: 200)), + MessageModel(id: "4", role: .function, content: """ get_current_weather({ "location": "Glasgow, Scotland", diff --git a/Demo/DemoChat/Sources/UI/ModerationChatView.swift b/Demo/DemoChat/Sources/UI/ModerationChatView.swift index 41658845..4960a0d4 100644 --- 
a/Demo/DemoChat/Sources/UI/ModerationChatView.swift +++ b/Demo/DemoChat/Sources/UI/ModerationChatView.swift @@ -24,7 +24,7 @@ public struct ModerationChatView: View { sendMessage: { message, _ in Task { await store.sendModerationMessage( - Message( + MessageModel( id: idProvider(), role: .user, content: message, diff --git a/Sources/OpenAI/Public/Models/Chat/ChatContent.swift b/Sources/OpenAI/Public/Models/Chat/ChatContent.swift new file mode 100644 index 00000000..1a0f12d9 --- /dev/null +++ b/Sources/OpenAI/Public/Models/Chat/ChatContent.swift @@ -0,0 +1,84 @@ +// +// ChatContent.swift +// +// +// Created by Federico Vitale on 14/11/23. +// + +import Foundation + +public struct ChatContent: Codable, Equatable { + public let type: ChatContentType + public let value: String + + enum CodingKeys: CodingKey { + case type + case value + } + + public enum ChatContentType: String, Codable { + case text + case imageUrl = "image_url" + } + + public struct ChatImageUrl: Codable, Equatable { + let url: String + + enum CodingKeys: CodingKey { + case url + } + } + + public static func text(_ text: String) -> Self { + Self.init(text) + } + + public static func imageUrl(_ url: String) -> Self { + Self.init(type: .imageUrl, value: url) + } + + public init(type: ChatContentType, value: String) { + self.type = type + self.value = value + } + + public init(_ text: String) { + self.type = .text + self.value = text + } + + // Custom encoding since the `value` key is variable based on the `type` + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: ChatContent.CodingKeys.self) + var dynamicContainer = encoder.container(keyedBy: DynamicKey.self) + + try container.encode(type, forKey: .type) + + switch self.type { + case .text: + try dynamicContainer.encode(value, forKey: .init(stringValue: "text")) + break + case .imageUrl: + var nested = dynamicContainer.nestedContainer(keyedBy: ChatImageUrl.CodingKeys.self, forKey: .init(stringValue: 
"image_url")) + try nested.encode(value, forKey: .url) + break + } + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + self.type = try container.decode(ChatContentType.self, forKey: .type) + + let dynamicContainer = try decoder.container(keyedBy: DynamicKey.self) + + switch self.type { + case .text: + self.value = try dynamicContainer.decode(String.self, forKey: .init(stringValue: "text")) + break + case .imageUrl: + let nested = try dynamicContainer.nestedContainer(keyedBy: ChatImageUrl.CodingKeys.self, forKey: .init(stringValue: "image_url")) + self.value = try nested.decode(String.self, forKey: .url) + break + } + } +} diff --git a/Sources/OpenAI/Public/Models/Chat/ChatFunction.swift b/Sources/OpenAI/Public/Models/Chat/ChatFunction.swift new file mode 100644 index 00000000..2738a3e2 --- /dev/null +++ b/Sources/OpenAI/Public/Models/Chat/ChatFunction.swift @@ -0,0 +1,39 @@ +// +// ChatFunction.swift +// +// +// Created by Federico Vitale on 14/11/23. +// + +import Foundation + +/// Only available for **ASSISTANT** user type. +public struct ChatFunctionCall: Codable, Equatable { + /// The name of the function to call. + public let name: String? + /// The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function. + public let arguments: String? + + public init(name: String?, arguments: String?) { + self.name = name + self.arguments = arguments + } +} + +public struct ChatFunctionDeclaration: Codable, Equatable { + /// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64. + public let name: String + + /// The description of what the function does. 
+ public let description: String + + /// The parameters the functions accepts, described as a JSON Schema object. + public let parameters: JSONSchema + + public init(name: String, description: String, parameters: JSONSchema) { + self.name = name + self.description = description + self.parameters = parameters + } +} + diff --git a/Sources/OpenAI/Public/Models/Chat/ChatQuery+.swift b/Sources/OpenAI/Public/Models/Chat/ChatQuery+.swift new file mode 100644 index 00000000..9457d567 --- /dev/null +++ b/Sources/OpenAI/Public/Models/Chat/ChatQuery+.swift @@ -0,0 +1,50 @@ +// +// ChatQuery+.swift +// +// +// Created by Federico Vitale on 14/11/23. +// + +import Foundation + + +extension ChatQuery { + // See more https://platform.openai.com/docs/guides/text-generation/json-mode + public struct ResponseFormat: Codable, Equatable { + public static let jsonObject = ResponseFormat(type: .jsonObject) + public static let text = ResponseFormat(type: .text) + + public let type: Self.ResponseFormatType + + public enum ResponseFormatType: String, Codable, Equatable { + case jsonObject = "json_object" + case text + } + } + + public enum FunctionCall: Codable, Equatable { + case none + case auto + case function(String) + + enum CodingKeys: String, CodingKey { + case none + case auto + case function = "name" + } + + public func encode(to encoder: Encoder) throws { + switch self { + case .none: + var container = encoder.singleValueContainer() + try container.encode(CodingKeys.none.rawValue) + case .auto: + var container = encoder.singleValueContainer() + try container.encode(CodingKeys.auto.rawValue) + case .function(let name): + var container = encoder.container(keyedBy: CodingKeys.self) + try container.encode(name, forKey: .function) + } + } + } +} diff --git a/Sources/OpenAI/Public/Models/Chat/ChatQuery.swift b/Sources/OpenAI/Public/Models/Chat/ChatQuery.swift new file mode 100644 index 00000000..01cee052 --- /dev/null +++ b/Sources/OpenAI/Public/Models/Chat/ChatQuery.swift @@ -0,0 
+1,119 @@ +// +// ChatQuery.swift +// +// +// Created by Sergii Kryvoblotskyi on 02/04/2023. +// + +import Foundation + +public enum ToolChoice: String, Codable { + case auto + case none +} + +public struct ChatQuery: Equatable, Codable, Streamable { + /// ID of the model to use. Currently, only gpt-3.5-turbo and gpt-3.5-turbo-0301 are supported. + public let model: Model + /// An object specifying the format that the model must output. + public let responseFormat: ResponseFormat? + /// The messages to generate chat completions for + public let messages: [Message] + /// A list of functions the model may generate JSON inputs for. + public let functions: [ChatFunctionDeclaration]? + + public let tools: [ChatTool]? + + /// Controls how the model responds to function calls. "none" means the model does not call a function, and responds to the end-user. "auto" means the model can pick between and end-user or calling a function. Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present. + public let functionCall: FunctionCall? + /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and We generally recommend altering this or top_p but not both. + public let temperature: Double? + /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. + public let topP: Double? + /// How many chat completion choices to generate for each input message. + public let n: Int? + /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. + public let stop: [String]? 
+ /// The maximum number of tokens to generate in the completion. + public let maxTokens: Int? + /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics. + public let presencePenalty: Double? + /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. + public let frequencyPenalty: Double? + /// Modify the likelihood of specified tokens appearing in the completion. + public let logitBias: [String:Int]? + /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. + public let user: String? + + /// Controls which (if any) function is called by the model. + /// `none` means the model will not call a function and instead generates a message. + /// `auto` means the model can pick between generating a message or calling a function + /// + /// Specifying a particular function via ``Tool.ToolValue`` forces the model to call that function. + /// + /// `none` is default when no functions are present + /// `auto` is default if functions are present + public let toolChoice: EnumOrCodable? + + var stream: Bool = false + + + enum CodingKeys: String, CodingKey { + case model + case messages + case functions + case functionCall = "function_call" + case temperature + case topP = "top_p" + case n + case stream + case stop + case maxTokens = "max_tokens" + case presencePenalty = "presence_penalty" + case frequencyPenalty = "frequency_penalty" + case logitBias = "logit_bias" + case user + case responseFormat = "response_format" + case tools + case toolChoice = "tool_choice" + } + + public init( + model: Model, + messages: [Message], + responseFormat: ResponseFormat? = nil, + tools: [ChatTool]? = nil, + toolChoice: EnumOrCodable? = nil, + functions: [ChatFunctionDeclaration]? 
= nil, + functionCall: FunctionCall? = nil, + temperature: Double? = nil, + topP: Double? = nil, + n: Int? = nil, + stop: [String]? = nil, + maxTokens: Int? = nil, + presencePenalty: Double? = nil, + frequencyPenalty: Double? = nil, + logitBias: [String : Int]? = nil, + user: String? = nil, + stream: Bool = false + ) { + self.model = model + self.messages = messages + self.tools = tools + self.toolChoice = toolChoice + self.functions = functions + self.functionCall = functionCall + self.temperature = temperature + self.topP = topP + self.n = n + self.responseFormat = responseFormat + self.stop = stop + self.maxTokens = maxTokens + self.presencePenalty = presencePenalty + self.frequencyPenalty = frequencyPenalty + self.logitBias = logitBias + self.user = user + self.stream = stream + } +} + diff --git a/Sources/OpenAI/Public/Models/ChatResult.swift b/Sources/OpenAI/Public/Models/Chat/ChatResult.swift similarity index 97% rename from Sources/OpenAI/Public/Models/ChatResult.swift rename to Sources/OpenAI/Public/Models/Chat/ChatResult.swift index f1a80a0c..ca78618c 100644 --- a/Sources/OpenAI/Public/Models/ChatResult.swift +++ b/Sources/OpenAI/Public/Models/Chat/ChatResult.swift @@ -13,7 +13,7 @@ public struct ChatResult: Codable, Equatable { public let index: Int /// Exists only if it is a complete message. - public let message: Chat + public let message: Message /// Exists only if it is a complete message. public let finishReason: String? 
diff --git a/Sources/OpenAI/Public/Models/ChatStreamResult.swift b/Sources/OpenAI/Public/Models/Chat/ChatStreamResult.swift similarity index 87% rename from Sources/OpenAI/Public/Models/ChatStreamResult.swift rename to Sources/OpenAI/Public/Models/Chat/ChatStreamResult.swift index 4d69713c..a058d1a9 100644 --- a/Sources/OpenAI/Public/Models/ChatStreamResult.swift +++ b/Sources/OpenAI/Public/Models/Chat/ChatStreamResult.swift @@ -12,16 +12,21 @@ public struct ChatStreamResult: Codable, Equatable { public struct Choice: Codable, Equatable { public struct Delta: Codable, Equatable { public let content: String? - public let role: Chat.Role? + public let role: Message.Role? /// The name of the author of this message. `name` is required if role is `function`, and it should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. public let name: String? + + @available(*, deprecated, message: "use toolCalls instead") public let functionCall: ChatFunctionCall? + + public let toolCalls: [ToolCall]? enum CodingKeys: String, CodingKey { case role case content case name case functionCall = "function_call" + case toolCalls = "tool_calls" } } diff --git a/Sources/OpenAI/Public/Models/Chat/ChatTool.swift b/Sources/OpenAI/Public/Models/Chat/ChatTool.swift new file mode 100644 index 00000000..3ced74db --- /dev/null +++ b/Sources/OpenAI/Public/Models/Chat/ChatTool.swift @@ -0,0 +1,191 @@ +// +// ChatTool.swift +// +// +// Created by Federico Vitale on 14/11/23. 
+// + +import Foundation + +public struct ChatTool: Codable, Equatable { + public let type: ToolType + public let value: ToolValue + + enum CodingKeys: CodingKey { + case type + case value + } + + public init(type: ToolType, value: ToolValue) { + self.type = type + self.value = value + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + var dynamicContainer = encoder.container(keyedBy: DynamicKey.self) + + try container.encode(type, forKey: .type) + + switch value { + case .function(let function): + try dynamicContainer.encode(function, forKey: .init(stringValue: "function")) + break + } + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + let dynamicContainer = try decoder.container(keyedBy: DynamicKey.self) + self.type = try container.decode(ChatTool.ToolType.self, forKey: .type) + + switch self.type { + case .function: + self.value = try dynamicContainer.decode(ChatTool.ToolValue.self, forKey: .init(stringValue: "function")) + break + } + } +} + + +extension ChatTool { + public enum ToolType: String, Codable { + case function + } + + public struct Function: Codable, Equatable { + /// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64. + public let name: String + + /// The parameters the functions accepts, described as a JSON Schema object. See the guide for examples, and the JSON Schema reference for documentation about the format. + public let parameters: JSONSchema + + /// A description of what the function does, used by the model to choose when and how to call the function. + public let description: String? + + public init(name: String, description: String? 
= nil, parameters: JSONSchema = .empty) { + self.name = name + self.parameters = parameters + self.description = description + } + } + + public enum ToolValue: Codable, Equatable { + case function(Function) + + public func encode(to encoder: Encoder) throws { + var container = encoder.singleValueContainer() + + switch self { + case .function(let function): + try container.encode(function) + break + } + } + + + public init(from decoder: Decoder) throws { + let container = try decoder.singleValueContainer() + + if let function = try? container.decode(Function.self) { + self = .function(function) + } else { + throw DecodingError.dataCorrupted( + DecodingError.Context( + codingPath: decoder.codingPath, + debugDescription: "Invalid data encountered when decoding StringOrCodable" + ) + ) + } + } + } +} + +public struct ToolCall: Codable, Equatable { + public let index: Int + public let id: String + public let type: ChatTool.ToolType + public let value: ToolCallValue + + public init(index: Int, id: String, type: ChatTool.ToolType, value: ToolCallValue) { + self.index = index + self.id = id + self.type = type + self.value = value + } + + enum CodingKeys: CodingKey { + case index + case id + case type + case value + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + var dynamicContainer = encoder.container(keyedBy: DynamicKey.self) + + try container.encode(type, forKey: .type) + try container.encode(id, forKey: .id) + try container.encode(index, forKey: .index) + + switch value { + case .function(let function): + try dynamicContainer.encode(function, forKey: .init(stringValue: "function")) + break + } + } + + public init(from decoder: Decoder) throws { + let container = try decoder.container(keyedBy: CodingKeys.self) + let dynamicContainer = try decoder.container(keyedBy: DynamicKey.self) + self.type = try container.decode(ChatTool.ToolType.self, forKey: .type) + self.id = try container.decode(String.self, 
forKey: .id) + self.index = try container.decode(Int.self, forKey: .index) + + switch self.type { + case .function: + self.value = try dynamicContainer.decode(ToolCallValue.self, forKey: .init(stringValue: "function")) + break + } + } + + public enum ToolCallValue: Codable, Equatable { + case function(Function) + + public func encode(to encoder: Encoder) throws { + var container = encoder.singleValueContainer() + + switch self { + case .function(let function): + try container.encode(function) + break + } + } + + + public init(from decoder: Decoder) throws { + let container = try decoder.singleValueContainer() + + if let function = try? container.decode(Function.self) { + self = .function(function) + } else { + throw DecodingError.dataCorrupted( + DecodingError.Context( + codingPath: decoder.codingPath, + debugDescription: "Invalid data encountered when decoding StringOrCodable" + ) + ) + } + } + } + + public struct Function : Codable, Equatable { + public let name: String? + public let arguments: String? + + public static func withName(_ name: String, arguments: String? = nil) -> Self { + return Function(name: name, arguments: arguments) + } + } +} diff --git a/Sources/OpenAI/Public/Models/Chat/JSONSchema.swift b/Sources/OpenAI/Public/Models/Chat/JSONSchema.swift new file mode 100644 index 00000000..466de0f0 --- /dev/null +++ b/Sources/OpenAI/Public/Models/Chat/JSONSchema.swift @@ -0,0 +1,140 @@ +// +// JSONSchema.swift +// +// +// Created by Federico Vitale on 14/11/23. +// + +import Foundation + +/// See the [guide](/docs/guides/gpt/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format. +public struct JSONSchema: Codable, Equatable { + public let type: JSONType + public let properties: [String: Property]? + public let required: [String]? + public let pattern: String? + public let const: String? + public let enumValues: [String]? + public let multipleOf: Int? 
+ public let minimum: Int? + public let maximum: Int? + + // OpenAI Docs says: + // To describe a function that accepts no parameters, provide the value {"type": "object", "properties": {}}. + public static let empty = JSONSchema(type: .object, properties: [:]) + + private enum CodingKeys: String, CodingKey { + case type, properties, required, pattern, const + case enumValues = "enum" + case multipleOf, minimum, maximum + } + + public struct Property: Codable, Equatable { + public let type: JSONType + public let description: String? + public let format: String? + public let items: Items? + public let required: [String]? + public let pattern: String? + public let const: String? + public let enumValues: [String]? + public let multipleOf: Int? + public let minimum: Double? + public let maximum: Double? + public let minItems: Int? + public let maxItems: Int? + public let uniqueItems: Bool? + + public static func string(description: String?=nil, enumValues: [String]?=nil) -> Self { + return Property(type: .string, description: description, enumValues: enumValues) + } + + public static func boolean(description: String?=nil) -> Self { + return Property(type: .boolean, description: description) + } + + public static func number(description: String?=nil) -> Self { + return Property(type: .number, description: description) + } + + private enum CodingKeys: String, CodingKey { + case type, description, format, items, required, pattern, const + case enumValues = "enum" + case multipleOf, minimum, maximum + case minItems, maxItems, uniqueItems + } + + public init(type: JSONType, description: String? = nil, format: String? = nil, items: Items? = nil, required: [String]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Double? = nil, maximum: Double? = nil, minItems: Int? = nil, maxItems: Int? = nil, uniqueItems: Bool? 
= nil) { + self.type = type + self.description = description + self.format = format + self.items = items + self.required = required + self.pattern = pattern + self.const = const + self.enumValues = enumValues + self.multipleOf = multipleOf + self.minimum = minimum + self.maximum = maximum + self.minItems = minItems + self.maxItems = maxItems + self.uniqueItems = uniqueItems + } + } + + public enum JSONType: String, Codable { + case integer + case string + case boolean + case array + case object + case number + case `null` = "null" + } + + public struct Items: Codable, Equatable { + public let type: JSONType + public let properties: [String: Property]? + public let pattern: String? + public let const: String? + public let enumValues: [String]? + public let multipleOf: Int? + public let minimum: Double? + public let maximum: Double? + public let minItems: Int? + public let maxItems: Int? + public let uniqueItems: Bool? + + private enum CodingKeys: String, CodingKey { + case type, properties, pattern, const + case enumValues = "enum" + case multipleOf, minimum, maximum, minItems, maxItems, uniqueItems + } + + public init(type: JSONType, properties: [String : Property]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Double? = nil, maximum: Double? = nil, minItems: Int? = nil, maxItems: Int? = nil, uniqueItems: Bool? = nil) { + self.type = type + self.properties = properties + self.pattern = pattern + self.const = const + self.enumValues = enumValues + self.multipleOf = multipleOf + self.minimum = minimum + self.maximum = maximum + self.minItems = minItems + self.maxItems = maxItems + self.uniqueItems = uniqueItems + } + } + + public init(type: JSONType, properties: [String : Property]? = nil, required: [String]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Int? = nil, maximum: Int? 
= nil) { + self.type = type + self.properties = properties + self.required = required + self.pattern = pattern + self.const = const + self.enumValues = enumValues + self.multipleOf = multipleOf + self.minimum = minimum + self.maximum = maximum + } +} diff --git a/Sources/OpenAI/Public/Models/Chat/Message.swift b/Sources/OpenAI/Public/Models/Chat/Message.swift new file mode 100644 index 00000000..a7c72280 --- /dev/null +++ b/Sources/OpenAI/Public/Models/Chat/Message.swift @@ -0,0 +1,129 @@ +// +// Message.swift +// +// +// Created by Federico Vitale on 14/11/23. +// + +import Foundation + +public struct Message: Codable, Equatable { + public typealias StringOrChatContent = Codable + + public let role: Role + /// The contents of the message. `content` is required for all messages except assistant messages with function calls. + public let content: StringOrCodable<[ChatContent]>? + + /// The name of the author of this message. `name` is required if role is `function`, and it should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. + public let name: String? + + @available(*, deprecated, message: "use toolCalls instead") + public let functionCall: ChatFunctionCall? + + public let toolCalls: [ToolCall]? + + public enum Role: String, Codable, Equatable { + case system + case assistant + case user + case function + } + + enum CodingKeys: String, CodingKey { + case role + case content + case name + case functionCall = "function_call" + case toolCalls = "tool_calls" + } + + public init( + role: Role, + content codable: StringOrChatContent? = nil, + name: String? = nil, + toolCalls: [ToolCall]? = nil + ) { + let stringOrCodable: StringOrCodable<[ChatContent]>? + + if let string = codable as? String { + stringOrCodable = .string(string) + } else if let arr = codable as? 
[ChatContent] { + stringOrCodable = .object(arr) + } else { + stringOrCodable = nil + } + + self.init(role: role, content: stringOrCodable, name: name, toolCalls: toolCalls) + } + + public init( + role: Role, + content codable: StringOrChatContent? = nil, + name: String? = nil, + functionCall: ChatFunctionCall? = nil + ) { + let stringOrCodable: StringOrCodable<[ChatContent]>? + + if let string = codable as? String { + stringOrCodable = .string(string) + } else if let arr = codable as? [ChatContent] { + stringOrCodable = .object(arr) + } else { + stringOrCodable = nil + } + + self.init(role: role, content: stringOrCodable, name: name, functionCall: functionCall) + } + + public init(role: Role, content: StringOrCodable<[ChatContent]>? = nil, name: String? = nil) { + self.role = role + self.content = content + self.name = name + self.functionCall = nil + self.toolCalls = [] + } + + public init(role: Role, content: StringOrCodable<[ChatContent]>? = nil, name: String? = nil, functionCall: ChatFunctionCall?) { + self.role = role + self.content = content + self.name = name + self.functionCall = functionCall + self.toolCalls = [] + } + + public init( + role: Role, + content: StringOrCodable<[ChatContent]>? = nil, + name: String? = nil, + toolCalls: [ToolCall]? 
+ ) { + self.role = role + self.content = content + self.name = name + self.toolCalls = toolCalls + self.functionCall = nil + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) + try container.encode(role, forKey: .role) + + if let name = name { + try container.encode(name, forKey: .name) + } + + if let functionCall = functionCall { + try container.encode(functionCall, forKey: .functionCall) + } + + if let toolCalls = toolCalls { + try container.encode(toolCalls, forKey: .toolCalls) + } + + // Should add 'nil' to 'content' property for function calling response + // See https://openai.com/blog/function-calling-and-other-api-updates + if content != nil || (role == .assistant && toolCalls != nil && functionCall != nil) { + try container.encode(content, forKey: .content) + } + } +} diff --git a/Sources/OpenAI/Public/Models/ChatQuery.swift b/Sources/OpenAI/Public/Models/ChatQuery.swift deleted file mode 100644 index 58be8f16..00000000 --- a/Sources/OpenAI/Public/Models/ChatQuery.swift +++ /dev/null @@ -1,318 +0,0 @@ -// -// ChatQuery.swift -// -// -// Created by Sergii Kryvoblotskyi on 02/04/2023. -// - -import Foundation - -// See more https://platform.openai.com/docs/guides/text-generation/json-mode -public struct ResponseFormat: Codable, Equatable { - public static let jsonObject = ResponseFormat(type: .jsonObject) - public static let text = ResponseFormat(type: .text) - - public let type: Self.ResponseFormatType - - public enum ResponseFormatType: String, Codable, Equatable { - case jsonObject = "json_object" - case text - } -} - -public struct Chat: Codable, Equatable { - public let role: Role - /// The contents of the message. `content` is required for all messages except assistant messages with function calls. - public let content: String? - /// The name of the author of this message. 
`name` is required if role is `function`, and it should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters. - public let name: String? - public let functionCall: ChatFunctionCall? - - public enum Role: String, Codable, Equatable { - case system - case assistant - case user - case function - } - - enum CodingKeys: String, CodingKey { - case role - case content - case name - case functionCall = "function_call" - } - - public init(role: Role, content: String? = nil, name: String? = nil, functionCall: ChatFunctionCall? = nil) { - self.role = role - self.content = content - self.name = name - self.functionCall = functionCall - } - - public func encode(to encoder: Encoder) throws { - var container = encoder.container(keyedBy: CodingKeys.self) - try container.encode(role, forKey: .role) - - if let name = name { - try container.encode(name, forKey: .name) - } - - if let functionCall = functionCall { - try container.encode(functionCall, forKey: .functionCall) - } - - // Should add 'nil' to 'content' property for function calling response - // See https://openai.com/blog/function-calling-and-other-api-updates - if content != nil || (role == .assistant && functionCall != nil) { - try container.encode(content, forKey: .content) - } - } -} - -public struct ChatFunctionCall: Codable, Equatable { - /// The name of the function to call. - public let name: String? - /// The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function. - public let arguments: String? - - public init(name: String?, arguments: String?) 
{ - self.name = name - self.arguments = arguments - } -} - - -/// See the [guide](/docs/guides/gpt/function-calling) for examples, and the [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for documentation about the format. -public struct JSONSchema: Codable, Equatable { - public let type: JSONType - public let properties: [String: Property]? - public let required: [String]? - public let pattern: String? - public let const: String? - public let enumValues: [String]? - public let multipleOf: Int? - public let minimum: Int? - public let maximum: Int? - - private enum CodingKeys: String, CodingKey { - case type, properties, required, pattern, const - case enumValues = "enum" - case multipleOf, minimum, maximum - } - - public struct Property: Codable, Equatable { - public let type: JSONType - public let description: String? - public let format: String? - public let items: Items? - public let required: [String]? - public let pattern: String? - public let const: String? - public let enumValues: [String]? - public let multipleOf: Int? - public let minimum: Double? - public let maximum: Double? - public let minItems: Int? - public let maxItems: Int? - public let uniqueItems: Bool? - - private enum CodingKeys: String, CodingKey { - case type, description, format, items, required, pattern, const - case enumValues = "enum" - case multipleOf, minimum, maximum - case minItems, maxItems, uniqueItems - } - - public init(type: JSONType, description: String? = nil, format: String? = nil, items: Items? = nil, required: [String]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Double? = nil, maximum: Double? = nil, minItems: Int? = nil, maxItems: Int? = nil, uniqueItems: Bool? 
= nil) { - self.type = type - self.description = description - self.format = format - self.items = items - self.required = required - self.pattern = pattern - self.const = const - self.enumValues = enumValues - self.multipleOf = multipleOf - self.minimum = minimum - self.maximum = maximum - self.minItems = minItems - self.maxItems = maxItems - self.uniqueItems = uniqueItems - } - } - - public enum JSONType: String, Codable { - case integer = "integer" - case string = "string" - case boolean = "boolean" - case array = "array" - case object = "object" - case number = "number" - case `null` = "null" - } - - public struct Items: Codable, Equatable { - public let type: JSONType - public let properties: [String: Property]? - public let pattern: String? - public let const: String? - public let enumValues: [String]? - public let multipleOf: Int? - public let minimum: Double? - public let maximum: Double? - public let minItems: Int? - public let maxItems: Int? - public let uniqueItems: Bool? - - private enum CodingKeys: String, CodingKey { - case type, properties, pattern, const - case enumValues = "enum" - case multipleOf, minimum, maximum, minItems, maxItems, uniqueItems - } - - public init(type: JSONType, properties: [String : Property]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Double? = nil, maximum: Double? = nil, minItems: Int? = nil, maxItems: Int? = nil, uniqueItems: Bool? = nil) { - self.type = type - self.properties = properties - self.pattern = pattern - self.const = const - self.enumValues = enumValues - self.multipleOf = multipleOf - self.minimum = minimum - self.maximum = maximum - self.minItems = minItems - self.maxItems = maxItems - self.uniqueItems = uniqueItems - } - } - - public init(type: JSONType, properties: [String : Property]? = nil, required: [String]? = nil, pattern: String? = nil, const: String? = nil, enumValues: [String]? = nil, multipleOf: Int? = nil, minimum: Int? 
= nil, maximum: Int? = nil) { - self.type = type - self.properties = properties - self.required = required - self.pattern = pattern - self.const = const - self.enumValues = enumValues - self.multipleOf = multipleOf - self.minimum = minimum - self.maximum = maximum - } -} - -public struct ChatFunctionDeclaration: Codable, Equatable { - /// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64. - public let name: String - - /// The description of what the function does. - public let description: String - - /// The parameters the functions accepts, described as a JSON Schema object. - public let parameters: JSONSchema - - public init(name: String, description: String, parameters: JSONSchema) { - self.name = name - self.description = description - self.parameters = parameters - } -} - -public struct ChatQueryFunctionCall: Codable, Equatable { - /// The name of the function to call. - public let name: String? - /// The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your function. - public let arguments: String? -} - -public struct ChatQuery: Equatable, Codable, Streamable { - /// ID of the model to use. Currently, only gpt-3.5-turbo and gpt-3.5-turbo-0301 are supported. - public let model: Model - /// An object specifying the format that the model must output. - public let responseFormat: ResponseFormat? - /// The messages to generate chat completions for - public let messages: [Chat] - /// A list of functions the model may generate JSON inputs for. - public let functions: [ChatFunctionDeclaration]? - /// Controls how the model responds to function calls. "none" means the model does not call a function, and responds to the end-user. 
"auto" means the model can pick between and end-user or calling a function. Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present. - public let functionCall: FunctionCall? - /// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and We generally recommend altering this or top_p but not both. - public let temperature: Double? - /// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. - public let topP: Double? - /// How many chat completion choices to generate for each input message. - public let n: Int? - /// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. - public let stop: [String]? - /// The maximum number of tokens to generate in the completion. - public let maxTokens: Int? - /// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics. - public let presencePenalty: Double? - /// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim. - public let frequencyPenalty: Double? - /// Modify the likelihood of specified tokens appearing in the completion. - public let logitBias: [String:Int]? - /// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. - public let user: String? 
- - var stream: Bool = false - - public enum FunctionCall: Codable, Equatable { - case none - case auto - case function(String) - - enum CodingKeys: String, CodingKey { - case none = "none" - case auto = "auto" - case function = "name" - } - - public func encode(to encoder: Encoder) throws { - switch self { - case .none: - var container = encoder.singleValueContainer() - try container.encode(CodingKeys.none.rawValue) - case .auto: - var container = encoder.singleValueContainer() - try container.encode(CodingKeys.auto.rawValue) - case .function(let name): - var container = encoder.container(keyedBy: CodingKeys.self) - try container.encode(name, forKey: .function) - } - } - } - - enum CodingKeys: String, CodingKey { - case model - case messages - case functions - case functionCall = "function_call" - case temperature - case topP = "top_p" - case n - case stream - case stop - case maxTokens = "max_tokens" - case presencePenalty = "presence_penalty" - case frequencyPenalty = "frequency_penalty" - case logitBias = "logit_bias" - case user - case responseFormat = "response_format" - } - - public init(model: Model, messages: [Chat], responseFormat: ResponseFormat? = nil, functions: [ChatFunctionDeclaration]? = nil, functionCall: FunctionCall? = nil, temperature: Double? = nil, topP: Double? = nil, n: Int? = nil, stop: [String]? = nil, maxTokens: Int? = nil, presencePenalty: Double? = nil, frequencyPenalty: Double? = nil, logitBias: [String : Int]? = nil, user: String? 
= nil, stream: Bool = false) { - self.model = model - self.messages = messages - self.functions = functions - self.functionCall = functionCall - self.temperature = temperature - self.topP = topP - self.n = n - self.responseFormat = responseFormat - self.stop = stop - self.maxTokens = maxTokens - self.presencePenalty = presencePenalty - self.frequencyPenalty = frequencyPenalty - self.logitBias = logitBias - self.user = user - self.stream = stream - } -} diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift index 909704ca..dc74190a 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Async.swift @@ -12,6 +12,8 @@ import Foundation @available(tvOS 13.0, *) @available(watchOS 6.0, *) public extension OpenAIProtocol { + + @available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.") func completions( query: CompletionsQuery ) async throws -> CompletionsResult { @@ -27,6 +29,7 @@ public extension OpenAIProtocol { } } + @available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chatsStream instead.") func completionsStream( query: CompletionsQuery ) -> AsyncThrowingStream { diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift index da8b7dfb..f63e1f48 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol+Combine.swift @@ -15,6 +15,7 @@ import Combine @available(watchOS 6.0, *) public extension OpenAIProtocol { + @available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.") func completions(query: CompletionsQuery) -> AnyPublisher { Future { completions(query: query, completion: $0) @@ -22,6 +23,7 @@ public extension OpenAIProtocol { 
.eraseToAnyPublisher() } + @available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.") func completionsStream(query: CompletionsQuery) -> AnyPublisher, Error> { let progress = PassthroughSubject, Error>() completionsStream(query: query) { result in diff --git a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift index 6519e8fe..07d53b19 100644 --- a/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift +++ b/Sources/OpenAI/Public/Protocols/OpenAIProtocol.swift @@ -23,7 +23,8 @@ public protocol OpenAIProtocol { - Parameters: - query: A `CompletionsQuery` object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings. - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result`, will contain either the `CompletionsResult` object with the generated completions, or an error if the request failed. - **/ + */ + @available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.") func completions(query: CompletionsQuery, completion: @escaping (Result) -> Void) /** @@ -41,7 +42,8 @@ public protocol OpenAIProtocol { - query: A `CompletionsQuery` object containing the input parameters for the API request. This includes the prompt, model, temperature, max tokens, and other settings. - onResult: A closure which receives the result when the API request finishes. The closure's parameter, `Result`, will contain either the `CompletionsResult` object with the generated completions, or an error if the request failed. 
- completion: A closure that is being called when all chunks are delivered or uncrecoverable error occured - **/ + */ + @available(*, deprecated, message: "CompletionsStream is now marked 'Legacy' in OpenAI API, use chatsStream instead.") func completionsStream(query: CompletionsQuery, onResult: @escaping (Result) -> Void, completion: ((Error?) -> Void)?) /** @@ -58,7 +60,7 @@ public protocol OpenAIProtocol { - Parameters: - query: An `ImagesQuery` object containing the input parameters for the API request. This includes the query parameters such as the text prompt, image size, and other settings. - completion: A closure which receives the result when the API request finishes. The closure's parameter, `Result`, will contain either the `ImagesResult` object with the generated images, or an error if the request failed. - **/ + */ func images(query: ImagesQuery, completion: @escaping (Result) -> Void) /** diff --git a/Sources/OpenAI/Public/Utilities/CodableUtilities.swift b/Sources/OpenAI/Public/Utilities/CodableUtilities.swift new file mode 100644 index 00000000..2919b213 --- /dev/null +++ b/Sources/OpenAI/Public/Utilities/CodableUtilities.swift @@ -0,0 +1,118 @@ +// +// CodableUtilities.swift +// +// +// Created by Federico Vitale on 09/11/23. +// + +import Foundation + + +/// Allows having dynamic keys in codables. +struct DynamicKey: CodingKey { + var stringValue: String + var intValue: Int? 
+ + init(stringValue: String) { + self.stringValue = stringValue + } + + init?(intValue: Int) { + self.intValue = intValue + self.stringValue = "\(intValue)" + } +} + + +/// Allows to encode/decode ``Chat`` or ``Codable`` (T) +/// ```swift +/// struct Person: Codable, Equatable { +/// let name: StringOrCodable +/// +/// struct FullName { +/// firstName: String +/// lastName: String +/// } +/// } +/// +/// let person = Person(name: .string("John Doe")) +/// let fullNamePerson = Person(name: .object(.init(firstName: "John", lastName: "Doe"))) +/// ``` +public enum StringOrCodable: Equatable, Codable where T: Equatable { + case string(String) + case object(T) + + enum CodingKeys: CodingKey { + case string + case object + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.singleValueContainer() + + switch self { + case .string(let string): + try container.encode(string) + case .object(let object): + try container.encode(object) + } + } + + + public init(from decoder: Decoder) throws { + let container = try decoder.singleValueContainer() + + if let string = try? container.decode(String.self) { + self = .string(string) + } else if let object = try? 
container.decode(T.self) { + self = .object(object) + } else { + throw DecodingError.dataCorrupted( + DecodingError.Context( + codingPath: decoder.codingPath, + debugDescription: "Invalid data encountered when decoding StringOrCodable" + ) + ) + } + } +} + + +public enum EnumOrCodable: Equatable, Codable where E: Equatable, C: Equatable { + case `enum`(E) + case codable(C) + + enum CodingKeys: CodingKey { + case objectA + case objectB + } + + public func encode(to encoder: Encoder) throws { + var container = encoder.singleValueContainer() + + switch self { + case .codable(let value): + try container.encode(value) + case .enum(let value): + try container.encode(value) + } + } + + + public init(from decoder: Decoder) throws { + let container = try decoder.singleValueContainer() + + if let value = try? container.decode(E.self) { + self = .enum(value) + } else if let value = try? container.decode(C.self) { + self = .codable(value) + } else { + throw DecodingError.dataCorrupted( + DecodingError.Context( + codingPath: decoder.codingPath, + debugDescription: "Invalid data encountered when decoding EnumOrCodable" + ) + ) + } + } +} diff --git a/Tests/OpenAITests/CodableUtilsTests.swift b/Tests/OpenAITests/CodableUtilsTests.swift new file mode 100644 index 00000000..351f2ba6 --- /dev/null +++ b/Tests/OpenAITests/CodableUtilsTests.swift @@ -0,0 +1,84 @@ +// +// File.swift +// +// +// Created by Federico Vitale on 09/11/23. +// + +import XCTest +@testable import OpenAI + +fileprivate class TestUtils { + static func decode( + _ jsonString: String, + _ expectedValue: T + ) throws { + let data = jsonString.data(using: .utf8)! + let decoded = try JSONDecoder().decode(T.self, from: data) + + XCTAssertEqual(decoded, expectedValue) + } + + static func encode( + _ value: T, + _ expectedJson: String + ) throws { + let source = try jsonDataAsNSDictionary(JSONEncoder().encode(value)) + let expected = try jsonDataAsNSDictionary(expectedJson.data(using: .utf8)!) 
+ + XCTAssertEqual(source, expected) + } + + static func jsonDataAsNSDictionary(_ data: Data) throws -> NSDictionary { + return NSDictionary(dictionary: try JSONSerialization.jsonObject(with: data, options: []) as! [String: Any]) + } +} + + +class CodableUtilsTests: XCTestCase { + func testStringOrCodable_String() throws { + struct Person: Codable, Equatable { + let name: StringOrCodable + + struct FullName: Codable, Equatable { + let firstName: String + let lastName: String + } + } + + let jsonString = """ + { + "name": "test" + } + """ + + let value = Person(name: .string("test")) + + try TestUtils.encode(value, jsonString) + try TestUtils.decode(jsonString, value) + } + + func testStringOrCodable_Object() throws { + struct Person: Codable, Equatable { + let name: StringOrCodable + + struct FullName: Codable, Equatable { + let firstName: String + let lastName: String + } + } + + let jsonString = """ + { + "name": { "firstName": "first", "lastName": "last" } + } + """ + + let value = Person(name: .object(.init(firstName: "first", lastName: "last"))) + + try TestUtils.encode(value, jsonString) + try TestUtils.decode(jsonString, value) + } +} + + diff --git a/Tests/OpenAITests/OpenAITests.swift b/Tests/OpenAITests/OpenAITests.swift index f195317e..2de4e1fe 100644 --- a/Tests/OpenAITests/OpenAITests.swift +++ b/Tests/OpenAITests/OpenAITests.swift @@ -23,6 +23,7 @@ class OpenAITests: XCTestCase { self.openAI = OpenAI(configuration: configuration, session: self.urlSession) } + @available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.") func testCompletions() async throws { let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"]) let expectedResult = CompletionsResult(id: "foo", object: "bar", created: 100500, model: .babbage, choices: [ @@ -34,6 +35,7 @@ class OpenAITests: XCTestCase { 
XCTAssertEqual(result, expectedResult) } + @available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.") func testCompletionsAPIError() async throws { let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"]) let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") @@ -101,25 +103,70 @@ class OpenAITests: XCTestCase { } func testChats() async throws { - let query = ChatQuery(model: .gpt4, messages: [ - .init(role: .system, content: "You are Librarian-GPT. You know everything about the books."), - .init(role: .user, content: "Who wrote Harry Potter?") - ]) - let chatResult = ChatResult(id: "id-12312", object: "foo", created: 100, model: .gpt3_5Turbo, choices: [ - .init(index: 0, message: .init(role: .system, content: "bar"), finishReason: "baz"), - .init(index: 0, message: .init(role: .user, content: "bar1"), finishReason: "baz1"), - .init(index: 0, message: .init(role: .assistant, content: "bar2"), finishReason: "baz2") + let query = ChatQuery(model: .gpt4, messages: [ + .init(role: .system, content: "You are Librarian-GPT. 
You know everything about the books.", toolCalls: nil), + .init(role: .user, content: "Who wrote Harry Potter?", toolCalls: nil), + ]) + let chatResult = ChatResult(id: "id-12312", object: "foo", created: 100, model: .gpt3_5Turbo, choices: [ + .init(index: 0, message: .init(role: .system, content: "bar", toolCalls: nil), finishReason: "baz"), + .init(index: 0, message: .init(role: .user, content: "bar1", toolCalls: nil), finishReason: "baz1"), + .init(index: 0, message: .init(role: .assistant, content: "bar2", toolCalls: nil), finishReason: "baz2") ], usage: .init(promptTokens: 100, completionTokens: 200, totalTokens: 300)) - try self.stub(result: chatResult) + try self.stub(result: chatResult) - let result = try await openAI.chats(query: query) - XCTAssertEqual(result, chatResult) + let result = try await openAI.chats(query: query) + + XCTAssertEqual(result, chatResult) + } + + func testChatsTools() async throws { + let tools: [ChatTool] = [ + ChatTool(type: .function, value: .function( + .init( + name: "get_weather", + description: "Get the current weather in the given location", + parameters: .init( + type: .object, + properties: [ + "location": .string(description: "The city and state, e.g San Francisco, CA"), + "unit": .string(enumValues: ["celsius", "fahrenheit"]) + ], + required: ["location"] + ) + ) + )) + ] + + let messages: [Message] = [ + .init(role: .system, content: "You are Weather-GPT. 
You know everything about the weather.", toolCalls: nil), + .init(role: .user, content: "What's the weather like in Boston?", toolCalls: nil), + ] + + let query = ChatQuery(model: .gpt3_5Turbo_1106, messages: messages, tools: tools) + + let chatResult = ChatResult( + id: "id-12312", + object: "foo", + created: 100, + model: .gpt3_5Turbo, + choices: [ + .init(index: 0, message: .init(role: .system, content: "bar", toolCalls: nil), finishReason: "baz"), + .init(index: 0, message: .init(role: .user, content: "bar1", toolCalls: nil), finishReason: "baz1"), + .init(index: 0, message: .init(role: .assistant, content: "bar2", toolCalls: nil), finishReason: "baz2") + ], + usage: .init(promptTokens: 100, completionTokens: 200, totalTokens: 300) + ) + + try self.stub(result: chatResult) + + let result = try await openAI.chats(query: query) + XCTAssertEqual(result, chatResult) } func testChatsFunction() async throws { let query = ChatQuery(model: .gpt3_5Turbo_1106, messages: [ - .init(role: .system, content: "You are Weather-GPT. You know everything about the weather."), - .init(role: .user, content: "What's the weather like in Boston?"), + .init(role: .system, content: "You are Weather-GPT. You know everything about the weather.", toolCalls: nil), + .init(role: .user, content: "What's the weather like in Boston?", toolCalls: nil), ], functions: [ .init(name: "get_current_weather", description: "Get the current weather in a given location", parameters: .init(type: .object, properties: [ "location": .init(type: .string, description: "The city and state, e.g. 
San Francisco, CA"), @@ -128,9 +175,9 @@ class OpenAITests: XCTestCase { ], functionCall: .auto) let chatResult = ChatResult(id: "id-12312", object: "foo", created: 100, model: .gpt3_5Turbo, choices: [ - .init(index: 0, message: .init(role: .system, content: "bar"), finishReason: "baz"), - .init(index: 0, message: .init(role: .user, content: "bar1"), finishReason: "baz1"), - .init(index: 0, message: .init(role: .assistant, content: "bar2"), finishReason: "baz2") + .init(index: 0, message: .init(role: .system, content: "bar", toolCalls: nil), finishReason: "baz"), + .init(index: 0, message: .init(role: .user, content: "bar1", toolCalls: nil), finishReason: "baz1"), + .init(index: 0, message: .init(role: .assistant, content: "bar2", toolCalls: nil), finishReason: "baz2") ], usage: .init(promptTokens: 100, completionTokens: 200, totalTokens: 300)) try self.stub(result: chatResult) @@ -140,8 +187,8 @@ class OpenAITests: XCTestCase { func testChatsError() async throws { let query = ChatQuery(model: .gpt4, messages: [ - .init(role: .system, content: "You are Librarian-GPT. You know everything about the books."), - .init(role: .user, content: "Who wrote Harry Potter?") + .init(role: .system, content: "You are Librarian-GPT. 
You know everything about the books.", toolCalls: nil), + .init(role: .user, content: "Who wrote Harry Potter?", toolCalls: nil) ]) let inError = APIError(message: "foo", type: "bar", param: "baz", code: "100") self.stub(error: inError) diff --git a/Tests/OpenAITests/OpenAITestsCombine.swift b/Tests/OpenAITests/OpenAITestsCombine.swift index e2b58458..78080df9 100644 --- a/Tests/OpenAITests/OpenAITestsCombine.swift +++ b/Tests/OpenAITests/OpenAITestsCombine.swift @@ -25,6 +25,7 @@ final class OpenAITestsCombine: XCTestCase { self.openAI = OpenAI(configuration: configuration, session: self.urlSession) } + @available(*, deprecated, message: "Completions are now marked 'Legacy' in OpenAI API, use chats instead.") func testCompletions() throws { let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"]) let expectedResult = CompletionsResult(id: "foo", object: "bar", created: 100500, model: .babbage, choices: [ @@ -38,13 +39,13 @@ final class OpenAITestsCombine: XCTestCase { func testChats() throws { let query = ChatQuery(model: .gpt4, messages: [ - .init(role: .system, content: "You are Librarian-GPT. You know everything about the books."), - .init(role: .user, content: "Who wrote Harry Potter?") + .init(role: .system, content: "You are Librarian-GPT. 
You know everything about the books.", toolCalls: nil), + .init(role: .user, content: "Who wrote Harry Potter?", toolCalls: nil) ]) let chatResult = ChatResult(id: "id-12312", object: "foo", created: 100, model: .gpt3_5Turbo, choices: [ - .init(index: 0, message: .init(role: .system, content: "bar"), finishReason: "baz"), - .init(index: 0, message: .init(role: .user, content: "bar1"), finishReason: "baz1"), - .init(index: 0, message: .init(role: .assistant, content: "bar2"), finishReason: "baz2") + .init(index: 0, message: .init(role: .system, content: "bar", toolCalls: nil), finishReason: "baz"), + .init(index: 0, message: .init(role: .user, content: "bar1", toolCalls: nil), finishReason: "baz1"), + .init(index: 0, message: .init(role: .assistant, content: "bar2", toolCalls: nil), finishReason: "baz2") ], usage: .init(promptTokens: 100, completionTokens: 200, totalTokens: 300)) try self.stub(result: chatResult) let result = try awaitPublisher(openAI.chats(query: query)) diff --git a/Tests/OpenAITests/OpenAITestsDecoder.swift b/Tests/OpenAITests/OpenAITestsDecoder.swift index 70b611cf..7ef74ad5 100644 --- a/Tests/OpenAITests/OpenAITestsDecoder.swift +++ b/Tests/OpenAITests/OpenAITestsDecoder.swift @@ -106,7 +106,7 @@ class OpenAITestsDecoder: XCTestCase { """ let expectedValue = ChatResult(id: "chatcmpl-123", object: "chat.completion", created: 1677652288, model: .gpt4, choices: [ - .init(index: 0, message: Chat(role: .assistant, content: "Hello, world!"), finishReason: "stop") + .init(index: 0, message: Message(role: .assistant, content: "Hello, world!", toolCalls: nil), finishReason: "stop") ], usage: .init(promptTokens: 9, completionTokens: 12, totalTokens: 21)) try decode(data, expectedValue) } @@ -140,12 +140,57 @@ class OpenAITestsDecoder: XCTestCase { XCTAssertEqual(imageQueryAsDict, expectedValueAsDict) } + + func testToolEncode() async throws { + let value: ChatTool = ChatTool(type: .function, value: .function( + .init( + name: "test", + description: 
"test", + parameters: .init( + type: .object, + properties: [ + "location": .string(description: "location.desc"), + "unit": .string(enumValues: ["unit.A", "unit.B"]) + ], + required: ["location"] + ) + ) + )) + + + let expectedValue = """ + { + "type": "function", + "function": { + "name": "test", + "description": "test", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "location.desc" + }, + "unit": { "type": "string", "enum": ["unit.A", "unit.B"] } + }, + "required": ["location"] + } + } + } + """ + + // To compare serialized JSONs we first convert them both into NSDictionary which are comparable (unlike native swift dictionaries) + let result = try jsonDataAsNSDictionary(JSONEncoder().encode(value)) + let expectedResult = try jsonDataAsNSDictionary(expectedValue.data(using: .utf8)!) + + XCTAssertEqual(result, expectedResult) + } func testChatQueryWithFunctionCall() async throws { let chatQuery = ChatQuery( model: .gpt3_5Turbo, messages: [ - Chat(role: .user, content: "What's the weather like in Boston?") + Message(role: .user, content: "What's the weather like in Boston?", toolCalls: nil) ], responseFormat: .init(type: .jsonObject), functions: [ @@ -200,6 +245,104 @@ class OpenAITestsDecoder: XCTestCase { XCTAssertEqual(chatQueryAsDict, expectedValueAsDict) } + + func testChatContentImage() async throws { + let expectedValue = """ + { + "type": "image_url", + "image_url": { + "url": "https://example.com" + } + } + """ + + let data = ChatContent(type: .imageUrl, value: "https://example.com") + + // To compare serialized JSONs we first convert them both into NSDictionary which are comparable (unlike native swift dictionaries) + let resultDict = try jsonDataAsNSDictionary(JSONEncoder().encode(data)) + let expectedValueAsDict = try jsonDataAsNSDictionary(expectedValue.data(using: .utf8)!) 
+ + XCTAssertEqual(resultDict, expectedValueAsDict) + } + + func testChatContentText() async throws { + let expectedValue = """ + { + "type": "text", + "text": "hello world" + } + """ + + let data = ChatContent(type: .text, value: "hello world") + + // To compare serialized JSONs we first convert them both into NSDictionary which are comparable (unlike native swift dictionaries) + let resultDict = try jsonDataAsNSDictionary(JSONEncoder().encode(data)) + let expectedValueAsDict = try jsonDataAsNSDictionary(expectedValue.data(using: .utf8)!) + + XCTAssertEqual(resultDict, expectedValueAsDict) + } + + func testChatResultWithToolCall() async throws { + let data = """ + { + "id": "chatcmpl-1234", + "object": "chat.completion", + "created": 1677652288, + "model": "gpt-3.5-turbo", + "choices": [ + { + "index": 0, + "message": { + "role": "assistant", + "content": null, + "tool_calls": [ + { + "index": 0, + "id": "id", + "type": "function", + "function": { + "name": "get_current_weather" + } + } + ] + }, + "finish_reason": "stop" + } + ], + "usage": { + "prompt_tokens": 82, + "completion_tokens": 18, + "total_tokens": 100 + } + } + """ + + let expectedValue = ChatResult( + id: "chatcmpl-1234", + object: "chat.completion", + created: 1677652288, + model: .gpt3_5Turbo, + choices: [ + .init( + index: 0, + message: Message(role: .assistant, toolCalls: [ + .init( + index: 0, + id: "id", + type: .function, + value: .function( + .withName("get_current_weather") + ) + ) + ]), + finishReason: "stop" + ) + ], + usage: .init(promptTokens: 82, completionTokens: 18, totalTokens: 100) + ) + + try decode(data, expectedValue) + } func testChatCompletionWithFunctionCall() async throws { let data = """ @@ -235,12 +378,14 @@ class OpenAITestsDecoder: XCTestCase { created: 1677652288, model: .gpt3_5Turbo, choices: [ - .init(index: 0, message: - Chat(role: .assistant, - functionCall: ChatFunctionCall(name: "get_current_weather", arguments: nil)), - finishReason: "function_call") + .init( + 
index: 0, + message: Message(role: .assistant, functionCall: .init(name: "get_current_weather", arguments: nil)), + finishReason: "function_call" + ) ], usage: .init(promptTokens: 82, completionTokens: 18, totalTokens: 100)) + try decode(data, expectedValue) } @@ -403,4 +548,36 @@ class OpenAITestsDecoder: XCTestCase { let expectedValue = AudioTranslationResult(text: "Hello, world!") try decode(data, expectedValue) } + + func testTool() async throws { + let data = """ + { + "type": "function", + "function": { + "name": "test_name", + "description": "test_desc", + "parameters": { + "type": "object", + "properties": { + "city": { + "type": "string", + "description": "param_desc_test" + } + } + } + } + } + """ + + let value = ChatTool(type: .function, value: .function(.init( + name: "test_name", + description: "test_desc", + parameters: .init(type: .object, properties: [ + "city": .init(type: .string, description: "param_desc_test") + ]) + ) + )) + + try decode(data, value) + } }