Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

New content format #122

Closed
wants to merge 24 commits into from
Closed
Show file tree
Hide file tree
Changes from 11 commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ struct CompletionsResult: Codable, Equatable {
**Example**

```swift
let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, max_tokens: 100, top_p: 1, frequency_penalty: 0, presence_penalty: 0, stop: ["\\n"])
let query = CompletionsQuery(model: .textDavinci_003, prompt: "What is 42?", temperature: 0, maxTokens: 100, topP: 1, frequencyPenalty: 0, presencePenalty: 0, stop: ["\\n"])
openAI.completions(query: query) { result in
//Handle result here
}
Expand Down
72 changes: 72 additions & 0 deletions Sources/OpenAI/Public/Models/Chat/Chat.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
//
// File.swift
//
//
// Created by Federico Vitale on 14/11/23.
//

import Foundation

/// A single message in a chat conversation, as sent to / received from the Chat Completions API.
public struct Chat: Codable, Equatable {
    /// The role of the message author.
    public let role: Role
    /// The contents of the message. `content` is required for all messages except assistant messages with function calls.
    public let content: StringOrCodable<[ChatContent]>?

    /// The name of the author of this message. `name` is required if role is `function`, and it should be the name of the function whose response is in the `content`. May contain a-z, A-Z, 0-9, and underscores, with a maximum length of 64 characters.
    public let name: String?
    /// The function call emitted by the model (assistant messages), if any.
    public let functionCall: ChatFunctionCall?

    public enum Role: String, Codable, Equatable {
        case system
        case assistant
        case user
        case function
    }

    enum CodingKeys: String, CodingKey {
        case role
        case content
        case name
        case functionCall = "function_call"
    }

    /// Convenience initializer accepting either a plain `String` or a `[ChatContent]` array as content.
    ///
    /// - Parameter codable: The message content; must be a `String` or `[ChatContent]`.
    ///   Any other `Codable` value is silently mapped to `nil` content.
    ///   NOTE(review): consider throwing on unsupported payloads instead of dropping them.
    public init(role: Role, content codable: Codable? = nil, name: String? = nil, functionCall: ChatFunctionCall? = nil) {
        let stringOrCodable: StringOrCodable<[ChatContent]>?

        if let string = codable as? String {
            stringOrCodable = .string(string)
        } else if let arr = codable as? [ChatContent] {
            stringOrCodable = .object(arr)
        } else {
            // Unsupported payload types are dropped rather than surfaced as an error.
            stringOrCodable = nil
        }

        self.init(role: role, contents: stringOrCodable, name: name, functionCall: functionCall)
    }

    /// Designated initializer taking the content in its wire representation.
    public init(role: Role, contents: StringOrCodable<[ChatContent]>? = nil, name: String? = nil, functionCall: ChatFunctionCall? = nil) {
        self.role = role
        self.content = contents
        self.name = name
        self.functionCall = functionCall
    }

    public func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: CodingKeys.self)
        try container.encode(role, forKey: .role)

        if let name = name {
            try container.encode(name, forKey: .name)
        }

        if let functionCall = functionCall {
            try container.encode(functionCall, forKey: .functionCall)
        }

        // Should add 'nil' to 'content' property for function calling response
        // See https://openai.com/blog/function-calling-and-other-api-updates
        if content != nil || (role == .assistant && functionCall != nil) {
            try container.encode(content, forKey: .content)
        }
    }
}
84 changes: 84 additions & 0 deletions Sources/OpenAI/Public/Models/Chat/ChatContent.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
//
// File.swift
//
//
// Created by Federico Vitale on 14/11/23.
//

import Foundation

/// One part of a multimodal chat message: either plain text or an image URL.
///
/// Wire format depends on `type`:
///   {"type": "text", "text": "..."}
///   {"type": "image_url", "image_url": {"url": "..."}}
public struct ChatContent: Codable, Equatable {
    /// Discriminator selecting which key carries the payload.
    let type: ChatContentType
    /// The payload: the text for `.text`, the URL string for `.imageUrl`.
    let value: String

    public enum ChatContentType: String, Codable {
        case text
        case imageUrl = "image_url"
    }

    public struct ImageUrl: Codable, Equatable {
        let url: String

        enum CodingKeys: CodingKey {
            case url
        }
    }

    // Only `type` goes through these keys; `value` is stored under a
    // type-dependent key ("text" or "image_url") via DynamicKey below.
    // (The former unused `value` key was removed — it never appears on the wire.)
    enum CodingKeys: CodingKey {
        case type
    }

    /// Builds a text content part.
    public static func text(_ text: String) -> Self {
        Self.init(text)
    }

    /// Builds an image-URL content part.
    public static func imageUrl(_ url: String) -> Self {
        Self.init(type: .imageUrl, value: url)
    }

    public init(type: ChatContentType, value: String) {
        self.type = type
        self.value = value
    }

    /// Convenience initializer for a text part.
    public init(_ text: String) {
        self.type = .text
        self.value = text
    }

    // Custom encoding is required because the key carrying `value` varies with `type`.
    public func encode(to encoder: Encoder) throws {
        var container = encoder.container(keyedBy: ChatContent.CodingKeys.self)
        var dynamicContainer = encoder.container(keyedBy: DynamicKey.self)

        try container.encode(type, forKey: .type)

        switch self.type {
        case .text:
            try dynamicContainer.encode(value, forKey: .init(stringValue: "text"))
        case .imageUrl:
            var nested = dynamicContainer.nestedContainer(keyedBy: ImageUrl.CodingKeys.self, forKey: .init(stringValue: "image_url"))
            try nested.encode(value, forKey: .url)
        }
    }

    public init(from decoder: Decoder) throws {
        let container = try decoder.container(keyedBy: CodingKeys.self)
        self.type = try container.decode(ChatContentType.self, forKey: .type)

        let dynamicContainer = try decoder.container(keyedBy: DynamicKey.self)

        switch self.type {
        case .text:
            self.value = try dynamicContainer.decode(String.self, forKey: .init(stringValue: "text"))
        case .imageUrl:
            let nested = try dynamicContainer.nestedContainer(keyedBy: ImageUrl.CodingKeys.self, forKey: .init(stringValue: "image_url"))
            self.value = try nested.decode(String.self, forKey: .url)
        }
    }
}
38 changes: 38 additions & 0 deletions Sources/OpenAI/Public/Models/Chat/ChatFunction.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
//
// ChatFunction.swift
//
//
// Created by Federico Vitale on 14/11/23.
//

import Foundation

/// A function invocation produced by the model in a chat response.
public struct ChatFunctionCall: Codable, Equatable {
    /// The name of the function to call.
    public let name: String?
    /// The arguments to call the function with, generated by the model as a JSON string.
    /// The model does not always produce valid JSON and may hallucinate parameters
    /// not defined by your schema — validate before invoking the function.
    public let arguments: String?

    /// Creates a function call descriptor.
    public init(name: String?, arguments: String?) {
        self.name = name
        self.arguments = arguments
    }
}

/// A function made available to the model, described to the API as a JSON Schema object.
public struct ChatFunctionDeclaration: Codable, Equatable {
/// The name of the function to be called. Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length of 64.
public let name: String

/// The description of what the function does.
public let description: String

/// The parameters the functions accepts, described as a JSON Schema object.
public let parameters: JSONSchema

/// Creates a function declaration to include in a chat request.
public init(name: String, description: String, parameters: JSONSchema) {
self.name = name
self.description = description
self.parameters = parameters
}
}

50 changes: 50 additions & 0 deletions Sources/OpenAI/Public/Models/Chat/ChatQuery+Extension.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
//
// File.swift
//
//
// Created by Federico Vitale on 14/11/23.
//

import Foundation


extension ChatQuery {
    /// Specifies the format the model must output.
    /// See https://platform.openai.com/docs/guides/text-generation/json-mode
    public struct ResponseFormat: Codable, Equatable {
        public static let jsonObject = ResponseFormat(type: .jsonObject)
        public static let text = ResponseFormat(type: .text)

        public let type: Self.ResponseFormatType

        public enum ResponseFormatType: String, Codable, Equatable {
            case jsonObject = "json_object"
            case text
        }
    }

    /// Controls how the model responds to function calls.
    /// Encoded either as a bare string ("none" / "auto") or as `{"name": "my_function"}`.
    public enum FunctionCall: Codable, Equatable {
        case none
        case auto
        case function(String)

        enum CodingKeys: String, CodingKey {
            case none = "none"
            case auto = "auto"
            case function = "name"
        }

        public func encode(to encoder: Encoder) throws {
            switch self {
            case .none:
                var container = encoder.singleValueContainer()
                try container.encode(CodingKeys.none.rawValue)
            case .auto:
                var container = encoder.singleValueContainer()
                try container.encode(CodingKeys.auto.rawValue)
            case .function(let name):
                var container = encoder.container(keyedBy: CodingKeys.self)
                try container.encode(name, forKey: .function)
            }
        }

        // Mirror of `encode(to:)`. Without this, the compiler-synthesized
        // `init(from:)` would expect the SE-0295 keyed layout (e.g. {"none": {}})
        // and fail to round-trip values produced by the custom encoder above.
        public init(from decoder: Decoder) throws {
            if let value = try? decoder.singleValueContainer().decode(String.self) {
                switch value {
                case CodingKeys.none.rawValue:
                    self = .none
                case CodingKeys.auto.rawValue:
                    self = .auto
                default:
                    throw DecodingError.dataCorrupted(.init(
                        codingPath: decoder.codingPath,
                        debugDescription: "Unknown function_call value: \(value)"
                    ))
                }
                return
            }
            let container = try decoder.container(keyedBy: CodingKeys.self)
            self = .function(try container.decode(String.self, forKey: .function))
        }
    }
}
119 changes: 119 additions & 0 deletions Sources/OpenAI/Public/Models/Chat/ChatQuery.swift
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
//
// ChatQuery.swift
//
//
// Created by Sergii Kryvoblotskyi on 02/04/2023.
//

import Foundation

/// Coarse tool-selection strategy, encoded as its raw string value:
/// `auto` lets the model decide whether to call a tool; `none` forbids tool calls.
/// A specific function is forced via `Tool.ToolValue` instead (see `ChatQuery.toolChoice`).
public enum ToolChoice: String, Codable {
case auto
case none
}

/// A request to the Chat Completions API.
/// Conforms to `Streamable` (project protocol — presumably toggles server-sent streaming; confirm in OpenAI.swift).
public struct ChatQuery: Equatable, Codable, Streamable {
/// ID of the model to use.
public let model: Model
/// An object specifying the format that the model must output (e.g. JSON mode).
public let responseFormat: ResponseFormat?
/// The messages to generate chat completions for.
public let messages: [Chat]
/// A list of functions the model may generate JSON inputs for.
public let functions: [ChatFunctionDeclaration]?

/// A list of tools the model may call.
public let tools: [Tool]?

/// Controls how the model responds to function calls. "none" means the model does not call a function, and responds to the end-user. "auto" means the model can pick between generating a message or calling a function. Specifying a particular function via `{"name": "my_function"}` forces the model to call that function. "none" is the default when no functions are present. "auto" is the default if functions are present.
public let functionCall: FunctionCall?
/// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic. We generally recommend altering this or top_p but not both.
public let temperature: Double?
/// An alternative to sampling with temperature, called nucleus sampling, where the model considers the results of the tokens with top_p probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered.
public let topP: Double?
/// How many chat completion choices to generate for each input message.
public let n: Int?
/// Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.
public let stop: [String]?
/// The maximum number of tokens to generate in the completion.
public let maxTokens: Int?
/// Number between -2.0 and 2.0. Positive values penalize new tokens based on whether they appear in the text so far, increasing the model's likelihood to talk about new topics.
public let presencePenalty: Double?
/// Number between -2.0 and 2.0. Positive values penalize new tokens based on their existing frequency in the text so far, decreasing the model's likelihood to repeat the same line verbatim.
public let frequencyPenalty: Double?
/// Modify the likelihood of specified tokens appearing in the completion.
public let logitBias: [String:Int]?
/// A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse.
public let user: String?

/// Controls which (if any) function is called by the model.
/// `none` means the model will not call a function and instead generates a message.
/// `auto` means the model can pick between generating a message or calling a function
///
/// Specifying a particular function via ``Tool.ToolValue`` forces the model to call that function.
///
/// `none` is default when no functions are present
/// `auto` is default if functions are present
public let toolChoice: AnyOf<ToolChoice, Tool.ToolValue>?

// Whether to stream partial results; serialized under the "stream" key and
// set only through the `stream:` initializer parameter.
var stream: Bool = false


// Maps camelCase property names to the snake_case keys the API expects.
enum CodingKeys: String, CodingKey {
case model
case messages
case functions
case functionCall = "function_call"
case temperature
case topP = "top_p"
case n
case stream
case stop
case maxTokens = "max_tokens"
case presencePenalty = "presence_penalty"
case frequencyPenalty = "frequency_penalty"
case logitBias = "logit_bias"
case user
case responseFormat = "response_format"
case tools
case toolChoice = "tool_choice"
}

/// Creates a chat completion query. All parameters except `model` and `messages` are optional.
public init(
model: Model,
messages: [Chat],
responseFormat: ResponseFormat? = nil,
tools: [Tool]? = nil,
toolChoice: AnyOf<ToolChoice, Tool.ToolValue>? = nil,
functions: [ChatFunctionDeclaration]? = nil,
functionCall: FunctionCall? = nil,
temperature: Double? = nil,
topP: Double? = nil,
n: Int? = nil,
stop: [String]? = nil,
maxTokens: Int? = nil,
presencePenalty: Double? = nil,
frequencyPenalty: Double? = nil,
logitBias: [String : Int]? = nil,
user: String? = nil,
stream: Bool = false
) {
self.model = model
self.messages = messages
self.tools = tools
self.toolChoice = toolChoice
self.functions = functions
self.functionCall = functionCall
self.temperature = temperature
self.topP = topP
self.n = n
self.responseFormat = responseFormat
self.stop = stop
self.maxTokens = maxTokens
self.presencePenalty = presencePenalty
self.frequencyPenalty = frequencyPenalty
self.logitBias = logitBias
self.user = user
self.stream = stream
}
}

Loading