diff --git a/README.md b/README.md
index 1af3627..4905f54 100644
--- a/README.md
+++ b/README.md
@@ -34,6 +34,18 @@ let req = ChatCompletionRequest {
         role: chat_completion::MessageRole::user,
         content: String::from("Hello OpenAI!"),
     }],
+    functions: None,
+    function_call: None,
+    temperature: None,
+    top_p: None,
+    n: None,
+    stream: None,
+    stop: None,
+    max_tokens: None,
+    presence_penalty: None,
+    frequency_penalty: None,
+    logit_bias: None,
+    user: None,
 };
 ```
 
@@ -62,6 +74,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
         }],
         functions: None,
         function_call: None,
+        temperature: None,
+        top_p: None,
+        n: None,
+        stream: None,
+        stop: None,
+        max_tokens: None,
+        presence_penalty: None,
+        frequency_penalty: None,
+        logit_bias: None,
+        user: None,
     };
     let result = client.chat_completion(req).await?;
     println!("{:?}", result.choices[0].message.content);
diff --git a/examples/chat_completion.rs b/examples/chat_completion.rs
index 9326f1c..af2deab 100644
--- a/examples/chat_completion.rs
+++ b/examples/chat_completion.rs
@@ -15,6 +15,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
         }],
         functions: None,
         function_call: None,
+        temperature: None,
+        top_p: None,
+        n: None,
+        stream: None,
+        stop: None,
+        max_tokens: None,
+        presence_penalty: None,
+        frequency_penalty: None,
+        logit_bias: None,
+        user: None,
     };
     let result = client.chat_completion(req).await?;
     println!("{:?}", result.choices[0].message.content);
diff --git a/examples/function_call.rs b/examples/function_call.rs
index de91b89..9c76d85 100644
--- a/examples/function_call.rs
+++ b/examples/function_call.rs
@@ -48,6 +48,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             }),
         }]),
         function_call: Some("auto".to_string()),
+        temperature: None,
+        top_p: None,
+        n: None,
+        stream: None,
+        stop: None,
+        max_tokens: None,
+        presence_penalty: None,
+        frequency_penalty: None,
+        logit_bias: None,
+        user: None,
     };
 
     let result = client.chat_completion(req).await?;
diff --git a/src/v1/chat_completion.rs b/src/v1/chat_completion.rs
index 9233f0f..94ac7a9 100644
--- a/src/v1/chat_completion.rs
+++ b/src/v1/chat_completion.rs
@@ -21,6 +21,26 @@ pub struct ChatCompletionRequest {
     pub functions: Option<Vec<Function>>,
     #[serde(skip_serializing_if = "Option::is_none")]
     pub function_call: Option<String>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub temperature: Option<f64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub top_p: Option<f64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub n: Option<i64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub stream: Option<bool>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub stop: Option<Vec<String>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub max_tokens: Option<i64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub presence_penalty: Option<f64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub frequency_penalty: Option<f64>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub logit_bias: Option<HashMap<String, i32>>,
+    #[serde(skip_serializing_if = "Option::is_none")]
+    pub user: Option<String>,
 }
 
 #[derive(Debug, Serialize, Deserialize)]
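
For context, here is a minimal sketch of a request that actually sets the newly added optional fields instead of passing `None` for all of them. The `use` paths, `Client::new`, the `tokio` runtime, the `GPT3_5_TURBO` constant, and the `model`/`messages` field names come from the rest of the crate and are assumed here, not shown in this diff; the `Some(...)` values are only illustrative, with types matching the struct definition above.

```rust
use std::collections::HashMap;
use std::env;

use openai_api_rs::v1::api::Client;
use openai_api_rs::v1::chat_completion::{self, ChatCompletionRequest};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = Client::new(env::var("OPENAI_API_KEY")?);

    // logit_bias maps token ids (as strings) to bias values;
    // the id used here is purely illustrative.
    let mut logit_bias = HashMap::new();
    logit_bias.insert(String::from("1917"), 5);

    let req = ChatCompletionRequest {
        model: chat_completion::GPT3_5_TURBO.to_string(),
        messages: vec![chat_completion::ChatCompletionMessage {
            role: chat_completion::MessageRole::user,
            content: String::from("Hello OpenAI!"),
        }],
        functions: None,
        function_call: None,
        // The optional sampling / output controls added in this diff.
        temperature: Some(0.7),
        top_p: Some(1.0),
        n: Some(1),
        stream: Some(false),
        stop: Some(vec![String::from("\n\n")]),
        max_tokens: Some(64),
        presence_penalty: Some(0.0),
        frequency_penalty: Some(0.0),
        logit_bias: Some(logit_bias),
        user: Some(String::from("example-user")),
    };

    let result = client.chat_completion(req).await?;
    println!("{:?}", result.choices[0].message.content);
    Ok(())
}
```

Because every new field is wrapped in `Option` and tagged with `#[serde(skip_serializing_if = "Option::is_none")]`, a `None` field is simply omitted from the serialized JSON body, so existing callers that pass `None` send exactly the same request payload as before.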