Skip to content

Commit

Permalink
Merge branch 'main' of github.com:dongri/openai-api-rs
Browse files · Browse the repository at this point in the history
  • Loading branch information
Dongri Jin committed Jul 9, 2023
2 parents d87fa3e + 3b5a970 commit 4521588
Show file tree
Hide file tree
Showing 4 changed files with 62 additions and 0 deletions.
22 changes: 22 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,18 @@ let req = ChatCompletionRequest {
role: chat_completion::MessageRole::user,
content: String::from("Hello OpenAI!"),
}],
functions: None,
function_call: None,
temperature: None,
top_p: None,
n: None,
stream: None,
stop: None,
max_tokens: None,
presence_penalty: None,
frequency_penalty: None,
logit_bias: None,
user: None,
};
```

Expand Down Expand Up @@ -62,6 +74,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
}],
functions: None,
function_call: None,
temperature: None,
top_p: None,
n: None,
stream: None,
stop: None,
max_tokens: None,
presence_penalty: None,
frequency_penalty: None,
logit_bias: None,
user: None,
};
let result = client.chat_completion(req).await?;
println!("{:?}", result.choices[0].message.content);
Expand Down
10 changes: 10 additions & 0 deletions examples/chat_completion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
}],
functions: None,
function_call: None,
temperature: None,
top_p: None,
n: None,
stream: None,
stop: None,
max_tokens: None,
presence_penalty: None,
frequency_penalty: None,
logit_bias: None,
user: None,
};
let result = client.chat_completion(req).await?;
println!("{:?}", result.choices[0].message.content);
Expand Down
10 changes: 10 additions & 0 deletions examples/function_call.rs
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,16 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
}),
}]),
function_call: Some("auto".to_string()),
temperature: None,
top_p: None,
n: None,
stream: None,
stop: None,
max_tokens: None,
presence_penalty: None,
frequency_penalty: None,
logit_bias: None,
user: None,
};

let result = client.chat_completion(req).await?;
Expand Down
20 changes: 20 additions & 0 deletions src/v1/chat_completion.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,26 @@ pub struct ChatCompletionRequest {
pub functions: Option<Vec<Function>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub function_call: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub temperature: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub top_p: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub n: Option<i64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub stream: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub stop: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub max_tokens: Option<i64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub presence_penalty: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub frequency_penalty: Option<f64>,
#[serde(skip_serializing_if = "Option::is_none")]
pub logit_bias: Option<HashMap<String, i32>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub user: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
Expand Down

0 comments on commit 4521588

Please sign in to comment.