Refactor API options in ChatGPT class and add custom model and max tokens settings
tk42 committed Apr 20, 2024
1 parent 23a83fd commit 0681b42
Showing 3 changed files with 48 additions and 9 deletions.
src/api.ts: 15 changes (8 additions, 7 deletions)
@@ -6,20 +6,21 @@ export class ChatGPT {
         system_role: string,
         user_prompt: string,
         apiKey: string,
-        model = "gpt-3.5-turbo",
-        temperature = 0,
-        max_tokens = 150,
-        top_p = 0.95,
-        frequency_penalty = 0,
-        presence_penalty = 0.5): Promise<string> {
+        model: string = 'gpt-3.5-turbo',
+        max_tokens: number = 150,
+        temperature: number = 0,
+        top_p: number = 0.95,
+        frequency_penalty: number = 0,
+        presence_penalty: number = 0.5,
+    ): Promise<string> {

         const headers = {
             'Content-Type': 'application/json',
             'Authorization': `Bearer ${apiKey}`,
         };

         const body = JSON.stringify({
-            model: 'gpt-3.5-turbo',
+            model: model,
             messages: [
                 { "role": "system", "content": system_role },
                 { "role": "user", "content": user_prompt },
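
For orientation, here is roughly what the refactored callAPI reads like with this diff applied. The signature and request body come from the hunks above; the endpoint URL, the fetch call, and the response handling are assumptions, since those lines are outside the diff.

    // Sketch of the refactored method. Everything past the JSON body
    // (endpoint, fetch call, response handling) is assumed, not shown
    // in this commit.
    export class ChatGPT {
        static async callAPI(
            system_role: string,
            user_prompt: string,
            apiKey: string,
            model: string = 'gpt-3.5-turbo',
            max_tokens: number = 150,
            temperature: number = 0,
            top_p: number = 0.95,
            frequency_penalty: number = 0,
            presence_penalty: number = 0.5,
        ): Promise<string> {
            const headers = {
                'Content-Type': 'application/json',
                'Authorization': `Bearer ${apiKey}`,
            };
            const body = JSON.stringify({
                model: model,
                messages: [
                    { role: 'system', content: system_role },
                    { role: 'user', content: user_prompt },
                ],
                max_tokens,
                temperature,
                top_p,
                frequency_penalty,
                presence_penalty,
            });
            // Assumed: OpenAI chat completions endpoint, first choice returned.
            const response = await fetch('https://api.openai.com/v1/chat/completions', {
                method: 'POST',
                headers,
                body,
            });
            const data = await response.json();
            return data.choices[0].message.content;
        }
    }

Because the new parameters keep defaults, existing callers that pass only the first three arguments continue to work, while main.ts (below) now overrides model and max_tokens explicitly.
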
src/main.ts: 8 changes (7 additions, 1 deletion)
@@ -116,7 +116,13 @@ export default class AutoClassifierPlugin extends Plugin {

         // ------- [API Processing] -------
         // Call API
-        const responseRaw = await ChatGPT.callAPI(system_role, user_prompt, this.settings.apiKey);
+        const responseRaw = await ChatGPT.callAPI(
+            system_role,
+            user_prompt,
+            this.settings.apiKey,
+            this.settings.commandOption.model,
+            this.settings.commandOption.max_tokens,
+        );
         const jsonRegex = /reliability[\s\S]*?:\s*([\d.]+)[\s\S]*?output[\s\S]*?:\s*"([^"^}]+)/;
         const match = responseRaw.match(jsonRegex);
         let resOutput;
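
As a side note on the unchanged lines above, jsonRegex pulls a numeric reliability score and a quoted output string out of the raw completion without parsing it as JSON, which tolerates responses that are not strictly valid JSON. A standalone illustration (the sample response string is invented):

    // The sample response below is made up for illustration; only the regex
    // itself comes from main.ts.
    const responseRaw = '{"reliability": 0.92, "output": "project/automation"}';
    const jsonRegex = /reliability[\s\S]*?:\s*([\d.]+)[\s\S]*?output[\s\S]*?:\s*"([^"^}]+)/;
    const match = responseRaw.match(jsonRegex);
    if (match) {
        const reliability = parseFloat(match[1]); // 0.92
        const output = match[2];                  // "project/automation"
        console.log(reliability, output);
    }
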
src/settings.ts: 34 changes (33 additions, 1 deletion)
@@ -47,6 +47,8 @@ export interface CommandOption {

     chat_role: string;
     prmpt_template: string;
+    model: string;
+    max_tokens: number;
 }


@@ -75,7 +77,9 @@ export const DEFAULT_SETTINGS: AutoClassifierSettings = {
         useCustomCommand: false,

         chat_role: DEFAULT_CHAT_ROLE,
-        prmpt_template: DEFAULT_PROMPT_TEMPLATE
+        prmpt_template: DEFAULT_PROMPT_TEMPLATE,
+        model: "gpt-3.5-turbo",
+        max_tokens: 150,
     },
 };

@@ -480,6 +484,34 @@ export class AutoClassifierSettingTab extends PluginSettingTab {
             })
         });
         customChatRoleEl.descEl.createSpan({text: 'Define custom role to ChatGPT system.'});

+        new Setting(containerEl)
+            .setName('Custom Model')
+            .setDesc("ID of the model to use. See https://platform.openai.com/docs/models")
+            .setClass('setting-item-child')
+            .addText((text) =>
+                text
+                    .setPlaceholder('gpt-3.5-turbo')
+                    .setValue(commandOption.model)
+                    .onChange(async (value) => {
+                        commandOption.model = value;
+                        await this.plugin.saveSettings();
+                    })
+            );
+
+        new Setting(containerEl)
+            .setName('Custom Max Tokens')
+            .setDesc("The maximum number of tokens that can be generated in the completion.")
+            .setClass('setting-item-child')
+            .addText((text) =>
+                text
+                    .setPlaceholder('150')
+                    .setValue(String(commandOption.max_tokens))
+                    .onChange(async (value) => {
+                        commandOption.max_tokens = parseInt(value);
+                        await this.plugin.saveSettings();
+                    })
+            );
     }
 }

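One caveat on the new Custom Max Tokens text field: parseInt(value) yields NaN when the box is empty or non-numeric, and that value would be saved as-is. A guard along these lines is one way to keep the stored value sane; it is a sketch, not part of this commit:

    // Hypothetical helper, not in the commit: falls back to the 150 default
    // when the entered text does not parse to a positive integer.
    function parseMaxTokens(value: string, fallback = 150): number {
        const parsed = parseInt(value, 10);
        return Number.isNaN(parsed) || parsed <= 0 ? fallback : parsed;
    }

    // e.g. inside the onChange handler:
    //   commandOption.max_tokens = parseMaxTokens(value);
    console.log(parseMaxTokens(''));    // 150
    console.log(parseMaxTokens('300')); // 300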
