Skip to content

Commit

Permalink
token count
Browse files Browse the repository at this point in the history
  • Loading branch information
aeltorio committed Oct 5, 2024
1 parent 5c06638 commit 934891e
Show file tree
Hide file tree
Showing 4 changed files with 126 additions and 20 deletions.
117 changes: 105 additions & 12 deletions package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 3 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,8 +29,10 @@
"@fluentui/react-components": "^9.54.17",
"@fluentui/react-icons": "^2.0.260",
"@sctg/ai-sdk": "0.0.3",
"es6-promise": "^4.2.8",
"@sctg/sentencepiece-js": "^1.3.0",
"buffer": "^6.0.3",
"core-js": "^3.38.1",
"es6-promise": "^4.2.8",
"react": "^18.3.1",
"react-dom": "^18.3.1",
"regenerator-runtime": "^0.14.1"
Expand Down
24 changes: 17 additions & 7 deletions src/aipane/aipane.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,20 @@
import { Groq } from "@sctg/ai-sdk";
import config from "../config.json";
import type { AIModel, AIPrompt, AIProvider } from "./AIPrompt";
import { SentencePieceProcessor, cleanText, llama_3_1_tokeniser_b64 } from "@sctg/sentencepiece-js";
import { Buffer } from "buffer";

// Polyfill Node's Buffer as a browser global before any consumer runs.
// NOTE(review): presumably required by @sctg/sentencepiece-js to decode the
// base64-encoded tokenizer model in the browser — confirm against that package.
// eslint-disable-next-line no-undef
globalThis.Buffer = Buffer;

/** Safety margin (in tokens) subtracted from the model limit when computing max_tokens. */
const TOKEN_MARGIN = 20;

// Cached tokenizer load. Decoding the base64-encoded SentencePiece model is
// expensive, so it is performed once and the pending/settled promise is reused
// by every subsequent countTokens call (also dedupes concurrent first calls).
let tokenizerPromise: Promise<SentencePieceProcessor> | undefined;

/** Loads the Llama 3.1 SentencePiece tokenizer from its embedded base64 model. */
async function loadTokenizer(): Promise<SentencePieceProcessor> {
  const spp = new SentencePieceProcessor();
  await spp.loadFromB64StringModel(llama_3_1_tokeniser_b64);
  return spp;
}

/**
 * Counts the tokens in `text` using the Llama 3.1 SentencePiece tokenizer.
 *
 * @param text - Raw text to tokenise; it is normalised with `cleanText` first.
 * @returns The number of token ids the tokenizer produces for the cleaned text.
 */
async function countTokens(text: string): Promise<number> {
  const cleaned = cleanText(text);
  tokenizerPromise ??= loadTokenizer();
  const tokenizer = await tokenizerPromise;
  return tokenizer.encodeIds(cleaned).length;
}

async function groqRequest(
provider: AIProvider,
Expand All @@ -23,13 +37,9 @@ async function groqRequest(
apiKey,
dangerouslyAllowBrowser: true,
proxy: provider.aiproxied ? proxyUrl : undefined,
// fetch: async (url: any, init?: any): Promise<any> => {
// console.log("About to make a request", url, init);
// const response = await fetch(url, { mode: "no-cors", ...init });
// console.log("Got response", response);
// return response;
// },
});
const tokenCount = await countTokens(systemText + usertext);
console.log(`Token count: ${tokenCount}`);
const chatCompletion = await groq.chat.completions.create({
messages: [
{
Expand All @@ -43,7 +53,7 @@ async function groqRequest(
],
model: model.id,
temperature: 1,
//max_tokens: model.max_tokens,
max_tokens: model.max_tokens - tokenCount - TOKEN_MARGIN,
top_p: 1,
stream: true,
stop: null,
Expand Down
1 change: 1 addition & 0 deletions webpack.config.js
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ module.exports = async (env, options) => {
},
resolve: {
extensions: [".ts", ".tsx", ".html", ".js"],
fallback: { url: false, fs: false, module: false },
},
module: {
rules: [
Expand Down

0 comments on commit 934891e

Please sign in to comment.