Skip to content
This repository has been archived by the owner on Apr 18, 2024. It is now read-only.

Commit

Permalink
track llmReqId
Browse files Browse the repository at this point in the history
  • Loading branch information
patcher9 committed Feb 10, 2024
1 parent 7359dea commit 8ed00fe
Show file tree
Hide file tree
Showing 3 changed files with 30 additions and 36 deletions.
1 change: 1 addition & 0 deletions src/anthropic.js
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ export default function initAnthropic({ llm, dokuUrl, apiKey, environment, appli
const duration = (end - start) / 1000;

const data = {
llmReqId: response.id,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
Expand Down
51 changes: 17 additions & 34 deletions src/cohere.js
Original file line number Diff line number Diff line change
@@ -1,29 +1,5 @@
import {sendData} from './helpers.js';

/**
 * Estimates the number of tokens in the given text.
 *
 * Uses a fixed words-to-tokens heuristic (2.5 tokens per whitespace-separated
 * word) rather than a real tokenizer, so the result is an approximation only.
 *
 * @param {string} text - The input text.
 * @return {number} - The estimated number of tokens (0 for empty or
 *                    whitespace-only input).
 *
 * @jsondoc
 * {
 * "description": "Counts the number of tokens in the given text",
 * "params": [{"name": "text", "type": "string", "description": "Text"}],
 * "returns": {"type": "number", "description": "Number of tokens."}
 * }
 */
function countTokens(text) {
  const tokensPerWord = 2.5;

  // Split on runs of whitespace, dropping empty fragments so that "" and
  // whitespace-only strings count as zero words. (The previous version
  // returned a non-zero estimate for those: "".split(/\s+/) yields [""].)
  const words = text.split(/\s+/).filter((word) => word.length > 0);

  // Scale word count by the heuristic ratio and round to a whole token count.
  return Math.round(words.length * tokensPerWord);
}
/**
* Initializes Cohere functionality with performance tracking and data logging.
*
Expand Down Expand Up @@ -72,13 +48,14 @@ export default function initCohere({ llm, dokuUrl, apiKey, environment, applicat

for (const generation of response.generations) {
const data = {
llmReqId: generation.id,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
endpoint: 'cohere.generate',
skipResp: skipResp,
completionTokens: countTokens(generation.text),
promptTokens: countTokens(prompt),
completionTokens: response.meta["billedUnits"]["outputTokens"],
promptTokens: response.meta["billedUnits"]["inputTokens"],
requestDuration: duration,
model: model,
prompt: prompt,
Expand All @@ -89,7 +66,8 @@ export default function initCohere({ llm, dokuUrl, apiKey, environment, applicat
if (!params.hasOwnProperty('stream') || params.stream !== true) {
data.finishReason = generation.finish_reason;
}
await sendData(data, dokuUrl, apiKey);
console.log(data);
//await sendData(data, dokuUrl, apiKey);
}

return response;
Expand Down Expand Up @@ -131,6 +109,7 @@ export default function initCohere({ llm, dokuUrl, apiKey, environment, applicat
const prompt = params.message;

const data = {
llmReqId: response.response_id,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
Expand All @@ -139,9 +118,9 @@ export default function initCohere({ llm, dokuUrl, apiKey, environment, applicat
requestDuration: duration,
model: model,
prompt: prompt,
promptTokens: response.meta["billed_units"]["output_tokens"],
completionTokens: response.meta["billed_units"]["input_tokens"],
totalTokens: response.token_count["billed_tokens"],
promptTokens: response.meta["billedUnits"]["outputTokens"],
completionTokens: response.meta["billedUnits"]["inputTokens"],
totalTokens: response.token_count["billedUnits"],
response: response.text,
};

Expand Down Expand Up @@ -169,12 +148,15 @@ export default function initCohere({ llm, dokuUrl, apiKey, environment, applicat

data.response = ""
for await (const message of response) {
if (message.eventType === "stream-end") {
data.llmReqId = message.response.response_id;
data.promptTokens = message.response.meta.billed_units["input_tokens"];
data.completionTokens = message.response.meta.billed_units["output_tokens"];
}
data.response += message.eventType === "text-generation" ? message.text : "";
// Pass the message along so it's not consumed
yield message; // this allows the message to flow back to the original caller
}
data.promptTokens = countTokens(prompt)
data.completionTokens = countTokens(data.response)
data.totalTokens = data.promptTokens + data.completionTokens

const end = performance.now();
Expand All @@ -195,14 +177,15 @@ export default function initCohere({ llm, dokuUrl, apiKey, environment, applicat
const prompt = params.text;

const data = {
llmReqId: response.id,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
endpoint: 'cohere.summarize',
skipResp: skipResp,
requestDuration: duration,
completionTokens: response.meta["billed_units"]["output_tokens"],
promptTokens: response.meta["billed_units"]["input_tokens"],
completionTokens: response.meta["billedUnits"]["outputTokens"],
promptTokens: response.meta["billedUnits"]["inputTokens"],
model: model,
prompt: prompt,
response: response.summary,
Expand Down
14 changes: 12 additions & 2 deletions src/openai.js
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat
dataResponse += content;
passThroughStream.push(chunk); // Push chunk to the pass-through stream
}
var responseId = chunk.id;
}
passThroughStream.push(null); // Signal end of the pass-through stream

Expand Down Expand Up @@ -93,6 +94,7 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat

// Prepare the data object for Doku
const data = {
llmReqId: responseId,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
Expand Down Expand Up @@ -136,6 +138,7 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat
}
let prompt = formattedMessages.join("\n");
const data = {
llmReqId: response.id,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
Expand Down Expand Up @@ -169,6 +172,7 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat
data.promptTokens = response.usage.prompt_tokens;
data.totalTokens = response.usage.total_tokens;
}

await sendData(data, dokuUrl, apiKey);

return response;
Expand Down Expand Up @@ -198,6 +202,7 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat
dataResponse += content;
passThroughStream.push(chunk); // Push chunk to the pass-through stream
}
var responseId = chunk.id;
}
passThroughStream.push(null); // Signal end of the pass-through stream

Expand All @@ -206,6 +211,7 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat
const duration = (end - start) / 1000;
// Prepare the data object for Doku
const data = {
llmReqId: responseId,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
Expand All @@ -229,6 +235,7 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat
const duration = (end - start) / 1000;

const data = {
llmReqId: response.id,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
Expand Down Expand Up @@ -308,7 +315,7 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat
skipResp: skipResp,
requestDuration: duration,
model: params.model,
finetuneJobId: response.id,
llmReqId: response.id,
finetuneJobStatus: response.status,
};

Expand All @@ -331,9 +338,10 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat
}

const quality = params.quality ?? 'standard';

var responseId = response.created;
for (const item of response.data) {
const data = {
llmReqId: responseId,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
Expand Down Expand Up @@ -365,8 +373,10 @@ export default function initOpenAI({ llm, dokuUrl, apiKey, environment, applicat
if (params.response_format && params.response_format === 'b64_json') {
imageFormat = 'b64_json';
}
var responseId = response.created;
for (const item of response.data) {
const data = {
llmReqId: responseId,
environment: environment,
applicationName: applicationName,
sourceLanguage: 'Javascript',
Expand Down

0 comments on commit 8ed00fe

Please sign in to comment.