From 1d995a663e8c6a51e595af089fe3a2607add62f8 Mon Sep 17 00:00:00 2001
From: patcher99
Date: Fri, 8 Mar 2024 16:55:45 +0530
Subject: [PATCH 1/3] anthropic support

---
 package-lock.json |  19 +-----
 package.json      |   1 -
 src/anthropic.js  | 159 ++++++++++++++++++++++++++++++++++++++--------
 src/index.js      |   2 +-
 4 files changed, 136 insertions(+), 45 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 1adc316..ce9d437 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,7 +9,6 @@
       "version": "0.0.4",
       "license": "ISC",
       "dependencies": {
-        "@anthropic-ai/tokenizer": "^0.0.4",
         "stream": "^0.0.2"
       },
       "devDependencies": {
@@ -30,15 +29,6 @@
         "node": ">=0.10.0"
       }
     },
-    "node_modules/@anthropic-ai/tokenizer": {
-      "version": "0.0.4",
-      "resolved": "https://registry.npmjs.org/@anthropic-ai/tokenizer/-/tokenizer-0.0.4.tgz",
-      "integrity": "sha512-EHRKbxlxlc8W4KCBEseByJ7YwyYCmgu9OyN59H9+IYIGPoKv8tXyQXinkeGDI+cI8Tiuz9wk2jZb/kK7AyvL7g==",
-      "dependencies": {
-        "@types/node": "^18.11.18",
-        "tiktoken": "^1.0.10"
-      }
-    },
     "node_modules/@eslint-community/eslint-utils": {
       "version": "4.4.0",
       "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
@@ -167,6 +157,7 @@
       "version": "18.19.15",
       "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.15.tgz",
       "integrity": "sha512-AMZ2UWx+woHNfM11PyAEQmfSxi05jm9OlkxczuHeEqmvwPkYj6MWv44gbzDPefYOLysTOFyI3ziiy2ONmUZfpA==",
+      "dev": true,
       "dependencies": {
         "undici-types": "~5.26.4"
       }
     },
@@ -2127,11 +2118,6 @@
       "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
       "dev": true
     },
-    "node_modules/tiktoken": {
-      "version": "1.0.13",
-      "resolved": "https://registry.npmjs.org/tiktoken/-/tiktoken-1.0.13.tgz",
-      "integrity": "sha512-JaL9ZnvTbGFMDIBeGdVkLt4qWTeCPw+n7Ock+wceAGRenuHA6nOOvMJFliNDyXsjg2osGKJWsXtO2xc74VxyDw=="
-    },
     "node_modules/to-regex-range": {
       "version": "5.0.1",
       "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
@@ -2177,7 +2163,8 @@
     "node_modules/undici-types": {
       "version": "5.26.5",
       "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
-      "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA=="
+      "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
+      "dev": true
     },
     "node_modules/uri-js": {
       "version": "4.4.1",
diff --git a/package.json b/package.json
index cae796c..ec4d620 100644
--- a/package.json
+++ b/package.json
@@ -24,7 +24,6 @@
   "author": "Doku Labs",
   "license": "ISC",
   "dependencies": {
-    "@anthropic-ai/tokenizer": "^0.0.4",
     "stream": "^0.0.2"
   },
   "devDependencies": {
diff --git a/src/anthropic.js b/src/anthropic.js
index 21d2bda..51208ea 100644
--- a/src/anthropic.js
+++ b/src/anthropic.js
@@ -1,5 +1,5 @@
 import {sendData} from './helpers.js';
-import {countTokens} from '@anthropic-ai/tokenizer';
+import { Readable } from 'stream';
 
 /**
  * Initializes Anthropic functionality with performance tracking.
@@ -31,33 +31,138 @@ import {countTokens} from '@anthropic-ai/tokenizer';
  * }
  */
 export default function initAnthropic({ llm, dokuUrl, apiKey, environment, applicationName, skipResp }) {
-  const originalCompletionsCreate = llm.completions.create;
+  const originalMessagesCreate = llm.messages.create;
 
   // Define wrapped method
-  llm.completions.create = async function(params) {
-    const start = performance.now();
-    const response = await originalCompletionsCreate.apply(this, params);
-    const end = performance.now();
-    const duration = (end - start) / 1000;
-
-    const data = {
-      llmReqId: response.id,
-      environment: environment,
-      applicationName: applicationName,
-      sourceLanguage: 'Javascript',
-      endpoint: 'anthropic.completions',
-      skipResp: skipResp,
-      completionTokens: countTokens(response.completion),
-      promptTokens: countTokens(prompt),
-      requestDuration: duration,
-      model: params.model,
-      prompt: params.prompt,
-      finishReason: response.stop_reason,
-      response: response.completion,
-    };
-
-    await sendData(data, dokuUrl, apiKey);
-
-    return response;
+  llm.messages.create = async function(params) {
+    let streaming = params.stream || false;
+    if (streaming) {
+      // Call original method
+      const start = performance.now();
+      const originalResponseStream = await originalMessagesCreate.call(this, params);
+
+      // Create a pass-through stream
+      const passThroughStream = new Readable({
+        read() {},
+        objectMode: true // Set to true because the chunks are objects
+      });
+
+      let dataResponse = '';
+      var responseId = '';
+      var promptTokens = 0;
+      var completionTokens = 0;
+
+      // Immediately-invoked async function to handle streaming
+      (async () => {
+        for await (const chunk of originalResponseStream) {
+          if (chunk.type === 'message_start') {
+            responseId = chunk.message.id;
+            promptTokens = chunk.message.usage.input_tokens;
+            passThroughStream.push(chunk); // Push chunk to the pass-through stream
+          }
+          else if (chunk.type === 'content_block_delta') {
+            dataResponse += chunk.delta.text;
+            passThroughStream.push(chunk); // Push chunk to the pass-through stream
+          }
+          else if (chunk.type === 'message_delta') {
+            completionTokens = chunk.usage.output_tokens;
+            passThroughStream.push(chunk); // Push chunk to the pass-through stream
+          }
+        }
+        passThroughStream.push(null); // Signal end of the pass-through stream
+
+        // Process response data after stream has ended
+        const end = performance.now();
+        const duration = (end - start) / 1000;
+
+        let formattedMessages = [];
+        for (let message of params.messages) {
+          let role = message.role;
+          let content = message.content;
+
+          if (Array.isArray(content)) {
+            let contentStr = content.map(item => {
+              if (item.type) {
+                return `${item.type}: ${item.text || item.image_url}`;
+              } else {
+                return `text: ${item.text}`;
+              }
+            }).join(", ");
+            formattedMessages.push(`${role}: ${contentStr}`);
+          } else {
+            formattedMessages.push(`${role}: ${content}`);
+          }
+        }
+        let prompt = formattedMessages.join("\n");
+
+        // Prepare the data object for Doku
+        const data = {
+          llmReqId: responseId,
+          environment: environment,
+          applicationName: applicationName,
+          sourceLanguage: 'Javascript',
+          endpoint: 'anthropic.messages',
+          skipResp: skipResp,
+          requestDuration: duration,
+          model: params.model,
+          prompt: prompt,
+          response: dataResponse,
+          promptTokens: promptTokens,
+          completionTokens: completionTokens,
+        };
+        data.totalTokens = data.promptTokens + data.completionTokens;
+
+        await sendData(data, dokuUrl, apiKey);
+      })();
+
+      // Return the pass-through stream to the original caller
+      return passThroughStream;
+    }
+    else {
+      const start = performance.now();
+      const response = await originalMessagesCreate.call(this, params);
+      const end = performance.now();
+      const duration = (end - start) / 1000;
+      let formattedMessages = [];
+      for (let message of params.messages) {
+        let role = message.role;
+        let content = message.content;
+
+        if (Array.isArray(content)) {
+          let contentStr = content.map(item => {
+            if (item.type) {
+              return `${item.type}: ${item.text || item.image_url}`;
+            } else {
+              return `text: ${item.text}`;
+            }
+          }).join(", ");
+          formattedMessages.push(`${role}: ${contentStr}`);
+        } else {
+          formattedMessages.push(`${role}: ${content}`);
+        }
+      }
+      let prompt = formattedMessages.join("\n");
+
+      const data = {
+        llmReqId: response.id,
+        environment: environment,
+        applicationName: applicationName,
+        sourceLanguage: 'Javascript',
+        endpoint: 'anthropic.messages',
+        skipResp: skipResp,
+        completionTokens: response.usage.output_tokens,
+        promptTokens: response.usage.input_tokens,
+        requestDuration: duration,
+        model: params.model,
+        prompt: prompt,
+        finishReason: response.stop_reason,
+        response: response.content[0].text,
+      };
+      data.totalTokens = data.promptTokens + data.completionTokens;
+
+      await sendData(data, dokuUrl, apiKey);
+
+      return response;
+    }
   };
 }
diff --git a/src/index.js b/src/index.js
index 7836047..e8caeba 100644
--- a/src/index.js
+++ b/src/index.js
@@ -56,7 +56,7 @@ function init({ llm, dokuUrl, apiKey, environment="default", applicationName="de
     initOpenAI({ llm, dokuUrl, apiKey, environment, applicationName, skipResp });
   } else if (llm.generate && typeof llm.rerank === 'function') {
     initCohere({ llm, dokuUrl, apiKey, environment, applicationName, skipResp });
-  } else if (typeof llm.summarize=== 'function') {
+  } else if (llm.messages && typeof llm.messages.create === 'function') {
     initAnthropic({ llm, dokuUrl, apiKey, environment, applicationName, skipResp });
   }
 }
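For context on the streaming branch above: the wrapper hands the caller a Readable in object mode and forwards the SDK's message_start, content_block_delta, and message_delta events through it, so an ordinary consumer loop keeps working while usage data is sent to Doku after the stream ends. A minimal usage sketch follows (illustrative only, not part of this patch; it assumes the package's default export is consumed as in the test added in the next patch, reuses the same environment variables and model name, and runs as an ES module so top-level await is available):

```js
import Anthropic from '@anthropic-ai/sdk';
import DokuMetry from '../src/index.js';

const anthropic = new Anthropic({apiKey: process.env.ANTHROPIC_API_TOKEN});

// Wrap anthropic.messages.create so every call also reports usage to Doku.
DokuMetry.init({
  llm: anthropic,
  dokuUrl: process.env.DOKU_URL,
  apiKey: process.env.DOKU_TOKEN,
  environment: 'dokumetry-testing',
  applicationName: 'dokumetry-node-test',
  skipResp: false,
});

// With stream: true the wrapper returns the pass-through Readable;
// the caller iterates it exactly like the SDK's own stream.
const stream = await anthropic.messages.create({
  model: 'claude-3-opus-20240229',
  max_tokens: 1024,
  stream: true,
  messages: [{role: 'user', content: 'Hello, Doku!'}],
});

for await (const chunk of stream) {
  if (chunk.type === 'content_block_delta') {
    process.stdout.write(chunk.delta.text);
  }
}
```

The telemetry payload (token counts, duration, reconstructed prompt and response) is assembled inside the immediately-invoked async function only after the pass-through stream has ended, so sending it never blocks the caller's loop.
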
From 36b2e5f423bc5738339c09ddc81ac2e7ab3adf76 Mon Sep 17 00:00:00 2001
From: patcher99
Date: Fri, 8 Mar 2024 17:02:01 +0530
Subject: [PATCH 2/3] add testing

---
 package-lock.json        | 18 ++++++++++++++++++
 package.json             |  1 +
 tests/anthropic.test.mjs | 20 ++++++++++++++++++++
 3 files changed, 39 insertions(+)
 create mode 100644 tests/anthropic.test.mjs

diff --git a/package-lock.json b/package-lock.json
index ce9d437..04591ac 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -12,6 +12,7 @@
         "stream": "^0.0.2"
       },
       "devDependencies": {
+        "@anthropic-ai/sdk": "^0.17.1",
         "chai": "^5.0.3",
         "cohere-ai": "^7.7.3",
         "eslint": "^8.56.0",
@@ -30,6 +30,23 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/@anthropic-ai/sdk": {
+      "version": "0.17.1",
+      "resolved": "https://registry.npmjs.org/@anthropic-ai/sdk/-/sdk-0.17.1.tgz",
+      "integrity": "sha512-ke/JGfaa4sc1PB58L4B9hXI/BlJphXc696+cVX8Z8gQt51l++a9umZTN/7UymV8Dcat6KKYNQE8P8yeeyAldHg==",
+      "dev": true,
+      "dependencies": {
+        "@types/node": "^18.11.18",
+        "@types/node-fetch": "^2.6.4",
+        "abort-controller": "^3.0.0",
+        "agentkeepalive": "^4.2.1",
+        "digest-fetch": "^1.3.0",
+        "form-data-encoder": "1.7.2",
+        "formdata-node": "^4.3.2",
+        "node-fetch": "^2.6.7",
+        "web-streams-polyfill": "^3.2.1"
+      }
+    },
     "node_modules/@eslint-community/eslint-utils": {
       "version": "4.4.0",
       "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.4.0.tgz",
diff --git a/package.json b/package.json
index ec4d620..ff7a116 100644
--- a/package.json
+++ b/package.json
@@ -27,6 +27,7 @@
     "stream": "^0.0.2"
   },
   "devDependencies": {
+    "@anthropic-ai/sdk": "^0.17.1",
     "chai": "^5.0.3",
     "cohere-ai": "^7.7.3",
     "eslint": "^8.56.0",
diff --git a/tests/anthropic.test.mjs b/tests/anthropic.test.mjs
new file mode 100644
index 0000000..df83287
--- /dev/null
+++ b/tests/anthropic.test.mjs
@@ -0,0 +1,20 @@
+import Anthropic from '@anthropic-ai/sdk';
+import {expect} from 'chai';
+import DokuMetry from '../src/index.js';
+
+describe('Anthropic Test', () => {
+  const anthropic = new Anthropic({
+    apiKey: process.env.ANTHROPIC_API_TOKEN,
+  });
+
+  it('should return a response with type as "message"', async () => {
+    DokuMetry.init({llm: anthropic, dokuUrl: process.env.DOKU_URL, apiKey: process.env.DOKU_TOKEN, environment: "dokumetry-testing", applicationName: "dokumetry-node-test", skipResp: false});
+    const message = await anthropic.messages.create({
+      model: "claude-3-opus-20240229",
+      max_tokens: 1024,
+      messages: [{ role: "user", content: "Hello, Doku!" }],
+    });
+
+    expect(message.type).to.equal('message');
+  }).timeout(10000);
+});
\ No newline at end of file

From cf8dd1bb678d546746a64fc61f1b974bcc0e7815 Mon Sep 17 00:00:00 2001
From: patcher99
Date: Fri, 8 Mar 2024 17:17:11 +0530
Subject: [PATCH 3/3] update anthropic token

---
 .github/workflows/tests.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 00aaa34..4729927 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -12,6 +12,7 @@ on:
 env:
   OPENAI_API_KEY: ${{ secrets.OPENAI_API_TOKEN }}
   COHERE_API_TOKEN: ${{ secrets.COHERE_API_TOKEN }}
+  ANTHROPIC_API_TOKEN: ${{ secrets.ANTHROPIC_API_TOKEN }}
   DOKU_URL: ${{ secrets.DOKU_URL }}
   DOKU_TOKEN: ${{ secrets.DOKU_TOKEN }}