Next.js 15 for Hub #3452

Merged
merged 18 commits on Nov 30, 2024
Changes from all commits
3 changes: 3 additions & 0 deletions package.json
@@ -18,6 +18,9 @@
"pnpm": {
"overrides": {
"@headlessui/react": "^2.2.0"
},
"patchedDependencies": {
"react-select": "patches/react-select.patch"
}
},
"version": "0.0.1-0",
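Reviewer note: the `pnpm.patchedDependencies` entry tells pnpm to apply `patches/react-select.patch` to `react-select` at install time, so the fix ships with the repository rather than a fork. (This describes standard pnpm behavior; the patch file itself is not shown in this diff.)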
253 changes: 0 additions & 253 deletions packages/ai/src/LLMClient.ts

This file was deleted.

97 changes: 97 additions & 0 deletions packages/ai/src/LLMClient/AnthropicProvider.ts
@@ -0,0 +1,97 @@
import Anthropic from "@anthropic-ai/sdk";

import { MODEL_CONFIGS } from "../modelConfigs.js";
import { squiggleSystemPrompt } from "../prompts.js";
import { Message, StandardizedChatCompletion } from "./types.js";

// Drop system messages (the system prompt is passed via the top-level `system`
// param below) and map the rest to Anthropic's message shape.
function convertToClaudeMessages(history: Message[]): Anthropic.MessageParam[] {
return history
.filter((msg) => msg.role !== "system")
.map((msg) => ({
role: msg.role as Exclude<Message["role"], "system">,
content: msg.content,
}));
}

// Merge runs of consecutive assistant messages into one, since the Anthropic
// Messages API expects user/assistant roles to alternate.
function compressAssistantMessages(messages: Message[]): Message[] {
return messages.reduce((acc, current, index, array) => {
if (current.role !== "assistant") {
acc.push(current);
} else if (index === 0 || array[index - 1].role !== "assistant") {
// Copy the message so the merge below doesn't mutate the caller's history.
acc.push({ ...current });
} else {
acc[acc.length - 1].content += "\n\n" + current.content;
}
return acc;
}, [] as Message[]);
}

// Keep only text blocks; tool-use and other block types are dropped.
function extractTextContent(content: Anthropic.ContentBlock[]): string {
return content
.filter(
(block): block is Extract<Anthropic.ContentBlock, { type: "text" }> =>
block.type === "text"
)
.map((block) => block.text)
.join("\n");
}

// Normalize Anthropic's response into the provider-agnostic
// StandardizedChatCompletion shape shared by all providers.
function convertClaudeToStandardFormat(
claudeResponse: Anthropic.Message
): StandardizedChatCompletion {
return {
id: claudeResponse.id,
object: "chat.completion",
created: Date.now(),
model: claudeResponse.model,
content: extractTextContent(claudeResponse.content),
role: "assistant",
finish_reason: claudeResponse.stop_reason || null,
usage: {
prompt_tokens: claudeResponse.usage.input_tokens,
completion_tokens: claudeResponse.usage.output_tokens,
total_tokens:
claudeResponse.usage.input_tokens + claudeResponse.usage.output_tokens,
},
};
}

export class AnthropicProvider {
client: Anthropic;

constructor(
apiKey: string,
private modelConfig: Extract<
(typeof MODEL_CONFIGS)[number],
{ provider: "anthropic" }
>
) {
this.client = new Anthropic({
apiKey,
});
}

async run(conversationHistory: Message[]) {
const compressedMessages = compressAssistantMessages(conversationHistory);
const claudeMessages = convertToClaudeMessages(compressedMessages);

if (claudeMessages.length === 0) {
throw new Error("At least one message is required");
}

// Use the beta prompt-caching endpoint; `cache_control` below marks the
// system prompt as cacheable across calls.
const completion = await this.client.beta.promptCaching.messages.create({
max_tokens: this.modelConfig.maxTokens,
messages: claudeMessages,
model: this.modelConfig.model,
system: [
{
text: squiggleSystemPrompt,
type: "text",
cache_control: { type: "ephemeral" },
},
],
});

return convertClaudeToStandardFormat(completion as Anthropic.Message);
}
}
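
For context, a minimal usage sketch of the new provider. The `find` predicate and the environment variable name are illustrative, not part of this PR; only `MODEL_CONFIGS`, `AnthropicProvider`, and `run` come from the code above.

import { MODEL_CONFIGS } from "../modelConfigs.js";
import { AnthropicProvider } from "./AnthropicProvider.js";

// Pick any Anthropic entry from the shared config list; the type guard mirrors
// the constructor's Extract<...> parameter type.
const config = MODEL_CONFIGS.find(
  (c): c is Extract<(typeof MODEL_CONFIGS)[number], { provider: "anthropic" }> =>
    c.provider === "anthropic"
);
if (!config) throw new Error("No Anthropic model configured");

const provider = new AnthropicProvider(process.env.ANTHROPIC_API_KEY ?? "", config);
const completion = await provider.run([
  { role: "user", content: "Write a Squiggle model." },
]);
console.log(completion.content);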
10 changes: 10 additions & 0 deletions packages/ai/src/LLMClient/LLMError.ts
@@ -0,0 +1,10 @@
// Common error class for OpenAI and Anthropic errors.
export class LLMError extends Error {
constructor(
message: string,
// LLMStepInstance will use `kind` to decide whether the error was critical or minor.
public kind: "timeout" | "balance" | "other"
) {
super(message);
}
}
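
A sketch of how a caller might branch on `kind`. The retry/report policy shown is illustrative; this PR only defines the error class.

import { LLMError } from "./LLMClient/LLMError.js";

try {
  // ... call a provider's run() here ...
} catch (err) {
  if (err instanceof LLMError && err.kind === "timeout") {
    // Minor: a timeout is a candidate for retrying.
  } else if (err instanceof LLMError && err.kind === "balance") {
    // Critical: out of credits; surface to the user instead of retrying.
  } else {
    throw err; // Unknown errors propagate unchanged.
  }
}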