Merge pull request #3412 from quantified-uncertainty/ai-log-improvements

Ai Log Improvements

OAGr authored Oct 9, 2024
2 parents 25091dd + 19acf40 commit 8e49796

Showing 7 changed files with 110 additions and 60 deletions.

30 changes: 25 additions & 5 deletions packages/ai/src/LLMClient.ts

@@ -4,6 +4,8 @@ import OpenAI from "openai";
 import { LlmId, MODEL_CONFIGS } from "./modelConfigs.js";
 import { squiggleSystemContent } from "./prompts.js";
 
+const TIMEOUT_MS = 60000;
+
 export type Message = {
   role: "system" | "user" | "assistant";
   content: string;
@@ -175,6 +177,14 @@ export class LLMClient {
     }
 
     try {
+      const timeoutPromise = new Promise((_, reject) =>
+        setTimeout(
+          () =>
+            reject(new Error(`API call timed out after ${TIMEOUT_MS / 1000}s`)),
+          TIMEOUT_MS
+        )
+      );
+
       if (selectedModelConfig.provider === "anthropic") {
         const anthropicClient = this.getAnthropicClient();
         const compressedMessages =
@@ -185,8 +195,8 @@
           throw new Error("At least one message is required");
         }
 
-        const completion =
-          await anthropicClient.beta.promptCaching.messages.create({
+        const completionPromise =
+          anthropicClient.beta.promptCaching.messages.create({
             max_tokens: selectedModelConfig.maxTokens,
             messages: claudeMessages,
             model: selectedModelConfig.model,
@@ -199,7 +209,11 @@
           ],
         });
 
-        return convertClaudeToStandardFormat(completion);
+        const completion = await Promise.race([
+          completionPromise,
+          timeoutPromise,
+        ]);
+        return convertClaudeToStandardFormat(completion as Anthropic.Message);
       } else {
         const openaiClient = this.getOpenAIClient();
         const messages = selectedModelConfig.allowsSystemPrompt
@@ -215,15 +229,21 @@
             { role: "assistant", content: "Okay." },
             ...conversationHistory,
           ];
-        const completion = await openaiClient.chat.completions.create({
+        const completionPromise = openaiClient.chat.completions.create({
           model: selectedModelConfig.model,
           messages: messages.map((msg) => ({
             role: msg.role as "system" | "user" | "assistant",
             content: msg.content,
           })),
         });
 
-        return convertOpenAIToStandardFormat(completion);
+        const completion = await Promise.race([
+          completionPromise,
+          timeoutPromise,
+        ]);
+        return convertOpenAIToStandardFormat(
+          completion as OpenAI.Chat.Completions.ChatCompletion
+        );
       }
     } catch (error) {
       console.error("Error in API call:", error);

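The substantive change above is that both provider calls now race against a shared timeout. A minimal sketch of the same pattern (the `withTimeout` helper is illustrative, not part of this commit): `Promise.race` settles with whichever promise finishes first, and the losing API call is merely ignored, not aborted, so the underlying HTTP request keeps running in the background.

const TIMEOUT_MS = 60000;

// Illustrative helper generalizing the inline pattern from the diff.
function withTimeout<T>(promise: Promise<T>, ms: number): Promise<T> {
  const timeout = new Promise<never>((_, reject) =>
    setTimeout(
      () => reject(new Error(`API call timed out after ${ms / 1000}s`)),
      ms
    )
  );
  // Settles with the first promise to resolve or reject; the loser is ignored.
  return Promise.race([promise, timeout]);
}

// Usage sketch:
// const completion = await withTimeout(completionPromise, TIMEOUT_MS);
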
22 changes: 20 additions & 2 deletions packages/ai/src/LLMStep.ts

@@ -97,7 +97,7 @@ export class LLMStepInstance<const Shape extends StepShape = StepShape> {
   ) {
     this.startTime = Date.now();
     this.id = crypto.randomUUID();
-    this.logger = new Logger();
+    this.logger = new Logger(this.workflow.id, this.workflow.getStepCount());
     this.inputs = inputs;
   }
 
@@ -113,7 +113,7 @@
     return this.conversationMessages;
   }
 
-  async run() {
+  async _run() {
     if (this.state.kind !== "PENDING") {
       return;
     }
@@ -148,6 +148,24 @@ export class LLMStepInstance<const Shape extends StepShape = StepShape> {
     }
   }
 
+  async run() {
+    this.log({
+      type: "info",
+      message: `Step "${this.template.name}" started`,
+    });
+
+    await this._run();
+
+    const completionMessage = `Step "${this.template.name}" completed with status: ${this.state.kind}${
+      this.state.kind !== "PENDING" && `, in ${this.state.durationMs / 1000}s`
+    }`;
+
+    this.log({
+      type: "info",
+      message: completionMessage,
+    });
+  }
+
   getState() {
     return this.state;
   }

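The renaming above is a template-method-style split: the state machine stays in the now-internal `_run()`, while the public `run()` brackets it with uniform start/finish log entries. A stripped-down sketch of that shape (types simplified, not the real class):

class StepSketch {
  private state: { kind: "PENDING" } | { kind: "DONE"; durationMs: number } = {
    kind: "PENDING",
  };

  // Core transition logic, formerly the public entry point.
  private async _run(): Promise<void> {
    const start = Date.now();
    // ... do the actual work ...
    this.state = { kind: "DONE", durationMs: Date.now() - start };
  }

  // Public entry point: same work, plus uniform start/finish logging.
  async run(): Promise<void> {
    console.log("Step started");
    await this._run();
    console.log(`Step completed with status: ${this.state.kind}`);
  }
}
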
93 changes: 58 additions & 35 deletions packages/ai/src/Logger.ts

@@ -6,7 +6,6 @@ export type LogEntry =
   | InfoLogEntry
   | WarnLogEntry
   | ErrorLogEntry
-  | CodeRunErrorLogEntry
   | SuccessLogEntry
   | HighlightLogEntry
   | LlmResponseLogEntry;
@@ -19,8 +18,6 @@ export function getLogEntryFullName(entry: LogEntry): string {
       return "⚠️ Warning";
     case "error":
       return "🚫 System Error";
-    case "codeRunError":
-      return "❌ Code Run Error";
     case "success":
       return "✅ Success";
     case "highlight":
@@ -32,32 +29,58 @@
   }
 }
 
-function displayLog(log: LogEntry): void {
-  switch (log.type) {
-    case "info":
-      console.log(chalk.blue(`[INFO] ${log.message}`));
-      break;
-    case "warn":
-      console.warn(chalk.yellow(`[WARN] ${log.message}`));
-      break;
-    case "error":
-      console.error(chalk.red(`[ERROR] ${log.message}`));
-      break;
-    case "codeRunError":
-      console.error(chalk.red.bold(`[CODE_RUN_ERROR] ${log.error}`));
-      break;
-    case "success":
-      console.log(chalk.green(`[SUCCESS] ${log.message}`));
-      break;
-    case "highlight":
-      console.log(chalk.magenta(`[HIGHLIGHT] ${log.message}`));
-      break;
-    case "llmResponse":
-      console.log(chalk.cyan(`[LLM_RESPONSE] ${log.content}`));
-      break;
-      break;
-    default:
-      throw log satisfies never;
+const logTypes = {
+  info: { fn: console.log, color: chalk.white, maxLines: Infinity },
+  warn: { fn: console.warn, color: chalk.yellow, maxLines: Infinity },
+  error: { fn: console.error, color: chalk.red, maxLines: 5 },
+  success: { fn: console.log, color: chalk.green, maxLines: Infinity },
+  highlight: { fn: console.log, color: chalk.magenta, maxLines: Infinity },
+  llmResponse: { fn: console.log, color: chalk.cyan, maxLines: 3 },
+};
+
+function displayLog(
+  log: LogEntry,
+  workflowId: string,
+  stepIndex: number
+): void {
+  const prefix = chalk.gray(`[workflow:${workflowId}, step:${stepIndex}]`);
+  const indent = "  "; // Two spaces for indentation
+
+  function formatMessage(message: string, maxLines: number = Infinity): string {
+    const lines = message.split("\n");
+    const truncated = lines.slice(0, maxLines);
+    if (lines.length > maxLines) {
+      truncated.push("...");
+    }
+    return truncated
+      .map((line, index) => (index === 0 ? line : indent + line))
+      .join("\n");
+  }
+
+  function logWithColor(
+    logFn: typeof console.log,
+    type: string,
+    color: (text: string) => string,
+    message: string,
+    maxLines: number = Infinity
+  ) {
+    logFn(
+      `${prefix} ${chalk.gray(`[${color(type)}] ${formatMessage(message, maxLines)}`)}`
+    );
+  }
+
+  const logType = logTypes[log.type as keyof typeof logTypes];
+  if (logType) {
+    const message = "message" in log ? log.message : log.content;
+    logWithColor(
+      logType.fn,
+      log.type.toUpperCase(),
+      logType.color,
+      message,
+      logType.maxLines
+    );
+  } else {
+    console.log(`${prefix} Unknown log type: ${log.type}`);
   }
 }
 
@@ -81,11 +104,6 @@ type ErrorLogEntry = {
   message: string;
 };
 
-type CodeRunErrorLogEntry = {
-  type: "codeRunError";
-  error: string;
-};
-
 type SuccessLogEntry = {
   type: "success";
   message: string;
@@ -107,8 +125,13 @@ type LlmResponseLogEntry = {
 export class Logger {
   logs: TimestampedLogEntry[] = [];
 
+  constructor(
+    private workflowId: string,
+    private stepIndex: number
+  ) {}
+
   log(log: LogEntry): void {
     this.logs.push({ timestamp: new Date(), entry: log });
-    displayLog(log);
+    displayLog(log, this.workflowId, this.stepIndex);
   }
 }

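Every console line is now prefixed with its workflow and step, and noisy entry types are truncated per the `logTypes` table (errors to 5 lines, raw LLM responses to 3). A hypothetical call against the new constructor, with chalk's colors stripped, would print roughly:

const logger = new Logger("wf-123", 2);
logger.log({ type: "info", message: "Generating Squiggle code" });
// Console output (colors omitted):
// [workflow:wf-123, step:2] [INFO] Generating Squiggle code
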
10 changes: 1 addition & 9 deletions packages/ai/src/generateSummary.ts

@@ -53,18 +53,12 @@ function generateErrorSummary(workflow: Workflow): string {
   let errorSummary = "";
 
   steps.forEach((step, index) => {
-    const errors = step
-      .getLogs()
-      .filter(
-        (log) => log.entry.type === "error" || log.entry.type === "codeRunError"
-      );
+    const errors = step.getLogs().filter((log) => log.entry.type === "error");
     if (errors.length > 0) {
       errorSummary += `### ❌ Step ${index + 1} (${step.template.name})\n`;
       errors.forEach((error) => {
         if (error.entry.type === "error") {
           errorSummary += `- 🔴 ${error.entry.message}\n`;
-        } else if (error.entry.type === "codeRunError") {
-          errorSummary += `- 🔴 ${error.entry.error}\n`;
         }
       });
     }
@@ -119,8 +113,6 @@ function getFullMessage(log: TimestampedLogEntry): string {
     case "success":
     case "highlight":
       return log.entry.message;
-    case "codeRunError":
-      return log.entry.error;
     case "llmResponse": {
       const llmResponse = log.entry;
       return `<details>

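Dropping `codeRunError` from the `LogEntry` union does more than shorten this filter: because the `type` fields are literal types, comparisons against the removed variant stop compiling, so stale branches surface as type errors rather than silent dead code. A minimal illustration with hypothetical types:

type Entry =
  | { type: "error"; message: string }
  | { type: "info"; message: string };

declare const entry: Entry;

// Still fine: "error" remains a member of the union,
// and the check narrows entry to the "error" variant.
if (entry.type === "error") {
  console.log(entry.message);
}

// Would no longer compile: "codeRunError" has no overlap with Entry["type"],
// so TypeScript rejects the comparison outright.
// if (entry.type === "codeRunError") { ... }
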
4 changes: 0 additions & 4 deletions packages/ai/src/steps/fixCodeUntilItRunsStep.ts

@@ -82,10 +82,6 @@ export const fixCodeUntilItRunsStep = new LLMStepTemplate(
       context.setOutput("code", newCodeResult.value);
     } else {
       context.fail("MINOR", newCodeResult.value);
-      context.log({
-        type: "codeRunError",
-        error: newCodeResult.value,
-      });
     }
   }

4 changes: 2 additions & 2 deletions packages/ai/src/steps/generateCodeStep.ts

@@ -52,8 +52,8 @@ export const generateCodeStep = new LLMStepTemplate(
       context.setOutput("code", state.value);
     } else {
       context.log({
-        type: "codeRunError",
-        error: state.value,
+        type: "error",
+        message: state.value,
       });
     }
   }

7 changes: 4 additions & 3 deletions packages/ai/src/workflows/Workflow.ts

@@ -1,5 +1,3 @@
-import chalk from "chalk";
-
 import { generateSummary } from "../generateSummary.js";
 import {
   calculatePriceMultipleCalls,
@@ -172,7 +170,6 @@ export class Workflow {
     });
     await step.run();
 
-    console.log(chalk.cyan(`Finishing step ${step.template.name}`));
     this.dispatchEvent({
       type: "stepFinished",
       payload: { step },
@@ -202,6 +199,10 @@
     return this.steps;
   }
 
+  getStepCount(): number {
+    return this.steps.length;
+  }
+
   private getCurrentStep(): LLMStepInstance | undefined {
     return this.steps.at(-1);
   }

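`getStepCount()` exists so that each step can stamp its logger with its own position at construction time (see the `new Logger(this.workflow.id, this.workflow.getStepCount())` call in LLMStep.ts above). Assuming the workflow appends a step to `this.steps` only after constructing it (an ordering this diff does not show), the captured value is the step's zero-based index:

// Sketch of the assumed wiring (simplified, hypothetical names):
class WorkflowSketch {
  private steps: { stepIndex: number }[] = [];

  getStepCount(): number {
    return this.steps.length;
  }

  addStep(): void {
    // Before the push, steps.length equals the new step's zero-based index;
    // this is the value the step's Logger would capture in its constructor.
    const stepIndex = this.getStepCount();
    this.steps.push({ stepIndex });
  }
}
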
