From a81d7349cb546746b1eb116bad4676bd7c13043d Mon Sep 17 00:00:00 2001
From: Vyacheslav Matyukhin
Date: Tue, 19 Nov 2024 16:29:58 -0300
Subject: [PATCH] fix runTimeMs in workflow.getFinalResult

---
 packages/ai/src/LLMStepInstance.ts    | 27 ++++++++++++++++-----------
 packages/ai/src/workflows/Workflow.ts | 18 ++++++++++++++++--
 2 files changed, 32 insertions(+), 13 deletions(-)

diff --git a/packages/ai/src/LLMStepInstance.ts b/packages/ai/src/LLMStepInstance.ts
index c638460f5f..938914bd8c 100644
--- a/packages/ai/src/LLMStepInstance.ts
+++ b/packages/ai/src/LLMStepInstance.ts
@@ -57,7 +57,10 @@ export class LLMStepInstance<
 
   public readonly inputs: StepParams["inputs"];
 
-  private startTime: StepParams["startTime"];
+  // This is the moment step was _created_, not when it was run.
+  // But it shouldn't matter, for now; workflows run steps immediately.
+  public startTime: StepParams["startTime"];
+
   private conversationMessages: StepParams["conversationMessages"];
 
   public llmMetricsList: StepParams["llmMetricsList"];
@@ -117,25 +120,36 @@ export class LLMStepInstance<
     return this.conversationMessages;
   }
 
-  async _run() {
+  // Runs the PENDING step; its state will be updated to DONE or FAILED.
+  async run() {
     if (this._state.kind !== "PENDING") {
       return;
     }
 
+    this.log({
+      type: "info",
+      message: `Step "${this.template.name}" started`,
+    });
+
     const limits = this.workflow.checkResourceLimits();
     if (limits) {
      this.fail("CRITICAL", limits);
       return;
     }
 
+    // Prepare the execution context, which will be passed to the step implementation.
     const executeContext: ExecuteContext = {
       log: (log) => this.log(log),
       queryLLM: (promptPair) => this.queryLLM(promptPair),
       fail: (errorType, message) => {
+        // `context.fail` throws instead of proxying to `this.fail`. This allows
+        // us to simplify the return signature of step implementations -
+        // `context.fail` is `never`, so we don't need to return anything.
         throw new FailError(errorType, message);
       },
     };
 
+    // Run the step implementation; catch all errors and turn them into FAILED states.
     try {
       const result = await this.template.execute(executeContext, this.inputs);
 
@@ -174,15 +188,6 @@
         );
       }
     }
-  }
-
-  async run() {
-    this.log({
-      type: "info",
-      message: `Step "${this.template.name}" started`,
-    });
-
-    await this._run();
 
     const completionMessage = `Step "${this.template.name}" completed with status: ${this._state.kind}${
       this._state.kind !== "PENDING" && `, in ${this._state.durationMs / 1000}s`
diff --git a/packages/ai/src/workflows/Workflow.ts b/packages/ai/src/workflows/Workflow.ts
index db30ac33d3..402b75e6c7 100644
--- a/packages/ai/src/workflows/Workflow.ts
+++ b/packages/ai/src/workflows/Workflow.ts
@@ -114,6 +114,8 @@ export class Workflow {
   public readonly inputs: Inputs;
 
   public llmConfig: LlmConfig;
+  // This field is somewhat broken - it's set to `Date.now()`, even when the workflow was deserialized from the database.
+  // It's better to use `steps[0].startTime` as the start time, if you're sure that the workflow has already started and so it has at least one step.
   public startTime: number;
 
   private steps: LLMStepInstance[];
@@ -322,8 +324,20 @@ export class Workflow {
 
     const isValid = finalStep.step.getState().kind === "DONE";
 
-    const endTime = Date.now();
-    const runTimeMs = endTime - this.startTime;
+    const lastStep = this.steps.at(-1);
+    if (!lastStep) {
+      throw new Error("No steps found");
+    }
+
+    const lastStepState = finalStep.step.getState();
+    if (lastStepState.kind === "PENDING") {
+      throw new Error("Last step is still pending");
+    }
+
+    const startTime = this.steps[0].startTime;
+    const endTime = lastStep.startTime + lastStepState.durationMs;
+    const runTimeMs = endTime - startTime;
+
     const { totalPrice, llmRunCount } = this.getLlmMetrics();
     const logSummary = generateSummary(this);
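
The gist of the fix: `getFinalResult` no longer computes the run time as `Date.now() - this.startTime` (which is wrong for workflows deserialized from the database) but derives it from the steps themselves. Below is a minimal standalone sketch of that logic, using simplified stand-in `Step`/`StepState` shapes for illustration rather than the real types from `packages/ai`:

```typescript
// Simplified stand-ins for illustration only; the real step types live in packages/ai.
type StepState =
  | { kind: "PENDING" }
  | { kind: "DONE"; durationMs: number }
  | { kind: "FAILED"; durationMs: number };

type Step = {
  startTime: number; // set when the step is created (currently also when it starts running)
  state: StepState;
};

// Reconstruct the end time from the last step's startTime + durationMs, so the
// result stays correct even when computed long after the workflow finished.
function workflowRunTimeMs(steps: Step[]): number {
  const firstStep = steps.at(0);
  const lastStep = steps.at(-1);
  if (!firstStep || !lastStep) {
    throw new Error("No steps found");
  }
  const lastStepState = lastStep.state;
  if (lastStepState.kind === "PENDING") {
    throw new Error("Last step is still pending");
  }
  const endTime = lastStep.startTime + lastStepState.durationMs;
  return endTime - firstStep.startTime;
}

// Example: step 1 runs 0ms..2000ms, step 2 runs 3000ms..6000ms => 6000ms total.
console.log(
  workflowRunTimeMs([
    { startTime: 0, state: { kind: "DONE", durationMs: 2000 } },
    { startTime: 3000, state: { kind: "DONE", durationMs: 3000 } },
  ])
); // 6000
```

Note that any idle time between steps still counts toward the run time, matching how the old `Date.now()`-based measurement behaved for live workflows.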
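The `context.fail` comment in `LLMStepInstance.run` refers to a standard TypeScript pattern: a callback typed as returning `never` that throws, so a step implementation can abort from any branch without also having to return a value. A rough sketch of that pattern, with simplified stand-in types rather than the actual `ExecuteContext` definition:

```typescript
// Simplified stand-in for the runner's error class.
class FailError extends Error {
  constructor(
    public errorType: "CRITICAL" | "MINOR",
    message: string
  ) {
    super(message);
  }
}

type ExecuteContextSketch = {
  // Declared as `never`: the compiler treats a call as the end of the branch,
  // so implementations don't need a `return` after calling it.
  fail: (errorType: "CRITICAL" | "MINOR", message: string) => never;
};

const context: ExecuteContextSketch = {
  // An arrow function whose body only throws infers `never`, satisfying the signature.
  fail: (errorType, message) => {
    throw new FailError(errorType, message);
  },
};

// A hypothetical step implementation using the context.
async function exampleStep(inputs: { code: string }): Promise<{ ok: true }> {
  if (!inputs.code) {
    context.fail("MINOR", "Empty code input"); // throws; no return needed here
  }
  return { ok: true };
}
```

The runner's `try`/`catch` (see `LLMStepInstance.run` above) then catches the thrown `FailError` and converts it into a `FAILED` step state.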