feat: add util to handle chatSync duration calculation
NarwhalChen committed Jan 15, 2025
1 parent 7fde8a2 commit 7f69b6b
Showing 14 changed files with 90 additions and 220 deletions.
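
The helper referenced throughout this diff, chatSyncWithClocker from src/build-system/utils/handler-helper, is not visible in the hunks loaded below. Based on the call sites it replaces, a plausible sketch of the utility is given here; the BuilderContext import path, the string return type, and recording messages[0].content as the prompt are assumptions inferred from the removed code, not confirmed by this commit.

import { BuilderContext } from 'src/build-system/context';
import { BuildMonitor } from 'src/build-system/monitor';
import { MessageInterface } from 'src/common/model-provider/types';

// Sketch of the timing wrapper: runs chatSync and records the elapsed time
// with BuildMonitor, using the same fields the handlers previously passed
// to BuildMonitor.timeRecorder inline.
export async function chatSyncWithClocker(
  context: BuilderContext,
  messages: MessageInterface[],
  model: string,
  step: string,
  id: string,
): Promise<string> {
  const startTime = new Date();
  const response = await context.model.chatSync({ model, messages });
  const duration = new Date().getTime() - startTime.getTime();
  // Assumed: the first message's content is recorded as the prompt.
  BuildMonitor.timeRecorder(duration, id, step, messages[0].content, response);
  return response;
}

Each handler below then collapses its startTime/endTime/timeRecorder boilerplate into a single call to this helper.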
19 changes: 5 additions & 14 deletions backend/src/build-system/handlers/backend/code-generate/index.ts
@@ -10,6 +10,8 @@ import {
removeCodeBlockFences,
} from 'src/build-system/utils/strings';
import { BuildMonitor } from 'src/build-system/monitor';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';

/**
* BackendCodeHandler is responsible for generating the backend codebase
@@ -63,21 +65,10 @@ export class BackendCodeHandler implements BuildHandler<string> {
this.logger.debug('Generated backend code prompt.');

try {
const startTime = new Date();
// Invoke the language model to generate the backend code
const modelResponse = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: backendCodePrompt, role: 'system' }],
});
const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'generateBackendCode',
backendCodePrompt,
modelResponse,
);
const messages: MessageInterface[] = [{ content: backendCodePrompt, role: 'system' }];
const modelResponse = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateBackendCode', this.id);

const generatedCode = formatResponse(modelResponse);

const uuid = context.getGlobalContext('projectUUID');
@@ -7,6 +7,8 @@ import * as path from 'path';
import { prompts } from './prompt';
import { formatResponse } from 'src/build-system/utils/strings';
import { BuildMonitor } from 'src/build-system/monitor';
import { MessageInterface } from 'src/common/model-provider/types';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';

// TODO(Sma1lboy): we need a better way to handle handler pre requirements
/**
@@ -49,21 +51,9 @@ export class BackendFileReviewHandler implements BuildHandler<string> {
backendCode,
);

const startTime = new Date();
const modelResponse = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: filePrompt, role: 'system' }],
});
const messages: MessageInterface[] = [{ content: filePrompt, role: 'system' }];
const modelResponse = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'identifyBackendFilesToModify', this.id);

const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'identifyBackendFilesToModify',
filePrompt,
modelResponse,
);
const filesToModify = this.parseFileIdentificationResponse(modelResponse);
this.logger.log(`Files to modify: ${filesToModify.join(', ')}`);

@@ -85,20 +75,9 @@ export class BackendFileReviewHandler implements BuildHandler<string> {

const startTime = new Date();
// Get modified content
const response = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: modificationPrompt, role: 'system' }],
});
const messages: MessageInterface[] = [{ content: modificationPrompt, role: 'system' }];
const response = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateFileModification', this.id);

const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'generateFileModification',
modificationPrompt,
modelResponse,
);
// Extract new content and write back
const newContent = formatResponse(response);
await fs.writeFile(filePath, newContent, 'utf-8');
@@ -7,6 +7,8 @@ import {
import { Logger } from '@nestjs/common';
import { removeCodeBlockFences } from 'src/build-system/utils/strings';
import { BuildMonitor } from 'src/build-system/monitor';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';

type BackendRequirementResult = {
overview: string;
@@ -55,21 +57,8 @@ export class BackendRequirementHandler

let backendOverview: string;
try {
const startTime = new Date();
backendOverview = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: overviewPrompt, role: 'system' }],
});

const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'generateBackendOverviewPrompt',
overviewPrompt,
backendOverview,
);
const messages: MessageInterface[] = [{ content: overviewPrompt, role: 'system' }];
backendOverview = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateBackendOverviewPrompt', this.id);
} catch (error) {
this.logger.error('Error generating backend overview:', error);
return {
@@ -5,6 +5,8 @@ import { prompts } from './prompt';
import { Logger } from '@nestjs/common';
import { removeCodeBlockFences } from 'src/build-system/utils/strings';
import { BuildMonitor } from 'src/build-system/monitor';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';

export class DatabaseRequirementHandler implements BuildHandler<string> {
readonly id = 'op:DATABASE_REQ';
@@ -20,22 +22,9 @@ export class DatabaseRequirementHandler implements BuildHandler<string> {
projectName,
datamapDoc,
);
const model = ModelProvider.getInstance();

const startTime = new Date();
const dbRequirementsContent = await model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: prompt, role: 'system' }],
});
const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'generateDatabaseRequirementPrompt',
prompt,
dbRequirementsContent,
);
const messages: MessageInterface[] = [{ content: prompt, role: 'system' }];
const dbRequirementsContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateDatabaseRequirementPrompt', this.id);

return {
success: true,
data: removeCodeBlockFences(dbRequirementsContent),
51 changes: 8 additions & 43 deletions backend/src/build-system/handlers/database/schemas/schemas.ts
@@ -13,6 +13,8 @@ import { saveGeneratedCode } from 'src/build-system/utils/files';
import * as path from 'path';
import { formatResponse } from 'src/build-system/utils/strings';
import { BuildMonitor } from 'src/build-system/monitor';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';

/**
* DBSchemaHandler is responsible for generating database schemas based on provided requirements.
@@ -71,20 +73,8 @@ export class DBSchemaHandler implements BuildHandler {

let dbAnalysis: string;
try {
const startTime = new Date();
const analysisResponse = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: analysisPrompt, role: 'system' }],
});
const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'analyzeDatabaseRequirements',
analysisPrompt,
analysisResponse,
);
const messages: MessageInterface[] = [{ content: analysisPrompt, role: 'system' }];
const analysisResponse = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'analyzeDatabaseRequirements', this.id);
dbAnalysis = analysisResponse;
} catch (error) {
this.logger.error('Error during database requirements analysis:', error);
@@ -114,21 +104,8 @@ export class DBSchemaHandler implements BuildHandler {

let schemaContent: string;
try {
const startTime = new Date();
const schemaResponse = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: schemaPrompt, role: 'system' }],
});

const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'generateDatabaseSchema',
schemaPrompt,
schemaResponse,
);
const messages: MessageInterface[] = [{ content: schemaPrompt, role: 'system' }];
const schemaResponse = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateDatabaseSchema', this.id);
schemaContent = formatResponse(schemaResponse);
} catch (error) {
this.logger.error('Error during schema generation:', error);
@@ -148,20 +125,8 @@ export class DBSchemaHandler implements BuildHandler {

let validationResponse: string;
try {
const startTime = new Date();
const validationResult = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: validationPrompt, role: 'system' }],
});
const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'validateDatabaseSchema',
validationPrompt,
validationResult,
);
const messages: MessageInterface[] = [{ content: validationPrompt, role: 'system' }];
const validationResult = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'validateDatabaseSchema', this.id);
validationResponse = formatResponse(validationResult);
} catch (error) {
this.logger.error('Error during schema validation:', error);
19 changes: 5 additions & 14 deletions backend/src/build-system/handlers/file-manager/file-arch/index.ts
@@ -8,6 +8,8 @@ import {
parseGenerateTag,
} from 'src/build-system/utils/strings';
import { BuildMonitor } from 'src/build-system/monitor';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';

export class FileArchGenerateHandler implements BuildHandler<string> {
readonly id = 'op:FILE:ARCH';
@@ -57,20 +59,9 @@ export class FileArchGenerateHandler implements BuildHandler<string> {
};
}
try {
const startTime = new Date();
fileArchContent = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: prompt, role: 'system' }],
});
const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'generateFileArch',
prompt,
fileArchContent,
);
const messages: MessageInterface[] = [{ content: prompt, role: 'system' }];
fileArchContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateFileArch', this.id);

const tagContent = parseGenerateTag(fileArchContent);
jsonData = extractJsonFromText(tagContent);

@@ -9,6 +9,8 @@ import { prompts } from './prompt';
import { Logger } from '@nestjs/common';
import { removeCodeBlockFences } from 'src/build-system/utils/strings';
import { BuildMonitor } from 'src/build-system/monitor';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';

/**
* FileStructureHandler is responsible for generating the project's file and folder structure
@@ -85,21 +87,8 @@ export class FileStructureHandler implements BuildHandler<FileStructOutput> {
let fileStructureContent: string;
try {
// Invoke the language model to generate the file structure content

const startTime = new Date();
fileStructureContent = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: prompt, role: 'system' }],
});
const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'generateCommonFileStructure',
prompt,
fileStructureContent,
);
const messages: MessageInterface[] = [{ content: prompt, role: 'system' }];
fileStructureContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateCommonFileStructure', this.id);
} catch (error) {
this.logger.error('Error during file structure generation:', error);
return {
@@ -5,6 +5,8 @@ import { ModelProvider } from 'src/common/model-provider';
import { Logger } from '@nestjs/common';
import { removeCodeBlockFences } from 'src/build-system/utils/strings';
import { BuildMonitor } from 'src/build-system/monitor';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';

export class PRDHandler implements BuildHandler {
readonly id = 'op:PRD';
@@ -27,31 +29,19 @@ export class PRDHandler implements BuildHandler {
);

// Send the prompt to the LLM server and process the response
const prdContent = await this.generatePRDFromLLM(prompt);
const prdContent = await this.generatePRDFromLLM(context, prompt);

return {
success: true,
data: removeCodeBlockFences(prdContent),
};
}

private async generatePRDFromLLM(prompt: string): Promise<string> {
private async generatePRDFromLLM(context: BuilderContext, prompt: string): Promise<string> {
const modelProvider = ModelProvider.getInstance();

const startTime = new Date();
const prdContent = await modelProvider.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: prompt, role: 'system' }],
});
const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'generatePRDFromLLM',
prompt,
prdContent,
);
const messages: MessageInterface[] = [{ content: prompt, role: 'system' }];
const prdContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generatePRDFromLLM', this.id);
this.logger.log('Received full PRD content from LLM server.');
return prdContent;
}
18 changes: 4 additions & 14 deletions backend/src/build-system/handlers/ux/datamap/index.ts
@@ -5,6 +5,8 @@ import { prompts } from './prompt';
import { Logger } from '@nestjs/common';
import { removeCodeBlockFences } from 'src/build-system/utils/strings';
import { BuildMonitor } from 'src/build-system/monitor';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';

/**
* Handler for generating the UX Data Map document.
@@ -24,20 +26,8 @@ export class UXDatamapHandler implements BuildHandler<string> {
'web', // TODO: change platform dynamically if needed
);

const startTime = new Date();
const uxDatamapContent = await context.model.chatSync({
model: 'gpt-4o-mini',
messages: [{ content: prompt, role: 'system' }],
});
const endTime = new Date();
const duration = endTime.getTime() - startTime.getTime();
BuildMonitor.timeRecorder(
duration,
this.id,
'generateUXDataMap',
prompt,
uxDatamapContent,
);
const messages: MessageInterface[] = [{ content: prompt, role: 'system' }];
const uxDatamapContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateUXDataMap', this.id);
Logger.log('UX Data Map Content: ', uxDatamapContent);

return {