[autofix.ci] apply automated fixes
autofix-ci[bot] authored Jan 15, 2025
1 parent a719fd1 commit 958226a
Showing 17 changed files with 11,508 additions and 14,298 deletions.
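Most of the changes below follow one recurring pattern: the autofix job wraps long single-line chatSyncWithClocker calls onto multiple lines (Prettier-style formatting) and upgrades never-reassigned let bindings to const (likely an ESLint prefer-const fix). A minimal before/after sketch of that pattern, assuming the handlers' usual imports and async method context; the 'someOperation' tag and the variable names are illustrative, not taken from any one handler:

// Before the autofix: single-line call, let binding that is never reassigned
let messagesBefore: MessageInterface[] = [{ content: prompt, role: 'system' }];
const responseBefore = await chatSyncWithClocker(context, messagesBefore, 'gpt-4o-mini', 'someOperation', this.id);

// After the autofix: arguments wrapped one per line, binding declared const
const messagesAfter: MessageInterface[] = [
  { content: prompt, role: 'system' },
];
const responseAfter = await chatSyncWithClocker(
  context,
  messagesAfter,
  'gpt-4o-mini',
  'someOperation',
  this.id,
);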
@@ -146,6 +146,5 @@ describe('Build Sequence Test', () => {
expect(result.success).toBe(true);
expect(result.metrics).toBeDefined();
console.log(`Logs saved to: ${result.logFolderPath}`);

}, 60000000);
});
16 changes: 11 additions & 5 deletions backend/src/build-system/handlers/backend/code-generate/index.ts
@@ -3,9 +3,7 @@ import { BuilderContext } from 'src/build-system/context';
import { generateBackendCodePrompt } from './prompt';
import { saveGeneratedCode } from 'src/build-system/utils/files';
import * as path from 'path';
import {
formatResponse,
} from 'src/build-system/utils/strings';
import { formatResponse } from 'src/build-system/utils/strings';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';
import {
@@ -74,8 +72,16 @@ export class BackendCodeHandler implements BuildHandler<string> {
let generatedCode: string;
try {
// Invoke the language model to generate the backend code
let messages: MessageInterface[] = [{content: backendCodePrompt, role: 'system'}];
const modelResponse = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateBackendCode', this.id);
const messages: MessageInterface[] = [
{ content: backendCodePrompt, role: 'system' },
];
const modelResponse = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateBackendCode',
this.id,
);

generatedCode = formatResponse(modelResponse);

@@ -63,10 +63,18 @@ export class BackendFileReviewHandler implements BuildHandler<string> {
);

let modelResponse: string;

try {

Check failure on line 67 in backend/src/build-system/handlers/backend/file-review/file-review.ts (GitHub Actions / autofix): Unnecessary try/catch wrapper
let messages: MessageInterface[] = [{content: filePrompt, role: 'system'}];
modelResponse = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateBackendCode', this.id);
const messages: MessageInterface[] = [
{ content: filePrompt, role: 'system' },
];
modelResponse = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateBackendCode',
this.id,
);
} catch (error) {
throw error;
}
@@ -89,9 +97,17 @@ export class BackendFileReviewHandler implements BuildHandler<string> {
backendCode,
);

let messages: MessageInterface[] = [{content: modificationPrompt, role: 'system'}];
let response = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateBackendFile', this.id);

const messages: MessageInterface[] = [
{ content: modificationPrompt, role: 'system' },
];
const response = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateBackendFile',
this.id,
);

const newContent = formatResponse(response);
if (!newContent) {
throw new FileModificationError(
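The "Unnecessary try/catch wrapper" failures reported by the autofix job here and in the later files refer to catch clauses that only rethrow, like the catch (error) { throw error; } block still present in file-review.ts above. A minimal sketch of the flagged pattern and its usual resolution; callModel and the function names are hypothetical stand-ins, not code from this repository:

// Hypothetical stand-in for the model call made inside these handlers.
async function callModel(): Promise<string> {
  return 'model response';
}

// Flagged pattern: the catch clause adds no handling, so the wrapper is redundant.
async function reviewFileWrapped(): Promise<string> {
  try {
    return await callModel();
  } catch (error) {
    throw error; // rethrows unchanged
  }
}

// Usual fix: drop the wrapper and let the rejection propagate,
// or catch only where extra context or logging is actually added.
async function reviewFile(): Promise<string> {
  return await callModel();
}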
@@ -7,9 +7,7 @@ import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';
import {
MissingConfigurationError,

ModelUnavailableError,

ResponseParsingError,
} from 'src/build-system/errors';

@@ -68,10 +66,18 @@ export class BackendRequirementHandler
);

let backendOverview: string;

try {

Check failure on line 70 in backend/src/build-system/handlers/backend/requirements-document/index.ts (GitHub Actions / autofix): Unnecessary try/catch wrapper
let messages: MessageInterface[] = [{content: overviewPrompt, role: 'system'}];
backendOverview = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateBackendOverviewPrompt', this.id);
const messages: MessageInterface[] = [
{ content: overviewPrompt, role: 'system' },
];
backendOverview = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateBackendOverviewPrompt',
this.id,
);

if (!backendOverview) {
throw new ModelUnavailableError(
@@ -41,10 +41,17 @@ export class DatabaseRequirementHandler implements BuildHandler<string> {
let dbRequirementsContent: string;

try {
let messages: MessageInterface[] = [{content: prompt, role: 'system'}];
dbRequirementsContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateDatabaseRequirementPrompt', this.id);


const messages: MessageInterface[] = [
{ content: prompt, role: 'system' },
];
dbRequirementsContent = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateDatabaseRequirementPrompt',
this.id,
);

if (!dbRequirementsContent) {
throw new ModelUnavailableError(
'The model did not respond within the expected time.',
48 changes: 39 additions & 9 deletions backend/src/build-system/handlers/database/schemas/schemas.ts
@@ -13,7 +13,11 @@ import * as path from 'path';
import { formatResponse } from 'src/build-system/utils/strings';
import { chatSyncWithClocker } from 'src/build-system/utils/handler-helper';
import { MessageInterface } from 'src/common/model-provider/types';
import { FileWriteError, ModelUnavailableError, ResponseParsingError } from 'src/build-system/errors';
import {
FileWriteError,
ModelUnavailableError,
ResponseParsingError,
} from 'src/build-system/errors';

/**
* DBSchemaHandler is responsible for generating database schemas based on provided requirements.
@@ -72,8 +76,16 @@ export class DBSchemaHandler implements BuildHandler {

let dbAnalysis: string;
try {
let messages: MessageInterface[] = [{content: analysisPrompt, role: 'system'}];
dbAnalysis = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'analyzeDatabaseRequirements', this.id);
const messages: MessageInterface[] = [
{ content: analysisPrompt, role: 'system' },
];
dbAnalysis = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'analyzeDatabaseRequirements',
this.id,
);
} catch (error) {
this.logger.error('Error during database requirements analysis:', error);
return {
@@ -102,8 +114,16 @@

let schemaContent: string;
try {
let messages: MessageInterface[] = [{content: schemaPrompt, role: 'system'}];
const schemaResponse = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateBackendCode', this.id);
const messages: MessageInterface[] = [
{ content: schemaPrompt, role: 'system' },
];
const schemaResponse = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateBackendCode',
this.id,
);
schemaContent = formatResponse(schemaResponse);
} catch (error) {
this.logger.error('Error during schema generation:', error);
@@ -123,14 +143,24 @@ export class DBSchemaHandler implements BuildHandler {

let validationResponse: string;
try {
let messages: MessageInterface[] = [{content: validationPrompt, role: 'system'}];
const validationResult = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateBackendCode', this.id);
const messages: MessageInterface[] = [
{ content: validationPrompt, role: 'system' },
];
const validationResult = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateBackendCode',
this.id,
);
validationResponse = formatResponse(validationResult);
} catch (error) {
this.logger.error('Error during schema validation:', error);
return {
success: false,
error: new ModelUnavailableError('Failed to validate the generated database schema.'),
error: new ModelUnavailableError(
'Failed to validate the generated database schema.',
),
};
}

@@ -171,4 +201,4 @@ export class DBSchemaHandler implements BuildHandler {
data: schemaContent,
};
}
}
}
13 changes: 10 additions & 3 deletions backend/src/build-system/handlers/file-manager/file-arch/index.ts
@@ -39,10 +39,17 @@ export class FileArchGenerateHandler implements BuildHandler<string> {
);

try {

Check failure on line 41 in backend/src/build-system/handlers/file-manager/file-arch/index.ts (GitHub Actions / autofix): Unnecessary try/catch wrapper
const messages: MessageInterface[] = [
{ content: prompt, role: 'system' },
];
const fileArchContent = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateFileArch',
this.id,
);

let messages: MessageInterface[] = [{content: prompt, role: 'system'}];
const fileArchContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateFileArch', this.id);

if (!fileArchContent) {
throw new ModelUnavailableError(
'The model did not respond within the expected time.',
@@ -52,9 +52,17 @@ export class FileStructureHandler implements BuildHandler<FileStructOutput> {
let fileStructureContent: string;
try {
// Invoke the language model to generate the file structure content
let messages: MessageInterface[] = [{content: prompt, role: 'system'}];
fileStructureContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateCommonFileStructure', this.id);

const messages: MessageInterface[] = [
{ content: prompt, role: 'system' },
];
fileStructureContent = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateCommonFileStructure',
this.id,
);

if (!fileStructureContent || fileStructureContent.trim() === '') {
throw new ResponseParsingError(
`Generated content is empty during op:FILE:STRUCT.`,
@@ -70,8 +78,16 @@ export class FileStructureHandler implements BuildHandler<FileStructOutput> {

let fileStructureJsonContent: string;
try {
let messages: MessageInterface[] = [{content: convertToJsonPrompt, role: 'system'}];
fileStructureJsonContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'convertToJsonPrompt', this.id);
const messages: MessageInterface[] = [
{ content: convertToJsonPrompt, role: 'system' },
];
fileStructureJsonContent = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'convertToJsonPrompt',
this.id,
);

if (!fileStructureJsonContent || fileStructureJsonContent.trim() === '') {
throw new ResponseParsingError(
@@ -45,7 +45,7 @@ export class PRDHandler implements BuildHandler {

try {
// Send the prompt to the LLM server and process the response
const prdContent = await this.generatePRDFromLLM(context, prompt);
const prdContent = await this.generatePRDFromLLM(context, prompt);

if (!prdContent || prdContent.trim() === '') {
throw new ResponseParsingError('Generated PRD content is empty.');
@@ -60,10 +60,21 @@ export class PRDHandler implements BuildHandler {
throw new ResponseParsingError('Failed to generate PRD.');
}
}
private async generatePRDFromLLM(context: BuilderContext,prompt: string): Promise<string> {
private async generatePRDFromLLM(
context: BuilderContext,
prompt: string,
): Promise<string> {
try {

Check failure on line 67 in backend/src/build-system/handlers/product-manager/product-requirements-document/prd.ts (GitHub Actions / autofix): Unnecessary try/catch wrapper
let messages: MessageInterface[] = [{content: prompt, role: 'system'}];
const prdContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generatePRDFromLLM', this.id);
const messages: MessageInterface[] = [
{ content: prompt, role: 'system' },
];
const prdContent = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generatePRDFromLLM',
this.id,
);
if (!prdContent || prdContent.trim() === '') {
throw new ModelUnavailableError(
'LLM server returned empty PRD content.',
13 changes: 10 additions & 3 deletions backend/src/build-system/handlers/ux/datamap/index.ts
@@ -40,11 +40,18 @@ export class UXDatamapHandler implements BuildHandler<string> {
'web', // TODO: change platform dynamically if needed
);


try {

Check failure on line 43 in backend/src/build-system/handlers/ux/datamap/index.ts (GitHub Actions / autofix): Unnecessary try/catch wrapper
// Generate UX Data Map content using the language model
let messages: MessageInterface[] = [{content: prompt, role: 'system'}];
const uxDatamapContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateUXDataMap', this.id);
const messages: MessageInterface[] = [
{ content: prompt, role: 'system' },
];
const uxDatamapContent = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateUXDataMap',
this.id,
);
if (!uxDatamapContent || uxDatamapContent.trim() === '') {
throw new ResponseParsingError(
'Generated UX Data Map content is empty.',
19 changes: 15 additions & 4 deletions backend/src/build-system/handlers/ux/sitemap-document/uxsmd.ts
@@ -42,7 +42,7 @@ export class UXSMDHandler implements BuildHandler<string> {
);

// Send the prompt to the LLM server and process the response

try {

Check failure on line 46 in backend/src/build-system/handlers/ux/sitemap-document/uxsmd.ts (GitHub Actions / autofix): Unnecessary try/catch wrapper
// Generate UXSMD content using the language model
const uxsmdContent = await this.generateUXSMDFromLLM(context, prompt);
@@ -65,10 +65,21 @@
}
}

private async generateUXSMDFromLLM(context: BuilderContext, prompt: string): Promise<string> {
private async generateUXSMDFromLLM(
context: BuilderContext,
prompt: string,
): Promise<string> {
try {
let messages: MessageInterface[] = [{content: prompt, role: 'system'}];
const uxsmdContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateUXSMDFromLLM', this.id);
const messages: MessageInterface[] = [
{ content: prompt, role: 'system' },
];
const uxsmdContent = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateUXSMDFromLLM',
this.id,
);
this.logger.log('Received full UXSMD content from LLM server.');
return uxsmdContent;
} catch (error) {
13 changes: 10 additions & 3 deletions backend/src/build-system/handlers/ux/sitemap-structure/index.ts
@@ -41,11 +41,18 @@ export class UXSitemapStructureHandler implements BuildHandler<string> {
'web', // TODO: Change platform dynamically if necessary
);


try {
// Generate UX Structure content using the language model
let messages: MessageInterface[] = [{content: prompt, role: 'system'}];
const uxStructureContent = await chatSyncWithClocker(context, messages, 'gpt-4o-mini', 'generateUXSiteMapStructre', this.id);
const messages: MessageInterface[] = [
{ content: prompt, role: 'system' },
];
const uxStructureContent = await chatSyncWithClocker(
context,
messages,
'gpt-4o-mini',
'generateUXSiteMapStructre',
this.id,
);

if (!uxStructureContent || uxStructureContent.trim() === '') {
this.logger.error('Generated UX Sitemap Structure content is empty.');