From a4cd86feaa9fab1fa548239781710fc3846cf45b Mon Sep 17 00:00:00 2001
From: Julien Vignoud
Date: Mon, 1 Jul 2024 12:08:56 +0200
Subject: [PATCH] Remove print

---
 cli/src/benchmark_gpt.ts | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/cli/src/benchmark_gpt.ts b/cli/src/benchmark_gpt.ts
index 8fe6dd165..030851723 100644
--- a/cli/src/benchmark_gpt.ts
+++ b/cli/src/benchmark_gpt.ts
@@ -80,11 +80,10 @@ async function main(args: Required): Promise {
     console.log(`\tmodel type ${modelType} \n\tbatch size ${batchSize} \n\tcontext length ${contextLength}`)
 
     let epochTime = performance.now()
-    for (let epochsCounter = 0; epochsCounter < epochsCount; epochsCounter++) {
+    for (let epochsCounter = 1; epochsCounter <= epochsCount; epochsCounter++) {
         const [_, logs] = await async_iterator.gather(model.train(preprocessedDataset))
         epochTime = (performance.now() - epochTime)
         const msPerToken = epochTime / (batchSize * contextLength * iterationsPerEpoch * epochsCounter)
-        console.log(epochTime, batchSize, contextLength ,iterationsPerEpoch ,epochsCounter)
         console.log(`\t\tTraining time: ${msPerToken.toFixed(2)} ms/token
 ${logs.peakMemory.toFixed(2)} GB`)
     }