diff --git a/packages/node-core/CHANGELOG.md b/packages/node-core/CHANGELOG.md
index 3419260da6..451d461b0e 100644
--- a/packages/node-core/CHANGELOG.md
+++ b/packages/node-core/CHANGELOG.md
@@ -5,6 +5,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
 ## [Unreleased]
+### Removed
+- `scale-batch-size` flag as it had no use (#2275)
 
 ## [7.3.0] - 2024-02-23
 ### Added
diff --git a/packages/node-core/src/configure/NodeConfig.ts b/packages/node-core/src/configure/NodeConfig.ts
index 84ce1cb0d1..6352ddc4a4 100644
--- a/packages/node-core/src/configure/NodeConfig.ts
+++ b/packages/node-core/src/configure/NodeConfig.ts
@@ -50,7 +50,6 @@ export interface IConfig {
   readonly storeCacheUpperLimit: number;
   readonly storeGetCacheSize: number;
   readonly storeCacheAsync: boolean;
-  readonly scaleBatchSize?: boolean;
   readonly storeFlushInterval: number;
   readonly isTest?: boolean;
   readonly root?: string;
@@ -275,10 +274,6 @@ export class NodeConfig implements IConfig {
     return !!this._config.pgCa;
   }
 
-  get scaleBatchSize(): boolean {
-    return !!this._config.scaleBatchSize;
-  }
-
   get postgresCACert(): string | undefined {
     if (!this._config.pgCa) {
       return undefined;
diff --git a/packages/node-core/src/indexer/fetch.service.ts b/packages/node-core/src/indexer/fetch.service.ts
index 08d400afc4..500bc51a27 100644
--- a/packages/node-core/src/indexer/fetch.service.ts
+++ b/packages/node-core/src/indexer/fetch.service.ts
@@ -4,20 +4,19 @@
 import assert from 'assert';
 import {OnApplicationShutdown} from '@nestjs/common';
 import {EventEmitter2} from '@nestjs/event-emitter';
-import {Interval, SchedulerRegistry} from '@nestjs/schedule';
+import {SchedulerRegistry} from '@nestjs/schedule';
 import {DictionaryQueryEntry, BaseDataSource, IProjectNetworkConfig} from '@subql/types-core';
 import {range, uniq, without} from 'lodash';
 import {NodeConfig} from '../configure';
 import {IndexerEvent} from '../events';
 import {getLogger} from '../logger';
-import {checkMemoryUsage, cleanedBatchBlocks, delay, transformBypassBlocks, waitForBatchSize} from '../utils';
+import {cleanedBatchBlocks, delay, transformBypassBlocks, waitForBatchSize} from '../utils';
 import {IBlockDispatcher} from './blockDispatcher';
 import {DictionaryService} from './dictionary.service';
 import {DynamicDsService} from './dynamic-ds.service';
 import {IProjectService} from './types';
 
 const logger = getLogger('FetchService');
-const CHECK_MEMORY_INTERVAL = 60000;
 
 export abstract class BaseFetchService<
   DS extends BaseDataSource,
@@ -28,7 +27,6 @@
   private _latestBestHeight?: number;
   private _latestFinalizedHeight?: number;
   private isShutdown = false;
-  private batchSizeScale = 1;
   private bypassBlocks: number[] = [];
 
   protected abstract buildDictionaryQueryEntries(dataSources: DS[]): DictionaryQueryEntry[];
@@ -159,17 +157,6 @@
     return this.latestFinalizedHeight;
   }
 
-  @Interval(CHECK_MEMORY_INTERVAL)
-  checkBatchScale(): void {
-    if (this.nodeConfig.scaleBatchSize) {
-      const scale = checkMemoryUsage(this.batchSizeScale, this.nodeConfig);
-
-      if (this.batchSizeScale !== scale) {
-        this.batchSizeScale = scale;
-      }
-    }
-  }
-
   async getFinalizedBlockHead(): Promise<void> {
     try {
       const currentFinalizedHeight = await this.getFinalizedHeight();
diff --git a/packages/node-core/src/utils/batch-size.ts b/packages/node-core/src/utils/batch-size.ts
index 3b7e455942..4a0e93b538 100644
--- a/packages/node-core/src/utils/batch-size.ts
+++ b/packages/node-core/src/utils/batch-size.ts
@@ -3,40 +3,10 @@
 
 import {getHeapStatistics} from 'v8';
 import {Mutex} from 'async-mutex';
-import {NodeConfig} from '../configure/NodeConfig';
 import {getLogger} from '../logger';
 
-const HIGH_THRESHOLD = 0.85;
-const LOW_THRESHOLD = 0.6;
-
 const logger = getLogger('memory');
 
-export function checkMemoryUsage(batchSizeScale: number, nodeConfig: NodeConfig): number {
-  const memoryData = getHeapStatistics();
-  const ratio = memoryData.used_heap_size / memoryData.heap_size_limit;
-  if (nodeConfig.profiler) {
-    logger.info(`Heap Statistics: ${JSON.stringify(memoryData)}`);
-    logger.info(`Heap Usage: ${ratio}`);
-  }
-
-  let scale = batchSizeScale;
-
-  if (ratio > HIGH_THRESHOLD) {
-    if (scale > 0) {
-      scale = Math.max(scale - 0.1, 0);
-      logger.debug(`Heap usage: ${ratio}, decreasing batch size by 10%`);
-    }
-  }
-
-  if (ratio < LOW_THRESHOLD) {
-    if (scale < 1) {
-      scale = Math.min(scale + 0.1, 1);
-      logger.debug(`Heap usage: ${ratio} increasing batch size by 10%`);
-    }
-  }
-  return scale;
-}
-
 export const memoryLock = new Mutex();
 
 export async function waitForBatchSize(sizeInBytes: number): Promise<void> {
diff --git a/packages/node-core/src/yargs.ts b/packages/node-core/src/yargs.ts
index 9acf4bf181..1e4275759d 100644
--- a/packages/node-core/src/yargs.ts
+++ b/packages/node-core/src/yargs.ts
@@ -152,12 +152,6 @@ export function yargsBuilder<
       type: 'number',
       default: 100,
     },
-    'scale-batch-size': {
-      type: 'boolean',
-      demandOption: false,
-      describe: 'scale batch size based on memory usage',
-      default: false,
-    },
     'store-cache-threshold': {
       demandOption: false,
       describe: 'Store cache will flush data to the database when number of records excess this threshold',