Fix get bigInt from jsonb, and deepclone from cache model (#2504)
* Fix get bigInt from jsonb, and deepclone from cache model

* Update packages/node-core/src/utils/graphql.ts

Co-authored-by: Scott Twiname <[email protected]>

* Update packages/node-core/src/utils/graphql.ts

Co-authored-by: Scott Twiname <[email protected]>

* update

---------

Co-authored-by: Scott Twiname <[email protected]>
jiqiang90 and stwiname authored Jul 28, 2024
1 parent 723fced commit 509ea5e
Showing 4 changed files with 93 additions and 49 deletions.
4 changes: 4 additions & 0 deletions packages/node-core/CHANGELOG.md
@@ -6,6 +6,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

## [Unreleased]

### Fixed
- Fixed get and set data not being deep cloned, so cached data is no longer mutable from outside the cache
- Improved getting BigInt values from JSON type fields

## [13.0.0] - 2024-07-25
### Changed
- Breaking change: Require indexing environment timezone set to UTC, avoid inconsistent result from cache and database (#2495)
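The two Fixed entries above are easier to see with a small illustration. The sketch below uses a hypothetical simplified cache (not the actual CachedModel API) to show the behaviour the deep-clone change enforces: an object passed to set or obtained from get can be mutated by the caller without corrupting what the cache later returns.

```ts
import {cloneDeep} from 'lodash';

// Hypothetical simplified cache (not the real CachedModel) illustrating the fix:
// without cloneDeep, the stored record and the returned record share identity,
// so a caller mutating its result would silently change the cached value too.
class NaiveCache<T extends {id: string}> {
  private store = new Map<string, T>();

  set(id: string, data: T): void {
    // Keep a private copy so later mutation of the caller's object cannot leak in
    this.store.set(id, cloneDeep(data));
  }

  get(id: string): T | undefined {
    const record = this.store.get(id);
    // Hand back a fresh copy so callers may mutate the result freely
    return record === undefined ? undefined : cloneDeep(record);
  }
}

type Delegation = {delegator: string; amount: bigint};
const cache = new NaiveCache<{id: string; delegators: Delegation[]}>();

cache.set('0x10', {id: '0x10', delegators: []});
const rec = cache.get('0x10');
rec?.delegators.push({delegator: '0x11', amount: 9000000000000000000000n});

// A second get still returns the original, un-mutated record
console.log(cache.get('0x10')?.delegators.length); // 0
```

This mirrors the new "get and update, without save" test added to cacheModel.test.ts below.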
66 changes: 41 additions & 25 deletions packages/node-core/src/indexer/storeCache/cacheModel.test.ts
@@ -3,7 +3,7 @@

import {GraphQLModelsType} from '@subql/utils';
import {Sequelize, DataTypes, QueryTypes} from '@subql/x-sequelize';
import {padStart} from 'lodash';
import {cloneDeep, padStart} from 'lodash';
import {DbOption, modelsTypeToModelAttributes, NodeConfig} from '../../';
import {CachedModel} from './cacheModel';

@@ -378,20 +378,24 @@ describe('cacheModel integration', () => {
await sequelize.close();
});

async function setDefaultData(id: string, height: number, data?: any): Promise<void> {
cacheModel.set(
id,
data ?? {
id,
selfStake: BigInt(1000000000000000000000n),
oneEntity: {testItem: 'test', amount: BigInt(8000000000000000000000n)},
delegators: [{delegator: '0x02', amount: BigInt(1000000000000000000000n)}],
randomNArray: [1, 2, 3, 4, 5],
},
height
);
await flush(height + 1);
}

describe('cached data and db data compare', () => {
it('bigint value in jsonb', async () => {
cacheModel.set(
`0x01`,
{
id: `0x01`,
selfStake: BigInt(1000000000000000000000n),
oneEntity: {testItem: 'test', amount: BigInt(8000000000000000000000n)},
delegators: [{delegator: '0x02', amount: BigInt(1000000000000000000000n)}],
randomNArray: [1, 2, 3, 4, 5],
},
1
);
await flush(2);
await setDefaultData('0x01', 1);

// force clear get cache
(cacheModel as any).getCache.clear();
@@ -533,18 +537,13 @@ describe('cacheModel integration', () => {
});

it('empty array test, compare db result with cache data', async () => {
cacheModel.set(
`0x09`,
{
id: `0x09`,
selfStake: BigInt(1000000000000000000000n),
oneEntity: {testItem: 'test', amount: BigInt(8000000000000000000000n)},
delegators: [{delegator: '0x02', amount: BigInt(1000000000000000000000n)}],
randomNArray: undefined,
},
1
);
await flush(2);
await setDefaultData('0x09', 1, {
id: '0x09',
selfStake: BigInt(1000000000000000000000n),
oneEntity: {testItem: 'test', amount: BigInt(8000000000000000000000n)},
delegators: [{delegator: '0x02', amount: BigInt(1000000000000000000000n)}],
randomNArray: undefined,
});

// Cache value 1, before cache is cleared
const resCache1 = await cacheModel.get('0x09');
@@ -567,5 +566,22 @@
// We expect the DB value and the set cache value to differ; the field value can be undefined vs null
expect(res0).not.toEqual(resCache1);
});

it('get and update without save, getting again should not return the updated value', async () => {
await setDefaultData(`0x10`, 1);

// Db value
const res0 = await cacheModel.get('0x10');
const copiedRes0 = cloneDeep(res0);

// Update the value
res0?.delegators.push({delegator: '0x11', amount: BigInt(9000000000000000000000n)});

// Get it again
const res1 = await cacheModel.get('0x10');

// Should be the same as before the update
expect(res1).toEqual(copiedRes0);
});
});
});
36 changes: 19 additions & 17 deletions packages/node-core/src/indexer/storeCache/cacheModel.ts
@@ -45,7 +45,7 @@ export class CachedModel<
T extends {id: string; __block_range?: (number | null)[] | Fn} = {
id: string;
__block_range?: (number | null)[] | Fn;
}
},
>
extends Cacheable
implements ICachedModel<T>, ICachedModelControl
@@ -93,7 +93,7 @@
!latestSetRecord.removed &&
this.removeCache[id].removedAtBlock <= latestSetRecord.startHeight
) {
return latestSetRecord.data;
return cloneDeep(latestSetRecord.data);
}
return;
}
@@ -117,7 +117,7 @@
this.getCache.set(id, record);
}
}
return record;
return cloneDeep(record);
}

return cloneDeep(this.getCache.get(id));
@@ -219,10 +219,11 @@
if (this.setCache[id] === undefined) {
this.setCache[id] = new SetValueModel();
}
this.setCache[id].set(data, blockHeight, this.getNextStoreOperationIndex());
const copiedData = cloneDeep(data);
this.setCache[id].set(copiedData, blockHeight, this.getNextStoreOperationIndex());
// Experimental: this means getCache keeps duplicate data from setCache;
// we can remove this if memory usage becomes too high.
this.getCache.set(id, data);
this.getCache.set(id, copiedData);
// Handle remove cache when removed data is created again
if (this.removeCache[id] && this.removeCache[id].removedAtBlock === blockHeight) {
delete this.removeCache[id];
@@ -350,15 +351,22 @@
}
}

private filterRecordsWithHeight(blockHeight: number): FilteredHeightRecords<T> {
return {
removeRecords: Object.entries(this.removeCache).reduce((acc, [key, value]) => {
if (value.removedAtBlock <= blockHeight) {
private filterRemoveRecordByHeight(blockHeight: number, lessEqt: boolean): Record<string, RemoveValue> {
return Object.entries(this.removeCache).reduce(
(acc, [key, value]) => {
if (lessEqt ? value.removedAtBlock <= blockHeight : value.removedAtBlock > blockHeight) {
acc[key] = value;
}

return acc;
}, {} as Record<string, RemoveValue>),
},
{} as Record<string, RemoveValue>
);
}

private filterRecordsWithHeight(blockHeight: number): FilteredHeightRecords<T> {
return {
removeRecords: this.filterRemoveRecordByHeight(blockHeight, true),
setRecords: Object.entries(this.setCache).reduce((acc, [key, value]) => {
const newValue = value.fromBelowHeight(blockHeight + 1);
if (newValue.getValues().length) {
@@ -422,13 +430,7 @@
return acc;
}, {} as SetData<T>);

this.removeCache = Object.entries(this.removeCache).reduce((acc, [key, value]) => {
if (value.removedAtBlock > blockHeight) {
newCounter++;
acc[key] = value;
}
return acc;
}, {} as Record<string, RemoveValue>);
this.removeCache = this.filterRemoveRecordByHeight(blockHeight, false);

this.flushableRecordCounter = newCounter;
}
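Aside from the cloneDeep calls, the change above folds two near-identical reduce blocks over removeCache into one helper parameterised by the comparison direction (lessEqt). A rough standalone sketch of that pattern is below, using a simplified stand-in for the RemoveValue type from node-core:

```ts
// Simplified stand-in for the RemoveValue records kept in removeCache
interface RemoveValueLike {
  removedAtBlock: number;
}

// lessEqt = true keeps records removed at or below blockHeight;
// lessEqt = false keeps records removed above it
function filterRemoveRecordByHeight<T extends RemoveValueLike>(
  removeCache: Record<string, T>,
  blockHeight: number,
  lessEqt: boolean
): Record<string, T> {
  return Object.entries(removeCache).reduce((acc, [key, value]) => {
    if (lessEqt ? value.removedAtBlock <= blockHeight : value.removedAtBlock > blockHeight) {
      acc[key] = value;
    }
    return acc;
  }, {} as Record<string, T>);
}

// Example: records removed at blocks 5 and 12, filtered against height 10
const removeCache = {a: {removedAtBlock: 5}, b: {removedAtBlock: 12}};
console.log(filterRemoveRecordByHeight(removeCache, 10, true));  // { a: { removedAtBlock: 5 } }
console.log(filterRemoveRecordByHeight(removeCache, 10, false)); // { b: { removedAtBlock: 12 } }
```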
36 changes: 29 additions & 7 deletions packages/node-core/src/utils/graphql.ts
@@ -12,6 +12,7 @@ import {
isBuffer,
isNull,
GraphQLEntityField,
GraphQLJsonFieldType,
} from '@subql/utils';
import {ModelAttributes, ModelAttributeColumnOptions} from '@subql/x-sequelize';
import {isArray, isObject} from 'lodash';
@@ -120,7 +121,7 @@
if (!dataValue || !field.jsonInterface) {
return null;
}
return field.isArray ? dataValue.map((v: any) => processGetJson(v)) : processGetJson(dataValue);
return field.isArray ? dataValue.map((v: any) => processGetJson(field, v)) : processGetJson(field, dataValue);
};
columnOption.set = function (val: unknown) {
if (val === undefined || isNull(val)) {
@@ -140,14 +141,35 @@
return columnOption;
}

function processGetJson(data: any): any {
return JSON.parse(JSON.stringify(data), (key, value) => {
// regex to check if the value is a bigint string
if (typeof value === 'string' && /^-?\d+n$/.test(value)) {
return BigInt(value.slice(0, -1));
/**
* Process a nested json get; the output has the same type as the input value
* @param field
* @param value
*/
function processGetJson(field: GraphQLEntityField | GraphQLJsonFieldType, value: any): any {
// bigIntFields and nestJsonFields from this level in the entity/json
const bigIntFields = field.jsonInterface?.fields.filter((f) => f.type === 'BigInt');
const nestJsonFields = field.jsonInterface?.fields.filter((f) => f.jsonInterface);
const processBigIntFields = (value: any) => {
if (bigIntFields) {
for (const bigIntField of bigIntFields) {
// If null is passed, we should not convert it to BigInt
if (value[bigIntField.name] !== undefined && value[bigIntField.name] !== null) {
value[bigIntField.name] = BigInt(value[bigIntField.name].slice(0, -1));
}
}
}
return value;
});
};
if (nestJsonFields) {
for (const nestJsonField of nestJsonFields) {
// The field has a nested json type and its value is defined
if (nestJsonField.jsonInterface && value[nestJsonField.name]) {
value[nestJsonField.name] = processGetJson(nestJsonField, value[nestJsonField.name]);
}
}
}
return processBigIntFields(value);
}

function processSetJson(data: any): any {
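The getter rework above replaces the old blanket JSON.parse reviver (which pattern-matched any "<digits>n" string) with a schema-driven walk: the field's jsonInterface tells it exactly which properties hold BigInt values, including on nested JSON fields, and null/undefined values are skipped. A rough self-contained sketch of the round trip under simplified field metadata is shown below; the helper names and the JsonFieldMeta type are made up for the illustration (the real code uses GraphQLEntityField / GraphQLJsonFieldType from @subql/utils, with processSetJson playing the write-side role of serializeJson here).

```ts
// Simplified stand-in for the jsonInterface field metadata used by node-core
interface JsonFieldMeta {
  name: string;
  type: 'BigInt' | 'String' | 'Json';
  fields?: JsonFieldMeta[]; // populated when type === 'Json'
}

// On write: BigInt values become "<digits>n" strings so they survive JSONB storage
function serializeJson(value: unknown): any {
  return JSON.parse(
    JSON.stringify(value, (_key, v) => (typeof v === 'bigint' ? `${v.toString()}n` : v))
  );
}

// On read: walk the declared fields, convert "<digits>n" strings back to BigInt and
// recurse into nested Json fields; null/undefined values are left untouched
// (array fields are omitted here for brevity)
function deserializeJson(fields: JsonFieldMeta[], value: any): any {
  for (const field of fields) {
    const v = value[field.name];
    if (v === undefined || v === null) continue;
    if (field.type === 'BigInt') {
      value[field.name] = BigInt(v.slice(0, -1));
    } else if (field.type === 'Json' && field.fields) {
      value[field.name] = deserializeJson(field.fields, v);
    }
  }
  return value;
}

const meta: JsonFieldMeta[] = [
  {name: 'amount', type: 'BigInt'},
  {name: 'inner', type: 'Json', fields: [{name: 'stake', type: 'BigInt'}]},
];

const stored = serializeJson({amount: 8000000000000000000000n, inner: {stake: 1000n}});
// stored: { amount: '8000000000000000000000n', inner: { stake: '1000n' } }
const restored = deserializeJson(meta, stored);
console.log(typeof restored.amount, restored.inner.stake); // 'bigint' 1000n
```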
