From d8dc9c21551e1f0bd1d1ef4812ed93c8d8a9d72c Mon Sep 17 00:00:00 2001 From: kirillgroshkov Date: Sun, 21 Jan 2024 16:43:48 +0100 Subject: [PATCH] fix: CommonDao BM/DBM now extends BaseDBEntity BaseDBEntity now has non-optional id (and optional created/updated). This is another step in (hopefully) simplifying DB model constraints --- src/adapter/cachedb/cache.db.model.ts | 4 +- src/adapter/cachedb/cache.db.ts | 29 +++--- src/adapter/file/file.db.model.ts | 6 +- src/adapter/file/file.db.ts | 33 +++---- .../file/inMemory.persistence.plugin.ts | 2 +- .../file/localFile.persistence.plugin.ts | 10 +- src/adapter/file/noop.persistence.plugin.ts | 6 +- src/adapter/inmemory/inMemory.db.ts | 36 ++++---- src/adapter/inmemory/queryInMemory.ts | 7 +- src/base.common.db.ts | 30 ++---- src/common.db.ts | 33 +++---- src/commondao/common.dao.model.ts | 28 +++--- src/commondao/common.dao.test.ts | 12 +-- src/commondao/common.dao.ts | 92 ++++++++++--------- src/db.model.ts | 11 +-- src/pipeline/dbPipelineBackup.ts | 2 +- src/pipeline/dbPipelineCopy.ts | 15 ++- src/pipeline/dbPipelineRestore.ts | 9 +- src/query/dbQuery.test.ts | 8 +- src/query/dbQuery.ts | 20 ++-- src/timeseries/commonTimeSeriesDao.ts | 2 +- src/transaction/dbTransaction.util.ts | 6 +- src/validation/index.ts | 6 +- yarn.lock | 70 ++++++++------ 24 files changed, 234 insertions(+), 243 deletions(-) diff --git a/src/adapter/cachedb/cache.db.model.ts b/src/adapter/cachedb/cache.db.model.ts index 41fca4b..9226401 100644 --- a/src/adapter/cachedb/cache.db.model.ts +++ b/src/adapter/cachedb/cache.db.model.ts @@ -1,4 +1,4 @@ -import { CommonLogger, PartialObjectWithId } from '@naturalcycles/js-lib' +import { CommonLogger, ObjectWithId } from '@naturalcycles/js-lib' import { CommonDB } from '../../common.db' import { CommonDBCreateOptions, @@ -62,7 +62,7 @@ export interface CacheDBOptions extends CommonDBOptions { onlyCache?: boolean } -export interface CacheDBSaveOptions +export interface CacheDBSaveOptions extends CacheDBOptions, CommonDBSaveOptions {} diff --git a/src/adapter/cachedb/cache.db.ts b/src/adapter/cachedb/cache.db.ts index cd73511..3af4bbd 100644 --- a/src/adapter/cachedb/cache.db.ts +++ b/src/adapter/cachedb/cache.db.ts @@ -3,8 +3,7 @@ import { _isTruthy, JsonSchemaObject, JsonSchemaRootObject, - PartialObjectWithId, - Saved, + ObjectWithId, StringMap, } from '@naturalcycles/js-lib' import { ReadableTyped } from '@naturalcycles/nodejs-lib' @@ -59,13 +58,13 @@ export class CacheDB extends BaseCommonDB implements CommonDB { return await this.cfg.downstreamDB.getTables() } - override async getTableSchema( + override async getTableSchema( table: string, ): Promise> { return await this.cfg.downstreamDB.getTableSchema(table) } - override async createTable( + override async createTable( table: string, schema: JsonSchemaObject, opt: CacheDBCreateOptions = {}, @@ -79,12 +78,12 @@ export class CacheDB extends BaseCommonDB implements CommonDB { } } - override async getByIds( + override async getByIds( table: string, ids: string[], opt: CacheDBSaveOptions = {}, - ): Promise[]> { - const resultMap: StringMap> = {} + ): Promise { + const resultMap: StringMap = {} const missingIds: string[] = [] if (!opt.skipCache && !this.cfg.skipCache) { @@ -125,7 +124,7 @@ export class CacheDB extends BaseCommonDB implements CommonDB { return ids.map(id => resultMap[id]).filter(_isTruthy) } - override async saveBatch( + override async saveBatch( table: string, rows: ROW[], opt: CacheDBSaveOptions = {}, @@ -154,10 +153,10 @@ export class CacheDB extends 
BaseCommonDB implements CommonDB { } } - override async runQuery( + override async runQuery( q: DBQuery, opt: CacheDBSaveOptions = {}, - ): Promise>> { + ): Promise> { if (!opt.onlyCache && !this.cfg.onlyCache) { const { rows, ...queryResult } = await this.cfg.downstreamDB.runQuery(q, opt) @@ -184,7 +183,7 @@ export class CacheDB extends BaseCommonDB implements CommonDB { return { rows, ...queryResult } } - override async runQueryCount( + override async runQueryCount( q: DBQuery, opt: CacheDBOptions = {}, ): Promise { @@ -201,10 +200,10 @@ export class CacheDB extends BaseCommonDB implements CommonDB { return count } - override streamQuery( + override streamQuery( q: DBQuery, opt: CacheDBStreamOptions = {}, - ): ReadableTyped> { + ): ReadableTyped { if (!opt.onlyCache && !this.cfg.onlyCache) { const stream = this.cfg.downstreamDB.streamQuery(q, opt) @@ -240,7 +239,7 @@ export class CacheDB extends BaseCommonDB implements CommonDB { return stream } - override async deleteByQuery( + override async deleteByQuery( q: DBQuery, opt: CacheDBOptions = {}, ): Promise { @@ -272,7 +271,7 @@ export class CacheDB extends BaseCommonDB implements CommonDB { return deletedIds } - override async updateByQuery( + override async updateByQuery( q: DBQuery, patch: DBPatch, opt: CacheDBOptions = {}, diff --git a/src/adapter/file/file.db.model.ts b/src/adapter/file/file.db.model.ts index 0ff9588..db405a1 100644 --- a/src/adapter/file/file.db.model.ts +++ b/src/adapter/file/file.db.model.ts @@ -1,11 +1,11 @@ -import { CommonLogger, PartialObjectWithId, Saved } from '@naturalcycles/js-lib' +import { CommonLogger, ObjectWithId } from '@naturalcycles/js-lib' import { DBSaveBatchOperation } from '../../db.model' import type { DBQueryOrder } from '../../query/dbQuery' export interface FileDBPersistencePlugin { ping: () => Promise getTables: () => Promise - loadFile: (table: string) => Promise[]> + loadFile: (table: string) => Promise saveFiles: (ops: DBSaveBatchOperation[]) => Promise } @@ -15,7 +15,7 @@ export interface FileDBCfg { /** * @default undefined, which means "insertion order" */ - sortOnSave?: DBQueryOrder + sortOnSave?: DBQueryOrder /** * @default true diff --git a/src/adapter/file/file.db.ts b/src/adapter/file/file.db.ts index 0cebd6d..98367ef 100644 --- a/src/adapter/file/file.db.ts +++ b/src/adapter/file/file.db.ts @@ -9,8 +9,7 @@ import { JsonSchemaRootObject, _filterUndefinedValues, _assert, - Saved, - PartialObjectWithId, + ObjectWithId, } from '@naturalcycles/js-lib' import { readableCreate, ReadableTyped, dimGrey } from '@naturalcycles/nodejs-lib' import { @@ -74,16 +73,16 @@ export class FileDB extends BaseCommonDB implements CommonDB { return tables } - override async getByIds( + override async getByIds( table: string, ids: string[], _opt?: CommonDBOptions, - ): Promise[]> { + ): Promise { const byId = _by(await this.loadFile(table), r => r.id) return ids.map(id => byId[id]!).filter(Boolean) } - override async saveBatch( + override async saveBatch( table: string, rows: ROW[], _opt?: CommonDBSaveOptions, @@ -91,7 +90,7 @@ export class FileDB extends BaseCommonDB implements CommonDB { if (!rows.length) return // save some api calls // 1. Load the whole file - const byId = _by(await this.loadFile>(table), r => r.id) + const byId = _by(await this.loadFile(table), r => r.id) // 2. 
Merge with new data (using ids) let saved = 0 @@ -111,23 +110,23 @@ export class FileDB extends BaseCommonDB implements CommonDB { } } - override async runQuery( + override async runQuery( q: DBQuery, _opt?: CommonDBOptions, - ): Promise>> { + ): Promise> { return { rows: queryInMemory(q, await this.loadFile(q.table)), } } - override async runQueryCount( + override async runQueryCount( q: DBQuery, _opt?: CommonDBOptions, ): Promise { return (await this.loadFile(q.table)).length } - override streamQuery( + override streamQuery( q: DBQuery, opt?: CommonDBStreamOptions, ): ReadableTyped { @@ -141,7 +140,7 @@ export class FileDB extends BaseCommonDB implements CommonDB { return readable } - override async deleteByQuery( + override async deleteByQuery( q: DBQuery, _opt?: CommonDBOptions, ): Promise { @@ -181,7 +180,7 @@ export class FileDB extends BaseCommonDB implements CommonDB { return deleted } - override async getTableSchema( + override async getTableSchema( table: string, ): Promise> { const rows = await this.loadFile(table) @@ -192,7 +191,7 @@ export class FileDB extends BaseCommonDB implements CommonDB { } // wrapper, to handle logging - async loadFile(table: string): Promise[]> { + async loadFile(table: string): Promise { const started = this.logStarted(`loadFile(${table})`) const rows = await this.cfg.plugin.loadFile(table) this.logFinished(started, `loadFile(${table}) ${rows.length} row(s)`) @@ -200,7 +199,7 @@ export class FileDB extends BaseCommonDB implements CommonDB { } // wrapper, to handle logging, sorting rows before saving - async saveFile(table: string, _rows: ROW[]): Promise { + async saveFile(table: string, _rows: ROW[]): Promise { // if (!_rows.length) return // NO, it should be able to save file with 0 rows! // Sort the rows, if needed @@ -212,9 +211,7 @@ export class FileDB extends BaseCommonDB implements CommonDB { this.logFinished(started, op) } - async saveFiles( - ops: DBSaveBatchOperation[], - ): Promise { + async saveFiles(ops: DBSaveBatchOperation[]): Promise { if (!ops.length) return const op = `saveFiles ${ops.length} op(s):\n` + ops.map(o => `${o.table} (${o.rows.length})`).join('\n') @@ -227,7 +224,7 @@ export class FileDB extends BaseCommonDB implements CommonDB { // return new FileDBTransaction(this) // } - sortRows(rows: ROW[]): ROW[] { + sortRows(rows: ROW[]): ROW[] { rows = rows.map(r => _filterUndefinedValues(r)) if (this.cfg.sortOnSave) { diff --git a/src/adapter/file/inMemory.persistence.plugin.ts b/src/adapter/file/inMemory.persistence.plugin.ts index 83ce468..08bc717 100644 --- a/src/adapter/file/inMemory.persistence.plugin.ts +++ b/src/adapter/file/inMemory.persistence.plugin.ts @@ -18,7 +18,7 @@ export class InMemoryPersistencePlugin implements FileDBPersistencePlugin { return Object.values(this.data[table] || ({} as any)) } - async saveFiles(ops: DBSaveBatchOperation[]): Promise { + async saveFiles(ops: DBSaveBatchOperation[]): Promise { ops.forEach(op => { this.data[op.table] = _by(op.rows, r => r.id) }) diff --git a/src/adapter/file/localFile.persistence.plugin.ts b/src/adapter/file/localFile.persistence.plugin.ts index a925fba..15e6031 100644 --- a/src/adapter/file/localFile.persistence.plugin.ts +++ b/src/adapter/file/localFile.persistence.plugin.ts @@ -2,7 +2,7 @@ import fs from 'node:fs' import fsp from 'node:fs/promises' import { Readable } from 'node:stream' import { createGzip, createUnzip } from 'node:zlib' -import { pMap, PartialObjectWithId, Saved } from '@naturalcycles/js-lib' +import { ObjectWithId, pMap } from 
'@naturalcycles/js-lib' import { transformJsonParse, transformSplit, @@ -48,7 +48,7 @@ export class LocalFilePersistencePlugin implements FileDBPersistencePlugin { .map(f => f.split('.ndjson')[0]!) } - async loadFile(table: string): Promise[]> { + async loadFile(table: string): Promise { await fs2.ensureDirAsync(this.cfg.storagePath) const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}` const filePath = `${this.cfg.storagePath}/${table}.${ext}` @@ -57,7 +57,7 @@ export class LocalFilePersistencePlugin implements FileDBPersistencePlugin { const transformUnzip = this.cfg.gzip ? [createUnzip()] : [] - const rows: Saved[] = [] + const rows: ROW[] = [] await _pipeline([ fs.createReadStream(filePath), @@ -70,11 +70,11 @@ export class LocalFilePersistencePlugin implements FileDBPersistencePlugin { return rows } - async saveFiles(ops: DBSaveBatchOperation[]): Promise { + async saveFiles(ops: DBSaveBatchOperation[]): Promise { await pMap(ops, async op => await this.saveFile(op.table, op.rows), { concurrency: 16 }) } - async saveFile(table: string, rows: ROW[]): Promise { + async saveFile(table: string, rows: ROW[]): Promise { await fs2.ensureDirAsync(this.cfg.storagePath) const ext = `ndjson${this.cfg.gzip ? '.gz' : ''}` const filePath = `${this.cfg.storagePath}/${table}.${ext}` diff --git a/src/adapter/file/noop.persistence.plugin.ts b/src/adapter/file/noop.persistence.plugin.ts index 5391e62..5e7525f 100644 --- a/src/adapter/file/noop.persistence.plugin.ts +++ b/src/adapter/file/noop.persistence.plugin.ts @@ -1,4 +1,4 @@ -import { PartialObjectWithId, Saved } from '@naturalcycles/js-lib' +import { ObjectWithId } from '@naturalcycles/js-lib' import { DBSaveBatchOperation } from '../../db.model' import { FileDBPersistencePlugin } from './file.db.model' @@ -9,9 +9,9 @@ export class NoopPersistencePlugin implements FileDBPersistencePlugin { return [] } - async loadFile(_table: string): Promise[]> { + async loadFile(_table: string): Promise { return [] } - async saveFiles(_ops: DBSaveBatchOperation[]): Promise {} + async saveFiles(_ops: DBSaveBatchOperation[]): Promise {} } diff --git a/src/adapter/inmemory/inMemory.db.ts b/src/adapter/inmemory/inMemory.db.ts index 0a3d19b..a68c6bc 100644 --- a/src/adapter/inmemory/inMemory.db.ts +++ b/src/adapter/inmemory/inMemory.db.ts @@ -16,8 +16,6 @@ import { CommonLogger, _deepCopy, _assert, - PartialObjectWithId, - Saved, } from '@naturalcycles/js-lib' import { bufferReviver, @@ -154,7 +152,7 @@ export class InMemoryDB implements CommonDB { return Object.keys(this.data).filter(t => t.startsWith(this.cfg.tablesPrefix)) } - async getTableSchema( + async getTableSchema( _table: string, ): Promise> { const table = this.cfg.tablesPrefix + _table @@ -164,7 +162,7 @@ export class InMemoryDB implements CommonDB { } } - async createTable( + async createTable( _table: string, _schema: JsonSchemaObject, opt: CommonDBCreateOptions = {}, @@ -177,17 +175,17 @@ export class InMemoryDB implements CommonDB { } } - async getByIds( + async getByIds( _table: string, ids: string[], _opt?: CommonDBOptions, - ): Promise[]> { + ): Promise { const table = this.cfg.tablesPrefix + _table this.data[table] ||= {} - return ids.map(id => this.data[table]![id] as Saved).filter(Boolean) + return ids.map(id => this.data[table]![id] as ROW).filter(Boolean) } - async saveBatch( + async saveBatch( _table: string, rows: ROW[], opt: CommonDBSaveOptions = {}, @@ -218,13 +216,13 @@ export class InMemoryDB implements CommonDB { }) } - async deleteByQuery( + async deleteByQuery( q: DBQuery, _opt?: 
CommonDBOptions, ): Promise { const table = this.cfg.tablesPrefix + q.table if (!this.data[table]) return 0 - const ids = queryInMemory(q, Object.values(this.data[table]!) as Saved[]).map(r => r.id) + const ids = queryInMemory(q, Object.values(this.data[table]!) as ROW[]).map(r => r.id) return await this.deleteByIds(q.table, ids) } @@ -242,7 +240,7 @@ export class InMemoryDB implements CommonDB { return count } - async updateByQuery( + async updateByQuery( q: DBQuery, patch: DBPatch, ): Promise { @@ -264,15 +262,15 @@ export class InMemoryDB implements CommonDB { return rows.length } - async runQuery( + async runQuery( q: DBQuery, _opt?: CommonDBOptions, - ): Promise>> { + ): Promise> { const table = this.cfg.tablesPrefix + q.table - return { rows: queryInMemory(q, Object.values(this.data[table] || {}) as Saved[]) } + return { rows: queryInMemory(q, Object.values(this.data[table] || {}) as ROW[]) } } - async runQueryCount( + async runQueryCount( q: DBQuery, _opt?: CommonDBOptions, ): Promise { @@ -280,10 +278,10 @@ export class InMemoryDB implements CommonDB { return queryInMemory(q, Object.values(this.data[table] || {})).length } - streamQuery( + streamQuery( q: DBQuery, _opt?: CommonDBOptions, - ): ReadableTyped> { + ): ReadableTyped { const table = this.cfg.tablesPrefix + q.table return Readable.from(queryInMemory(q, Object.values(this.data[table] || {}) as ROW[])) } @@ -390,7 +388,7 @@ export class InMemoryDBTransaction implements DBTransaction { // used to enforce forbidReadAfterWrite setting writeOperationHappened = false - async getByIds( + async getByIds( table: string, ids: string[], opt?: CommonDBOptions, @@ -405,7 +403,7 @@ export class InMemoryDBTransaction implements DBTransaction { return await this.db.getByIds(table, ids, opt) } - async saveBatch( + async saveBatch( table: string, rows: ROW[], opt?: CommonDBSaveOptions, diff --git a/src/adapter/inmemory/queryInMemory.ts b/src/adapter/inmemory/queryInMemory.ts index f937c17..c8b2048 100644 --- a/src/adapter/inmemory/queryInMemory.ts +++ b/src/adapter/inmemory/queryInMemory.ts @@ -1,4 +1,4 @@ -import { _pick, PartialObjectWithId } from '@naturalcycles/js-lib' +import { _pick, ObjectWithId } from '@naturalcycles/js-lib' import { DBQuery, DBQueryFilterOperator } from '../../query/dbQuery' type FilterFn = (v: any, val: any) => boolean @@ -18,10 +18,7 @@ const FILTER_FNS: Record = { // Important: q.table is not used in this function, so tablesPrefix is not needed. // But should be careful here.. 
-export function queryInMemory( - q: DBQuery, - rows: ROW[] = [], -): ROW[] { +export function queryInMemory(q: DBQuery, rows: ROW[] = []): ROW[] { // .filter // eslint-disable-next-line unicorn/no-array-reduce rows = q._filters.reduce((rows, filter) => { diff --git a/src/base.common.db.ts b/src/base.common.db.ts index abd7155..341cced 100644 --- a/src/base.common.db.ts +++ b/src/base.common.db.ts @@ -1,9 +1,4 @@ -import { - JsonSchemaObject, - JsonSchemaRootObject, - PartialObjectWithId, - Saved, -} from '@naturalcycles/js-lib' +import { JsonSchemaObject, JsonSchemaRootObject, ObjectWithId } from '@naturalcycles/js-lib' import { ReadableTyped } from '@naturalcycles/nodejs-lib' import { CommonDB, CommonDBSupport, CommonDBType } from './common.db' import { @@ -36,31 +31,28 @@ export class BaseCommonDB implements CommonDB { throw new Error('getTables is not implemented') } - async getTableSchema( + async getTableSchema( table: string, ): Promise> { throw new Error('getTableSchema is not implemented') } - async createTable( + async createTable( table: string, schema: JsonSchemaObject, ): Promise { // no-op } - async getByIds( - table: string, - ids: string[], - ): Promise[]> { + async getByIds(table: string, ids: string[]): Promise { throw new Error('getByIds is not implemented') } - async deleteByQuery(q: DBQuery): Promise { + async deleteByQuery(q: DBQuery): Promise { throw new Error('deleteByQuery is not implemented') } - async updateByQuery( + async updateByQuery( q: DBQuery, patch: DBPatch, opt?: CommonDBOptions, @@ -68,17 +60,15 @@ export class BaseCommonDB implements CommonDB { throw new Error('updateByQuery is not implemented') } - async runQuery( - q: DBQuery, - ): Promise>> { + async runQuery(q: DBQuery): Promise> { throw new Error('runQuery is not implemented') } - async runQueryCount(q: DBQuery): Promise { + async runQueryCount(q: DBQuery): Promise { throw new Error('runQueryCount is not implemented') } - async saveBatch( + async saveBatch( table: string, rows: ROW[], opt?: CommonDBSaveOptions, @@ -86,7 +76,7 @@ export class BaseCommonDB implements CommonDB { throw new Error('saveBatch is not implemented') } - streamQuery(q: DBQuery): ReadableTyped> { + streamQuery(q: DBQuery): ReadableTyped { throw new Error('streamQuery is not implemented') } diff --git a/src/common.db.ts b/src/common.db.ts index a77f37b..e6f30be 100644 --- a/src/common.db.ts +++ b/src/common.db.ts @@ -1,9 +1,4 @@ -import { - JsonSchemaObject, - JsonSchemaRootObject, - PartialObjectWithId, - Saved, -} from '@naturalcycles/js-lib' +import { JsonSchemaObject, JsonSchemaRootObject, ObjectWithId } from '@naturalcycles/js-lib' import type { ReadableTyped } from '@naturalcycles/nodejs-lib' import { CommonDBCreateOptions, @@ -69,15 +64,13 @@ export interface CommonDB { * * This is important for the code to rely on it, and it's verified by dbTest */ - getTableSchema: ( - table: string, - ) => Promise> + getTableSchema: (table: string) => Promise> /** * Will do like `create table ...` for mysql. * Caution! dropIfExists defaults to false. If set to true - will actually DROP the table! */ - createTable: ( + createTable: ( table: string, schema: JsonSchemaObject, opt?: CommonDBCreateOptions, @@ -88,36 +81,36 @@ export interface CommonDB { * Order of items returned is not guaranteed to match order of ids. * (Such limitation exists because Datastore doesn't support it). 
*/ - getByIds: ( + getByIds: ( table: string, ids: string[], opt?: CommonDBOptions, - ) => Promise[]> + ) => Promise // QUERY /** * Order by 'id' is not supported by all implementations (for example, Datastore doesn't support it). */ - runQuery: ( + runQuery: ( q: DBQuery, opt?: CommonDBOptions, - ) => Promise>> + ) => Promise> - runQueryCount: ( + runQueryCount: ( q: DBQuery, opt?: CommonDBOptions, ) => Promise - streamQuery: ( + streamQuery: ( q: DBQuery, opt?: CommonDBStreamOptions, - ) => ReadableTyped> + ) => ReadableTyped // SAVE /** * rows can have missing ids only if DB supports auto-generating them (like mysql auto_increment). */ - saveBatch: ( + saveBatch: ( table: string, rows: ROW[], opt?: CommonDBSaveOptions, @@ -134,7 +127,7 @@ export interface CommonDB { * Returns number of deleted items. * Not supported by all implementations (e.g Datastore will always return same number as number of ids). */ - deleteByQuery: ( + deleteByQuery: ( q: DBQuery, opt?: CommonDBOptions, ) => Promise @@ -157,7 +150,7 @@ export interface CommonDB { * * Returns number of rows affected. */ - updateByQuery: ( + updateByQuery: ( q: DBQuery, patch: DBPatch, opt?: CommonDBOptions, diff --git a/src/commondao/common.dao.model.ts b/src/commondao/common.dao.model.ts index e92bf98..6aa7bde 100644 --- a/src/commondao/common.dao.model.ts +++ b/src/commondao/common.dao.model.ts @@ -1,8 +1,8 @@ import { AnyObject, + BaseDBEntity, CommonLogger, ErrorMode, - PartialObjectWithId, Promisable, Saved, ZodError, @@ -19,11 +19,7 @@ import { import { CommonDB } from '../common.db' import { CommonDBCreateOptions, CommonDBOptions, CommonDBSaveOptions } from '../db.model' -export interface CommonDaoHooks< - BM extends PartialObjectWithId, - DBM extends PartialObjectWithId, - TM, -> { +export interface CommonDaoHooks { /** * Allows to override the id generation function. * By default it uses `stringId` from nodejs-lib @@ -64,7 +60,7 @@ export interface CommonDaoHooks< */ beforeDBMValidate: (dbm: Partial) => Partial - beforeDBMToBM: (dbm: Saved) => Partial | Promise> + beforeDBMToBM: (dbm: DBM) => Partial | Promise> beforeBMToDBM: (bm: BM) => Partial | Promise> beforeBMToTM: (bm: BM) => Partial @@ -106,7 +102,7 @@ export interface CommonDaoHooks< * It still applies to BM "transitively", during dbmToBM * (e.g after loaded from the Database). */ - anonymize: (dbm: Saved) => Saved + anonymize: (dbm: DBM) => DBM /** * If hook is defined - allows to prevent or modify the error thrown. @@ -137,8 +133,8 @@ export enum CommonDaoLogLevel { } export interface CommonDaoCfg< - BM extends PartialObjectWithId, - DBM extends PartialObjectWithId = BM, + BM extends BaseDBEntity, + DBM extends BaseDBEntity = BM, TM extends AnyObject = BM, > { db: CommonDB @@ -289,10 +285,8 @@ export interface CommonDaoOptions extends CommonDBOptions { table?: string } -export interface CommonDaoSaveOptions< - BM extends PartialObjectWithId, - DBM extends PartialObjectWithId, -> extends CommonDaoSaveBatchOptions { +export interface CommonDaoSaveOptions + extends CommonDaoSaveBatchOptions { /** * If provided - a check will be made. * If the object for saving equals to the object passed to `skipIfEquals` - save operation will be skipped. @@ -307,7 +301,7 @@ export interface CommonDaoSaveOptions< /** * All properties default to undefined. 
*/ -export interface CommonDaoSaveBatchOptions +export interface CommonDaoSaveBatchOptions extends CommonDaoOptions, CommonDBSaveOptions { /** @@ -322,10 +316,10 @@ export interface CommonDaoSaveBatchOptions ensureUniqueId?: boolean } -export interface CommonDaoStreamDeleteOptions +export interface CommonDaoStreamDeleteOptions extends CommonDaoStreamOptions {} -export interface CommonDaoStreamSaveOptions +export interface CommonDaoStreamSaveOptions extends CommonDaoSaveBatchOptions, CommonDaoStreamOptions {} diff --git a/src/commondao/common.dao.test.ts b/src/commondao/common.dao.test.ts index 83c97df..c368622 100644 --- a/src/commondao/common.dao.test.ts +++ b/src/commondao/common.dao.test.ts @@ -1,13 +1,5 @@ import { mockTime, MOCK_TS_2018_06_21 } from '@naturalcycles/dev-lib/dist/testing' -import { - ErrorMode, - _omit, - _range, - _sortBy, - pTry, - pExpectedError, - Saved, -} from '@naturalcycles/js-lib' +import { ErrorMode, _omit, _range, _sortBy, pTry, pExpectedError } from '@naturalcycles/js-lib' import { AjvSchema, AjvValidationError, @@ -176,7 +168,7 @@ test('patchById', async () => { }) test('patch', async () => { - const item: Saved = await dao.save({ + const item: TestItemBM = await dao.save({ id: 'id1', k1: 'k1', }) diff --git a/src/commondao/common.dao.ts b/src/commondao/common.dao.ts index 602dd96..4be9a68 100644 --- a/src/commondao/common.dao.ts +++ b/src/commondao/common.dao.ts @@ -9,6 +9,7 @@ import { _passthroughPredicate, _since, _truncate, + _typeCast, _uniqBy, AnyObject, AppError, @@ -19,11 +20,11 @@ import { JsonSchemaObject, JsonSchemaRootObject, ObjectWithId, - PartialObjectWithId, pMap, Saved, SKIP, UnixTimestampMillisNumber, + UnsavedId, ZodSchema, ZodValidationError, zSafeValidate, @@ -77,8 +78,8 @@ const isCI = !!process.env['CI'] * TM = Transport model (optimized to be sent over the wire) */ export class CommonDao< - BM extends PartialObjectWithId, - DBM extends PartialObjectWithId = BM, + BM extends BaseDBEntity, + DBM extends BaseDBEntity = BM, TM extends AnyObject = BM, > { constructor(public cfg: CommonDaoCfg) { @@ -130,7 +131,7 @@ export class CommonDao< const table = opt.table || this.cfg.table const started = this.logStarted(op, table) - let dbm = (await (opt.tx || this.cfg.db).getByIds(table, [id]))[0] + let dbm = (await (opt.tx || this.cfg.db).getByIds>(table, [id]))[0] if (dbm && !opt.raw && this.cfg.hooks!.afterLoad) { dbm = (await this.cfg.hooks!.afterLoad(dbm)) || undefined } @@ -170,7 +171,7 @@ export class CommonDao< const op = `getByIdAsDBM(${id})` const table = opt.table || this.cfg.table const started = this.logStarted(op, table) - let [dbm] = await (opt.tx || this.cfg.db).getByIds(table, [id]) + let [dbm] = await (opt.tx || this.cfg.db).getByIds>(table, [id]) if (dbm && !opt.raw && this.cfg.hooks!.afterLoad) { dbm = (await this.cfg.hooks!.afterLoad(dbm)) || undefined } @@ -189,7 +190,7 @@ export class CommonDao< const op = `getByIdAsTM(${id})` const table = opt.table || this.cfg.table const started = this.logStarted(op, table) - let [dbm] = await (opt.tx || this.cfg.db).getByIds(table, [id]) + let [dbm] = await (opt.tx || this.cfg.db).getByIds>(table, [id]) if (dbm && !opt.raw && this.cfg.hooks!.afterLoad) { dbm = (await this.cfg.hooks!.afterLoad(dbm)) || undefined } @@ -209,7 +210,7 @@ export class CommonDao< const op = `getByIds ${ids.length} id(s) (${_truncate(ids.slice(0, 10).join(', '), 50)})` const table = opt.table || this.cfg.table const started = this.logStarted(op, table) - let dbms = await (opt.tx || this.cfg.db).getByIds(table, 
ids) + let dbms = await (opt.tx || this.cfg.db).getByIds>(table, ids) if (!opt.raw && this.cfg.hooks!.afterLoad && dbms.length) { dbms = (await pMap(dbms, async dbm => await this.cfg.hooks!.afterLoad!(dbm))).filter( _isTruthy, @@ -226,7 +227,7 @@ export class CommonDao< const op = `getByIdsAsDBM ${ids.length} id(s) (${_truncate(ids.slice(0, 10).join(', '), 50)})` const table = opt.table || this.cfg.table const started = this.logStarted(op, table) - let dbms = await (opt.tx || this.cfg.db).getByIds(table, ids) + let dbms = await (opt.tx || this.cfg.db).getByIds>(table, ids) if (!opt.raw && this.cfg.hooks!.afterLoad && dbms.length) { dbms = (await pMap(dbms, async dbm => await this.cfg.hooks!.afterLoad!(dbm))).filter( _isTruthy, @@ -283,7 +284,7 @@ export class CommonDao< } } - private async ensureUniqueId(table: string, dbm: Saved): Promise { + private async ensureUniqueId(table: string, dbm: DBM): Promise { // todo: retry N times const existing = await this.cfg.db.getByIds(table, [dbm.id]) if (existing.length) { @@ -351,7 +352,7 @@ export class CommonDao< q.table = opt.table || q.table const op = `runQuery(${q.pretty()})` const started = this.logStarted(op, q.table) - let { rows, ...queryResult } = await this.cfg.db.runQuery(q, opt) + let { rows, ...queryResult } = await this.cfg.db.runQuery>(q, opt) const partialQuery = !!q._selectedFieldNames if (!opt.raw && this.cfg.hooks!.afterLoad && rows.length) { rows = (await pMap(rows, async dbm => await this.cfg.hooks!.afterLoad!(dbm))).filter( @@ -379,7 +380,7 @@ export class CommonDao< q.table = opt.table || q.table const op = `runQueryAsDBM(${q.pretty()})` const started = this.logStarted(op, q.table) - let { rows, ...queryResult } = await this.cfg.db.runQuery(q, opt) + let { rows, ...queryResult } = await this.cfg.db.runQuery>(q, opt) if (!opt.raw && this.cfg.hooks!.afterLoad && rows.length) { rows = (await pMap(rows, async dbm => await this.cfg.hooks!.afterLoad!(dbm))).filter( _isTruthy, @@ -404,7 +405,7 @@ export class CommonDao< q.table = opt.table || q.table const op = `runQueryAsTM(${q.pretty()})` const started = this.logStarted(op, q.table) - let { rows, ...queryResult } = await this.cfg.db.runQuery(q, opt) + let { rows, ...queryResult } = await this.cfg.db.runQuery>(q, opt) if (!opt.raw && this.cfg.hooks!.afterLoad && rows.length) { rows = (await pMap(rows, async dbm => await this.cfg.hooks!.afterLoad!(dbm))).filter( _isTruthy, @@ -678,7 +679,7 @@ export class CommonDao< * Mutates! * "Returns", just to have a type of "Saved" */ - assignIdCreatedUpdated(obj: T, opt: CommonDaoOptions = {}): Saved { + assignIdCreatedUpdated(obj: Partial, opt: CommonDaoOptions = {}): T { const now = Math.floor(Date.now() / 1000) if (this.cfg.useCreatedProperty) { @@ -693,14 +694,14 @@ export class CommonDao< obj.id ||= this.cfg.hooks!.createNaturalId?.(obj as any) || this.cfg.hooks!.createRandomId!() } - return obj as Saved + return obj as T } // SAVE /** * Mutates with id, created, updated */ - async save(bm: BM, opt: CommonDaoSaveOptions = {}): Promise> { + async save(bm: UnsavedId, opt: CommonDaoSaveOptions = {}): Promise> { this.requireWriteAccess() if (opt.skipIfEquals && _deepJsonEquals(bm, opt.skipIfEquals)) { @@ -710,11 +711,12 @@ export class CommonDao< const idWasGenerated = !bm.id && this.cfg.generateId this.assignIdCreatedUpdated(bm, opt) // mutates + _typeCast>(bm) let dbm = await this.bmToDBM(bm, opt) if (this.cfg.hooks!.beforeSave) { dbm = (await this.cfg.hooks!.beforeSave(dbm))! 
- if (dbm === null) return bm as any + if (dbm === null) return bm } const table = opt.table || this.cfg.table @@ -738,7 +740,7 @@ export class CommonDao< } this.logSaveResult(started, op, table) - return bm as Saved + return bm } /** @@ -750,18 +752,18 @@ export class CommonDao< * Similar to `patch`, but doesn't load the object from the Database. */ async savePatch( - bm: Saved, + bm: BM, patch: Partial, opt: CommonDaoSaveBatchOptions, ): Promise> { - const patched: Saved = { + const patched: BM = { ...bm, ...patch, } if (_deepJsonEquals(bm, patched)) { // Skipping the save operation, as data is the same - return bm + return bm as Saved } // Actually apply the patch by mutating the original object (by design) @@ -828,7 +830,7 @@ export class CommonDao< * It still loads the row from the DB. */ async patch( - bm: Saved, + bm: BM, patch: Partial, opt: CommonDaoSaveBatchOptions = {}, ): Promise> { @@ -846,7 +848,7 @@ export class CommonDao< if (_deepJsonEquals(loaded, bm)) { // Skipping the save operation, as data is the same - return bm + return bm as Saved } // Make `bm` exactly the same as `loaded` @@ -862,7 +864,7 @@ export class CommonDao< * Like patch, but runs all operations within a Transaction. */ async patchInTransaction( - bm: Saved, + bm: BM, patch: Partial, opt?: CommonDaoSaveBatchOptions, ): Promise> { @@ -871,7 +873,10 @@ export class CommonDao< }) } - async saveAsDBM(dbm: DBM, opt: CommonDaoSaveBatchOptions = {}): Promise> { + async saveAsDBM( + dbm: UnsavedId, + opt: CommonDaoSaveBatchOptions = {}, + ): Promise> { this.requireWriteAccess() const table = opt.table || this.cfg.table @@ -911,12 +916,15 @@ export class CommonDao< return row } - async saveBatch(bms: BM[], opt: CommonDaoSaveBatchOptions = {}): Promise[]> { + async saveBatch( + bms: UnsavedId[], + opt: CommonDaoSaveBatchOptions = {}, + ): Promise[]> { if (!bms.length) return [] this.requireWriteAccess() const table = opt.table || this.cfg.table bms.forEach(bm => this.assignIdCreatedUpdated(bm, opt)) - let dbms = await this.bmsToDBM(bms, opt) + let dbms = await this.bmsToDBM(bms as BM[], opt) if (this.cfg.hooks!.beforeSave && dbms.length) { dbms = (await pMap(dbms, async dbm => await this.cfg.hooks!.beforeSave!(dbm))).filter( @@ -956,7 +964,7 @@ export class CommonDao< } async saveBatchAsDBM( - dbms: DBM[], + dbms: UnsavedId[], opt: CommonDaoSaveBatchOptions = {}, ): Promise[]> { if (!dbms.length) return [] @@ -1183,13 +1191,13 @@ export class CommonDao< // CONVERSIONS async dbmToBM(_dbm: undefined, opt?: CommonDaoOptions): Promise - async dbmToBM(_dbm?: Saved, opt?: CommonDaoOptions): Promise> - async dbmToBM(_dbm?: Saved, opt: CommonDaoOptions = {}): Promise | undefined> { + async dbmToBM(_dbm?: DBM, opt?: CommonDaoOptions): Promise> + async dbmToBM(_dbm?: DBM, opt: CommonDaoOptions = {}): Promise | undefined> { if (!_dbm) return // optimization: no need to run full joi DBM validation, cause BM validation will be run // const dbm = this.anyToDBM(_dbm, opt) - let dbm: Saved = { ..._dbm, ...this.cfg.hooks!.parseNaturalId!(_dbm.id) } + let dbm: DBM = { ..._dbm, ...this.cfg.hooks!.parseNaturalId!(_dbm.id) } if (opt.anonymize) { dbm = this.cfg.hooks!.anonymize!(dbm) @@ -1203,7 +1211,7 @@ export class CommonDao< return this.validateAndConvert(bm, this.cfg.bmSchema, DBModelType.BM, opt) } - async dbmsToBM(dbms: Saved[], opt: CommonDaoOptions = {}): Promise[]> { + async dbmsToBM(dbms: DBM[], opt: CommonDaoOptions = {}): Promise[]> { return await pMap(dbms, async dbm => await this.dbmToBM(dbm, opt)) } @@ -1239,7 +1247,7 @@ 
export class CommonDao< anyToDBM(dbm: undefined, opt?: CommonDaoOptions): undefined anyToDBM(dbm?: any, opt?: CommonDaoOptions): Saved - anyToDBM(dbm?: Saved, opt: CommonDaoOptions = {}): Saved | undefined { + anyToDBM(dbm?: DBM, opt: CommonDaoOptions = {}): Saved | undefined { if (!dbm) return // this shouldn't be happening on load! but should on save! @@ -1255,13 +1263,13 @@ export class CommonDao< return this.validateAndConvert(dbm, this.cfg.dbmSchema, DBModelType.DBM, opt) } - anyToDBMs(entities: Saved[], opt: CommonDaoOptions = {}): Saved[] { + anyToDBMs(entities: DBM[], opt: CommonDaoOptions = {}): Saved[] { return entities.map(entity => this.anyToDBM(entity, opt)) } bmToTM(bm: undefined, opt?: CommonDaoOptions): TM | undefined - bmToTM(bm?: Saved, opt?: CommonDaoOptions): TM - bmToTM(bm?: Saved, opt?: CommonDaoOptions): TM | undefined { + bmToTM(bm?: BM, opt?: CommonDaoOptions): TM + bmToTM(bm?: BM, opt?: CommonDaoOptions): TM | undefined { if (bm === undefined) return // optimization: 1 validation is enough @@ -1276,7 +1284,7 @@ export class CommonDao< return this.validateAndConvert(tm, this.cfg.tmSchema, DBModelType.TM, opt) } - bmsToTM(bms: Saved[], opt: CommonDaoOptions = {}): TM[] { + bmsToTM(bms: BM[], opt: CommonDaoOptions = {}): TM[] { // try/catch? return bms.map(bm => this.bmToTM(bm, opt)) } @@ -1479,7 +1487,7 @@ export class CommonDaoTransaction { } } - async getById( + async getById( dao: CommonDao, id?: string | null, opt?: CommonDaoOptions, @@ -1487,7 +1495,7 @@ export class CommonDaoTransaction { return await dao.getById(id, { ...opt, tx: this.tx }) } - async getByIds( + async getByIds( dao: CommonDao, ids: string[], opt?: CommonDaoOptions, @@ -1509,17 +1517,17 @@ export class CommonDaoTransaction { // } // } - async save( + async save( dao: CommonDao, - bm: BM, + bm: UnsavedId, opt?: CommonDaoSaveBatchOptions, ): Promise> { return (await this.saveBatch(dao, [bm], opt))[0]! } - async saveBatch( + async saveBatch( dao: CommonDao, - bms: BM[], + bms: UnsavedId[], opt?: CommonDaoSaveBatchOptions, ): Promise[]> { return await dao.saveBatch(bms, { ...opt, tx: this.tx }) diff --git a/src/db.model.ts b/src/db.model.ts index d822251..0f1e1e2 100644 --- a/src/db.model.ts +++ b/src/db.model.ts @@ -1,4 +1,4 @@ -import type { PartialObjectWithId } from '@naturalcycles/js-lib' +import { ObjectWithId } from '@naturalcycles/js-lib' import { CommonDB } from './common.db' /** @@ -55,8 +55,7 @@ export interface CommonDBOptions { /** * All properties default to undefined. 
*/ -export interface CommonDBSaveOptions - extends CommonDBOptions { +export interface CommonDBSaveOptions extends CommonDBOptions { excludeFromIndexes?: (keyof ROW)[] /** @@ -89,9 +88,9 @@ export interface RunQueryResult { endCursor?: string } -export type DBOperation = DBSaveBatchOperation | DBDeleteByIdsOperation +export type DBOperation = DBSaveBatchOperation | DBDeleteByIdsOperation -export interface DBSaveBatchOperation { +export interface DBSaveBatchOperation { type: 'saveBatch' table: string rows: ROW[] @@ -131,6 +130,6 @@ export class DBIncrement { } } -export type DBPatch = Partial< +export type DBPatch = Partial< Record > diff --git a/src/pipeline/dbPipelineBackup.ts b/src/pipeline/dbPipelineBackup.ts index 9caff3e..ed97163 100644 --- a/src/pipeline/dbPipelineBackup.ts +++ b/src/pipeline/dbPipelineBackup.ts @@ -78,7 +78,7 @@ export interface DBPipelineBackupOptions extends TransformLogProgressOptions { * But if queryPerTable is set for a table - it will override the Query that is ran for that table * (and ignore sinceUpdated, sinceUpdatedPerTable, limit, and any other properties that modify the query). */ - queryPerTable?: StringMap + queryPerTable?: StringMap> /** * Directory path to store dumped files. Will create `${tableName}.ndjson` (or .ndjson.gz if gzip=true) files. diff --git a/src/pipeline/dbPipelineCopy.ts b/src/pipeline/dbPipelineCopy.ts index 815b9b4..7d5248b 100644 --- a/src/pipeline/dbPipelineCopy.ts +++ b/src/pipeline/dbPipelineCopy.ts @@ -1,4 +1,11 @@ -import { AsyncMapper, ErrorMode, pMap, _passthroughMapper, localTime } from '@naturalcycles/js-lib' +import { + AsyncMapper, + ErrorMode, + pMap, + _passthroughMapper, + localTime, + BaseDBEntity, +} from '@naturalcycles/js-lib' import { NDJsonStats, transformBuffer, @@ -80,7 +87,7 @@ export interface DBPipelineCopyOptions extends TransformLogProgressOptions { */ transformMapOptions?: TransformMapOptions - saveOptionsPerTable?: Record + saveOptionsPerTable?: Record> } /** @@ -118,13 +125,13 @@ export async function dbPipelineCopy(opt: DBPipelineCopyOptions): Promise { - let q = DBQuery.create(table).limit(limit) + let q = DBQuery.create(table).limit(limit) if (sinceUpdated) { q = q.filter('updated', '>=', sinceUpdated) } - const saveOptions: CommonDBSaveOptions = saveOptionsPerTable[table] || {} + const saveOptions: CommonDBSaveOptions = saveOptionsPerTable[table] || {} const mapper = mapperPerTable[table] || _passthroughMapper const stream = dbInput.streamQuery(q) diff --git a/src/pipeline/dbPipelineRestore.ts b/src/pipeline/dbPipelineRestore.ts index cb0adbd..f31aaff 100644 --- a/src/pipeline/dbPipelineRestore.ts +++ b/src/pipeline/dbPipelineRestore.ts @@ -7,9 +7,10 @@ import { _hb, _mapValues, _passthroughMapper, - SavedDBEntity, localTime, JsonSchemaObject, + BaseDBEntity, + Saved, } from '@naturalcycles/js-lib' import { NDJsonStats, @@ -111,7 +112,7 @@ export interface DBPipelineRestoreOptions extends TransformLogProgressOptions { */ transformMapOptions?: TransformMapOptions - saveOptionsPerTable?: Record + saveOptionsPerTable?: Record> } /** @@ -194,7 +195,7 @@ export async function dbPipelineRestore(opt: DBPipelineRestoreOptions): Promise< async table => { const gzip = tablesToGzip.has(table) const filePath = `${inputDirPath}/${table}.ndjson` + (gzip ? 
'.gz' : '') - const saveOptions: CommonDBSaveOptions = saveOptionsPerTable[table] || {} + const saveOptions: CommonDBSaveOptions = saveOptionsPerTable[table] || {} const started = Date.now() let rows = 0 @@ -216,7 +217,7 @@ export async function dbPipelineRestore(opt: DBPipelineRestoreOptions): Promise< }), transformLimit({ limit }), ...(sinceUpdated - ? [transformFilterSync(r => r.updated >= sinceUpdated)] + ? [transformFilterSync>(r => r.updated >= sinceUpdated)] : []), transformMap(mapperPerTable[table] || _passthroughMapper, { errorMode, diff --git a/src/query/dbQuery.test.ts b/src/query/dbQuery.test.ts index 84ddffd..92c105c 100644 --- a/src/query/dbQuery.test.ts +++ b/src/query/dbQuery.test.ts @@ -7,13 +7,17 @@ test('DBQuery', () => { }) test('prettyConditions', () => { - const q = new DBQuery('TestKind').filter('a', '>', 5) + const q = new DBQuery('TestKind').filter('a', '>', 5) expect(q.prettyConditions()).toEqual(['a>5']) expect(q.pretty()).toBe('a>5') }) test('toJson, fromJson', () => { - const q = new DBQuery('TestKind').filter('a', '>', 5).order('a', true).select(['a', 'b']).limit(3) + const q = new DBQuery('TestKind') + .filter('a', '>', 5) + .order('a', true) + .select(['a', 'b']) + .limit(3) // const json = JSON.stringify(q, null, 2) // console.log(json) diff --git a/src/query/dbQuery.ts b/src/query/dbQuery.ts index c6d5426..d7f878b 100644 --- a/src/query/dbQuery.ts +++ b/src/query/dbQuery.ts @@ -4,8 +4,8 @@ import { Saved, AnyObject, _objectAssign, - PartialObjectWithId, - AnyPartialObjectWithId, + BaseDBEntity, + ObjectWithId, } from '@naturalcycles/js-lib' import { ReadableTyped } from '@naturalcycles/nodejs-lib' import { @@ -62,13 +62,13 @@ export const dbQueryFilterOperatorValues: DBQueryFilterOperator[] = [ 'array-contains-any', ] -export interface DBQueryFilter { +export interface DBQueryFilter { name: keyof ROW op: DBQueryFilterOperator val: any } -export interface DBQueryOrder { +export interface DBQueryOrder { name: keyof ROW descending?: boolean } @@ -83,19 +83,17 @@ export interface DBQueryOrder is the type of **queried** object (so e.g `key of DBM` can be used), not **returned** object. */ -export class DBQuery { +export class DBQuery { constructor(public table: string) {} /** * Convenience method. */ - static create( - table: string, - ): DBQuery { + static create(table: string): DBQuery { return new DBQuery(table) } - static fromPlainObject( + static fromPlainObject( obj: Partial> & { table: string }, ): DBQuery { return Object.assign(new DBQuery(obj.table), obj) @@ -239,8 +237,8 @@ export class DBQuery { * DBQuery that has additional method to support Fluent API style. 
*/ export class RunnableDBQuery< - BM extends PartialObjectWithId, - DBM extends PartialObjectWithId = BM, + BM extends BaseDBEntity, + DBM extends BaseDBEntity = BM, TM extends AnyObject = BM, > extends DBQuery { /** diff --git a/src/timeseries/commonTimeSeriesDao.ts b/src/timeseries/commonTimeSeriesDao.ts index 17ea6cd..64dddc7 100644 --- a/src/timeseries/commonTimeSeriesDao.ts +++ b/src/timeseries/commonTimeSeriesDao.ts @@ -78,7 +78,7 @@ export class CommonTimeSeriesDao { } async query(q: TimeSeriesQuery): Promise { - const dbq = DBQuery.create(`${q.series}${_TIMESERIES_RAW}`).order('ts') + const dbq = DBQuery.create(`${q.series}${_TIMESERIES_RAW}`).order('ts') if (q.fromIncl) dbq.filter('ts', '>=', q.fromIncl) if (q.toExcl) dbq.filter('ts', '<', q.toExcl) diff --git a/src/transaction/dbTransaction.util.ts b/src/transaction/dbTransaction.util.ts index bebd0fc..1ec539e 100644 --- a/src/transaction/dbTransaction.util.ts +++ b/src/transaction/dbTransaction.util.ts @@ -1,4 +1,4 @@ -import { PartialObjectWithId } from '@naturalcycles/js-lib' +import { ObjectWithId } from '@naturalcycles/js-lib' import type { CommonDB } from '../common.db' import { CommonDBOptions, CommonDBSaveOptions, DBTransaction } from '../db.model' @@ -100,7 +100,7 @@ export class FakeDBTransaction implements DBTransaction { // no-op async rollback(): Promise {} - async getByIds( + async getByIds( table: string, ids: string[], opt?: CommonDBOptions, @@ -113,7 +113,7 @@ export class FakeDBTransaction implements DBTransaction { // ): Promise> { // return await this.db.runQuery(q, opt) // } - async saveBatch( + async saveBatch( table: string, rows: ROW[], opt?: CommonDBSaveOptions, diff --git a/src/validation/index.ts b/src/validation/index.ts index 704779d..555817e 100644 --- a/src/validation/index.ts +++ b/src/validation/index.ts @@ -21,7 +21,7 @@ export const commonDBOptionsSchema = objectSchema({ ['skipCache' as any]: booleanSchema.optional(), }) -export const commonDBSaveOptionsSchema = objectSchema({ +export const commonDBSaveOptionsSchema = objectSchema>({ excludeFromIndexes: arraySchema(stringSchema).optional(), }).concat(commonDBOptionsSchema) @@ -29,13 +29,13 @@ export const dbQueryFilterOperatorSchema = Joi.string().v ...dbQueryFilterOperatorValues, ) -export const dbQueryFilterSchema = objectSchema({ +export const dbQueryFilterSchema = objectSchema>({ name: stringSchema, op: dbQueryFilterOperatorSchema, val: anySchema, }) -export const dbQueryOrderSchema = objectSchema({ +export const dbQueryOrderSchema = objectSchema>({ name: stringSchema, descending: booleanSchema.optional(), }) diff --git a/yarn.lock b/yarn.lock index 53764a3..70c573f 100644 --- a/yarn.lock +++ b/yarn.lock @@ -789,9 +789,9 @@ "@jridgewell/sourcemap-codec" "^1.4.10" "@jridgewell/trace-mapping@^0.3.12", "@jridgewell/trace-mapping@^0.3.17", "@jridgewell/trace-mapping@^0.3.18", "@jridgewell/trace-mapping@^0.3.9": - version "0.3.21" - resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.21.tgz#5dc1df7b3dc4a6209e503a924e1ca56097a2bb15" - integrity sha512-SRfKmRe1KvYnxjEMtxEr+J4HIeMX5YBg/qhRHpxEIGjhX1rshcHlnFUE9K0GazhVKWM7B+nARSkV8LuvJdJ5/g== + version "0.3.22" + resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.22.tgz#72a621e5de59f5f1ef792d0793a82ee20f645e4c" + integrity sha512-Wf963MzWtA2sjrNt+g18IAln9lKnlRp+K2eH4jjIoF1wYeq3aMREpG09xhlhdzS0EjwU7qmUJYangWa+151vZw== dependencies: "@jridgewell/resolve-uri" "^3.1.0" "@jridgewell/sourcemap-codec" "^1.4.14" @@ -858,17 +858,17 @@ yargs 
"^17.0.0" "@naturalcycles/js-lib@^14.0.0", "@naturalcycles/js-lib@^14.116.0": - version "14.204.1" - resolved "https://registry.yarnpkg.com/@naturalcycles/js-lib/-/js-lib-14.204.1.tgz#d81838e4a8cd889b734637c2b26a750d5758ef10" - integrity sha512-+2E30+MKFdlxnkbsBHBcbi8aTTPl3AZRe+NUzgimJoJw+7wJS0k6v2DVbrmQ1MCcEyQ4gV5jhOQT8iHEtDDSDw== + version "14.206.0" + resolved "https://registry.yarnpkg.com/@naturalcycles/js-lib/-/js-lib-14.206.0.tgz#529be8c590f31cd7dd432032387cca239b860c8f" + integrity sha512-PAoaYwrViznvASPixkaXCvonbaLEHs3yb4maQm5b1SXkiQq6viR8NLzaI+8/Rt3iWveP4BScluYYqfbuPJCtgg== dependencies: tslib "^2.0.0" zod "^3.20.2" "@naturalcycles/nodejs-lib@^13.0.1", "@naturalcycles/nodejs-lib@^13.0.2", "@naturalcycles/nodejs-lib@^13.1.1": - version "13.7.0" - resolved "https://registry.yarnpkg.com/@naturalcycles/nodejs-lib/-/nodejs-lib-13.7.0.tgz#8d49d85d9c3165f20a9bbc08024bc213f5318cae" - integrity sha512-mPGMLS5pBP8U9ToVwM91+7ydir+fCUqYE3NVDotvfQW6jYf+LExKqnyMDGCEnwkGZhkafvGHbHBh5eqluyXsWA== + version "13.7.1" + resolved "https://registry.yarnpkg.com/@naturalcycles/nodejs-lib/-/nodejs-lib-13.7.1.tgz#2369c0a1b28de70bdf87fff067a2e6b2a651c2df" + integrity sha512-y0hVINujeLSVtggnfJEfavZMqLu32/ERIG+EcSgPHK4+MpnKMmpOkh3b59LjaVPs8JDvnCDWRTIPCOOllm1oLw== dependencies: "@naturalcycles/js-lib" "^14.0.0" "@types/js-yaml" "^4.0.9" @@ -934,9 +934,9 @@ integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA== "@sinonjs/commons@^3.0.0": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.0.tgz#beb434fe875d965265e04722ccfc21df7f755d72" - integrity sha512-jXBtWAF4vmdNmZgD5FoKsVLv3rPgDnLgPbU84LIJ3otV44vJlDRokVng5v8NFJdCf/da9legHcKaRuZs4L7faA== + version "3.0.1" + resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-3.0.1.tgz#1029357e44ca901a615585f6d27738dbc89084cd" + integrity sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ== dependencies: type-detect "4.0.8" @@ -1596,7 +1596,7 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== -base64-js@^1.2.0: +base64-js@^1.2.0, base64-js@^1.3.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== @@ -1695,6 +1695,14 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.2.tgz#2b146a6fd72e80b4f55d255f35ed59a3a9a41bd5" integrity sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ== +buffer@^5.2.1: + version "5.7.1" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" + integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== + dependencies: + base64-js "^1.3.1" + ieee754 "^1.1.13" + builtin-modules@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-3.3.0.tgz#cae62812b89801e9656336e46223e030386be7b6" @@ -1966,9 +1974,9 @@ cookie@0.5.0: integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== core-js-compat@^3.34.0: - version "3.35.0" - resolved 
"https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.35.0.tgz#c149a3d1ab51e743bc1da61e39cb51f461a41873" - integrity sha512-5blwFAddknKeNgsjBzilkdQ0+YK8L1PfqPYq40NOYMYFSS38qj+hpTcLLWwpIwA2A5bje/x5jmVn2tzUMg9IVw== + version "3.35.1" + resolved "https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.35.1.tgz#215247d7edb9e830efa4218ff719beb2803555e2" + integrity sha512-sftHa5qUJY3rs9Zht1WEnmkvXputCyDBczPnr7QDgL8n3qrF3CMXY4VPSYtOLLiOUJcah2WNXREd48iOl6mQIw== dependencies: browserslist "^4.22.2" @@ -2291,9 +2299,9 @@ dot-prop@^5.1.0: is-obj "^2.0.0" dotenv@^16.0.0: - version "16.3.1" - resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.1.tgz#369034de7d7e5b120972693352a3bf112172cc3e" - integrity sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ== + version "16.3.2" + resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.3.2.tgz#3cb611ce5a63002dbabf7c281bc331f69d28f03f" + integrity sha512-HTlk5nmhkm8F6JcdXvHIzaorzCoziNQT9mGxLPVXW8wJF1TiGSL60ZGB4gHWabHOaMmWmhvk2/lPHfnBiT78AQ== ecdsa-sig-formatter@1.0.11: version "1.0.11" @@ -2308,9 +2316,9 @@ ee-first@1.1.1: integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== electron-to-chromium@^1.4.601: - version "1.4.639" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.639.tgz#c6f9cc685f9efb2980d2cfc95a27f8142c9adf28" - integrity sha512-CkKf3ZUVZchr+zDpAlNLEEy2NJJ9T64ULWaDgy3THXXlPVPkLu3VOs9Bac44nebVtdwl2geSj6AxTtGDOxoXhg== + version "1.4.640" + resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.640.tgz#76290a36fa4b5f1f4cadaf1fc582478ebb3ac246" + integrity sha512-z/6oZ/Muqk4BaE7P69bXhUhpJbUM9ZJeka43ZwxsDshKtePns4mhBlh8bU5+yrnOnz3fhG82XLzGUXazOmsWnA== emittery@^0.13.1: version "0.13.1" @@ -3170,10 +3178,11 @@ husky@^8.0.1: integrity sha512-+dQSyqPh4x1hlO1swXBiNb2HzTDN1I2IGLQx1GrBuiqFJfoMrnZWwVmatvSiO+Iz8fBUnf+lekwNo4c2LlXItg== hyperid@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/hyperid/-/hyperid-3.1.1.tgz#50fe8a75ff3ada74dacaf5a3761fb031bdf541c7" - integrity sha512-RveV33kIksycSf7HLkq1sHB5wW0OwuX8ot8MYnY++gaaPXGFfKpBncHrAWxdpuEeRlazUMGWefwP1w6o6GaumA== + version "3.2.0" + resolved "https://registry.yarnpkg.com/hyperid/-/hyperid-3.2.0.tgz#b3b160a27f5791cdc8b0557f7bd2ef0616c6c218" + integrity sha512-PdTtDo+Rmza9nEhTunaDSUKwbC69TIzLEpZUwiB6f+0oqmY0UPfhyHCPt6K1NQ4WFv5yJBTG5vELztVWP+nEVQ== dependencies: + buffer "^5.2.1" uuid "^8.3.2" uuid-parse "^1.1.0" @@ -3191,6 +3200,11 @@ iconv-lite@0.6: dependencies: safer-buffer ">= 2.1.2 < 3.0.0" +ieee754@^1.1.13: + version "1.2.1" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" + integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== + ignore@^5.2.0, ignore@^5.2.4: version "5.3.0" resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.3.0.tgz#67418ae40d34d6999c95ff56016759c718c82f78" @@ -4951,9 +4965,9 @@ reusify@^1.0.4: integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== rfdc@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" - integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== + version "1.3.1" + resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.1.tgz#2b6d4df52dffe8bb346992a10ea9451f24373a8f" + 
integrity sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg== rimraf@^3.0.2: version "3.0.2"
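
Note for reviewers: the authoritative definitions of ObjectWithId / BaseDBEntity / Saved / UnsavedId live in @naturalcycles/js-lib and are not shown in this patch. Below is a rough sketch of the shapes this commit assumes, reconstructed from the commit message and the call sites above; the concrete field types and the UnsavedId helper are inferred for illustration, not copied from js-lib.

// Sketch only — shapes inferred from this patch, not the js-lib source.
interface ObjectWithId {
  id: string
}

// BaseDBEntity: id is now required; created/updated remain optional and are
// filled in by CommonDao.assignIdCreatedUpdated (unix seconds) before save.
interface BaseDBEntity extends ObjectWithId {
  created?: number
  updated?: number
}

// Saved<T>: the same row with created/updated guaranteed present (assumed).
type Saved<T extends BaseDBEntity> = T & { created: number; updated: number }

// UnsavedId<T>: save() input where id may still be missing and will be
// generated by the dao (assumed to mirror `idWasGenerated = !bm.id` above).
type UnsavedId<T extends BaseDBEntity> = Omit<T, 'id'> & { id?: string }

// A model that satisfies the new constraint `BM extends BaseDBEntity`:
interface UserBM extends BaseDBEntity {
  email: string
}

// Minimal CommonDao surface as used in this patch (signatures only):
declare class CommonDao<BM extends BaseDBEntity, DBM extends BaseDBEntity = BM> {
  save(bm: UnsavedId<BM>): Promise<Saved<BM>>
}

declare const userDao: CommonDao<UserBM>

async function demo(): Promise<void> {
  // id/created/updated may be omitted on input; the saved row has all three.
  const user = await userDao.save({ email: 'a@example.com' })
  console.log(user.id, user.created, user.updated)
}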