build: use per-repo lockfile
tokebe committed Nov 3, 2023
1 parent c4eb2bb commit 8733ec8
Showing 2 changed files with 9 additions and 8 deletions.
3 changes: 2 additions & 1 deletion package.json
@@ -32,6 +32,7 @@
   },
   "homepage": "https://github.com/biothings/bte_trapi_query_graph_handler#readme",
   "devDependencies": {
+    "@types/async": "^3.2.22",
     "@types/debug": "^4.1.10",
     "@types/jest": "^29.5.6",
     "@types/lodash": "^4.14.200",
@@ -64,7 +65,7 @@
     "debug": "^4.3.4",
     "ioredis": "^5.3.2",
     "lodash": "^4.17.21",
-    "lz4": "^0.6.5",
+    "lz4-napi": "^2.2.0",
     "redlock": "5.0.0-beta.2"
   }
 }
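
Context, not part of the commit: the dependency swap above replaces the native lz4 bindings with lz4-napi, whose synchronous calls are what cache_handler.ts now uses. A minimal round-trip sketch, assuming only the compressSync/uncompressSync calls shown in the diff below; the payload is a made-up example:

import { compressSync, uncompressSync } from 'lz4-napi';

// compressSync takes a string (or Buffer) and returns a compressed Buffer;
// uncompressSync reverses it. The payload here is illustrative only.
const original = JSON.stringify([{ id: 'example', value: 42 }]);
const compressed = compressSync(original);
const restored = uncompressSync(compressed).toString();
console.log(restored === original); // expected: true
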
14 changes: 7 additions & 7 deletions src/cache_handler.ts
@@ -4,7 +4,7 @@ const debug = Debug('bte:biothings-explorer-trapi:cache_handler');
 import LogEntry, { StampedLog } from './log_entry';
 import async from 'async';
 import helper from './helper';
-import lz4 from 'lz4';
+import { compressSync, uncompressSync } from 'lz4-napi';
 import chunker from 'stream-chunker';
 import { Readable, Transform } from 'stream';
 import { Record, RecordPackage } from '@biothings-explorer/api-response-transform';
@@ -34,7 +34,7 @@ class DelimitedChunksDecoder extends Transform {
     const parts = this._buffer.split(',');
     this._buffer = parts.pop();
     parts.forEach((part) => {
-      const parsedPart = JSON.parse(lz4.decode(Buffer.from(part, 'base64url')).toString());
+      const parsedPart: unknown = JSON.parse(uncompressSync(Buffer.from(part, 'base64url')).toString());
       if (Array.isArray(parsedPart)) {
         parsedPart.forEach((obj) => this.push(obj));
       } else {
@@ -49,12 +49,12 @@
   _flush(callback: (error?: Error | null | undefined, data?: unknown) => void): void {
     try {
       if (this._buffer.length) {
-        const final = JSON.parse(lz4.decode(Buffer.from(this._buffer, 'base64url')).toString());
+        const final: unknown = JSON.parse(uncompressSync(Buffer.from(this._buffer, 'base64url')).toString());
         callback(null, final);
       }
       callback();
     } catch (error) {
-      callback(error);
+      callback(error as Error);
     }
   }
 }
@@ -72,7 +72,7 @@ class DelimitedChunksEncoder extends Transform {
   _transform(obj: unknown, encoding: unknown, callback: () => void) {
     this._buffer.push(obj); // stringify/compress 64 objects at a time limits compress calls
     if (this._buffer.length === 64) {
-      const compressedPart = lz4.encode(JSON.stringify(this._buffer)).toString('base64url') + ',';
+      const compressedPart = compressSync(JSON.stringify(this._buffer)).toString('base64url') + ',';
       this.push(compressedPart);
       this._buffer = [];
     }
@@ -82,12 +82,12 @@
   _flush(callback: (error?: Error | null | undefined, data?: unknown) => void) {
     try {
       if (this._buffer.length) {
-        callback(null, lz4.encode(JSON.stringify(this._buffer)).toString('base64url') + ',');
+        callback(null, compressSync(JSON.stringify(this._buffer)).toString('base64url') + ',');
         return;
       }
       callback();
     } catch (error) {
-      callback(error);
+      callback(error as Error);
     }
   }
 }
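
For reference, the cache chunk format is unchanged in shape by this commit: batches of up to 64 records are JSON-stringified, LZ4-compressed, base64url-encoded, and joined with ',' delimiters, and decoding reverses each step per delimited part. A rough standalone sketch of that round-trip under the new lz4-napi calls (encodeBatch and decodeChunk are illustrative names, not helpers from the repository):

import { compressSync, uncompressSync } from 'lz4-napi';

// Illustrative helpers mirroring DelimitedChunksEncoder/Decoder's per-chunk logic;
// not part of cache_handler.ts.
function encodeBatch(batch: unknown[]): string {
  // JSON -> LZ4 -> base64url, with a trailing comma as the chunk delimiter
  return compressSync(JSON.stringify(batch)).toString('base64url') + ',';
}

function decodeChunk(chunk: string): unknown[] {
  // base64url -> LZ4 decompress -> JSON; each chunk holds an array of records
  const parsed: unknown = JSON.parse(uncompressSync(Buffer.from(chunk, 'base64url')).toString());
  return Array.isArray(parsed) ? parsed : [parsed];
}

// Round-trip example with a made-up record batch
const batch = [{ id: 'r1' }, { id: 'r2' }];
const encoded = encodeBatch(batch);
const [firstChunk] = encoded.split(',').filter((part) => part.length > 0);
console.log(decodeChunk(firstChunk)); // expected: [{ id: 'r1' }, { id: 'r2' }]
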
