diff --git a/.env.dev b/.env.dev index 5bce9cc..b48daab 100644 --- a/.env.dev +++ b/.env.dev @@ -15,4 +15,6 @@ PARALLEL=4 DEFAULT_PAGE_SIZE=10 MAX_PAGE_SIZE=100 -INDICES=["App-Name", "app", "domain", "namespace"] \ No newline at end of file +INDICES=["App-Name", "app", "domain", "namespace"] + +CACHING=1 \ No newline at end of file diff --git a/.env.docker b/.env.docker index 5318fe0..15c3255 100644 --- a/.env.docker +++ b/.env.docker @@ -15,4 +15,6 @@ PARALLEL=4 DEFAULT_PAGE_SIZE=10 MAX_PAGE_SIZE=100 -INDICES=["App-Name", "app", "domain", "namespace"] \ No newline at end of file +INDICES=["App-Name", "app", "domain", "namespace"] + +CACHING=1 \ No newline at end of file diff --git a/.eslintrc.json b/.eslintrc.json index 7197c51..aa21604 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -19,6 +19,7 @@ "require-jsdoc": 0, "max-len": 0, "camelcase": 0, - "guard-for-in": 0 + "guard-for-in": 0, + "prefer-spread": 0 } } diff --git a/README.md b/README.md index 907c4b7..33cb056 100644 --- a/README.md +++ b/README.md @@ -43,6 +43,8 @@ PORT=3000 PARALLEL=4 INDICES=["App-Name", "app", "domain", "namespace"] + +CACHING=1 ``` Make sure you copy this configuration to `.env`. diff --git a/docs/DEV.md b/docs/DEV.md index 41e877f..0b6d599 100755 --- a/docs/DEV.md +++ b/docs/DEV.md @@ -63,6 +63,8 @@ DEFAULT_PAGE_SIZE=10 MAX_PAGE_SIZE=100 INDICES=["App-Name", "app", "domain", "namespace"] + +CACHING=1 ``` Make sure you copy this configuration to `.env`. diff --git a/docs/README.md b/docs/README.md index cf25bd8..a5c0799 100755 --- a/docs/README.md +++ b/docs/README.md @@ -46,6 +46,8 @@ DEFAULT_PAGE_SIZE=10 MAX_PAGE_SIZE=100 INDICES=["App-Name", "app", "domain", "namespace"] + +CACHING=1 ``` Make sure you copy this configuration to `.env`. 
diff --git a/package.json b/package.json index 13c31b6..a5ba942 100644 --- a/package.json +++ b/package.json @@ -41,6 +41,7 @@ "express-pg-session": "^1.1.0", "express-session": "^1.17.1", "fs": "^0.0.1-security", + "fs-jetpack": "^4.1.0", "graphql": "^15.5.0", "graphql-fields": "^2.0.3", "js-sha256": "^0.9.0", diff --git a/src/caching/ans.caching.ts b/src/caching/ans.caching.ts new file mode 100644 index 0000000..95fbe4d --- /dev/null +++ b/src/caching/ans.caching.ts @@ -0,0 +1,39 @@ +import {dir, write, remove} from 'fs-jetpack'; +import {DataItemJson} from 'arweave-bundles'; +import {ansBundles} from '../utility/ans.utility'; +import {getDataFromChunks} from '../query/node.query'; +import {tagToUTF8} from '../query/transaction.query'; + +export async function streamAndCacheAns(id: string): Promise<boolean> { + try { + dir(`${process.cwd()}/cache/tx`); + + const rawData = await getDataFromChunks(id); + const ansTxs = await ansBundles.unbundleData(rawData.toString('utf-8')); + + const ansTxsConverted: Array<DataItemJson> = []; + + for (let i = 0; i < ansTxs.length; i++) { + const ansTx = ansTxs[i]; + const newAnsTx: DataItemJson = { + id: ansTx.id, + owner: ansTx.owner, + target: ansTx.target, + nonce: ansTx.nonce, + data: ansTx.data, + signature: ansTx.signature, + tags: tagToUTF8(ansTx.tags), + }; + + ansTxsConverted.push(newAnsTx); + } + + write(`${process.cwd()}/cache/tx/${id}`, JSON.stringify(ansTxsConverted, null, 2)); + + return true; + } catch (error) { + remove(`${process.cwd()}/cache/tx/${id}`); + console.error(`error caching data from ${id}, please note that this may be a cancelled transaction`.red.bold); + throw error; + } +} diff --git a/src/caching/file.caching.ts b/src/caching/file.caching.ts new file mode 100644 index 0000000..084b1fa --- /dev/null +++ b/src/caching/file.caching.ts @@ -0,0 +1,15 @@ +import {exists} from 'fs-jetpack'; +import {streamAndCacheTx} from './stream.caching'; +import {streamAndCacheAns} from './ans.caching'; + +export async function 
cacheFile(id: string) { + if (exists(`${process.cwd()}/cache/tx/${id}`) === false) { + await streamAndCacheTx(id); + } +} + +export async function cacheAnsFile(id: string) { + if (exists(`${process.cwd()}/cache/tx/${id}`) === false) { + await streamAndCacheAns(id); + } +} diff --git a/src/caching/stream.caching.ts b/src/caching/stream.caching.ts new file mode 100644 index 0000000..f2e55dc --- /dev/null +++ b/src/caching/stream.caching.ts @@ -0,0 +1,29 @@ +import {createWriteStream} from 'fs'; +import {dir, remove} from 'fs-jetpack'; +import {getTransactionOffset, getChunk} from '../query/chunk.query'; + +export async function streamAndCacheTx(id: string): Promise<boolean> { + try { + dir(`${process.cwd()}/cache/tx`); + + const fileStream = createWriteStream(`${process.cwd()}/cache/tx/${id}`, {flags: 'w'}); + const {startOffset, endOffset} = await getTransactionOffset(id); + + let byte = 0; + + while (startOffset + byte < endOffset) { + const chunk = await getChunk(startOffset + byte); + byte += chunk.parsed_chunk.length; + + fileStream.write(Buffer.from(chunk.parsed_chunk)); + } + + fileStream.end(); + + return true; + } catch (error) { + remove(`${process.cwd()}/cache/tx/${id}`); + console.error(`error caching data from ${id}, please note that this may be a cancelled transaction`.red.bold); + throw error; + } +} diff --git a/src/database/sync.database.ts b/src/database/sync.database.ts index c4ab220..e232904 100644 --- a/src/database/sync.database.ts +++ b/src/database/sync.database.ts @@ -200,7 +200,7 @@ export async function storeTransaction(tx: string, height: number, retry: boolean export async function processAns(id: string, height: number, retry: boolean = true) { try { const ansPayload = await getDataFromChunks(id); - const ansTxs = await ansBundles.unbundleData(ansPayload); + const ansTxs = await ansBundles.unbundleData(ansPayload.toString('utf-8')); await processANSTransaction(ansTxs, height); } catch (error) { diff --git a/src/graphql/query.graphql.ts 
b/src/graphql/query.graphql.ts index cc17d81..4bf1594 100644 --- a/src/graphql/query.graphql.ts +++ b/src/graphql/query.graphql.ts @@ -102,7 +102,7 @@ export async function generateQuery(params: QueryParams): Promise if (indexed === false) { names.push(tag.name); values.push.apply(values, tag.values); - + runSubQuery = true; } } @@ -141,7 +141,7 @@ export async function generateQuery(params: QueryParams): Promise query.orderByRaw(orderByClauses[sortOrder]); } - query.orderByRaw(`transactions.created_at DESC`); + query.orderByRaw('transactions.created_at DESC'); return query; } diff --git a/src/query/chunk.query.ts b/src/query/chunk.query.ts index 96eadcc..a15ab18 100644 --- a/src/query/chunk.query.ts +++ b/src/query/chunk.query.ts @@ -13,7 +13,7 @@ export interface ChunkType { data_path: string; chunk: string; parsed_chunk: Uint8Array; - response_chunk: string; + response_chunk: Buffer; } export const decoder = new TextDecoder(); @@ -33,13 +33,13 @@ export async function getTransactionOffset(id: string): Promise { +export async function getChunk(offset: number, retry: boolean = true, retryCount: number = 5): Promise<ChunkType> { try { const payload = await get(`${grabNode()}/chunk/${offset}`); const body = JSON.parse(payload.text); const parsed_chunk = b64UrlToBuffer(body.chunk); - const response_chunk = decoder.decode(parsed_chunk); + const response_chunk = Buffer.from(parsed_chunk); return { tx_path: body.tx_path, @@ -49,8 +49,8 @@ export async function getChunk(offset: number, retry: boolean = true): Promise 0) { + return getChunk(offset, true, retryCount - 1); } else { throw error; } diff --git a/src/query/node.query.ts b/src/query/node.query.ts index 0f592e6..8f780ed 100644 --- a/src/query/node.query.ts +++ b/src/query/node.query.ts @@ -45,24 +45,24 @@ export function getDataAsStream(id: string) { return get(`${grabNode()}/${id}`); } -export async function getDataFromChunks(id: string, retry: boolean = true): Promise<string> { +export async function getDataFromChunks(id: string, 
retry: boolean = true): Promise<Buffer> { try { const {startOffset, endOffset} = await getTransactionOffset(id); let byte = 0; - let chunks = ''; + let chunks = Buffer.from(''); while (startOffset + byte < endOffset) { const chunk = await getChunk(startOffset + byte); byte += chunk.parsed_chunk.length; - chunks += chunk.response_chunk; + chunks = Buffer.concat([chunks, chunk.response_chunk]); } return chunks; } catch (error) { if (retry) { console.error(`error retrieving data from ${id}, please note that this may be a cancelled transaction`.red.bold); - return await getDataFromChunks(id, false); + return await getDataFromChunks(id, false); } else { throw error; } diff --git a/src/route/data.route.ts b/src/route/data.route.ts index bea3e19..5a5c762 100644 --- a/src/route/data.route.ts +++ b/src/route/data.route.ts @@ -1,8 +1,8 @@ - +import {exists} from 'fs-jetpack'; import {Request, Response} from 'express'; import {stringToBip39, stringToHash} from '../utility/bip39.utility'; import {transaction as getTransaction, tagValue} from '../query/transaction.query'; -import {getTransactionOffset, getChunk} from '../query/chunk.query'; +import {cacheFile, cacheAnsFile} from '../caching/file.caching'; export const dataRouteRegex = /^\/?([a-zA-Z0-9-_]{43})\/?$|^\/?([a-zA-Z0-9-_]{43})\/(.*)$/i; export const pathRegex = /^\/?([a-z0-9-_]{43})/i; @@ -34,19 +34,21 @@ export async function dataRoute(req: Request, res: Response) { const metadata = await getTransaction(transaction); const contentType = tagValue(metadata.tags, 'Content-Type'); + const ans102 = tagValue(metadata.tags, 'Bundle-Type') === 'ANS-102'; res.setHeader('content-type', contentType); - const {startOffset, endOffset} = await getTransactionOffset(transaction); - - let byte = 0; - - while (startOffset + byte < endOffset) { - const chunk = await getChunk(startOffset + byte); - byte += chunk.parsed_chunk.length; - res.write(chunk.response_chunk); + if (ans102) { + await cacheAnsFile(transaction); + } else { + await 
cacheFile(transaction); + } + + if (exists(`${process.cwd()}/cache/tx/${transaction}`)) { + res.status(200); + res.sendFile(`${process.cwd()}/cache/tx/${transaction}`); + } else { + res.status(500); + res.json({status: 'ERROR', message: 'Could not retrieve tx, please retry'}); } - - res.status(200); - res.end(); } diff --git a/yarn.lock b/yarn.lock index 36cbf84..f6094d6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3956,6 +3956,14 @@ fs-capacitor@^2.0.4: resolved "https://registry.yarnpkg.com/fs-capacitor/-/fs-capacitor-2.0.4.tgz#5a22e72d40ae5078b4fe64fe4d08c0d3fc88ad3c" integrity sha512-8S4f4WsCryNw2mJJchi46YgB6CR5Ze+4L1h8ewl9tEpL4SJ3ZO+c/bS4BWhB8bK+O3TMqhuZarTitd0S0eh2pA== +fs-jetpack@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/fs-jetpack/-/fs-jetpack-4.1.0.tgz#d693fcffd3cedbd8829226967866b9e89f290f0f" + integrity sha512-h4nHLIcCaxnXfUWhwP+mLnar03R2DBlqicNvKJG44TJob8RV6GB8EKNwJgSaBeDAfqWhqq01y+Ao96vRwpXlPw== + dependencies: + minimatch "^3.0.2" + rimraf "^2.6.3" + fs.realpath@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" @@ -5581,7 +5589,7 @@ minimalistic-crypto-utils@^1.0.1: resolved "https://registry.yarnpkg.com/minimalistic-crypto-utils/-/minimalistic-crypto-utils-1.0.1.tgz#f6c00c1c0b082246e5c4d99dfb8c7c083b2b582a" integrity sha1-9sAMHAsIIkblxNmd+4x8CDsrWCo= -minimatch@3.0.4, minimatch@^3.0.4: +minimatch@3.0.4, minimatch@^3.0.2, minimatch@^3.0.4: version "3.0.4" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== @@ -6707,6 +6715,13 @@ rfc4648@^1.4.0: resolved "https://registry.yarnpkg.com/rfc4648/-/rfc4648-1.4.0.tgz#c75b2856ad2e2d588b6ddb985d556f1f7f2a2abd" integrity sha512-3qIzGhHlMHA6PoT6+cdPKZ+ZqtxkIvg8DZGKA5z6PQ33/uuhoJ+Ws/D/J9rXW6gXodgH8QYlz2UCl+sdUDmNIg== +rimraf@^2.6.3: + 
version "2.7.1" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" + integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w== + dependencies: + glob "^7.1.3" + rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a"