diff --git a/packages/core/src/enums/RequestType.ts b/packages/core/src/enums/RequestType.ts index 98d40e1675..57764bd1e4 100644 --- a/packages/core/src/enums/RequestType.ts +++ b/packages/core/src/enums/RequestType.ts @@ -10,6 +10,8 @@ enum RequestType { Prefetch = 'prefetch', /** Lower priority, often used for background computations in the worker */ Compute = 'compute', + /** Lowest Priority, used to cache adjacent studies using GoogleSheetService */ + PreCache = 'precache', } export default RequestType; diff --git a/packages/core/src/requestPool/imageLoadPoolManager.ts b/packages/core/src/requestPool/imageLoadPoolManager.ts index 7cf1b3d92b..c0a204419e 100644 --- a/packages/core/src/requestPool/imageLoadPoolManager.ts +++ b/packages/core/src/requestPool/imageLoadPoolManager.ts @@ -39,5 +39,6 @@ imageLoadPoolManager.grabDelay = 0; imageLoadPoolManager.setMaxSimultaneousRequests(RequestType.Interaction, 1000); imageLoadPoolManager.setMaxSimultaneousRequests(RequestType.Thumbnail, 1000); imageLoadPoolManager.setMaxSimultaneousRequests(RequestType.Prefetch, 1000); +imageLoadPoolManager.setMaxSimultaneousRequests(RequestType.PreCache, 1000); export default imageLoadPoolManager; diff --git a/packages/core/src/requestPool/imageRetrievalPoolManager.ts b/packages/core/src/requestPool/imageRetrievalPoolManager.ts index 03194d1454..d337b95ea5 100644 --- a/packages/core/src/requestPool/imageRetrievalPoolManager.ts +++ b/packages/core/src/requestPool/imageRetrievalPoolManager.ts @@ -20,6 +20,7 @@ imageRetrievalPoolManager.setMaxSimultaneousRequests( 200 ); imageRetrievalPoolManager.setMaxSimultaneousRequests(RequestType.Prefetch, 200); +imageRetrievalPoolManager.setMaxSimultaneousRequests(RequestType.PreCache, 200); imageRetrievalPoolManager.grabDelay = 0; export default imageRetrievalPoolManager; diff --git a/packages/core/src/requestPool/requestPoolManager.ts b/packages/core/src/requestPool/requestPoolManager.ts index 27f8393a3a..a0f0d56634 100644 --- 
a/packages/core/src/requestPool/requestPoolManager.ts +++ b/packages/core/src/requestPool/requestPoolManager.ts @@ -79,6 +79,7 @@ class RequestPoolManager { thumbnail: 0, prefetch: 0, compute: 0, + precache: 0, }; /* maximum number of requests of each type. */ public maxNumRequests: { @@ -86,6 +87,7 @@ class RequestPoolManager { thumbnail: number; prefetch: number; compute: number; + precache: number; }; /* A public property that is used to set the delay between requests. */ public grabDelay: number; @@ -104,6 +106,7 @@ class RequestPoolManager { thumbnail: { 0: [] }, prefetch: { 0: [] }, compute: { 0: [] }, + precache: { 0: [] }, }; this.grabDelay = 5; @@ -114,6 +117,7 @@ class RequestPoolManager { thumbnail: 0, prefetch: 0, compute: 0, + precache: 0, }; this.maxNumRequests = { @@ -121,6 +125,7 @@ class RequestPoolManager { thumbnail: 6, prefetch: 5, compute: 15, + precache: 5, }; } @@ -271,12 +276,16 @@ class RequestPoolManager { RequestType.Prefetch ); const hasRemainingComputeRequests = this.sendRequests(RequestType.Compute); + const hasRemainingPreCacheRequests = this.sendRequests( + RequestType.PreCache + ); if ( !hasRemainingInteractionRequests && !hasRemainingThumbnailRequests && !hasRemainingPrefetchRequests && - !hasRemainingComputeRequests + !hasRemainingComputeRequests && + !hasRemainingPreCacheRequests ) { this.awake = false; } diff --git a/packages/core/src/webWorkerManager/webWorkerManager.js b/packages/core/src/webWorkerManager/webWorkerManager.js index 038acb387d..b690dfe550 100644 --- a/packages/core/src/webWorkerManager/webWorkerManager.js +++ b/packages/core/src/webWorkerManager/webWorkerManager.js @@ -213,6 +213,42 @@ class CentralizedWorkerManager { worker.terminate(); }); } + + postMessage(workerName, message) { + const workerProperties = this.workerRegistry[workerName]; + if (!workerProperties) { + console.error(`Worker type '${workerName}' is not registered.`); + return; + } + + workerProperties.nativeWorkers.forEach((worker) => + 
worker.postMessage(message) + ); + } + + addEventListener(workerName, eventType = 'message', listener) { + const workerProperties = this.workerRegistry[workerName]; + if (!workerProperties) { + console.error(`Worker type '${workerName}' is not registered.`); + return; + } + + workerProperties.nativeWorkers.forEach((worker) => + worker.addEventListener(eventType, listener) + ); + } + + removeEventListener(workerName, eventType = 'message', listener) { + const workerProperties = this.workerRegistry[workerName]; + if (!workerProperties) { + console.error(`Worker type '${workerName}' is not registered.`); + return; + } + + workerProperties.nativeWorkers.forEach((worker) => + worker.removeEventListener(eventType, listener) + ); + } } export default CentralizedWorkerManager; diff --git a/packages/dicomImageLoader/src/imageLoader/internal/loadTarRequest.ts b/packages/dicomImageLoader/src/imageLoader/internal/loadTarRequest.ts new file mode 100644 index 0000000000..dadf3e5c22 --- /dev/null +++ b/packages/dicomImageLoader/src/imageLoader/internal/loadTarRequest.ts @@ -0,0 +1,180 @@ +import { metaData, getWebWorkerManager, Enums } from '@cornerstonejs/core'; +import tarFileManager from '../wadors/tarFileManager'; +import { getOptions } from './index'; +import { + FILE_STREAMING_WORKER_NAME, + MAXIMUM_WORKER_FETCH_SIZE, +} from '../wadors/registerFileStreaming'; + +interface tarImageUrl { + tarUrl: string; + dicomPath: string; +} +const THRESHOLD = 10000; + +const tarPromises = {}; + +function loadTarRequest( + uri: string, + imageId: string, + headers: Record = {} +): Promise { + const instance = metaData.get('instance', imageId); + const { FileOffsets } = instance; + const { tarUrl } = parseuri(uri); + const { Range } = headers; + const handledOffsets = getHandledOffsets(FileOffsets, Range); + const extractedFile = tarFileManager.get(tarUrl, handledOffsets); + + if (extractedFile) { + return new Promise((resolveRequest, rejectRequest) => { + try { + 
resolveRequest(extractedFile.buffer); + } catch (error) { + rejectRequest(error); + } + }); + } + + const webWorkerManager = getWebWorkerManager(); + let tarPromise: Promise; + + if (tarPromises[tarUrl]) { + tarPromise = tarPromises[tarUrl]; + } else { + tarPromise = new Promise(async (resolveTar, rejectTar) => { + if ( + tarFileManager.getTotalSize() + THRESHOLD > + MAXIMUM_WORKER_FETCH_SIZE + ) { + throw new Error( + `fileStreaming.ts: Maximum size(${MAXIMUM_WORKER_FETCH_SIZE}) for fetching files reached` + ); + } + + const options = getOptions(); + const beforeSendHeaders = options.beforeSend(); + + function handleFirstChunk(evt) { + const { url, position, fileArraybuffer } = evt.data; + + if (url === tarUrl) { + tarFileManager.set(url, { data: fileArraybuffer, position }); + + webWorkerManager.removeEventListener( + FILE_STREAMING_WORKER_NAME, + 'message', + handleFirstChunk + ); + } + } + + webWorkerManager.addEventListener( + FILE_STREAMING_WORKER_NAME, + 'message', + handleFirstChunk + ); + + webWorkerManager + .executeTask( + FILE_STREAMING_WORKER_NAME, + 'stream', + { + url: tarUrl, + headers: beforeSendHeaders, + }, + { requestType: Enums.RequestType.Prefetch } + ) + .then(() => { + resolveTar(); + }) + .catch((error) => { + webWorkerManager.removeEventListener( + FILE_STREAMING_WORKER_NAME, + 'message', + handleFirstChunk + ); + rejectTar(error); + }) + .finally(() => delete tarPromises[tarUrl]); + }); + + tarPromises[tarUrl] = tarPromise; + } + + return new Promise(async (resolveRequest, rejectRequest) => { + function handleChunkAppend(evt) { + const { url, position, isAppending } = evt.data; + + isAppending && tarFileManager.setPosition(url, position); + + if (position > handledOffsets.endByte && url === tarUrl) { + try { + const file = tarFileManager.get(url, handledOffsets); + + webWorkerManager.removeEventListener( + FILE_STREAMING_WORKER_NAME, + 'message', + handleChunkAppend + ); + + resolveRequest(file.buffer); + } catch (error) { + 
rejectRequest(error); + } + } + } + + webWorkerManager.addEventListener( + FILE_STREAMING_WORKER_NAME, + 'message', + handleChunkAppend + ); + + await tarPromise.catch((error) => { + webWorkerManager.removeEventListener( + FILE_STREAMING_WORKER_NAME, + 'message', + handleChunkAppend + ); + rejectRequest(error); + }); + }); +} + +function parseuri(uri: string): tarImageUrl { + const [tarUrl, dicomPath] = uri.split('.tar://'); + return { tarUrl: tarUrl + '.tar', dicomPath }; +} + +function parseRangeHeader(rangeHeader: string): { + start?: number; + end?: number; +} { + if (!rangeHeader) { + return {}; + } + + const parts = rangeHeader.split('='); + const rangePart = parts[1]; + const rangeParts = rangePart.split('-'); + const start = parseInt(rangeParts[0], 10); + const end = parseInt(rangeParts[1], 10); + + return { start, end }; +} + +function getHandledOffsets( + fileOffsets: { startByte: number; endByte: number }, + rangeHeader: string +): { startByte: number; endByte: number } { + const { startByte: fileStartByte, endByte: fileEndByte } = fileOffsets; + const { start, end } = parseRangeHeader(rangeHeader); + + const startByte = start ? fileStartByte + start : fileStartByte; + const endByte = end ? 
fileStartByte + end : fileEndByte; + + return { startByte, endByte }; +} + +export default loadTarRequest; diff --git a/packages/dicomImageLoader/src/imageLoader/internal/tarRequest.ts b/packages/dicomImageLoader/src/imageLoader/internal/tarRequest.ts new file mode 100644 index 0000000000..5cc4c0962f --- /dev/null +++ b/packages/dicomImageLoader/src/imageLoader/internal/tarRequest.ts @@ -0,0 +1,82 @@ +import { Types, Enums, metaData } from '@cornerstonejs/core'; +import { LoaderXhrRequestPromise } from '../../types'; +import { CornerstoneWadoRsLoaderOptions } from '../wadors/loadImage'; +import parseImageId from '../wadouri/parseImageId'; +import loadTarRequest from './loadTarRequest'; +import external from '../../externalModules'; + +export default function tarRequest( + url: string, + imageId: string, + defaultHeaders: Record = {}, + options: CornerstoneWadoRsLoaderOptions = {} +): LoaderXhrRequestPromise<{ + contentType: string; + pixelData: Uint8Array; + imageQualityStatus: Enums.ImageQualityStatus; + percentComplete: number; +}> | void { + const { pixelDataFrame } = parseImageId(imageId); + let imagePromise; + + const instance = metaData.get('instance', imageId) || {}; + const { CustomOffsetTable, CustomOffsetTableLengths } = instance; + + if (CustomOffsetTable && CustomOffsetTableLengths) { + const startByte = CustomOffsetTable[pixelDataFrame]; + const endByte = startByte + CustomOffsetTableLengths[pixelDataFrame]; + const headerRange = `bytes=0-${CustomOffsetTable[0] - 1}`; + const pixelDataRange = `bytes=${startByte}-${endByte}`; + + const headerPromise = loadTarRequest(url, imageId, { + ...defaultHeaders, + Range: headerRange, + }); + const pixelDataPromise = loadTarRequest(url, imageId, { + ...defaultHeaders, + Range: pixelDataRange, + }); + + imagePromise = Promise.all([headerPromise, pixelDataPromise]).then( + (results) => ({ headerBuffer: results[0], pixelDataBuffer: results[1] }) + ); + } else { + imagePromise = loadTarRequest(url, imageId, 
defaultHeaders).then( + (arraybuffer) => ({ + fileBuffer: arraybuffer, + }) + ); + } + + return imagePromise.then((result) => { + const { headerBuffer, pixelDataBuffer, fileBuffer } = result; + const dataSet = external.dicomParser.parseDicom( + new Uint8Array(fileBuffer || headerBuffer), + { ...(headerBuffer && { untilTag: 'x7fe00010' }) } + ); + const transferSyntax = dataSet.string('x00020010'); + let pixelData; + + if (fileBuffer) { + const pixelDataElement = dataSet.elements.x7fe00010; + let { dataOffset, length } = pixelDataElement; + if (pixelDataElement.hadUndefinedLength) { + ({ position: dataOffset, length } = pixelDataElement.fragments[0]); + } else { + // Adding 8 bytes for 4 bytes tag + 4 bytes length for uncompressed pixelData + dataOffset += 8; + } + const slice = fileBuffer.slice(dataOffset, dataOffset + length); + pixelData = new Uint8Array(slice); + } else { + pixelData = new Uint8Array(pixelDataBuffer); + } + + return { + contentType: `transfer-syntax=${transferSyntax}`, + imageQualityStatus: Enums.ImageQualityStatus.FULL_RESOLUTION, + pixelData, + percentComplete: 100, + }; + }); +} diff --git a/packages/dicomImageLoader/src/imageLoader/wadors/getPixelData.ts b/packages/dicomImageLoader/src/imageLoader/wadors/getPixelData.ts index 4a6e7984de..3aa7333bcc 100644 --- a/packages/dicomImageLoader/src/imageLoader/wadors/getPixelData.ts +++ b/packages/dicomImageLoader/src/imageLoader/wadors/getPixelData.ts @@ -2,6 +2,7 @@ import { xhrRequest } from '../internal/index'; // import rangeRequest from '../internal/rangeRequest'; import streamRequest from '../internal/streamRequest'; import rangeRequest from '../internal/rangeRequest'; +import tarRequest from '../internal/tarRequest'; import extractMultipart from './extractMultipart'; import { getImageQualityStatus } from './getImageQualityStatus'; import { CornerstoneWadoRsLoaderOptions } from './loadImage'; @@ -46,6 +47,12 @@ function getPixelData( return streamRequest(url, imageId, headers, options); } + 
// Use the tar loader for dicomtar scheme + if (imageId.includes('dicomtar')) { + const url = imageId.split('dicomtar:')[1]; + return tarRequest(url, imageId, headers); + } + /** * Not progressively rendering, use regular xhr request. */ diff --git a/packages/dicomImageLoader/src/imageLoader/wadors/index.ts b/packages/dicomImageLoader/src/imageLoader/wadors/index.ts index 76458e97ba..5d190d8e45 100644 --- a/packages/dicomImageLoader/src/imageLoader/wadors/index.ts +++ b/packages/dicomImageLoader/src/imageLoader/wadors/index.ts @@ -11,6 +11,7 @@ import getPixelData from './getPixelData'; import metaDataManager from './metaDataManager'; import loadImage from './loadImage'; import register from './register'; +import tarFileManager from './tarFileManager'; const metaData = { getNumberString, @@ -27,4 +28,5 @@ export default { loadImage, metaDataManager, register, + tarFileManager, }; diff --git a/packages/dicomImageLoader/src/imageLoader/wadors/register.ts b/packages/dicomImageLoader/src/imageLoader/wadors/register.ts index 3451be2d5e..274da5f45b 100644 --- a/packages/dicomImageLoader/src/imageLoader/wadors/register.ts +++ b/packages/dicomImageLoader/src/imageLoader/wadors/register.ts @@ -1,8 +1,14 @@ import loadImage from './loadImage'; import { metaDataProvider } from './metaData/index'; +import { registerFileStreamingWebWorker } from './registerFileStreaming'; export default function (cornerstone) { // register wadors scheme and metadata provider cornerstone.registerImageLoader('wadors', loadImage); + cornerstone.registerImageLoader('dicomtar', loadImage); + cornerstone.metaData.addProvider(metaDataProvider); + + // register file streaming web worker + registerFileStreamingWebWorker(); } diff --git a/packages/dicomImageLoader/src/imageLoader/wadors/registerFileStreaming.ts b/packages/dicomImageLoader/src/imageLoader/wadors/registerFileStreaming.ts new file mode 100644 index 0000000000..4077ecab5c --- /dev/null +++ 
b/packages/dicomImageLoader/src/imageLoader/wadors/registerFileStreaming.ts @@ -0,0 +1,27 @@ +import { getWebWorkerManager } from '@cornerstonejs/core'; + +export const FILE_STREAMING_WORKER_NAME = 'file-streaming-test'; +export const MAXIMUM_WORKER_FETCH_SIZE = 2 * 1_073_741_824; // 2 x 1 GB + +export function registerFileStreamingWebWorker() { + const workerFn = () => { + return new Worker( + new URL('../../workers/fileStreaming.ts?v=12', import.meta.url), + { name: FILE_STREAMING_WORKER_NAME } + ); + }; + + const workerManager = getWebWorkerManager(); + + const options = { + maxWorkerInstances: 1, + }; + + workerManager.registerWorker(FILE_STREAMING_WORKER_NAME, workerFn, options); + + workerManager.executeTask( + FILE_STREAMING_WORKER_NAME, + 'setMaxFetchSize', + MAXIMUM_WORKER_FETCH_SIZE + ); +} diff --git a/packages/dicomImageLoader/src/imageLoader/wadors/tarFileManager.ts b/packages/dicomImageLoader/src/imageLoader/wadors/tarFileManager.ts new file mode 100644 index 0000000000..30fe70be04 --- /dev/null +++ b/packages/dicomImageLoader/src/imageLoader/wadors/tarFileManager.ts @@ -0,0 +1,67 @@ +import { getWebWorkerManager } from '@cornerstonejs/core'; +import { FILE_STREAMING_WORKER_NAME } from './registerFileStreaming'; + +let tarFiles: Record = {}; + +function set( + url: string, + tarFile: { data: Uint8Array; position: number } +): void { + tarFiles[url] = tarFile; +} + +function get( + url: string, + offsets: { startByte: number; endByte: number } +): Uint8Array | null { + if (!tarFiles[url] || tarFiles[url].position <= offsets.endByte) { + return null; + } + + return tarFiles[url].data.slice(offsets.startByte, offsets.endByte + 1); +} + +function setPosition(url: string, position: number) { + if (tarFiles[url]) { + tarFiles[url].position = position; + } +} + +function getTotalSize(): number { + return Object.values(tarFiles).reduce((total, { position }) => { + return total + position; + }, 0); +} + +function remove(url: string): void { + const removedSize 
= tarFiles[url].position; + delete tarFiles[url]; + + const workerManager = getWebWorkerManager(); + workerManager.executeTask( + FILE_STREAMING_WORKER_NAME, + 'decreaseFetchedSize', + removedSize + ); +} + +function purge(): void { + const totalSize = getTotalSize(); + tarFiles = {}; + + const workerManager = getWebWorkerManager(); + workerManager.executeTask( + FILE_STREAMING_WORKER_NAME, + 'decreaseFetchedSize', + totalSize + ); +} + +export default { + set, + get, + setPosition, + getTotalSize, + remove, + purge, +}; diff --git a/packages/dicomImageLoader/src/imageLoader/wadouri/loadImage.ts b/packages/dicomImageLoader/src/imageLoader/wadouri/loadImage.ts index 640242a508..a4e03b3561 100644 --- a/packages/dicomImageLoader/src/imageLoader/wadouri/loadImage.ts +++ b/packages/dicomImageLoader/src/imageLoader/wadouri/loadImage.ts @@ -180,7 +180,7 @@ function loadImageWithRange( ): Types.IImageLoadObject { const start = new Date().getTime(); const instance = metaData.get('instance', imageId); - const { ExtendedOffsetTable, ExtendedOffsetTableLengths } = instance; + const { CustomOffsetTable, CustomOffsetTableLengths } = instance; const headerPromise: Promise<{ dataSet; headerArrayBuffer }> = new Promise( (resolve) => { @@ -189,11 +189,11 @@ function loadImageWithRange( dataSet: loadedDataSets[sharedCacheKey]?.dataSet, headerArrayBuffer: loadedDataSets[ sharedCacheKey - ].dataSet.byteArray.slice(0, ExtendedOffsetTable[0] - 1), + ].dataSet.byteArray.slice(0, CustomOffsetTable[0] - 1), }); } else { loader(sharedCacheKey, imageId, { - Range: `bytes=0-${ExtendedOffsetTable[0] - 1}`, + Range: `bytes=0-${CustomOffsetTable[0] - 1}`, }).then((arraybuffer) => { const dataSet = external.dicomParser.parseDicom( new Uint8Array(arraybuffer), @@ -206,8 +206,8 @@ function loadImageWithRange( } ); - const startByte = ExtendedOffsetTable[frameIndex]; - const endByte = startByte + ExtendedOffsetTableLengths[frameIndex]; + const startByte = CustomOffsetTable[frameIndex]; + const 
endByte = startByte + CustomOffsetTableLengths[frameIndex]; const pixelDataPromise = loader(sharedCacheKey, imageId, { Range: `bytes=${startByte}-${endByte}`, }).then((arraybuffer) => ({ pixelDataArrayBuffer: arraybuffer })); @@ -223,7 +223,7 @@ function loadImageWithRange( values; const loadEnd = new Date().getTime(); const pixelData = new Uint8Array(pixelDataArrayBuffer); - const transferSyntax = instance._meta.TransferSyntaxUID; + const transferSyntax = dataSet.string('x00020010'); if (!dataSetCacheManager.isLoaded(sharedCacheKey)) { dataSet.elements.x7fe00010 = {}; @@ -244,8 +244,8 @@ function loadImageWithRange( const completeByteArray = new Uint8Array( dataSet.byteArray.byteLength + pixelDataArrayBuffer.byteLength ); - completeByteArray.set(dataSet.byteArray.byteLength); - completeByteArray.set(pixelData, headerArrayBuffer.byteLength); + completeByteArray.set(dataSet.byteArray); + completeByteArray.set(pixelData, dataSet.byteArray.byteLength); dataSet.byteArray = completeByteArray; loadedDataSets[sharedCacheKey].cacheCount++; @@ -360,7 +360,7 @@ function loadImage( } const instance = metaData.get('instance', imageId); - if (instance?.ExtendedOffsetTable && instance?.ExtendedOffsetTableLengths) { + if (instance?.CustomOffsetTable && instance?.CustomOffsetTableLengths) { // Fetch only a single frame pixeldata of a multiframe dicom file. 
return loadImageWithRange( imageId, @@ -381,7 +381,7 @@ function loadImage( return loadImageFromPromise( dataSetPromise, imageId, - parsedImageId.frame, + parsedImageId.pixelDataFrame, parsedImageId.url, options ); diff --git a/packages/dicomImageLoader/src/workers/fileStreaming.ts b/packages/dicomImageLoader/src/workers/fileStreaming.ts new file mode 100644 index 0000000000..059c670a1f --- /dev/null +++ b/packages/dicomImageLoader/src/workers/fileStreaming.ts @@ -0,0 +1,88 @@ +import { expose } from 'comlink'; + +const fileStreaming = { + maxFetchSize: 0, + fetchedSize: 0, + + setMaxFetchSize(size) { + if (size) { + this.maxFetchSize = size; + } + }, + + decreaseFetchedSize(size) { + if (size && size <= this.fetchedSize) { + this.fetchedSize -= size; + } + }, + + async stream(args) { + const { url, headers } = args; + const controller = new AbortController(); + + const response = await fetch(url, { + headers: { ...headers }, + signal: controller.signal, + }).catch((error) => { + throw error; + }); + + const reader = response.body.getReader(); + let result: ReadableStreamReadResult = await reader.read(); + let completed = result.done; + const totalLength = response.headers.get('Content-Length') || 0; + const firstChunk = result.value; + let position = firstChunk.length; + + if (this.maxFetchSize && this.fetchedSize + position > this.maxFetchSize) { + controller.abort(); + throw new Error( + `fileStreaming.ts: Maximum size(${this.maxFetchSize}) for fetching files reached` + ); + } + + this.fetchedSize += position; + let sharedArraybuffer = new SharedArrayBuffer(+totalLength); + let fileArraybuffer = new Uint8Array(sharedArraybuffer); + fileArraybuffer.set(firstChunk); + postMessage({ url, position, fileArraybuffer }); + + while (!completed) { + result = await reader.read(); + + if (result.done) { + completed = true; + continue; + } + + const chunk = result.value; + + if ( + this.maxFetchSize && + this.fetchedSize + chunk.length > this.maxFetchSize + ) { + 
sharedArraybuffer = null; + fileArraybuffer = null; + controller.abort(); + throw new Error( + `fileStreaming.ts: Maximum size(${this.maxFetchSize}) for fetching files reached` + ); + } + + this.fetchedSize += chunk.length; + fileArraybuffer.set(chunk, position); + position += chunk.length; + + postMessage({ + isAppending: true, + url, + position: position, + }); + } + + sharedArraybuffer = null; + fileArraybuffer = null; + }, +}; + +expose(fileStreaming);