From 94b5fbac5dc603c4eb14fe05d01b0e0bf8cf3370 Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Fri, 27 Sep 2024 16:07:13 -0400 Subject: [PATCH 01/12] added files for trk --- package-lock.json | 30 +- package.json | 23 +- .../enabled_async_computation_modules.ts | 1 + src/datasource/enabled_backend_modules.ts | 1 + src/datasource/enabled_frontend_modules.ts | 1 + src/datasource/trk/async_computation.ts | 0 src/datasource/trk/backend.ts | 405 ++++++++++++ src/datasource/trk/base.ts | 57 ++ src/datasource/trk/frontend.ts | 616 ++++++++++++++++++ src/datasource/trk/register_default.ts | 20 + src/skeleton/decode_precomputed_skeleton.ts | 7 + 11 files changed, 1156 insertions(+), 5 deletions(-) create mode 100644 src/datasource/trk/async_computation.ts create mode 100644 src/datasource/trk/backend.ts create mode 100644 src/datasource/trk/base.ts create mode 100644 src/datasource/trk/frontend.ts create mode 100644 src/datasource/trk/register_default.ts diff --git a/package-lock.json b/package-lock.json index d27acab7a..1e7ee9b2d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,6 +9,7 @@ "version": "2.40.1", "license": "Apache-2.0", "dependencies": { + "axios": "^1.7.7", "codemirror": "^5.61.1", "gl-matrix": "3.1.0", "glsl-editor": "^1.0.0", @@ -3224,6 +3225,17 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.12.0.tgz", "integrity": "sha512-NmWvPnx0F1SfrQbYwOi7OeaNGokp9XhzNioJ/CSBs8Qa4vxug81mhJEAVZwxXuBmYB5KDRfMq/F3RR0BIU7sWg==" }, + "node_modules/axios": { + "version": "1.7.7", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.7.tgz", + "integrity": "sha512-S4kL7XrjgBmvdGut0sN3yJxqYzrDOnivkBiN0OFs6hLiUam3UPvswUo0kqGyhqUZGEOytHyumEdXsAkgCOUf3Q==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, "node_modules/b4a": { "version": "1.6.6", "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz", @@ -6885,7 +6897,6 @@ "version": "1.15.6", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", - "dev": true, "funding": [ { "type": "individual", @@ -7161,6 +7172,20 @@ "node": ">= 10.0.0" } }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/form-data-encoder": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/form-data-encoder/-/form-data-encoder-2.1.4.tgz", @@ -10957,8 +10982,7 @@ "node_modules/proxy-from-env": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", - "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", - "dev": true + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" }, "node_modules/psl": { "version": "1.9.0", diff --git a/package.json b/package.json index 19131a977..2965f1225 100644 --- a/package.json +++ b/package.json @@ -77,6 +77,7 @@ "webpack-merge": "^6.0.1" }, "dependencies": { + "axios": "^1.7.7", "codemirror": "^5.61.1", "gl-matrix": "3.1.0", "glsl-editor": 
"^1.0.0", @@ -96,8 +97,8 @@ "type": "module", "exports": { ".": "./src/main_module.ts", - "./unstable/*.js": "./src/*.ts", - "./unstable/*": "./src/*" + "./*.js": "./src/*.ts", + "./*": "./src/*" }, "imports": { "#src/third_party/jpgjs/jpg.js": "./src/third_party/jpgjs/jpg.js", @@ -335,6 +336,24 @@ "neuroglancer/datasource/render:disabled": "./src/datasource/render/register_default.ts", "default": "./src/datasource/render/register_default.ts" }, + "#datasource/trk/backend": { + "neuroglancer/datasource/trk:enabled": "./src/datasource/trk/backend.ts", + "neuroglancer/datasource:none_by_default": "./src/util/false.ts", + "neuroglancer/datasource/trk:disabled": "./src/datasource/trk/backend.ts", + "default": "./src/datasource/trk/backend.ts" + }, + "#datasource/trk/async_computation": { + "neuroglancer/datasource/trk:enabled": "./src/datasource/trk/async_computation.ts", + "neuroglancer/datasource:none_by_default": "./src/util/false.ts", + "neuroglancer/datasource/trk:disabled": "./src/datasource/trk/async_computation.ts", + "default": "./src/datasource/trk/async_computation.ts" + }, + "#datasource/trk/register_default": { + "neuroglancer/datasource/trk:enabled": "./src/datasource/trk/register_default.ts", + "neuroglancer/datasource:none_by_default": "./src/util/false.ts", + "neuroglancer/datasource/trk:disabled": "./src/datasource/trk/register_default.ts", + "default": "./src/datasource/trk/register_default.ts" + }, "#datasource/vtk/backend": { "neuroglancer/datasource/vtk:enabled": "./src/datasource/vtk/backend.ts", "neuroglancer/datasource:none_by_default": "./src/util/false.ts", diff --git a/src/datasource/enabled_async_computation_modules.ts b/src/datasource/enabled_async_computation_modules.ts index 94f0d6043..6b5b2e5a4 100644 --- a/src/datasource/enabled_async_computation_modules.ts +++ b/src/datasource/enabled_async_computation_modules.ts @@ -9,5 +9,6 @@ import "#datasource/nifti/async_computation"; import "#datasource/obj/async_computation"; import "#datasource/precomputed/async_computation"; import "#datasource/render/async_computation"; +import "#datasource/trk/async_computation"; import "#datasource/vtk/async_computation"; import "#datasource/zarr/async_computation"; diff --git a/src/datasource/enabled_backend_modules.ts b/src/datasource/enabled_backend_modules.ts index 926dea0f2..20b5a82f9 100644 --- a/src/datasource/enabled_backend_modules.ts +++ b/src/datasource/enabled_backend_modules.ts @@ -11,5 +11,6 @@ import "#datasource/obj/backend"; import "#datasource/precomputed/backend"; import "#datasource/python/backend"; import "#datasource/render/backend"; +import "#datasource/trk/backend"; import "#datasource/vtk/backend"; import "#datasource/zarr/backend"; diff --git a/src/datasource/enabled_frontend_modules.ts b/src/datasource/enabled_frontend_modules.ts index 96b934b7b..b7778d92c 100644 --- a/src/datasource/enabled_frontend_modules.ts +++ b/src/datasource/enabled_frontend_modules.ts @@ -15,5 +15,6 @@ import "#datasource/nifti/register_default"; import "#datasource/obj/register_default"; import "#datasource/precomputed/register_default"; import "#datasource/render/register_default"; +import "#datasource/trk/register_default"; import "#datasource/vtk/register_default"; import "#datasource/zarr/register_default"; diff --git a/src/datasource/trk/async_computation.ts b/src/datasource/trk/async_computation.ts new file mode 100644 index 000000000..e69de29bb diff --git a/src/datasource/trk/backend.ts b/src/datasource/trk/backend.ts new file mode 100644 index 000000000..6de468cb9 --- 
/dev/null +++ b/src/datasource/trk/backend.ts @@ -0,0 +1,405 @@ +/** + * @license + * Copyright 2016 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// eslint-disable-next-line import/no-extraneous-dependencies +import axios from 'axios'; +import { decodeGzip } from "#src/async_computation/decode_gzip_request.js"; +import { requestAsyncComputation } from "#src/async_computation/request.js"; +import type { Chunk, ChunkManager } from "#src/chunk_manager/backend.js"; +import { WithParameters } from "#src/chunk_manager/backend.js"; +import { GenericSharedDataSource } from "#src/chunk_manager/generic_file_source.js"; +import { WithSharedCredentialsProviderCounterpart } from "#src/credentials_provider/shared_counterpart.js"; +import type { ShardingParameters } from "#src/datasource/precomputed/base.js"; +import { + DataEncoding, + ShardingHashFunction, + SkeletonSourceParameters, +} from "#src/datasource/precomputed/base.js"; +import type { SkeletonChunk } from "#src/skeleton/backend.js"; +import { SkeletonSource } from "#src/skeleton/backend.js"; +import { decodeSkeletonChunk } from "#src/skeleton/decode_precomputed_skeleton.js"; +import { fetchSpecialHttpByteRange } from "#src/util/byte_range_http_requests.js"; +import type { CancellationToken } from "#src/util/cancellation.js"; +import type { Borrowed } from "#src/util/disposable.js"; +import { convertEndian32, Endianness } from "#src/util/endian.js"; +import { murmurHash3_x86_128Hash64Bits } from "#src/util/hash.js"; +import { + isNotFoundError, + responseArrayBuffer, +} from "#src/util/http_request.js"; +import { stableStringify } from "#src/util/json.js"; +import { getObjectId } from "#src/util/object_id.js"; +import type { + SpecialProtocolCredentials, + SpecialProtocolCredentialsProvider, +} from "#src/util/special_protocol_request.js"; +import { cancellableFetchSpecialOk } from "#src/util/special_protocol_request.js"; +import { Uint64 } from "#src/util/uint64.js"; +import { registerSharedObject } from "#src/worker_rpc.js"; + + +console.log(import.meta.url); + +const shardingHashFunctions: Map<ShardingHashFunction, (out: Uint64) => void> = + new Map([ + [ + ShardingHashFunction.MURMURHASH3_X86_128, + (out) => { + murmurHash3_x86_128Hash64Bits(out, 0, out.low, out.high); + }, + ], + [ShardingHashFunction.IDENTITY, (_out) => { }], + ]); + +interface ShardInfo { + shardUrl: string; + offset: Uint64; +} + +interface DecodedMinishardIndex { + data: Uint32Array; + shardUrl: string; +} + +interface MinishardIndexSource + extends GenericSharedDataSource<Uint64, DecodedMinishardIndex | undefined> { + sharding: ShardingParameters; + credentialsProvider: SpecialProtocolCredentialsProvider; +} + +function getMinishardIndexDataSource( + chunkManager: Borrowed<ChunkManager>, + credentialsProvider: SpecialProtocolCredentialsProvider, + parameters: { url: string; sharding: ShardingParameters | undefined }, +): MinishardIndexSource | undefined { + const { url, sharding } = parameters; + if (sharding === undefined) return undefined; + const source = GenericSharedDataSource.get< + Uint64, + DecodedMinishardIndex | undefined +
>( + chunkManager, + stableStringify({ + type: "precomputed:shardedDataSource", + url, + sharding, + credentialsProvider: getObjectId(credentialsProvider), + }), + { + download: async ( + shardAndMinishard: Uint64, + cancellationToken: CancellationToken, + ) => { + const minishard = Uint64.lowMask(new Uint64(), sharding.minishardBits); + Uint64.and(minishard, minishard, shardAndMinishard); + const shard = Uint64.lowMask(new Uint64(), sharding.shardBits); + const temp = new Uint64(); + Uint64.rshift(temp, shardAndMinishard, sharding.minishardBits); + Uint64.and(shard, shard, temp); + const shardUrl = `${url}/${shard + .toString(16) + .padStart(Math.ceil(sharding.shardBits / 4), "0")}.shard`; + // Retrive minishard index start/end offsets. + const shardIndexSize = new Uint64(16); + Uint64.lshift(shardIndexSize, shardIndexSize, sharding.minishardBits); + + // Multiply minishard by 16. + const shardIndexStart = Uint64.lshift(new Uint64(), minishard, 4); + const shardIndexEnd = Uint64.addUint32( + new Uint64(), + shardIndexStart, + 16, + ); + let shardIndexResponse: ArrayBuffer; + try { + shardIndexResponse = await fetchSpecialHttpByteRange( + credentialsProvider, + shardUrl, + shardIndexStart, + shardIndexEnd, + cancellationToken, + ); + } catch (e) { + if (isNotFoundError(e)) return { data: undefined, size: 0 }; + throw e; + } + if (shardIndexResponse.byteLength !== 16) { + throw new Error("Failed to retrieve minishard offset"); + } + const shardIndexDv = new DataView(shardIndexResponse); + const minishardStartOffset = new Uint64( + shardIndexDv.getUint32(0, /*littleEndian=*/ true), + shardIndexDv.getUint32(4, /*littleEndian=*/ true), + ); + const minishardEndOffset = new Uint64( + shardIndexDv.getUint32(8, /*littleEndian=*/ true), + shardIndexDv.getUint32(12, /*littleEndian=*/ true), + ); + if (Uint64.equal(minishardStartOffset, minishardEndOffset)) { + return { data: undefined, size: 0 }; + } + // The start/end offsets in the shard index are relative to the end of the shard + // index. + Uint64.add(minishardStartOffset, minishardStartOffset, shardIndexSize); + Uint64.add(minishardEndOffset, minishardEndOffset, shardIndexSize); + + let minishardIndexResponse = await fetchSpecialHttpByteRange( + credentialsProvider, + shardUrl, + minishardStartOffset, + minishardEndOffset, + cancellationToken, + ); + if (sharding.minishardIndexEncoding === DataEncoding.GZIP) { + minishardIndexResponse = ( + await requestAsyncComputation( + decodeGzip, + cancellationToken, + [minishardIndexResponse], + new Uint8Array(minishardIndexResponse), + ) + ).buffer; + } + if (minishardIndexResponse.byteLength % 24 !== 0) { + throw new Error( + `Invalid minishard index length: ${minishardIndexResponse.byteLength}`, + ); + } + const minishardIndex = new Uint32Array(minishardIndexResponse); + convertEndian32(minishardIndex, Endianness.LITTLE); + + const minishardIndexSize = minishardIndex.byteLength / 24; + let prevEntryKeyLow = 0; + let prevEntryKeyHigh = 0; + // Offsets in the minishard index are relative to the end of the shard index. 
+ let prevStartLow = shardIndexSize.low; + let prevStartHigh = shardIndexSize.high; + for (let i = 0; i < minishardIndexSize; ++i) { + let entryKeyLow = prevEntryKeyLow + minishardIndex[i * 2]; + let entryKeyHigh = prevEntryKeyHigh + minishardIndex[i * 2 + 1]; + if (entryKeyLow >= 4294967296) { + entryKeyLow -= 4294967296; + entryKeyHigh += 1; + } + prevEntryKeyLow = minishardIndex[i * 2] = entryKeyLow; + prevEntryKeyHigh = minishardIndex[i * 2 + 1] = entryKeyHigh; + let startLow = + prevStartLow + minishardIndex[(minishardIndexSize + i) * 2]; + let startHigh = + prevStartHigh + minishardIndex[(minishardIndexSize + i) * 2 + 1]; + if (startLow >= 4294967296) { + startLow -= 4294967296; + startHigh += 1; + } + minishardIndex[(minishardIndexSize + i) * 2] = startLow; + minishardIndex[(minishardIndexSize + i) * 2 + 1] = startHigh; + const sizeLow = minishardIndex[(2 * minishardIndexSize + i) * 2]; + const sizeHigh = minishardIndex[(2 * minishardIndexSize + i) * 2 + 1]; + let endLow = startLow + sizeLow; + let endHigh = startHigh + sizeHigh; + if (endLow >= 4294967296) { + endLow -= 4294967296; + endHigh += 1; + } + prevStartLow = endLow; + prevStartHigh = endHigh; + minishardIndex[(2 * minishardIndexSize + i) * 2] = endLow; + minishardIndex[(2 * minishardIndexSize + i) * 2 + 1] = endHigh; + } + return { + data: { data: minishardIndex, shardUrl }, + size: minishardIndex.byteLength, + }; + }, + encodeKey: (key: Uint64) => key.toString(), + sourceQueueLevel: 1, + }, + ) as MinishardIndexSource; + source.sharding = sharding; + source.credentialsProvider = credentialsProvider; + return source; +} + +function findMinishardEntry( + minishardIndex: DecodedMinishardIndex, + key: Uint64, +): { startOffset: Uint64; endOffset: Uint64 } | undefined { + const minishardIndexData = minishardIndex.data; + const minishardIndexSize = minishardIndexData.length / 6; + const keyLow = key.low; + const keyHigh = key.high; + for (let i = 0; i < minishardIndexSize; ++i) { + if ( + minishardIndexData[i * 2] !== keyLow || + minishardIndexData[i * 2 + 1] !== keyHigh + ) { + continue; + } + const startOffset = new Uint64( + minishardIndexData[(minishardIndexSize + i) * 2], + minishardIndexData[(minishardIndexSize + i) * 2 + 1], + ); + const endOffset = new Uint64( + minishardIndexData[(2 * minishardIndexSize + i) * 2], + minishardIndexData[(2 * minishardIndexSize + i) * 2 + 1], + ); + return { startOffset, endOffset }; + } + return undefined; +} + +async function getShardedData( + minishardIndexSource: MinishardIndexSource, + chunk: Chunk, + key: Uint64, + cancellationToken: CancellationToken, +): Promise<{ shardInfo: ShardInfo; data: ArrayBuffer } | undefined> { + const { sharding } = minishardIndexSource; + const hashFunction = shardingHashFunctions.get(sharding.hash)!; + const hashCode = Uint64.rshift(new Uint64(), key, sharding.preshiftBits); + hashFunction(hashCode); + const shardAndMinishard = Uint64.lowMask( + new Uint64(), + sharding.minishardBits + sharding.shardBits, + ); + Uint64.and(shardAndMinishard, shardAndMinishard, hashCode); + const getPriority = () => ({ + priorityTier: chunk.priorityTier, + priority: chunk.priority, + }); + const minishardIndex = await minishardIndexSource.getData( + shardAndMinishard, + getPriority, + cancellationToken, + ); + if (minishardIndex === undefined) return undefined; + const minishardEntry = findMinishardEntry(minishardIndex, key); + if (minishardEntry === undefined) return undefined; + const { startOffset, endOffset } = minishardEntry; + let data = await 
fetchSpecialHttpByteRange( + minishardIndexSource.credentialsProvider, + minishardIndex.shardUrl, + startOffset, + endOffset, + cancellationToken, + ); + if (minishardIndexSource.sharding.dataEncoding === DataEncoding.GZIP) { + data = ( + await requestAsyncComputation( + decodeGzip, + cancellationToken, + [data], + new Uint8Array(data), + ) + ).buffer; + } + return { + data, + shardInfo: { shardUrl: minishardIndex.shardUrl, offset: startOffset }, + }; +} + +function getOrNotFoundError<T>(v: T | undefined) { + if (v === undefined) throw new Error("not found"); + return v; +} + + +async function fetchByUint64( + credentialsProvider: SpecialProtocolCredentialsProvider, + url: string, + chunk: Chunk, + minishardIndexSource: MinishardIndexSource | undefined, + id: Uint64, + cancellationToken: CancellationToken, +) { + if (minishardIndexSource === undefined) { + try { + return await cancellableFetchSpecialOk( + credentialsProvider, + `${url}/${id}`, + {}, + responseArrayBuffer, + cancellationToken, + ); + } catch (e) { + if (isNotFoundError(e)) return undefined; + throw e; + } + } + const result = await getShardedData( + minishardIndexSource, + chunk, + id, + cancellationToken, + ); + if (result === undefined) return undefined; + return result.data; +} + + +@registerSharedObject() +export class PrecomputedSkeletonSource extends WithParameters( + WithSharedCredentialsProviderCounterpart<SpecialProtocolCredentials>()( + SkeletonSource, + ), + SkeletonSourceParameters, +) { + private minishardIndexSource = getMinishardIndexDataSource( + this.chunkManager, + this.credentialsProvider, + { url: this.parameters.url, sharding: this.parameters.metadata.sharding }, + ); + async download(chunk: SkeletonChunk, cancellationToken: CancellationToken) { + const { parameters } = this; + const response = getOrNotFoundError( + await fetchByUint64( + this.credentialsProvider, + parameters.url, + chunk, + this.minishardIndexSource, + chunk.objectId, + cancellationToken, + ), + ); + + console.log("response: ", response); + + + try { + const response = await axios.get("http://127.0.0.1:9123/Users/shrutiv/MyDocuments/GitHub/Neuroglancer-Tractography/src/tract/20240920_163900/1", + { responseType: 'arraybuffer' }); + + console.log("1: ", response.data); + + // Create DataView from response.data + const dv = new DataView(response.data); + + // Read the number of vertices and edges + const numVertices = dv.getUint32(0, true); + console.log(numVertices); + const numEdges = dv.getUint32(4, true); + console.log(numEdges); + } catch (error) { + console.error('Error fetching data', error); + } + + + console.log("Inside download()"); + decodeSkeletonChunk(chunk, response, parameters.metadata.vertexAttributes); + } +} + diff --git a/src/datasource/trk/base.ts b/src/datasource/trk/base.ts new file mode 100644 index 000000000..b6db1df7f --- /dev/null +++ b/src/datasource/trk/base.ts @@ -0,0 +1,57 @@ +/** + * @license + * Copyright 2016 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License.
+ */ + +import type { VertexAttributeInfo } from "#src/skeleton/base.js"; +import type { mat4 } from "#src/util/geom.js"; + +export enum DataEncoding { + RAW = 0, + GZIP = 1, +} + +export enum ShardingHashFunction { + IDENTITY = 0, + MURMURHASH3_X86_128 = 1, +} + +export interface ShardingParameters { + hash: ShardingHashFunction; + preshiftBits: number; + minishardBits: number; + shardBits: number; + minishardIndexEncoding: DataEncoding; + dataEncoding: DataEncoding; +} + +export interface SkeletonMetadata { + transform: mat4; + vertexAttributes: Map<string, VertexAttributeInfo>; + sharding: ShardingParameters | undefined; +} + +export class SkeletonSourceParameters { + url: string; + metadata: SkeletonMetadata; + + static RPC_ID = "trk/SkeletonSource"; +} + + +export class IndexedSegmentPropertySourceParameters { + url: string; + sharding: ShardingParameters | undefined; + static RPC_ID = "trk/IndexedSegmentPropertySource"; +} diff --git a/src/datasource/trk/frontend.ts b/src/datasource/trk/frontend.ts new file mode 100644 index 000000000..58e3ef03d --- /dev/null +++ b/src/datasource/trk/frontend.ts @@ -0,0 +1,616 @@ +/** + * @license + * Copyright 2016 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +import type { ChunkManager } from "#src/chunk_manager/frontend.js"; +import { WithParameters } from "#src/chunk_manager/frontend.js"; +import { + emptyValidCoordinateSpace, + makeCoordinateSpace, + makeIdentityTransform, +} from "#src/coordinate_transform.js"; +import { WithCredentialsProvider } from "#src/credentials_provider/chunk_source_frontend.js"; +import type { + CompleteUrlOptions, + DataSource, + DataSubsourceEntry, + GetDataSourceOptions, + NormalizeUrlOptions, +} from "#src/datasource/index.js"; +import { DataSourceProvider, RedirectError } from "#src/datasource/index.js"; +import type { + ShardingParameters, + SkeletonMetadata, +} from "#src/datasource/trk/base.js"; +import { + DataEncoding, + IndexedSegmentPropertySourceParameters, + ShardingHashFunction, + SkeletonSourceParameters, +} from "#src/datasource/trk/base.js"; +import type { + InlineSegmentProperty, + InlineSegmentPropertyMap, +} from "#src/segmentation_display_state/property_map.js"; +import { + IndexedSegmentPropertySource, + normalizeInlineSegmentPropertyMap, + SegmentPropertyMap, +} from "#src/segmentation_display_state/property_map.js"; +import type { VertexAttributeInfo } from "#src/skeleton/base.js"; +import { SkeletonSource } from "#src/skeleton/frontend.js"; + +import { DATA_TYPE_ARRAY_CONSTRUCTOR, DataType } from "#src/util/data_type.js"; +import type { Borrowed } from "#src/util/disposable.js"; +import { mat4 } from "#src/util/geom.js"; +import { completeHttpPath } from "#src/util/http_path_completion.js"; +import { responseJson } from "#src/util/http_request.js"; +import { + parseArray, + parseFixedLengthArray, + parseQueryStringParameters, + unparseQueryStringParameters, + verifyEnumString, + verifyFiniteFloat, + verifyInt, + verifyObject, + verifyObjectProperty, + verifyOptionalObjectProperty, +
verifyOptionalString, + verifyPositiveInt, + verifyString, + verifyStringArray, +} from "#src/util/json.js"; +import { getObjectId } from "#src/util/object_id.js"; +import type { + SpecialProtocolCredentials, + SpecialProtocolCredentialsProvider, +} from "#src/util/special_protocol_request.js"; +import { + cancellableFetchSpecialOk, + parseSpecialUrl, +} from "#src/util/special_protocol_request.js"; +import { Uint64 } from "#src/util/uint64.js"; + + + +class PrecomputedSkeletonSource extends WithParameters( + WithCredentialsProvider<SpecialProtocolCredentials>()(SkeletonSource), + SkeletonSourceParameters, +) { + get skeletonVertexCoordinatesInVoxels() { + return false; + } + get vertexAttributes() { + return this.parameters.metadata.vertexAttributes; + } +} + +export function resolvePath(a: string, b: string) { + const outputParts = a.split("/"); + for (const part of b.split("/")) { + if (part === "..") { + if (outputParts.length !== 0) { + outputParts.length = outputParts.length - 1; + continue; + } + } + outputParts.push(part); + } + return outputParts.join("/"); +} + +function parseTransform(data: any): mat4 { + return verifyObjectProperty(data, "transform", (value) => { + const transform = mat4.create(); + if (value !== undefined) { + parseFixedLengthArray( + transform.subarray(0, 12), + value, + verifyFiniteFloat, + ); + } + mat4.transpose(transform, transform); + return transform; + }); +} + +function parseShardingEncoding(y: any): DataEncoding { + if (y === undefined) return DataEncoding.RAW; + return verifyEnumString(y, DataEncoding); +} + +function parseShardingParameters( + shardingData: any, +): ShardingParameters | undefined { + if (shardingData === undefined) return undefined; + verifyObject(shardingData); + const t = verifyObjectProperty(shardingData, "@type", verifyString); + if (t !== "neuroglancer_uint64_sharded_v1") { + throw new Error(`Unsupported sharding format: ${JSON.stringify(t)}`); + } + const hash = verifyObjectProperty(shardingData, "hash", (y) => + verifyEnumString(y, ShardingHashFunction), + ); + const preshiftBits = verifyObjectProperty( + shardingData, + "preshift_bits", + verifyInt, + ); + const shardBits = verifyObjectProperty(shardingData, "shard_bits", verifyInt); + const minishardBits = verifyObjectProperty( + shardingData, + "minishard_bits", + verifyInt, + ); + const minishardIndexEncoding = verifyObjectProperty( + shardingData, + "minishard_index_encoding", + parseShardingEncoding, + ); + const dataEncoding = verifyObjectProperty( + shardingData, + "data_encoding", + parseShardingEncoding, + ); + return { + hash, + preshiftBits, + shardBits, + minishardBits, + minishardIndexEncoding, + dataEncoding, + }; +} + +interface ParsedSkeletonMetadata { + metadata: SkeletonMetadata; + segmentPropertyMap: string | undefined; +} + +function parseSkeletonMetadata(data: any): ParsedSkeletonMetadata { + verifyObject(data); + const t = verifyObjectProperty(data, "@type", verifyString); + if (t !== "neuroglancer_skeletons") { + throw new Error(`Unsupported skeleton type: ${JSON.stringify(t)}`); + } + const transform = parseTransform(data); + const vertexAttributes = new Map<string, VertexAttributeInfo>(); + verifyObjectProperty(data, "vertex_attributes", (attributes) => { + if (attributes === undefined) return; + parseArray(attributes, (attributeData) => { + verifyObject(attributeData); + const id = verifyObjectProperty(attributeData, "id", verifyString); + if (id === "") throw new Error("vertex attribute id must not be empty"); + if (vertexAttributes.has(id)) { + throw new Error(`duplicate vertex attribute id
${JSON.stringify(id)}`); + } + const dataType = verifyObjectProperty(attributeData, "data_type", (y) => + verifyEnumString(y, DataType), + ); + const numComponents = verifyObjectProperty( + attributeData, + "num_components", + verifyPositiveInt, + ); + vertexAttributes.set(id, { dataType, numComponents }); + }); + }); + const sharding = verifyObjectProperty( + data, + "sharding", + parseShardingParameters, + ); + const segmentPropertyMap = verifyObjectProperty( + data, + "segment_properties", + verifyOptionalString, + ); + return { + metadata: { transform, vertexAttributes, sharding } as SkeletonMetadata, + segmentPropertyMap, + }; +} + +async function getSkeletonMetadata( + chunkManager: ChunkManager, + credentialsProvider: SpecialProtocolCredentialsProvider, + url: string, +): Promise<ParsedSkeletonMetadata> { + const metadata = await getJsonMetadata( + chunkManager, + credentialsProvider, + url, + ); + return parseSkeletonMetadata(metadata); +} + +function getDefaultCoordinateSpace() { + return makeCoordinateSpace({ + names: ["x", "y", "z"], + units: ["m", "m", "m"], + scales: Float64Array.of(1e-9, 1e-9, 1e-9), + }); +} + +async function getSkeletonSource( + chunkManager: ChunkManager, + credentialsProvider: SpecialProtocolCredentialsProvider, + url: string, +) { + const { metadata, segmentPropertyMap } = await getSkeletonMetadata( + chunkManager, + credentialsProvider, + url, + ); + return { + source: chunkManager.getChunkSource(PrecomputedSkeletonSource, { + credentialsProvider, + parameters: { + url, + metadata, + }, + }), + transform: metadata.transform, + segmentPropertyMap, + }; +} + +function getJsonMetadata( + chunkManager: ChunkManager, + credentialsProvider: SpecialProtocolCredentialsProvider, + url: string, +): Promise<any> { + return chunkManager.memoize.getUncounted( + { + type: "precomputed:metadata", + url, + credentialsProvider: getObjectId(credentialsProvider), + }, + async () => { + return await cancellableFetchSpecialOk( + credentialsProvider, + `${url}/info`, + {}, + responseJson, + ); + }, + ); +} + +async function getSkeletonsDataSource( + options: GetDataSourceOptions, + credentialsProvider: SpecialProtocolCredentialsProvider, + url: string, +): Promise<DataSource> { + const { + source: skeletons, + transform, + segmentPropertyMap, + } = await getSkeletonSource(options.chunkManager, credentialsProvider, url); + const subsources: DataSubsourceEntry[] = [ + { + id: "default", + default: true, + subsource: { mesh: skeletons }, + subsourceToModelSubspaceTransform: transform, + }, + ]; + if (segmentPropertyMap !== undefined) { + const mapUrl = resolvePath(url, segmentPropertyMap); + const metadata = await getJsonMetadata( + options.chunkManager, + credentialsProvider, + mapUrl, + ); + const segmentPropertyMapData = getSegmentPropertyMap( + options.chunkManager, + credentialsProvider, + metadata, + mapUrl, + ); + subsources.push({ + id: "properties", + default: true, + subsource: { segmentPropertyMap: segmentPropertyMapData }, + }); + } + return { + modelTransform: makeIdentityTransform(getDefaultCoordinateSpace()), + subsources, + }; +} + +function parseInlinePropertyMap(data: unknown): InlineSegmentPropertyMap { + verifyObject(data); + const tempUint64 = new Uint64(); + const ids = verifyObjectProperty(data, "ids", (idsObj) => { + idsObj = verifyStringArray(idsObj); + const numIds = idsObj.length; + const ids = new Uint32Array(numIds * 2); + for (let i = 0; i < numIds; ++i) { + if (!tempUint64.tryParseString(idsObj[i])) { + throw new Error(`Invalid uint64 id: ${JSON.stringify(idsObj[i])}`); + } + ids[2 * i]
= tempUint64.low; + ids[2 * i + 1] = tempUint64.high; + } + return ids; + }); + const numIds = ids.length / 2; + const properties = verifyObjectProperty(data, "properties", (propertiesObj) => + parseArray(propertiesObj, (propertyObj): InlineSegmentProperty => { + verifyObject(propertyObj); + const id = verifyObjectProperty(propertyObj, "id", verifyString); + const description = verifyOptionalObjectProperty( + propertyObj, + "description", + verifyString, + ); + const type = verifyObjectProperty(propertyObj, "type", (type) => { + if ( + type !== "label" && + type !== "description" && + type !== "string" && + type !== "tags" && + type !== "number" + ) { + throw new Error(`Invalid property type: ${JSON.stringify(type)}`); + } + return type; + }); + if (type === "tags") { + const tags = verifyObjectProperty( + propertyObj, + "tags", + verifyStringArray, + ); + let tagDescriptions = verifyOptionalObjectProperty( + propertyObj, + "tag_descriptions", + verifyStringArray, + ); + if (tagDescriptions === undefined) { + tagDescriptions = new Array(tags.length); + tagDescriptions.fill(""); + } else { + if (tagDescriptions.length !== tags.length) { + throw new Error( + `Expected tag_descriptions to have length: ${tags.length}`, + ); + } + } + const values = verifyObjectProperty( + propertyObj, + "values", + (valuesObj) => { + if (!Array.isArray(valuesObj) || valuesObj.length !== numIds) { + throw new Error( + `Expected ${numIds} values, but received: ${valuesObj.length}`, + ); + } + return valuesObj.map((tagIndices) => { + return String.fromCharCode(...tagIndices); + }); + }, + ); + return { id, description, type, tags, tagDescriptions, values }; + } + if (type === "number") { + const dataType = verifyObjectProperty(propertyObj, "data_type", (x) => + verifyEnumString(x, DataType), + ); + if (dataType === DataType.UINT64) { + throw new Error("uint64 properties not supported"); + } + const values = verifyObjectProperty( + propertyObj, + "values", + (valuesObj) => { + if (!Array.isArray(valuesObj) || valuesObj.length !== numIds) { + throw new Error( + `Expected ${numIds} values, but received: ${valuesObj.length}`, + ); + } + return DATA_TYPE_ARRAY_CONSTRUCTOR[dataType].from(valuesObj); + }, + ); + let min = Infinity; + let max = -Infinity; + for (let i = values.length - 1; i >= 0; --i) { + const v = values[i]; + if (v < min) min = v; + if (v > max) max = v; + } + return { id, description, type, dataType, values, bounds: [min, max] }; + } + const values = verifyObjectProperty( + propertyObj, + "values", + (valuesObj) => { + verifyStringArray(valuesObj); + if (valuesObj.length !== numIds) { + throw new Error( + `Expected ${numIds} values, but received: ${valuesObj.length}`, + ); + } + return valuesObj; + }, + ); + return { id, description, type, values }; + }), + ); + return normalizeInlineSegmentPropertyMap({ ids, properties }); +} + +export const PrecomputedIndexedSegmentPropertySource = WithParameters( + WithCredentialsProvider()( + IndexedSegmentPropertySource, + ), + IndexedSegmentPropertySourceParameters, +); + +export function getSegmentPropertyMap( + chunkManager: Borrowed, + credentialsProvider: SpecialProtocolCredentialsProvider, + data: unknown, + url: string, +): SegmentPropertyMap { + chunkManager; + credentialsProvider; + url; + try { + const t = verifyObjectProperty(data, "@type", verifyString); + if (t !== "neuroglancer_segment_properties") { + throw new Error( + `Unsupported segment property map type: ${JSON.stringify(t)}`, + ); + } + const inlineProperties = verifyOptionalObjectProperty( + 
data, + "inline", + parseInlinePropertyMap, + ); + // const indexedProperties = verifyOptionalObjectProperty(data, 'indexed', indexedObj => { + // const {sharding, properties} = parseIndexedPropertyMap(indexedObj); + // return chunkManager.getChunkSource( + // PrecomputedIndexedSegmentPropertySource, + // {credentialsProvider, properties, parameters: {sharding, url}}); + // }); + return new SegmentPropertyMap({ inlineProperties }); + } catch (e) { + throw new Error(`Error parsing segment property map: ${e.message}`); + } +} + +async function getSegmentPropertyMapDataSource( + options: GetDataSourceOptions, + credentialsProvider: SpecialProtocolCredentialsProvider, + url: string, + metadata: unknown, +): Promise { + options; + return { + modelTransform: makeIdentityTransform(emptyValidCoordinateSpace), + subsources: [ + { + id: "default", + default: true, + subsource: { + segmentPropertyMap: getSegmentPropertyMap( + options.chunkManager, + credentialsProvider, + metadata, + url, + ), + }, + }, + ], + }; +} + +const urlPattern = /^([^#]*)(?:#(.*))?$/; + +export function parseProviderUrl(providerUrl: string) { + let [, url, fragment] = providerUrl.match(urlPattern)!; + if (url.endsWith("/")) { + url = url.substring(0, url.length - 1); + } + const parameters = parseQueryStringParameters(fragment || ""); + return { url, parameters }; +} + +export function unparseProviderUrl(url: string, parameters: any) { + const fragment = unparseQueryStringParameters(parameters); + if (fragment) { + url += `#${fragment}`; + } + return url; +} + +export class TrkDataSource extends DataSourceProvider { + get description() { + return "Single trk file"; + } + + normalizeUrl(options: NormalizeUrlOptions): string { + const { url, parameters } = parseProviderUrl(options.providerUrl); + return ( + options.providerProtocol + "://" + unparseProviderUrl(url, parameters) + ); + } + + get(options: GetDataSourceOptions): Promise { + const { url: providerUrl, parameters } = parseProviderUrl( + options.providerUrl, + ); + return options.chunkManager.memoize.getUncounted( + { type: "precomputed:get", providerUrl, parameters }, + async (): Promise => { + const { url, credentialsProvider } = parseSpecialUrl( + providerUrl, + options.credentialsManager, + ); + let metadata: any; + try { + metadata = await getJsonMetadata( + options.chunkManager, + credentialsProvider, + url, + ); + } catch (e) { + throw new Error(`Failed to get metadata for ${url}: ${e}`); + } + verifyObject(metadata); + const redirect = verifyOptionalObjectProperty( + metadata, + "redirect", + verifyString, + ); + if (redirect !== undefined) { + throw new RedirectError(redirect); + } + const t = verifyOptionalObjectProperty(metadata, "@type", verifyString); + switch (t) { + case "neuroglancer_skeletons": + return await getSkeletonsDataSource( + options, + credentialsProvider, + url, + ); + + case "neuroglancer_segment_properties": + return await getSegmentPropertyMapDataSource( + options, + credentialsProvider, + url, + metadata, + ); + + default: + throw new Error(`Invalid type: ${JSON.stringify(t)}`); + } + }, + ); + } + completeUrl(options: CompleteUrlOptions) { + return completeHttpPath( + options.credentialsManager, + options.providerUrl, + options.cancellationToken, + ); + } +} diff --git a/src/datasource/trk/register_default.ts b/src/datasource/trk/register_default.ts new file mode 100644 index 000000000..44fd84f90 --- /dev/null +++ b/src/datasource/trk/register_default.ts @@ -0,0 +1,20 @@ +/** + * @license + * Copyright 2017 Google Inc. 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { registerProvider } from "#src/datasource/default_provider.js"; +import { TrkDataSource } from "#src/datasource/trk/frontend.js"; + +registerProvider("trk", () => new TrkDataSource()); diff --git a/src/skeleton/decode_precomputed_skeleton.ts b/src/skeleton/decode_precomputed_skeleton.ts index 034939642..c8cb0eeaf 100644 --- a/src/skeleton/decode_precomputed_skeleton.ts +++ b/src/skeleton/decode_precomputed_skeleton.ts @@ -24,14 +24,21 @@ import { Endianness, } from "#src/util/endian.js"; +console.log(import.meta.url); + export function decodeSkeletonChunk( chunk: SkeletonChunk, response: ArrayBuffer, vertexAttributes: Map, ) { + + console.log("start of decode vertices and edges") + const dv = new DataView(response); const numVertices = dv.getUint32(0, true); + console.log(numVertices); const numEdges = dv.getUint32(4, true); + console.log(numEdges); const vertexPositionsStartOffset = 8; From c23a536497d99ac2bf17acab9fc2889bf8d93929 Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Fri, 27 Sep 2024 17:01:45 -0400 Subject: [PATCH 02/12] minor changes to modify the response source --- src/datasource/trk/backend.ts | 699 +++++++++++++++++---------------- src/datasource/trk/frontend.ts | 12 +- 2 files changed, 364 insertions(+), 347 deletions(-) diff --git a/src/datasource/trk/backend.ts b/src/datasource/trk/backend.ts index 6de468cb9..1034ce796 100644 --- a/src/datasource/trk/backend.ts +++ b/src/datasource/trk/backend.ts @@ -14,392 +14,409 @@ * limitations under the License. 
*/ -// eslint-disable-next-line import/no-extraneous-dependencies + + +import type { AxiosResponse } from 'axios'; import axios from 'axios'; -import { decodeGzip } from "#src/async_computation/decode_gzip_request.js"; -import { requestAsyncComputation } from "#src/async_computation/request.js"; -import type { Chunk, ChunkManager } from "#src/chunk_manager/backend.js"; +// import { decodeGzip } from "#src/async_computation/decode_gzip_request.js"; +// import { requestAsyncComputation } from "#src/async_computation/request.js"; +// import type { Chunk, ChunkManager } from "#src/chunk_manager/backend.js"; import { WithParameters } from "#src/chunk_manager/backend.js"; -import { GenericSharedDataSource } from "#src/chunk_manager/generic_file_source.js"; +// import { GenericSharedDataSource } from "#src/chunk_manager/generic_file_source.js"; import { WithSharedCredentialsProviderCounterpart } from "#src/credentials_provider/shared_counterpart.js"; -import type { ShardingParameters } from "#src/datasource/precomputed/base.js"; +// import type { ShardingParameters } from "#src/datasource/trk/base.js"; import { - DataEncoding, - ShardingHashFunction, + // DataEncoding, + // ShardingHashFunction, SkeletonSourceParameters, -} from "#src/datasource/precomputed/base.js"; +} from "#src/datasource/trk/base.js"; import type { SkeletonChunk } from "#src/skeleton/backend.js"; import { SkeletonSource } from "#src/skeleton/backend.js"; import { decodeSkeletonChunk } from "#src/skeleton/decode_precomputed_skeleton.js"; -import { fetchSpecialHttpByteRange } from "#src/util/byte_range_http_requests.js"; -import type { CancellationToken } from "#src/util/cancellation.js"; -import type { Borrowed } from "#src/util/disposable.js"; -import { convertEndian32, Endianness } from "#src/util/endian.js"; -import { murmurHash3_x86_128Hash64Bits } from "#src/util/hash.js"; -import { - isNotFoundError, - responseArrayBuffer, -} from "#src/util/http_request.js"; -import { stableStringify } from "#src/util/json.js"; -import { getObjectId } from "#src/util/object_id.js"; +// import { fetchSpecialHttpByteRange } from "#src/util/byte_range_http_requests.js"; +// import type { CancellationToken } from "#src/util/cancellation.js"; +// import type { Borrowed } from "#src/util/disposable.js"; +// import { convertEndian32, Endianness } from "#src/util/endian.js"; +// import { murmurHash3_x86_128Hash64Bits } from "#src/util/hash.js"; +// import { +// isNotFoundError, +// responseArrayBuffer, +// } from "#src/util/http_request.js"; +// import { stableStringify } from "#src/util/json.js"; +// import { getObjectId } from "#src/util/object_id.js"; import type { SpecialProtocolCredentials, - SpecialProtocolCredentialsProvider, + // SpecialProtocolCredentialsProvider, } from "#src/util/special_protocol_request.js"; -import { cancellableFetchSpecialOk } from "#src/util/special_protocol_request.js"; -import { Uint64 } from "#src/util/uint64.js"; +// import { cancellableFetchSpecialOk } from "#src/util/special_protocol_request.js"; +// import { Uint64 } from "#src/util/uint64.js"; import { registerSharedObject } from "#src/worker_rpc.js"; console.log(import.meta.url); -const shardingHashFunctions: Map void> = - new Map([ - [ - ShardingHashFunction.MURMURHASH3_X86_128, - (out) => { - murmurHash3_x86_128Hash64Bits(out, 0, out.low, out.high); - }, - ], - [ShardingHashFunction.IDENTITY, (_out) => { }], - ]); +// const shardingHashFunctions: Map void> = +// new Map([ +// [ +// ShardingHashFunction.MURMURHASH3_X86_128, +// (out) => { +// 
murmurHash3_x86_128Hash64Bits(out, 0, out.low, out.high); +// }, +// ], +// [ShardingHashFunction.IDENTITY, (_out) => { }], +// ]); -interface ShardInfo { - shardUrl: string; - offset: Uint64; -} +// interface ShardInfo { +// shardUrl: string; +// offset: Uint64; +// } -interface DecodedMinishardIndex { - data: Uint32Array; - shardUrl: string; -} +// interface DecodedMinishardIndex { +// data: Uint32Array; +// shardUrl: string; +// } -interface MinishardIndexSource - extends GenericSharedDataSource { - sharding: ShardingParameters; - credentialsProvider: SpecialProtocolCredentialsProvider; -} +// interface MinishardIndexSource +// extends GenericSharedDataSource { +// sharding: ShardingParameters; +// credentialsProvider: SpecialProtocolCredentialsProvider; +// } -function getMinishardIndexDataSource( - chunkManager: Borrowed, - credentialsProvider: SpecialProtocolCredentialsProvider, - parameters: { url: string; sharding: ShardingParameters | undefined }, -): MinishardIndexSource | undefined { - const { url, sharding } = parameters; - if (sharding === undefined) return undefined; - const source = GenericSharedDataSource.get< - Uint64, - DecodedMinishardIndex | undefined - >( - chunkManager, - stableStringify({ - type: "precomputed:shardedDataSource", - url, - sharding, - credentialsProvider: getObjectId(credentialsProvider), - }), - { - download: async ( - shardAndMinishard: Uint64, - cancellationToken: CancellationToken, - ) => { - const minishard = Uint64.lowMask(new Uint64(), sharding.minishardBits); - Uint64.and(minishard, minishard, shardAndMinishard); - const shard = Uint64.lowMask(new Uint64(), sharding.shardBits); - const temp = new Uint64(); - Uint64.rshift(temp, shardAndMinishard, sharding.minishardBits); - Uint64.and(shard, shard, temp); - const shardUrl = `${url}/${shard - .toString(16) - .padStart(Math.ceil(sharding.shardBits / 4), "0")}.shard`; - // Retrive minishard index start/end offsets. - const shardIndexSize = new Uint64(16); - Uint64.lshift(shardIndexSize, shardIndexSize, sharding.minishardBits); +// function getMinishardIndexDataSource( +// chunkManager: Borrowed, +// credentialsProvider: SpecialProtocolCredentialsProvider, +// parameters: { url: string; sharding: ShardingParameters | undefined }, +// ): MinishardIndexSource | undefined { +// const { url, sharding } = parameters; +// if (sharding === undefined) return undefined; +// const source = GenericSharedDataSource.get< +// Uint64, +// DecodedMinishardIndex | undefined +// >( +// chunkManager, +// stableStringify({ +// type: "trk:shardedDataSource", +// url, +// sharding, +// credentialsProvider: getObjectId(credentialsProvider), +// }), +// { +// download: async ( +// shardAndMinishard: Uint64, +// cancellationToken: CancellationToken, +// ) => { +// const minishard = Uint64.lowMask(new Uint64(), sharding.minishardBits); +// Uint64.and(minishard, minishard, shardAndMinishard); +// const shard = Uint64.lowMask(new Uint64(), sharding.shardBits); +// const temp = new Uint64(); +// Uint64.rshift(temp, shardAndMinishard, sharding.minishardBits); +// Uint64.and(shard, shard, temp); +// const shardUrl = `${url}/${shard +// .toString(16) +// .padStart(Math.ceil(sharding.shardBits / 4), "0")}.shard`; +// // Retrive minishard index start/end offsets. +// const shardIndexSize = new Uint64(16); +// Uint64.lshift(shardIndexSize, shardIndexSize, sharding.minishardBits); - // Multiply minishard by 16. 
- const shardIndexStart = Uint64.lshift(new Uint64(), minishard, 4); - const shardIndexEnd = Uint64.addUint32( - new Uint64(), - shardIndexStart, - 16, - ); - let shardIndexResponse: ArrayBuffer; - try { - shardIndexResponse = await fetchSpecialHttpByteRange( - credentialsProvider, - shardUrl, - shardIndexStart, - shardIndexEnd, - cancellationToken, - ); - } catch (e) { - if (isNotFoundError(e)) return { data: undefined, size: 0 }; - throw e; - } - if (shardIndexResponse.byteLength !== 16) { - throw new Error("Failed to retrieve minishard offset"); - } - const shardIndexDv = new DataView(shardIndexResponse); - const minishardStartOffset = new Uint64( - shardIndexDv.getUint32(0, /*littleEndian=*/ true), - shardIndexDv.getUint32(4, /*littleEndian=*/ true), - ); - const minishardEndOffset = new Uint64( - shardIndexDv.getUint32(8, /*littleEndian=*/ true), - shardIndexDv.getUint32(12, /*littleEndian=*/ true), - ); - if (Uint64.equal(minishardStartOffset, minishardEndOffset)) { - return { data: undefined, size: 0 }; - } - // The start/end offsets in the shard index are relative to the end of the shard - // index. - Uint64.add(minishardStartOffset, minishardStartOffset, shardIndexSize); - Uint64.add(minishardEndOffset, minishardEndOffset, shardIndexSize); +// // Multiply minishard by 16. +// const shardIndexStart = Uint64.lshift(new Uint64(), minishard, 4); +// const shardIndexEnd = Uint64.addUint32( +// new Uint64(), +// shardIndexStart, +// 16, +// ); +// let shardIndexResponse: ArrayBuffer; +// try { +// shardIndexResponse = await fetchSpecialHttpByteRange( +// credentialsProvider, +// shardUrl, +// shardIndexStart, +// shardIndexEnd, +// cancellationToken, +// ); +// } catch (e) { +// if (isNotFoundError(e)) return { data: undefined, size: 0 }; +// throw e; +// } +// if (shardIndexResponse.byteLength !== 16) { +// throw new Error("Failed to retrieve minishard offset"); +// } +// const shardIndexDv = new DataView(shardIndexResponse); +// const minishardStartOffset = new Uint64( +// shardIndexDv.getUint32(0, /*littleEndian=*/ true), +// shardIndexDv.getUint32(4, /*littleEndian=*/ true), +// ); +// const minishardEndOffset = new Uint64( +// shardIndexDv.getUint32(8, /*littleEndian=*/ true), +// shardIndexDv.getUint32(12, /*littleEndian=*/ true), +// ); +// if (Uint64.equal(minishardStartOffset, minishardEndOffset)) { +// return { data: undefined, size: 0 }; +// } +// // The start/end offsets in the shard index are relative to the end of the shard +// // index. 
+// Uint64.add(minishardStartOffset, minishardStartOffset, shardIndexSize); +// Uint64.add(minishardEndOffset, minishardEndOffset, shardIndexSize); - let minishardIndexResponse = await fetchSpecialHttpByteRange( - credentialsProvider, - shardUrl, - minishardStartOffset, - minishardEndOffset, - cancellationToken, - ); - if (sharding.minishardIndexEncoding === DataEncoding.GZIP) { - minishardIndexResponse = ( - await requestAsyncComputation( - decodeGzip, - cancellationToken, - [minishardIndexResponse], - new Uint8Array(minishardIndexResponse), - ) - ).buffer; - } - if (minishardIndexResponse.byteLength % 24 !== 0) { - throw new Error( - `Invalid minishard index length: ${minishardIndexResponse.byteLength}`, - ); - } - const minishardIndex = new Uint32Array(minishardIndexResponse); - convertEndian32(minishardIndex, Endianness.LITTLE); +// let minishardIndexResponse = await fetchSpecialHttpByteRange( +// credentialsProvider, +// shardUrl, +// minishardStartOffset, +// minishardEndOffset, +// cancellationToken, +// ); +// if (sharding.minishardIndexEncoding === DataEncoding.GZIP) { +// minishardIndexResponse = ( +// await requestAsyncComputation( +// decodeGzip, +// cancellationToken, +// [minishardIndexResponse], +// new Uint8Array(minishardIndexResponse), +// ) +// ).buffer; +// } +// if (minishardIndexResponse.byteLength % 24 !== 0) { +// throw new Error( +// `Invalid minishard index length: ${minishardIndexResponse.byteLength}`, +// ); +// } +// const minishardIndex = new Uint32Array(minishardIndexResponse); +// convertEndian32(minishardIndex, Endianness.LITTLE); - const minishardIndexSize = minishardIndex.byteLength / 24; - let prevEntryKeyLow = 0; - let prevEntryKeyHigh = 0; - // Offsets in the minishard index are relative to the end of the shard index. 
- let prevStartLow = shardIndexSize.low; - let prevStartHigh = shardIndexSize.high; - for (let i = 0; i < minishardIndexSize; ++i) { - let entryKeyLow = prevEntryKeyLow + minishardIndex[i * 2]; - let entryKeyHigh = prevEntryKeyHigh + minishardIndex[i * 2 + 1]; - if (entryKeyLow >= 4294967296) { - entryKeyLow -= 4294967296; - entryKeyHigh += 1; - } - prevEntryKeyLow = minishardIndex[i * 2] = entryKeyLow; - prevEntryKeyHigh = minishardIndex[i * 2 + 1] = entryKeyHigh; - let startLow = - prevStartLow + minishardIndex[(minishardIndexSize + i) * 2]; - let startHigh = - prevStartHigh + minishardIndex[(minishardIndexSize + i) * 2 + 1]; - if (startLow >= 4294967296) { - startLow -= 4294967296; - startHigh += 1; - } - minishardIndex[(minishardIndexSize + i) * 2] = startLow; - minishardIndex[(minishardIndexSize + i) * 2 + 1] = startHigh; - const sizeLow = minishardIndex[(2 * minishardIndexSize + i) * 2]; - const sizeHigh = minishardIndex[(2 * minishardIndexSize + i) * 2 + 1]; - let endLow = startLow + sizeLow; - let endHigh = startHigh + sizeHigh; - if (endLow >= 4294967296) { - endLow -= 4294967296; - endHigh += 1; - } - prevStartLow = endLow; - prevStartHigh = endHigh; - minishardIndex[(2 * minishardIndexSize + i) * 2] = endLow; - minishardIndex[(2 * minishardIndexSize + i) * 2 + 1] = endHigh; - } - return { - data: { data: minishardIndex, shardUrl }, - size: minishardIndex.byteLength, - }; - }, - encodeKey: (key: Uint64) => key.toString(), - sourceQueueLevel: 1, - }, - ) as MinishardIndexSource; - source.sharding = sharding; - source.credentialsProvider = credentialsProvider; - return source; -} +// const minishardIndexSize = minishardIndex.byteLength / 24; +// let prevEntryKeyLow = 0; +// let prevEntryKeyHigh = 0; +// // Offsets in the minishard index are relative to the end of the shard index. 
+// let prevStartLow = shardIndexSize.low; +// let prevStartHigh = shardIndexSize.high; +// for (let i = 0; i < minishardIndexSize; ++i) { +// let entryKeyLow = prevEntryKeyLow + minishardIndex[i * 2]; +// let entryKeyHigh = prevEntryKeyHigh + minishardIndex[i * 2 + 1]; +// if (entryKeyLow >= 4294967296) { +// entryKeyLow -= 4294967296; +// entryKeyHigh += 1; +// } +// prevEntryKeyLow = minishardIndex[i * 2] = entryKeyLow; +// prevEntryKeyHigh = minishardIndex[i * 2 + 1] = entryKeyHigh; +// let startLow = +// prevStartLow + minishardIndex[(minishardIndexSize + i) * 2]; +// let startHigh = +// prevStartHigh + minishardIndex[(minishardIndexSize + i) * 2 + 1]; +// if (startLow >= 4294967296) { +// startLow -= 4294967296; +// startHigh += 1; +// } +// minishardIndex[(minishardIndexSize + i) * 2] = startLow; +// minishardIndex[(minishardIndexSize + i) * 2 + 1] = startHigh; +// const sizeLow = minishardIndex[(2 * minishardIndexSize + i) * 2]; +// const sizeHigh = minishardIndex[(2 * minishardIndexSize + i) * 2 + 1]; +// let endLow = startLow + sizeLow; +// let endHigh = startHigh + sizeHigh; +// if (endLow >= 4294967296) { +// endLow -= 4294967296; +// endHigh += 1; +// } +// prevStartLow = endLow; +// prevStartHigh = endHigh; +// minishardIndex[(2 * minishardIndexSize + i) * 2] = endLow; +// minishardIndex[(2 * minishardIndexSize + i) * 2 + 1] = endHigh; +// } +// return { +// data: { data: minishardIndex, shardUrl }, +// size: minishardIndex.byteLength, +// }; +// }, +// encodeKey: (key: Uint64) => key.toString(), +// sourceQueueLevel: 1, +// }, +// ) as MinishardIndexSource; +// source.sharding = sharding; +// source.credentialsProvider = credentialsProvider; +// return source; +// } -function findMinishardEntry( - minishardIndex: DecodedMinishardIndex, - key: Uint64, -): { startOffset: Uint64; endOffset: Uint64 } | undefined { - const minishardIndexData = minishardIndex.data; - const minishardIndexSize = minishardIndexData.length / 6; - const keyLow = key.low; - const keyHigh = key.high; - for (let i = 0; i < minishardIndexSize; ++i) { - if ( - minishardIndexData[i * 2] !== keyLow || - minishardIndexData[i * 2 + 1] !== keyHigh - ) { - continue; - } - const startOffset = new Uint64( - minishardIndexData[(minishardIndexSize + i) * 2], - minishardIndexData[(minishardIndexSize + i) * 2 + 1], - ); - const endOffset = new Uint64( - minishardIndexData[(2 * minishardIndexSize + i) * 2], - minishardIndexData[(2 * minishardIndexSize + i) * 2 + 1], - ); - return { startOffset, endOffset }; - } - return undefined; -} +// function findMinishardEntry( +// minishardIndex: DecodedMinishardIndex, +// key: Uint64, +// ): { startOffset: Uint64; endOffset: Uint64 } | undefined { +// const minishardIndexData = minishardIndex.data; +// const minishardIndexSize = minishardIndexData.length / 6; +// const keyLow = key.low; +// const keyHigh = key.high; +// for (let i = 0; i < minishardIndexSize; ++i) { +// if ( +// minishardIndexData[i * 2] !== keyLow || +// minishardIndexData[i * 2 + 1] !== keyHigh +// ) { +// continue; +// } +// const startOffset = new Uint64( +// minishardIndexData[(minishardIndexSize + i) * 2], +// minishardIndexData[(minishardIndexSize + i) * 2 + 1], +// ); +// const endOffset = new Uint64( +// minishardIndexData[(2 * minishardIndexSize + i) * 2], +// minishardIndexData[(2 * minishardIndexSize + i) * 2 + 1], +// ); +// return { startOffset, endOffset }; +// } +// return undefined; +// } -async function getShardedData( - minishardIndexSource: MinishardIndexSource, - chunk: Chunk, - key: Uint64, 
- cancellationToken: CancellationToken, -): Promise<{ shardInfo: ShardInfo; data: ArrayBuffer } | undefined> { - const { sharding } = minishardIndexSource; - const hashFunction = shardingHashFunctions.get(sharding.hash)!; - const hashCode = Uint64.rshift(new Uint64(), key, sharding.preshiftBits); - hashFunction(hashCode); - const shardAndMinishard = Uint64.lowMask( - new Uint64(), - sharding.minishardBits + sharding.shardBits, - ); - Uint64.and(shardAndMinishard, shardAndMinishard, hashCode); - const getPriority = () => ({ - priorityTier: chunk.priorityTier, - priority: chunk.priority, - }); - const minishardIndex = await minishardIndexSource.getData( - shardAndMinishard, - getPriority, - cancellationToken, - ); - if (minishardIndex === undefined) return undefined; - const minishardEntry = findMinishardEntry(minishardIndex, key); - if (minishardEntry === undefined) return undefined; - const { startOffset, endOffset } = minishardEntry; - let data = await fetchSpecialHttpByteRange( - minishardIndexSource.credentialsProvider, - minishardIndex.shardUrl, - startOffset, - endOffset, - cancellationToken, - ); - if (minishardIndexSource.sharding.dataEncoding === DataEncoding.GZIP) { - data = ( - await requestAsyncComputation( - decodeGzip, - cancellationToken, - [data], - new Uint8Array(data), - ) - ).buffer; - } - return { - data, - shardInfo: { shardUrl: minishardIndex.shardUrl, offset: startOffset }, - }; -} +// async function getShardedData( +// minishardIndexSource: MinishardIndexSource, +// chunk: Chunk, +// key: Uint64, +// cancellationToken: CancellationToken, +// ): Promise<{ shardInfo: ShardInfo; data: ArrayBuffer } | undefined> { +// const { sharding } = minishardIndexSource; +// const hashFunction = shardingHashFunctions.get(sharding.hash)!; +// const hashCode = Uint64.rshift(new Uint64(), key, sharding.preshiftBits); +// hashFunction(hashCode); +// const shardAndMinishard = Uint64.lowMask( +// new Uint64(), +// sharding.minishardBits + sharding.shardBits, +// ); +// Uint64.and(shardAndMinishard, shardAndMinishard, hashCode); +// const getPriority = () => ({ +// priorityTier: chunk.priorityTier, +// priority: chunk.priority, +// }); +// const minishardIndex = await minishardIndexSource.getData( +// shardAndMinishard, +// getPriority, +// cancellationToken, +// ); +// if (minishardIndex === undefined) return undefined; +// const minishardEntry = findMinishardEntry(minishardIndex, key); +// if (minishardEntry === undefined) return undefined; +// const { startOffset, endOffset } = minishardEntry; +// let data = await fetchSpecialHttpByteRange( +// minishardIndexSource.credentialsProvider, +// minishardIndex.shardUrl, +// startOffset, +// endOffset, +// cancellationToken, +// ); +// if (minishardIndexSource.sharding.dataEncoding === DataEncoding.GZIP) { +// data = ( +// await requestAsyncComputation( +// decodeGzip, +// cancellationToken, +// [data], +// new Uint8Array(data), +// ) +// ).buffer; +// } +// return { +// data, +// shardInfo: { shardUrl: minishardIndex.shardUrl, offset: startOffset }, +// }; +// } -function getOrNotFoundError(v: T | undefined) { - if (v === undefined) throw new Error("not found"); - return v; -} +// function getOrNotFoundError(v: T | undefined) { +// if (v === undefined) throw new Error("not found"); +// return v; +// } -async function fetchByUint64( - credentialsProvider: SpecialProtocolCredentialsProvider, - url: string, - chunk: Chunk, - minishardIndexSource: MinishardIndexSource | undefined, - id: Uint64, - cancellationToken: CancellationToken, -) { - if 
(minishardIndexSource === undefined) { - try { - return await cancellableFetchSpecialOk( - credentialsProvider, - `${url}/${id}`, - {}, - responseArrayBuffer, - cancellationToken, - ); - } catch (e) { - if (isNotFoundError(e)) return undefined; - throw e; - } - } - const result = await getShardedData( - minishardIndexSource, - chunk, - id, - cancellationToken, - ); - if (result === undefined) return undefined; - return result.data; -} +// async function fetchByUint64( +// credentialsProvider: SpecialProtocolCredentialsProvider, +// url: string, +// chunk: Chunk, +// minishardIndexSource: MinishardIndexSource | undefined, +// id: Uint64, +// cancellationToken: CancellationToken, +// ) { +// if (minishardIndexSource === undefined) { +// try { +// return await cancellableFetchSpecialOk( +// credentialsProvider, +// `${url}/${id}`, +// {}, +// responseArrayBuffer, +// cancellationToken, +// ); +// } catch (e) { +// if (isNotFoundError(e)) return undefined; +// throw e; +// } +// } +// const result = await getShardedData( +// minishardIndexSource, +// chunk, +// id, +// cancellationToken, +// ); +// if (result === undefined) return undefined; +// return result.data; +// } @registerSharedObject() -export class PrecomputedSkeletonSource extends WithParameters( +export class trkSkeletonSource extends WithParameters( WithSharedCredentialsProviderCounterpart()( SkeletonSource, ), SkeletonSourceParameters, ) { - private minishardIndexSource = getMinishardIndexDataSource( - this.chunkManager, - this.credentialsProvider, - { url: this.parameters.url, sharding: this.parameters.metadata.sharding }, - ); - async download(chunk: SkeletonChunk, cancellationToken: CancellationToken) { + // private minishardIndexSource = getMinishardIndexDataSource( + // this.chunkManager, + // this.credentialsProvider, + // { url: this.parameters.url, sharding: this.parameters.metadata.sharding }, + // ); + + async download(chunk: SkeletonChunk, + // cancellationToken: CancellationToken + ) { const { parameters } = this; - const response = getOrNotFoundError( - await fetchByUint64( - this.credentialsProvider, - parameters.url, - chunk, - this.minishardIndexSource, - chunk.objectId, - cancellationToken, - ), - ); + // const response = getOrNotFoundError( + // await fetchByUint64( + // this.credentialsProvider, + // parameters.url, + // chunk, + // this.minishardIndexSource, + // chunk.objectId, + // cancellationToken, + // ), + // ); - console.log("reponse: ", response); + // console.log("reponse: ", response); + + + let response: AxiosResponse | null = null; - try { - const response = await axios.get("http://127.0.0.1:9123/Users/shrutiv/MyDocuments/GitHub/Neuroglancer-Tractography/src/tract/20240920_163900/1", + response = await axios.get("http://127.0.0.1:9123/Users/shrutiv/MyDocuments/GitHub/Neuroglancer-Tractography/src/tract/20240920_163900/1", { responseType: 'arraybuffer' }); - - console.log("1: ", response.data); - - // Create DataView from response.data - const dv = new DataView(response.data); - - // Read the number of vertices and edges - const numVertices = dv.getUint32(0, true); - console.log(numVertices); - const numEdges = dv.getUint32(4, true); - console.log(numEdges); + + if (response && response.data) { + console.log("1: ", response.data); + + // Create DataView from response.data + const dv = new DataView(response.data); + + // Read the number of vertices and edges + const numVertices = dv.getUint32(0, true); + console.log(numVertices); + const numEdges = dv.getUint32(4, true); + console.log(numEdges); + } 
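+          // Sketch of the remaining payload, assuming it matches what
+          // SkeletonWriter.writeSkeleton (added later in this series) emits
+          // after the two counts read above:
+          //   numVertices * 12 bytes - float32 x, y, z per vertex (little-endian)
+          //   numEdges    *  8 bytes - uint32 vertex1, vertex2 per edge
+          //   numVertices * 12 bytes - float32 orientation vector per vertex
+          // decodeSkeletonChunk() below consumes this layout together with the
+          // "orientation" vertex attribute declared in the skeleton info file.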
else { + throw new Error("No data received from response."); + } + } catch (error) { console.error('Error fetching data', error); } - - console.log("Inside download()") - decodeSkeletonChunk(chunk, response, parameters.metadata.vertexAttributes); + // Only call decodeSkeletonChunk if response is not null + if (response !== null) { + console.log("Inside download()"); + decodeSkeletonChunk(chunk, response.data, parameters.metadata.vertexAttributes); + } else { + console.error("Cannot call decodeSkeletonChunk, response is null."); + } + } } diff --git a/src/datasource/trk/frontend.ts b/src/datasource/trk/frontend.ts index 58e3ef03d..69c579249 100644 --- a/src/datasource/trk/frontend.ts +++ b/src/datasource/trk/frontend.ts @@ -87,7 +87,7 @@ import { Uint64 } from "#src/util/uint64.js"; -class PrecomputedSkeletonSource extends WithParameters( +class trkSkeletonSource extends WithParameters( WithCredentialsProvider()(SkeletonSource), SkeletonSourceParameters, ) { @@ -257,7 +257,7 @@ async function getSkeletonSource( url, ); return { - source: chunkManager.getChunkSource(PrecomputedSkeletonSource, { + source: chunkManager.getChunkSource(trkSkeletonSource, { credentialsProvider, parameters: { url, @@ -276,7 +276,7 @@ function getJsonMetadata( ): Promise { return chunkManager.memoize.getUncounted( { - type: "precomputed:metadata", + type: "trk:metadata", url, credentialsProvider: getObjectId(credentialsProvider), }, @@ -456,7 +456,7 @@ function parseInlinePropertyMap(data: unknown): InlineSegmentPropertyMap { return normalizeInlineSegmentPropertyMap({ ids, properties }); } -export const PrecomputedIndexedSegmentPropertySource = WithParameters( +export const trkIndexedSegmentPropertySource = WithParameters( WithCredentialsProvider()( IndexedSegmentPropertySource, ), @@ -487,7 +487,7 @@ export function getSegmentPropertyMap( // const indexedProperties = verifyOptionalObjectProperty(data, 'indexed', indexedObj => { // const {sharding, properties} = parseIndexedPropertyMap(indexedObj); // return chunkManager.getChunkSource( - // PrecomputedIndexedSegmentPropertySource, + // trkIndexedSegmentPropertySource, // {credentialsProvider, properties, parameters: {sharding, url}}); // }); return new SegmentPropertyMap({ inlineProperties }); @@ -558,7 +558,7 @@ export class TrkDataSource extends DataSourceProvider { options.providerUrl, ); return options.chunkManager.memoize.getUncounted( - { type: "precomputed:get", providerUrl, parameters }, + { type: "trk:get", providerUrl, parameters }, async (): Promise => { const { url, credentialsProvider } = parseSpecialUrl( providerUrl, From e149e7ab5f8c1335b00e8156f4e4fd400b331e2e Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Thu, 10 Oct 2024 17:26:15 -0400 Subject: [PATCH 03/12] Sharding code commented --- src/datasource/trk/base.ts | 28 +++---- src/datasource/trk/frontend.ts | 133 +++++++++++++++++---------------- 2 files changed, 82 insertions(+), 79 deletions(-) diff --git a/src/datasource/trk/base.ts b/src/datasource/trk/base.ts index b6db1df7f..74281a8c7 100644 --- a/src/datasource/trk/base.ts +++ b/src/datasource/trk/base.ts @@ -27,19 +27,19 @@ export enum ShardingHashFunction { MURMURHASH3_X86_128 = 1, } -export interface ShardingParameters { - hash: ShardingHashFunction; - preshiftBits: number; - minishardBits: number; - shardBits: number; - minishardIndexEncoding: DataEncoding; - dataEncoding: DataEncoding; -} +// export interface ShardingParameters { +// hash: ShardingHashFunction; +// preshiftBits: number; +// minishardBits: number; +// shardBits: 
number; +// minishardIndexEncoding: DataEncoding; +// dataEncoding: DataEncoding; +// } export interface SkeletonMetadata { transform: mat4; vertexAttributes: Map; - sharding: ShardingParameters | undefined; + // sharding: ShardingParameters | undefined; } export class SkeletonSourceParameters { @@ -50,8 +50,8 @@ export class SkeletonSourceParameters { } -export class IndexedSegmentPropertySourceParameters { - url: string; - sharding: ShardingParameters | undefined; - static RPC_ID = "trk/IndexedSegmentPropertySource"; -} +// export class IndexedSegmentPropertySourceParameters { +// url: string; +// sharding: ShardingParameters | undefined; +// static RPC_ID = "trk/IndexedSegmentPropertySource"; +// } diff --git a/src/datasource/trk/frontend.ts b/src/datasource/trk/frontend.ts index 69c579249..6297b5001 100644 --- a/src/datasource/trk/frontend.ts +++ b/src/datasource/trk/frontend.ts @@ -32,13 +32,13 @@ import type { } from "#src/datasource/index.js"; import { DataSourceProvider, RedirectError } from "#src/datasource/index.js"; import type { - ShardingParameters, + // ShardingParameters, SkeletonMetadata, } from "#src/datasource/trk/base.js"; import { - DataEncoding, - IndexedSegmentPropertySourceParameters, - ShardingHashFunction, + // DataEncoding, + // IndexedSegmentPropertySourceParameters, + // ShardingHashFunction, SkeletonSourceParameters, } from "#src/datasource/trk/base.js"; import type { @@ -46,7 +46,7 @@ import type { InlineSegmentPropertyMap, } from "#src/segmentation_display_state/property_map.js"; import { - IndexedSegmentPropertySource, + // IndexedSegmentPropertySource, normalizeInlineSegmentPropertyMap, SegmentPropertyMap, } from "#src/segmentation_display_state/property_map.js"; @@ -65,7 +65,7 @@ import { unparseQueryStringParameters, verifyEnumString, verifyFiniteFloat, - verifyInt, + // verifyInt, verifyObject, verifyObjectProperty, verifyOptionalObjectProperty, @@ -128,53 +128,53 @@ function parseTransform(data: any): mat4 { }); } -function parseShardingEncoding(y: any): DataEncoding { - if (y === undefined) return DataEncoding.RAW; - return verifyEnumString(y, DataEncoding); -} +// function parseShardingEncoding(y: any): DataEncoding { +// if (y === undefined) return DataEncoding.RAW; +// return verifyEnumString(y, DataEncoding); +// } -function parseShardingParameters( - shardingData: any, -): ShardingParameters | undefined { - if (shardingData === undefined) return undefined; - verifyObject(shardingData); - const t = verifyObjectProperty(shardingData, "@type", verifyString); - if (t !== "neuroglancer_uint64_sharded_v1") { - throw new Error(`Unsupported sharding format: ${JSON.stringify(t)}`); - } - const hash = verifyObjectProperty(shardingData, "hash", (y) => - verifyEnumString(y, ShardingHashFunction), - ); - const preshiftBits = verifyObjectProperty( - shardingData, - "preshift_bits", - verifyInt, - ); - const shardBits = verifyObjectProperty(shardingData, "shard_bits", verifyInt); - const minishardBits = verifyObjectProperty( - shardingData, - "minishard_bits", - verifyInt, - ); - const minishardIndexEncoding = verifyObjectProperty( - shardingData, - "minishard_index_encoding", - parseShardingEncoding, - ); - const dataEncoding = verifyObjectProperty( - shardingData, - "data_encoding", - parseShardingEncoding, - ); - return { - hash, - preshiftBits, - shardBits, - minishardBits, - minishardIndexEncoding, - dataEncoding, - }; -} +// function parseShardingParameters( +// shardingData: any, +// ): ShardingParameters | undefined { +// if (shardingData === undefined) 
return undefined; +// verifyObject(shardingData); +// const t = verifyObjectProperty(shardingData, "@type", verifyString); +// if (t !== "neuroglancer_uint64_sharded_v1") { +// throw new Error(`Unsupported sharding format: ${JSON.stringify(t)}`); +// } +// const hash = verifyObjectProperty(shardingData, "hash", (y) => +// verifyEnumString(y, ShardingHashFunction), +// ); +// const preshiftBits = verifyObjectProperty( +// shardingData, +// "preshift_bits", +// verifyInt, +// ); +// const shardBits = verifyObjectProperty(shardingData, "shard_bits", verifyInt); +// const minishardBits = verifyObjectProperty( +// shardingData, +// "minishard_bits", +// verifyInt, +// ); +// const minishardIndexEncoding = verifyObjectProperty( +// shardingData, +// "minishard_index_encoding", +// parseShardingEncoding, +// ); +// const dataEncoding = verifyObjectProperty( +// shardingData, +// "data_encoding", +// parseShardingEncoding, +// ); +// return { +// hash, +// preshiftBits, +// shardBits, +// minishardBits, +// minishardIndexEncoding, +// dataEncoding, +// }; +// } interface ParsedSkeletonMetadata { metadata: SkeletonMetadata; @@ -209,18 +209,20 @@ function parseSkeletonMetadata(data: any): ParsedSkeletonMetadata { vertexAttributes.set(id, { dataType, numComponents }); }); }); - const sharding = verifyObjectProperty( - data, - "sharding", - parseShardingParameters, - ); + // const sharding = verifyObjectProperty( + // data, + // "sharding", + // parseShardingParameters, + // ); const segmentPropertyMap = verifyObjectProperty( data, "segment_properties", verifyOptionalString, ); return { - metadata: { transform, vertexAttributes, sharding } as SkeletonMetadata, + metadata: { transform, vertexAttributes, + // sharding + } as SkeletonMetadata, segmentPropertyMap, }; } @@ -456,12 +458,12 @@ function parseInlinePropertyMap(data: unknown): InlineSegmentPropertyMap { return normalizeInlineSegmentPropertyMap({ ids, properties }); } -export const trkIndexedSegmentPropertySource = WithParameters( - WithCredentialsProvider()( - IndexedSegmentPropertySource, - ), - IndexedSegmentPropertySourceParameters, -); +// export const trkIndexedSegmentPropertySource = WithParameters( +// WithCredentialsProvider()( +// IndexedSegmentPropertySource, +// ), +// IndexedSegmentPropertySourceParameters, +// ); export function getSegmentPropertyMap( chunkManager: Borrowed, @@ -547,7 +549,8 @@ export class TrkDataSource extends DataSourceProvider { } normalizeUrl(options: NormalizeUrlOptions): string { - const { url, parameters } = parseProviderUrl(options.providerUrl); + const { url, parameters } = + parseProviderUrl(options.providerUrl); return ( options.providerProtocol + "://" + unparseProviderUrl(url, parameters) ); From dade63517eb5e3c47c5bcc32e26a90041992f706 Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Thu, 10 Oct 2024 17:26:59 -0400 Subject: [PATCH 04/12] trk files and dependencies --- package-lock.json | 192 ++++++++++++- package.json | 5 +- src/datasource/trk/reader/color.txt | 34 +++ src/datasource/trk/reader/main.ts | 43 +++ src/datasource/trk/reader/skeletonWriter.ts | 254 ++++++++++++++++ src/datasource/trk/reader/trackProcessor.ts | 272 ++++++++++++++++++ src/datasource/trk/reader/trkHeader.ts | 182 ++++++++++++ .../trk/reader/voxelToRASConverter.ts | 271 +++++++++++++++++ 8 files changed, 1239 insertions(+), 14 deletions(-) create mode 100644 src/datasource/trk/reader/color.txt create mode 100644 src/datasource/trk/reader/main.ts create mode 100644 src/datasource/trk/reader/skeletonWriter.ts create mode 
100644 src/datasource/trk/reader/trackProcessor.ts create mode 100644 src/datasource/trk/reader/trkHeader.ts create mode 100644 src/datasource/trk/reader/voxelToRASConverter.ts diff --git a/package-lock.json b/package-lock.json index 1e7ee9b2d..1453fe542 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,18 +11,22 @@ "dependencies": { "axios": "^1.7.7", "codemirror": "^5.61.1", + "dotenv": "^16.4.5", "gl-matrix": "3.1.0", "glsl-editor": "^1.0.0", "ikonate": "github:mikolajdobrucki/ikonate#a86b4107c6ec717e7877f880a930d1ccf0b59d89", "lodash-es": "^4.17.21", + "mathjs": "^13.2.0", "nifti-reader-js": "^0.6.8", "numcodecs": "^0.3.1", "pako": "^2.1.0" }, "devDependencies": { "@types/codemirror": "5.60.15", + "@types/dotenv": "^6.1.1", "@types/gl-matrix": "^2.4.5", "@types/lodash-es": "^4.17.12", + "@types/mathjs": "^9.4.1", "@types/node": "^20.14.12", "@types/pako": "^2.0.3", "@types/yargs": "^17.0.32", @@ -33,7 +37,6 @@ "css-loader": "^7.1.2", "esbuild": "^0.23.0", "esbuild-loader": "^4.2.2", - "eslint": "^8.56.0", "eslint-formatter-codeframe": "^7.32.1", "eslint-import-resolver-typescript": "^3.6.1", "eslint-interactive": "^10.8.0", @@ -65,6 +68,7 @@ "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", "integrity": "sha512-1Yjs2SvM8TflER/OD3cOjhWWOZb58A2t7wpE2S9XfBYTiIl+XFhQG2bjy4Pu1I+EAlCNUzRDYDdFwFYUKvXcIA==", "dev": true, + "peer": true, "engines": { "node": ">=0.10.0" } @@ -186,10 +190,10 @@ } }, "node_modules/@babel/runtime": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.24.8.tgz", - "integrity": "sha512-5F7SDGs1T72ZczbRwbGO9lQi0NLjQxzl6i4lJxLxfW9U5UluCSyEJeniWvnhl3/euNiqQVbo8zruhsDfid0esA==", - "dev": true, + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.7.tgz", + "integrity": "sha512-FjoyLe754PMiYsFaN5C94ttGiOmBNYTf6pLr4xXHAT5uctHb092PBszndLDR5XA/jghQvn4n7JMHl7dmTgbm9w==", + "license": "MIT", "dependencies": { "regenerator-runtime": "^0.14.0" }, @@ -656,6 +660,7 @@ "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz", "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==", "dev": true, + "peer": true, "dependencies": { "ajv": "^6.12.4", "debug": "^4.3.2", @@ -679,6 +684,7 @@ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, + "peer": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -689,6 +695,7 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, + "peer": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -701,6 +708,7 @@ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.56.0.tgz", "integrity": "sha512-gMsVel9D7f2HLkBma9VbtzZRehRogVRfbr++f06nL2vnCGCNlzOD+/MUov/F4p8myyAHspEhVobgjpX64q5m6A==", "dev": true, + "peer": true, "engines": { "node": "^12.22.0 || ^14.17.0 || >=16.0.0" } @@ -710,6 +718,7 @@ "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", "dev": true, + "peer": true, "dependencies": { "@humanwhocodes/object-schema": "^2.0.2", "debug": "^4.3.1", @@ -724,6 
+733,7 @@ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, + "peer": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -734,6 +744,7 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, + "peer": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -746,6 +757,7 @@ "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", "dev": true, + "peer": true, "engines": { "node": ">=12.22" }, @@ -758,7 +770,8 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.2.tgz", "integrity": "sha512-6EwiSjwWYP7pTckG6I5eyFANjPhmPjUX9JRLUSfNPC7FX7zK9gyZAfUEaECL6ALTpGX5AjnBq3C9XmVWPitNpw==", - "dev": true + "dev": true, + "peer": true }, "node_modules/@inquirer/confirm": { "version": "3.1.17", @@ -1500,6 +1513,16 @@ "integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==", "dev": true }, + "node_modules/@types/dotenv": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/@types/dotenv/-/dotenv-6.1.1.tgz", + "integrity": "sha512-ftQl3DtBvqHl9L16tpqqzA4YzCSXZfi7g8cQceTz5rOlYtk/IZbFjAv3mLOQlNIgOaylCQWQoBdDQHPgEBJPHg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/eslint": { "version": "8.56.5", "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.56.5.tgz", @@ -1634,6 +1657,16 @@ "@types/lodash": "*" } }, + "node_modules/@types/mathjs": { + "version": "9.4.1", + "resolved": "https://registry.npmjs.org/@types/mathjs/-/mathjs-9.4.1.tgz", + "integrity": "sha512-pEvgJ9c0LkVSZODbBuxeFngyhg/xMpZElcmvtFLayUXEgt6I4fGcMaxlPspV4kIMucmY6W4YwmtaWyTAQpqsrQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "mathjs": "*" + } + }, "node_modules/@types/mime": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", @@ -1992,7 +2025,8 @@ "version": "1.2.0", "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.2.0.tgz", "integrity": "sha512-zuVdFrMJiuCDQUMCzQaD6KL28MjnqqN8XnAqiEq9PNm/hCPTSGfrXCOfwj1ow4LFb/tNymJPwsNbVePc1xFqrQ==", - "dev": true + "dev": true, + "peer": true }, "node_modules/@vitest/browser": { "version": "2.0.4", @@ -2611,6 +2645,7 @@ "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", "dev": true, + "peer": true, "peerDependencies": { "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" } @@ -2987,7 +3022,8 @@ "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true + "dev": true, + "peer": true }, "node_modules/aria-query": { "version": "5.3.0", @@ -4066,6 +4102,19 @@ "integrity": "sha512-QE33hToZseCH3jS0qN96O/bSh3kaw/h+Tq7ngyY9eWDUnTlTNUyqfqvCXioLe5Na5jFsL78ra/wuBU4iuEgd4w==", "dev": true }, + "node_modules/complex.js": { + "version": "2.2.5", + "resolved": 
"https://registry.npmjs.org/complex.js/-/complex.js-2.2.5.tgz", + "integrity": "sha512-U3pSYTZz5Af/xvHgKQkJYHBMGmae7Ms51qqJougCR05YWF1Fihef4LRfOpBFONH2gvPFHMZq2rhx0I44DG23xw==", + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + } + }, "node_modules/compress-commons": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/compress-commons/-/compress-commons-6.0.2.tgz", @@ -4601,6 +4650,12 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/decimal.js": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.4.3.tgz", + "integrity": "sha512-VBBaLc1MgL5XpzgIP7ny5Z6Nx3UrRkIViUkPUdtl9aya5amy3De1gsUUSB1g3+3sExYNjCAsAznmukyxCb1GRA==", + "license": "MIT" + }, "node_modules/decompress-response": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", @@ -4935,6 +4990,7 @@ "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", "dev": true, + "peer": true, "dependencies": { "esutils": "^2.0.2" }, @@ -5031,6 +5087,18 @@ "tslib": "^2.0.3" } }, + "node_modules/dotenv": { + "version": "16.4.5", + "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.5.tgz", + "integrity": "sha512-ZmdL2rui+eB2YwhsWzjInR8LldtZHGDoQ1ugH85ppHKwpUHL7j7rN0Ti9NCnGiQbhaZ11FpR+7ao1dNsmduNUg==", + "license": "BSD-2-Clause", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://dotenvx.com" + } + }, "node_modules/duplexer": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/duplexer/-/duplexer-0.1.2.tgz", @@ -5838,6 +5906,12 @@ "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", "dev": true }, + "node_modules/escape-latex": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/escape-latex/-/escape-latex-1.2.0.tgz", + "integrity": "sha512-nV5aVWW1K0wEiUIEdZ4erkGGH8mDxGyxSeqPzRNtWP7ataw+/olFObw7hujFWlVjNsaDFw5VZ5NzVSIqRgfTiw==", + "license": "MIT" + }, "node_modules/escape-string-regexp": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", @@ -5875,6 +5949,7 @@ "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.56.0.tgz", "integrity": "sha512-Go19xM6T9puCOWntie1/P997aXxFsOi37JIHRWI514Hc6ZnaHGKY9xFhrU65RT6CcBEzZoGG1e6Nq+DT04ZtZQ==", "dev": true, + "peer": true, "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", @@ -6166,6 +6241,7 @@ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz", "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==", "dev": true, + "peer": true, "dependencies": { "esrecurse": "^4.3.0", "estraverse": "^5.2.0" @@ -6301,6 +6377,7 @@ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, + "peer": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -6311,6 +6388,7 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, + "peer": true, "dependencies": 
{ "brace-expansion": "^1.1.7" }, @@ -6323,6 +6401,7 @@ "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz", "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==", "dev": true, + "peer": true, "dependencies": { "acorn": "^8.9.0", "acorn-jsx": "^5.3.2", @@ -6352,6 +6431,7 @@ "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, + "peer": true, "dependencies": { "estraverse": "^5.1.0" }, @@ -6780,6 +6860,7 @@ "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz", "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==", "dev": true, + "peer": true, "dependencies": { "flat-cache": "^3.0.4" }, @@ -6853,6 +6934,7 @@ "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", "dev": true, + "peer": true, "dependencies": { "locate-path": "^6.0.0", "path-exists": "^4.0.0" @@ -6878,6 +6960,7 @@ "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz", "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==", "dev": true, + "peer": true, "dependencies": { "flatted": "^3.2.9", "keyv": "^4.5.3", @@ -7216,6 +7299,19 @@ "node": ">= 0.6" } }, + "node_modules/fraction.js": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.3.7.tgz", + "integrity": "sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==", + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, "node_modules/fresh": { "version": "0.5.2", "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", @@ -7506,6 +7602,7 @@ "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", "dev": true, + "peer": true, "dependencies": { "is-glob": "^4.0.3" }, @@ -7554,6 +7651,7 @@ "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz", "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==", "dev": true, + "peer": true, "dependencies": { "type-fest": "^0.20.2" }, @@ -7569,6 +7667,7 @@ "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz", "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==", "dev": true, + "peer": true, "engines": { "node": ">=10" }, @@ -8304,6 +8403,7 @@ "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", "dev": true, + "peer": true, "engines": { "node": ">=0.8.19" } @@ -8855,6 +8955,12 @@ "@pkgjs/parseargs": "^0.11.0" } }, + "node_modules/javascript-natural-sort": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/javascript-natural-sort/-/javascript-natural-sort-0.7.1.tgz", + "integrity": "sha512-nO6jcEfZWQXDhOiBtG2KvKyEptz7RVbpGP4vTD2hLBdmNQSsCiicO2Ioinv6UI4y9ukqnBpy+XZ9H6uLNgJTlw==", + "license": "MIT" + }, "node_modules/jest-util": { "version": 
"29.7.0", "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", @@ -8984,6 +9090,7 @@ "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", "dev": true, + "peer": true, "dependencies": { "argparse": "^2.0.1" }, @@ -9168,7 +9275,8 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", - "dev": true + "dev": true, + "peer": true }, "node_modules/json-stringify-safe": { "version": "5.0.1", @@ -9305,6 +9413,7 @@ "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", "dev": true, + "peer": true, "dependencies": { "prelude-ls": "^1.2.1", "type-check": "~0.4.0" @@ -9389,6 +9498,7 @@ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", "dev": true, + "peer": true, "dependencies": { "p-locate": "^5.0.0" }, @@ -9419,7 +9529,8 @@ "version": "4.6.2", "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz", "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==", - "dev": true + "dev": true, + "peer": true }, "node_modules/lodash.sortby": { "version": "4.7.0", @@ -9542,6 +9653,29 @@ "@jridgewell/sourcemap-codec": "^1.4.15" } }, + "node_modules/mathjs": { + "version": "13.2.0", + "resolved": "https://registry.npmjs.org/mathjs/-/mathjs-13.2.0.tgz", + "integrity": "sha512-P5PZoiUX2Tkghkv3tsSqlK0B9My/ErKapv1j6wdxd0MOrYQ30cnGE4LH/kzYB2gA5rN46Njqc4cFgJjaxgijoQ==", + "license": "Apache-2.0", + "dependencies": { + "@babel/runtime": "^7.25.6", + "complex.js": "^2.1.1", + "decimal.js": "^10.4.3", + "escape-latex": "^1.2.0", + "fraction.js": "^4.3.7", + "javascript-natural-sort": "^0.7.1", + "seedrandom": "^3.0.5", + "tiny-emitter": "^2.1.0", + "typed-function": "^4.2.1" + }, + "bin": { + "mathjs": "bin/cli.js" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/media-typer": { "version": "0.3.0", "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", @@ -10263,6 +10397,7 @@ "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.3.tgz", "integrity": "sha512-JjCoypp+jKn1ttEFExxhetCKeJt9zhAgAve5FXHixTvFDW/5aEktX9bufBKLRRMdU7bNtpLfcGu94B3cdEJgjg==", "dev": true, + "peer": true, "dependencies": { "@aashutoshrathi/word-wrap": "^1.2.3", "deep-is": "^0.1.3", @@ -10357,6 +10492,7 @@ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", "dev": true, + "peer": true, "dependencies": { "yocto-queue": "^0.1.0" }, @@ -10372,6 +10508,7 @@ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", "dev": true, + "peer": true, "dependencies": { "p-limit": "^3.0.2" }, @@ -10833,6 +10970,7 @@ "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", "integrity": 
"sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", "dev": true, + "peer": true, "engines": { "node": ">= 0.8.0" } @@ -11259,8 +11397,7 @@ "node_modules/regenerator-runtime": { "version": "0.14.1", "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", - "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==", - "dev": true + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" }, "node_modules/regexp.prototype.flags": { "version": "1.5.1", @@ -11567,6 +11704,7 @@ "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "dev": true, + "peer": true, "dependencies": { "glob": "^7.1.3" }, @@ -11582,6 +11720,7 @@ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dev": true, + "peer": true, "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" @@ -11592,6 +11731,7 @@ "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "dev": true, + "peer": true, "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", @@ -11612,6 +11752,7 @@ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dev": true, + "peer": true, "dependencies": { "brace-expansion": "^1.1.7" }, @@ -11777,6 +11918,12 @@ "url": "https://opencollective.com/webpack" } }, + "node_modules/seedrandom": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz", + "integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==", + "license": "MIT" + }, "node_modules/select-hose": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/select-hose/-/select-hose-2.0.0.tgz", @@ -12703,6 +12850,7 @@ "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", "dev": true, + "peer": true, "engines": { "node": ">=8" }, @@ -12983,7 +13131,8 @@ "version": "0.2.0", "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==", - "dev": true + "dev": true, + "peer": true }, "node_modules/through": { "version": "2.3.8", @@ -13027,6 +13176,12 @@ "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==", "dev": true }, + "node_modules/tiny-emitter": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/tiny-emitter/-/tiny-emitter-2.1.0.tgz", + "integrity": "sha512-NB6Dk1A9xgQPMoGqC5CVXn123gWyte215ONT5Pp5a0yt4nlEoO1ZWeCwpncaekPHXO60i47ihFnZPiRPjRMq4Q==", + "license": "MIT" + }, "node_modules/tinybench": { "version": "2.8.0", "resolved": "https://registry.npmjs.org/tinybench/-/tinybench-2.8.0.tgz", @@ -13601,6 +13756,7 @@ "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", 
"integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", "dev": true, + "peer": true, "dependencies": { "prelude-ls": "^1.2.1" }, @@ -13698,6 +13854,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/typed-function": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/typed-function/-/typed-function-4.2.1.tgz", + "integrity": "sha512-EGjWssW7Tsk4DGfE+5yluuljS1OGYWiI1J6e8puZz9nTMM51Oug8CD5Zo4gWMsOhq5BI+1bF+rWTm4Vbj3ivRA==", + "license": "MIT", + "engines": { + "node": ">= 18" + } + }, "node_modules/typedarray": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", @@ -15503,6 +15668,7 @@ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "dev": true, + "peer": true, "engines": { "node": ">=10" }, diff --git a/package.json b/package.json index 2965f1225..cb74eb6c3 100644 --- a/package.json +++ b/package.json @@ -41,8 +41,10 @@ }, "devDependencies": { "@types/codemirror": "5.60.15", + "@types/dotenv": "^6.1.1", "@types/gl-matrix": "^2.4.5", "@types/lodash-es": "^4.17.12", + "@types/mathjs": "^9.4.1", "@types/node": "^20.14.12", "@types/pako": "^2.0.3", "@types/yargs": "^17.0.32", @@ -53,7 +55,6 @@ "css-loader": "^7.1.2", "esbuild": "^0.23.0", "esbuild-loader": "^4.2.2", - "eslint": "^8.56.0", "eslint-formatter-codeframe": "^7.32.1", "eslint-import-resolver-typescript": "^3.6.1", "eslint-interactive": "^10.8.0", @@ -79,10 +80,12 @@ "dependencies": { "axios": "^1.7.7", "codemirror": "^5.61.1", + "dotenv": "^16.4.5", "gl-matrix": "3.1.0", "glsl-editor": "^1.0.0", "ikonate": "github:mikolajdobrucki/ikonate#a86b4107c6ec717e7877f880a930d1ccf0b59d89", "lodash-es": "^4.17.21", + "mathjs": "^13.2.0", "nifti-reader-js": "^0.6.8", "numcodecs": "^0.3.1", "pako": "^2.1.0" diff --git a/src/datasource/trk/reader/color.txt b/src/datasource/trk/reader/color.txt new file mode 100644 index 000000000..fe9b692c6 --- /dev/null +++ b/src/datasource/trk/reader/color.txt @@ -0,0 +1,34 @@ +/** + * Converts a 3D orientation vector into a color map based on the absolute values of its components. + * Each component of the orientation vector corresponds to a color channel (R, G, B), + * which is then clamped between 0.0 and 1.0 to ensure valid color values. + * + * @param {vec3} orient - The orientation vector whose components are used to determine the color. + * @returns {vec3} A color vector where each component is derived from the corresponding component of the orientation vector. + */ + +vec3 colormapOrient(vec3 orient){ + vec3 result; + result.r = abs(orient[0]); + result.g = abs(orient[1]); + result.b = abs(orient[2]); + return clamp(result, 0.0, 1.0); + + +} + +/** + * Main rendering function for a shader that adjusts the output color based on the orientation. + * The function checks if the orientation-based coloring is enabled (through a UI control), + * and if so, it uses the `colormapOrient` function to determine the color based on the orientation. + * If not enabled, it emits a default color or style. 
+ */ +#uicontrol bool orient_color checkbox(default=true) +void main() { + if(orient_color){ + emitRGB(colormapOrient(orientation)); + } + else{ + emitDefault(); + } +} \ No newline at end of file diff --git a/src/datasource/trk/reader/main.ts b/src/datasource/trk/reader/main.ts new file mode 100644 index 000000000..01f396ab8 --- /dev/null +++ b/src/datasource/trk/reader/main.ts @@ -0,0 +1,43 @@ + +// import dotenv from 'dotenv'; +import { TrackProcessor } from '#src/datasource/trk/reader/trackProcessor.js'; +// dotenv.config(); + +/** + * This function serves as the entry point for the application. It handles the sequence of operations involving track processing and skeleton precomputed format file creation. + * The function first sets up the track processor, fetches and processes only the header of the track file, and checks if the header was successfully processed. + * If the header is present, it then processes a specified number of track data and uploads the results to an S3 bucket. + * + * @async + * @returns {Promise} A promise that resolves when all operations are successfully completed or throws an error if any step fails. + */ +async function main() { + + // Create a global instance + const trackProcessor = new TrackProcessor(); + // Upload data from cloud + const trkFileUrl = 'https://dandiarchive.s3.amazonaws.com/blobs/d4a/c43/d4ac43bd-6896-4adf-a911-82edbea21f67'; + // Upload data from local machine + // const trkFilePath = '/Users/shrutiv/MyDocuments/GitHub/d4ac43bd-6896-4adf-a911-82edbea21f67.trk'; + + /* Process the header informtion from first 1000 bytes (0-999). */ + await trackProcessor.streamAndProcessHeader(trkFileUrl, 0, 999); + if (!trackProcessor.globalHeader) { + console.error('Error: Failed to fetch or process the TRK header.'); + return; + } + + /* Process all the tracks from starting from 1 and generate precomputed file for all + the tracks present in the randomTrackNumbers array. */ + const totalTracks = trackProcessor.globalHeader.n_count; + const randomTrackNumbers = trackProcessor.getRandomTrackIndices(totalTracks, 1000); + // const randomTrackNumbers = [1]; // process only single track + + await trackProcessor.processTrackData(randomTrackNumbers, 1, trkFileUrl); + // await trackProcessor.processTrackData(randomTrackNumbers, 1, trkFilePath); + + + +} + +main().catch(console.error); diff --git a/src/datasource/trk/reader/skeletonWriter.ts b/src/datasource/trk/reader/skeletonWriter.ts new file mode 100644 index 000000000..b04e1ea2a --- /dev/null +++ b/src/datasource/trk/reader/skeletonWriter.ts @@ -0,0 +1,254 @@ +import fs from 'fs'; +import path from 'path'; + +// import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'; + +/** + * Represents a 3D vertex with coordinates. + * @interface + */ +export interface Vertex { + x: number; + y: number; + z: number; +} + +/** + * Represents an edge connecting two vertices by their indices. + * @interface + */ +export interface Edge { + vertex1: number; + vertex2: number; +} + +/** + * Provides utilities for writing skeleton data to files and uploading them to AWS S3. + */ +export class SkeletonWriter { + + /** + * Writes skeleton data including vertices, edges, and orientations to a binary file. + * @static + * @param {Vertex[]} vertices - The list of vertices to write. + * @param {Edge[]} edges - The list of edges connecting the vertices. + * @param {number[][]} orientations - The orientations of each vertex. + * @param {string} outputFilePath - The file path where the binary data will be written. 
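+ * @example
+ * // Minimal usage sketch with a two-point streamline (values illustrative only;
+ * // note that positions are scaled by 1e6 when written, see below):
+ * SkeletonWriter.writeSkeleton(
+ *   [{ x: 0, y: 0, z: 0 }, { x: 1, y: 0, z: 0 }],  // vertices
+ *   [{ vertex1: 0, vertex2: 1 }],                   // edges
+ *   [[1, 0, 0], [1, 0, 0]],                         // per-vertex orientations
+ *   '/tmp/tract/20240920_163900/1',                 // hypothetical output path
+ * );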
+ */ + static writeSkeleton(vertices: Vertex[], edges: Edge[], orientations: number[][], outputFilePath: string) { + fs.mkdirSync(path.dirname(outputFilePath), { recursive: true }); + + const vertexCount = vertices.length; + const edgeCount = edges.length; + + const vertexSize = 12; // 3 floats (x, y, z), each 4 bytes + const edgeSize = 8; // 2 uint32s (source and target), each 4 bytes + const orientationSize = 12; // 3 floats (x, y, z) for orientations + const bufferSize = 4 + 4 + (vertexSize * vertexCount) + (edgeSize * edgeCount) + (orientationSize * vertexCount); + + const buffer = Buffer.alloc(bufferSize); + let offset = 0; + + buffer.writeUInt32LE(vertexCount, offset); // Number of vertices + offset += 4; + buffer.writeUInt32LE(edgeCount, offset); // Number of edges + offset += 4; + + // Write the vertices (3 floats per vertex: x, y, z) + for (let i = 0; i < vertexCount; i++) { + buffer.writeFloatLE((vertices[i].x)*1E6, offset); + buffer.writeFloatLE(vertices[i].y*1E6, offset + 4); + buffer.writeFloatLE(vertices[i].z*1E6, offset + 8); + offset += 12; + } + + // Write the edges (2 uint32 per edge: vertex1, vertex2) + for (let i = 0; i < edgeCount; i++) { + buffer.writeUInt32LE(edges[i].vertex1, offset); + buffer.writeUInt32LE(edges[i].vertex2, offset + 4); + offset += 8; + } + + // Write the orientations (3 floats per vertex) + for (let i = 0; i < vertexCount; i++) { + buffer.writeFloatLE(orientations[i][0], offset); + buffer.writeFloatLE(orientations[i][1], offset + 4); + buffer.writeFloatLE(orientations[i][2], offset + 8); + offset += 12; + } + + fs.writeFileSync(outputFilePath, buffer); + console.log(`Skeleton written to ${outputFilePath}`); + } + + /** + * Writes metadata about the skeleton data structure to a JSON file. + * @static + * @param {string} infoFilePath - The file path where the skeleton info will be written. + */ + static writeSkeletonInfo(infoFilePath: string) { + fs.mkdirSync(path.dirname(infoFilePath), { recursive: true }); + + const skeletonInfo = { + "@type": "neuroglancer_skeletons", + "vertex_attributes": [ + { + "id": "orientation", + "data_type": "float32", + "num_components": 3, + }, + ], + "segment_properties": "prop", + }; + + fs.mkdirSync(path.dirname(infoFilePath), { recursive: true }); + + // Write the skeleton info to the specified path + fs.writeFileSync(infoFilePath, JSON.stringify(skeletonInfo, null, 2)); + console.log(`Skeleton info file written to ${infoFilePath}`); + } + + /** + * Writes properties metadata for the skeleton to a JSON file. + * @static + * @param {string} propFilePath - The file path where the properties info will be written. + */ + + static writePropInfo(propFilePath: string) { + fs.mkdirSync(path.dirname(propFilePath), { recursive: true }); + + const propInfo = { + "@type": "neuroglancer_segment_properties", + "inline": { + "ids": ["1"], + "properties": [{ "id": "label", "type": "label", "values": ["1"] }] + } + }; + + // Write the prop info to the specified path + fs.writeFileSync(propFilePath, JSON.stringify(propInfo, null, 2)); + console.log(`Prop info file written to ${propFilePath}`); + } + + /** + * Generates file paths for the binary, property, and skeleton info files based on a timestamp. + * TimeStamp is used for having unique filename. + * @static + * @param {string} outputDirectory - The output directory for the files. + * @param {string} timestamp - The timestamp used to format the file paths. 
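+ * For a timestamp such as '20240920_163900' this yields, under outputDirectory:
+ *   tract/20240920_163900/1          (binary skeleton data)
+ *   tract/20240920_163900/prop/info  (segment properties JSON)
+ *   tract/20240920_163900/info       (skeleton info JSON)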
+ * @returns {{ binaryFilePath: string, propInfoFilePath: string, skeletonInfoFilePath: string }} + */ + static generateSkeletonFilePaths(outputDirectory: string, timestamp: string) { + + // Build the file paths with the formatted timestamp + const binaryFilePath = path.join(outputDirectory, 'tract', timestamp, '1'); // Binary file path + const propInfoFilePath = path.join(outputDirectory, 'tract', timestamp, 'prop', 'info'); // JSON file path + const skeletonInfoFilePath = path.join(outputDirectory, 'tract', timestamp, 'info'); // JSON file path + + return { + binaryFilePath, + propInfoFilePath, + skeletonInfoFilePath + }; + } + + /** + * Uploads a directory of files to AWS S3. + * @static + * @param {string} outputDirectory - The directory containing the files to upload. + * @param {string} timestamp - The timestamp used to organize the files in S3. + */ + // static async uploadSkeletonFilePathsToS3(outputDirectory: string, timestamp: string) { + // // Initialize the S3 client + // const s3Client = new S3Client({ + // region: process.env.AWS_REGION || 'us-east-2', + // }); + + // // Read the bucket name from environment variables + // const bucketName = process.env.BUCKET_NAME || 'linc-brain-mit-prod-us-east-2'; + + // // Check for required environment variables + // if (!process.env.AWS_REGION || !process.env.BUCKET_NAME) { + // console.error('AWS_REGION and BUCKET_NAME must be set in environment variables.'); + // return; + // } + + // // Define the local directory to upload + // const localDir = path.join(outputDirectory, 'tract', timestamp); + + // // Include the 'neuroglancer_trk/' prefix in the S3 destination path + // const s3DestinationPath = path.join('neuroglancer_trk', 'tract', timestamp).replace(/\\/g, '/'); + + // // Recursively upload all files in the local directory to S3 + // await SkeletonWriter.uploadDirectoryToS3(s3Client, bucketName, localDir, s3DestinationPath); + + // console.log('Uploaded generated files to S3.'); + // } + + /** + * Iteratively uploads all files from a local directory to an AWS S3 bucket. + * @static + * @param {S3Client} s3Client - The AWS S3 client used for the upload. + * @param {string} bucketName - The name of the S3 bucket. + * @param {string} localDirectory - The local directory containing the files to upload. + * @param {string} s3DestinationPath - The destination path in the S3 bucket. + */ + // static async uploadDirectoryToS3( + // s3Client: S3Client, + // bucketName: string, + // localDirectory: string, + // s3DestinationPath: string + // ) { + // const files = SkeletonWriter.getAllFilesInDirectory(localDirectory); + + // for (const filePath of files) { + // // Compute the relative path from the local directory + // const relativeFilePath = path.relative(localDirectory, filePath); + + // // Construct the S3 key by joining the destination path and relative file path (Hashmap) + // const s3Key = path.join(s3DestinationPath, relativeFilePath).replace(/\\/g, '/'); + + // try { + // const fileContent = fs.readFileSync(filePath); + + // const params = { + // Bucket: bucketName, + // Key: s3Key, + // Body: fileContent, + // }; + + // const command = new PutObjectCommand(params); + // await s3Client.send(command); + // console.log(`File uploaded successfully to s3://${bucketName}/${s3Key}`); + // } catch (error) { + // console.error(`Error uploading file ${filePath} to S3:`, error); + // } + // } + // } + + /** + * Interatively collects all file paths in a directory. + * @static + * @param {string} dir - The directory to scan. 
+ * @returns {string[]} An array of file paths found in the directory. + */ + static getAllFilesInDirectory(dir: string): string[] { + let results: string[] = []; + + const list = fs.readdirSync(dir); + list.forEach((file) => { + const filePath = path.join(dir, file); + const stat = fs.statSync(filePath); + if (stat && stat.isDirectory()) { + // Recursively walk subdirectories + results = results.concat(SkeletonWriter.getAllFilesInDirectory(filePath)); + } else { + results.push(filePath); + } + }); + + return results; + } +} + + diff --git a/src/datasource/trk/reader/trackProcessor.ts b/src/datasource/trk/reader/trackProcessor.ts new file mode 100644 index 000000000..06dcc9dc0 --- /dev/null +++ b/src/datasource/trk/reader/trackProcessor.ts @@ -0,0 +1,272 @@ +import fs from 'fs'; +import path from 'path'; +import axios from 'axios'; +import type { Vertex, Edge } from '#src/datasource/trk/reader/skeletonWriter.js'; +import { SkeletonWriter } from '#src/datasource/trk/reader/skeletonWriter.js'; +import type { TrkHeader} from '#src/datasource/trk/reader/trkHeader.js'; +import { TrkHeaderProcessor } from '#src/datasource/trk/reader/trkHeader.js'; +import { VoxelToRASConverter } from '#src/datasource/trk/reader/voxelToRASConverter.js'; + +/** + * Represents the processing state of track data, indicating progress in bytes and tracks. + * @interface + */ +export interface ProcessState { + byteOffset: number; + trackNumber: number; + offset: number; +} + +/** + * Represents a 3D orientation vector. + * @typedef Orientation + */ +type Orientation = [number, number, number]; + +/** + * Manages the processing of track data from TRK files, including streaming, and processing track data. + */ +export class TrackProcessor { + globalHeader: TrkHeader | null; + + /** + * Initializes a new instance of the TrackProcessor class with an optional global header. + * @param {TrkHeader | null} globalHeader - The global header of the TRK file. + */ + constructor(globalHeader: TrkHeader | null = null) { + this.globalHeader = globalHeader; + } + + /** + * Streams the TRK file header from a URL and processes it to set the global header. + * @async + * @param {string} url - The URL of the TRK file. + * @param {number} start - The start byte position for the range request. + * @param {number} end - The end byte position for the range request. + */ + async streamAndProcessHeader(url: string, start: number, end: number) { + try { + const response = await axios.get(url, { + responseType: 'arraybuffer', + headers: { + 'Range': `bytes=${start}-${end}`, + }, + }); + const buffer = Buffer.from(response.data); + this.globalHeader = TrkHeaderProcessor.readTrkHeader(buffer); + TrkHeaderProcessor.printTrkHeader(this.globalHeader); + } catch (error) { + console.error('Error streaming or processing the TRK file header:', error); + } + } + + /** + * Computes the 3D orientation vectors for track points, normalizing them to unit vectors. + * @static + * @param {number[][]} points - The array of 3D points for which to compute orientations. + * @returns {number[][]} An array of normalized 3D orientation vectors. 
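+ * @example
+ * // For points [[0,0,0], [1,0,0], [1,1,0]] the edge directions are [1,0,0]
+ * // and [0,1,0]; the interior vertex averages its two edges, so after
+ * // normalization the result is approximately:
+ * // [[1, 0, 0], [0.707, 0.707, 0], [0, 1, 0]]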
+ */ + static computeOrientation(points: number[][]): number[][] { + // Step 1: Compute directed orientation of each edge + let orient: number[][] = points.slice(1).map((point, i) => { + return [ + point[0] - points[i][0], + point[1] - points[i][1], + point[2] - points[i][2] + ]; + }); + + // Step 2: Compute orientation for each vertex + const originalOrient = [...orient]; + orient = [ + ...originalOrient.slice(0, 1), // First vertex (only one edge) + ...originalOrient.slice(0, -1).map((o, i) => { + return [ + o[0] + orient[i + 1][0], // x + o[1] + orient[i + 1][1], // y + o[2] + orient[i + 1][2] // z + ]; + }), + ...originalOrient.slice(-1) // Last vertex (only one edge) + ]; + + // Step 3: Normalize orientation vectors to unit length + orient = orient.map((o: number[]) => { + const length = Math.sqrt(o[0] * o[0] + o[1] * o[1] + o[2] * o[2]); + const normalizedLength = Math.max(length, 1e-12); // Avoid division by 0 + return [o[0] / normalizedLength, o[1] / normalizedLength, o[2] / normalizedLength] as Orientation; + }); + + + return orient; + } + + /** + * Processes the track data for selected track numbers and writes the result to disk. + * @async + * @param {number[]} randomTrackNumbers - The array of track numbers to be processed. + * @param {number} trackNumber - The current track number being processed. + * @param {string} filePath - The file path of the TRK file. + * @returns {Promise<{processState: ProcessState; timestamp: string}>} A promise that resolves to the processing state and a timestamp. + */ + async processTrackData( randomTrackNumbers: number[], trackNumber: number, filePath: string): Promise<{ processState: ProcessState; timestamp: string }> { + + // Get the current date and time + const now = new Date(); + + // Format the timestamp as YYYYMMDD_HHMMSS + const timestamp = now.toISOString().replace(/[-:]/g, '').replace('T', '_').slice(0, 15); + + + if (!this.globalHeader) { + console.error('Error: Global header is not initialized.'); + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; + } + + const outputFilePath = path.join(__dirname, 'track_data.txt'); + const writeStream = fs.createWriteStream(outputFilePath, { flags: 'a' }); + + const maxTracksToProcess = randomTrackNumbers.length; + const vertices: Vertex[] = []; + const edges: Edge[] = []; + const orientations: number[][] = []; + let trackProcessedCount = 0; + let vertexIndex = 0; + + try { + + const { dataView, buffer } = await this.loadFileBuffer(filePath); + console.log('Buffer length:', buffer.length); + console.log('DataView length:', dataView.byteLength); + + let offset = 1000; + + while (trackProcessedCount < maxTracksToProcess && offset < buffer.byteLength) { + // Read the number of points in the track (first 4 bytes) + const n_points = dataView.getInt32(offset, true); // true indicates little-endian byte order. 
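+          // Per the TrackVis .trk layout, each track record is:
+          //   int32 n_points
+          //   n_points * (3 + n_scalars) float32 values (x, y, z [+ scalars])
+          //   n_properties float32 values
+          // The fixed 12-byte stride used below assumes n_scalars = 0 and
+          // n_properties = 0; a fully general reader would take both counts
+          // from the header parsed by TrkHeaderProcessor.readTrkHeader.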
+ offset += 4; + + writeStream.write(`Track ${trackNumber} processed, number of points: ${n_points}\n`); + + // Only process the track if it is in the random track numbers + if (randomTrackNumbers.includes(trackNumber)) { + // Process each point in the track (x, y, z -> 12 bytes per point) + const points: number[][] = []; + for (let i = 0; i < n_points; i++) { + const x = dataView.getFloat32(offset, true); + const y = dataView.getFloat32(offset + 4, true); + const z = dataView.getFloat32(offset + 8, true); + offset += 12; + points.push([x, y, z]); + + const voxelPoint: [number, number, number] = [x, y, z]; + const affine = + VoxelToRASConverter.getAffineToRasmm(this.globalHeader); + const rasPoint = VoxelToRASConverter.applyAffineMatrix(voxelPoint, affine); + + // Add vertex data + vertices.push({ x: rasPoint[0], y: rasPoint[1], z: rasPoint[2] }); + + // Add edge data + if (i > 0) { + edges.push({ vertex1: vertexIndex - 1, vertex2: vertexIndex }); + } + vertexIndex++; + } + + // Compute and add orientation for the tract + const orient = TrackProcessor.computeOrientation(points); + orientations.push(...orient); + + trackProcessedCount++; // Increment the number of processed tracks + + if (trackProcessedCount >= maxTracksToProcess) { + const outputDirectory = path.resolve(__dirname, '..', 'src'); + const { binaryFilePath, propInfoFilePath, skeletonInfoFilePath } = SkeletonWriter.generateSkeletonFilePaths(outputDirectory, timestamp); + + SkeletonWriter.writeSkeleton(vertices, edges, orientations, binaryFilePath); + SkeletonWriter.writePropInfo(propInfoFilePath); + SkeletonWriter.writeSkeletonInfo(skeletonInfoFilePath); + + console.log(`Processed ${maxTracksToProcess} random tracks and wrote skeleton and info files.`); + + // SkeletonWriter.uploadSkeletonFilePathsToS3(outputDirectory, timestamp); + + console.log(`Uploaded tracks to S3.`) + + break; + } + } else { + offset += n_points * 12; // Skip the track data if it's not in the selected tracks + } + + trackNumber++; + } + + writeStream.end(); + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; + + } catch (error) { + + console.error('Error fetching or processing track data:', error); + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; + + } + } + + /** + * Shuffles and selects a random number of track indices from a total number of tracks. + * @param {number} totalTracks - The total number of tracks available. + * @param {number} numTracks - The number of tracks to select. + * @returns {number[]} An array of randomly selected track indices. + */ + getRandomTrackIndices(totalTracks: number, numTracks: number): number[] { + const trackIndices = Array.from({ length: totalTracks }, (_, i) => i + 1); // Create an array of track numbers + for (let i = trackIndices.length - 1; i > 0; i--) { + const j = Math.floor(Math.random() * (i + 1)); + [trackIndices[i], trackIndices[j]] = [trackIndices[j], trackIndices[i]]; // Shuffle array + } + return trackIndices.slice(0, numTracks); // Return the first `numTracks` tracks + } + + /** + * Loads the binary data of a file from a URL or local path into a buffer and creates a DataView for processing. + * @param {string} filePath - The URL or local path of the file to load. + * @returns {Promise<{dataView: DataView; buffer: Buffer}>} A promise that resolves to the DataView and buffer of the file. 
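+ * @example
+ * // Hypothetical usage (URL is illustrative only):
+ * // const { dataView } = await processor.loadFileBuffer('https://example.org/sample.trk');
+ * // const nCount = dataView.getInt32(988, true); // n_count sits at byte 988 of the 1000-byte header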
+ */ + loadFileBuffer(filePath: string) { + if (filePath.startsWith('http://') || filePath.startsWith('https://')) { + // Handle URL loading with axios + return axios.get(filePath, { responseType: 'arraybuffer' }) + .then(response => { + const buffer = Buffer.from(response.data); + const dataView = new DataView(buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength)); + console.log('Data loaded from URL successfully.'); + return { + dataView, + buffer + }; + }) + .catch(error => { + console.error('Failed to load file from URL:', error); + throw error; + }); + } else { + // Handle local file loading with fs + try { + const absolutePath = path.resolve(filePath); + const buffer = fs.readFileSync(absolutePath); + const dataView = new DataView(buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength)); + console.log('Data loaded from local file successfully.'); + return { + dataView, + buffer + }; + } catch (error) { + console.error('Failed to load local file:', error); + throw error; + } + } + } + +} diff --git a/src/datasource/trk/reader/trkHeader.ts b/src/datasource/trk/reader/trkHeader.ts new file mode 100644 index 000000000..ba07d0ee5 --- /dev/null +++ b/src/datasource/trk/reader/trkHeader.ts @@ -0,0 +1,182 @@ +/** + * Represents the header of a Track (TRK) file, describing how track data is structured. + * @interface + */ +export interface TrkHeader { + id_string: string; + dim: [number, number, number]; + voxel_size: [number, number, number]; + origin: [number, number, number]; + n_scalars: number; + scalar_name: string[]; + n_properties: number; + property_name: string[]; + vox_to_ras: number[][]; + voxel_order: string; + image_orientation_patient: [number, number, number, number, number, number]; + invert_x: boolean; + invert_y: boolean; + invert_z: boolean; + swap_xy: boolean; + swap_yz: boolean; + swap_zx: boolean; + n_count: number; + version: number; + hdr_size: number; +} + +/** + * Handles reading and processing the header of TRK files. + */ +export class TrkHeaderProcessor { + + /** + * Reads the header from a buffer and returns a structured object. + * @static + * @param {Buffer} buffer The buffer containing binary data of a TRK file header. + * @returns {TrkHeader} The parsed header as a structured object. 
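+ * @remarks The TRK header is a fixed 1000-byte, little-endian structure (TrackVis format);
+ * a valid file starts with the id_string "TRACK" and reports hdr_size === 1000.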
+ */ + static readTrkHeader(buffer: Buffer): TrkHeader { + + let offset = 0; + + const readChars = (length: number) => { + const value = buffer.toString('ascii', offset, offset + length).replace(/\0/g, ''); + offset += length; + return value; + }; + + const readShorts = (length: number): number[] => { + const values: number[] = []; + for (let i = 0; i < length; i++) { + values.push(buffer.readInt16LE(offset)); + offset += 2; + } + return values; + }; + + const readFloats = (length: number): number[] => { + const values: number[] = []; + for (let i = 0; i < length; i++) { + values.push(buffer.readFloatLE(offset)); + offset += 4; + } + return values; + }; + + const readMatrix = (rows: number, cols: number): number[][] => { + const matrix: number[][] = []; + for (let i = 0; i < rows; i++) { + const row: number[] = []; + for (let j = 0; j < cols; j++) { + row.push(buffer.readFloatLE(offset)); + offset += 4; + } + matrix.push(row); + } + return matrix; + }; + + const readUChar = (): boolean => { + const value = buffer.readUInt8(offset); + offset += 1; + return value !== 0; + }; + + const header: TrkHeader = { + id_string: readChars(6), + dim: readShorts(3) as [number, number, number], + voxel_size: readFloats(3) as [number, number, number], + origin: readFloats(3) as [number, number, number], + n_scalars: buffer.readInt16LE(offset), + scalar_name: [], + n_properties: 0, + property_name: [], + vox_to_ras: [], + voxel_order: '', + image_orientation_patient: [0, 0, 0, 0, 0, 0], + invert_x: false, + invert_y: false, + invert_z: false, + swap_xy: false, + swap_yz: false, + swap_zx: false, + n_count: 0, + version: 0, + hdr_size: 0, + }; + offset += 2; + + // Scalar names (10 names x 20 chars each = 200 bytes) + for (let i = 0; i < 10; i++) { + header.scalar_name.push(readChars(20)); + } + + header.n_properties = buffer.readInt16LE(offset); + offset += 2; + + // Property names (10 names x 20 chars each = 200 bytes) + for (let i = 0; i < 10; i++) { + header.property_name.push(readChars(20)); + } + + header.vox_to_ras = readMatrix(4, 4); + + offset += 444; // Skipped: Reserved space for future version. + + header.voxel_order = readChars(4); + offset += 4; // Skipped: paddings + + header.image_orientation_patient = readFloats(6) as [number, number, number, number, number, number]; + + offset += 2; // Skipped: paddings + + header.invert_x = readUChar(); + header.invert_y = readUChar(); + header.invert_z = readUChar(); + header.swap_xy = readUChar(); + header.swap_yz = readUChar(); + header.swap_zx = readUChar(); + + header.n_count = buffer.readInt32LE(offset); + offset += 4; + + header.version = buffer.readInt32LE(offset); + offset += 4; + + header.hdr_size = buffer.readInt32LE(offset); + offset += 4; + + return header; + } + + /** + * Prints detailed information about the TRK file header to the console. + * @static + * @param {TrkHeader} header The TRK header to be printed. 
+ */
+ static printTrkHeader(header: TrkHeader): void {
+ console.log('--- TRK File Metadata ---');
+ console.log(`ID String: ${header.id_string}`);
+ console.log(`Dimensions: ${header.dim.join(' x ')}`);
+ console.log(`Voxel Size: ${header.voxel_size.join(', ')}`);
+ console.log(`Origin: ${header.origin.join(', ')}`);
+ console.log(`Number of Scalars per Point: ${header.n_scalars}`);
+ console.log(`Scalar Names: ${header.scalar_name.filter(name => name).join(', ')}`);
+ console.log(`Number of Properties per Track: ${header.n_properties}`);
+ console.log(`Property Names: ${header.property_name.filter(name => name).join(', ')}`);
+ console.log('Voxel to RAS Matrix:');
+ header.vox_to_ras.forEach(row => console.log(` [${row.join(', ')}]`));
+ console.log(`Voxel Order: ${header.voxel_order}`);
+ console.log(`Image Orientation (Patient): ${header.image_orientation_patient.join(', ')}`);
+ console.log(`Invert X: ${header.invert_x}`);
+ console.log(`Invert Y: ${header.invert_y}`);
+ console.log(`Invert Z: ${header.invert_z}`);
+ console.log(`Swap XY: ${header.swap_xy}`);
+ console.log(`Swap YZ: ${header.swap_yz}`);
+ console.log(`Swap ZX: ${header.swap_zx}`);
+ console.log(`Number of Tracks: ${header.n_count}`);
+ console.log(`Version: ${header.version}`);
+ console.log(`Header Size: ${header.hdr_size}`);
+ }
+}
diff --git a/src/datasource/trk/reader/voxelToRASConverter.ts b/src/datasource/trk/reader/voxelToRASConverter.ts
new file mode 100644
index 000000000..71765feeb
--- /dev/null
+++ b/src/datasource/trk/reader/voxelToRASConverter.ts
@@ -0,0 +1,271 @@
+import * as math from 'mathjs';
+import { multiply } from 'mathjs';
+import type { TrkHeader } from '#src/datasource/trk/reader/trkHeader.js';
+
+/**
+ * Provides methods for converting voxel coordinates to RAS coordinates using affine transformations.
+ */
+export class VoxelToRASConverter {
+
+ /**
+ * Applies an affine transformation to a 3D point to convert voxel coordinates to RAS coordinates.
+ * @param {number[]} point - The voxel coordinates to transform.
+ * @param {number[][]} aff - The 4x4 affine transformation matrix.
+ * @returns {number[]} The RAS coordinates resulting from the transformation.
+ */
+ static applyAffineMatrix(point: number[], aff: number[][]): number[] {
+ const [x, y, z] = point;
+ const transformed = [
+ aff[0][0] * x + aff[0][1] * y + aff[0][2] * z + aff[0][3],
+ aff[1][0] * x + aff[1][1] * y + aff[1][2] * z + aff[1][3],
+ aff[2][0] * x + aff[2][1] * y + aff[2][2] * z + aff[2][3]
+ ];
+ return transformed;
+ }
+
+ /**
+ * Builds the affine transformation that maps TRK voxel-mm coordinates to RAS-mm space,
+ * based on the voxel size, voxel order, and vox_to_ras matrix stored in the TRK header.
+ *
+ * This function is adapted from the corresponding affine construction in the nibabel library.
+ * See the original implementation here: [Nibabel Repository](https://github.com/nipy/nibabel/blob/83eaf0b55be9e9079bf9ad64975b71c22523f5f0/nibabel/streamlines/trk.py#L60C5-L60C33)
+ *
+ * @param {TrkHeader} header - The TRK header supplying voxel size, voxel order, and vox_to_ras.
+ * @returns {number[][]} The 4x4 affine matrix from TRK voxel-mm to RAS-mm coordinates.
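+ * @example
+ * // Sanity check under assumed inputs: for 1 mm isotropic voxels, an identity
+ * // vox_to_ras matrix and voxel_order "RAS", the returned matrix reduces to a
+ * // -0.5 mm translation on each axis (the half-voxel corner-to-center shift).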
+ */ + static getAffineToRasmm(header: TrkHeader): number[][] { + + // Create an identity matrix for the affine transformation + let affine = math.identity(4) as math.Matrix; + + // Apply scale: adjust voxel space based on voxel size + const scale = math.identity(4) as math.Matrix; + for (let i = 0; i < 3; i++) { + scale.set([i, i], 1 / header.voxel_size[i]); // Scale by voxel size + } + affine = math.multiply(scale, affine) as math.Matrix; + + // Apply offset: Shift by half a voxel to account for corner/center discrepancy + const offset = math.identity(4) as math.Matrix; + for (let i = 0; i < 3; i++) { + offset.set([i, 3], -0.5); + } + affine = math.multiply(offset, affine) as math.Matrix; + + // Apply Orientation: If the voxel order implied by the affine does not match the voxel order in the TRK header, change the orientation. + const vox_order = header.voxel_order; + const affine_ornt = VoxelToRASConverter.aff2axcodes(header.vox_to_ras); + // Convert voxel order to orientation array + const header_ornt = VoxelToRASConverter.axcodes2orntTrans(vox_order.split('')); + // Convert affine orientation string to orientation array + const affine_ornt_array = VoxelToRASConverter.axcodes2orntTrans(affine_ornt); + // Create a transformation between the header and affine orientations + const ornt = VoxelToRASConverter.orntTransform(header_ornt, affine_ornt_array) + // Compute the affine transformation matrix M + const M = VoxelToRASConverter.invOrntAff(ornt, header.dim); + // Update the affine matrix by applying M to the existing affine matrix + + const affine_transformed = multiply(math.matrix(M), math.matrix(affine)).toArray(); + const voxelToRASMatrix = math.matrix(header.vox_to_ras); + const affine_voxmm_to_rasmm = math.multiply(voxelToRASMatrix, affine_transformed) as math.Matrix; + + // Convert the final affine matrix back to a 2D array (number[][]) and return + return affine_voxmm_to_rasmm.toArray() as number[][]; + + } + + /** + * Converts an affine matrix to axis direction codes. + * @param {number[][]} aff - The affine transformation matrix. + * @param {string[][]} [labels=[['L', 'R'], ['P', 'A'], ['I', 'S']]] - Optional labels representing the axis directions. + * @returns {string[]} An array of strings representing the axis directions. + */ + static aff2axcodes(aff: number[][], labels: [string, string][] = [['L', 'R'], ['P', 'A'], ['I', 'S']]): string[] { + const ornt = VoxelToRASConverter.io_orientation(aff); + return VoxelToRASConverter.orntInfo2axcodes(ornt, labels); + } + + /** + * Computes the orientation of the axes from an affine matrix using Singular Value Decomposition. + * @param {number[][]} aff - The affine transformation matrix. + * @returns {number[][]} An array representing the orientation of each axis. 
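+ * @example
+ * // Assumed example: an axis-aligned LPS affine (x and y axes negated) yields
+ * // [[0, -1], [1, -1], [2, 1]], i.e. the first two axes flipped, third unchanged.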
+ */ + static io_orientation(aff: number[][]): number[][] { + const n = aff.length - 1; + const m = aff[0].length - 1; + + if (n !== m) { + throw new Error('Affine matrix must be square'); + } + + // Extract rotation part of the affine matrix (ignoring translation) + const rotation = aff.slice(0, n).map(row => row.slice(0, m)); + + // Singular Value Decomposition (SVD) to get the axis orientation + const invRotation = math.inv(rotation); + const invTrans = math.transpose(invRotation); + + // Calculate the orientation using absolute values + const orientation = math.zeros([n, 2]) as number[][]; + for (let i = 0; i < n; i++) { + let maxVal = 0; + let maxIndex = 0; + for (let j = 0; j < m; j++) { + const val = math.abs(invTrans[i][j]); + if (val > maxVal) { + maxVal = val; + maxIndex = j; + } + } + const direction = invTrans[i][maxIndex] > 0 ? 1 : -1; + orientation[i] = [maxIndex, direction]; + } + + return orientation; + } + + /** + * Converts orientation information into axis direction labels. + * @param {number[][]} ornt - The orientation information. + * @param {string[][]} [labels=[['L', 'R'], ['P', 'A'], ['I', 'S']]] - Optional labels representing the axis directions. + * @returns {string[]} An array of strings representing the axis directions. + */ + static orntInfo2axcodes(ornt: number[][], labels: [string, string][] = [['L', 'R'], ['P', 'A'], ['I', 'S']]): string[] { + return ornt.map(([axis, direction]) => { + if (isNaN(axis)) { + return ''; + } + const axisInt = Math.round(axis); + if (direction === 1) { + return labels[axisInt][1]; // Positive direction + } else if (direction === -1) { + return labels[axisInt][0]; // Negative direction + } else { + throw new Error('Direction should be -1 or 1'); + } + }); + } + + /** + * Converts axis codes to an orientation array. + * @param {string[]} axcodes - The axis codes. + * @param {string[][]} [labels=[['L', 'R'], ['P', 'A'], ['I', 'S']]] - Optional labels representing the axis directions. + * @returns {number[][]} An array representing the orientation of each axis. + */ + static axcodes2orntTrans(axcodes: string[], labels?: [string, string][]): number[][] { + // Default labels corresponding to RAS coordinate system + labels = labels || [['L', 'R'], ['P', 'A'], ['I', 'S']]; + + // Flatten labels for validation + const allowedLabels: Set = new Set(labels.flat()); + + // Validate input axcodes + if (!axcodes.every(axcode => allowedLabels.has(axcode))) { + throw new Error(`Not all axis codes [${axcodes}] are in label set [${Array.from(allowedLabels)}]`); + } + + // Create orientation array + const nAxes: number = axcodes.length; + const ornt: number[][] = Array.from({ length: nAxes }, () => [NaN, NaN]); + + // Fill orientation array + axcodes.forEach((code, codeIdx) => { + labels.forEach((codes, labelIdx) => { + if (code === codes[0]) { + ornt[codeIdx] = [labelIdx, -1]; // Negative direction + } else if (code === codes[1]) { + ornt[codeIdx] = [labelIdx, 1]; // Positive direction + } + }); + }); + + return ornt; + } + + /** + * Computes the transformation required to match a starting orientation to an ending orientation. + * @param {number[][]} startOrnt - The starting orientation. + * @param {number[][]} endOrnt - The desired ending orientation. + * @returns {number[][]} An array representing the transformation matrix to adjust the orientation. 
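+ * @example
+ * // Assumed example: reorienting LPS data to RAS flips the first two axes:
+ * // orntTransform([[0,-1],[1,-1],[2,1]], [[0,1],[1,1],[2,1]]) -> [[0,-1],[1,-1],[2,1]]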
+ */ + static orntTransform(startOrnt: number[][], endOrnt: number[][]): number[][] { + if (startOrnt.length !== endOrnt.length || startOrnt[0].length !== 2 || endOrnt[0].length !== 2) { + throw new Error('The orientations must have the same shape and each should be an (n,2) array'); + } + + const result: number[][] = new Array(startOrnt.length).fill(null).map(() => [0, 0]); + + endOrnt.forEach((end, endInIdx) => { + const endOutIdx = end[0]; + const endFlip = end[1]; + let found = false; + + startOrnt.forEach((start, startInIdx) => { + const startOutIdx = start[0]; + const startFlip = start[1]; + + if (endOutIdx === startOutIdx) { + if (startFlip === endFlip) { + result[startInIdx] = [endInIdx, 1]; + } else { + result[startInIdx] = [endInIdx, -1]; + } + found = true; + } + }); + + if (!found) { + throw new Error(`Unable to find out axis ${endOutIdx} in startOrnt`); + } + }); + + return result; + } + + /** + * Computes the inverse of the orientation transform for an affine matrix. + * @param {number[][]} orntInput - The orientation information. + * @param {number[]} shapeInput - The shape of the data corresponding to the orientation. + * @returns {number[][]} An array representing the inverse transformation matrix. + */ + static invOrntAff(orntInput: number[][], shapeInput: number[]) { + const ornt = math.matrix(orntInput); + const p = ornt.size()[0]; + const shape = shapeInput.slice(0, p); + + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const axisTranspose = ornt.toArray().map((row: any) => row[0]); + const identityMatrix = math.identity(p + 1) as math.Matrix; + + let undoReorder = math.matrix(math.zeros(p + 1, p + 1)); // Create a zero matrix + axisTranspose.push(p); + axisTranspose.forEach((newIndex: number, i: number) => { + const row = identityMatrix.subset(math.index(i, math.range(0, p + 1))) as math.Matrix; + undoReorder = math.subset(undoReorder, math.index(newIndex, math.range(0, p + 1)), row); + }); + + // Create undo_flip as a diagonal matrix + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const flips = ornt.toArray().map((row: any) => row[1]); + let undoFlip = math.diag([...flips, 1.0]) as math.Matrix; + + // Calculate center transformation corrections for flip + const centerTrans = math.multiply(math.subtract(shape, 1), -0.5); + const correction = math.multiply(flips, centerTrans); + const updatedCenterTrans = math.subtract(correction, centerTrans); + + // Manually set the translations for flip corrections + flips.forEach((flip: number, index: number) => { + if (flip !== 1) { // Only adjust if there is a flip + const value = updatedCenterTrans.get([index]); + undoFlip = math.subset(undoFlip, math.index(index, p), value); + } + }); + + // Compose the transformations to get the final affine transformation matrix + const transformAffine = math.multiply(undoFlip, undoReorder); + return transformAffine; + } +} + + From 5c020ff5ada8d3135dd7efb42c499a2e9d46a195 Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Wed, 16 Oct 2024 17:07:53 -0400 Subject: [PATCH 05/12] read header info and process the tracks --- package-lock.json | 402 +++++++++++++++++++- package.json | 5 +- src/datasource/trk/frontend.ts | 230 +++++++---- src/datasource/trk/reader/trackProcessor.ts | 75 ++-- 4 files changed, 594 insertions(+), 118 deletions(-) diff --git a/package-lock.json b/package-lock.json index 1453fe542..88e158458 100644 --- a/package-lock.json +++ b/package-lock.json @@ -10,6 +10,8 @@ "license": "Apache-2.0", "dependencies": { "axios": "^1.7.7", + 
"browserify-fs": "^1.0.0", + "buffer": "^6.0.3", "codemirror": "^5.61.1", "dotenv": "^16.4.5", "gl-matrix": "3.1.0", @@ -19,7 +21,8 @@ "mathjs": "^13.2.0", "nifti-reader-js": "^0.6.8", "numcodecs": "^0.3.1", - "pako": "^2.1.0" + "pako": "^2.1.0", + "path-browserify": "^1.0.1" }, "devDependencies": { "@types/codemirror": "5.60.15", @@ -2586,6 +2589,23 @@ "node": ">=6.5" } }, + "node_modules/abstract-leveldown": { + "version": "0.12.4", + "resolved": "https://registry.npmjs.org/abstract-leveldown/-/abstract-leveldown-0.12.4.tgz", + "integrity": "sha512-TOod9d5RDExo6STLMGa+04HGkl+TlMfbDnTyN93/ETJ9DpQ0DaYLqcMZlbXvdc4W3vVo1Qrl+WhSp8zvDsJ+jA==", + "license": "MIT", + "dependencies": { + "xtend": "~3.0.0" + } + }, + "node_modules/abstract-leveldown/node_modules/xtend": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-3.0.0.tgz", + "integrity": "sha512-sp/sT9OALMjRW1fKDlPeuSZlDQpkqReA0pyJukniWbTGoEKefHxhGJynE3PNhUMlcM8qWIjPwecwCw4LArS5Eg==", + "engines": { + "node": ">=0.4" + } + }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -3333,7 +3353,6 @@ "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, "funding": [ { "type": "github", @@ -3576,6 +3595,16 @@ "resolved": "https://registry.npmjs.org/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz", "integrity": "sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow==" }, + "node_modules/browserify-fs": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/browserify-fs/-/browserify-fs-1.0.0.tgz", + "integrity": "sha512-8LqHRPuAEKvyTX34R6tsw4bO2ro6j9DmlYBhiYWHRM26Zv2cBw1fJOU0NeUQ0RkXkPn/PFBjhA0dm4AgaBurTg==", + "dependencies": { + "level-filesystem": "^1.0.1", + "level-js": "^2.1.3", + "levelup": "^0.18.2" + } + }, "node_modules/browserslist": { "version": "4.23.0", "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.0.tgz", @@ -3612,7 +3641,6 @@ "version": "6.0.3", "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "dev": true, "funding": [ { "type": "github", @@ -3627,6 +3655,7 @@ "url": "https://feross.org/support" } ], + "license": "MIT", "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.2.1" @@ -4020,6 +4049,15 @@ "url": "https://github.com/chalk/wrap-ansi?sponsor=1" } }, + "node_modules/clone": { + "version": "0.1.19", + "resolved": "https://registry.npmjs.org/clone/-/clone-0.1.19.tgz", + "integrity": "sha512-IO78I0y6JcSpEPHzK4obKdsL7E7oLdRVDVOLwr2Hkbjsb+Eoz0dxW6tef0WizoKu0gLC4oZSZuEF4U2K6w1WQw==", + "license": "MIT", + "engines": { + "node": "*" + } + }, "node_modules/clone-deep": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", @@ -4856,6 +4894,15 @@ "node": ">=10" } }, + "node_modules/deferred-leveldown": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/deferred-leveldown/-/deferred-leveldown-0.2.0.tgz", + "integrity": "sha512-+WCbb4+ez/SZ77Sdy1iadagFiVzMB89IKOBhglgnUkVxOxRWmmFsz8UDSNWh4Rhq+3wr/vMFlYj+rdEwWUDdng==", + "license": "MIT", + "dependencies": { + "abstract-leveldown": "~0.12.1" + } + }, "node_modules/define-data-property": { "version": "1.1.1", "resolved": 
"https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.1.tgz", @@ -5320,6 +5367,18 @@ "node": ">=4" } }, + "node_modules/errno": { + "version": "0.1.8", + "resolved": "https://registry.npmjs.org/errno/-/errno-0.1.8.tgz", + "integrity": "sha512-dJ6oBr5SQ1VSd9qkk7ByRgb/1SH4JZjCHSW/mr63/QcXO9zLVxvJ6Oy13nio03rxpSnVDDjFor75SjVeZWPW/A==", + "license": "MIT", + "dependencies": { + "prr": "~1.0.1" + }, + "bin": { + "errno": "cli.js" + } + }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", @@ -7004,6 +7063,12 @@ "is-callable": "^1.1.3" } }, + "node_modules/foreach": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.6.tgz", + "integrity": "sha512-k6GAGDyqLe9JaebCsFCoudPPWfihKu8pylYXRlqP1J7ms39iPoTtk2fviNglIeQEwdh0bQeKJ01ZPyuyQvKzwg==", + "license": "MIT" + }, "node_modules/foreground-child": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", @@ -7405,6 +7470,38 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/fwd-stream": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/fwd-stream/-/fwd-stream-1.0.4.tgz", + "integrity": "sha512-q2qaK2B38W07wfPSQDKMiKOD5Nzv2XyuvQlrmh1q0pxyHNanKHq8lwQ6n9zHucAwA5EbzRJKEgds2orn88rYTg==", + "dependencies": { + "readable-stream": "~1.0.26-4" + } + }, + "node_modules/fwd-stream/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", + "license": "MIT" + }, + "node_modules/fwd-stream/node_modules/readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/fwd-stream/node_modules/string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==", + "license": "MIT" + }, "node_modules/geckodriver": { "version": "4.4.2", "resolved": "https://registry.npmjs.org/geckodriver/-/geckodriver-4.4.2.tgz", @@ -8243,11 +8340,16 @@ "postcss": "^8.1.0" } }, + "node_modules/idb-wrapper": { + "version": "1.7.2", + "resolved": "https://registry.npmjs.org/idb-wrapper/-/idb-wrapper-1.7.2.tgz", + "integrity": "sha512-zfNREywMuf0NzDo9mVsL0yegjsirJxHpKHvWcyRozIqQy89g0a3U+oBPOCN4cc0oCiOuYgZHimzaW/R46G1Mpg==", + "license": "MIT" + }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, "funding": [ { "type": "github", @@ -8408,6 +8510,11 @@ "node": ">=0.8.19" } }, + "node_modules/indexof": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", + "integrity": "sha512-i0G7hLJ1z0DE8dsqJa2rycj9dBmNKgXBvotXtZYXakU9oivfB9Uj2ZBC27qqef2U58/ZLwalxa1X/RDCdkHtVg==" + }, "node_modules/inflight": { "version": "1.0.6", "resolved": 
"https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", @@ -8488,6 +8595,14 @@ "node": ">= 10" } }, + "node_modules/is": { + "version": "0.2.7", + "resolved": "https://registry.npmjs.org/is/-/is-0.2.7.tgz", + "integrity": "sha512-ajQCouIvkcSnl2iRdK70Jug9mohIHVX9uKpoWnl115ov0R5mzBvRrXxrnHbsA+8AdwCwc/sfw7HXmd4I5EJBdQ==", + "engines": { + "node": "*" + } + }, "node_modules/is-array-buffer": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.2.tgz", @@ -8743,6 +8858,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/is-object": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/is-object/-/is-object-0.1.2.tgz", + "integrity": "sha512-GkfZZlIZtpkFrqyAXPQSRBMsaHAw+CgoKe2HXAkjd/sfoI9+hS8PT4wg2rJxdQyUKr7N2vHJbg7/jQtE5l5vBQ==" + }, "node_modules/is-path-inside": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz", @@ -8918,6 +9038,12 @@ "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==" }, + "node_modules/isbuffer": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/isbuffer/-/isbuffer-0.0.0.tgz", + "integrity": "sha512-xU+NoHp+YtKQkaM2HsQchYn0sltxMxew0HavMfHbjnucBoTSGbw745tL+Z7QBANleWM1eEQMenEpi174mIeS4g==", + "license": "MIT" + }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -9408,6 +9534,234 @@ "integrity": "sha512-XI5MPzVNApjAyhQzphX8BkmKsKUxD4LdyK24iZeQGinBN9yTQT3bFlCBy/aVx2HrNcqQGsdot8ghrjyrvMCoEA==", "deprecated": "use String.prototype.padStart()" }, + "node_modules/level-blobs": { + "version": "0.1.7", + "resolved": "https://registry.npmjs.org/level-blobs/-/level-blobs-0.1.7.tgz", + "integrity": "sha512-n0iYYCGozLd36m/Pzm206+brIgXP8mxPZazZ6ZvgKr+8YwOZ8/PPpYC5zMUu2qFygRN8RO6WC/HH3XWMW7RMVg==", + "dependencies": { + "level-peek": "1.0.6", + "once": "^1.3.0", + "readable-stream": "^1.0.26-4" + } + }, + "node_modules/level-blobs/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", + "license": "MIT" + }, + "node_modules/level-blobs/node_modules/readable-stream": { + "version": "1.1.14", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.1.14.tgz", + "integrity": "sha512-+MeVjFf4L44XUkhM1eYbD8fyEsxcV81pqMSR5gblfcLCHfZvbrqy4/qYHE+/R5HoBUT11WV5O08Cr1n3YXkWVQ==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/level-blobs/node_modules/string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==", + "license": "MIT" + }, + "node_modules/level-filesystem": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/level-filesystem/-/level-filesystem-1.2.0.tgz", + "integrity": "sha512-PhXDuCNYpngpxp3jwMT9AYBMgOvB6zxj3DeuIywNKmZqFj2djj9XfT2XDVslfqmo0Ip79cAd3SBy3FsfOZPJ1g==", + "dependencies": { + "concat-stream": "^1.4.4", + "errno": "^0.1.1", + "fwd-stream": "^1.0.4", + "level-blobs": "^0.1.7", + "level-peek": "^1.0.6", + "level-sublevel": 
"^5.2.0", + "octal": "^1.0.0", + "once": "^1.3.0", + "xtend": "^2.2.0" + } + }, + "node_modules/level-filesystem/node_modules/xtend": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-2.2.0.tgz", + "integrity": "sha512-SLt5uylT+4aoXxXuwtQp5ZnMMzhDb1Xkg4pEqc00WUJCQifPfV9Ub1VrNhp9kXkrjZD2I2Hl8WnjP37jzZLPZw==", + "engines": { + "node": ">=0.4" + } + }, + "node_modules/level-fix-range": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/level-fix-range/-/level-fix-range-1.0.2.tgz", + "integrity": "sha512-9llaVn6uqBiSlBP+wKiIEoBa01FwEISFgHSZiyec2S0KpyLUkGR4afW/FCZ/X8y+QJvzS0u4PGOlZDdh1/1avQ==", + "license": "MIT" + }, + "node_modules/level-hooks": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/level-hooks/-/level-hooks-4.5.0.tgz", + "integrity": "sha512-fxLNny/vL/G4PnkLhWsbHnEaRi+A/k8r5EH/M77npZwYL62RHi2fV0S824z3QdpAk6VTgisJwIRywzBHLK4ZVA==", + "dependencies": { + "string-range": "~1.2" + } + }, + "node_modules/level-js": { + "version": "2.2.4", + "resolved": "https://registry.npmjs.org/level-js/-/level-js-2.2.4.tgz", + "integrity": "sha512-lZtjt4ZwHE00UMC1vAb271p9qzg8vKlnDeXfIesH3zL0KxhHRDjClQLGLWhyR0nK4XARnd4wc/9eD1ffd4PshQ==", + "license": "BSD-2-Clause", + "dependencies": { + "abstract-leveldown": "~0.12.0", + "idb-wrapper": "^1.5.0", + "isbuffer": "~0.0.0", + "ltgt": "^2.1.2", + "typedarray-to-buffer": "~1.0.0", + "xtend": "~2.1.2" + } + }, + "node_modules/level-js/node_modules/object-keys": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-0.4.0.tgz", + "integrity": "sha512-ncrLw+X55z7bkl5PnUvHwFK9FcGuFYo9gtjws2XtSzL+aZ8tm830P60WJ0dSmFVaSalWieW5MD7kEdnXda9yJw==", + "license": "MIT" + }, + "node_modules/level-js/node_modules/xtend": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-2.1.2.tgz", + "integrity": "sha512-vMNKzr2rHP9Dp/e1NQFnLQlwlhp9L/LfvnsVdHxN1f+uggyVI3i08uD14GPvCToPkdsRfyPqIyYGmIk58V98ZQ==", + "dependencies": { + "object-keys": "~0.4.0" + }, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/level-peek": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/level-peek/-/level-peek-1.0.6.tgz", + "integrity": "sha512-TKEzH5TxROTjQxWMczt9sizVgnmJ4F3hotBI48xCTYvOKd/4gA/uY0XjKkhJFo6BMic8Tqjf6jFMLWeg3MAbqQ==", + "license": "MIT", + "dependencies": { + "level-fix-range": "~1.0.2" + } + }, + "node_modules/level-sublevel": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/level-sublevel/-/level-sublevel-5.2.3.tgz", + "integrity": "sha512-tO8jrFp+QZYrxx/Gnmjawuh1UBiifpvKNAcm4KCogesWr1Nm2+ckARitf+Oo7xg4OHqMW76eAqQ204BoIlscjA==", + "license": "MIT", + "dependencies": { + "level-fix-range": "2.0", + "level-hooks": ">=4.4.0 <5", + "string-range": "~1.2.1", + "xtend": "~2.0.4" + } + }, + "node_modules/level-sublevel/node_modules/level-fix-range": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/level-fix-range/-/level-fix-range-2.0.0.tgz", + "integrity": "sha512-WrLfGWgwWbYPrHsYzJau+5+te89dUbENBg3/lsxOs4p2tYOhCHjbgXxBAj4DFqp3k/XBwitcRXoCh8RoCogASA==", + "license": "MIT", + "dependencies": { + "clone": "~0.1.9" + } + }, + "node_modules/level-sublevel/node_modules/object-keys": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-0.2.0.tgz", + "integrity": "sha512-XODjdR2pBh/1qrjPcbSeSgEtKbYo7LqYNq64/TPuCf7j9SfDD3i21yatKoIy39yIWNvVM59iutfQQpCv1RfFzA==", + "deprecated": "Please update to the latest object-keys", + "license": "MIT", + "dependencies": { + "foreach": 
"~2.0.1", + "indexof": "~0.0.1", + "is": "~0.2.6" + } + }, + "node_modules/level-sublevel/node_modules/xtend": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-2.0.6.tgz", + "integrity": "sha512-fOZg4ECOlrMl+A6Msr7EIFcON1L26mb4NY5rurSkOex/TWhazOrg6eXD/B0XkuiYcYhQDWLXzQxLMVJ7LXwokg==", + "dependencies": { + "is-object": "~0.1.2", + "object-keys": "~0.2.0" + }, + "engines": { + "node": ">=0.4" + } + }, + "node_modules/levelup": { + "version": "0.18.6", + "resolved": "https://registry.npmjs.org/levelup/-/levelup-0.18.6.tgz", + "integrity": "sha512-uB0auyRqIVXx+hrpIUtol4VAPhLRcnxcOsd2i2m6rbFIDarO5dnrupLOStYYpEcu8ZT087Z9HEuYw1wjr6RL6Q==", + "license": "MIT", + "dependencies": { + "bl": "~0.8.1", + "deferred-leveldown": "~0.2.0", + "errno": "~0.1.1", + "prr": "~0.0.0", + "readable-stream": "~1.0.26", + "semver": "~2.3.1", + "xtend": "~3.0.0" + } + }, + "node_modules/levelup/node_modules/bl": { + "version": "0.8.2", + "resolved": "https://registry.npmjs.org/bl/-/bl-0.8.2.tgz", + "integrity": "sha512-pfqikmByp+lifZCS0p6j6KreV6kNU6Apzpm2nKOk+94cZb/jvle55+JxWiByUQ0Wo/+XnDXEy5MxxKMb6r0VIw==", + "license": "MIT", + "dependencies": { + "readable-stream": "~1.0.26" + } + }, + "node_modules/levelup/node_modules/isarray": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-0.0.1.tgz", + "integrity": "sha512-D2S+3GLxWH+uhrNEcoh/fnmYeP8E8/zHl644d/jdA0g2uyXvy3sb0qxotE+ne0LtccHknQzWwZEzhak7oJ0COQ==", + "license": "MIT" + }, + "node_modules/levelup/node_modules/prr": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/prr/-/prr-0.0.0.tgz", + "integrity": "sha512-LmUECmrW7RVj6mDWKjTXfKug7TFGdiz9P18HMcO4RHL+RW7MCOGNvpj5j47Rnp6ne6r4fZ2VzyUWEpKbg+tsjQ==", + "license": "MIT" + }, + "node_modules/levelup/node_modules/readable-stream": { + "version": "1.0.34", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-1.0.34.tgz", + "integrity": "sha512-ok1qVCJuRkNmvebYikljxJA/UEsKwLl2nI1OmaqAu4/UE+h0wKCHok4XkL/gvi39OacXvw59RJUOFUkDib2rHg==", + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.1", + "isarray": "0.0.1", + "string_decoder": "~0.10.x" + } + }, + "node_modules/levelup/node_modules/semver": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-2.3.2.tgz", + "integrity": "sha512-abLdIKCosKfpnmhS52NCTjO4RiLspDfsn37prjzGrp9im5DPJOgh82Os92vtwGh6XdQryKI/7SREZnV+aqiXrA==", + "license": "BSD", + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/levelup/node_modules/string_decoder": { + "version": "0.10.31", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha512-ev2QzSzWPYmy9GuqfIVildA4OdcGLeFZQrq5ys6RtiuF+RQQiZWr8TZNyAcuVXyQRYfEO+MsoB/1BuQVhOJuoQ==", + "license": "MIT" + }, + "node_modules/levelup/node_modules/xtend": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/xtend/-/xtend-3.0.0.tgz", + "integrity": "sha512-sp/sT9OALMjRW1fKDlPeuSZlDQpkqReA0pyJukniWbTGoEKefHxhGJynE3PNhUMlcM8qWIjPwecwCw4LArS5Eg==", + "engines": { + "node": ">=0.4" + } + }, "node_modules/levn": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", @@ -9635,6 +9989,12 @@ "node": "20 || >=22" } }, + "node_modules/ltgt": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/ltgt/-/ltgt-2.2.1.tgz", + "integrity": "sha512-AI2r85+4MquTw9ZYqabu4nMwy9Oftlfa/e/52t9IjtfG+mGBbTNdAoZ3RQKLHR6r0wQnwZnPIEh/Ya6XTWAKNA==", + "license": "MIT" + }, "node_modules/lz-string": { 
"version": "1.5.0", "resolved": "https://registry.npmjs.org/lz-string/-/lz-string-1.5.0.tgz", @@ -10320,6 +10680,12 @@ "integrity": "sha512-PX1wu0AmAdPqOL1mWhqmlOd8kOIZQwGZw6rh7uby9fTc5lhaOWFLX3I6R1hrF9k3zUY40e6igsLGkDXK92LJNg==", "dev": true }, + "node_modules/octal": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/octal/-/octal-1.0.0.tgz", + "integrity": "sha512-nnda7W8d+A3vEIY+UrDQzzboPf1vhs4JYVhff5CDkq9QNoZY7Xrxeo/htox37j9dZf7yNHevZzqtejWgy1vCqQ==", + "license": "MIT" + }, "node_modules/on-finished": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", @@ -10345,7 +10711,6 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, "dependencies": { "wrappy": "1" } @@ -10651,6 +11016,12 @@ "tslib": "^2.0.3" } }, + "node_modules/path-browserify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-browserify/-/path-browserify-1.0.1.tgz", + "integrity": "sha512-b7uo2UCUOYZcnF/3ID0lulOJi/bafxa1xPe7ZPsammBSpjSWQkjNxlt635YGS2MiR9GjvuXCtz2emr3jbsz98g==", + "license": "MIT" + }, "node_modules/path-exists": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", @@ -11122,6 +11493,12 @@ "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" }, + "node_modules/prr": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/prr/-/prr-1.0.1.tgz", + "integrity": "sha512-yPw4Sng1gWghHQWj0B3ZggWUm4qVbPwPFcRG8KyxiU7J2OHFSoEHKS+EZ3fv5l1t9CyCiop6l/ZYeWbrgoQejw==", + "license": "MIT" + }, "node_modules/psl": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.9.0.tgz", @@ -12696,6 +13073,12 @@ "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, + "node_modules/string-range": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/string-range/-/string-range-1.2.2.tgz", + "integrity": "sha512-tYft6IFi8SjplJpxCUxyqisD3b+R2CSkomrtJYCkvuf1KuCAWgz7YXt4O0jip7efpfCemwHEzTEAO8EuOYgh3w==", + "license": "MIT" + }, "node_modules/string-width": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", @@ -13868,6 +14251,12 @@ "resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz", "integrity": "sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA==" }, + "node_modules/typedarray-to-buffer": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-1.0.4.tgz", + "integrity": "sha512-vjMKrfSoUDN8/Vnqitw2FmstOfuJ73G6CrSEKnf11A6RmasVxHqfeBcnTb6RsL4pTMuV5Zsv9IiHRphMZyckUw==", + "license": "MIT" + }, "node_modules/typescript": { "version": "5.5.4", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", @@ -15533,8 +15922,7 @@ "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true + "integrity": 
"sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/ws": { "version": "8.18.0", diff --git a/package.json b/package.json index cb74eb6c3..4547c3d13 100644 --- a/package.json +++ b/package.json @@ -79,6 +79,8 @@ }, "dependencies": { "axios": "^1.7.7", + "browserify-fs": "^1.0.0", + "buffer": "^6.0.3", "codemirror": "^5.61.1", "dotenv": "^16.4.5", "gl-matrix": "3.1.0", @@ -88,7 +90,8 @@ "mathjs": "^13.2.0", "nifti-reader-js": "^0.6.8", "numcodecs": "^0.3.1", - "pako": "^2.1.0" + "pako": "^2.1.0", + "path-browserify": "^1.0.1" }, "overrides": { "@puppeteer/browsers": ">=2" diff --git a/src/datasource/trk/frontend.ts b/src/datasource/trk/frontend.ts index 6297b5001..d25cc92f8 100644 --- a/src/datasource/trk/frontend.ts +++ b/src/datasource/trk/frontend.ts @@ -28,7 +28,7 @@ import type { DataSource, DataSubsourceEntry, GetDataSourceOptions, - NormalizeUrlOptions, + // NormalizeUrlOptions, } from "#src/datasource/index.js"; import { DataSourceProvider, RedirectError } from "#src/datasource/index.js"; import type { @@ -41,6 +41,7 @@ import { // ShardingHashFunction, SkeletonSourceParameters, } from "#src/datasource/trk/base.js"; +import { TrackProcessor } from "#src/datasource/trk/reader/trackProcessor.js"; import type { InlineSegmentProperty, InlineSegmentPropertyMap, @@ -57,7 +58,7 @@ import { DATA_TYPE_ARRAY_CONSTRUCTOR, DataType } from "#src/util/data_type.js"; import type { Borrowed } from "#src/util/disposable.js"; import { mat4 } from "#src/util/geom.js"; import { completeHttpPath } from "#src/util/http_path_completion.js"; -import { responseJson } from "#src/util/http_request.js"; +// import { responseJson } from "#src/util/http_request.js"; import { parseArray, parseFixedLengthArray, @@ -74,13 +75,13 @@ import { verifyString, verifyStringArray, } from "#src/util/json.js"; -import { getObjectId } from "#src/util/object_id.js"; +// import { getObjectId } from "#src/util/object_id.js"; import type { SpecialProtocolCredentials, SpecialProtocolCredentialsProvider, } from "#src/util/special_protocol_request.js"; import { - cancellableFetchSpecialOk, + // cancellableFetchSpecialOk, parseSpecialUrl, } from "#src/util/special_protocol_request.js"; import { Uint64 } from "#src/util/uint64.js"; @@ -95,23 +96,24 @@ class trkSkeletonSource extends WithParameters( return false; } get vertexAttributes() { + console.log(this.parameters.metadata.vertexAttributes); return this.parameters.metadata.vertexAttributes; } } -export function resolvePath(a: string, b: string) { - const outputParts = a.split("/"); - for (const part of b.split("/")) { - if (part === "..") { - if (outputParts.length !== 0) { - outputParts.length = outputParts.length - 1; - continue; - } - } - outputParts.push(part); - } - return outputParts.join("/"); -} +// export function resolvePath(a: string, b: string) { +// const outputParts = a.split("/"); +// for (const part of b.split("/")) { +// if (part === "..") { +// if (outputParts.length !== 0) { +// outputParts.length = outputParts.length - 1; +// continue; +// } +// } +// outputParts.push(part); +// } +// return outputParts.join("/"); +// } function parseTransform(data: any): mat4 { return verifyObjectProperty(data, "transform", (value) => { @@ -220,7 +222,8 @@ function parseSkeletonMetadata(data: any): ParsedSkeletonMetadata { verifyOptionalString, ); return { - metadata: { transform, vertexAttributes, + metadata: { + transform, vertexAttributes, // sharding } as SkeletonMetadata, segmentPropertyMap, @@ -228,15 +231,16 
@@ function parseSkeletonMetadata(data: any): ParsedSkeletonMetadata { } async function getSkeletonMetadata( - chunkManager: ChunkManager, - credentialsProvider: SpecialProtocolCredentialsProvider, - url: string, + // chunkManager: ChunkManager, + // credentialsProvider: SpecialProtocolCredentialsProvider, + // url: string, ): Promise { - const metadata = await getJsonMetadata( - chunkManager, - credentialsProvider, - url, - ); + // const metadata = await getJsonMetadata( + // chunkManager, + // credentialsProvider, + // url, + // ); + const metadata = await getMetadata(); return parseSkeletonMetadata(metadata); } @@ -254,9 +258,9 @@ async function getSkeletonSource( url: string, ) { const { metadata, segmentPropertyMap } = await getSkeletonMetadata( - chunkManager, - credentialsProvider, - url, + // chunkManager, + // credentialsProvider, + // url, ); return { source: chunkManager.getChunkSource(trkSkeletonSource, { @@ -271,26 +275,60 @@ async function getSkeletonSource( }; } -function getJsonMetadata( - chunkManager: ChunkManager, - credentialsProvider: SpecialProtocolCredentialsProvider, - url: string, -): Promise { - return chunkManager.memoize.getUncounted( - { - type: "trk:metadata", - url, - credentialsProvider: getObjectId(credentialsProvider), - }, - async () => { - return await cancellableFetchSpecialOk( - credentialsProvider, - `${url}/info`, - {}, - responseJson, - ); - }, - ); +// function getJsonMetadata( +// chunkManager: ChunkManager, +// credentialsProvider: SpecialProtocolCredentialsProvider, +// url: string, +// ): Promise { +// return chunkManager.memoize.getUncounted( +// { +// type: "trk:metadata", +// url, +// credentialsProvider: getObjectId(credentialsProvider), +// }, +// async () => { +// return await cancellableFetchSpecialOk( +// credentialsProvider, +// `${url}/info`, +// {}, +// responseJson, +// ); +// }, +// ); +// } + +function getMetadata(){ + return { + "@type": "neuroglancer_skeletons", + "vertex_attributes": [ + { + "id": "orientation", + "data_type": "float32", + "num_components": 3 + } + ], + "segment_properties": "prop" + }; +} + +function getPropMetadata(){ + return { + "@type": "neuroglancer_segment_properties", + "inline": { + "ids": [ + "1" + ], + "properties": [ + { + "id": "label", + "type": "label", + "values": [ + "1" + ] + } + ] + } + }; } async function getSkeletonsDataSource( @@ -312,17 +350,18 @@ async function getSkeletonsDataSource( }, ]; if (segmentPropertyMap !== undefined) { - const mapUrl = resolvePath(url, segmentPropertyMap); - const metadata = await getJsonMetadata( - options.chunkManager, - credentialsProvider, - mapUrl, - ); + // const mapUrl = resolvePath(url, segmentPropertyMap); + // const metadata = await getJsonMetadata( + // options.chunkManager, + // credentialsProvider, + // mapUrl, + // ); + const metadata = await getPropMetadata(); const segmentPropertyMapData = getSegmentPropertyMap( options.chunkManager, credentialsProvider, metadata, - mapUrl, + // mapUrl, ); subsources.push({ id: "properties", @@ -469,11 +508,11 @@ export function getSegmentPropertyMap( chunkManager: Borrowed, credentialsProvider: SpecialProtocolCredentialsProvider, data: unknown, - url: string, + // url: string, ): SegmentPropertyMap { chunkManager; credentialsProvider; - url; + // url; try { const t = verifyObjectProperty(data, "@type", verifyString); if (t !== "neuroglancer_segment_properties") { @@ -501,7 +540,7 @@ export function getSegmentPropertyMap( async function getSegmentPropertyMapDataSource( options: GetDataSourceOptions, 
credentialsProvider: SpecialProtocolCredentialsProvider, - url: string, + // url: string, metadata: unknown, ): Promise { options; @@ -516,7 +555,7 @@ async function getSegmentPropertyMapDataSource( options.chunkManager, credentialsProvider, metadata, - url, + // url, ), }, }, @@ -548,13 +587,13 @@ export class TrkDataSource extends DataSourceProvider { return "Single trk file"; } - normalizeUrl(options: NormalizeUrlOptions): string { - const { url, parameters } = - parseProviderUrl(options.providerUrl); - return ( - options.providerProtocol + "://" + unparseProviderUrl(url, parameters) - ); - } + // normalizeUrl(options: NormalizeUrlOptions): string { + // const { url, parameters } = + // parseProviderUrl(options.providerUrl); + // return ( + // options.providerProtocol + "://" + unparseProviderUrl(url, parameters) + // ); + // } get(options: GetDataSourceOptions): Promise { const { url: providerUrl, parameters } = parseProviderUrl( @@ -567,26 +606,66 @@ export class TrkDataSource extends DataSourceProvider { providerUrl, options.credentialsManager, ); + + // Logging the URL to the console + console.log("TRK file URL:", url); + + const trackProcessor = new TrackProcessor(); + + await trackProcessor.streamAndProcessHeader(url, 0, 999); + if (!trackProcessor.globalHeader) { + console.error('Error: Failed to fetch or process the TRK header.'); + + } + + const totalTracks = trackProcessor.globalHeader?.n_count; + if (totalTracks !== undefined) { + const randomTrackNumbers = trackProcessor.getRandomTrackIndices(totalTracks, 1000); + await trackProcessor.processTrackData(randomTrackNumbers, 1, url); + } else { + console.error("totalTracks is undefined. Cannot proceed."); + } + + let metadata: any; try { - metadata = await getJsonMetadata( - options.chunkManager, - credentialsProvider, - url, - ); - } catch (e) { + // metadata = await getJsonMetadata( + // options.chunkManager, + // credentialsProvider, + // 'http://127.0.0.1:9123/Users/shrutiv/MyDocuments/GitHub/Neuroglancer-Tractography/src/tract/20240920_163900', + // ); + metadata = await getMetadata(); + } catch (e) { throw new Error(`Failed to get metadata for ${url}: ${e}`); - } + } + + + // const metadata = { + // "@type": "neuroglancer_skeletons", + // "vertex_attributes": [ + // { + // "id": "orientation", + // "data_type": "float32", + // "num_components": 3 + // } + // ], + // "segment_properties": "prop" + // }; + verifyObject(metadata); + const redirect = verifyOptionalObjectProperty( metadata, "redirect", verifyString, ); + if (redirect !== undefined) { throw new RedirectError(redirect); } const t = verifyOptionalObjectProperty(metadata, "@type", verifyString); + + switch (t) { case "neuroglancer_skeletons": return await getSkeletonsDataSource( @@ -599,7 +678,7 @@ export class TrkDataSource extends DataSourceProvider { return await getSegmentPropertyMapDataSource( options, credentialsProvider, - url, + // url, metadata, ); @@ -609,6 +688,7 @@ export class TrkDataSource extends DataSourceProvider { }, ); } + completeUrl(options: CompleteUrlOptions) { return completeHttpPath( options.credentialsManager, diff --git a/src/datasource/trk/reader/trackProcessor.ts b/src/datasource/trk/reader/trackProcessor.ts index 06dcc9dc0..cad202163 100644 --- a/src/datasource/trk/reader/trackProcessor.ts +++ b/src/datasource/trk/reader/trackProcessor.ts @@ -1,12 +1,14 @@ -import fs from 'fs'; -import path from 'path'; +// import fs from 'fs'; +// import path from 'path'; +import { Buffer } from 'buffer'; import axios from 'axios'; import type { 
Vertex, Edge } from '#src/datasource/trk/reader/skeletonWriter.js'; -import { SkeletonWriter } from '#src/datasource/trk/reader/skeletonWriter.js'; +// import { SkeletonWriter } from '#src/datasource/trk/reader/skeletonWriter.js'; import type { TrkHeader} from '#src/datasource/trk/reader/trkHeader.js'; import { TrkHeaderProcessor } from '#src/datasource/trk/reader/trkHeader.js'; import { VoxelToRASConverter } from '#src/datasource/trk/reader/voxelToRASConverter.js'; + /** * Represents the processing state of track data, indicating progress in bytes and tracks. * @interface @@ -123,8 +125,8 @@ export class TrackProcessor { return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; } - const outputFilePath = path.join(__dirname, 'track_data.txt'); - const writeStream = fs.createWriteStream(outputFilePath, { flags: 'a' }); + // const outputFilePath = path.join(__dirname, 'track_data.txt'); + // const writeStream = fs.createWriteStream(outputFilePath, { flags: 'a' }); const maxTracksToProcess = randomTrackNumbers.length; const vertices: Vertex[] = []; @@ -146,11 +148,14 @@ export class TrackProcessor { const n_points = dataView.getInt32(offset, true); // true indicates little-endian byte order. offset += 4; - writeStream.write(`Track ${trackNumber} processed, number of points: ${n_points}\n`); + // writeStream.write(`Track ${trackNumber} processed, number of points: ${n_points}\n`); + + // console.log(`Track ${trackNumber} processed, number of points: ${n_points}\n`); // Only process the track if it is in the random track numbers if (randomTrackNumbers.includes(trackNumber)) { // Process each point in the track (x, y, z -> 12 bytes per point) + console.log(`Track ${trackNumber}`); const points: number[][] = []; for (let i = 0; i < n_points; i++) { const x = dataView.getFloat32(offset, true); @@ -180,22 +185,22 @@ export class TrackProcessor { trackProcessedCount++; // Increment the number of processed tracks - if (trackProcessedCount >= maxTracksToProcess) { - const outputDirectory = path.resolve(__dirname, '..', 'src'); - const { binaryFilePath, propInfoFilePath, skeletonInfoFilePath } = SkeletonWriter.generateSkeletonFilePaths(outputDirectory, timestamp); + // if (trackProcessedCount >= maxTracksToProcess) { + // const outputDirectory = path.resolve(__dirname, '..', 'src'); + // const { binaryFilePath, propInfoFilePath, skeletonInfoFilePath } = SkeletonWriter.generateSkeletonFilePaths(outputDirectory, timestamp); - SkeletonWriter.writeSkeleton(vertices, edges, orientations, binaryFilePath); - SkeletonWriter.writePropInfo(propInfoFilePath); - SkeletonWriter.writeSkeletonInfo(skeletonInfoFilePath); + // SkeletonWriter.writeSkeleton(vertices, edges, orientations, binaryFilePath); + // SkeletonWriter.writePropInfo(propInfoFilePath); + // SkeletonWriter.writeSkeletonInfo(skeletonInfoFilePath); - console.log(`Processed ${maxTracksToProcess} random tracks and wrote skeleton and info files.`); + // console.log(`Processed ${maxTracksToProcess} random tracks and wrote skeleton and info files.`); - // SkeletonWriter.uploadSkeletonFilePathsToS3(outputDirectory, timestamp); + // // SkeletonWriter.uploadSkeletonFilePathsToS3(outputDirectory, timestamp); - console.log(`Uploaded tracks to S3.`) + // console.log(`Uploaded tracks to S3.`) - break; - } + // break; + // } } else { offset += n_points * 12; // Skip the track data if it's not in the selected tracks } @@ -203,7 +208,7 @@ export class TrackProcessor { trackNumber++; } - writeStream.end(); + // writeStream.end(); return { 
processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; } catch (error) { @@ -235,7 +240,7 @@ export class TrackProcessor { * @returns {Promise<{dataView: DataView; buffer: Buffer}>} A promise that resolves to the DataView and buffer of the file. */ loadFileBuffer(filePath: string) { - if (filePath.startsWith('http://') || filePath.startsWith('https://')) { + // if (filePath.startsWith('http://') || filePath.startsWith('https://')) { // Handle URL loading with axios return axios.get(filePath, { responseType: 'arraybuffer' }) .then(response => { @@ -251,22 +256,22 @@ export class TrackProcessor { console.error('Failed to load file from URL:', error); throw error; }); - } else { - // Handle local file loading with fs - try { - const absolutePath = path.resolve(filePath); - const buffer = fs.readFileSync(absolutePath); - const dataView = new DataView(buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength)); - console.log('Data loaded from local file successfully.'); - return { - dataView, - buffer - }; - } catch (error) { - console.error('Failed to load local file:', error); - throw error; - } - } + // } else { + // // Handle local file loading with fs + // try { + // const absolutePath = path.resolve(filePath); + // const buffer = fs.readFileSync(absolutePath); + // const dataView = new DataView(buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength)); + // console.log('Data loaded from local file successfully.'); + // return { + // dataView, + // buffer + // }; + // } catch (error) { + // console.error('Failed to load local file:', error); + // throw error; + // } + // } } } From 7093eca2c2ea34fd01505e7b05249126cbca6a0e Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Mon, 28 Oct 2024 12:44:51 -0400 Subject: [PATCH 06/12] trying to fix the new public url --- src/datasource/trk/backend.ts | 2 +- src/datasource/trk/backup/skeletonWriter.ts | 254 +++++++++++++++++++ src/datasource/trk/base.ts | 22 +- src/datasource/trk/frontend.ts | 255 +++++--------------- src/datasource/trk/reader/skeletonWriter.ts | 248 +++++-------------- src/datasource/trk/reader/trackProcessor.ts | 84 +++---- 6 files changed, 415 insertions(+), 450 deletions(-) create mode 100644 src/datasource/trk/backup/skeletonWriter.ts diff --git a/src/datasource/trk/backend.ts b/src/datasource/trk/backend.ts index 1034ce796..19c4c4532 100644 --- a/src/datasource/trk/backend.ts +++ b/src/datasource/trk/backend.ts @@ -387,7 +387,7 @@ export class trkSkeletonSource extends WithParameters( let response: AxiosResponse | null = null; try { - response = await axios.get("http://127.0.0.1:9123/Users/shrutiv/MyDocuments/GitHub/Neuroglancer-Tractography/src/tract/20240920_163900/1", + response = await axios.get("http://127.0.0.1:9123/Users/shrutiv/MyDocuments/GitHub/Neuroglancer-Tractography/src/tract/20241028_161202/1", { responseType: 'arraybuffer' }); if (response && response.data) { diff --git a/src/datasource/trk/backup/skeletonWriter.ts b/src/datasource/trk/backup/skeletonWriter.ts new file mode 100644 index 000000000..b04e1ea2a --- /dev/null +++ b/src/datasource/trk/backup/skeletonWriter.ts @@ -0,0 +1,254 @@ +import fs from 'fs'; +import path from 'path'; + +// import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'; + +/** + * Represents a 3D vertex with coordinates. + * @interface + */ +export interface Vertex { + x: number; + y: number; + z: number; +} + +/** + * Represents an edge connecting two vertices by their indices. 
+ * @interface + */ +export interface Edge { + vertex1: number; + vertex2: number; +} + +/** + * Provides utilities for writing skeleton data to files and uploading them to AWS S3. + */ +export class SkeletonWriter { + + /** + * Writes skeleton data including vertices, edges, and orientations to a binary file. + * @static + * @param {Vertex[]} vertices - The list of vertices to write. + * @param {Edge[]} edges - The list of edges connecting the vertices. + * @param {number[][]} orientations - The orientations of each vertex. + * @param {string} outputFilePath - The file path where the binary data will be written. + */ + static writeSkeleton(vertices: Vertex[], edges: Edge[], orientations: number[][], outputFilePath: string) { + fs.mkdirSync(path.dirname(outputFilePath), { recursive: true }); + + const vertexCount = vertices.length; + const edgeCount = edges.length; + + const vertexSize = 12; // 3 floats (x, y, z), each 4 bytes + const edgeSize = 8; // 2 uint32s (source and target), each 4 bytes + const orientationSize = 12; // 3 floats (x, y, z) for orientations + const bufferSize = 4 + 4 + (vertexSize * vertexCount) + (edgeSize * edgeCount) + (orientationSize * vertexCount); + + const buffer = Buffer.alloc(bufferSize); + let offset = 0; + + buffer.writeUInt32LE(vertexCount, offset); // Number of vertices + offset += 4; + buffer.writeUInt32LE(edgeCount, offset); // Number of edges + offset += 4; + + // Write the vertices (3 floats per vertex: x, y, z) + for (let i = 0; i < vertexCount; i++) { + buffer.writeFloatLE((vertices[i].x)*1E6, offset); + buffer.writeFloatLE(vertices[i].y*1E6, offset + 4); + buffer.writeFloatLE(vertices[i].z*1E6, offset + 8); + offset += 12; + } + + // Write the edges (2 uint32 per edge: vertex1, vertex2) + for (let i = 0; i < edgeCount; i++) { + buffer.writeUInt32LE(edges[i].vertex1, offset); + buffer.writeUInt32LE(edges[i].vertex2, offset + 4); + offset += 8; + } + + // Write the orientations (3 floats per vertex) + for (let i = 0; i < vertexCount; i++) { + buffer.writeFloatLE(orientations[i][0], offset); + buffer.writeFloatLE(orientations[i][1], offset + 4); + buffer.writeFloatLE(orientations[i][2], offset + 8); + offset += 12; + } + + fs.writeFileSync(outputFilePath, buffer); + console.log(`Skeleton written to ${outputFilePath}`); + } + + /** + * Writes metadata about the skeleton data structure to a JSON file. + * @static + * @param {string} infoFilePath - The file path where the skeleton info will be written. + */ + static writeSkeletonInfo(infoFilePath: string) { + fs.mkdirSync(path.dirname(infoFilePath), { recursive: true }); + + const skeletonInfo = { + "@type": "neuroglancer_skeletons", + "vertex_attributes": [ + { + "id": "orientation", + "data_type": "float32", + "num_components": 3, + }, + ], + "segment_properties": "prop", + }; + + fs.mkdirSync(path.dirname(infoFilePath), { recursive: true }); + + // Write the skeleton info to the specified path + fs.writeFileSync(infoFilePath, JSON.stringify(skeletonInfo, null, 2)); + console.log(`Skeleton info file written to ${infoFilePath}`); + } + + /** + * Writes properties metadata for the skeleton to a JSON file. + * @static + * @param {string} propFilePath - The file path where the properties info will be written. 
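 *
 * Note (inferred from writeSkeleton above, not an additional requirement): the binary
 * stream writeSkeleton produces is little-endian and, for V vertices and E edges,
 * occupies 4 + 4 + 12*V + 8*E + 12*V = 8 + 24*V + 8*E bytes, with vertex positions
 * scaled by 1e6 before being stored as float32.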
+ */ + + static writePropInfo(propFilePath: string) { + fs.mkdirSync(path.dirname(propFilePath), { recursive: true }); + + const propInfo = { + "@type": "neuroglancer_segment_properties", + "inline": { + "ids": ["1"], + "properties": [{ "id": "label", "type": "label", "values": ["1"] }] + } + }; + + // Write the prop info to the specified path + fs.writeFileSync(propFilePath, JSON.stringify(propInfo, null, 2)); + console.log(`Prop info file written to ${propFilePath}`); + } + + /** + * Generates file paths for the binary, property, and skeleton info files based on a timestamp. + * TimeStamp is used for having unique filename. + * @static + * @param {string} outputDirectory - The output directory for the files. + * @param {string} timestamp - The timestamp used to format the file paths. + * @returns {{ binaryFilePath: string, propInfoFilePath: string, skeletonInfoFilePath: string }} + */ + static generateSkeletonFilePaths(outputDirectory: string, timestamp: string) { + + // Build the file paths with the formatted timestamp + const binaryFilePath = path.join(outputDirectory, 'tract', timestamp, '1'); // Binary file path + const propInfoFilePath = path.join(outputDirectory, 'tract', timestamp, 'prop', 'info'); // JSON file path + const skeletonInfoFilePath = path.join(outputDirectory, 'tract', timestamp, 'info'); // JSON file path + + return { + binaryFilePath, + propInfoFilePath, + skeletonInfoFilePath + }; + } + + /** + * Uploads a directory of files to AWS S3. + * @static + * @param {string} outputDirectory - The directory containing the files to upload. + * @param {string} timestamp - The timestamp used to organize the files in S3. + */ + // static async uploadSkeletonFilePathsToS3(outputDirectory: string, timestamp: string) { + // // Initialize the S3 client + // const s3Client = new S3Client({ + // region: process.env.AWS_REGION || 'us-east-2', + // }); + + // // Read the bucket name from environment variables + // const bucketName = process.env.BUCKET_NAME || 'linc-brain-mit-prod-us-east-2'; + + // // Check for required environment variables + // if (!process.env.AWS_REGION || !process.env.BUCKET_NAME) { + // console.error('AWS_REGION and BUCKET_NAME must be set in environment variables.'); + // return; + // } + + // // Define the local directory to upload + // const localDir = path.join(outputDirectory, 'tract', timestamp); + + // // Include the 'neuroglancer_trk/' prefix in the S3 destination path + // const s3DestinationPath = path.join('neuroglancer_trk', 'tract', timestamp).replace(/\\/g, '/'); + + // // Recursively upload all files in the local directory to S3 + // await SkeletonWriter.uploadDirectoryToS3(s3Client, bucketName, localDir, s3DestinationPath); + + // console.log('Uploaded generated files to S3.'); + // } + + /** + * Iteratively uploads all files from a local directory to an AWS S3 bucket. + * @static + * @param {S3Client} s3Client - The AWS S3 client used for the upload. + * @param {string} bucketName - The name of the S3 bucket. + * @param {string} localDirectory - The local directory containing the files to upload. + * @param {string} s3DestinationPath - The destination path in the S3 bucket. 
+ */ + // static async uploadDirectoryToS3( + // s3Client: S3Client, + // bucketName: string, + // localDirectory: string, + // s3DestinationPath: string + // ) { + // const files = SkeletonWriter.getAllFilesInDirectory(localDirectory); + + // for (const filePath of files) { + // // Compute the relative path from the local directory + // const relativeFilePath = path.relative(localDirectory, filePath); + + // // Construct the S3 key by joining the destination path and relative file path (Hashmap) + // const s3Key = path.join(s3DestinationPath, relativeFilePath).replace(/\\/g, '/'); + + // try { + // const fileContent = fs.readFileSync(filePath); + + // const params = { + // Bucket: bucketName, + // Key: s3Key, + // Body: fileContent, + // }; + + // const command = new PutObjectCommand(params); + // await s3Client.send(command); + // console.log(`File uploaded successfully to s3://${bucketName}/${s3Key}`); + // } catch (error) { + // console.error(`Error uploading file ${filePath} to S3:`, error); + // } + // } + // } + + /** + * Interatively collects all file paths in a directory. + * @static + * @param {string} dir - The directory to scan. + * @returns {string[]} An array of file paths found in the directory. + */ + static getAllFilesInDirectory(dir: string): string[] { + let results: string[] = []; + + const list = fs.readdirSync(dir); + list.forEach((file) => { + const filePath = path.join(dir, file); + const stat = fs.statSync(filePath); + if (stat && stat.isDirectory()) { + // Recursively walk subdirectories + results = results.concat(SkeletonWriter.getAllFilesInDirectory(filePath)); + } else { + results.push(filePath); + } + }); + + return results; + } +} + + diff --git a/src/datasource/trk/base.ts b/src/datasource/trk/base.ts index 74281a8c7..5f7446e12 100644 --- a/src/datasource/trk/base.ts +++ b/src/datasource/trk/base.ts @@ -17,6 +17,7 @@ import type { VertexAttributeInfo } from "#src/skeleton/base.js"; import type { mat4 } from "#src/util/geom.js"; + export enum DataEncoding { RAW = 0, GZIP = 1, @@ -27,31 +28,14 @@ export enum ShardingHashFunction { MURMURHASH3_X86_128 = 1, } -// export interface ShardingParameters { -// hash: ShardingHashFunction; -// preshiftBits: number; -// minishardBits: number; -// shardBits: number; -// minishardIndexEncoding: DataEncoding; -// dataEncoding: DataEncoding; -// } - export interface SkeletonMetadata { transform: mat4; vertexAttributes: Map; - // sharding: ShardingParameters | undefined; } export class SkeletonSourceParameters { url: string; metadata: SkeletonMetadata; - + skeletonBuffer: ArrayBuffer; static RPC_ID = "trk/SkeletonSource"; -} - - -// export class IndexedSegmentPropertySourceParameters { -// url: string; -// sharding: ShardingParameters | undefined; -// static RPC_ID = "trk/IndexedSegmentPropertySource"; -// } +} \ No newline at end of file diff --git a/src/datasource/trk/frontend.ts b/src/datasource/trk/frontend.ts index d25cc92f8..9ce037da1 100644 --- a/src/datasource/trk/frontend.ts +++ b/src/datasource/trk/frontend.ts @@ -28,17 +28,12 @@ import type { DataSource, DataSubsourceEntry, GetDataSourceOptions, - // NormalizeUrlOptions, } from "#src/datasource/index.js"; import { DataSourceProvider, RedirectError } from "#src/datasource/index.js"; import type { - // ShardingParameters, SkeletonMetadata, } from "#src/datasource/trk/base.js"; import { - // DataEncoding, - // IndexedSegmentPropertySourceParameters, - // ShardingHashFunction, SkeletonSourceParameters, } from "#src/datasource/trk/base.js"; import { TrackProcessor } 
from "#src/datasource/trk/reader/trackProcessor.js"; @@ -47,7 +42,6 @@ import type { InlineSegmentPropertyMap, } from "#src/segmentation_display_state/property_map.js"; import { - // IndexedSegmentPropertySource, normalizeInlineSegmentPropertyMap, SegmentPropertyMap, } from "#src/segmentation_display_state/property_map.js"; @@ -58,7 +52,6 @@ import { DATA_TYPE_ARRAY_CONSTRUCTOR, DataType } from "#src/util/data_type.js"; import type { Borrowed } from "#src/util/disposable.js"; import { mat4 } from "#src/util/geom.js"; import { completeHttpPath } from "#src/util/http_path_completion.js"; -// import { responseJson } from "#src/util/http_request.js"; import { parseArray, parseFixedLengthArray, @@ -66,7 +59,6 @@ import { unparseQueryStringParameters, verifyEnumString, verifyFiniteFloat, - // verifyInt, verifyObject, verifyObjectProperty, verifyOptionalObjectProperty, @@ -75,19 +67,16 @@ import { verifyString, verifyStringArray, } from "#src/util/json.js"; -// import { getObjectId } from "#src/util/object_id.js"; import type { SpecialProtocolCredentials, SpecialProtocolCredentialsProvider, } from "#src/util/special_protocol_request.js"; import { - // cancellableFetchSpecialOk, parseSpecialUrl, } from "#src/util/special_protocol_request.js"; import { Uint64 } from "#src/util/uint64.js"; - class trkSkeletonSource extends WithParameters( WithCredentialsProvider()(SkeletonSource), SkeletonSourceParameters, @@ -99,21 +88,11 @@ class trkSkeletonSource extends WithParameters( console.log(this.parameters.metadata.vertexAttributes); return this.parameters.metadata.vertexAttributes; } -} -// export function resolvePath(a: string, b: string) { -// const outputParts = a.split("/"); -// for (const part of b.split("/")) { -// if (part === "..") { -// if (outputParts.length !== 0) { -// outputParts.length = outputParts.length - 1; -// continue; -// } -// } -// outputParts.push(part); -// } -// return outputParts.join("/"); -// } + // get skeleton() { + // return this.parameters.binarydata.skeleton; + // } +} function parseTransform(data: any): mat4 { return verifyObjectProperty(data, "transform", (value) => { @@ -130,54 +109,6 @@ function parseTransform(data: any): mat4 { }); } -// function parseShardingEncoding(y: any): DataEncoding { -// if (y === undefined) return DataEncoding.RAW; -// return verifyEnumString(y, DataEncoding); -// } - -// function parseShardingParameters( -// shardingData: any, -// ): ShardingParameters | undefined { -// if (shardingData === undefined) return undefined; -// verifyObject(shardingData); -// const t = verifyObjectProperty(shardingData, "@type", verifyString); -// if (t !== "neuroglancer_uint64_sharded_v1") { -// throw new Error(`Unsupported sharding format: ${JSON.stringify(t)}`); -// } -// const hash = verifyObjectProperty(shardingData, "hash", (y) => -// verifyEnumString(y, ShardingHashFunction), -// ); -// const preshiftBits = verifyObjectProperty( -// shardingData, -// "preshift_bits", -// verifyInt, -// ); -// const shardBits = verifyObjectProperty(shardingData, "shard_bits", verifyInt); -// const minishardBits = verifyObjectProperty( -// shardingData, -// "minishard_bits", -// verifyInt, -// ); -// const minishardIndexEncoding = verifyObjectProperty( -// shardingData, -// "minishard_index_encoding", -// parseShardingEncoding, -// ); -// const dataEncoding = verifyObjectProperty( -// shardingData, -// "data_encoding", -// parseShardingEncoding, -// ); -// return { -// hash, -// preshiftBits, -// shardBits, -// minishardBits, -// minishardIndexEncoding, -// dataEncoding, 
-// }; -// } - interface ParsedSkeletonMetadata { metadata: SkeletonMetadata; segmentPropertyMap: string | undefined; @@ -230,16 +161,7 @@ function parseSkeletonMetadata(data: any): ParsedSkeletonMetadata { }; } -async function getSkeletonMetadata( - // chunkManager: ChunkManager, - // credentialsProvider: SpecialProtocolCredentialsProvider, - // url: string, -): Promise { - // const metadata = await getJsonMetadata( - // chunkManager, - // credentialsProvider, - // url, - // ); +async function getSkeletonMetadata(): Promise { const metadata = await getMetadata(); return parseSkeletonMetadata(metadata); } @@ -257,17 +179,16 @@ async function getSkeletonSource( credentialsProvider: SpecialProtocolCredentialsProvider, url: string, ) { - const { metadata, segmentPropertyMap } = await getSkeletonMetadata( - // chunkManager, - // credentialsProvider, - // url, - ); + const { metadata, segmentPropertyMap } = await getSkeletonMetadata(); + const skeletonBuffer = await getSkeletonBuffer(url) ?? new ArrayBuffer(0); + return { source: chunkManager.getChunkSource(trkSkeletonSource, { credentialsProvider, parameters: { url, metadata, + skeletonBuffer }, }), transform: metadata.transform, @@ -275,60 +196,63 @@ async function getSkeletonSource( }; } -// function getJsonMetadata( -// chunkManager: ChunkManager, -// credentialsProvider: SpecialProtocolCredentialsProvider, -// url: string, -// ): Promise { -// return chunkManager.memoize.getUncounted( -// { -// type: "trk:metadata", -// url, -// credentialsProvider: getObjectId(credentialsProvider), -// }, -// async () => { -// return await cancellableFetchSpecialOk( -// credentialsProvider, -// `${url}/info`, -// {}, -// responseJson, -// ); -// }, -// ); -// } - -function getMetadata(){ +function getMetadata() { return { "@type": "neuroglancer_skeletons", "vertex_attributes": [ - { - "id": "orientation", - "data_type": "float32", - "num_components": 3 - } + { + "id": "orientation", + "data_type": "float32", + "num_components": 3 + } ], "segment_properties": "prop" - }; + }; } -function getPropMetadata(){ +function getPropMetadata() { return { "@type": "neuroglancer_segment_properties", "inline": { - "ids": [ - "1" - ], - "properties": [ - { - "id": "label", - "type": "label", - "values": [ + "ids": [ "1" - ] - } - ] + ], + "properties": [ + { + "id": "label", + "type": "label", + "values": [ + "1" + ] + } + ] } - }; + }; +} + +async function getSkeletonBuffer(url: string) { + + const trackProcessor = new TrackProcessor(); + + await trackProcessor.streamAndProcessHeader(url, 0, 999); + // await trackProcessor.streamAndProcessHeader(url); + if (!trackProcessor.globalHeader) { + console.error('Error: Failed to fetch or process the TRK header.'); + + } + + const totalTracks = trackProcessor.globalHeader?.n_count; + if (totalTracks !== undefined) { + const randomTrackNumbers = trackProcessor.getRandomTrackIndices(totalTracks, 1000); + const skeleton = await trackProcessor.processTrackData(randomTrackNumbers, 1, url); + console.log(skeleton.arrayBuffer); + return skeleton.arrayBuffer; + + } else { + console.error("totalTracks is undefined. 
Cannot proceed."); + return new ArrayBuffer(0) + } + } async function getSkeletonsDataSource( @@ -350,18 +274,11 @@ async function getSkeletonsDataSource( }, ]; if (segmentPropertyMap !== undefined) { - // const mapUrl = resolvePath(url, segmentPropertyMap); - // const metadata = await getJsonMetadata( - // options.chunkManager, - // credentialsProvider, - // mapUrl, - // ); const metadata = await getPropMetadata(); const segmentPropertyMapData = getSegmentPropertyMap( options.chunkManager, credentialsProvider, metadata, - // mapUrl, ); subsources.push({ id: "properties", @@ -497,22 +414,13 @@ function parseInlinePropertyMap(data: unknown): InlineSegmentPropertyMap { return normalizeInlineSegmentPropertyMap({ ids, properties }); } -// export const trkIndexedSegmentPropertySource = WithParameters( -// WithCredentialsProvider()( -// IndexedSegmentPropertySource, -// ), -// IndexedSegmentPropertySourceParameters, -// ); - export function getSegmentPropertyMap( chunkManager: Borrowed, credentialsProvider: SpecialProtocolCredentialsProvider, data: unknown, - // url: string, ): SegmentPropertyMap { chunkManager; credentialsProvider; - // url; try { const t = verifyObjectProperty(data, "@type", verifyString); if (t !== "neuroglancer_segment_properties") { @@ -525,12 +433,6 @@ export function getSegmentPropertyMap( "inline", parseInlinePropertyMap, ); - // const indexedProperties = verifyOptionalObjectProperty(data, 'indexed', indexedObj => { - // const {sharding, properties} = parseIndexedPropertyMap(indexedObj); - // return chunkManager.getChunkSource( - // trkIndexedSegmentPropertySource, - // {credentialsProvider, properties, parameters: {sharding, url}}); - // }); return new SegmentPropertyMap({ inlineProperties }); } catch (e) { throw new Error(`Error parsing segment property map: ${e.message}`); @@ -587,14 +489,6 @@ export class TrkDataSource extends DataSourceProvider { return "Single trk file"; } - // normalizeUrl(options: NormalizeUrlOptions): string { - // const { url, parameters } = - // parseProviderUrl(options.providerUrl); - // return ( - // options.providerProtocol + "://" + unparseProviderUrl(url, parameters) - // ); - // } - get(options: GetDataSourceOptions): Promise { const { url: providerUrl, parameters } = parseProviderUrl( options.providerUrl, @@ -607,51 +501,13 @@ export class TrkDataSource extends DataSourceProvider { options.credentialsManager, ); - // Logging the URL to the console - console.log("TRK file URL:", url); - - const trackProcessor = new TrackProcessor(); - - await trackProcessor.streamAndProcessHeader(url, 0, 999); - if (!trackProcessor.globalHeader) { - console.error('Error: Failed to fetch or process the TRK header.'); - - } - - const totalTracks = trackProcessor.globalHeader?.n_count; - if (totalTracks !== undefined) { - const randomTrackNumbers = trackProcessor.getRandomTrackIndices(totalTracks, 1000); - await trackProcessor.processTrackData(randomTrackNumbers, 1, url); - } else { - console.error("totalTracks is undefined. 
Cannot proceed."); - } - - let metadata: any; try { - // metadata = await getJsonMetadata( - // options.chunkManager, - // credentialsProvider, - // 'http://127.0.0.1:9123/Users/shrutiv/MyDocuments/GitHub/Neuroglancer-Tractography/src/tract/20240920_163900', - // ); metadata = await getMetadata(); } catch (e) { throw new Error(`Failed to get metadata for ${url}: ${e}`); } - - // const metadata = { - // "@type": "neuroglancer_skeletons", - // "vertex_attributes": [ - // { - // "id": "orientation", - // "data_type": "float32", - // "num_components": 3 - // } - // ], - // "segment_properties": "prop" - // }; - verifyObject(metadata); const redirect = verifyOptionalObjectProperty( @@ -665,7 +521,6 @@ export class TrkDataSource extends DataSourceProvider { } const t = verifyOptionalObjectProperty(metadata, "@type", verifyString); - switch (t) { case "neuroglancer_skeletons": return await getSkeletonsDataSource( @@ -688,7 +543,7 @@ export class TrkDataSource extends DataSourceProvider { }, ); } - + completeUrl(options: CompleteUrlOptions) { return completeHttpPath( options.credentialsManager, diff --git a/src/datasource/trk/reader/skeletonWriter.ts b/src/datasource/trk/reader/skeletonWriter.ts index b04e1ea2a..fed95672f 100644 --- a/src/datasource/trk/reader/skeletonWriter.ts +++ b/src/datasource/trk/reader/skeletonWriter.ts @@ -1,7 +1,4 @@ -import fs from 'fs'; -import path from 'path'; - -// import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'; +// import axios from "axios"; /** * Represents a 3D vertex with coordinates. @@ -23,232 +20,105 @@ export interface Edge { } /** - * Provides utilities for writing skeleton data to files and uploading them to AWS S3. + * Provides utilities for creating skeleton data, storing it in an ArrayBuffer, + * and sending it to a backend service. */ export class SkeletonWriter { /** - * Writes skeleton data including vertices, edges, and orientations to a binary file. + * Creates an ArrayBuffer with skeleton data, including vertices, edges, and orientations. * @static - * @param {Vertex[]} vertices - The list of vertices to write. + * @param {Vertex[]} vertices - The list of vertices to store. * @param {Edge[]} edges - The list of edges connecting the vertices. * @param {number[][]} orientations - The orientations of each vertex. - * @param {string} outputFilePath - The file path where the binary data will be written. + * @returns {ArrayBuffer} - The created ArrayBuffer containing the skeleton data. 
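 *
 * A minimal read-back sketch (assumes the little-endian layout written below;
 * shown for illustration only, not part of the datasource API):
 * @example
 * // const buf = SkeletonWriter.createArrayBuffer(vertices, edges, orientations);
 * // const dv = new DataView(buf);
 * // const numVertices = dv.getUint32(0, true); // little-endian vertex count
 * // const numEdges = dv.getUint32(4, true);    // little-endian edge count
 * // const firstX = dv.getFloat32(8, true);     // first vertex x (stored scaled by 1e6)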
*/ - static writeSkeleton(vertices: Vertex[], edges: Edge[], orientations: number[][], outputFilePath: string) { - fs.mkdirSync(path.dirname(outputFilePath), { recursive: true }); - + static createArrayBuffer(vertices: Vertex[], edges: Edge[], orientations: number[][]): ArrayBuffer { const vertexCount = vertices.length; const edgeCount = edges.length; const vertexSize = 12; // 3 floats (x, y, z), each 4 bytes const edgeSize = 8; // 2 uint32s (source and target), each 4 bytes const orientationSize = 12; // 3 floats (x, y, z) for orientations - const bufferSize = 4 + 4 + (vertexSize * vertexCount) + (edgeSize * edgeCount) + (orientationSize * vertexCount); - const buffer = Buffer.alloc(bufferSize); + const bufferSize = 4 + 4 + (vertexSize * vertexCount) + (edgeSize * edgeCount) + (orientationSize * vertexCount); + + // Create an ArrayBuffer and a DataView to manipulate it + const buffer = new ArrayBuffer(bufferSize); + const dataView = new DataView(buffer); let offset = 0; - buffer.writeUInt32LE(vertexCount, offset); // Number of vertices + // Write the number of vertices + dataView.setUint32(offset, vertexCount, true); offset += 4; - buffer.writeUInt32LE(edgeCount, offset); // Number of edges + + // Write the number of edges + dataView.setUint32(offset, edgeCount, true); offset += 4; // Write the vertices (3 floats per vertex: x, y, z) for (let i = 0; i < vertexCount; i++) { - buffer.writeFloatLE((vertices[i].x)*1E6, offset); - buffer.writeFloatLE(vertices[i].y*1E6, offset + 4); - buffer.writeFloatLE(vertices[i].z*1E6, offset + 8); + dataView.setFloat32(offset, vertices[i].x * 1E6, true); + dataView.setFloat32(offset + 4, vertices[i].y * 1E6, true); + dataView.setFloat32(offset + 8, vertices[i].z * 1E6, true); offset += 12; } // Write the edges (2 uint32 per edge: vertex1, vertex2) for (let i = 0; i < edgeCount; i++) { - buffer.writeUInt32LE(edges[i].vertex1, offset); - buffer.writeUInt32LE(edges[i].vertex2, offset + 4); + dataView.setUint32(offset, edges[i].vertex1, true); + dataView.setUint32(offset + 4, edges[i].vertex2, true); offset += 8; } - // Write the orientations (3 floats per vertex) + // Write the orientations (3 floats per vertex: x, y, z) for (let i = 0; i < vertexCount; i++) { - buffer.writeFloatLE(orientations[i][0], offset); - buffer.writeFloatLE(orientations[i][1], offset + 4); - buffer.writeFloatLE(orientations[i][2], offset + 8); + dataView.setFloat32(offset, orientations[i][0], true); + dataView.setFloat32(offset + 4, orientations[i][1], true); + dataView.setFloat32(offset + 8, orientations[i][2], true); offset += 12; } - fs.writeFileSync(outputFilePath, buffer); - console.log(`Skeleton written to ${outputFilePath}`); - } - - /** - * Writes metadata about the skeleton data structure to a JSON file. - * @static - * @param {string} infoFilePath - The file path where the skeleton info will be written. - */ - static writeSkeletonInfo(infoFilePath: string) { - fs.mkdirSync(path.dirname(infoFilePath), { recursive: true }); - - const skeletonInfo = { - "@type": "neuroglancer_skeletons", - "vertex_attributes": [ - { - "id": "orientation", - "data_type": "float32", - "num_components": 3, - }, - ], - "segment_properties": "prop", - }; - - fs.mkdirSync(path.dirname(infoFilePath), { recursive: true }); - - // Write the skeleton info to the specified path - fs.writeFileSync(infoFilePath, JSON.stringify(skeletonInfo, null, 2)); - console.log(`Skeleton info file written to ${infoFilePath}`); + return buffer; } /** - * Writes properties metadata for the skeleton to a JSON file. 
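 *
 * A minimal usage sketch (the endpoint mirrors the commented-out call in
 * trackProcessor and is only a placeholder; the receiving service is expected to
 * reply with JSON, since the response body is parsed with response.json()):
 * @example
 * // const buf = SkeletonWriter.createArrayBuffer(vertices, edges, orientations);
 * // await SkeletonWriter.sendArrayBufferToBackend(buf, 'http://127.0.0.1:8080/data');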
- * @static - * @param {string} propFilePath - The file path where the properties info will be written. + * Sends the ArrayBuffer containing skeleton data to the backend. + * @param {ArrayBuffer} buffer - The ArrayBuffer to send. + * @param {string} url - The URL of the backend endpoint. */ - - static writePropInfo(propFilePath: string) { - fs.mkdirSync(path.dirname(propFilePath), { recursive: true }); - - const propInfo = { - "@type": "neuroglancer_segment_properties", - "inline": { - "ids": ["1"], - "properties": [{ "id": "label", "type": "label", "values": ["1"] }] + static async sendArrayBufferToBackend(buffer: ArrayBuffer, url: string): Promise { + // try { + // const response = await axios.post(url, buffer, { + // headers: { + // 'Content-Type': 'application/octet-stream' + // } + // }); + // console.log("ArrayBuffer sent to backend successfully", response.data); + // } catch (error) { + // console.error("Error sending ArrayBuffer to backend", error); + // } + + try { + const response = await fetch(url, { + method: 'POST', + body: buffer, + headers: { + 'Content-Type': 'application/octet-stream', + } + }); + + if (!response.ok) { + throw new Error('Network response was not ok'); } - }; + + const responseData = await response.json(); + console.log("ArrayBuffer sent to backend successfully", responseData); + } catch (error) { + console.error("Error sending ArrayBuffer to backend", error); + } - // Write the prop info to the specified path - fs.writeFileSync(propFilePath, JSON.stringify(propInfo, null, 2)); - console.log(`Prop info file written to ${propFilePath}`); - } - /** - * Generates file paths for the binary, property, and skeleton info files based on a timestamp. - * TimeStamp is used for having unique filename. - * @static - * @param {string} outputDirectory - The output directory for the files. - * @param {string} timestamp - The timestamp used to format the file paths. - * @returns {{ binaryFilePath: string, propInfoFilePath: string, skeletonInfoFilePath: string }} - */ - static generateSkeletonFilePaths(outputDirectory: string, timestamp: string) { - - // Build the file paths with the formatted timestamp - const binaryFilePath = path.join(outputDirectory, 'tract', timestamp, '1'); // Binary file path - const propInfoFilePath = path.join(outputDirectory, 'tract', timestamp, 'prop', 'info'); // JSON file path - const skeletonInfoFilePath = path.join(outputDirectory, 'tract', timestamp, 'info'); // JSON file path - - return { - binaryFilePath, - propInfoFilePath, - skeletonInfoFilePath - }; } - /** - * Uploads a directory of files to AWS S3. - * @static - * @param {string} outputDirectory - The directory containing the files to upload. - * @param {string} timestamp - The timestamp used to organize the files in S3. 
- */ - // static async uploadSkeletonFilePathsToS3(outputDirectory: string, timestamp: string) { - // // Initialize the S3 client - // const s3Client = new S3Client({ - // region: process.env.AWS_REGION || 'us-east-2', - // }); - - // // Read the bucket name from environment variables - // const bucketName = process.env.BUCKET_NAME || 'linc-brain-mit-prod-us-east-2'; - - // // Check for required environment variables - // if (!process.env.AWS_REGION || !process.env.BUCKET_NAME) { - // console.error('AWS_REGION and BUCKET_NAME must be set in environment variables.'); - // return; - // } - - // // Define the local directory to upload - // const localDir = path.join(outputDirectory, 'tract', timestamp); - - // // Include the 'neuroglancer_trk/' prefix in the S3 destination path - // const s3DestinationPath = path.join('neuroglancer_trk', 'tract', timestamp).replace(/\\/g, '/'); - - // // Recursively upload all files in the local directory to S3 - // await SkeletonWriter.uploadDirectoryToS3(s3Client, bucketName, localDir, s3DestinationPath); - - // console.log('Uploaded generated files to S3.'); - // } - - /** - * Iteratively uploads all files from a local directory to an AWS S3 bucket. - * @static - * @param {S3Client} s3Client - The AWS S3 client used for the upload. - * @param {string} bucketName - The name of the S3 bucket. - * @param {string} localDirectory - The local directory containing the files to upload. - * @param {string} s3DestinationPath - The destination path in the S3 bucket. - */ - // static async uploadDirectoryToS3( - // s3Client: S3Client, - // bucketName: string, - // localDirectory: string, - // s3DestinationPath: string - // ) { - // const files = SkeletonWriter.getAllFilesInDirectory(localDirectory); - - // for (const filePath of files) { - // // Compute the relative path from the local directory - // const relativeFilePath = path.relative(localDirectory, filePath); - - // // Construct the S3 key by joining the destination path and relative file path (Hashmap) - // const s3Key = path.join(s3DestinationPath, relativeFilePath).replace(/\\/g, '/'); - - // try { - // const fileContent = fs.readFileSync(filePath); - - // const params = { - // Bucket: bucketName, - // Key: s3Key, - // Body: fileContent, - // }; - - // const command = new PutObjectCommand(params); - // await s3Client.send(command); - // console.log(`File uploaded successfully to s3://${bucketName}/${s3Key}`); - // } catch (error) { - // console.error(`Error uploading file ${filePath} to S3:`, error); - // } - // } - // } - - /** - * Interatively collects all file paths in a directory. - * @static - * @param {string} dir - The directory to scan. - * @returns {string[]} An array of file paths found in the directory. 
- */ - static getAllFilesInDirectory(dir: string): string[] { - let results: string[] = []; - - const list = fs.readdirSync(dir); - list.forEach((file) => { - const filePath = path.join(dir, file); - const stat = fs.statSync(filePath); - if (stat && stat.isDirectory()) { - // Recursively walk subdirectories - results = results.concat(SkeletonWriter.getAllFilesInDirectory(filePath)); - } else { - results.push(filePath); - } - }); - - return results; - } } - - diff --git a/src/datasource/trk/reader/trackProcessor.ts b/src/datasource/trk/reader/trackProcessor.ts index cad202163..6b66ffb44 100644 --- a/src/datasource/trk/reader/trackProcessor.ts +++ b/src/datasource/trk/reader/trackProcessor.ts @@ -1,10 +1,10 @@ + +import { Buffer } from 'buffer'; // import fs from 'fs'; // import path from 'path'; -import { Buffer } from 'buffer'; import axios from 'axios'; -import type { Vertex, Edge } from '#src/datasource/trk/reader/skeletonWriter.js'; -// import { SkeletonWriter } from '#src/datasource/trk/reader/skeletonWriter.js'; -import type { TrkHeader} from '#src/datasource/trk/reader/trkHeader.js'; +import { type Vertex, type Edge, SkeletonWriter } from '#src/datasource/trk/reader/skeletonWriter.js'; +import type { TrkHeader } from '#src/datasource/trk/reader/trkHeader.js'; import { TrkHeaderProcessor } from '#src/datasource/trk/reader/trkHeader.js'; import { VoxelToRASConverter } from '#src/datasource/trk/reader/voxelToRASConverter.js'; @@ -111,8 +111,8 @@ export class TrackProcessor { * @param {string} filePath - The file path of the TRK file. * @returns {Promise<{processState: ProcessState; timestamp: string}>} A promise that resolves to the processing state and a timestamp. */ - async processTrackData( randomTrackNumbers: number[], trackNumber: number, filePath: string): Promise<{ processState: ProcessState; timestamp: string }> { - + async processTrackData(randomTrackNumbers: number[], trackNumber: number, filePath: string): Promise<{ processState: ProcessState; timestamp: string, arrayBuffer?: ArrayBuffer }> { + // Get the current date and time const now = new Date(); @@ -122,12 +122,9 @@ export class TrackProcessor { if (!this.globalHeader) { console.error('Error: Global header is not initialized.'); - return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; } - // const outputFilePath = path.join(__dirname, 'track_data.txt'); - // const writeStream = fs.createWriteStream(outputFilePath, { flags: 'a' }); - const maxTracksToProcess = randomTrackNumbers.length; const vertices: Vertex[] = []; const edges: Edge[] = []; @@ -148,14 +145,12 @@ export class TrackProcessor { const n_points = dataView.getInt32(offset, true); // true indicates little-endian byte order. 
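// Track record layout, as handled below (inferred from these reads): each record is an
// int32 point count followed by n_points * 3 float32 coordinates, i.e. 12 bytes per
// point. Tracks outside the random selection are skipped with `offset += n_points * 12`;
// e.g. a 50-point track advances the cursor by 4 + 50 * 12 = 604 bytes in total.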
offset += 4; - // writeStream.write(`Track ${trackNumber} processed, number of points: ${n_points}\n`); - // console.log(`Track ${trackNumber} processed, number of points: ${n_points}\n`); // Only process the track if it is in the random track numbers if (randomTrackNumbers.includes(trackNumber)) { // Process each point in the track (x, y, z -> 12 bytes per point) - console.log(`Track ${trackNumber}`); + // console.log(`Track ${trackNumber}`); const points: number[][] = []; for (let i = 0; i < n_points; i++) { const x = dataView.getFloat32(offset, true); @@ -185,22 +180,29 @@ export class TrackProcessor { trackProcessedCount++; // Increment the number of processed tracks - // if (trackProcessedCount >= maxTracksToProcess) { - // const outputDirectory = path.resolve(__dirname, '..', 'src'); - // const { binaryFilePath, propInfoFilePath, skeletonInfoFilePath } = SkeletonWriter.generateSkeletonFilePaths(outputDirectory, timestamp); + if (trackProcessedCount >= maxTracksToProcess) { + // const outputDirectory = path.resolve(__dirname, '..', 'src'); + // const { binaryFilePath, propInfoFilePath, skeletonInfoFilePath } = SkeletonWriter.generateSkeletonFilePaths(outputDirectory, timestamp); + // SkeletonWriter.writeSkeleton(vertices, edges, orientations, binaryFilePath); + // SkeletonWriter.writePropInfo(propInfoFilePath); + // SkeletonWriter.writeSkeletonInfo(skeletonInfoFilePath); + // console.log(`Processed ${maxTracksToProcess} random tracks and wrote skeleton and info files.`); + // // SkeletonWriter.uploadSkeletonFilePathsToS3(outputDirectory, timestamp); + // console.log(`Uploaded tracks to S3.`) + - // SkeletonWriter.writeSkeleton(vertices, edges, orientations, binaryFilePath); - // SkeletonWriter.writePropInfo(propInfoFilePath); - // SkeletonWriter.writeSkeletonInfo(skeletonInfoFilePath); - // console.log(`Processed ${maxTracksToProcess} random tracks and wrote skeleton and info files.`); + // Create the ArrayBuffer + const arrayBuffer = SkeletonWriter.createArrayBuffer(vertices, edges, orientations); + console.log(arrayBuffer) + // Return the state, timestamp, and arrayBuffer + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp, arrayBuffer }; - // // SkeletonWriter.uploadSkeletonFilePathsToS3(outputDirectory, timestamp); - // console.log(`Uploaded tracks to S3.`) + // Send the ArrayBuffer to the backend + // SkeletonWriter.sendArrayBufferToBackend(arrayBuffer, 'http://127.0.0.1:8080/data'); - // break; - // } + } } else { offset += n_points * 12; // Skip the track data if it's not in the selected tracks } @@ -209,12 +211,12 @@ export class TrackProcessor { } // writeStream.end(); - return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; } catch (error) { console.error('Error fetching or processing track data:', error); - return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; + return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; } } @@ -241,21 +243,21 @@ export class TrackProcessor { */ loadFileBuffer(filePath: string) { // if (filePath.startsWith('http://') || filePath.startsWith('https://')) { - // Handle URL loading with axios - return axios.get(filePath, { responseType: 'arraybuffer' }) - .then(response => { - const buffer = Buffer.from(response.data); - const dataView = new DataView(buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength)); - console.log('Data loaded from URL 
successfully.'); - return { - dataView, - buffer - }; - }) - .catch(error => { - console.error('Failed to load file from URL:', error); - throw error; - }); + // Handle URL loading with axios + return axios.get(filePath, { responseType: 'arraybuffer' }) + .then(response => { + const buffer = Buffer.from(response.data); + const dataView = new DataView(buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength)); + console.log('Data loaded from URL successfully.'); + return { + dataView, + buffer + }; + }) + .catch(error => { + console.error('Failed to load file from URL:', error); + throw error; + }); // } else { // // Handle local file loading with fs // try { From 85d0aa9ed8543398b80c902d417f3f97ea3c562d Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Tue, 29 Oct 2024 19:47:51 -0400 Subject: [PATCH 07/12] code cleanup backend.ts --- src/datasource/trk/backend.ts | 376 +--------------------------------- 1 file changed, 1 insertion(+), 375 deletions(-) diff --git a/src/datasource/trk/backend.ts b/src/datasource/trk/backend.ts index 19c4c4532..5a1a587ca 100644 --- a/src/datasource/trk/backend.ts +++ b/src/datasource/trk/backend.ts @@ -15,344 +15,22 @@ */ - -import type { AxiosResponse } from 'axios'; -import axios from 'axios'; -// import { decodeGzip } from "#src/async_computation/decode_gzip_request.js"; -// import { requestAsyncComputation } from "#src/async_computation/request.js"; -// import type { Chunk, ChunkManager } from "#src/chunk_manager/backend.js"; import { WithParameters } from "#src/chunk_manager/backend.js"; -// import { GenericSharedDataSource } from "#src/chunk_manager/generic_file_source.js"; import { WithSharedCredentialsProviderCounterpart } from "#src/credentials_provider/shared_counterpart.js"; -// import type { ShardingParameters } from "#src/datasource/trk/base.js"; import { - // DataEncoding, - // ShardingHashFunction, SkeletonSourceParameters, } from "#src/datasource/trk/base.js"; import type { SkeletonChunk } from "#src/skeleton/backend.js"; import { SkeletonSource } from "#src/skeleton/backend.js"; import { decodeSkeletonChunk } from "#src/skeleton/decode_precomputed_skeleton.js"; -// import { fetchSpecialHttpByteRange } from "#src/util/byte_range_http_requests.js"; -// import type { CancellationToken } from "#src/util/cancellation.js"; -// import type { Borrowed } from "#src/util/disposable.js"; -// import { convertEndian32, Endianness } from "#src/util/endian.js"; -// import { murmurHash3_x86_128Hash64Bits } from "#src/util/hash.js"; -// import { -// isNotFoundError, -// responseArrayBuffer, -// } from "#src/util/http_request.js"; -// import { stableStringify } from "#src/util/json.js"; -// import { getObjectId } from "#src/util/object_id.js"; import type { SpecialProtocolCredentials, - // SpecialProtocolCredentialsProvider, } from "#src/util/special_protocol_request.js"; -// import { cancellableFetchSpecialOk } from "#src/util/special_protocol_request.js"; -// import { Uint64 } from "#src/util/uint64.js"; import { registerSharedObject } from "#src/worker_rpc.js"; console.log(import.meta.url); -// const shardingHashFunctions: Map void> = -// new Map([ -// [ -// ShardingHashFunction.MURMURHASH3_X86_128, -// (out) => { -// murmurHash3_x86_128Hash64Bits(out, 0, out.low, out.high); -// }, -// ], -// [ShardingHashFunction.IDENTITY, (_out) => { }], -// ]); - -// interface ShardInfo { -// shardUrl: string; -// offset: Uint64; -// } - -// interface DecodedMinishardIndex { -// data: Uint32Array; -// shardUrl: string; -// } - -// interface 
MinishardIndexSource -// extends GenericSharedDataSource { -// sharding: ShardingParameters; -// credentialsProvider: SpecialProtocolCredentialsProvider; -// } - -// function getMinishardIndexDataSource( -// chunkManager: Borrowed, -// credentialsProvider: SpecialProtocolCredentialsProvider, -// parameters: { url: string; sharding: ShardingParameters | undefined }, -// ): MinishardIndexSource | undefined { -// const { url, sharding } = parameters; -// if (sharding === undefined) return undefined; -// const source = GenericSharedDataSource.get< -// Uint64, -// DecodedMinishardIndex | undefined -// >( -// chunkManager, -// stableStringify({ -// type: "trk:shardedDataSource", -// url, -// sharding, -// credentialsProvider: getObjectId(credentialsProvider), -// }), -// { -// download: async ( -// shardAndMinishard: Uint64, -// cancellationToken: CancellationToken, -// ) => { -// const minishard = Uint64.lowMask(new Uint64(), sharding.minishardBits); -// Uint64.and(minishard, minishard, shardAndMinishard); -// const shard = Uint64.lowMask(new Uint64(), sharding.shardBits); -// const temp = new Uint64(); -// Uint64.rshift(temp, shardAndMinishard, sharding.minishardBits); -// Uint64.and(shard, shard, temp); -// const shardUrl = `${url}/${shard -// .toString(16) -// .padStart(Math.ceil(sharding.shardBits / 4), "0")}.shard`; -// // Retrive minishard index start/end offsets. -// const shardIndexSize = new Uint64(16); -// Uint64.lshift(shardIndexSize, shardIndexSize, sharding.minishardBits); - -// // Multiply minishard by 16. -// const shardIndexStart = Uint64.lshift(new Uint64(), minishard, 4); -// const shardIndexEnd = Uint64.addUint32( -// new Uint64(), -// shardIndexStart, -// 16, -// ); -// let shardIndexResponse: ArrayBuffer; -// try { -// shardIndexResponse = await fetchSpecialHttpByteRange( -// credentialsProvider, -// shardUrl, -// shardIndexStart, -// shardIndexEnd, -// cancellationToken, -// ); -// } catch (e) { -// if (isNotFoundError(e)) return { data: undefined, size: 0 }; -// throw e; -// } -// if (shardIndexResponse.byteLength !== 16) { -// throw new Error("Failed to retrieve minishard offset"); -// } -// const shardIndexDv = new DataView(shardIndexResponse); -// const minishardStartOffset = new Uint64( -// shardIndexDv.getUint32(0, /*littleEndian=*/ true), -// shardIndexDv.getUint32(4, /*littleEndian=*/ true), -// ); -// const minishardEndOffset = new Uint64( -// shardIndexDv.getUint32(8, /*littleEndian=*/ true), -// shardIndexDv.getUint32(12, /*littleEndian=*/ true), -// ); -// if (Uint64.equal(minishardStartOffset, minishardEndOffset)) { -// return { data: undefined, size: 0 }; -// } -// // The start/end offsets in the shard index are relative to the end of the shard -// // index. 
-// Uint64.add(minishardStartOffset, minishardStartOffset, shardIndexSize); -// Uint64.add(minishardEndOffset, minishardEndOffset, shardIndexSize); - -// let minishardIndexResponse = await fetchSpecialHttpByteRange( -// credentialsProvider, -// shardUrl, -// minishardStartOffset, -// minishardEndOffset, -// cancellationToken, -// ); -// if (sharding.minishardIndexEncoding === DataEncoding.GZIP) { -// minishardIndexResponse = ( -// await requestAsyncComputation( -// decodeGzip, -// cancellationToken, -// [minishardIndexResponse], -// new Uint8Array(minishardIndexResponse), -// ) -// ).buffer; -// } -// if (minishardIndexResponse.byteLength % 24 !== 0) { -// throw new Error( -// `Invalid minishard index length: ${minishardIndexResponse.byteLength}`, -// ); -// } -// const minishardIndex = new Uint32Array(minishardIndexResponse); -// convertEndian32(minishardIndex, Endianness.LITTLE); - -// const minishardIndexSize = minishardIndex.byteLength / 24; -// let prevEntryKeyLow = 0; -// let prevEntryKeyHigh = 0; -// // Offsets in the minishard index are relative to the end of the shard index. -// let prevStartLow = shardIndexSize.low; -// let prevStartHigh = shardIndexSize.high; -// for (let i = 0; i < minishardIndexSize; ++i) { -// let entryKeyLow = prevEntryKeyLow + minishardIndex[i * 2]; -// let entryKeyHigh = prevEntryKeyHigh + minishardIndex[i * 2 + 1]; -// if (entryKeyLow >= 4294967296) { -// entryKeyLow -= 4294967296; -// entryKeyHigh += 1; -// } -// prevEntryKeyLow = minishardIndex[i * 2] = entryKeyLow; -// prevEntryKeyHigh = minishardIndex[i * 2 + 1] = entryKeyHigh; -// let startLow = -// prevStartLow + minishardIndex[(minishardIndexSize + i) * 2]; -// let startHigh = -// prevStartHigh + minishardIndex[(minishardIndexSize + i) * 2 + 1]; -// if (startLow >= 4294967296) { -// startLow -= 4294967296; -// startHigh += 1; -// } -// minishardIndex[(minishardIndexSize + i) * 2] = startLow; -// minishardIndex[(minishardIndexSize + i) * 2 + 1] = startHigh; -// const sizeLow = minishardIndex[(2 * minishardIndexSize + i) * 2]; -// const sizeHigh = minishardIndex[(2 * minishardIndexSize + i) * 2 + 1]; -// let endLow = startLow + sizeLow; -// let endHigh = startHigh + sizeHigh; -// if (endLow >= 4294967296) { -// endLow -= 4294967296; -// endHigh += 1; -// } -// prevStartLow = endLow; -// prevStartHigh = endHigh; -// minishardIndex[(2 * minishardIndexSize + i) * 2] = endLow; -// minishardIndex[(2 * minishardIndexSize + i) * 2 + 1] = endHigh; -// } -// return { -// data: { data: minishardIndex, shardUrl }, -// size: minishardIndex.byteLength, -// }; -// }, -// encodeKey: (key: Uint64) => key.toString(), -// sourceQueueLevel: 1, -// }, -// ) as MinishardIndexSource; -// source.sharding = sharding; -// source.credentialsProvider = credentialsProvider; -// return source; -// } - -// function findMinishardEntry( -// minishardIndex: DecodedMinishardIndex, -// key: Uint64, -// ): { startOffset: Uint64; endOffset: Uint64 } | undefined { -// const minishardIndexData = minishardIndex.data; -// const minishardIndexSize = minishardIndexData.length / 6; -// const keyLow = key.low; -// const keyHigh = key.high; -// for (let i = 0; i < minishardIndexSize; ++i) { -// if ( -// minishardIndexData[i * 2] !== keyLow || -// minishardIndexData[i * 2 + 1] !== keyHigh -// ) { -// continue; -// } -// const startOffset = new Uint64( -// minishardIndexData[(minishardIndexSize + i) * 2], -// minishardIndexData[(minishardIndexSize + i) * 2 + 1], -// ); -// const endOffset = new Uint64( -// minishardIndexData[(2 * 
minishardIndexSize + i) * 2], -// minishardIndexData[(2 * minishardIndexSize + i) * 2 + 1], -// ); -// return { startOffset, endOffset }; -// } -// return undefined; -// } - -// async function getShardedData( -// minishardIndexSource: MinishardIndexSource, -// chunk: Chunk, -// key: Uint64, -// cancellationToken: CancellationToken, -// ): Promise<{ shardInfo: ShardInfo; data: ArrayBuffer } | undefined> { -// const { sharding } = minishardIndexSource; -// const hashFunction = shardingHashFunctions.get(sharding.hash)!; -// const hashCode = Uint64.rshift(new Uint64(), key, sharding.preshiftBits); -// hashFunction(hashCode); -// const shardAndMinishard = Uint64.lowMask( -// new Uint64(), -// sharding.minishardBits + sharding.shardBits, -// ); -// Uint64.and(shardAndMinishard, shardAndMinishard, hashCode); -// const getPriority = () => ({ -// priorityTier: chunk.priorityTier, -// priority: chunk.priority, -// }); -// const minishardIndex = await minishardIndexSource.getData( -// shardAndMinishard, -// getPriority, -// cancellationToken, -// ); -// if (minishardIndex === undefined) return undefined; -// const minishardEntry = findMinishardEntry(minishardIndex, key); -// if (minishardEntry === undefined) return undefined; -// const { startOffset, endOffset } = minishardEntry; -// let data = await fetchSpecialHttpByteRange( -// minishardIndexSource.credentialsProvider, -// minishardIndex.shardUrl, -// startOffset, -// endOffset, -// cancellationToken, -// ); -// if (minishardIndexSource.sharding.dataEncoding === DataEncoding.GZIP) { -// data = ( -// await requestAsyncComputation( -// decodeGzip, -// cancellationToken, -// [data], -// new Uint8Array(data), -// ) -// ).buffer; -// } -// return { -// data, -// shardInfo: { shardUrl: minishardIndex.shardUrl, offset: startOffset }, -// }; -// } - -// function getOrNotFoundError(v: T | undefined) { -// if (v === undefined) throw new Error("not found"); -// return v; -// } - - -// async function fetchByUint64( -// credentialsProvider: SpecialProtocolCredentialsProvider, -// url: string, -// chunk: Chunk, -// minishardIndexSource: MinishardIndexSource | undefined, -// id: Uint64, -// cancellationToken: CancellationToken, -// ) { -// if (minishardIndexSource === undefined) { -// try { -// return await cancellableFetchSpecialOk( -// credentialsProvider, -// `${url}/${id}`, -// {}, -// responseArrayBuffer, -// cancellationToken, -// ); -// } catch (e) { -// if (isNotFoundError(e)) return undefined; -// throw e; -// } -// } -// const result = await getShardedData( -// minishardIndexSource, -// chunk, -// id, -// cancellationToken, -// ); -// if (result === undefined) return undefined; -// return result.data; -// } - - @registerSharedObject() export class trkSkeletonSource extends WithParameters( WithSharedCredentialsProviderCounterpart()( @@ -360,63 +38,11 @@ export class trkSkeletonSource extends WithParameters( ), SkeletonSourceParameters, ) { - // private minishardIndexSource = getMinishardIndexDataSource( - // this.chunkManager, - // this.credentialsProvider, - // { url: this.parameters.url, sharding: this.parameters.metadata.sharding }, - // ); - async download(chunk: SkeletonChunk, - // cancellationToken: CancellationToken ) { const { parameters } = this; - // const response = getOrNotFoundError( - // await fetchByUint64( - // this.credentialsProvider, - // parameters.url, - // chunk, - // this.minishardIndexSource, - // chunk.objectId, - // cancellationToken, - // ), - // ); - - // console.log("reponse: ", response); - - - let response: AxiosResponse | 
null = null; - - try { - response = await axios.get("http://127.0.0.1:9123/Users/shrutiv/MyDocuments/GitHub/Neuroglancer-Tractography/src/tract/20241028_161202/1", - { responseType: 'arraybuffer' }); - - if (response && response.data) { - console.log("1: ", response.data); - - // Create DataView from response.data - const dv = new DataView(response.data); - - // Read the number of vertices and edges - const numVertices = dv.getUint32(0, true); - console.log(numVertices); - const numEdges = dv.getUint32(4, true); - console.log(numEdges); - } else { - throw new Error("No data received from response."); - } - - } catch (error) { - console.error('Error fetching data', error); - } - - // Only call decodeSkeletonChunk if response is not null - if (response !== null) { - console.log("Inside download()"); - decodeSkeletonChunk(chunk, response.data, parameters.metadata.vertexAttributes); - } else { - console.error("Cannot call decodeSkeletonChunk, response is null."); - } + decodeSkeletonChunk(chunk, parameters.skeletonBuffer, parameters.metadata.vertexAttributes); } } From 6ceb844d0092f611639583ca9aee9fb52c0426f1 Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Tue, 29 Oct 2024 19:48:12 -0400 Subject: [PATCH 08/12] remove local file handling --- src/datasource/trk/reader/trackProcessor.ts | 19 +------------------ 1 file changed, 1 insertion(+), 18 deletions(-) diff --git a/src/datasource/trk/reader/trackProcessor.ts b/src/datasource/trk/reader/trackProcessor.ts index 6b66ffb44..e25a98221 100644 --- a/src/datasource/trk/reader/trackProcessor.ts +++ b/src/datasource/trk/reader/trackProcessor.ts @@ -242,8 +242,7 @@ export class TrackProcessor { * @returns {Promise<{dataView: DataView; buffer: Buffer}>} A promise that resolves to the DataView and buffer of the file. 
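 *
 * A minimal usage sketch (the URL is a placeholder; only HTTP(S) sources are handled
 * here once the local-file branch is removed):
 * @example
 * // const processor = new TrackProcessor();
 * // const { dataView, buffer } = await processor.loadFileBuffer('https://example.com/tracks.trk');
 * // console.log(dataView.byteLength === buffer.length); // same underlying bytes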
*/ loadFileBuffer(filePath: string) { - // if (filePath.startsWith('http://') || filePath.startsWith('https://')) { - // Handle URL loading with axios + return axios.get(filePath, { responseType: 'arraybuffer' }) .then(response => { const buffer = Buffer.from(response.data); @@ -258,22 +257,6 @@ export class TrackProcessor { console.error('Failed to load file from URL:', error); throw error; }); - // } else { - // // Handle local file loading with fs - // try { - // const absolutePath = path.resolve(filePath); - // const buffer = fs.readFileSync(absolutePath); - // const dataView = new DataView(buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength)); - // console.log('Data loaded from local file successfully.'); - // return { - // dataView, - // buffer - // }; - // } catch (error) { - // console.error('Failed to load local file:', error); - // throw error; - // } - // } } } From 6eae4fd664116b29910730bb846d60b44d21fc94 Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Tue, 29 Oct 2024 19:49:17 -0400 Subject: [PATCH 09/12] remove local file handling and writing skeleton metadata --- src/datasource/trk/reader/trackProcessor.ts | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/src/datasource/trk/reader/trackProcessor.ts b/src/datasource/trk/reader/trackProcessor.ts index e25a98221..ad3ebec8a 100644 --- a/src/datasource/trk/reader/trackProcessor.ts +++ b/src/datasource/trk/reader/trackProcessor.ts @@ -1,7 +1,5 @@ import { Buffer } from 'buffer'; -// import fs from 'fs'; -// import path from 'path'; import axios from 'axios'; import { type Vertex, type Edge, SkeletonWriter } from '#src/datasource/trk/reader/skeletonWriter.js'; import type { TrkHeader } from '#src/datasource/trk/reader/trkHeader.js'; @@ -150,7 +148,6 @@ export class TrackProcessor { // Only process the track if it is in the random track numbers if (randomTrackNumbers.includes(trackNumber)) { // Process each point in the track (x, y, z -> 12 bytes per point) - // console.log(`Track ${trackNumber}`); const points: number[][] = []; for (let i = 0; i < n_points; i++) { const x = dataView.getFloat32(offset, true); @@ -181,16 +178,6 @@ export class TrackProcessor { trackProcessedCount++; // Increment the number of processed tracks if (trackProcessedCount >= maxTracksToProcess) { - // const outputDirectory = path.resolve(__dirname, '..', 'src'); - // const { binaryFilePath, propInfoFilePath, skeletonInfoFilePath } = SkeletonWriter.generateSkeletonFilePaths(outputDirectory, timestamp); - // SkeletonWriter.writeSkeleton(vertices, edges, orientations, binaryFilePath); - // SkeletonWriter.writePropInfo(propInfoFilePath); - // SkeletonWriter.writeSkeletonInfo(skeletonInfoFilePath); - // console.log(`Processed ${maxTracksToProcess} random tracks and wrote skeleton and info files.`); - // // SkeletonWriter.uploadSkeletonFilePathsToS3(outputDirectory, timestamp); - // console.log(`Uploaded tracks to S3.`) - - // Create the ArrayBuffer const arrayBuffer = SkeletonWriter.createArrayBuffer(vertices, edges, orientations); @@ -198,10 +185,6 @@ export class TrackProcessor { // Return the state, timestamp, and arrayBuffer return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp, arrayBuffer }; - - // Send the ArrayBuffer to the backend - // SkeletonWriter.sendArrayBufferToBackend(arrayBuffer, 'http://127.0.0.1:8080/data'); - } } else { offset += n_points * 12; // Skip the track data if it's not in the selected tracks From 46345b81742106c74cea417e899a4bac683485e4 Mon Sep 17 00:00:00 2001 
From: Shruti Varade Date: Wed, 30 Oct 2024 20:44:32 -0400 Subject: [PATCH 10/12] Heat scale done --- src/datasource/trk/frontend.ts | 64 +++++++++++------ src/datasource/trk/reader/color.txt | 47 ++++++------ src/datasource/trk/reader/skeletonWriter.ts | 58 +++------------ src/datasource/trk/reader/trackProcessor.ts | 79 ++++++++++++--------- src/skeleton/decode_precomputed_skeleton.ts | 8 +-- 5 files changed, 131 insertions(+), 125 deletions(-) diff --git a/src/datasource/trk/frontend.ts b/src/datasource/trk/frontend.ts index 9ce037da1..10ba6f311 100644 --- a/src/datasource/trk/frontend.ts +++ b/src/datasource/trk/frontend.ts @@ -85,7 +85,6 @@ class trkSkeletonSource extends WithParameters( return false; } get vertexAttributes() { - console.log(this.parameters.metadata.vertexAttributes); return this.parameters.metadata.vertexAttributes; } @@ -163,6 +162,7 @@ function parseSkeletonMetadata(data: any): ParsedSkeletonMetadata { async function getSkeletonMetadata(): Promise { const metadata = await getMetadata(); + console.log(metadata) return parseSkeletonMetadata(metadata); } @@ -179,8 +179,9 @@ async function getSkeletonSource( credentialsProvider: SpecialProtocolCredentialsProvider, url: string, ) { + + const skeletonBuffer = await getSkeletonBuffer(url); const { metadata, segmentPropertyMap } = await getSkeletonMetadata(); - const skeletonBuffer = await getSkeletonBuffer(url) ?? new ArrayBuffer(0); return { source: chunkManager.getChunkSource(trkSkeletonSource, { @@ -196,20 +197,40 @@ async function getSkeletonSource( }; } +let globalHeader: any = null; + function getMetadata() { + // Start with the default vertex attributes + const vertexAttributes = [ + { + "id": "orientation", + "data_type": "float32", + "num_components": 3 + } + ]; + + // Check if scalars are present in the globalHeader and add them as vertex attributes + if (globalHeader && globalHeader.scalar_name) { + for (const scalarName of globalHeader.scalar_name) { + if(scalarName != ''){ + vertexAttributes.push({ + "id": scalarName, // Use the scalar name as the ID + "data_type": "float32", // Assuming the scalar data type is float32 + "num_components": 1 // Each scalar is a single component + }); + } + + } + } + return { "@type": "neuroglancer_skeletons", - "vertex_attributes": [ - { - "id": "orientation", - "data_type": "float32", - "num_components": 3 - } - ], + "vertex_attributes": vertexAttributes, "segment_properties": "prop" }; } + function getPropMetadata() { return { "@type": "neuroglancer_segment_properties", @@ -230,31 +251,33 @@ function getPropMetadata() { }; } -async function getSkeletonBuffer(url: string) { - +async function getSkeletonBuffer(url: string): Promise { const trackProcessor = new TrackProcessor(); - await trackProcessor.streamAndProcessHeader(url, 0, 999); - // await trackProcessor.streamAndProcessHeader(url); + if (!trackProcessor.globalHeader) { console.error('Error: Failed to fetch or process the TRK header.'); - + return new ArrayBuffer(0); } - const totalTracks = trackProcessor.globalHeader?.n_count; + // Set globalHeader and process tracks + globalHeader = trackProcessor.globalHeader; + console.log(globalHeader); + + const totalTracks = globalHeader?.n_count; if (totalTracks !== undefined) { const randomTrackNumbers = trackProcessor.getRandomTrackIndices(totalTracks, 1000); - const skeleton = await trackProcessor.processTrackData(randomTrackNumbers, 1, url); - console.log(skeleton.arrayBuffer); - return skeleton.arrayBuffer; + // Process track data and get the skeleton data in arrayBuffer 
format + const skeleton = await trackProcessor.processTrackData(randomTrackNumbers, 1, url); + return skeleton.arrayBuffer || new ArrayBuffer(0); // Resolves only after processing all tracks } else { console.error("totalTracks is undefined. Cannot proceed."); - return new ArrayBuffer(0) + return new ArrayBuffer(0); } - } + async function getSkeletonsDataSource( options: GetDataSourceOptions, credentialsProvider: SpecialProtocolCredentialsProvider, @@ -504,6 +527,7 @@ export class TrkDataSource extends DataSourceProvider { let metadata: any; try { metadata = await getMetadata(); + console.log(metadata) } catch (e) { throw new Error(`Failed to get metadata for ${url}: ${e}`); } diff --git a/src/datasource/trk/reader/color.txt b/src/datasource/trk/reader/color.txt index fe9b692c6..d093e69ea 100644 --- a/src/datasource/trk/reader/color.txt +++ b/src/datasource/trk/reader/color.txt @@ -1,11 +1,3 @@ -/** - * Converts a 3D orientation vector into a color map based on the absolute values of its components. - * Each component of the orientation vector corresponds to a color channel (R, G, B), - * which is then clamped between 0.0 and 1.0 to ensure valid color values. - * - * @param {vec3} orient - The orientation vector whose components are used to determine the color. - * @returns {vec3} A color vector where each component is derived from the corresponding component of the orientation vector. - */ vec3 colormapOrient(vec3 orient){ vec3 result; @@ -13,22 +5,35 @@ vec3 colormapOrient(vec3 orient){ result.g = abs(orient[1]); result.b = abs(orient[2]); return clamp(result, 0.0, 1.0); - - } -/** - * Main rendering function for a shader that adjusts the output color based on the orientation. - * The function checks if the orientation-based coloring is enabled (through a UI control), - * and if so, it uses the `colormapOrient` function to determine the color based on the orientation. - * If not enabled, it emits a default color or style. - */ -#uicontrol bool orient_color checkbox(default=true) + +vec3 colormapHeat(float scalar) { + // The scalar is already normalized to the 0 to 1 range + float value = clamp(scalar, 0.0, 1.0); // Ensure value is clamped to 0-1 just in case + + // Define the colors for the gradient + vec3 red = vec3(1.0, 0.0, 0.0); // Red at the lower end (0) + vec3 yellow = vec3(1.0, 1.0, 0.0); // Yellow at the upper end (1) + + // Interpolate between red and yellow + vec3 color = mix(red, yellow, value); + + return color; +} + + +#uicontrol bool heatmap checkbox(default=true) +#uicontrol bool orient_color checkbox(default=false) + void main() { if(orient_color){ - emitRGB(colormapOrient(orientation)); + emitRGB(colormapOrient(orientation)); } - else{ - emitDefault(); + else if (heatmap) { + emitRGB(colormapHeat(FA)); // Use the colormapHeat function with normalized FA scalar + } else { + emitDefault(); // Default color if the heatmap is not enabled } -} \ No newline at end of file +} + diff --git a/src/datasource/trk/reader/skeletonWriter.ts b/src/datasource/trk/reader/skeletonWriter.ts index fed95672f..f0981e4dd 100644 --- a/src/datasource/trk/reader/skeletonWriter.ts +++ b/src/datasource/trk/reader/skeletonWriter.ts @@ -33,30 +33,26 @@ export class SkeletonWriter { * @param {number[][]} orientations - The orientations of each vertex. * @returns {ArrayBuffer} - The created ArrayBuffer containing the skeleton data. 
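+ * @param {{ [key: string]: number }[]} scalarsArray - Per-vertex scalar values keyed by scalar name; each vertex's values are appended after the orientation block as little-endian float32, in the same key order for every vertex (inferred from the write loop below).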
*/ - static createArrayBuffer(vertices: Vertex[], edges: Edge[], orientations: number[][]): ArrayBuffer { + static createArrayBuffer(vertices: Vertex[], edges: Edge[], orientations: number[][], scalarsArray: { [key: string]: number }[]): ArrayBuffer { const vertexCount = vertices.length; const edgeCount = edges.length; const vertexSize = 12; // 3 floats (x, y, z), each 4 bytes const edgeSize = 8; // 2 uint32s (source and target), each 4 bytes const orientationSize = 12; // 3 floats (x, y, z) for orientations + const scalarSize = scalarsArray.length > 0 ? 4 * Object.keys(scalarsArray[0]).length : 0; + + const bufferSize = 4 + 4 + (vertexSize * vertexCount) + (edgeSize * edgeCount) + (orientationSize * vertexCount) + scalarSize * vertexCount; - const bufferSize = 4 + 4 + (vertexSize * vertexCount) + (edgeSize * edgeCount) + (orientationSize * vertexCount); - - // Create an ArrayBuffer and a DataView to manipulate it const buffer = new ArrayBuffer(bufferSize); const dataView = new DataView(buffer); let offset = 0; - // Write the number of vertices dataView.setUint32(offset, vertexCount, true); offset += 4; - - // Write the number of edges dataView.setUint32(offset, edgeCount, true); offset += 4; - // Write the vertices (3 floats per vertex: x, y, z) for (let i = 0; i < vertexCount; i++) { dataView.setFloat32(offset, vertices[i].x * 1E6, true); dataView.setFloat32(offset + 4, vertices[i].y * 1E6, true); @@ -64,14 +60,12 @@ export class SkeletonWriter { offset += 12; } - // Write the edges (2 uint32 per edge: vertex1, vertex2) for (let i = 0; i < edgeCount; i++) { dataView.setUint32(offset, edges[i].vertex1, true); dataView.setUint32(offset + 4, edges[i].vertex2, true); offset += 8; } - // Write the orientations (3 floats per vertex: x, y, z) for (let i = 0; i < vertexCount; i++) { dataView.setFloat32(offset, orientations[i][0], true); dataView.setFloat32(offset + 4, orientations[i][1], true); @@ -79,46 +73,16 @@ export class SkeletonWriter { offset += 12; } - return buffer; - } - - /** - * Sends the ArrayBuffer containing skeleton data to the backend. - * @param {ArrayBuffer} buffer - The ArrayBuffer to send. - * @param {string} url - The URL of the backend endpoint. 
- */ - static async sendArrayBufferToBackend(buffer: ArrayBuffer, url: string): Promise { - // try { - // const response = await axios.post(url, buffer, { - // headers: { - // 'Content-Type': 'application/octet-stream' - // } - // }); - // console.log("ArrayBuffer sent to backend successfully", response.data); - // } catch (error) { - // console.error("Error sending ArrayBuffer to backend", error); - // } - - try { - const response = await fetch(url, { - method: 'POST', - body: buffer, - headers: { - 'Content-Type': 'application/octet-stream', - } + for (let i = 0; i < vertexCount; i++) { + const scalarValues = scalarsArray[i]; + Object.values(scalarValues).forEach(scalar => { + dataView.setFloat32(offset, scalar, true); + offset += 4; }); - - if (!response.ok) { - throw new Error('Network response was not ok'); - } - - const responseData = await response.json(); - console.log("ArrayBuffer sent to backend successfully", responseData); - } catch (error) { - console.error("Error sending ArrayBuffer to backend", error); } - + return buffer; } + } diff --git a/src/datasource/trk/reader/trackProcessor.ts b/src/datasource/trk/reader/trackProcessor.ts index ad3ebec8a..c22c3c74e 100644 --- a/src/datasource/trk/reader/trackProcessor.ts +++ b/src/datasource/trk/reader/trackProcessor.ts @@ -54,7 +54,7 @@ export class TrackProcessor { }); const buffer = Buffer.from(response.data); this.globalHeader = TrkHeaderProcessor.readTrkHeader(buffer); - TrkHeaderProcessor.printTrkHeader(this.globalHeader); + // TrkHeaderProcessor.printTrkHeader(this.globalHeader); } catch (error) { console.error('Error streaming or processing the TRK file header:', error); } @@ -110,14 +110,9 @@ export class TrackProcessor { * @returns {Promise<{processState: ProcessState; timestamp: string}>} A promise that resolves to the processing state and a timestamp. */ async processTrackData(randomTrackNumbers: number[], trackNumber: number, filePath: string): Promise<{ processState: ProcessState; timestamp: string, arrayBuffer?: ArrayBuffer }> { - - // Get the current date and time const now = new Date(); - - // Format the timestamp as YYYYMMDD_HHMMSS const timestamp = now.toISOString().replace(/[-:]/g, '').replace('T', '_').slice(0, 15); - if (!this.globalHeader) { console.error('Error: Global header is not initialized.'); return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; @@ -127,27 +122,24 @@ export class TrackProcessor { const vertices: Vertex[] = []; const edges: Edge[] = []; const orientations: number[][] = []; + const scalarsArray: { [key: string]: number }[] = []; let trackProcessedCount = 0; let vertexIndex = 0; try { - const { dataView, buffer } = await this.loadFileBuffer(filePath); - console.log('Buffer length:', buffer.length); - console.log('DataView length:', dataView.byteLength); - let offset = 1000; + const numScalarsPerPoint = this.globalHeader.n_scalars || 0; + const scalarNames = this.globalHeader.scalar_name || []; + let minScalar = Infinity; + let maxScalar = -Infinity; + while (trackProcessedCount < maxTracksToProcess && offset < buffer.byteLength) { - // Read the number of points in the track (first 4 bytes) - const n_points = dataView.getInt32(offset, true); // true indicates little-endian byte order. 
+ const n_points = dataView.getInt32(offset, true); offset += 4; - // console.log(`Track ${trackNumber} processed, number of points: ${n_points}\n`); - - // Only process the track if it is in the random track numbers if (randomTrackNumbers.includes(trackNumber)) { - // Process each point in the track (x, y, z -> 12 bytes per point) const points: number[][] = []; for (let i = 0; i < n_points; i++) { const x = dataView.getFloat32(offset, true); @@ -157,53 +149,74 @@ export class TrackProcessor { points.push([x, y, z]); const voxelPoint: [number, number, number] = [x, y, z]; - const affine = - VoxelToRASConverter.getAffineToRasmm(this.globalHeader); + const affine = VoxelToRASConverter.getAffineToRasmm(this.globalHeader); const rasPoint = VoxelToRASConverter.applyAffineMatrix(voxelPoint, affine); - // Add vertex data vertices.push({ x: rasPoint[0], y: rasPoint[1], z: rasPoint[2] }); - // Add edge data if (i > 0) { edges.push({ vertex1: vertexIndex - 1, vertex2: vertexIndex }); } vertexIndex++; + + const scalars: number[] = []; + const normalizedScalars: number[] = []; + + if (numScalarsPerPoint > 0) { + for (let s = 0; s < numScalarsPerPoint; s++) { + const scalarValue = dataView.getFloat32(offset, true); + scalars.push(scalarValue); + offset += 4; + + // Update the min and max scalar values + if (scalarValue < minScalar) minScalar = scalarValue; + if (scalarValue > maxScalar) maxScalar = scalarValue; + } + + // Normalize each scalar against the running min/max observed so far + for (const scalar of scalars) { + const normalizedScalar = (scalar - minScalar) / ((maxScalar - minScalar) || 1); // fall back to 1 so the first point does not divide by zero + normalizedScalars.push(normalizedScalar); + } + + scalarsArray.push( + normalizedScalars.reduce((acc, scalar, idx) => { + acc[scalarNames[idx] || `scalar${idx + 1}`] = scalar; + return acc; + }, {} as { [key: string]: number }) + ); + } + + } - // Compute and add orientation for the tract const orient = TrackProcessor.computeOrientation(points); orientations.push(...orient); - trackProcessedCount++; // Increment the number of processed tracks + trackProcessedCount++; if (trackProcessedCount >= maxTracksToProcess) { - - // Create the ArrayBuffer - const arrayBuffer = SkeletonWriter.createArrayBuffer(vertices, edges, orientations); - console.log(arrayBuffer) - // Return the state, timestamp, and arrayBuffer + // After processing, log the min and max values + console.log(`Scalar range: min = ${minScalar}, max = ${maxScalar}`); + const arrayBuffer = SkeletonWriter.createArrayBuffer(vertices, edges, orientations, scalarsArray); return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp, arrayBuffer }; - } } else { - offset += n_points * 12; // Skip the track data if it's not in the selected tracks + offset += n_points * (12 + numScalarsPerPoint * 4); } trackNumber++; } - // writeStream.end(); return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; } catch (error) { - console.error('Error fetching or processing track data:', error); return { processState: { byteOffset: 0, trackNumber, offset: 0 }, timestamp }; - } } + /** * Shuffles and selects a random number of track indices from a total number of tracks. * @param {number} totalTracks - The total number of tracks available. @@ -225,7 +238,7 @@ export class TrackProcessor { * @returns {Promise<{dataView: DataView; buffer: Buffer}>} A promise that resolves to the DataView and buffer of the file.
*/ loadFileBuffer(filePath: string) { - + return axios.get(filePath, { responseType: 'arraybuffer' }) .then(response => { const buffer = Buffer.from(response.data); diff --git a/src/skeleton/decode_precomputed_skeleton.ts b/src/skeleton/decode_precomputed_skeleton.ts index c8cb0eeaf..764efc48a 100644 --- a/src/skeleton/decode_precomputed_skeleton.ts +++ b/src/skeleton/decode_precomputed_skeleton.ts @@ -24,7 +24,7 @@ import { Endianness, } from "#src/util/endian.js"; -console.log(import.meta.url); +// console.log(import.meta.url); export function decodeSkeletonChunk( chunk: SkeletonChunk, @@ -32,13 +32,13 @@ export function decodeSkeletonChunk( vertexAttributes: Map, ) { - console.log("start of decode vertices and edges") + // console.log("start of decode vertices and edges") const dv = new DataView(response); const numVertices = dv.getUint32(0, true); - console.log(numVertices); + // console.log(numVertices); const numEdges = dv.getUint32(4, true); - console.log(numEdges); + // console.log(numEdges); const vertexPositionsStartOffset = 8; From 388a618bb03d44341cd53d34e0c23710f80f09ed Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Thu, 7 Nov 2024 19:29:17 -0500 Subject: [PATCH 11/12] colorHeat map fixed --- src/datasource/trk/frontend.ts | 17 ++++----- src/datasource/trk/reader/color.txt | 56 ++++++++++++++--------------- 2 files changed, 37 insertions(+), 36 deletions(-) diff --git a/src/datasource/trk/frontend.ts b/src/datasource/trk/frontend.ts index 10ba6f311..824684378 100644 --- a/src/datasource/trk/frontend.ts +++ b/src/datasource/trk/frontend.ts @@ -209,17 +209,17 @@ function getMetadata() { } ]; - // Check if scalars are present in the globalHeader and add them as vertex attributes - if (globalHeader && globalHeader.scalar_name) { - for (const scalarName of globalHeader.scalar_name) { - if(scalarName != ''){ + // Check if globalHeader, globalHeader.n_scalars, and scalar_name are present + if (globalHeader && globalHeader.n_scalars && globalHeader.scalar_name) { + for (let i = 0; i < globalHeader.n_scalars; i++) { + const scalarName = globalHeader.scalar_name[i]; + if (scalarName && scalarName !== '') { // Ensure scalarName is valid and not empty vertexAttributes.push({ "id": scalarName, // Use the scalar name as the ID - "data_type": "float32", // Assuming the scalar data type is float32 - "num_components": 1 // Each scalar is a single component + "data_type": "float32", // Assuming the scalar data type is float32 + "num_components": 1 // Each scalar is a single component }); } - } } @@ -251,6 +251,7 @@ function getPropMetadata() { }; } +const n_tracks = 1000; async function getSkeletonBuffer(url: string): Promise { const trackProcessor = new TrackProcessor(); await trackProcessor.streamAndProcessHeader(url, 0, 999); @@ -266,7 +267,7 @@ async function getSkeletonBuffer(url: string): Promise { const totalTracks = globalHeader?.n_count; if (totalTracks !== undefined) { - const randomTrackNumbers = trackProcessor.getRandomTrackIndices(totalTracks, 1000); + const randomTrackNumbers = trackProcessor.getRandomTrackIndices(totalTracks, n_tracks); // Process track data and get the skeleton data in arrayBuffer format const skeleton = await trackProcessor.processTrackData(randomTrackNumbers, 1, url); diff --git a/src/datasource/trk/reader/color.txt b/src/datasource/trk/reader/color.txt index d093e69ea..6b51aba29 100644 --- a/src/datasource/trk/reader/color.txt +++ b/src/datasource/trk/reader/color.txt @@ -1,39 +1,39 @@ - -vec3 colormapOrient(vec3 orient){ - vec3 result; - result.r =
abs(orient[0]); - result.g = abs(orient[1]); - result.b = abs(orient[2]); - return clamp(result, 0.0, 1.0); +vec3 colormapOrient(vec3 orient) { + vec3 result; + result.r = abs(orient[0]); + result.g = abs(orient[1]); + result.b = abs(orient[2]); + return clamp(result, 0.0, 1.0); } +vec3 colormapHeat(float scalar, float min, float max) { + float value = (clamp(scalar, min, max) - min) / (max - min + 1e-5); + //float value = clamp(scalar, 0.0, 1.22) / 1.22; // Ensure scalar is between 0 and 1 + vec3 color; -vec3 colormapHeat(float scalar) { - // The scalar is already normalized to the 0 to 1 range - float value = clamp(scalar, 0.0, 1.0); // Ensure value is clamped to 0-1 just in case - - // Define the colors for the gradient - vec3 red = vec3(1.0, 0.0, 0.0); // Red at the lower end (0) - vec3 yellow = vec3(1.0, 1.0, 0.0); // Yellow at the upper end (1) - - // Interpolate between red and yellow - vec3 color = mix(red, yellow, value); + if (value < 0.33) { + color = mix(vec3(1.0, 0.0, 0.0), vec3(1.0, 0.5, 0.0), value / 0.33); // Red to Orange + } else if (value < 0.66) { + color = mix(vec3(1.0, 0.5, 0.0), vec3(1.0, 1.0, 0.0), (value - 0.33) / 0.33); // Orange to Yellow + } else { + color = vec3(1.0, 1.0, 0.0); // Solid Yellow + } return color; } - -#uicontrol bool heatmap checkbox(default=true) #uicontrol bool orient_color checkbox(default=false) - +#uicontrol bool heatmap checkbox(default=true) +#uicontrol float vmin slider(min=0.0, max=1.22, default=0.0) +#uicontrol float vmax slider(min=0.0, max=1.22, default=1.22) + void main() { - if(orient_color){ - emitRGB(colormapOrient(orientation)); - } - else if (heatmap) { - emitRGB(colormapHeat(FA)); // Use the colormapHeat function with normalized FA scalar - } else { - emitDefault(); // Default color if the heatmap is not enabled - } + if (orient_color) { + emitRGB(colormapOrient(orientation)); + } else if (heatmap) { + emitRGB(colormapHeat(FA, vmin, vmax)); + } else { + emitDefault(); // Default color if neither heatmap nor orientation color is enabled + } } From 5a5e3997d9b3d06e164984d21b2d694f18ac061c Mon Sep 17 00:00:00 2001 From: Shruti Varade Date: Thu, 7 Nov 2024 20:31:58 -0500 Subject: [PATCH 12/12] code cleanup --- src/datasource/trk/backup/skeletonWriter.ts | 254 -------------------- src/datasource/trk/frontend.ts | 13 - src/skeleton/decode_precomputed_skeleton.ts | 6 - 3 files changed, 273 deletions(-) delete mode 100644 src/datasource/trk/backup/skeletonWriter.ts diff --git a/src/datasource/trk/backup/skeletonWriter.ts b/src/datasource/trk/backup/skeletonWriter.ts deleted file mode 100644 index b04e1ea2a..000000000 --- a/src/datasource/trk/backup/skeletonWriter.ts +++ /dev/null @@ -1,254 +0,0 @@ -import fs from 'fs'; -import path from 'path'; - -// import { S3Client, PutObjectCommand } from '@aws-sdk/client-s3'; - -/** - * Represents a 3D vertex with coordinates. - * @interface - */ -export interface Vertex { - x: number; - y: number; - z: number; -} - -/** - * Represents an edge connecting two vertices by their indices. - * @interface - */ -export interface Edge { - vertex1: number; - vertex2: number; -} - -/** - * Provides utilities for writing skeleton data to files and uploading them to AWS S3. - */ -export class SkeletonWriter { - - /** - * Writes skeleton data including vertices, edges, and orientations to a binary file. - * @static - * @param {Vertex[]} vertices - The list of vertices to write. - * @param {Edge[]} edges - The list of edges connecting the vertices. 
- * @param {number[][]} orientations - The orientations of each vertex. - * @param {string} outputFilePath - The file path where the binary data will be written. - */ - static writeSkeleton(vertices: Vertex[], edges: Edge[], orientations: number[][], outputFilePath: string) { - fs.mkdirSync(path.dirname(outputFilePath), { recursive: true }); - - const vertexCount = vertices.length; - const edgeCount = edges.length; - - const vertexSize = 12; // 3 floats (x, y, z), each 4 bytes - const edgeSize = 8; // 2 uint32s (source and target), each 4 bytes - const orientationSize = 12; // 3 floats (x, y, z) for orientations - const bufferSize = 4 + 4 + (vertexSize * vertexCount) + (edgeSize * edgeCount) + (orientationSize * vertexCount); - - const buffer = Buffer.alloc(bufferSize); - let offset = 0; - - buffer.writeUInt32LE(vertexCount, offset); // Number of vertices - offset += 4; - buffer.writeUInt32LE(edgeCount, offset); // Number of edges - offset += 4; - - // Write the vertices (3 floats per vertex: x, y, z) - for (let i = 0; i < vertexCount; i++) { - buffer.writeFloatLE((vertices[i].x)*1E6, offset); - buffer.writeFloatLE(vertices[i].y*1E6, offset + 4); - buffer.writeFloatLE(vertices[i].z*1E6, offset + 8); - offset += 12; - } - - // Write the edges (2 uint32 per edge: vertex1, vertex2) - for (let i = 0; i < edgeCount; i++) { - buffer.writeUInt32LE(edges[i].vertex1, offset); - buffer.writeUInt32LE(edges[i].vertex2, offset + 4); - offset += 8; - } - - // Write the orientations (3 floats per vertex) - for (let i = 0; i < vertexCount; i++) { - buffer.writeFloatLE(orientations[i][0], offset); - buffer.writeFloatLE(orientations[i][1], offset + 4); - buffer.writeFloatLE(orientations[i][2], offset + 8); - offset += 12; - } - - fs.writeFileSync(outputFilePath, buffer); - console.log(`Skeleton written to ${outputFilePath}`); - } - - /** - * Writes metadata about the skeleton data structure to a JSON file. - * @static - * @param {string} infoFilePath - The file path where the skeleton info will be written. - */ - static writeSkeletonInfo(infoFilePath: string) { - fs.mkdirSync(path.dirname(infoFilePath), { recursive: true }); - - const skeletonInfo = { - "@type": "neuroglancer_skeletons", - "vertex_attributes": [ - { - "id": "orientation", - "data_type": "float32", - "num_components": 3, - }, - ], - "segment_properties": "prop", - }; - - fs.mkdirSync(path.dirname(infoFilePath), { recursive: true }); - - // Write the skeleton info to the specified path - fs.writeFileSync(infoFilePath, JSON.stringify(skeletonInfo, null, 2)); - console.log(`Skeleton info file written to ${infoFilePath}`); - } - - /** - * Writes properties metadata for the skeleton to a JSON file. - * @static - * @param {string} propFilePath - The file path where the properties info will be written. - */ - - static writePropInfo(propFilePath: string) { - fs.mkdirSync(path.dirname(propFilePath), { recursive: true }); - - const propInfo = { - "@type": "neuroglancer_segment_properties", - "inline": { - "ids": ["1"], - "properties": [{ "id": "label", "type": "label", "values": ["1"] }] - } - }; - - // Write the prop info to the specified path - fs.writeFileSync(propFilePath, JSON.stringify(propInfo, null, 2)); - console.log(`Prop info file written to ${propFilePath}`); - } - - /** - * Generates file paths for the binary, property, and skeleton info files based on a timestamp. - * TimeStamp is used for having unique filename. - * @static - * @param {string} outputDirectory - The output directory for the files. 
- * @param {string} timestamp - The timestamp used to format the file paths. - * @returns {{ binaryFilePath: string, propInfoFilePath: string, skeletonInfoFilePath: string }} - */ - static generateSkeletonFilePaths(outputDirectory: string, timestamp: string) { - - // Build the file paths with the formatted timestamp - const binaryFilePath = path.join(outputDirectory, 'tract', timestamp, '1'); // Binary file path - const propInfoFilePath = path.join(outputDirectory, 'tract', timestamp, 'prop', 'info'); // JSON file path - const skeletonInfoFilePath = path.join(outputDirectory, 'tract', timestamp, 'info'); // JSON file path - - return { - binaryFilePath, - propInfoFilePath, - skeletonInfoFilePath - }; - } - - /** - * Uploads a directory of files to AWS S3. - * @static - * @param {string} outputDirectory - The directory containing the files to upload. - * @param {string} timestamp - The timestamp used to organize the files in S3. - */ - // static async uploadSkeletonFilePathsToS3(outputDirectory: string, timestamp: string) { - // // Initialize the S3 client - // const s3Client = new S3Client({ - // region: process.env.AWS_REGION || 'us-east-2', - // }); - - // // Read the bucket name from environment variables - // const bucketName = process.env.BUCKET_NAME || 'linc-brain-mit-prod-us-east-2'; - - // // Check for required environment variables - // if (!process.env.AWS_REGION || !process.env.BUCKET_NAME) { - // console.error('AWS_REGION and BUCKET_NAME must be set in environment variables.'); - // return; - // } - - // // Define the local directory to upload - // const localDir = path.join(outputDirectory, 'tract', timestamp); - - // // Include the 'neuroglancer_trk/' prefix in the S3 destination path - // const s3DestinationPath = path.join('neuroglancer_trk', 'tract', timestamp).replace(/\\/g, '/'); - - // // Recursively upload all files in the local directory to S3 - // await SkeletonWriter.uploadDirectoryToS3(s3Client, bucketName, localDir, s3DestinationPath); - - // console.log('Uploaded generated files to S3.'); - // } - - /** - * Iteratively uploads all files from a local directory to an AWS S3 bucket. - * @static - * @param {S3Client} s3Client - The AWS S3 client used for the upload. - * @param {string} bucketName - The name of the S3 bucket. - * @param {string} localDirectory - The local directory containing the files to upload. - * @param {string} s3DestinationPath - The destination path in the S3 bucket. - */ - // static async uploadDirectoryToS3( - // s3Client: S3Client, - // bucketName: string, - // localDirectory: string, - // s3DestinationPath: string - // ) { - // const files = SkeletonWriter.getAllFilesInDirectory(localDirectory); - - // for (const filePath of files) { - // // Compute the relative path from the local directory - // const relativeFilePath = path.relative(localDirectory, filePath); - - // // Construct the S3 key by joining the destination path and relative file path (Hashmap) - // const s3Key = path.join(s3DestinationPath, relativeFilePath).replace(/\\/g, '/'); - - // try { - // const fileContent = fs.readFileSync(filePath); - - // const params = { - // Bucket: bucketName, - // Key: s3Key, - // Body: fileContent, - // }; - - // const command = new PutObjectCommand(params); - // await s3Client.send(command); - // console.log(`File uploaded successfully to s3://${bucketName}/${s3Key}`); - // } catch (error) { - // console.error(`Error uploading file ${filePath} to S3:`, error); - // } - // } - // } - - /** - * Interatively collects all file paths in a directory. 
- * @static - * @param {string} dir - The directory to scan. - * @returns {string[]} An array of file paths found in the directory. - */ - static getAllFilesInDirectory(dir: string): string[] { - let results: string[] = []; - - const list = fs.readdirSync(dir); - list.forEach((file) => { - const filePath = path.join(dir, file); - const stat = fs.statSync(filePath); - if (stat && stat.isDirectory()) { - // Recursively walk subdirectories - results = results.concat(SkeletonWriter.getAllFilesInDirectory(filePath)); - } else { - results.push(filePath); - } - }); - - return results; - } -} - - diff --git a/src/datasource/trk/frontend.ts b/src/datasource/trk/frontend.ts index 824684378..e6efe50f9 100644 --- a/src/datasource/trk/frontend.ts +++ b/src/datasource/trk/frontend.ts @@ -87,10 +87,6 @@ class trkSkeletonSource extends WithParameters( get vertexAttributes() { return this.parameters.metadata.vertexAttributes; } - - // get skeleton() { - // return this.parameters.binarydata.skeleton; - // } } function parseTransform(data: any): mat4 { @@ -141,11 +137,6 @@ function parseSkeletonMetadata(data: any): ParsedSkeletonMetadata { vertexAttributes.set(id, { dataType, numComponents }); }); }); - // const sharding = verifyObjectProperty( - // data, - // "sharding", - // parseShardingParameters, - // ); const segmentPropertyMap = verifyObjectProperty( data, "segment_properties", @@ -154,7 +145,6 @@ function parseSkeletonMetadata(data: any): ParsedSkeletonMetadata { return { metadata: { transform, vertexAttributes, - // sharding } as SkeletonMetadata, segmentPropertyMap, }; @@ -466,7 +456,6 @@ export function getSegmentPropertyMap( async function getSegmentPropertyMapDataSource( options: GetDataSourceOptions, credentialsProvider: SpecialProtocolCredentialsProvider, - // url: string, metadata: unknown, ): Promise { options; @@ -481,7 +470,6 @@ async function getSegmentPropertyMapDataSource( options.chunkManager, credentialsProvider, metadata, - // url, ), }, }, @@ -558,7 +546,6 @@ export class TrkDataSource extends DataSourceProvider { return await getSegmentPropertyMapDataSource( options, credentialsProvider, - // url, metadata, ); diff --git a/src/skeleton/decode_precomputed_skeleton.ts b/src/skeleton/decode_precomputed_skeleton.ts index 764efc48a..fd30d96e3 100644 --- a/src/skeleton/decode_precomputed_skeleton.ts +++ b/src/skeleton/decode_precomputed_skeleton.ts @@ -24,21 +24,15 @@ import { Endianness, } from "#src/util/endian.js"; -// console.log(import.meta.url); - export function decodeSkeletonChunk( chunk: SkeletonChunk, response: ArrayBuffer, vertexAttributes: Map, ) { - // console.log("start of decode vertices and edges") - const dv = new DataView(response); const numVertices = dv.getUint32(0, true); - // console.log(numVertices); const numEdges = dv.getUint32(4, true); - // console.log(numEdges); const vertexPositionsStartOffset = 8;
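+  // Layout sketch for chunks produced by the trk datasource in this series (assumed, based on
+  // SkeletonWriter.createArrayBuffer): uint32 vertex count, uint32 edge count, vertexCount * 3
+  // float32 positions starting at byte 8, edgeCount * 2 uint32 edge indices, then per-vertex
+  // float32 attribute data (orientation x 3, followed by one value per named TRK scalar).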