diff --git a/python/tests/n5_test.py b/python/tests/n5_test.py
index d03e23009..928130208 100644
--- a/python/tests/n5_test.py
+++ b/python/tests/n5_test.py
@@ -27,6 +27,10 @@
     [
         {"driver": "n5", "metadata": {"compression": {"type": "raw"}}},
         {"driver": "n5", "metadata": {"compression": {"type": "gzip"}}},
+        {
+            "driver": "n5",
+            "metadata": {"compression": {"type": "gzip", "useZlib": True}},
+        },
         {
             "driver": "n5",
             "metadata": {
@@ -38,8 +42,7 @@
                 }
             },
         },
-        # TODO(jbms): Add once tensorstore supports zstd
-        # {"driver": "n5", "metadata": {"compression": {"type": "zstd"}}},
+        {"driver": "n5", "metadata": {"compression": {"type": "zstd"}}},
     ],
     ids=str,
 )
diff --git a/python/tests/zarr_test.py b/python/tests/zarr_test.py
index 74b3586cc..529e26406 100644
--- a/python/tests/zarr_test.py
+++ b/python/tests/zarr_test.py
@@ -26,6 +26,7 @@
     "spec",
     [
         {"driver": "zarr"},
+        {"driver": "zarr", "metadata": {"compressor": {"id": "zlib"}}},
         {"driver": "zarr", "schema": {"chunk_layout": {"inner_order": [2, 1, 0]}}},
         {"driver": "zarr3"},
         {"driver": "zarr3", "schema": {"chunk_layout": {"inner_order": [2, 1, 0]}}},
diff --git a/src/datasource/n5/backend.ts b/src/datasource/n5/backend.ts
index ef3faee19..d2fd83a20 100644
--- a/src/datasource/n5/backend.ts
+++ b/src/datasource/n5/backend.ts
@@ -61,8 +61,11 @@ async function decodeChunk(
   chunk.chunkDataSize = shape;
   let buffer = new Uint8Array(response, offset);
   switch (encoding) {
+    case VolumeChunkEncoding.ZLIB:
+      buffer = new Uint8Array(await decodeGzip(buffer, "deflate"));
+      break;
     case VolumeChunkEncoding.GZIP:
-      buffer = new Uint8Array(await decodeGzip(buffer));
+      buffer = new Uint8Array(await decodeGzip(buffer, "gzip"));
       break;
     case VolumeChunkEncoding.BLOSC:
       buffer = await requestAsyncComputation(
diff --git a/src/datasource/n5/base.ts b/src/datasource/n5/base.ts
index ef471c8c1..5dc6a2f44 100644
--- a/src/datasource/n5/base.ts
+++ b/src/datasource/n5/base.ts
@@ -15,10 +15,11 @@
  */
 
 export enum VolumeChunkEncoding {
-  RAW = 0,
-  GZIP = 1,
-  BLOSC = 2,
-  ZSTD = 3,
+  RAW,
+  ZLIB,
+  GZIP,
+  BLOSC,
+  ZSTD,
 }
 
 export class VolumeChunkSourceParameters {
diff --git a/src/datasource/n5/frontend.ts b/src/datasource/n5/frontend.ts
index ed4299a52..92dba794b 100644
--- a/src/datasource/n5/frontend.ts
+++ b/src/datasource/n5/frontend.ts
@@ -68,6 +68,7 @@ import {
   expectArray,
   parseArray,
   parseFixedLengthArray,
+  verifyBoolean,
   verifyEnumString,
   verifyFinitePositiveFloat,
   verifyObject,
@@ -235,6 +236,17 @@ class ScaleMetadata {
       encoding = verifyObjectProperty(compression, "type", (x) =>
         verifyEnumString(x, VolumeChunkEncoding),
       );
+      if (
+        encoding === VolumeChunkEncoding.GZIP &&
+        verifyOptionalObjectProperty(
+          compression,
+          "useZlib",
+          verifyBoolean,
+          false,
+        ) === true
+      ) {
+        encoding = VolumeChunkEncoding.ZLIB;
+      }
     });
     if (encoding === undefined) {
       encoding = verifyObjectProperty(obj, "compressionType", (x) =>
diff --git a/src/datasource/nifti/backend.ts b/src/datasource/nifti/backend.ts
index 0b8192e8a..4778a49b1 100644
--- a/src/datasource/nifti/backend.ts
+++ b/src/datasource/nifti/backend.ts
@@ -61,7 +61,7 @@ async function decodeNiftiFile(
   _cancellationToken: CancellationToken,
 ) {
   if (isCompressed(buffer)) {
-    buffer = await decodeGzip(buffer);
+    buffer = await decodeGzip(buffer, "gzip");
   }
   const data = new NiftiFileData();
   data.uncompressedData = buffer;
diff --git a/src/datasource/precomputed/backend.ts b/src/datasource/precomputed/backend.ts
index 4ae0bacf1..c58f69596 100644
--- a/src/datasource/precomputed/backend.ts
+++ b/src/datasource/precomputed/backend.ts
@@ -218,7 +218,10 @@ function getMinishardIndexDataSource(
         cancellationToken,
       );
       if (sharding.minishardIndexEncoding === DataEncoding.GZIP) {
-        minishardIndexResponse = await decodeGzip(minishardIndexResponse);
+        minishardIndexResponse = await decodeGzip(
+          minishardIndexResponse,
+          "gzip",
+        );
       }
       if (minishardIndexResponse.byteLength % 24 !== 0) {
         throw new Error(
@@ -344,7 +347,7 @@ async function getShardedData(
     cancellationToken,
   );
   if (minishardIndexSource.sharding.dataEncoding === DataEncoding.GZIP) {
-    data = await decodeGzip(data);
+    data = await decodeGzip(data, "gzip");
   }
   return {
     data,
diff --git a/src/datasource/zarr/codec/gzip/decode.ts b/src/datasource/zarr/codec/gzip/decode.ts
index 8003868eb..288688adb 100644
--- a/src/datasource/zarr/codec/gzip/decode.ts
+++ b/src/datasource/zarr/codec/gzip/decode.ts
@@ -20,16 +20,21 @@ import { CodecKind } from "#src/datasource/zarr/codec/index.js";
 import type { CancellationToken } from "#src/util/cancellation.js";
 import { decodeGzip } from "#src/util/gzip.js";
 
-registerCodec({
-  name: "gzip",
-  kind: CodecKind.bytesToBytes,
-  async decode(
-    configuration: Configuration,
-    encoded: Uint8Array,
-    cancellationToken: CancellationToken,
-  ): Promise<Uint8Array> {
-    configuration;
-    cancellationToken;
-    return new Uint8Array(await decodeGzip(encoded));
-  },
-});
+for (const [name, compressionFormat] of [
+  ["gzip", "gzip"],
+  ["zlib", "deflate"],
+] as const) {
+  registerCodec({
+    name,
+    kind: CodecKind.bytesToBytes,
+    async decode(
+      configuration: Configuration,
+      encoded: Uint8Array,
+      cancellationToken: CancellationToken,
+    ): Promise<Uint8Array> {
+      configuration;
+      cancellationToken;
+      return new Uint8Array(await decodeGzip(encoded, compressionFormat));
+    },
+  });
+}
diff --git a/src/datasource/zarr/codec/gzip/resolve.ts b/src/datasource/zarr/codec/gzip/resolve.ts
index eec56f4c6..60d058247 100644
--- a/src/datasource/zarr/codec/gzip/resolve.ts
+++ b/src/datasource/zarr/codec/gzip/resolve.ts
@@ -26,12 +26,14 @@ export interface Configuration {
   level: number;
 }
 
-registerCodec({
-  name: "gzip",
-  kind: CodecKind.bytesToBytes,
-  resolve(configuration: unknown): { configuration: Configuration } {
-    verifyObject(configuration);
-    const level = verifyObjectProperty(configuration, "level", verifyInt);
-    return { configuration: { level } };
-  },
-});
+for (const name of ["gzip", "zlib"]) {
+  registerCodec({
+    name,
+    kind: CodecKind.bytesToBytes,
+    resolve(configuration: unknown): { configuration: Configuration } {
+      verifyObject(configuration);
+      const level = verifyObjectProperty(configuration, "level", verifyInt);
+      return { configuration: { level } };
+    },
+  });
+}
diff --git a/src/datasource/zarr/metadata/parse.ts b/src/datasource/zarr/metadata/parse.ts
index af158972b..a4f515f98 100644
--- a/src/datasource/zarr/metadata/parse.ts
+++ b/src/datasource/zarr/metadata/parse.ts
@@ -396,16 +396,9 @@ export function parseV2Metadata(
         break;
       case "zlib":
       case "gzip":
-        codecs.push({
-          name: "gzip",
-          configuration: {
-            level: verifyObjectProperty(compressor, "level", verifyInt),
-          },
-        });
-        break;
       case "zstd":
         codecs.push({
-          name: "zstd",
+          name: id,
           configuration: {
             level: verifyObjectProperty(compressor, "level", verifyInt),
           },
diff --git a/src/sliceview/backend_chunk_decoders/bossNpz.ts b/src/sliceview/backend_chunk_decoders/bossNpz.ts
index e8f056ba0..76075224a 100644
--- a/src/sliceview/backend_chunk_decoders/bossNpz.ts
+++ b/src/sliceview/backend_chunk_decoders/bossNpz.ts
@@ -35,7 +35,9 @@ export async function decodeBossNpzChunk(
   cancellationToken: CancellationToken,
   response: ArrayBuffer,
 ) {
-  const parseResult = parseNpy(new Uint8Array(await decodeGzip(response)));
+  const parseResult = parseNpy(
+    new Uint8Array(await decodeGzip(response, "deflate")),
+  );
   const chunkDataSize = chunk.chunkDataSize!;
   const source = chunk.source!;
   const { shape } = parseResult;
diff --git a/src/sliceview/backend_chunk_decoders/ndstoreNpz.ts b/src/sliceview/backend_chunk_decoders/ndstoreNpz.ts
index a4b4fa215..5bf2f83a3 100644
--- a/src/sliceview/backend_chunk_decoders/ndstoreNpz.ts
+++ b/src/sliceview/backend_chunk_decoders/ndstoreNpz.ts
@@ -35,7 +35,9 @@ export async function decodeNdstoreNpzChunk(
   cancellationToken: CancellationToken,
   response: ArrayBuffer,
 ) {
-  const parseResult = parseNpy(new Uint8Array(await decodeGzip(response)));
+  const parseResult = parseNpy(
+    new Uint8Array(await decodeGzip(response, "deflate")),
+  );
   const chunkDataSize = chunk.chunkDataSize!;
   const source = chunk.source!;
   const { shape } = parseResult;
diff --git a/src/util/gzip.ts b/src/util/gzip.ts
index 118e03703..8e04f0856 100644
--- a/src/util/gzip.ts
+++ b/src/util/gzip.ts
@@ -22,9 +22,12 @@ export function isGzipFormat(data: ArrayBufferView) {
   return view.length > 2 && view[0] === 0x1f && view[1] === 0x8b;
 }
 
-export async function decodeGzip(data: ArrayBuffer | ArrayBufferView) {
+export async function decodeGzip(
+  data: ArrayBuffer | ArrayBufferView,
+  format: CompressionFormat,
+) {
   const decompressedStream = new Response(data).body!.pipeThrough(
-    new DecompressionStream("gzip"),
+    new DecompressionStream(format),
   );
   return await new Response(decompressedStream).arrayBuffer();
 }
@@ -40,7 +43,7 @@ export async function maybeDecompressGzip(data: ArrayBuffer | ArrayBufferView) {
     byteView = new Uint8Array(data.buffer, data.byteOffset, data.byteLength);
   }
   if (isGzipFormat(byteView)) {
-    return new Uint8Array(await decodeGzip(byteView));
+    return new Uint8Array(await decodeGzip(byteView, "gzip"));
   }
   return byteView;
 }