diff --git a/components/store.js b/components/store.js
index 55655ca..f130252 100644
--- a/components/store.js
+++ b/components/store.js
@@ -16,6 +16,7 @@ const createDatasetSlice = (set, get) => ({
   apiMetadata: null,
   variables: [],
   arrays: {},
+  headers: null,
 
   // variable
   variable: {
@@ -75,9 +76,14 @@ const useStore = create((set, get) => ({
       if (variables.length === 0) {
        return 'No viewable variables found. Please provide a dataset with 2D data arrays.'
       }
-      const arrays = await getArrays(url, metadata, variables, apiMetadata)
+      const { arrays, headers } = await getArrays(
+        url,
+        metadata,
+        variables,
+        apiMetadata
+      )
 
-      set({ metadata, variables, arrays })
+      set({ metadata, variables, arrays, headers })
 
       // default to first variable
       const initialVariable = variables[0]
diff --git a/components/utils.js b/components/utils.js
index 60a0a6b..7e81779 100644
--- a/components/utils.js
+++ b/components/utils.js
@@ -103,6 +103,51 @@ const getChunkShapeOverride = (chunkShape, limits) => {
   return chunkShape.map((d, i) => Math.min(d, limits[i]))
 }
 
+const getChunksOverrides = (metadata, variables, apiMetadata) => {
+  const coordinates = new Set(
+    variables.flatMap(
+      (variable) =>
+        metadata.metadata[`${variable}/.zattrs`]['_ARRAY_DIMENSIONS']
+    )
+  )
+
+  const result = {}
+
+  coordinates.forEach((coordinate) => {
+    const { shape, chunks } = metadata.metadata[`${coordinate}/.zarray`]
+
+    if (shape.some((d, i) => d !== chunks[i])) {
+      result[coordinate] = shape
+    }
+  })
+
+  variables.forEach((variable) => {
+    const nativeChunks = metadata.metadata[`${variable}/.zarray`].chunks
+    const dimensions =
+      metadata.metadata[`${variable}/.zattrs`]['_ARRAY_DIMENSIONS']
+    const limits = dimensions.map((d, i) =>
+      [apiMetadata[variable].X, apiMetadata[variable].Y].includes(d) ? 256 : 30
+    )
+    const chunks = getChunkShapeOverride(nativeChunks, limits)
+
+    if (chunks) {
+      result[variable] = chunks
+    }
+  })
+
+  return result
+}
+
+const getChunksHeader = (metadata, variables, apiMetadata) => {
+  const chunks = getChunksOverrides(metadata, variables, apiMetadata)
+  return new Headers(
+    Object.keys(chunks).map((key) => [
+      'chunks',
+      `${key}=${chunks[key].join(',')}`,
+    ])
+  )
+}
+
 export const getArrays = async (url, metadata, variables, apiMetadata) => {
   // TODO: validate that we can reuse compressors across the store
   const compressorId =
@@ -113,6 +158,11 @@ export const getArrays = async (url, metadata, variables, apiMetadata) => {
   }
   zarr.registry.set(compressor.codecId, () => compressor)
+
+  // TODO: instantiate store with headers and clean up manual overrides
+  const headers = getChunksHeader(metadata, variables, apiMetadata)
+  const chunksOverrides = getChunksOverrides(metadata, variables, apiMetadata)
+
   const store = new FetchStore(url)
 
   const coords = new Set(
@@ -133,22 +183,18 @@ export const getArrays = async (url, metadata, variables, apiMetadata) => {
   )
   keys.forEach((key, i) => {
     const arr = arrs[i]
-    const dimensions = metadata.metadata[`${key}/.zattrs`]['_ARRAY_DIMENSIONS']
-    const limits = dimensions.map((d, i) =>
-      [apiMetadata[key].X, apiMetadata[key].Y].includes(d) ? 256 : 30
-    )
-    const override = getChunkShapeOverride(arr.chunk_shape, limits)
-    if (override) arr.chunk_shape = override
+    // TODO: remove if store can be instantiated with headers
+    if (chunksOverrides[key]) arr.chunk_shape = chunksOverrides[key]
 
-    result[key] = arrs[i]
+    result[key] = arr
   })
 
-  return result
+  return { arrays: result, headers }
 }
 
 export const getVariableInfo = async (
   variable,
-  { arrays, metadata, apiMetadata }
+  { arrays, headers, metadata, apiMetadata }
 ) => {
   const dataArray = arrays[variable]
   const zattrs = metadata.metadata[`${variable}/.zattrs`]
@@ -161,9 +207,7 @@ export const getVariableInfo = async (
   const coordinates = await Promise.all(
     dimensions
       .map((coord) => arrays[coord])
-      .map((arr) =>
-        arr.get_chunk([0], { headers: { chunks: arr.shape.join(',') } })
-      )
+      .map((arr, i) => arr.get_chunk([0], { headers }))
   )
 
   const nullValue = getNullValue(dataArray)
@@ -214,16 +258,12 @@ const getChunkData = async (
   chunkKey,
-  { arrays, variable: { axes, name: variable, nullValue } }
+  { arrays, variable: { axes, name: variable, nullValue }, headers }
 ) => {
   const dataArray = arrays[variable]
   const chunkKeyArray = toKeyArray(chunkKey, { arrays, variable })
 
   const data = await dataArray
-    .get_chunk(chunkKeyArray, {
-      headers: {
-        chunks: dataArray.chunk_shape.join(','),
-      },
-    })
+    .get_chunk(chunkKeyArray, { headers })
    .then((c) => ndarray(new Float32Array(c.data), dataArray.chunk_shape))

  const clim = getRange(data.data, { nullValue })
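
For reference, getChunksHeader encodes each override as a separate 'chunks' entry on a single Headers object, and the Fetch API combines repeated entries for one header name into a comma-separated value. Below is a minimal illustrative sketch of the resulting request header, using hypothetical override shapes (a 'time' coordinate of [31] and a 'tavg' variable of [30, 256, 256]) that are not taken from a real dataset:

// Illustrative only: hypothetical overrides standing in for the output of getChunksOverrides
const overrides = { time: [31], tavg: [30, 256, 256] }

// Same construction as getChunksHeader: one ['chunks', 'name=shape'] pair per array
const headers = new Headers(
  Object.keys(overrides).map((key) => [
    'chunks',
    `${key}=${overrides[key].join(',')}`,
  ])
)

// Repeated 'chunks' entries are merged into a single header value, so the
// request would be sent with:
//   chunks: time=31, tavg=30,256,256
console.log(headers.get('chunks'))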