Skip to content

Commit

Permalink
Use new headers pattern
Browse files Browse the repository at this point in the history
  • Loading branch information
katamartin committed Feb 1, 2023
1 parent ecd9f0a commit dfcb2d4
Show file tree
Hide file tree
Showing 2 changed files with 66 additions and 20 deletions.
10 changes: 8 additions & 2 deletions components/store.js
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@ const createDatasetSlice = (set, get) => ({
apiMetadata: null,
variables: [],
arrays: {},
headers: null,

// variable
variable: {
Expand Down Expand Up @@ -75,9 +76,14 @@ const useStore = create((set, get) => ({
if (variables.length === 0) {
return 'No viewable variables found. Please provide a dataset with 2D data arrays.'
}
const arrays = await getArrays(url, metadata, variables, apiMetadata)
const { arrays, headers } = await getArrays(
url,
metadata,
variables,
apiMetadata
)

set({ metadata, variables, arrays })
set({ metadata, variables, arrays, headers })

// default to first variable
const initialVariable = variables[0]
Expand Down
76 changes: 58 additions & 18 deletions components/utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,51 @@ const getChunkShapeOverride = (chunkShape, limits) => {
return chunkShape.map((d, i) => Math.min(d, limits[i]))
}

// Computes per-array chunk-shape overrides for a Zarr store.
//
// Two kinds of overrides are produced:
//   1. Coordinate arrays that are split across multiple chunks are overridden
//      to a single chunk covering the full array shape.
//   2. Data variables have their native chunk shape capped by
//      getChunkShapeOverride: 256 along the spatial (X/Y) dimensions named in
//      apiMetadata, 30 along all other dimensions.
//
// Params:
//   metadata    - consolidated Zarr metadata (reads `<key>/.zattrs` and
//                 `<key>/.zarray` entries)
//   variables   - array of variable names to consider
//   apiMetadata - per-variable map with `X` and `Y` spatial dimension names
// Returns: object mapping array name -> overridden chunk shape (arrays with
//          no override needed are omitted).
const getChunksOverrides = (metadata, variables, apiMetadata) => {
  // Union of all dimensions referenced by the requested variables.
  const coordinates = new Set(
    variables.flatMap(
      (variable) =>
        metadata.metadata[`${variable}/.zattrs`]['_ARRAY_DIMENSIONS']
    )
  )

  const result = {}

  coordinates.forEach((coordinate) => {
    const { shape, chunks } = metadata.metadata[`${coordinate}/.zarray`]

    // Only override when the coordinate is actually chunked smaller than its
    // full shape along some axis.
    if (shape.some((d, i) => d !== chunks[i])) {
      result[coordinate] = shape
    }
  })

  variables.forEach((variable) => {
    const nativeChunks = metadata.metadata[`${variable}/.zarray`].chunks
    const dimensions =
      metadata.metadata[`${variable}/.zattrs`]['_ARRAY_DIMENSIONS']
    // Hoisted: the spatial-dimension names are invariant across the map below.
    const spatialDims = [apiMetadata[variable].X, apiMetadata[variable].Y]
    const limits = dimensions.map((d) => (spatialDims.includes(d) ? 256 : 30))
    const chunks = getChunkShapeOverride(nativeChunks, limits)

    if (chunks) {
      result[variable] = chunks
    }
  })

  return result
}

// Encodes the chunk-shape overrides for the given variables as HTTP headers:
// one `chunks: <name>=<d0>,<d1>,...` entry per overridden array.
const getChunksHeader = (metadata, variables, apiMetadata) => {
  const overrides = getChunksOverrides(metadata, variables, apiMetadata)
  const entries = Object.entries(overrides).map(([name, shape]) => [
    'chunks',
    `${name}=${shape.join(',')}`,
  ])
  return new Headers(entries)
}

export const getArrays = async (url, metadata, variables, apiMetadata) => {
// TODO: validate that we can reuse compressors across the store
const compressorId =
Expand All @@ -113,6 +158,11 @@ export const getArrays = async (url, metadata, variables, apiMetadata) => {
}

zarr.registry.set(compressor.codecId, () => compressor)

// TODO: instantiate store with headers and clean up manual overrides
const headers = getChunksHeader(metadata, variables, apiMetadata)
const chunksOverrides = getChunksOverrides(metadata, variables, apiMetadata)

const store = new FetchStore(url)

const coords = new Set(
Expand All @@ -133,22 +183,18 @@ export const getArrays = async (url, metadata, variables, apiMetadata) => {
)
keys.forEach((key, i) => {
const arr = arrs[i]
const dimensions = metadata.metadata[`${key}/.zattrs`]['_ARRAY_DIMENSIONS']
const limits = dimensions.map((d, i) =>
[apiMetadata[key].X, apiMetadata[key].Y].includes(d) ? 256 : 30
)
const override = getChunkShapeOverride(arr.chunk_shape, limits)
if (override) arr.chunk_shape = override
// TODO: remove if store can be instantiated with headers
if (chunksOverrides[key]) arr.chunk_shape = chunksOverrides[key]

result[key] = arrs[i]
result[key] = arr
})

return result
return { arrays: result, headers }
}

export const getVariableInfo = async (
variable,
{ arrays, metadata, apiMetadata }
{ arrays, headers, metadata, apiMetadata }
) => {
const dataArray = arrays[variable]
const zattrs = metadata.metadata[`${variable}/.zattrs`]
Expand All @@ -161,9 +207,7 @@ export const getVariableInfo = async (
const coordinates = await Promise.all(
dimensions
.map((coord) => arrays[coord])
.map((arr) =>
arr.get_chunk([0], { headers: { chunks: arr.shape.join(',') } })
)
.map((arr, i) => arr.get_chunk([0], { headers }))
)

const nullValue = getNullValue(dataArray)
Expand Down Expand Up @@ -214,16 +258,12 @@ export const getVariableInfo = async (

const getChunkData = async (
chunkKey,
{ arrays, variable: { axes, name: variable, nullValue } }
{ arrays, variable: { axes, name: variable, nullValue }, headers }
) => {
const dataArray = arrays[variable]
const chunkKeyArray = toKeyArray(chunkKey, { arrays, variable })
const data = await dataArray
.get_chunk(chunkKeyArray, {
headers: {
chunks: dataArray.chunk_shape.join(','),
},
})
.get_chunk(chunkKeyArray, { headers })
.then((c) => ndarray(new Float32Array(c.data), dataArray.chunk_shape))

const clim = getRange(data.data, { nullValue })
Expand Down

0 comments on commit dfcb2d4

Please sign in to comment.