
Commit 9d8de88

Merge branch 'main' into katamartin/new-headers

katamartin committed Feb 2, 2023
2 parents dfcb2d4 + c1f9795
Showing 2 changed files with 61 additions and 89 deletions.
components/store.js: 19 changes (12 additions & 7 deletions)
@@ -96,7 +96,7 @@ const useStore = create((set, get) => ({
   },
   setVariable: async (name, overrideClim) => {
     set({
-      variable: { name },
+      variable: { name, selectors: [] },
       // Null out variable-specific fields
       chunkKey: null,
       chunks: {},
@@ -113,6 +113,10 @@ const useStore = create((set, get) => ({
       bounds,
       lockZoom,
       selectors,
+      chunk_separator,
+      chunk_shape,
+      shape,
+      array,
     } = await getVariableInfo(name, get())

     set({
@@ -124,6 +128,10 @@ const useStore = create((set, get) => ({
         bounds,
         lockZoom,
         selectors,
+        chunk_separator,
+        chunk_shape,
+        shape,
+        array,
       },
     })
     get().setChunkKey(chunkKey, overrideClim)
@@ -133,7 +141,7 @@ const useStore = create((set, get) => ({
       return
     }

-    const { variable, arrays, chunks } = get()
+    const { variable, headers, chunks } = get()

     set({
       chunkKey,
@@ -149,8 +157,8 @@ const useStore = create((set, get) => ({
         chunks: newChunks,
       } = await getAllData(chunkKey, {
         chunks,
-        arrays,
         variable,
+        headers,
       })

       set({
@@ -167,10 +175,7 @@ const useStore = create((set, get) => ({
   resetCenterChunk: (centerPoint) => {
     const { variable, arrays, setChunkKey } = get()

-    const newChunkKey = pointToChunkKey(centerPoint, {
-      arrays,
-      variable,
-    })
+    const newChunkKey = pointToChunkKey(centerPoint, variable)

     if (newChunkKey) {
       setChunkKey(newChunkKey, false) // TODO: reinstate auto-updating clim after demo
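With this refactor the store's `variable` object carries the chunk metadata that previously had to be looked up through `arrays[name]`, which is why `resetCenterChunk` can now call `pointToChunkKey(centerPoint, variable)` directly. A minimal sketch of the resulting shape; the field names mirror the diff above, but every value (including the `'tasmax'` name) is invented for illustration:

// Hypothetical example of the refactored `variable` object held in the store.
const variable = {
  name: 'tasmax', // illustrative variable name
  selectors: [{ chunk: 0 }, {}, {}],
  nullValue: -9999,
  chunk_separator: '.',
  chunk_shape: [12, 128, 128],
  shape: [24, 512, 1024],
  array: null, // stand-in for the zarr array handle stored under `array`
}

// Consumers can destructure chunk metadata directly instead of receiving
// `{ arrays, variable }`:
const { chunk_shape, shape } = variable
console.log(shape.map((d, i) => Math.ceil(d / chunk_shape[i]))) // chunks per dimension: [2, 4, 8]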
components/utils.js: 131 changes (49 additions & 82 deletions)
@@ -4,7 +4,6 @@ import ndarray from 'ndarray'
 import concatColumns from 'ndarray-concat-cols'
 import concatRows from 'ndarray-concat-rows'

-import { Blosc, GZip, Zlib, LZ4, Zstd } from 'numcodecs'
 import { PROJECTIONS, ASPECTS } from './constants'

 const getRange = (arr, { nullValue }) => {
@@ -48,24 +47,22 @@ const getNullValue = (dataArray) => {
   return nullValue
 }

-const toKeyString = (chunkKeyArray, { variable, arrays }) => {
+const toKeyString = (chunkKeyArray, { chunk_separator }) => {
   if (chunkKeyArray.length === 0) {
     return ''
   }

-  return chunkKeyArray.join(arrays[variable].chunk_separator)
+  return chunkKeyArray.join(chunk_separator)
 }
-const toKeyArray = (chunkKey, { variable, arrays }) => {
+const toKeyArray = (chunkKey, { chunk_separator }) => {
   if (chunkKey.length === 0) {
     return []
   }

-  return chunkKey.split(arrays[variable].chunk_separator).map(Number)
+  return chunkKey.split(chunk_separator).map(Number)
 }

-const getCenterChunk = ({ arrays, variable, selectors }) => {
-  const { chunk_shape, shape } = arrays[variable]
-
+const getCenterChunk = ({ chunk_shape, shape, selectors }) => {
   return shape.map(
     (d, i) => selectors[i].chunk ?? Math.floor(d / chunk_shape[i] / 2)
   )
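For reference, a quick usage sketch of the refactored helpers above (`toKeyString`, `toKeyArray`, `getCenterChunk`), assuming they are in scope; the separator, shapes, and selectors are made-up values:

toKeyString([1, 0, 2], { chunk_separator: '.' }) // => '1.0.2'
toKeyArray('1.0.2', { chunk_separator: '.' }) // => [1, 0, 2]
getCenterChunk({
  chunk_shape: [12, 128, 128],
  shape: [24, 512, 1024],
  selectors: [{ chunk: 0 }, {}, {}], // a missing `chunk` falls back to the middle chunk
}) // => [0, 2, 4]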
@@ -91,11 +88,6 @@ export const getMetadata = async (url) => {
   return { metadata, variables }
 }

-const COMPRESSORS = {
-  zlib: Zlib,
-  blosc: Blosc,
-}
-
 const getChunkShapeOverride = (chunkShape, limits) => {
   if (chunkShape.length === 1 || chunkShape.every((d, i) => d <= limits[i])) {
     return null
@@ -149,16 +141,6 @@ const getChunksHeader = (metadata, variables, apiMetadata) => {
 }

 export const getArrays = async (url, metadata, variables, apiMetadata) => {
-  // TODO: validate that we can reuse compressors across the store
-  const compressorId =
-    metadata.metadata[`${variables[0]}/.zarray`].compressor.id
-  const compressor = COMPRESSORS[compressorId]
-  if (!compressor) {
-    throw new Error(`no compressor found for compressor.id=${compressorId}`)
-  }
-
-  zarr.registry.set(compressor.codecId, () => compressor)
-
   // TODO: instantiate store with headers and clean up manual overrides
   const headers = getChunksHeader(metadata, variables, apiMetadata)
   const chunksOverrides = getChunksOverrides(metadata, variables, apiMetadata)
@@ -193,12 +175,12 @@ export const getArrays = async (url, metadata, variables, apiMetadata) => {
 }

 export const getVariableInfo = async (
-  variable,
+  name,
   { arrays, headers, metadata, apiMetadata }
 ) => {
-  const dataArray = arrays[variable]
-  const zattrs = metadata.metadata[`${variable}/.zattrs`]
-  const zarray = metadata.metadata[`${variable}/.zarray`]
+  const dataArray = arrays[name]
+  const zattrs = metadata.metadata[`${name}/.zattrs`]
+  const zarray = metadata.metadata[`${name}/.zarray`]

   const gridMapping = zattrs.grid_mapping
     ? metadata.metadata[`${zattrs.grid_mapping}/.zattrs`]
@@ -211,9 +193,10 @@ export const getVariableInfo = async (
   )

   const nullValue = getNullValue(dataArray)
+  const { chunk_separator, chunk_shape, shape } = dataArray

-  const axes = Object.keys(apiMetadata[variable]).reduce((accum, key) => {
-    const index = dimensions.indexOf(apiMetadata[variable][key])
+  const axes = Object.keys(apiMetadata[name]).reduce((accum, key) => {
+    const index = dimensions.indexOf(apiMetadata[name][key])

     return {
       ...accum,
@@ -238,11 +221,10 @@ export const getVariableInfo = async (
       index: isSpatialDimension ? null : 0,
     }
   })
-  const chunkKeyArray = getCenterChunk({ arrays, variable, selectors })
+  const chunkKeyArray = getCenterChunk({ chunk_shape, shape, selectors })

   return {
-    chunkKey: toKeyString(chunkKeyArray, { arrays, variable }),
-    nullValue,
+    chunkKey: toKeyString(chunkKeyArray, { chunk_separator }),
     northPole: gridMapping
       ? [
           gridMapping.grid_north_pole_longitude,
@@ -253,24 +235,32 @@ export const getVariableInfo = async (
     bounds,
     lockZoom,
     selectors,
+    nullValue,
+    chunk_separator,
+    chunk_shape,
+    shape,
+    array: dataArray,
   }
 }

 const getChunkData = async (
   chunkKey,
-  { arrays, variable: { axes, name: variable, nullValue }, headers }
+  {
+    variable: { array, axes, nullValue, chunk_separator, chunk_shape, shape },
+    headers,
+  }
 ) => {
-  const dataArray = arrays[variable]
-  const chunkKeyArray = toKeyArray(chunkKey, { arrays, variable })
-  const data = await dataArray
+  const chunkKeyArray = toKeyArray(chunkKey, { chunk_separator })
+  const data = await array
     .get_chunk(chunkKeyArray, { headers })
-    .then((c) => ndarray(new Float32Array(c.data), dataArray.chunk_shape))
+    .then((c) => ndarray(new Float32Array(c.data), chunk_shape))

   const clim = getRange(data.data, { nullValue })

   const filteredData = filterData(chunkKey, data, {
-    arrays,
-    variable,
+    chunk_separator,
+    chunk_shape,
+    shape,
   })

   const { X, Y } = axes
@@ -293,7 +283,7 @@ const getChunkData = async (

   const { X: lonRange, Y: latRange } = getChunkBounds(chunkKeyArray, {
     axes,
-    chunk_shape: dataArray.chunk_shape,
+    chunk_shape: chunk_shape,
   })
   const bounds = {
     lat: latRange,
@@ -327,10 +317,8 @@ export const getMapProps = (bounds, projection) => {
   return { scale, translate, projection: PROJECTIONS[projection] }
 }

-export const getAdjacentChunk = (
-  offset,
-  { arrays, axes, variable, chunkKey, chunk, shape }
-) => {
+export const getAdjacentChunk = (offset, chunkKey, variable) => {
+  const { axes, chunk_shape, shape, chunk_separator } = variable
   const [horizontalOffset, verticalOffset] = offset

   const coordinateOffset = shape.map((d, i) => {
@@ -343,25 +331,23 @@ export const getAdjacentChunk = (
     }
   })

-  const chunkKeyArray = toKeyArray(chunkKey, { arrays, variable })
+  const chunkKeyArray = toKeyArray(chunkKey, { chunk_separator })
   const newChunkKeyArray = chunkKeyArray.map((d, i) => d + coordinateOffset[i])

   // if new chunk key corresponds to array indices outside of the range represented
   // by `shape`, return null
   if (
     newChunkKeyArray.some(
-      (d, i) => d * chunk[i] < 0 || d * chunk[i] >= shape[i]
+      (d, i) => d * chunk_shape[i] < 0 || d * chunk_shape[i] >= shape[i]
     )
   ) {
     return null
   } else {
-    return toKeyString(newChunkKeyArray, { arrays, variable })
+    return toKeyString(newChunkKeyArray, { chunk_separator })
   }
 }

-const getActiveChunkKeys = (chunkKey, { axes, arrays, variable }) => {
-  const { chunk_shape, shape } = arrays[variable]
-
+const getActiveChunkKeys = (chunkKey, variable) => {
   return [
     [-2, -1],
     [-1, -1],
@@ -379,38 +365,19 @@ const getActiveChunkKeys = (chunkKey, { axes, arrays, variable }) => {
     [1, 1],
     [2, 1],
   ]
-    .map((offset) =>
-      getAdjacentChunk(offset, {
-        axes,
-        arrays,
-        variable,
-        chunkKey,
-        chunk: chunk_shape,
-        shape,
-      })
-    )
+    .map((offset) => getAdjacentChunk(offset, chunkKey, variable))
     .filter(Boolean)
 }
-export const getAllData = async (
-  chunkKey,
-  { arrays, chunks, variable: { axes, name: variable, nullValue, selectors } }
-) => {
-  const activeChunkKeys = getActiveChunkKeys(chunkKey, {
-    axes,
-    arrays,
-    variable,
-  })
+export const getAllData = async (chunkKey, { chunks, variable, headers }) => {
+  const activeChunkKeys = getActiveChunkKeys(chunkKey, variable)
   const activeChunks = {}
   const allChunks = await Promise.all(
     activeChunkKeys.map(async (key) => {
       if (chunks[key]) {
         activeChunks[key] = chunks[key]
         return chunks[key]
       } else {
-        const chunk = await getChunkData(key, {
-          arrays,
-          variable: { axes, name: variable, nullValue },
-        })
+        const chunk = await getChunkData(key, { variable, headers })
         activeChunks[key] = chunk
         return chunk
       }
@@ -439,7 +406,7 @@ export const getAllData = async (
     [Infinity, -Infinity]
   )

-  const separator = arrays[variable].chunk_separator
+  const { chunk_separator: separator, axes, selectors } = variable
   // stitch together chunks in order dictated by chunk keys
   // TODO: handle empty chunks https://zarr.readthedocs.io/en/stable/tutorial.html#empty-chunks (i.e. non-continuous chunks)

@@ -494,10 +461,12 @@ export const getAllData = async (
   }
 }

-const filterData = (chunkKey, data, { arrays, variable }) => {
-  const separator = arrays[variable].chunk_separator
-  const indices = chunkKey.split(separator).map(Number)
-  const { chunk_shape, shape } = arrays[variable]
+const filterData = (
+  chunkKey,
+  data,
+  { chunk_separator, chunk_shape, shape }
+) => {
+  const indices = chunkKey.split(chunk_separator).map(Number)

   const truncatedShape = indices.map((index, i) => {
     const impliedShape = (index + 1) * chunk_shape[i]
@@ -517,10 +486,8 @@ const filterData = (chunkKey, data, { arrays, variable }) => {

 export const pointToChunkKey = (
   [lon, lat],
-  { arrays, variable: { name, selectors, axes } }
+  { selectors, axes, chunk_separator, chunk_shape, shape }
 ) => {
-  const { chunk_shape, shape } = arrays[name]
-
   const chunkKey = shape.map((d, i) => {
     if (axes.X.index === i) {
       return getAxisIndex(lon, { axis: axes.X, chunk_shape, shape })
@@ -532,7 +499,7 @@ export const pointToChunkKey = (
   })

   if (chunkKey.every((d) => d >= 0)) {
-    return toKeyString(chunkKey, { variable: name, arrays })
+    return toKeyString(chunkKey, { chunk_separator })
   }
 }

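Beyond the refactor, the functional change on this branch is threading request headers down to chunk fetches: the store now passes `headers` into `getAllData`, which forwards them through `getChunkData` to `array.get_chunk(chunkKeyArray, { headers })`. A minimal mock-based sketch of that call path; the mock array and the Authorization header value are invented for illustration:

// Mock zarr-style array standing in for `variable.array`; only get_chunk is sketched.
const mockArray = {
  get_chunk: async (chunkKeyArray, { headers }) => {
    console.log('fetching chunk', chunkKeyArray.join('.'), 'with', Object.keys(headers))
    return { data: new Float32Array(128 * 128) } // fake chunk payload
  },
}

// Hypothetical headers object supplied by the store (not taken from the diff).
const headers = { Authorization: 'Bearer <token>' }

// Same shape of call that getChunkData makes after this change.
mockArray
  .get_chunk([0, 2, 4], { headers })
  .then((c) => console.log('received', c.data.length, 'values'))
// logs: fetching chunk 0.2.4 with [ 'Authorization' ]
//       received 16384 values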
