Skip to content

Commit

Permalink
Merge pull request #480 from PaulHax/zarr-channels
Browse files Browse the repository at this point in the history
feat(ngff zarr): add multi channel support
  • Loading branch information
thewtex authored May 17, 2022
2 parents e7a9a4f + f80bb83 commit aef6541
Show file tree
Hide file tree
Showing 17 changed files with 402 additions and 247 deletions.
10 changes: 8 additions & 2 deletions karma.conf.js
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
/* eslint-disable global-require */
/* eslint-disable react/require-extension */
const path = require('path')

const vtkRules = require('vtk.js/Utilities/config/rules-vtk.js')
Expand Down Expand Up @@ -154,6 +153,7 @@ module.exports = function init(config) {

client: {
useIframe: true,
args: config.dockered ? ['--dockered'] : [],
},

// browserNoActivityTimeout: 600000,
Expand All @@ -163,7 +163,13 @@ module.exports = function init(config) {
colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['Chrome'],
browsers: ['Chrome_without_sandbox'],
singleRun: true,
customLaunchers: {
Chrome_without_sandbox: {
base: 'Chrome',
flags: ['--no-sandbox'],
},
},
})
}
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@
"lint:types": "tsc --noEmit",
"start": "webpack serve --mode development --static ./dist/ --open --port 8082",
"semantic-release": "semantic-release",
"test": "npm run test:downloadData && npm run lint:types && npm run build:test-ui && karma start ./karma.conf.js --browsers Chrome,Firefox",
"test": "npm run test:downloadData && npm run lint:types && npm run build:test-ui && karma start ./karma.conf.js --browsers Chrome_without_sandbox,Firefox",
"test:downloadData": "node test/downloadData.mjs",
"test:headless": "./test/run.sh",
"test:headless-debug": "./test/run.sh -d",
Expand All @@ -154,4 +154,4 @@
"lint-staged": {
"*.js": "prettier --write"
}
}
}
28 changes: 3 additions & 25 deletions src/Compression/bloscZarrDecompress.js
Original file line number Diff line number Diff line change
@@ -1,24 +1,5 @@
import { runPipeline, InterfaceTypes, WorkerPool } from 'itk-wasm'
import dtypeToTypedArray from '../IO/dtypeToTypedArray'

const dtypeToElementSize = new Map([
['<b', 1],
['<B', 1],
['<u1', 1],
['>u1', 1],
['|u1', 1],
['<i1', 1],
['|i1', 1],
['<u2', 2],
['<i2', 2],
['<u4', 4],
['<i4', 4],
['<u8', 8],
['<i8', 8],

['<f4', 8],
['<f8', 8],
])
import { getSize } from '../IO/dtypeUtils'

const cores = navigator.hardwareConcurrency ? navigator.hardwareConcurrency : 4
const numberOfWorkers = cores + Math.floor(Math.sqrt(cores))
Expand Down Expand Up @@ -50,7 +31,7 @@ async function bloscZarrDecompress(chunkData) {
const compressedChunk = chunkData[index].data
dtype = zarrayMetadata.dtype
const nElements = zarrayMetadata.chunks.reduce((a, b) => a * b)
const elementSize = dtypeToElementSize.get(dtype)
const elementSize = getSize(dtype)
if (!elementSize) throw Error('Unknown dtype in .zarray metadata')
const outputSize = nElements * elementSize
const inputs = [
Expand All @@ -73,14 +54,11 @@ async function bloscZarrDecompress(chunkData) {
}
const results = await workerPool.runTasks(taskArgsArray).promise

const typedArray = dtypeToTypedArray.get(dtype)
const decompressedChunks = []
for (let index = 0; index < results.length; index++) {
// console.log(results[index].stdout)
// console.error(results[index].stderr)
decompressedChunks.push(
new typedArray(results[index].outputs[0].data.data.buffer)
)
decompressedChunks.push(results[index].outputs[0].data.data.buffer)
}
return decompressedChunks
}
Expand Down
4 changes: 2 additions & 2 deletions src/Context/ImageActorContext.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,10 +5,10 @@ class ImageActorContext {
// The rendered image / label image scale
renderedScale = null

// MultiscaleChunked label image to be visualized
// MultiscaleSpatialImage label image to be visualized
labelImage = null

// MultiscaleChunked label image to be visualized for use with
// MultiscaleSpatialImage label image to be visualized for use with
// interactive, manual editing as opposed to stored or algorithmic results
editorLabelImage = null

Expand Down
221 changes: 136 additions & 85 deletions src/IO/ImageDataFromChunks.worker.js
Original file line number Diff line number Diff line change
@@ -1,9 +1,33 @@
import registerWebworker from 'webworker-promise/lib/register'
import componentTypeToTypedArray from './componentTypeToTypedArray'
import { toDimensionArray } from './dimensionUtils'
import { CXYZT, ensuredDims } from './dimensionUtils'
import {
getTypedArray,
ElementGetter,
getSize,
testLittleEndian,
} from './dtypeUtils'

const haveSharedArrayBuffer = typeof self.SharedArrayBuffer === 'function'

const validateIndices = ({ chunkStart, chunkEnd, roiStart, roiEnd }) => {
if (
['x', 'y', 'z'].some(
dim => chunkStart[dim] > roiEnd[dim] || chunkEnd[dim] < roiStart[dim]
)
) {
// We should never get here...
console.error('Requested a chunk outside the region of interest!')
}
}

const IS_SYSTEM_LITTLE_ENDIAN = (function() {
const buffer = new ArrayBuffer(2)
new DataView(buffer).setInt16(0, 256, true /* littleEndian */)
// Int16Array uses the platform's endianness.
return new Int16Array(buffer)[0] === 256
})()

registerWebworker().operation(
'imageDataFromChunks',
({
Expand All @@ -14,29 +38,13 @@ registerWebworker().operation(
indexStart,
indexEnd,
}) => {
const chunkSize = toDimensionArray(['c', 'x', 'y', 'z'], info.chunkSize)
const chunkStrides = [
chunkSize[0],
chunkSize[0] * chunkSize[1],
chunkSize[0] * chunkSize[1] * chunkSize[2],
chunkSize[0] * chunkSize[1] * chunkSize[2] * chunkSize[3],
] // c, x, y, z,

const size = toDimensionArray(['x', 'y', 'z'], info.arrayShape)
const components = imageType.components

const pixelStrides = [
components,
components * size[0],
components * size[0] * size[1],
components * size[0] * size[1] * size[2],
] // c, x, y, z

const pixelArrayType = componentTypeToTypedArray.get(
imageType.componentType
)
let pixelArray = null
const pixelArrayElements = size.reduce((a, b) => a * b) * components
const pixelArrayElements = Array.from(info.arrayShape.values()).reduce(
(a, b) => a * b
)
if (haveSharedArrayBuffer) {
const pixelArrayBytes =
pixelArrayElements * pixelArrayType.BYTES_PER_ELEMENT
Expand All @@ -46,74 +54,117 @@ registerWebworker().operation(
pixelArray = new pixelArrayType(pixelArrayElements)
}

const arrayShape = Object.fromEntries(
ensuredDims(1, CXYZT, info.arrayShape)
)
const pixelStrides = {
z: arrayShape.c * arrayShape.x * arrayShape.y,
y: arrayShape.c * arrayShape.x,
x: arrayShape.c,
}

const chunkSizeDefault1 = ensuredDims(1, CXYZT, info.chunkSize)
const chunkSize = Object.fromEntries(chunkSizeDefault1)

    // stride: number of elements between consecutive entries along each dimension
const [chunkStrides] = Array.from(chunkSizeDefault1)
.reverse()
.reduce(
([strides, size], [dim, dimSize]) => [
{ [dim]: size, ...strides },
size * dimSize,
],
[{}, 1]
)

for (let index = 0; index < chunkIndices.length; index++) {
const chunk = chunks[index]
const [h, i, j, k, l] = chunkIndices[index]
const [c, x, y, z /*t*/] = chunkIndices[index]

const chunkStart = [
i * chunkSize[1],
j * chunkSize[2],
k * chunkSize[3],
l * chunkSize[4],
]
const chunkEnd = [
(i + 1) * chunkSize[1],
(j + 1) * chunkSize[2],
(k + 1) * chunkSize[3],
(l + 1) * chunkSize[4],
]
// Skip if the chunk lives outside the region of interest
if (
chunkStart[0] > indexEnd[0] ||
chunkEnd[0] < indexStart[0] ||
chunkStart[1] > indexEnd[1] ||
chunkEnd[1] < indexStart[1] ||
chunkStart[2] > indexEnd[2] ||
chunkEnd[2] < indexStart[2] ||
chunkStart[3] > indexEnd[3] ||
chunkEnd[3] < indexStart[3]
) {
// We should never get here...
console.error('Requested a chunk outside the region of interest!')
const chunkStart = {
c: c * chunkSize.c,
z: z * chunkSize.z,
y: y * chunkSize.y,
x: x * chunkSize.x,
}
const itStart = [
Math.max(chunkStart[0], indexStart[0]),
Math.max(chunkStart[1], indexStart[1]),
Math.max(chunkStart[2], indexStart[2]),
Math.max(chunkStart[3], indexStart[3]),
]
const itEnd = [
Math.min(chunkEnd[0], indexEnd[0]),
Math.min(chunkEnd[1], indexEnd[1]),
Math.min(chunkEnd[2], indexEnd[2]),
Math.min(chunkEnd[3], indexEnd[3]),
]
const itChunkOffsets = [0, 0, 0, 0]
itChunkOffsets[3] = chunkStrides[3] * l
const itPixelOffsets = [0, 0, 0]
for (let kk = itStart[2]; kk < itEnd[2]; kk++) {
itChunkOffsets[2] = chunkStrides[2] * (kk - k * chunkSize[3])
itPixelOffsets[2] = pixelStrides[2] * (kk - indexStart[2])
for (let jj = itStart[1]; jj < itEnd[1]; jj++) {
itChunkOffsets[1] = chunkStrides[1] * (jj - j * chunkSize[2])
itPixelOffsets[1] = pixelStrides[1] * (jj - indexStart[1])
for (let ii = itStart[0]; ii < itEnd[0]; ii++) {
const begin =
chunkStrides[0] * (itStart[0] - i * chunkSize[1]) +
itChunkOffsets[1] +
itChunkOffsets[2] +
itChunkOffsets[3]
const end = begin + components * (itEnd[0] - itStart[0])
const offset =
pixelStrides[0] * (itStart[0] - indexStart[0]) +
itPixelOffsets[1] +
itPixelOffsets[2]
const subarray = chunk.subarray(begin, end)
pixelArray.set(subarray, offset)
} // for every column
} // for every row
} // for every slice
}
const chunkEnd = {
c: (c + 1) * chunkSize.c,
z: (z + 1) * chunkSize.z,
y: (y + 1) * chunkSize.y,
x: (x + 1) * chunkSize.x,
}
const roiStart = Object.fromEntries(ensuredDims(0, CXYZT, indexStart))
const roiEnd = Object.fromEntries(ensuredDims(1, CXYZT, indexEnd))
validateIndices({ chunkStart, chunkEnd, roiStart, roiEnd })

// iterate on image from chunk or ROI start
const itStart = {
c: Math.max(chunkStart.c, roiStart.c),
z: Math.max(chunkStart.z, roiStart.z),
y: Math.max(chunkStart.y, roiStart.y),
x: Math.max(chunkStart.x, roiStart.x),
}
const itEnd = {
c: Math.min(chunkEnd.c, roiEnd.c),
z: Math.min(chunkEnd.z, roiEnd.z),
y: Math.min(chunkEnd.y, roiEnd.y),
x: Math.min(chunkEnd.x, roiEnd.x),
}

// Does input data group component(s) with each pixel?
const areComponentsInterleaved = Array.from(info.arrayShape.keys())
.join('')
.endsWith(arrayShape.c === 1 ? 'x' : 'xc') // if one component, can end with just 'x'
          // Input data endianness matches the system, or elements are just 1 byte?
const dataEndiennesOK =
getSize(info.dtype) === 1 ||
IS_SYSTEM_LITTLE_ENDIAN === testLittleEndian(info.dtype)
if (areComponentsInterleaved && dataEndiennesOK) {
// copy whole row TURBO MODE
const TypedArray = getTypedArray(info.dtype)
const typedChunk = new TypedArray(chunks[index])
for (let zz = itStart.z; zz < itEnd.z; zz++) {
const zChunkOffset = (zz - z * chunkSize.z) * chunkStrides.z
const zPixelOffset = zz * pixelStrides.z
for (let yy = itStart.y; yy < itEnd.y; yy++) {
const yChunkOffset =
(yy - y * chunkSize.y) * chunkStrides.y + zChunkOffset
const subarray = typedChunk.subarray(
yChunkOffset,
yChunkOffset + itEnd.c * (itEnd.x - itStart.x)
)
const pixelOffset =
(itStart.x - roiStart.x) * pixelStrides.x + // chunk's x index mapped to image's x index
yy * pixelStrides.y +
zPixelOffset
pixelArray.set(subarray, pixelOffset)
} // row
} // slice
} else {
// copy element by element tortoise mode
const getChunkElement = ElementGetter(info.dtype, chunks[index])
for (let cc = itStart.c; cc < itEnd.c; cc++) {
// subtract c * chunkSize.c from cc to start at beginning of chunk despite itStart.c
const cChunkOffset = (cc - c * chunkSize.c) * chunkStrides.c
for (let zz = itStart.z; zz < itEnd.z; zz++) {
const zChunkOffset =
(zz - z * chunkSize.z) * chunkStrides.z + cChunkOffset
const zPixelOffset = zz * pixelStrides.z + cc
for (let yy = itStart.y; yy < itEnd.y; yy++) {
const yChunkOffset =
(yy - y * chunkSize.y) * chunkStrides.y + zChunkOffset
const yPixelOffset = yy * pixelStrides.y + zPixelOffset
for (let xx = itStart.x; xx < itEnd.x; xx++) {
pixelArray[
xx * pixelStrides.x + yPixelOffset
] = getChunkElement(
(xx - x * chunkSize.x) * chunkStrides.x + yChunkOffset
)
} // column
} // row
} // slice
} // component
} // copy by row or element
} // chunk

let response = pixelArray
if (!haveSharedArrayBuffer) {
Expand Down
Loading

0 comments on commit aef6541

Please sign in to comment.