Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ND data loading with other axis orders #7592

Merged
merged 16 commits into from
Feb 5, 2024
Merged
2 changes: 2 additions & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
[Commits](https://github.com/scalableminds/webknossos/compare/24.02.0...HEAD)

### Added
- Webknossos can now open ND Zarr datasets with arbitrary axis orders (not limited to `**xyz` anymore). [#7592](https://github.com/scalableminds/webknossos/pull/7592)

### Changed
- Datasets stored in WKW format are no longer loaded with memory mapping, reducing memory demands. [#7528](https://github.com/scalableminds/webknossos/pull/7528)
Expand All @@ -20,6 +21,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Fixed loading local datasets for organizations that have spaces in their names. [#7593](https://github.com/scalableminds/webknossos/pull/7593)
- Fixed a bug where proofreading annotations would stay black until the next server restart due to expired but cached tokens. [#7598](https://github.com/scalableminds/webknossos/pull/7598)
- Fixed a bug where ad-hoc meshing didn't make use of a segment index, even when it existed. [#7600](https://github.com/scalableminds/webknossos/pull/7600)
- Fixed a bug in ND volume annotation downloads where the additionalAxes metadata had wrong indices. [#7592](https://github.com/scalableminds/webknossos/pull/7592)

### Removed

Expand Down
53 changes: 53 additions & 0 deletions test/backend/AxisOrderPermutationTestSuite.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
package backend

import com.scalableminds.webknossos.datastore.datareaders.{Axis, FullAxisOrder}
import org.scalatestplus.play.PlaySpec

class AxisOrderPermutationTestSuite extends PlaySpec {

  /** Applies `permutation` to `str`: character i of the result is str(permutation(i)). */
  private def permute(permutation: Array[Int], str: String): String =
    permutation.map(i => str(i)).mkString

  /** Builds a FullAxisOrder whose axes are named by the individual characters of `str`.
    * Explicit return type added: public members should declare their types. */
  def orderFromStringChars(str: String): FullAxisOrder = FullAxisOrder(str.map(char => Axis(name = char.toString)))

  /** Permutes the C-order (array) axis string into wk C order via arrayToWkPermutation. */
  private def permuteAxisOrderArrayCtoWkC(str: String): String = {
    val axisOrder = orderFromStringChars(str)
    permute(axisOrder.arrayToWkPermutation, axisOrder.toString)
  }

  /** Permutes the F-order (array) axis string — the reversed C string — into wk F order. */
  private def permuteAxisOrderArrayFtoWkF(str: String): String = {
    val axisOrder = orderFromStringChars(str)
    val axisOrderFStr = axisOrder.toString.reverse
    permute(axisOrder.arrayFToWkFPermutation, axisOrderFStr)
  }

  /** Permutes the C-order (array) axis string directly into wk F order. */
  private def permuteAxisOrderArrayCtoWkF(str: String): String = {
    val axisOrder = orderFromStringChars(str)
    permute(axisOrder.arrayCToWkFPermutation, axisOrder.toString)
  }

  "AxisOrderPermutation" should {
    "correctly permute from C (array) to C (wk)" in {
      assert(permuteAxisOrderArrayCtoWkC("xyz") == "xyz")
      assert(permuteAxisOrderArrayCtoWkC("cxyz") == "cxyz")
      assert(permuteAxisOrderArrayCtoWkC("xycz") == "cxyz")
      assert(permuteAxisOrderArrayCtoWkC("xasdfczy") == "asdfcxyz")
    }

    "correctly permute from F (array) to F (wk)" in {
      assert(permuteAxisOrderArrayFtoWkF("xyz") == "zyx")
      assert(permuteAxisOrderArrayFtoWkF("cxyz") == "zyxc")
      assert(permuteAxisOrderArrayFtoWkF("xycz") == "zyxc")
      assert(permuteAxisOrderArrayFtoWkF("xasdfczy") == "zyxcfdsa")
    }

    "correctly permute from C (array) to F (wk)" in {
      assert(permuteAxisOrderArrayCtoWkF("xyz") == "zyx")
      assert(permuteAxisOrderArrayCtoWkF("cxyz") == "zyxc")
      assert(permuteAxisOrderArrayCtoWkF("xycz") == "zyxc")
      assert(permuteAxisOrderArrayCtoWkF("xasdfczy") == "zyxcfdsa")
      assert(permuteAxisOrderArrayCtoWkF("tasxdfczy") == "zyxcfdsat")
    }
  }

}
Original file line number Diff line number Diff line change
Expand Up @@ -14,19 +14,20 @@ trait BucketProvider extends FoxImplicits with LazyLogging {
def remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService]

// To be defined in subclass.
def openShardOrArrayHandle(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[DataCubeHandle] =
def openDatasetArrayHandle(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[DatasetArrayHandle] =
Empty

def load(readInstruction: DataReadInstruction, cache: DataCubeCache)(
implicit ec: ExecutionContext): Fox[Array[Byte]] =
cache.withCache(readInstruction)(openShardOrArrayHandleWithTimeout)(
cache.withCache(readInstruction)(openDatasetArrayHandleWithTimeout)(
_.cutOutBucket(readInstruction.bucket, readInstruction.dataLayer))

private def openShardOrArrayHandleWithTimeout(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[DataCubeHandle] = {
private def openDatasetArrayHandleWithTimeout(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = {
val t = System.currentTimeMillis
for {
result <- openShardOrArrayHandle(readInstruction).futureBox
result <- openDatasetArrayHandle(readInstruction).futureBox
duration = System.currentTimeMillis - t
_ = if (duration > 500) {
val className = this.getClass.getName.split("\\.").last
Expand Down

This file was deleted.

Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
package com.scalableminds.webknossos.datastore.dataformats

import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.datareaders.DatasetArray
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass}

import scala.concurrent.ExecutionContext

/** Cache handle wrapping an opened DatasetArray; buckets are cut out of the underlying array on demand. */
class DatasetArrayHandle(datasetArray: DatasetArray) extends SafeCachable {

  /** Reads the raw bytes of one bucket from the dataset array.
    * Shape is the cubic bucket edge length; offset is the bucket's top-left voxel in mag space.
    * The final flag marks whether the layer's element class is uint24.
    */
  def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
    val topLeft = bucket.topLeft
    val bucketShape = Vec3Int.full(bucket.bucketLength)
    val bucketOffset = Vec3Int(topLeft.voxelXInMag, topLeft.voxelYInMag, topLeft.voxelZInMag)
    val isUint24 = dataLayer.elementClass == ElementClass.uint24
    datasetArray.readBytesWithAdditionalCoordinates(bucketShape, bucketOffset, bucket.additionalCoordinates, isUint24)
  }
}
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
package com.scalableminds.webknossos.datastore.dataformats.n5

import com.scalableminds.util.cache.AlfuCache
import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator}
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator}
import com.scalableminds.webknossos.datastore.datareaders.n5.N5Array
import com.scalableminds.webknossos.datastore.datavault.VaultPath
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass}
import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService
import com.typesafe.scalalogging.LazyLogging
Expand All @@ -16,27 +14,15 @@ import ucar.ma2.{Array => MultiArray}

import scala.concurrent.ExecutionContext

class N5CubeHandle(n5Array: N5Array) extends DataCubeHandle with LazyLogging {

def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
val shape = Vec3Int.full(bucket.bucketLength)
val offset = Vec3Int(bucket.topLeft.voxelXInMag, bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag)
n5Array.readBytesXYZ(shape, offset, dataLayer.elementClass == ElementClass.uint24)
}

override protected def onFinalize(): Unit = ()

}

class N5BucketProvider(layer: N5Layer,
dataSourceId: DataSourceId,
val remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService],
sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]])
extends BucketProvider
with LazyLogging {

override def openShardOrArrayHandle(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[N5CubeHandle] = {
override def openDatasetArrayHandle(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = {
val magLocatorOpt: Option[MagLocator] =
layer.mags.find(_.mag == readInstruction.bucket.mag)

Expand All @@ -57,8 +43,9 @@ class N5BucketProvider(layer: N5Layer,
layer.name,
magLocator.axisOrder,
magLocator.channelIndex,
layer.additionalAxes,
chunkContentsCache)
.map(new N5CubeHandle(_))
.map(new DatasetArrayHandle(_))
} yield cubeHandle
case None => Empty
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
package com.scalableminds.webknossos.datastore.dataformats.precomputed

import com.scalableminds.util.cache.AlfuCache
import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator}
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator}
import com.scalableminds.webknossos.datastore.datareaders.precomputed.PrecomputedArray
import com.scalableminds.webknossos.datastore.datavault.VaultPath
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass}
import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService
import com.typesafe.scalalogging.LazyLogging
Expand All @@ -16,27 +14,15 @@ import net.liftweb.common.Empty
import scala.concurrent.ExecutionContext
import ucar.ma2.{Array => MultiArray}

class PrecomputedCubeHandle(precomputedArray: PrecomputedArray) extends DataCubeHandle with LazyLogging {

def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
val shape = Vec3Int.full(bucket.bucketLength)
val offset = Vec3Int(bucket.topLeft.voxelXInMag, bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag)
precomputedArray.readBytesXYZ(shape, offset, dataLayer.elementClass == ElementClass.uint24)
}

override protected def onFinalize(): Unit = ()

}

class PrecomputedBucketProvider(layer: PrecomputedLayer,
dataSourceId: DataSourceId,
val remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService],
sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]])
extends BucketProvider
with LazyLogging {

override def openShardOrArrayHandle(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[PrecomputedCubeHandle] = {
override def openDatasetArrayHandle(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = {
val magLocatorOpt: Option[MagLocator] =
layer.mags.find(_.mag == readInstruction.bucket.mag)

Expand All @@ -57,8 +43,9 @@ class PrecomputedBucketProvider(layer: PrecomputedLayer,
layer.name,
magLocator.axisOrder,
magLocator.channelIndex,
layer.additionalAxes,
chunkContentsCache)
.map(new PrecomputedCubeHandle(_))
.map(new DatasetArrayHandle(_))
} yield cubeHandle
case None => Empty
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
package com.scalableminds.webknossos.datastore.dataformats.wkw

import com.scalableminds.util.cache.AlfuCache
import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator}
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator}
import com.scalableminds.webknossos.datastore.datareaders.wkw.WKWArray
import com.scalableminds.webknossos.datastore.datavault.VaultPath
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass}
import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService
import com.typesafe.scalalogging.LazyLogging
Expand All @@ -16,27 +14,15 @@ import ucar.ma2.{Array => MultiArray}

import scala.concurrent.ExecutionContext

class WKWCubeHandle(wkwArray: WKWArray) extends DataCubeHandle with LazyLogging {

def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
val shape = Vec3Int.full(bucket.bucketLength)
val offset = Vec3Int(bucket.topLeft.voxelXInMag, bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag)
wkwArray.readBytesXYZ(shape, offset, dataLayer.elementClass == ElementClass.uint24)
}

override protected def onFinalize(): Unit = ()

}

class WKWBucketProvider(layer: WKWLayer,
dataSourceId: DataSourceId,
val remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService],
sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]])
extends BucketProvider
with LazyLogging {

override def openShardOrArrayHandle(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[WKWCubeHandle] = {
override def openDatasetArrayHandle(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = {
val magLocatorOpt: Option[MagLocator] =
layer.wkwResolutions.find(_.resolution == readInstruction.bucket.mag).map(wkwResolutionToMagLocator)

Expand All @@ -53,7 +39,7 @@ class WKWBucketProvider(layer: WKWLayer,
chunkContentsCache <- sharedChunkContentsCache.toFox
cubeHandle <- WKWArray
.open(magPath, dataSourceId, layer.name, chunkContentsCache)
.map(new WKWCubeHandle(_))
.map(new DatasetArrayHandle(_))
} yield cubeHandle
case None => Empty
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,11 @@
package com.scalableminds.webknossos.datastore.dataformats.zarr

import com.scalableminds.util.cache.AlfuCache
import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DataCubeHandle, MagLocator}
import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, DatasetArrayHandle, MagLocator}
import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrArray
import com.scalableminds.webknossos.datastore.datavault.VaultPath
import com.scalableminds.webknossos.datastore.models.BucketPosition
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, ElementClass}
import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService
import com.typesafe.scalalogging.LazyLogging
Expand All @@ -16,32 +14,15 @@ import ucar.ma2.{Array => MultiArray}

import scala.concurrent.ExecutionContext

class ZarrCubeHandle(zarrArray: ZarrArray) extends DataCubeHandle with LazyLogging {

def cutOutBucket(bucket: BucketPosition, dataLayer: DataLayer)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
val shape = Vec3Int.full(bucket.bucketLength)
val offset = Vec3Int(bucket.topLeft.voxelXInMag, bucket.topLeft.voxelYInMag, bucket.topLeft.voxelZInMag)

bucket.additionalCoordinates match {
case Some(additionalCoordinates) if additionalCoordinates.nonEmpty =>
zarrArray.readBytesWithAdditionalCoordinates(shape, offset, additionalCoordinates, dataLayer.additionalAxisMap)
case _ => zarrArray.readBytesXYZ(shape, offset, dataLayer.elementClass == ElementClass.uint24)
}
}

override protected def onFinalize(): Unit = ()

}

class ZarrBucketProvider(layer: ZarrLayer,
dataSourceId: DataSourceId,
val remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService],
sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]])
extends BucketProvider
with LazyLogging {

override def openShardOrArrayHandle(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[ZarrCubeHandle] = {
override def openDatasetArrayHandle(readInstruction: DataReadInstruction)(
implicit ec: ExecutionContext): Fox[DatasetArrayHandle] = {
val magLocatorOpt: Option[MagLocator] =
layer.mags.find(_.mag == readInstruction.bucket.mag)

Expand All @@ -62,8 +43,9 @@ class ZarrBucketProvider(layer: ZarrLayer,
layer.name,
magLocator.axisOrder,
magLocator.channelIndex,
layer.additionalAxes,
chunkContentsCache)
.map(new ZarrCubeHandle(_))
.map(new DatasetArrayHandle(_))
} yield cubeHandle
case None => Empty
}
Expand Down
Loading