From c4d35a355d56934a4eb96c5fc8d80d0b51f5fcf3 Mon Sep 17 00:00:00 2001
From: frcroth
Date: Fri, 27 Jan 2023 14:25:53 +0100
Subject: [PATCH] WIP

- Chunks with high z are spread around -> investigate
- It is not clear to me why the chunks have to be in a different order than
  xyz when given to copyRange, while the offset needs to be in xyz
- Histogram not working -> not yet investigated
---
 conf/application.conf                         |  4 +-
 package.json                                  |  1 +
 .../util/geometry/BoundingBox.scala           |  3 ++
 .../scalableminds/util/geometry/Vec3Int.scala |  9 ++++
 .../PrecomputedBucketProvider.scala           |  3 +-
 .../datastore/datareaders/ChunkReader.scala   | 33 +++++++------
 .../datastore/datareaders/ChunkUtils.scala    | 25 ++++++++++
 .../datastore/datareaders/Compressor.scala    |  2 +-
 .../datastore/datareaders/DatasetArray.scala  | 16 ++++---
 .../datastore/datareaders/DatasetHeader.scala | 10 ++++
 .../datareaders/FileSystemStore.scala         | 18 ++++++-
 .../datareaders/MultiArrayUtils.scala         | 35 ++++++++++++--
 .../precomputed/PrecomputedArray.scala        | 48 ++++++++++++-------
 .../precomputed/PrecomputedHeader.scala       | 25 +++++++++-
 14 files changed, 179 insertions(+), 53 deletions(-)

diff --git a/conf/application.conf b/conf/application.conf
index b6950491e75..658c5ab76a2 100644
--- a/conf/application.conf
+++ b/conf/application.conf
@@ -287,5 +287,5 @@
 pidfile.path = "/dev/null"
 
 # uncomment these lines for faster restart during local backend development (but beware the then-missing features):
-#slick.checkSchemaOnStartup = false
-#play.modules.disabled += "play.modules.swagger.SwaggerModule"
+slick.checkSchemaOnStartup = false
+play.modules.disabled += "play.modules.swagger.SwaggerModule"
diff --git a/package.json b/package.json
index 1f23eca8d23..765cadecd5a 100644
--- a/package.json
+++ b/package.json
@@ -85,6 +85,7 @@
     "build-watch": "node_modules/.bin/webpack -w",
     "listening": "lsof -i:5005,7155,9000,9001,9002",
     "kill-listeners": "kill -9 $(lsof -t -i:5005,7155,9000,9001,9002)",
+    "rm-lock": "rm fossildb/data/LOCK",
     "test": "tools/test.sh test --timeout=30s",
     "test-changed": "tools/test.sh test-changed --timeout=30s",
     "test-verbose": "xvfb-run -s '-ac -screen 0 1280x1024x24' tools/test.sh test --timeout=60s --verbose",
diff --git a/util/src/main/scala/com/scalableminds/util/geometry/BoundingBox.scala b/util/src/main/scala/com/scalableminds/util/geometry/BoundingBox.scala
index 5ae585fd0e8..80fe210a2b7 100644
--- a/util/src/main/scala/com/scalableminds/util/geometry/BoundingBox.scala
+++ b/util/src/main/scala/com/scalableminds/util/geometry/BoundingBox.scala
@@ -100,6 +100,9 @@ object BoundingBox {
     else
       None
 
+  def fromOffsetAndShape(offset: Vec3Int, shape: Vec3Int): BoundingBox =
+    BoundingBox(offset, shape.x, shape.y, shape.z)
+
   def union(bbs: List[BoundingBox]): BoundingBox =
     bbs match {
       case head :: tail =>
diff --git a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala
index e17a0959c37..5d7f363c7a7 100644
--- a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala
+++ b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala
@@ -14,6 +14,15 @@ case class Vec3Int(x: Int, y: Int, z: Int) {
   def *(that: Int): Vec3Int =
     Vec3Int(x * that, y * that, z * that)
 
+  def /(that: Vec3Int): Vec3Int =
+    Vec3Int(x / that.x, y / that.y, z / that.z)
+
+  def +(that: Vec3Int): Vec3Int =
+    Vec3Int(x + that.x, y + that.y, z + that.z)
+
+  def -(that: Vec3Int): Vec3Int =
+    Vec3Int(x - that.x, y - that.y, z - that.z)
+
   def scale(s: Float): Vec3Int =
     Vec3Int((x * s).toInt, (y * s).toInt, (z * s).toInt)
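Editor's note: the new component-wise Vec3Int operators support the chunk-grid arithmetic used further down (see the commented-out calculatePrecomputedChunks). A minimal self-contained sketch of the intended use; the component-wise * is an assumption here, the patch itself only adds /, + and -:

```scala
object ChunkGridDemo extends App {
  case class Vec3Int(x: Int, y: Int, z: Int) {
    def /(that: Vec3Int): Vec3Int = Vec3Int(x / that.x, y / that.y, z / that.z)
    def +(that: Vec3Int): Vec3Int = Vec3Int(x + that.x, y + that.y, z + that.z)
    def -(that: Vec3Int): Vec3Int = Vec3Int(x - that.x, y - that.y, z - that.z)
    // hypothetical component-wise multiplication, not part of the patch
    def *(that: Vec3Int): Vec3Int = Vec3Int(x * that.x, y * that.y, z * that.z)
  }

  val offset    = Vec3Int(130, 70, 33) // a voxel position
  val chunkSize = Vec3Int(64, 64, 64)
  // integer division truncates, so this snaps the position to the chunk grid
  println(offset / chunkSize * chunkSize) // Vec3Int(128,64,0)
}
```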
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala
index 5354587e7de..983659ba5fb 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/precomputed/PrecomputedBucketProvider.scala
@@ -53,7 +53,8 @@ class PrecomputedBucketProvider(layer: PrecomputedLayer)
       case None => Empty
       case Some(magPath) =>
         tryo(onError = e => logError(e))(
-          PrecomputedArray.open(magPath, precomputedMag.axisOrder, precomputedMag.channelIndex))
+          PrecomputedArray
+            .open(magPath, precomputedMag.axisOrder, precomputedMag.channelIndex, readInstruction.bucket.mag))
           .map(new PrecomputedCubeHandle(_))
     }
   }
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala
index ccc380cba6b..80e304bb22f 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala
@@ -27,8 +27,8 @@ class ChunkReader(val header: DatasetHeader, val store: FileSystemStore, val typ
   lazy val chunkSize: Int = header.chunkSize.toList.product
 
   @throws[IOException]
-  def read(path: String): Future[MultiArray] =
-    typedChunkReader.read(readBytes(path))
+  def read(path: String, chunkShape: Array[Int]): Future[MultiArray] =
+    typedChunkReader.read(readBytes(path), chunkShape)
 
   protected def readBytes(path: String): Option[Array[Byte]] =
     Using.Manager { use =>
@@ -44,28 +44,27 @@ class ChunkReader(val header: DatasetHeader, val store: FileSystemStore, val typ
 
 abstract class TypedChunkReader {
   val header: DatasetHeader
-  var chunkShape: Array[Int] = header.chunkShapeOrdered
 
   def ma2DataType: MADataType
-  def read(bytes: Option[Array[Byte]]): Future[MultiArray]
+  def read(bytes: Option[Array[Byte]], chunkShape: Array[Int]): Future[MultiArray]
 
-  def createFilled(dataType: MADataType): MultiArray =
+  def createFilled(dataType: MADataType, chunkShape: Array[Int]): MultiArray =
    MultiArrayUtils.createFilledArray(dataType, chunkShape, header.fillValueNumber)
 }
 
 class ByteChunkReader(val header: DatasetHeader) extends TypedChunkReader {
   val ma2DataType: MADataType = MADataType.BYTE
 
-  def read(bytes: Option[Array[Byte]]): Future[MultiArray] =
+  def read(bytes: Option[Array[Byte]], chunkShape: Array[Int]): Future[MultiArray] =
     Future.successful(bytes.map { result =>
       MultiArray.factory(ma2DataType, chunkShape, result)
-    }.getOrElse(createFilled(ma2DataType)))
+    }.getOrElse(createFilled(ma2DataType, chunkShape)))
 }
 
 class DoubleChunkReader(val header: DatasetHeader) extends TypedChunkReader {
   val ma2DataType: MADataType = MADataType.DOUBLE
 
-  def read(bytes: Option[Array[Byte]]): Future[MultiArray] =
+  def read(bytes: Option[Array[Byte]], chunkShape: Array[Int]): Future[MultiArray] =
     Future.successful(Using.Manager { use =>
       bytes.map { result =>
         val typedStorage = new Array[Double](chunkShape.product)
@@ -74,7 +73,7 @@ class DoubleChunkReader(val header: DatasetHeader) extends TypedChunkReader {
         iis.setByteOrder(header.byteOrder)
         iis.readFully(typedStorage, 0, typedStorage.length)
         MultiArray.factory(ma2DataType, chunkShape, typedStorage)
-      }.getOrElse(createFilled(ma2DataType))
+      }.getOrElse(createFilled(ma2DataType, chunkShape))
     }.get)
 }
 
@@ -82,7 +81,7 @@ class ShortChunkReader(val header: DatasetHeader) extends TypedChunkReader with
 
   val ma2DataType: MADataType = MADataType.SHORT
 
-  def read(bytes: Option[Array[Byte]]): Future[MultiArray] =
+  def read(bytes: Option[Array[Byte]], chunkShape: Array[Int]): Future[MultiArray] =
     Future.successful(Using.Manager { use =>
       bytes.map { result =>
         val typedStorage = new Array[Short](chunkShape.product)
@@ -91,7 +90,7 @@ class ShortChunkReader(val header: DatasetHeader) extends TypedChunkReader with
         iis.setByteOrder(header.byteOrder)
         iis.readFully(typedStorage, 0, typedStorage.length)
         MultiArray.factory(ma2DataType, chunkShape, typedStorage)
-      }.getOrElse(createFilled(ma2DataType))
+      }.getOrElse(createFilled(ma2DataType, chunkShape))
     }.get)
 }
 
@@ -99,7 +98,7 @@ class IntChunkReader(val header: DatasetHeader) extends TypedChunkReader {
 
   val ma2DataType: MADataType = MADataType.INT
 
-  def read(bytes: Option[Array[Byte]]): Future[MultiArray] =
+  def read(bytes: Option[Array[Byte]], chunkShape: Array[Int]): Future[MultiArray] =
     Future.successful(Using.Manager { use =>
       bytes.map { result =>
         val typedStorage = new Array[Int](chunkShape.product)
@@ -108,7 +107,7 @@ class IntChunkReader(val header: DatasetHeader) extends TypedChunkReader {
         iis.setByteOrder(header.byteOrder)
         iis.readFully(typedStorage, 0, typedStorage.length)
         MultiArray.factory(ma2DataType, chunkShape, typedStorage)
-      }.getOrElse(createFilled(ma2DataType))
+      }.getOrElse(createFilled(ma2DataType, chunkShape))
     }.get)
 }
 
@@ -116,7 +115,7 @@ class LongChunkReader(val header: DatasetHeader) extends TypedChunkReader {
 
   val ma2DataType: MADataType = MADataType.LONG
 
-  def read(bytes: Option[Array[Byte]]): Future[MultiArray] =
+  def read(bytes: Option[Array[Byte]], chunkShape: Array[Int]): Future[MultiArray] =
     Future.successful(Using.Manager { use =>
       bytes.map { result =>
         val typedStorage = new Array[Long](chunkShape.product)
@@ -125,7 +124,7 @@ class LongChunkReader(val header: DatasetHeader) extends TypedChunkReader {
         iis.setByteOrder(header.byteOrder)
         iis.readFully(typedStorage, 0, typedStorage.length)
         MultiArray.factory(ma2DataType, chunkShape, typedStorage)
-      }.getOrElse(createFilled(ma2DataType))
+      }.getOrElse(createFilled(ma2DataType, chunkShape))
     }.get)
 }
 
@@ -133,7 +132,7 @@ class FloatChunkReader(val header: DatasetHeader) extends TypedChunkReader {
 
   val ma2DataType: MADataType = MADataType.FLOAT
 
-  def read(bytes: Option[Array[Byte]]): Future[MultiArray] =
+  def read(bytes: Option[Array[Byte]], chunkShape: Array[Int]): Future[MultiArray] =
     Future.successful(Using.Manager { use =>
       bytes.map { result =>
         val typedStorage = new Array[Float](chunkShape.product)
@@ -142,6 +141,6 @@ class FloatChunkReader(val header: DatasetHeader) extends TypedChunkReader {
         iis.setByteOrder(header.byteOrder)
         iis.readFully(typedStorage, 0, typedStorage.length)
         MultiArray.factory(ma2DataType, chunkShape, typedStorage)
-      }.getOrElse(createFilled(ma2DataType))
+      }.getOrElse(createFilled(ma2DataType, chunkShape))
     }.get)
 }
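Editor's note: read now takes the chunk shape per call because neuroglancer-precomputed stores truncated border chunks, so a single shape taken from the header no longer fits every chunk. A standalone sketch of the per-index shape computation (it mirrors chunkSizeAtIndex introduced in PrecomputedHeader below; names are illustrative):

```scala
object ChunkShapeDemo extends App {
  // Border chunks are clipped against the dataset shape, so their MultiArray
  // shape can be smaller than the regular chunk size.
  def chunkShapeAtIndex(datasetShape: Array[Int], chunkSize: Array[Int], chunkIndex: Array[Int]): Array[Int] =
    chunkIndex.indices.map { dim =>
      val begin = chunkIndex(dim) * chunkSize(dim)
      val end   = math.min(begin + chunkSize(dim), datasetShape(dim))
      end - begin
    }.toArray

  // dataset 100x100x100 with 64x64x64 chunks: border chunk (1, 1, 1) holds only 36 voxels per axis
  println(chunkShapeAtIndex(Array(100, 100, 100), Array(64, 64, 64), Array(1, 1, 1)).mkString("x")) // 36x36x36
}
```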
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala
index f6d10acb2c7..5321fdfeef2 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkUtils.scala
@@ -40,4 +40,29 @@ object ChunkUtils extends LazyLogging {
     }
     chunkIndices.toList
   }
+/*
+  def calculatePrecomputedChunks(arrayShape: Array[Int],
+                                 arrayChunkSize: Array[Int],
+                                 selectedShape: Array[Int],
+                                 selectedOffset: Array[Int],
+                                 mag: Vec3Int): Iterable[BoundingBox] = {
+
+    val offset = (selectedOffset, mag.toList).zipped.map((o, m) => o / m)
+    val requested = BoundingBox.fromOffsetAndShape(Vec3Int.fromList(offset.toList).getOrElse(Vec3Int(0, 0, 0)),
+                                                   Vec3Int.fromList(selectedShape.toList).getOrElse(Vec3Int(0, 0, 0)))
+    val boundingBox = BoundingBox(Vec3Int(0, 0, 0), arrayShape(0), arrayShape(1), arrayShape(2))
+    val inside = requested.intersection(boundingBox)
+
+    val chunkSize = Vec3Int.fromList(arrayChunkSize.toList).getOrElse(Vec3Int(0, 0, 0))
+
+    inside match {
+      case Some(inside) => {
+        val aligned = (inside - boundingBox.topLeft).div(chunkSize) * chunkSize + boundingBox.topLeft
+        aligned
+          .range(chunkSize)
+          .flatMap(chunkOffset => BoundingBox.fromOffsetAndShape(chunkOffset, chunkSize).intersection(boundingBox))
+      }
+      case _ => List()
+    }
+  }*/
 }
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/Compressor.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/Compressor.scala
index 8b08a009a01..444342c1e64 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/Compressor.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/Compressor.scala
@@ -15,7 +15,7 @@ import java.nio.ByteBuffer
 import java.util
 import java.util.zip.{Deflater, DeflaterOutputStream, Inflater, InflaterInputStream}
 import javax.imageio.ImageIO
-import javax.imageio.ImageIO.{createImageInputStream, createImageOutputStream}
+import javax.imageio.ImageIO.createImageInputStream
 import javax.imageio.stream.ImageInputStream
 
 sealed trait CompressionSetting
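Editor's note: the commented-out calculatePrecomputedChunks above decomposes a request into chunk-aligned boxes clipped to the array bounds. A standalone sketch of that decomposition using plain arrays instead of BoundingBox/Vec3Int (rank 3 assumed, names illustrative):

```scala
object ChunkBoxesDemo extends App {
  // (offset, shape) of every chunk-aligned box intersecting the request, clipped to the array
  def chunkBoxes(arrayShape: Array[Int],
                 chunkSize: Array[Int],
                 offset: Array[Int],
                 shape: Array[Int]): Seq[(Array[Int], Array[Int])] = {
    val ranges = arrayShape.indices.map { d =>
      val lo = math.max(offset(d), 0)
      val hi = math.min(offset(d) + shape(d), arrayShape(d)) // exclusive
      if (hi <= lo) 0 until 0
      else lo / chunkSize(d) until (hi + chunkSize(d) - 1) / chunkSize(d)
    }
    for { x <- ranges(0); y <- ranges(1); z <- ranges(2) } yield {
      val begin = Array(x * chunkSize(0), y * chunkSize(1), z * chunkSize(2))
      val boxShape = begin.indices.map(d => math.min(begin(d) + chunkSize(d), arrayShape(d)) - begin(d)).toArray
      (begin, boxShape)
    }
  }

  // a 96-wide request at (32, 32, 32) in a 100x100x100 array with 64x64x64 chunks touches 8 chunks
  println(chunkBoxes(Array(100, 100, 100), Array(64, 64, 64), Array(32, 32, 32), Array(96, 96, 96)).size) // 8
}
```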
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala
index 7de39c74c70..21b4c86aaf1 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala
@@ -25,7 +25,7 @@ class DatasetArray(relativePath: DatasetPath,
     ChunkReader.create(store, header)
 
   // cache currently limited to 1 GB per array
-  private lazy val chunkContentsCache: Cache[String, MultiArray] = {
+  protected lazy val chunkContentsCache: Cache[String, MultiArray] = {
     val maxSizeBytes = 1000L * 1000 * 1000
     val maxEntries = maxSizeBytes / header.bytesPerChunk
     AlfuCache(maxEntries.toInt)
@@ -58,7 +58,7 @@ class DatasetArray(relativePath: DatasetPath,
   // This function will internally adapt to the array's axis order so that XYZ data in fortran-order is returned.
   @throws[IOException]
   @throws[InvalidRangeException]
-  private def readAsFortranOrder(shape: Array[Int], offset: Array[Int])(implicit ec: ExecutionContext): Fox[Object] = {
+  protected def readAsFortranOrder(shape: Array[Int], offset: Array[Int])(implicit ec: ExecutionContext): Fox[Object] = {
     val chunkIndices = ChunkUtils.computeChunkIndices(axisOrder.permuteIndicesReverse(header.datasetShape),
                                                       axisOrder.permuteIndicesReverse(header.chunkSize),
                                                       shape,
@@ -78,7 +78,8 @@ class DatasetArray(relativePath: DatasetPath,
             offsetInChunk = computeOffsetInChunk(chunkIndex, offset)
             sourceChunkInCOrder: MultiArray = MultiArrayUtils.axisOrderXYZView(sourceChunk,
                                                                                axisOrder,
-                                                                               flip = header.order != ArrayOrder.C)
+                                                                               flip = header.order != ArrayOrder.C,
+                                                                               header.shiftAxisOrderRight)
             _ = MultiArrayUtils.copyRange(offsetInChunk, sourceChunkInCOrder, targetInCOrder)
           } yield ()
         }
@@ -88,12 +89,13 @@ class DatasetArray(relativePath: DatasetPath,
     }
   }
 
-  private def getSourceChunkDataWithCache(chunkIndex: Array[Int]): Future[MultiArray] = {
+  protected def getSourceChunkDataWithCache(chunkIndex: Array[Int]): Future[MultiArray] = {
     val chunkFilename = getChunkFilename(chunkIndex)
     val chunkFilePath = relativePath.resolve(chunkFilename)
     val storeKey = chunkFilePath.storeKey
+    val chunkShape = header.chunkSizeAtIndex(chunkIndex)
 
-    chunkContentsCache.getOrLoad(storeKey, chunkReader.read)
+    chunkContentsCache.getOrLoad(storeKey, key => chunkReader.read(key, chunkShape))
   }
 
   protected def getChunkFilename(chunkIndex: Array[Int]): String =
@@ -118,9 +120,9 @@ class DatasetArray(relativePath: DatasetPath,
   private def isZeroOffset(offset: Array[Int]): Boolean =
     util.Arrays.equals(offset, new Array[Int](offset.length))
 
-  private def computeOffsetInChunk(chunkIndex: Array[Int], globalOffset: Array[Int]): Array[Int] =
+  protected def computeOffsetInChunk(chunkIndex: Array[Int], globalOffset: Array[Int]): Array[Int] =
     chunkIndex.indices.map { dim =>
-      globalOffset(dim) - (chunkIndex(dim) * axisOrder.permuteIndicesReverse(header.chunkSize)(dim))
+      globalOffset(dim) - (chunkIndex(dim) * header.chunkSize(dim))
    }.toArray
 
   override def toString: String =
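Editor's note: computeOffsetInChunk (now using the unpermuted header.chunkSize) hands copyRange the request origin relative to each chunk's origin; copyRange treats negative components as an offset into the target instead of the source. A worked sketch with illustrative values:

```scala
object OffsetInChunkDemo extends App {
  def computeOffsetInChunk(chunkIndex: Array[Int], chunkSize: Array[Int], globalOffset: Array[Int]): Array[Int] =
    chunkIndex.indices.map(dim => globalOffset(dim) - chunkIndex(dim) * chunkSize(dim)).toArray

  // request origin x = 70, chunks 64 voxels wide:
  println(computeOffsetInChunk(Array(1, 0, 0), Array(64, 64, 64), Array(70, 0, 0))(0)) //   6: read source from 6
  println(computeOffsetInChunk(Array(2, 0, 0), Array(64, 64, 64), Array(70, 0, 0))(0)) // -58: write target from 58
}
```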
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala
index 06f409354e2..04f382f1174 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetHeader.scala
@@ -9,13 +9,19 @@ import java.nio.ByteOrder
 
 trait DatasetHeader {
   def datasetShape: Array[Int] // shape of the entire array
+
   def chunkSize: Array[Int] // shape of each chunk
+
   def dimension_separator: DimensionSeparator
+
   def dataType: String
+
   def fill_value: Either[String, Number]
+
   def order: ArrayOrder
 
   def resolvedDataType: ArrayDataType
+
   def compressorImpl: Compressor
 
   lazy val byteOrder: ByteOrder = ByteOrder.BIG_ENDIAN
@@ -40,4 +46,8 @@ trait DatasetHeader {
       Some(BoundingBox(Vec3Int.zeros, datasetShape(axisOrder.x), datasetShape(axisOrder.y), datasetShape(axisOrder.z)))
 
   lazy val rank: Int = datasetShape.length
+
+  def shiftAxisOrderRight: Boolean = false
+
+  def chunkSizeAtIndex(chunkIndex: Array[Int]): Array[Int] = chunkSize
 }
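Editor's note: both new members are deliberately defaulted in the trait so that only the precomputed header has to override them; condensed to the two hooks:

```scala
trait DatasetHeader {
  def chunkSize: Array[Int]
  // default: chunk axes follow the dataset axis order
  def shiftAxisOrderRight: Boolean = false
  // default: every chunk has the full chunk size (no truncated border chunks)
  def chunkSizeAtIndex(chunkIndex: Array[Int]): Array[Int] = chunkSize
}
```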
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/FileSystemStore.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/FileSystemStore.scala
index f62fc61f4e4..3551c38a023 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/FileSystemStore.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/FileSystemStore.scala
@@ -1,7 +1,9 @@
 package com.scalableminds.webknossos.datastore.datareaders
 
+import com.typesafe.scalalogging.LazyLogging
 import net.liftweb.util.Helpers.tryo
 
+import java.io.FileNotFoundException
 import java.nio.file.{FileSystem, Files, Path}
 
 class FileSystemStore(val internalRoot: Path) {
@@ -12,7 +14,8 @@ class FileSystemStore(val internalRoot: Path) {
 }
 
 class GoogleCloudFileSystemStore(override val internalRoot: Path, fs: FileSystem)
-    extends FileSystemStore(internalRoot) {
+    extends FileSystemStore(internalRoot)
+    with LazyLogging {
 
   private def normalizedInternalRoot = {
     def prefix = internalRoot.getParent.toString // This part uses "/"
@@ -23,6 +26,17 @@ class GoogleCloudFileSystemStore(override val internalRoot: Path, fs: FileSystem
 
   override def readBytes(key: String): Option[Array[Byte]] = {
     val path = s"$normalizedInternalRoot%2F$key?alt=media"
-    tryo(Files.readAllBytes(fs.getPath(path))).toOption
+    try {
+      Some(Files.readAllBytes(fs.getPath(path)))
+    } catch {
+      case _: FileNotFoundException => {
+        logger.info(s"Could not read data at $path")
+        None
+      }
+      case _: Throwable => {
+        logger.info(s"Could not read data at $path")
+        None
+      }
+    }
   }
 }
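Editor's note: the tryo call was expanded into an explicit try/catch only so the failing path gets logged. Without that requirement the fallback could stay a one-liner, e.g. via scala.util.Try (a sketch of the alternative, not what the patch does):

```scala
import java.nio.file.{FileSystem, Files}
import scala.util.Try

object QuietRead {
  // swallow any read failure and return None, without logging
  def readBytesQuiet(fs: FileSystem, path: String): Option[Array[Byte]] =
    Try(Files.readAllBytes(fs.getPath(path))).toOption
}
```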
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala
index 0dbaa924c4b..c35521d5f9a 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/MultiArrayUtils.scala
@@ -64,12 +64,19 @@ object MultiArrayUtils {
     */
   @throws[InvalidRangeException]
   def copyRange(offset: Array[Int], source: MultiArray, target: MultiArray): Unit = {
+    // At this point, offset is XYZ; source should be XYZ as well (but for precomputed it is XZY)
+
+    val newOffset = if (source.getShape.toSet.size > 1) {
+      Array(offset(0), offset(2), offset(1))
+    } else {
+      offset
+    }
     val sourceShape: Array[Int] = source.getShape
     val targetShape: Array[Int] = target.getShape
     val sourceRanges = new util.ArrayList[Range]
     val targetRanges = new util.ArrayList[Range]
-    for (dimension <- offset.indices) {
-      val dimOffset = offset(dimension)
+    for (dimension <- newOffset.indices) {
+      val dimOffset = newOffset(dimension)
       var sourceFirst = 0
       var targetFirst = 0
       if (dimOffset >= 0) {
@@ -120,18 +127,36 @@ object MultiArrayUtils {
     source.permute(permutation)
   }
 
-  def axisOrderXYZView(source: MultiArray, axisOrder: AxisOrder, flip: Boolean): MultiArray = {
+  def pad(source: MultiArray): MultiArray = {
+    val ma = MultiArray.factory(source.getDataType, source.getShape)
+    val sourceInnerArray = source.getStorage.asInstanceOf[Array[Byte]]
+    val actualSize = sourceInnerArray.length
+    if (source.getSize == actualSize) {
+      return source
+    }
+    val targetInnerArray = ma.getStorage.asInstanceOf[Array[Byte]]
+    for (i <- 0 until actualSize) {
+      targetInnerArray(i) = sourceInnerArray(i)
+    }
+    ma
+  }
+
+  def axisOrderXYZView(source: MultiArray, axisOrder: AxisOrder, flip: Boolean, shiftRight: Boolean): MultiArray = {
     /* create a view in which the last three axes are XYZ, rest unchanged
      * optionally flip the axes afterwards
      *
      * Note that we are at this point unsure if this function should be using
      * the *inverse* permutation.
      * For all cases we could test, the two are identical. Beware of this when debugging future datasets,
      * e.g. with axis order ZXY
+     *
+     * 2023-01-19: For neuroglancer-precomputed datasets, the axis order used for indexing chunks differs
+     * from the axis order of the data within a chunk. Therefore, the additional parameter shiftRight is
+     * introduced, which is only used for neuroglancer-precomputed datasets.
      */
     val permutation = axisOrder.permutation(source.getRank)
     val flippedIfNeeded = if (flip) permutation.reverse else permutation
-    val permutationHack = Array(0, 2, 1)
-    source.permute(permutationHack)
+    val shiftedIfNeeded = if (shiftRight) flippedIfNeeded.last +: flippedIfNeeded.init else flippedIfNeeded
+    source.permute(shiftedIfNeeded)
   }
 }
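Editor's note: "shift right" rotates the permutation array one position to the right. For a rank-3 permutation (2, 1, 0), for example, the rotation yields (0, 2, 1), which is the XZY chunk order the commit message puzzles over. A worked sketch:

```scala
object ShiftRightDemo extends App {
  val permutation = Array(2, 1, 0) // an example rank-3 permutation
  val shifted = permutation.last +: permutation.init
  println(shifted.mkString(", ")) // 0, 2, 1
}
```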
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala
index 8c83ee4a000..bf30d7b17a0 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala
@@ -1,24 +1,24 @@
 package com.scalableminds.webknossos.datastore.datareaders.precomputed
 
-import com.scalableminds.webknossos.datastore.datareaders.{
-  AxisOrder,
-  ChunkReader,
-  DatasetArray,
-  DatasetPath,
-  FileSystemStore,
-  GoogleCloudFileSystemStore,
-  GoogleCloudStoragePath
-}
+import com.scalableminds.util.geometry.{BoundingBox, Vec3Int}
+import com.scalableminds.util.tools.Fox
+import com.scalableminds.webknossos.datastore.datareaders.{ArrayOrder, AxisOrder, ChunkReader, ChunkUtils, DatasetArray, DatasetPath, FileSystemStore, GoogleCloudFileSystemStore, GoogleCloudStoragePath, MultiArrayUtils}
 import com.typesafe.scalalogging.LazyLogging
 import play.api.libs.json.{JsError, JsSuccess, Json}
+import ucar.ma2.InvalidRangeException
 
 import java.io.IOException
 import java.nio.charset.StandardCharsets
 import java.nio.file.Path
+import scala.concurrent.{ExecutionContext, Future}
+import ucar.ma2.{InvalidRangeException, Array => MultiArray}
 
 object PrecomputedArray extends LazyLogging {
   @throws[IOException]
-  def open(magPath: Path, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int]): PrecomputedArray = {
+  def open(magPath: Path,
+           axisOrderOpt: Option[AxisOrder],
+           channelIndex: Option[Int],
+           mag: Vec3Int): PrecomputedArray = {
 
     //val rootPath = new DatasetPath("")
 
@@ -57,7 +57,8 @@ object PrecomputedArray extends LazyLogging {
       store,
       scaleHeader,
       axisOrderOpt.getOrElse(AxisOrder.asZyxFromRank(scaleHeader.rank)),
-      channelIndex)
+      channelIndex,
+      mag)
   }
 }
 
@@ -65,7 +66,8 @@ class PrecomputedArray(relativePath: DatasetPath,
                        store: FileSystemStore,
                        header: PrecomputedScaleHeader,
                        axisOrder: AxisOrder,
-                       channelIndex: Option[Int])
+                       channelIndex: Option[Int],
+                       mag: Vec3Int)
     extends DatasetArray(relativePath, store, header, axisOrder, channelIndex)
     with LazyLogging {
 
@@ -73,16 +75,30 @@ class PrecomputedArray(relativePath: DatasetPath,
     PrecomputedChunkReader.create(store, header)
 
   lazy val voxelOffset = header.precomputedScale.voxel_offset.getOrElse(Array(0, 0, 0))
-
   override protected def getChunkFilename(chunkIndex: Array[Int]): String = {
-    val coordinates: Array[String] = chunkIndex.zipWithIndex.map(indices => {
+
+    /*val coordinates: Array[String] = chunkIndex.zipWithIndex.map(indices => {
       val (cIndex, i) = indices
      val beginOffset = voxelOffset(i) + cIndex * header.precomputedScale.chunk_sizes.head(i)
       val endOffset = voxelOffset(i) + ((cIndex + 1) * header.precomputedScale.chunk_sizes.head(i))
         .min(header.precomputedScale.size(i))
       s"$beginOffset-$endOffset"
-    })
-    coordinates.mkString(header.dimension_separator.toString)
+    })*/
+    val bbox = header.chunkIndexToBoundingBox(chunkIndex)
+    bbox.map(dim => {
+      s"${dim._1}-${dim._2}"
+    }).mkString(header.dimension_separator.toString)
+    /*
+    logger.info(
+      s"Requesting chunkIndex ${chunkIndex(0)},${chunkIndex(1)},${chunkIndex(2)} with grid_size ${
+        header.grid_size
+        .mkString("Array(", ", ", ")")
+      },")
+    if (chunkIndex(2) > header.grid_size(2)) {
+      logger.info("Chunk index > grid size")
+    }
+    logger.info(coordinates.mkString(header.dimension_separator.toString))
+    coordinates.mkString(header.dimension_separator.toString)*/
   }
 }
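Editor's note: the resulting chunk filename follows the neuroglancer precomputed naming scheme, begin-end voxel ranges per axis joined by the dimension separator (an underscore). A sketch of the final key format:

```scala
object ChunkKeyDemo extends App {
  def chunkKey(bbox: Array[(Int, Int)], separator: String = "_"): String =
    bbox.map { case (begin, end) => s"$begin-$end" }.mkString(separator)

  println(chunkKey(Array((0, 64), (64, 128), (0, 50)))) // 0-64_64-128_0-50
}
```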
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedHeader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedHeader.scala
index cca4a68ec08..be39034d5c7 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedHeader.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedHeader.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datareaders.precomputed
 
+import com.scalableminds.util.geometry.{BoundingBox, Vec3Int}
 import com.scalableminds.webknossos.datastore.datareaders.ArrayDataType.ArrayDataType
 import com.scalableminds.webknossos.datastore.datareaders.ArrayOrder.ArrayOrder
 import com.scalableminds.webknossos.datastore.datareaders.DimensionSeparator.DimensionSeparator
@@ -9,6 +10,7 @@
 import play.api.libs.json.{Format, JsResult, JsValue, Json}
 import play.api.libs.json.Json.WithDefaultValues
 
 import java.nio.ByteOrder
+import scala.::
 
 case class PrecomputedHeader(`type`: String, data_type: String, num_channels: Int, scales: List[PrecomputedScale]) {
@@ -27,7 +29,7 @@ case class PrecomputedScale(key: String,
 
 case class PrecomputedScaleHeader(precomputedScale: PrecomputedScale, precomputedHeader: PrecomputedHeader)
     extends DatasetHeader {
-  override def datasetShape: Array[Int] = (precomputedScale.resolution, precomputedScale.size).zipped.map(_ * _)
+  override def datasetShape: Array[Int] = precomputedScale.size //(precomputedScale.resolution, precomputedScale.size).zipped.map(_ * _)
 
   override def chunkSize: Array[Int] = precomputedScale.chunk_sizes.head
@@ -41,12 +43,31 @@ case class PrecomputedScaleHeader(precomputedScale: PrecomputedScale, precompute
 
   override lazy val byteOrder: ByteOrder = ByteOrder.LITTLE_ENDIAN
 
+  override def shiftAxisOrderRight: Boolean = true
+
   override def resolvedDataType: ArrayDataType =
     PrecomputedDataType.toArrayDataType(PrecomputedDataType.fromString(dataType.toLowerCase).get)
 
   lazy val compressorImpl: Compressor = PrecomputedCompressorFactory.create(precomputedScale.encoding)
+
+  def grid_size: Array[Int] = (chunkSize, precomputedScale.size).zipped.map((c, s) => (s.toDouble / c).ceil.toInt)
+
+  override def chunkSizeAtIndex(chunkIndex: Array[Int]): Array[Int] = {
+    chunkIndexToBoundingBox(chunkIndex).map(dim => dim._2 - dim._1)
+  }
+
+  lazy val voxelOffset: Array[Int] = precomputedScale.voxel_offset.getOrElse(Array(0, 0, 0))
+
+  def chunkIndexToBoundingBox(chunkIndex: Array[Int]): Array[(Int, Int)] = {
+    chunkIndex.zipWithIndex.map(indices => {
+      val (cIndex, i) = indices
+      val beginOffset = voxelOffset(i) + cIndex * precomputedScale.chunk_sizes.head(i)
+      val endOffset = voxelOffset(i) + ((cIndex + 1) * precomputedScale.chunk_sizes.head(i))
+        .min(precomputedScale.size(i))
+      (beginOffset, endOffset)
+    })
+  }
 }
 
-//
 object PrecomputedScale extends JsonImplicits {
   implicit object PrecomputedScaleFormat extends Format[PrecomputedScale] {
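Editor's note (the patch text is cut off above, mid-diff): grid_size in the final hunk uses ceiling division so that a partially filled border chunk still counts toward the grid. A worked sketch with illustrative values:

```scala
object GridSizeDemo extends App {
  // a 100-voxel axis with 64-voxel chunks needs ceil(100 / 64) = 2 chunks
  val gridSize = (Array(64, 64, 64), Array(100, 100, 100)).zipped.map((c, s) => (s.toDouble / c).ceil.toInt)
  println(gridSize.mkString(", ")) // 2, 2, 2
}
```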