Explore and view Neuroglancer Precomputed image volumes #6716

Merged
merged 29 commits into master from precomputed on Feb 9, 2023

Commits (29)
45a558e
WIP: Precomputed
frcroth Dec 20, 2022
b1b387e
--wip-- [skip ci]
frcroth Jan 5, 2023
a8bf9ea
Merge branch 'master' into precomputed
frcroth Jan 5, 2023
06e7b63
WIP: Separate header and scaleheader
frcroth Jan 9, 2023
5f3737c
Fix metadata reading
frcroth Jan 10, 2023
786fd8f
Implement reading of precomputed data sets
frcroth Jan 13, 2023
93b2132
Kill all listeners
frcroth Jan 13, 2023
4f48a8b
Format
frcroth Jan 13, 2023
2b80411
WIP: Fix precomputed reading
frcroth Jan 19, 2023
2d0e09b
WIP
frcroth Jan 31, 2023
97f7119
Pretty print
frcroth Feb 2, 2023
ee73fe1
Remove unused code
frcroth Feb 2, 2023
173efcd
Merge branch 'master' into precomputed
frcroth Feb 2, 2023
d482060
Fix FileSystem things introduced by merge
frcroth Feb 2, 2023
0470031
Implement exploration of precomputed datasets
frcroth Feb 3, 2023
f7d9f75
Revert minor changes
frcroth Feb 3, 2023
b23e858
Remove handling for specific google cloud url schema
frcroth Feb 3, 2023
0479ef4
Fix compile warnings
frcroth Feb 3, 2023
44265df
Create precomputed segmentation layer when type=segmentation
frcroth Feb 3, 2023
3b1968b
Apply suggestions from code review
frcroth Feb 6, 2023
9291caa
Add changelog entry
frcroth Feb 6, 2023
40ec8ec
Merge branch 'master' into precomputed
frcroth Feb 6, 2023
2e13a2b
Update docs
frcroth Feb 6, 2023
1870367
Adjust frontend
frcroth Feb 6, 2023
0fa9b30
Prevent exploration of sharded data
frcroth Feb 6, 2023
053b047
Format backend
frcroth Feb 6, 2023
d7908e2
Merge branch 'master' into precomputed
frcroth Feb 7, 2023
a99072b
Explain some things better
frcroth Feb 7, 2023
838db4b
Merge branch 'master' into precomputed
frcroth Feb 9, 2023
2 changes: 2 additions & 0 deletions CHANGELOG.unreleased.md
@@ -12,6 +12,8 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released

### Added
- Remote datasets can now also be streamed from Google Cloud Storage URIs (`gs://`). [#6775](https://github.com/scalableminds/webknossos/pull/6775)
- Remote volume datasets in the neuroglancer precomputed format can now be viewed in WEBKNOSSOS. [#6716](https://github.com/scalableminds/webknossos/pull/6716)


### Changed
- Limit paid team sharing features to respective organization plans. [#6776](https://github.com/scalableminds/webknossos/pull/6776)
16 changes: 15 additions & 1 deletion app/models/binary/explore/ExploreRemoteLayerService.scala
@@ -3,6 +3,10 @@ package models.binary.explore
import com.scalableminds.util.geometry.{Vec3Double, Vec3Int}
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5SegmentationLayer}
import com.scalableminds.webknossos.datastore.dataformats.precomputed.{
PrecomputedDataLayer,
PrecomputedSegmentationLayer
}
import com.scalableminds.webknossos.datastore.dataformats.zarr._
import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header
import com.scalableminds.webknossos.datastore.datareaders.zarr._
@@ -132,6 +136,12 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService)
case l: N5SegmentationLayer =>
l.copy(mags = l.mags.map(mag => mag.copy(mag = mag.mag * magFactors)),
boundingBox = l.boundingBox * magFactors)
case l: PrecomputedDataLayer =>
l.copy(mags = l.mags.map(mag => mag.copy(mag = mag.mag * magFactors)),
boundingBox = l.boundingBox * magFactors)
case l: PrecomputedSegmentationLayer =>
l.copy(mags = l.mags.map(mag => mag.copy(mag = mag.mag * magFactors)),
boundingBox = l.boundingBox * magFactors)
case _ => throw new Exception("Encountered unsupported layer format during explore remote")
}
})
@@ -159,7 +169,11 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService)
remotePath,
credentialId.map(_.toString),
reportMutable,
List(new ZarrArrayExplorer, new NgffExplorer, new N5ArrayExplorer, new N5MultiscalesExplorer)
List(new ZarrArrayExplorer,
new NgffExplorer,
new N5ArrayExplorer,
new N5MultiscalesExplorer,
new PrecomputedExplorer)
)
} yield layersWithVoxelSizes

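The copy pattern above is the same for every supported layer type: after exploration, each layer's mags and bounding box are multiplied by per-axis factors so that all layers are expressed relative to one common finest voxel size. A minimal, self-contained sketch of that pattern (illustrative Vec3Int and Layer stand-ins, not the actual datastore types):

final case class Vec3Int(x: Int, y: Int, z: Int) {
  def *(other: Vec3Int): Vec3Int = Vec3Int(x * other.x, y * other.y, z * other.z)
}
final case class Layer(mags: List[Vec3Int], boundingBoxSize: Vec3Int)

// Multiply every mag and the bounding box by the same per-axis factors,
// so layers explored at different voxel sizes align on a common finest mag.
def rescaleToCommonVoxelSize(layer: Layer, magFactors: Vec3Int): Layer =
  layer.copy(mags = layer.mags.map(_ * magFactors), boundingBoxSize = layer.boundingBoxSize * magFactors)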
69 changes: 69 additions & 0 deletions app/models/binary/explore/PrecomputedExplorer.scala
@@ -0,0 +1,69 @@
package models.binary.explore
import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.dataformats.MagLocator
import com.scalableminds.webknossos.datastore.dataformats.precomputed.{
PrecomputedDataLayer,
PrecomputedLayer,
PrecomputedSegmentationLayer
}
import com.scalableminds.webknossos.datastore.datareaders.AxisOrder
import com.scalableminds.webknossos.datastore.datareaders.precomputed.{PrecomputedHeader, PrecomputedScale}
import com.scalableminds.webknossos.datastore.models.datasource.{Category, ElementClass}

import java.nio.file.Path
import scala.concurrent.ExecutionContext.Implicits.global

class PrecomputedExplorer extends RemoteLayerExplorer {
override def name: String = "Neuroglancer Precomputed"

override def explore(remotePath: Path, credentialId: Option[String]): Fox[List[(PrecomputedLayer, Vec3Double)]] =
for {
infoPath <- Fox.successful(remotePath.resolve(PrecomputedHeader.FILENAME_INFO))
precomputedHeader <- parseJsonFromPath[PrecomputedHeader](infoPath) ?~> s"Failed to read neuroglancer precomputed metadata at $infoPath"
layerAndVoxelSize <- layerFromPrecomputedHeader(precomputedHeader, remotePath, credentialId)
} yield List(layerAndVoxelSize)

private def layerFromPrecomputedHeader(precomputedHeader: PrecomputedHeader,
remotePath: Path,
credentialId: Option[String]): Fox[(PrecomputedLayer, Vec3Double)] =
for {
name <- guessNameFromPath(remotePath)
firstScale <- precomputedHeader.scales.headOption.toFox
_ <- bool2Fox(firstScale.sharding.isEmpty) ?~> "Failed to read dataset: sharding not supported"
boundingBox <- BoundingBox.fromSizeArray(firstScale.size).toFox
elementClass: ElementClass.Value <- elementClassFromPrecomputedDataType(precomputedHeader.data_type) ?~> "Unknown data type"
smallestResolution = firstScale.resolution
voxelSize <- Vec3Int.fromArray(smallestResolution).toFox
mags: List[MagLocator] <- Fox.serialCombined(precomputedHeader.scales)(
getMagFromScale(_, smallestResolution, remotePath, credentialId))
layer = if (precomputedHeader.describesSegmentationLayer) {
PrecomputedSegmentationLayer(name, boundingBox, elementClass, mags, None)
} else PrecomputedDataLayer(name, boundingBox, Category.color, elementClass, mags)
} yield (layer, Vec3Double.fromVec3Int(voxelSize))

private def elementClassFromPrecomputedDataType(precomputedDataType: String): Fox[ElementClass.Value] =
precomputedDataType.toLowerCase match {
case "uint8" => Some(ElementClass.uint8)
case "uint16" => Some(ElementClass.uint16)
case "uint32" => Some(ElementClass.uint32)
case "uint64" => Some(ElementClass.uint64)
case "float32" => Some(ElementClass.float)
case _ => None
}

private def getMagFromScale(scale: PrecomputedScale,
minimalResolution: Array[Int],
remotePath: Path,
credentialId: Option[String]): Fox[MagLocator] = {
val normalizedResolution = (scale.resolution, minimalResolution).zipped.map((r, m) => r / m)
for {
mag <- Vec3Int.fromList(normalizedResolution.toList)
path = remotePath.resolve(scale.key)

// Neuroglancer precomputed specification does not specify axis order, but uses x,y,z implicitly.
// https://github.com/google/neuroglancer/blob/master/src/neuroglancer/datasource/precomputed/volume.md#unsharded-chunk-storage
axisOrder = AxisOrder(0, 1, 2)
} yield MagLocator(mag, Some(path.toUri.toString), None, Some(axisOrder), channelIndex = None, credentialId)
}
}
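The core of getMagFromScale is the normalization above: each scale's resolution is divided component-wise by the finest (first) scale's resolution, and the quotient becomes that scale's mag. A hedged sketch of the arithmetic with a hypothetical helper (plain tuples instead of Vec3Int and Fox):

// E.g. a finest resolution of [8, 8, 40] and a scale at [16, 16, 40] yield mag (2, 2, 1).
def magFromResolutions(scale: Array[Int], finest: Array[Int]): Option[(Int, Int, Int)] =
  if (scale.length == 3 && finest.length == 3 && finest.forall(_ != 0))
    Some((scale(0) / finest(0), scale(1) / finest(1), scale(2) / finest(2)))
  else None

magFromResolutions(Array(16, 16, 40), Array(8, 8, 40)) // Some((2, 2, 1))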
4 changes: 2 additions & 2 deletions conf/application.conf
@@ -293,5 +293,5 @@ pidfile.path = "/dev/null"


# uncomment these lines for faster restart during local backend development (but beware the then-missing features):
#slick.checkSchemaOnStartup = false
#play.modules.disabled += "play.modules.swagger.SwaggerModule"
slick.checkSchemaOnStartup = false
play.modules.disabled += "play.modules.swagger.SwaggerModule"
10 changes: 5 additions & 5 deletions docs/datasets.md
@@ -42,14 +42,14 @@ In particular, the following file formats are supported for uploading (and conve

Once the data is uploaded (and potentially converted), you can further configure a dataset's [Settings](#configuring-datasets) and double-check layer properties, finetune access rights & permissions, or set default values for rendering.

### Working with Zarr and N5 datasets
WEBKNOSSOS supports loading and remotely streaming [Zarr](https://zarr.dev) and [N5](https://github.com/saalfeldlab/n5) datasets from a remote source, e.g. Cloud storage (S3) or HTTP server.
### Working with Zarr, Neuroglancer Precomputed and N5 datasets
WEBKNOSSOS supports loading and remotely streaming [Zarr](https://zarr.dev), [Neuroglancer precomputed format](https://github.com/google/neuroglancer/tree/master/src/neuroglancer/datasource/precomputed) and [N5](https://github.com/saalfeldlab/n5) datasets from a remote source, e.g. Cloud storage (S3) or HTTP server.
WEBKNOSSOS supports loading Zarr datasets according to the [OME NGFF v0.4 spec](https://ngff.openmicroscopy.org/latest/).

WEBKNOSSOS can load several Zarr sources and assemble them into a WEBKNOSSOS dataset with several layers, e.g. one Zarr file/source for the `color` layer and one Zarr file/source for a `segmentation` layer.
WEBKNOSSOS can load several remote sources and assemble them into a WEBKNOSSOS dataset with several layers, e.g. one Zarr file/source for the `color` layer and one Zarr file/source for a `segmentation` layer.

1. From the *Datasets* tab in the user dashboard, click the *Add Dataset* button.
2. Select the *Add Remote Zarr Dataset*
2. Select the *Add Remote Dataset* tab
3. For each layer, provide some metadata information:
- a URL or domain/collection identifier to locate the dataset on the remote service (supported protocols are HTTPS, Amazon S3 and Google Cloud Storage).
- authentication credentials for accessing the resources on the remote service (optional)
@@ -67,7 +67,7 @@ Note that data streaming may count against any usage limits or minutes as define
Hint: If you happen to have any Zarr dataset locally that you would like to view in WEBKNOSSOS, consider running an HTTP server locally to serve the dataset. Then WEBKNOSSOS can easily stream the data.
Alternatively, convert the dataset to wkw using [webknossos-libs](https://github.com/scalableminds/webknossos-libs/).

### Working with Neuroglancer and BossDB datasets
### Working with Neuroglancer and BossDB datasets on webknossos.org
webknossos.org supports loading and remotely streaming datasets in the [Neuroglancer precomputed format](https://github.com/google/neuroglancer/tree/master/src/neuroglancer/datasource/precomputed) stored in the Google Cloud or datasets served from [BossDB](https://bossdb.org).

To import these datasets:
2 changes: 1 addition & 1 deletion frontend/javascripts/admin/dataset/dataset_add_view.tsx
@@ -132,7 +132,7 @@ function DatasetAddView({ history }: RouteComponentProps) {
tab={
<span>
<DatabaseOutlined />
Add Remote Zarr / N5 Dataset
Add Remote Dataset
</span>
}
key="2"
10 changes: 5 additions & 5 deletions frontend/javascripts/admin/dataset/dataset_add_zarr_view.tsx
@@ -177,7 +177,7 @@ function DatasetAddZarrView(props: Props) {
return (
// Using Forms here only to validate fields and for easy layout
<div style={{ padding: 5 }}>
<CardContainer title="Add Remote Zarr / N5 Dataset">
<CardContainer title="Add Remote Zarr / Neuroglancer Precomputed / N5 Dataset">
<Form form={form} layout="vertical">
<Modal
title="Add Layer"
@@ -404,10 +404,10 @@ function AddZarrLayer({

return (
<>
Please enter a URL that points to the Zarr or N5 data you would like to import. If necessary,
specify the credentials for the dataset. For datasets with multiple layers, e.g. raw
microscopy and segmentation data, please add them separately with the ”Add Layer” button
below. Once you have approved of the resulting datasource you can import it.
Please enter a URL that points to the Zarr, Neuroglancer Precomputed or N5 data you would like
to import. If necessary, specify the credentials for the dataset. For datasets with multiple
layers, e.g. raw microscopy and segmentation data, please add them separately with the ”Add
Layer” button below. Once you have approved of the resulting datasource you can import it.
<FormItem
style={{ marginTop: 16, marginBottom: 16 }}
name="url"
53 changes: 53 additions & 0 deletions frontend/javascripts/types/schemas/datasource.schema.ts
@@ -165,6 +165,56 @@ export default {
},
required: ["dataFormat", "mags"],
},
"types::DataLayerPrecomputedPartial": {
title: "DataLayerPrecomputed",
type: "object",
properties: {
dataFormat: {
const: "neuroglancerPrecomputed",
},
boundingBox: {
$ref: "#/definitions/types::BoundingBox",
},
numChannels: {
type: "number",
},
mags: {
type: "array",
items: {
type: "object",
properties: {
mag: {
anyOf: [
{
type: "number",
},
{
$ref: "#/definitions/types::Vector3",
},
],
},
path: {
type: "string",
},
credentials: {
type: "object",
properties: {
user: { type: "string" },
password: { type: "string" },
},
required: ["user", "password"],
},
axisOrder: {
type: "object",
additionalProperties: { type: "number" },
},
},
required: ["mag"],
},
},
},
required: ["dataFormat", "mags"],
},
"types::DataLayer": {
title: "DataLayer",
allOf: [
@@ -246,6 +296,9 @@
{
$ref: "#/definitions/types::DataLayerN5Partial",
},
{
$ref: "#/definitions/types::DataLayerPrecomputedPartial",
},
],
},
],
5 changes: 4 additions & 1 deletion frontend/javascripts/types/schemas/datasource.types.ts
@@ -34,6 +34,9 @@ type DataLayerZarrPartial = BaseRemoteLayer & {
type DataLayerN5Partial = BaseRemoteLayer & {
dataFormat: "n5";
};
type DataLayerPrecomputedPartial = BaseRemoteLayer & {
dataFormat: "neuroglancerPrecomputed";
};
export type DataLayer = {
name: string;
category: "color" | "segmentation";
@@ -59,7 +62,7 @@ export type DataLayer = {
mappings: Array<string>;
}
) &
(DataLayerWKWPartial | DataLayerZarrPartial | DataLayerN5Partial);
(DataLayerWKWPartial | DataLayerZarrPartial | DataLayerN5Partial | DataLayerPrecomputedPartial);
export type DatasourceConfiguration = {
id: {
name: string;
5 changes: 3 additions & 2 deletions package.json
@@ -80,8 +80,9 @@
"build": "node --max-old-space-size=4096 node_modules/.bin/webpack --env production",
"build-dev": "node_modules/.bin/webpack",
"build-watch": "node_modules/.bin/webpack -w",
"listening": "lsof -i:7155,9000,9001,9002",
"kill-listeners": "kill $(lsof -t -i:7155,9000,9001,9002)",
"listening": "lsof -i:5005,7155,9000,9001,9002",
"kill-listeners": "kill -9 $(lsof -t -i:5005,7155,9000,9001,9002)",
"rm-fossil-lock": "rm fossildb/data/LOCK",
"test": "tools/test.sh test --timeout=30s",
"test-changed": "tools/test.sh test-changed --timeout=30s",
"test-verbose": "xvfb-run -s '-ac -screen 0 1280x1024x24' tools/test.sh test --timeout=60s --verbose",
util/src/main/scala/com/scalableminds/util/geometry/BoundingBox.scala
@@ -118,5 +118,11 @@ object BoundingBox {
None
}

def fromSizeArray(size: Array[Int]): Option[BoundingBox] =
size.length match {
case 3 => Some(BoundingBox(Vec3Int(0, 0, 0), size(0), size(1), size(2)))
case _ => None
}

implicit val jsonFormat: OFormat[BoundingBox] = Json.format[BoundingBox]
}
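BoundingBox.fromSizeArray anchors the box at the origin and accepts exactly three dimensions, which is what the precomputed explorer needs for firstScale.size. Expected behavior, with illustrative values:

BoundingBox.fromSizeArray(Array(512, 512, 128)) // Some(BoundingBox(Vec3Int(0, 0, 0), 512, 512, 128))
BoundingBox.fromSizeArray(Array(512, 512))      // None: anything but three dimensions is rejected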
util/src/main/scala/com/scalableminds/util/geometry/Vec3Double.scala
@@ -101,6 +101,9 @@ object Vec3Double {
case _: NumberFormatException => None
}

def fromVec3Int(v: Vec3Int) =
Vec3Double(v.x, v.y, v.z)

implicit object Vector3DReads extends Format[Vec3Double] {
def reads(json: JsValue): JsResult[Vec3Double] = json match {
case JsArray(ts) if ts.size == 3 =>
util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala
@@ -98,6 +98,12 @@ object Vec3Int {
else
None

def fromArray(a: Array[Int]): Option[Vec3Int] =
if (a.length >= 3)
Some(Vec3Int(a(0), a(1), a(2)))
else
None

def full(i: Int): Vec3Int = Vec3Int(i, i, i)

def zeros: Vec3Int = Vec3Int(0, 0, 0)
6 changes: 3 additions & 3 deletions util/src/main/scala/com/scalableminds/util/tools/Fox.scala
@@ -207,15 +207,15 @@ object Fox extends FoxImplicits {
} yield ()

def chainFunctions[T](functions: List[T => Fox[T]])(implicit ec: ExecutionContext): T => Fox[T] = {
def runNext(remainingFunctions: List[T => Fox[T]], previousRestult: T): Fox[T] =
def runNext(remainingFunctions: List[T => Fox[T]], previousResult: T): Fox[T] =
remainingFunctions match {
case head :: tail =>
for {
currentResult <- head(previousRestult)
currentResult <- head(previousResult)
nextResult <- runNext(tail, currentResult)
} yield nextResult
case Nil =>
Fox.successful(previousRestult)
Fox.successful(previousResult)
}
t =>
runNext(functions, t)
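
Apart from the typo fix (previousRestult → previousResult), chainFunctions is unchanged: it folds a list of T => Fox[T] steps into a single function, feeding each intermediate result into the next step. An illustrative use, assuming an implicit ExecutionContext is in scope:

val addOne: Int => Fox[Int] = i => Fox.successful(i + 1)
val double: Int => Fox[Int] = i => Fox.successful(i * 2)

val pipeline: Int => Fox[Int] = Fox.chainFunctions(List(addOne, double))
// pipeline(3) runs addOne(3) = 4, then double(4) = 8, yielding Fox.successful(8)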