
Add nd support for tiff export job #7971

Merged (16 commits) on Sep 19, 2024
Changes from 10 commits
2 changes: 2 additions & 0 deletions CHANGELOG.unreleased.md
@@ -12,6 +12,8 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released

### Added
- If the opacity of a volume layer is zero, a warning is now shown in the layer settings tab. [#8003](https://github.com/scalableminds/webknossos/pull/8003)
- Added the option to export ND datasets as OME-TIFF or TIFF stack. Previously, this was only possible for 3D datasets. [#7971](https://github.com/scalableminds/webknossos/pull/7971)


### Changed

29 changes: 24 additions & 5 deletions app/controllers/JobController.scala
@@ -4,7 +4,7 @@ import play.silhouette.api.Silhouette
import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.accesscontext.GlobalAccessContext
import com.scalableminds.util.tools.Fox
import models.dataset.{DataStoreDAO, DatasetDAO, DatasetService}
import models.dataset.{DataStoreDAO, DatasetDAO, DatasetLayerAdditionalAxesDAO, DatasetService}
import models.job._
import models.organization.OrganizationDAO
import models.user.MultiUserDAO
@@ -19,7 +19,9 @@ import java.util.Date
import javax.inject.Inject
import scala.concurrent.ExecutionContext
import com.scalableminds.util.enumeration.ExtendedEnumeration
import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize}
import com.scalableminds.webknossos.datastore.dataformats.zarr.Zarr3OutputHelper
import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, FullAxisOrder, NDBoundingBox}
import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, LengthUnit, VoxelSize}
import models.team.PricingPlan

object MovieResolutionSetting extends ExtendedEnumeration {
@@ -54,13 +56,15 @@ class JobController @Inject()(
jobService: JobService,
workerService: WorkerService,
workerDAO: WorkerDAO,
datasetLayerAdditionalAxesDAO: DatasetLayerAdditionalAxesDAO,
wkconf: WkConf,
multiUserDAO: MultiUserDAO,
wkSilhouetteEnvironment: WkSilhouetteEnvironment,
slackNotificationService: SlackNotificationService,
organizationDAO: OrganizationDAO,
dataStoreDAO: DataStoreDAO)(implicit ec: ExecutionContext, playBodyParsers: PlayBodyParsers)
extends Controller {
extends Controller
with Zarr3OutputHelper {

def status: Action[AnyContent] = sil.SecuredAction.async { implicit request =>
for {
@@ -316,6 +320,7 @@
def runExportTiffJob(organizationId: String,
datasetName: String,
bbox: String,
additionalCoordinates: Option[String],
layerName: Option[String],
mag: Option[String],
annotationLayerName: Option[String],
@@ -330,6 +335,20 @@
_ <- Fox.runOptional(layerName)(datasetService.assertValidLayerNameLax)
_ <- Fox.runOptional(annotationLayerName)(datasetService.assertValidLayerNameLax)
_ <- jobService.assertBoundingBoxLimits(bbox, mag)
additionalAxesOpt <- Fox.runOptional(layerName)(layerName =>
datasetLayerAdditionalAxesDAO.findAllForDatasetAndDataLayerName(dataset._id, layerName))
additionalAxesOpt <- Fox.runOptional(additionalAxesOpt)(a => Fox.successful(reorderAdditionalAxes(a)))
rank = additionalAxesOpt.map(_.length).getOrElse(0) + 4
axisOrder = FullAxisOrder.fromAxisOrderAndAdditionalAxes(rank,
AxisOrder.cAdditionalxyz(rank),
additionalAxesOpt)
threeDBBox <- BoundingBox.fromLiteral(bbox).toFox ~> "job.invalidBoundingBox"
parsedAdditionalCoordinatesOpt <- Fox.runOptional(additionalCoordinates)(coords =>
Json.parse(coords).validate[Seq[AdditionalCoordinate]]) ~> "job.additionalCoordinates.invalid"
parsedAdditionalCoordinates = parsedAdditionalCoordinatesOpt.getOrElse(Seq.empty)
additionalAxesOfNdBBox = additionalAxesOpt.map(additionalAxes =>
additionalAxes.map(_.enclosingAdditionalCoordinates(parsedAdditionalCoordinates)))
ndBoundingBox = NDBoundingBox(threeDBBox, additionalAxesOfNdBBox.getOrElse(Seq.empty), axisOrder)
command = JobCommand.export_tiff
exportFileName = if (asOmeTiff)
s"${formatDateForFilename(new Date())}__${datasetName}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.ome.tif"
@@ -338,12 +357,12 @@
commandArgs = Json.obj(
"organization_name" -> organizationId,
"dataset_name" -> datasetName,
"bbox" -> bbox,
"nd_bbox" -> ndBoundingBox.toWkLibsDict,
"export_file_name" -> exportFileName,
"layer_name" -> layerName,
"mag" -> mag,
"annotation_layer_name" -> annotationLayerName,
"annotation_id" -> annotationId
"annotation_id" -> annotationId,
)
job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunTiffExport"
js <- jobService.publicWrites(job)
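For reference, a minimal self-contained Scala sketch of the two new steps above: parsing the additionalCoordinates query parameter and deriving the rank. The AdditionalCoordinate stand-in and its field names are assumptions for illustration; the real model and its JSON format live in the datastore module.

object NdExportParamSketch extends App {
  import play.api.libs.json.{Json, OFormat}

  // Stand-in for the real AdditionalCoordinate model; field names are an assumption.
  case class AdditionalCoordinate(name: String, value: Int)
  implicit val acFormat: OFormat[AdditionalCoordinate] = Json.format[AdditionalCoordinate]

  // The controller receives the new additionalCoordinates query parameter as a JSON array string ...
  val rawParam = """[{"name":"t","value":12}]"""
  val parsedCoordinates = Json.parse(rawParam).validate[Seq[AdditionalCoordinate]].getOrElse(Seq.empty)

  // ... and derives the rank from the layer's additional axes plus the c, x, y, z axes.
  val additionalAxisCount = 1 // e.g. one extra "t" axis on the selected layer
  val rank = additionalAxisCount + 4
  println(s"coordinates=$parsedCoordinates, rank=$rank") // coordinates=List(AdditionalCoordinate(t,12)), rank=5
}
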
1 change: 1 addition & 0 deletions conf/messages
@@ -334,6 +334,7 @@ job.emailNotifactionsDisabled = Email notifications are not enabled for this job
job.renderAnimation.notAllowed.organization = "Rendering animations is only allowed for datasets of your own organization."
job.alignSections.notAllowed.organization = "Aligning sections is only allowed for datasets of your own organization."
job.alignSections.notAllowed.onlySuperUsers = "For now, aligning sections is only allowed for super users."
job.additionalCoordinates.invalid = "The passed additional coordinates are invalid."

voxelytics.disabled = Voxelytics workflow reporting and logging are not enabled for this WEBKNOSSOS instance.
voxelytics.runNotFound = Workflow runs not found
2 changes: 1 addition & 1 deletion conf/webknossos.latest.routes
@@ -262,7 +262,7 @@ GET /jobs/status
POST /jobs/run/convertToWkw/:organizationId/:datasetName controllers.JobController.runConvertToWkwJob(organizationId: String, datasetName: String, scale: String, unit: Option[String])
POST /jobs/run/computeMeshFile/:organizationId/:datasetName controllers.JobController.runComputeMeshFileJob(organizationId: String, datasetName: String, layerName: String, mag: String, agglomerateView: Option[String])
POST /jobs/run/computeSegmentIndexFile/:organizationId/:datasetName controllers.JobController.runComputeSegmentIndexFileJob(organizationId: String, datasetName: String, layerName: String)
POST /jobs/run/exportTiff/:organizationId/:datasetName controllers.JobController.runExportTiffJob(organizationId: String, datasetName: String, bbox: String, layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean)
POST /jobs/run/exportTiff/:organizationId/:datasetName controllers.JobController.runExportTiffJob(organizationId: String, datasetName: String, bbox: String, additionalCoordinates: Option[String], layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean)
POST /jobs/run/inferNuclei/:organizationId/:datasetName controllers.JobController.runInferNucleiJob(organizationId: String, datasetName: String, layerName: String, newDatasetName: String)
POST /jobs/run/inferNeurons/:organizationId/:datasetName controllers.JobController.runInferNeuronsJob(organizationId: String, datasetName: String, layerName: String, bbox: String, newDatasetName: String)
POST /jobs/run/inferMitochondria/:organizationId/:datasetName controllers.JobController.runInferMitochondriaJob(organizationId: String, datasetName: String, layerName: String, bbox: String, newDatasetName: String)
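To make the route change concrete, a hedged sketch of the request URL a client might build for the extended endpoint. Organization, dataset, layer, and coordinate values are made up; only the parameter names come from the route above.

object ExportTiffRequestSketch extends App {
  import java.net.URLEncoder

  // All names and values below are made up for illustration.
  val base = "/api/jobs/run/exportTiff/sample_organization/sample_dataset"
  val queryParams = Seq(
    "bbox"                  -> "0,0,0,512,512,32",
    "additionalCoordinates" -> """[{"name":"t","value":12}]""",
    "layerName"             -> "color",
    "mag"                   -> "1-1-1",
    "asOmeTiff"             -> "true"
  )
  val query = queryParams.map { case (k, v) => s"$k=${URLEncoder.encode(v, "UTF-8")}" }.mkString("&")
  println(s"POST $base?$query")
}
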
5 changes: 5 additions & 0 deletions frontend/javascripts/admin/api/jobs.ts
@@ -9,6 +9,7 @@ import type {
APIEffectiveJobState,
AiModel,
RenderAnimationOptions,
AdditionalCoordinate,
} from "types/api_flow_types";
import { assertResponseLimit } from "./api_utils";

@@ -100,6 +101,7 @@ export async function startExportTiffJob(
datasetName: string,
organizationId: string,
bbox: Vector6,
additionalCoordinates: AdditionalCoordinate[] | null,
layerName: string | null | undefined,
mag: string | null | undefined,
annotationId: string | null | undefined,
@@ -119,6 +121,9 @@
if (annotationLayerName != null) {
params.append("annotationLayerName", annotationLayerName);
}
if (additionalCoordinates != null) {
params.append("additionalCoordinates", JSON.stringify(additionalCoordinates));
}
return Request.receiveJSON(
`/api/jobs/run/exportTiff/${organizationId}/${datasetName}?${params}`,
{
43 changes: 26 additions & 17 deletions frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx
@@ -58,6 +58,7 @@ import { formatCountToDataAmountUnit, formatScale } from "libs/format_utils";
import type { BoundingBoxType, Vector3 } from "oxalis/constants";
import { useStartAndPollJob } from "admin/job/job_hooks";
import { LayerSelection } from "components/layer_selection";
import { getAdditionalCoordinatesAsString } from "oxalis/model/accessors/flycam_accessor";
const { Paragraph, Text } = Typography;

type TabKeys = "download" | "export" | "python";
@@ -281,6 +282,9 @@ function _DownloadModalView({
const rawUserBoundingBoxes = useSelector((state: OxalisState) =>
getUserBoundingBoxesFromState(state),
);
const currentAdditionalCoordinates = useSelector(
(state: OxalisState) => state.flycam.additionalCoordinates,
);
const typeName = isAnnotation ? "annotation" : "dataset";
const isMergerModeEnabled = useSelector(
(state: OxalisState) => state.temporaryConfiguration.isMergerModeEnabled,
@@ -357,16 +361,13 @@
);
onClose();
} else if (activeTabKey === "export" && startJob != null) {
if ((selectedLayerInfos.additionalAxes || []).length > 0) {
Toast.warning("Exporting an n-dimensional layer is currently not supported.");
return;
}
await Model.ensureSavedState();
await startJob(async () => {
const job = await startExportTiffJob(
dataset.name,
dataset.owningOrganization,
computeArrayFromBoundingBox(selectedBoundingBox.boundingBox),
currentAdditionalCoordinates,
selectedLayerInfos.layerName,
mag.join("-"),
selectedLayerInfos.annotationId,
@@ -396,18 +397,6 @@
</Text>
</Row>
) : null;
const ndVolumeWarning = isVolumeNDimensional ? (
<Row key="unsupported-nd">
<Text
style={{
margin: "0 6px 12px",
}}
type="warning"
>
Downloading/exporting n-dimensional volume data is not yet supported.
</Text>
</Row>
) : null;
const pythonTokenWarning =
activeTabKey === "python" ? (
<Row key="python-token-warning">
@@ -424,7 +413,7 @@
</Row>
) : null;

return [volumeFallbackWarning, ndVolumeWarning, pythonTokenWarning];
return [volumeFallbackWarning, pythonTokenWarning];
};

const handleTabChange = (key: string) => {
@@ -652,6 +641,26 @@
style={{ width: "100%" }}
/>
{boundingBoxCompatibilityAlerts}
{selectedLayerInfos.additionalAxes != null && (
<Row>
<Divider
style={{
margin: "18px 0",
}}
>
Additional Coordinates
</Divider>
<Text
style={{
margin: "0 6px 12px",
}}
>
Your dataset has more than three dimensions. The export will only include the
selected bounding box at the current additional dimensions:{" "}
{getAdditionalCoordinatesAsString(currentAdditionalCoordinates)}
</Text>
</Row>
)}

<Divider
style={{
@@ -2,6 +2,7 @@ package com.scalableminds.util.geometry

import com.scalableminds.util.tools.Math.ceilDiv
import net.liftweb.common.Full
import play.api.libs.json.{JsObject, Json}

case class BoundingBox(topLeft: Vec3Int, width: Int, height: Int, depth: Int) {

Expand Down Expand Up @@ -70,6 +71,9 @@ case class BoundingBox(topLeft: Vec3Int, width: Int, height: Int, depth: Int) {
Vec3Int(width, height, depth)

def toLiteral: String = f"${topLeft.x},${topLeft.y},${topLeft.z},$width,$height,$depth"

def toWkLibsDict: JsObject =
Review comment (Member), suggested change:
def toWkLibsDict: JsObject =
def toWkLibsJson: JsObject =

Json.obj("topLeft" -> topLeft, "width" -> width, "height" -> height, "depth" -> depth)
}

object BoundingBox {
Expand Down
@@ -1,7 +1,7 @@
package com.scalableminds.webknossos.datastore.datareaders

import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis
import play.api.libs.json.{Json, OFormat}
import play.api.libs.json.{JsValue, Json, OFormat}

// Defines the axis order of a DatasetArray. Note that this ignores transpose codecs/ArrayOrder.F/C.
// Those will have to be applied on individual chunk’s contents.
@@ -97,6 +97,12 @@ case class FullAxisOrder(axes: Seq[Axis]) {
def permuteIndicesArrayToWk(indices: Array[Int]): Array[Int] =
arrayToWkPermutation.map(indices(_))

def toWkLibsDictObject: JsValue =
Review comment (Member), suggested change:
def toWkLibsDictObject: JsValue =
def toWkLibsDictJson: JsValue =

Reply (Contributor, Author): I also went for toWkLibsJson here as below to have the naming more consistent. Feel free to argue.

Json.toJson(axes.zipWithIndex.collect {
case (axis, index) if axis.name == "x" || axis.name == "y" || axis.name == "z" =>
axis.name -> index
}.toMap)

}

object FullAxisOrder {
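A small self-contained sketch of what toWkLibsDictObject computes for an example axis order of c, t, x, y, z; it mirrors the collect above. The axis names are illustrative, not taken from a real dataset.

object AxisOrderJsonSketch extends App {
  import play.api.libs.json.Json

  // Only the spatial axes are kept and mapped to their position in the full axis order.
  val axisNames = Seq("c", "t", "x", "y", "z")
  val spatialIndices = axisNames.zipWithIndex.collect {
    case (name, index) if name == "x" || name == "y" || name == "z" => name -> index
  }.toMap
  println(Json.toJson(spatialIndices)) // {"x":2,"y":3,"z":4}
}
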
@@ -0,0 +1,15 @@
package com.scalableminds.webknossos.datastore.datareaders

import com.scalableminds.util.geometry.BoundingBox
import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis
import play.api.libs.json.{JsValue, Json}

case class NDBoundingBox(boundingBox: BoundingBox, additionalAxes: Seq[AdditionalAxis], fullAxisOrder: FullAxisOrder) {

def toWkLibsDict: JsValue = {
val additionalAxesDict = Json.toJson(additionalAxes)
val axisOrderDict = fullAxisOrder.toWkLibsDictObject
boundingBox.toWkLibsDict ++ Json.obj("additionalAxes" -> additionalAxesDict, "axisOrder" -> axisOrderDict)
}

}
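
A rough sketch of the combined nd_bbox payload that NDBoundingBox.toWkLibsDict assembles for the worker. The exact serialization of Vec3Int (as an array) and AdditionalAxis (as a name/bounds/index object) is an assumption here, not verified against the real Writes.

object NdBboxPayloadSketch extends App {
  import play.api.libs.json.Json

  // Assumed serializations: topLeft as [x, y, z], additional axes as {name, bounds, index}.
  val bboxPart = Json.obj("topLeft" -> Json.arr(0, 0, 0), "width" -> 512, "height" -> 512, "depth" -> 32)
  val payload = bboxPart ++ Json.obj(
    "additionalAxes" -> Json.arr(Json.obj("name" -> "t", "bounds" -> Json.arr(12, 13), "index" -> 1)),
    "axisOrder"      -> Json.obj("x" -> 2, "y" -> 3, "z" -> 4)
  )
  println(Json.prettyPrint(payload))
}
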
@@ -62,15 +62,19 @@ object ZarrHeader extends JsonImplicits {
// data request method always decompresses before sending
val compressor = None

val shape = Array(
channels,
val additionalAxesShapeEntries =
dataLayer.additionalAxes.map(axes => axes.map(_.bounds(1)).toArray).getOrElse(Array.empty)
val additionalAxesChunksEntries =
dataLayer.additionalAxes.map(axes => axes.map(_ => 1).toArray).getOrElse(Array.empty)

val shape = Array(channels) ++ additionalAxesShapeEntries ++ Array(
// Zarr can't handle data sets that don't start at 0, so we extend the shape to include "true" coords
(dataLayer.boundingBox.width + dataLayer.boundingBox.topLeft.x) / mag.x,
(dataLayer.boundingBox.height + dataLayer.boundingBox.topLeft.y) / mag.y,
(dataLayer.boundingBox.depth + dataLayer.boundingBox.topLeft.z) / mag.z
)

val chunks = Array(channels, cubeLength, cubeLength, cubeLength)
val chunks = Array(channels) ++ additionalAxesChunksEntries ++ Array(cubeLength, cubeLength, cubeLength)

ZarrHeader(zarr_format = 2,
shape = shape,
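A sketch of how the extended shape and chunks arrays come out for a hypothetical layer with one extra "t" axis; all numbers are made up for illustration.

object ZarrShapeSketch extends App {
  // Hypothetical 1024 x 1024 x 512 layer in mag 1 with cubeLength 32 and one
  // extra "t" axis with bounds (0, 20).
  val channels = 1
  val additionalAxesShapeEntries  = Array(20) // upper bound of each additional axis
  val additionalAxesChunksEntries = Array(1)  // chunk extent 1 along each additional axis
  val shape  = Array(channels) ++ additionalAxesShapeEntries ++ Array(1024, 1024, 512)
  val chunks = Array(channels) ++ additionalAxesChunksEntries ++ Array(32, 32, 32)
  println(shape.mkString("shape  = [", ", ", "]"))  // shape  = [1, 20, 1024, 1024, 512]
  println(chunks.mkString("chunks = [", ", ", "]")) // chunks = [1, 1, 32, 32, 32]
}
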
@@ -10,6 +10,17 @@ case class AdditionalAxis(name: String, bounds: Array[Int], index: Int) {
lazy val lowerBound: Int = bounds(0)
lazy val upperBound: Int = bounds(1)
lazy val highestValue: Int = upperBound - 1

def enclosingAdditionalCoordinates(additionalCoordinates: Seq[AdditionalCoordinate]): AdditionalAxis = {
Review comment (Member): Could you add a comment what this is for? I didn’t quite understand it on first read. Also, if it returns an Axis, why is it called enclosingAdditionalCoordinates? 🤔

Reply (Contributor, Author): I renamed the method and added comments. Is it clearer now?

val matchingCoordinate = additionalCoordinates.find(ac => ac.name == name)
matchingCoordinate match {
case Some(ac) =>
AdditionalAxis(name, Array(ac.value, ac.value + 1), index)
case None =>
// Use the lower bound as fallback
AdditionalAxis(name, Array(lowerBound, lowerBound + 1), index)
}
}
}

object AdditionalAxis {
@@ -112,5 +123,4 @@
}
case None => Seq.empty
}

}
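
A usage sketch of the new enclosingAdditionalCoordinates helper, with stand-in copies of the two models so the snippet runs on its own; the AdditionalCoordinate field names are an assumption.

object EnclosingAxisSketch extends App {
  // Stand-ins mirroring the method above, only for illustration.
  case class AdditionalCoordinate(name: String, value: Int)
  case class AdditionalAxis(name: String, bounds: Array[Int], index: Int) {
    def enclosingAdditionalCoordinates(additionalCoordinates: Seq[AdditionalCoordinate]): AdditionalAxis =
      additionalCoordinates.find(_.name == name) match {
        case Some(ac) => AdditionalAxis(name, Array(ac.value, ac.value + 1), index)
        case None     => AdditionalAxis(name, Array(bounds(0), bounds(0) + 1), index) // fall back to the lower bound
      }
  }

  val tAxis = AdditionalAxis("t", Array(0, 20), index = 1)
  // With a matching coordinate the axis is narrowed to the selected single slice ...
  println(tAxis.enclosingAdditionalCoordinates(Seq(AdditionalCoordinate("t", 12))).bounds.mkString(",")) // 12,13
  // ... without one, the lower bound is used.
  println(tAxis.enclosingAdditionalCoordinates(Seq.empty).bounds.mkString(","))                          // 0,1
}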