From 429b18bc8a3ced5e90cf09a9e319e761e77e7356 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 31 Jul 2020 16:50:37 +0200 Subject: [PATCH 001/121] keep fixed zoom during td rotation to planes --- .../oxalis/controller/camera_controller.js | 64 ++++++++----------- 1 file changed, 28 insertions(+), 36 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index d515b8294f9..24a6ceeb71f 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -183,8 +183,9 @@ type TweenState = { export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { const state = Store.getState(); const { dataset } = state; + const { tdCamera } = state.viewModeData.plane; const b = voxelToNm(dataset.dataSource.scale, getBoundaries(dataset).upperBoundary); - const pos = voxelToNm(dataset.dataSource.scale, getPosition(state.flycam)); + const flycamPos = voxelToNm(dataset.dataSource.scale, getPosition(state.flycam)); const aspectRatio = getInputCatcherAspectRatio(state, OrthoViews.TDView); // This distance ensures that the 3D camera is so far "in the back" that all elements in the scene @@ -201,8 +202,8 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { const b1 = -b[1]; const x1 = 0; const y1 = b[1]; - const x2 = pos[0]; - const y2 = pos[1]; + const x2 = flycamPos[0]; + const y2 = flycamPos[1]; const b2 = 1 / Math.sqrt((b1 * b1) / a1 / a1 + 1); const a2 = (-b2 * b1) / a1; @@ -212,7 +213,7 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { const distance = Dimensions.distance([x1, y1], intersect); // Approximation to center the view vertically - const yOffset = pos[2] - b[2] / 2; + const yOffset = flycamPos[2] - b[2] / 2; // Calulate the x coordinate so that the vector from the camera to the cube's middle point is // perpendicular to the vector going from (0, b[1], 0) to (b[0], 0, 0). 
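The a1/b1/a2/b2/d2 arithmetic in the hunk above is an orthogonal projection: it drops a
perpendicular from the flycam position onto the dataset's XY diagonal (the line from
(0, b[1]) to (b[0], 0)) and measures how far along that diagonal the foot point lies.
A minimal equivalent sketch, assuming the same b (upper boundary in nm) and the flycam
position in nm (pos / flycamPos above); the names diagonalStart, unitDir and
distanceAlongDiagonal are illustrative only, and distanceAlongDiagonal equals the
distance variable computed above for positions inside the dataset bounds:

    const diagonalStart = [0, b[1]];   // (x1, y1) above
    const diagonalDir = [b[0], -b[1]]; // (a1, b1) above, pointing towards (b[0], 0)
    const diagonalLength = Math.hypot(diagonalDir[0], diagonalDir[1]);
    const unitDir = [diagonalDir[0] / diagonalLength, diagonalDir[1] / diagonalLength];
    // Scalar projection of (pos - diagonalStart) onto the diagonal direction.
    const distanceAlongDiagonal =
      (pos[0] - diagonalStart[0]) * unitDir[0] + (pos[1] - diagonalStart[1]) * unitDir[1];
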
@@ -240,18 +241,8 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { b: squareCenterY - height / 2, }; } else { - const ind = Dimensions.getIndices(id); - const width = Math.max(b[ind[0]], b[ind[1]] * 1.12) * 1.1; - const height = width / aspectRatio; - - const paddingTop = width * 0.12; - const padding = ((width / 1.1) * 0.1) / 2; - const offsetX = pos[ind[0]] + padding + (width - b[ind[0]]) / 2; - const offsetY = pos[ind[1]] + paddingTop + padding; - - const l = -offsetX; - const t = offsetY; - + const width = tdCamera.right - tdCamera.left; + const height = tdCamera.top - tdCamera.bottom; const positionOffset: OrthoViewMap = { [OrthoViews.PLANE_XY]: [0, 0, -clippingOffsetFactor], [OrthoViews.PLANE_YZ]: [clippingOffsetFactor, 0, 0], @@ -270,46 +261,47 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { upX: upVector[id][0], upY: upVector[id][1], upZ: upVector[id][2], - l, - t, - r: l + width, - b: t - height, + l: -width / 2, + r: width / 2, + t: height / 2, + b: -height / 2, }; } const updateCameraTDView = (tweenState: TweenState) => { - const p = voxelToNm( + const currentFlycamPos = voxelToNm( Store.getState().dataset.dataSource.scale, getPosition(Store.getState().flycam), ); - Store.dispatch( setTDCameraAction({ - position: [tweenState.dx + p[0], tweenState.dy + p[1], tweenState.dz + p[2]], + position: [ + tweenState.dx + currentFlycamPos[0], + tweenState.dy + currentFlycamPos[1], + tweenState.dz + currentFlycamPos[2], + ], left: tweenState.l, right: tweenState.r, top: tweenState.t, bottom: tweenState.b, up: [tweenState.upX, tweenState.upY, tweenState.upZ], - lookAt: p, + lookAt: currentFlycamPos, }), ); }; if (animate) { - const camera = state.viewModeData.plane.tdCamera; - const from = { - upX: camera.up[0], - upY: camera.up[1], - upZ: camera.up[2], - dx: camera.position[0] - pos[0], - dy: camera.position[1] - pos[1], - dz: camera.position[2] - pos[2], - l: camera.left, - r: camera.right, - t: camera.top, - b: camera.bottom, + upX: tdCamera.up[0], + upY: tdCamera.up[1], + upZ: tdCamera.up[2], + dx: tdCamera.position[0] - flycamPos[0], + dy: tdCamera.position[1] - flycamPos[1], + dz: tdCamera.position[2] - flycamPos[2], + l: tdCamera.left, + r: tdCamera.right, + t: tdCamera.top, + b: tdCamera.bottom, }; const tween = new TWEEN.Tween(from); From 14eb2a109029329e1ceeaeec914d63060ce90a70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 3 Aug 2020 17:09:00 +0200 Subject: [PATCH 002/121] refactor 3d rotation method to keep the zoom and simplify it --- .../oxalis/controller/camera_controller.js | 177 ++++++++---------- .../model/accessors/dataset_accessor.js | 2 +- 2 files changed, 82 insertions(+), 97 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index 24a6ceeb71f..e1d7897932e 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -16,7 +16,10 @@ import { OrthoViews, type Vector3, } from "oxalis/constants"; -import { getBoundaries } from "oxalis/model/accessors/dataset_accessor"; +import { + getDatasetExtentInLength, + getDatasetCenter, +} from "oxalis/model/accessors/dataset_accessor"; import { getInputCatcherAspectRatio } from "oxalis/model/accessors/view_mode_accessor"; import { getPlaneExtentInVoxelFromStore, @@ -25,7 +28,6 @@ import { import { listenToStoreProperty } from 
"oxalis/model/helpers/listener_helpers"; import { setTDCameraAction } from "oxalis/model/actions/view_mode_actions"; import { voxelToNm, getBaseVoxel } from "oxalis/model/scaleinfo"; -import Dimensions from "oxalis/model/dimensions"; import Store, { type CameraData } from "oxalis/store"; import api from "oxalis/api/internal_api"; @@ -168,81 +170,63 @@ class CameraController extends React.PureComponent { } type TweenState = { - upX: number, - upY: number, - upZ: number, - dx: number, - dy: number, - dz: number, - l: number, - r: number, - t: number, - b: number, + up: Vector3, + position: Vector3, + left: number, + right: number, + top: number, + bottom: number, }; export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { const state = Store.getState(); const { dataset } = state; const { tdCamera } = state.viewModeData.plane; - const b = voxelToNm(dataset.dataSource.scale, getBoundaries(dataset).upperBoundary); const flycamPos = voxelToNm(dataset.dataSource.scale, getPosition(state.flycam)); - - const aspectRatio = getInputCatcherAspectRatio(state, OrthoViews.TDView); + const datasetExtent = getDatasetExtentInLength(dataset); // This distance ensures that the 3D camera is so far "in the back" that all elements in the scene // are in front of it and thus visible. const clippingOffsetFactor = 900000; - - let to: TweenState; - if (id === OrthoViews.TDView) { - const diagonal = Math.sqrt(b[0] * b[0] + b[1] * b[1]); - const padding = 0.05 * diagonal; - - // Calculate the distance from (0, b[1]) in order to center the view - const a1 = b[0]; - const b1 = -b[1]; - const x1 = 0; - const y1 = b[1]; - const x2 = flycamPos[0]; - const y2 = flycamPos[1]; - - const b2 = 1 / Math.sqrt((b1 * b1) / a1 / a1 + 1); - const a2 = (-b2 * b1) / a1; - const d2 = ((a1 / b1) * (y1 - y2) - x1 + x2) / (-a2 + (a1 * b2) / b1); - - const intersect = [x2 + d2 * a2, y2 + d2 * b2]; - const distance = Dimensions.distance([x1, y1], intersect); - - // Approximation to center the view vertically - const yOffset = flycamPos[2] - b[2] / 2; - - // Calulate the x coordinate so that the vector from the camera to the cube's middle point is - // perpendicular to the vector going from (0, b[1], 0) to (b[0], 0, 0). - - const squareLeft = -distance - padding; - const squareRight = diagonal - distance + padding; - const squareTop = diagonal / 2 + padding + yOffset; - const squareBottom = -diagonal / 2 - padding + yOffset; - const squareCenterX = (squareLeft + squareRight) / 2; - const squareCenterY = (squareTop + squareBottom) / 2; - const squareWidth = Math.abs(squareLeft - squareRight); - - const height = squareWidth / aspectRatio; - - to = { - dx: (b[1] / diagonal) * clippingOffsetFactor, - dy: (b[0] / diagonal) * clippingOffsetFactor, - dz: (-1 / 2) * clippingOffsetFactor, - upX: 0, - upY: 0, - upZ: -1, - l: squareCenterX - squareWidth / 2, - r: squareCenterX + squareWidth / 2, - t: squareCenterY + height / 2, - b: squareCenterY - height / 2, - }; + // Factor to reduce the clipping offset of the z coordinate to get a better angle on the dataset. + const zReductionFactor = 0.7; + // Use width and height to keep the same zoom. + let width = tdCamera.right - tdCamera.left; + let height = tdCamera.top - tdCamera.bottom; + + let position: Vector3; + let up: Vector3; + // Way to calculate the position and rotation of the camera: + // First, the camera is either positioned at the current center of the flycam or in the dataset center. 
+ // Second, the camera is the moved backwards by a clipping factor into the wanted direction. + // Together with matching lookUp (up) vectors and keeping the width and height, the position and rotation updates correctly. + if (id === OrthoViews.TDView && (height <= 0 || width <= 0)) { + // This should only be the case when initializing the 3D-viewport. + const aspectRatio = getInputCatcherAspectRatio(state, OrthoViews.TDView); + const datasetCenter = voxelToNm(dataset.dataSource.scale, getDatasetCenter(dataset)); + // The camera has not width and height which might be due to a bug or the camera has not been initialized. + // Thus we zoom out to show the whole dataset. + const paddingFactor = 1.1; + width = + Math.sqrt( + datasetExtent.width * datasetExtent.width + datasetExtent.height * datasetExtent.height, + ) * paddingFactor; + height = width / aspectRatio; + up = [0, 0, -1]; + // For very high datasets that have a very low or hight z starting coordinate, the planes might not be visible. + // Thus take the z coordinate of the flycam instead of the z coordinate of the center. + position = [ + datasetCenter[0] - clippingOffsetFactor, + datasetCenter[1] - clippingOffsetFactor, + flycamPos[2] - clippingOffsetFactor * zReductionFactor, + ]; + } else if (id === OrthoViews.TDView) { + position = [ + flycamPos[0] - clippingOffsetFactor, + flycamPos[1] - clippingOffsetFactor, + flycamPos[2] - clippingOffsetFactor * zReductionFactor, + ]; + up = [0, 0, -1]; } else { - const width = tdCamera.right - tdCamera.left; - const height = tdCamera.top - tdCamera.bottom; const positionOffset: OrthoViewMap = { [OrthoViews.PLANE_XY]: [0, 0, -clippingOffsetFactor], [OrthoViews.PLANE_YZ]: [clippingOffsetFactor, 0, 0], @@ -253,38 +237,39 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { [OrthoViews.PLANE_YZ]: [0, -1, 0], [OrthoViews.PLANE_XZ]: [0, 0, -1], }; - - to = { - dx: positionOffset[id][0], - dy: positionOffset[id][1], - dz: positionOffset[id][2], - upX: upVector[id][0], - upY: upVector[id][1], - upZ: upVector[id][2], - l: -width / 2, - r: width / 2, - t: height / 2, - b: -height / 2, - }; + up = upVector[id]; + position = [ + positionOffset[id][0] + flycamPos[0], + positionOffset[id][1] + flycamPos[1], + positionOffset[id][2] + flycamPos[2], + ]; } + // Ensure a valid width and height if camera ended up with no width and height due to a bug. + width = width > 0 ? width : datasetExtent.width; + height = height > 0 ? 
height : datasetExtent.height; + const to: TweenState = { + position, + up, + left: -width / 2, + right: width / 2, + top: height / 2, + bottom: -height / 2, + }; const updateCameraTDView = (tweenState: TweenState) => { const currentFlycamPos = voxelToNm( Store.getState().dataset.dataSource.scale, getPosition(Store.getState().flycam), ); + const { position: newPosition, up: upDirection, left, right, top, bottom } = tweenState; Store.dispatch( setTDCameraAction({ - position: [ - tweenState.dx + currentFlycamPos[0], - tweenState.dy + currentFlycamPos[1], - tweenState.dz + currentFlycamPos[2], - ], - left: tweenState.l, - right: tweenState.r, - top: tweenState.t, - bottom: tweenState.b, - up: [tweenState.upX, tweenState.upY, tweenState.upZ], + position: newPosition, + up: upDirection, + left, + right, + top, + bottom, lookAt: currentFlycamPos, }), ); @@ -295,13 +280,13 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { upX: tdCamera.up[0], upY: tdCamera.up[1], upZ: tdCamera.up[2], - dx: tdCamera.position[0] - flycamPos[0], - dy: tdCamera.position[1] - flycamPos[1], - dz: tdCamera.position[2] - flycamPos[2], - l: tdCamera.left, - r: tdCamera.right, - t: tdCamera.top, - b: tdCamera.bottom, + dx: tdCamera.position[0], + dy: tdCamera.position[1], + dz: tdCamera.position[2], + left: tdCamera.left, + right: tdCamera.right, + top: tdCamera.top, + bottom: tdCamera.bottom, }; const tween = new TWEEN.Tween(from); diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 1996f120af5..f80185bdb1b 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -192,7 +192,7 @@ export function getDatasetExtentInVoxel(dataset: APIDataset) { return extent; } -function getDatasetExtentInLength(dataset: APIDataset): BoundingBoxObject { +export function getDatasetExtentInLength(dataset: APIDataset): BoundingBoxObject { const extentInVoxel = getDatasetExtentInVoxel(dataset); const { scale } = dataset.dataSource; const topLeft = ((extentInVoxel.topLeft.map((val, index) => val * scale[index]): any): Vector3); From fc278a9dfc6a2f1b54b2115706ed21d196384a79 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 3 Aug 2020 17:41:37 +0200 Subject: [PATCH 003/121] fix position and up vector not getting interpolated correctly --- .../oxalis/controller/camera_controller.js | 30 ++++++++++++------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index e1d7897932e..84678552375 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -170,8 +170,12 @@ class CameraController extends React.PureComponent { } type TweenState = { - up: Vector3, - position: Vector3, + upX: number, + upY: number, + upZ: number, + xPos: number, + yPos: number, + zPos: number, left: number, right: number, top: number, @@ -248,8 +252,12 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { width = width > 0 ? width : datasetExtent.width; height = height > 0 ? 
height : datasetExtent.height; const to: TweenState = { - position, - up, + xPos: position[0], + yPos: position[1], + zPos: position[2], + upX: up[0], + upY: up[1], + upZ: up[2], left: -width / 2, right: width / 2, top: height / 2, @@ -261,11 +269,11 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { Store.getState().dataset.dataSource.scale, getPosition(Store.getState().flycam), ); - const { position: newPosition, up: upDirection, left, right, top, bottom } = tweenState; + const { xPos, yPos, zPos, upX, upY, upZ, left, right, top, bottom } = tweenState; Store.dispatch( setTDCameraAction({ - position: newPosition, - up: upDirection, + position: [xPos, yPos, zPos], + up: [upX, upY, upZ], left, right, top, @@ -276,13 +284,13 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { }; if (animate) { - const from = { + const from: TweenState = { upX: tdCamera.up[0], upY: tdCamera.up[1], upZ: tdCamera.up[2], - dx: tdCamera.position[0], - dy: tdCamera.position[1], - dz: tdCamera.position[2], + xPos: tdCamera.position[0], + yPos: tdCamera.position[1], + zPos: tdCamera.position[2], left: tdCamera.left, right: tdCamera.right, top: tdCamera.top, From 34192870247890aee130f9827219cdc1d973507f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 3 Aug 2020 17:56:46 +0200 Subject: [PATCH 004/121] add changelog entry --- CHANGELOG.unreleased.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 4bc58f67526..114364b705d 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -16,6 +16,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Changed - When d/f switching is turned off and a slice is copied with the shortcut `v`, the previous slice used as the source will always be slice - 1 and `shift + v` will always take slice + 1 as the slice to copy from. [#4728](https://github.com/scalableminds/webknossos/pull/4728) - Disabled the autofill feature of the brush when using this tool to erase data. [#4729](https://github.com/scalableminds/webknossos/pull/4729) +- The rotation buttons of the 3D-viewport no longer change the zoom. [#4750](https://github.com/scalableminds/webknossos/pull/4750) ### Fixed - Speed up NML import in existing tracings for NMLs with many trees (20,000+). [#4742](https://github.com/scalableminds/webknossos/pull/4742) From 88ef65b52ce0c7634b98be008592bd0f166bf4a6 Mon Sep 17 00:00:00 2001 From: MichaelBuessemeyer <39529669+MichaelBuessemeyer@users.noreply.github.com> Date: Thu, 6 Aug 2020 15:05:11 +0200 Subject: [PATCH 005/121] Apply suggestions from code review Co-authored-by: Daniel --- frontend/javascripts/oxalis/controller/camera_controller.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index 84678552375..9e14a50656b 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -201,13 +201,13 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { let up: Vector3; // Way to calculate the position and rotation of the camera: // First, the camera is either positioned at the current center of the flycam or in the dataset center. 
- // Second, the camera is the moved backwards by a clipping factor into the wanted direction. + // Second, the camera is moved backwards by a clipping offset into the wanted direction. // Together with matching lookUp (up) vectors and keeping the width and height, the position and rotation updates correctly. if (id === OrthoViews.TDView && (height <= 0 || width <= 0)) { // This should only be the case when initializing the 3D-viewport. const aspectRatio = getInputCatcherAspectRatio(state, OrthoViews.TDView); const datasetCenter = voxelToNm(dataset.dataSource.scale, getDatasetCenter(dataset)); - // The camera has not width and height which might be due to a bug or the camera has not been initialized. + // The camera has no width and height which might be due to a bug or the camera has not been initialized. // Thus we zoom out to show the whole dataset. const paddingFactor = 1.1; width = @@ -216,7 +216,7 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { ) * paddingFactor; height = width / aspectRatio; up = [0, 0, -1]; - // For very high datasets that have a very low or hight z starting coordinate, the planes might not be visible. + // For very tall datasets that have a very low or high z starting coordinate, the planes might not be visible. // Thus take the z coordinate of the flycam instead of the z coordinate of the center. position = [ datasetCenter[0] - clippingOffsetFactor, From 58bd2f9676eb54bd6ba8d12ecdf1b02922e92846 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 6 Aug 2020 15:06:05 +0200 Subject: [PATCH 006/121] remove zReductionFactor --- frontend/javascripts/oxalis/controller/camera_controller.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index 84678552375..af8681f5f07 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -191,8 +191,6 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { // This distance ensures that the 3D camera is so far "in the back" that all elements in the scene // are in front of it and thus visible. const clippingOffsetFactor = 900000; - // Factor to reduce the clipping offset of the z coordinate to get a better angle on the dataset. - const zReductionFactor = 0.7; // Use width and height to keep the same zoom. 
let width = tdCamera.right - tdCamera.left; let height = tdCamera.top - tdCamera.bottom; @@ -221,13 +219,13 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { position = [ datasetCenter[0] - clippingOffsetFactor, datasetCenter[1] - clippingOffsetFactor, - flycamPos[2] - clippingOffsetFactor * zReductionFactor, + flycamPos[2] - clippingOffsetFactor, ]; } else if (id === OrthoViews.TDView) { position = [ flycamPos[0] - clippingOffsetFactor, flycamPos[1] - clippingOffsetFactor, - flycamPos[2] - clippingOffsetFactor * zReductionFactor, + flycamPos[2] - clippingOffsetFactor, ]; up = [0, 0, -1]; } else { From 59b490c8774d959cad2611ae4ba05c574f85bcb8 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 6 Aug 2020 15:37:40 +0200 Subject: [PATCH 007/121] backend: support anisotropic resolutions in volume bucket keys --- .../volume/VolumeTracingBucketHelper.scala | 57 +++++++++++++------ 1 file changed, 40 insertions(+), 17 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index e2f71a65219..024a979de68 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.scalableminds.util.geometry.Point3D import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.util.tools.ExtendedTypes._ import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass} import com.scalableminds.webknossos.tracingstore.tracings.{ @@ -56,13 +57,19 @@ trait VolumeTracingBucketHelper implicit def volumeDataStore: FossilDBClient private def buildKeyPrefix(dataLayerName: String, resolution: Int): String = - s"$dataLayerName/${resolution}/" + s"$dataLayerName/$resolution/" private def buildBucketKey(dataLayerName: String, bucket: BucketPosition): String = { val mortonIndex = mortonEncode(bucket.x, bucket.y, bucket.z) - s"$dataLayerName/${bucket.resolution.maxDim}/$mortonIndex-[${bucket.x},${bucket.y},${bucket.z}]" + s"$dataLayerName/${formatResolution(bucket.resolution)}/$mortonIndex-[${bucket.x},${bucket.y},${bucket.z}]" } + private def formatResolution(resolution: Point3D): String = + if (resolution.x == resolution.y && resolution.x == resolution.z) + s"${resolution.maxDim}" + else + s"${resolution.x}-${resolution.y}-${resolution.z}" + def loadBucket(dataLayer: VolumeTracingLayer, bucket: BucketPosition, version: Option[Long] = None): Fox[Array[Byte]] = { @@ -111,15 +118,15 @@ class VersionedBucketIterator(prefix: String, with KeyValueStoreImplicits with VolumeBucketCompression with FoxImplicits { - val batchSize = 64 + private val batchSize = 64 - var currentStartKey = prefix - var currentBatchIterator: Iterator[VersionedKeyValuePair[Array[Byte]]] = fetchNext + private var currentStartKey = prefix + private var currentBatchIterator: Iterator[VersionedKeyValuePair[Array[Byte]]] = fetchNext - def fetchNext = + private def fetchNext = volumeDataStore.getMultipleKeys(currentStartKey, Some(prefix), version, Some(batchSize)).toIterator - def fetchNextAndSave = { + private def fetchNextAndSave = { 
currentBatchIterator = fetchNext if (currentBatchIterator.hasNext) currentBatchIterator.next //in pagination, skip first entry because it was already the last entry of the previous batch currentBatchIterator @@ -138,24 +145,40 @@ class VersionedBucketIterator(prefix: String, } private def parseBucketKey(key: String): Option[(String, BucketPosition)] = { - val keyRx = "([0-9a-z-]+)/(\\d+)/-?\\d+-\\[(\\d+),(\\d+),(\\d+)\\]".r + val keyRx = "([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/-?\\d+-\\[(\\d+),(\\d+),(\\d+)]".r key match { case keyRx(name, resolutionStr, xStr, yStr, zStr) => - val resolution = resolutionStr.toInt - val x = xStr.toInt - val y = yStr.toInt - val z = zStr.toInt - val bucket = new BucketPosition(x * resolution * DataLayer.bucketLength, - y * resolution * DataLayer.bucketLength, - z * resolution * DataLayer.bucketLength, - Point3D(resolution, resolution, resolution)) - Some((name, bucket)) + val resolutionOpt = parseResolution(resolutionStr) + resolutionOpt match { + case Some(resolution) => + val x = xStr.toInt + val y = yStr.toInt + val z = zStr.toInt + val bucket = new BucketPosition(x * resolution.x * DataLayer.bucketLength, + y * resolution.y * DataLayer.bucketLength, + z * resolution.z * DataLayer.bucketLength, + resolution) + Some((name, bucket)) + case _ => None + } + case _ => None } } + private def parseResolution(resolutionStr: String): Option[Point3D] = + resolutionStr.toIntOpt match { + case Some(resolutionInt) => Some(Point3D(resolutionInt, resolutionInt, resolutionInt)) + case None => + val pattern = """(\d+)-(\d+)-(\d+)""".r + resolutionStr match { + case pattern(x, y, z) => Some(Point3D(x.toInt, y.toInt, z.toInt)) + case _ => None + } + } + } class BucketIterator(prefix: String, From 0298bf453b53504005e9aa8da766fe78881d60e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 7 Aug 2020 14:53:24 +0200 Subject: [PATCH 008/121] fix 3d rotation preset --- .../javascripts/oxalis/controller/camera_controller.js | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index 415a606c37b..d880f2edff0 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -216,15 +216,17 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { up = [0, 0, -1]; // For very tall datasets that have a very low or high z starting coordinate, the planes might not be visible. // Thus take the z coordinate of the flycam instead of the z coordinate of the center. + // The clippingOffsetFactor is added in x and y direction to get a view on the dataset the 3D view that is close to the plane views. + // Thus the rotation between the 3D view to the eg. XY plane views is much shorter and the interpolated rotation does not look weird. 
position = [ - datasetCenter[0] - clippingOffsetFactor, - datasetCenter[1] - clippingOffsetFactor, + datasetCenter[0] + clippingOffsetFactor, + datasetCenter[1] + clippingOffsetFactor, flycamPos[2] - clippingOffsetFactor, ]; } else if (id === OrthoViews.TDView) { position = [ - flycamPos[0] - clippingOffsetFactor, - flycamPos[1] - clippingOffsetFactor, + flycamPos[0] + clippingOffsetFactor, + flycamPos[1] + clippingOffsetFactor, flycamPos[2] - clippingOffsetFactor, ]; up = [0, 0, -1]; From 7801896da0423bb7c3f9029623b6bc92656dcc6f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 7 Aug 2020 15:06:54 +0200 Subject: [PATCH 009/121] remove useless check for valid values --- frontend/javascripts/oxalis/controller/camera_controller.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index d880f2edff0..5be57a86ee0 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -248,9 +248,8 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { positionOffset[id][2] + flycamPos[2], ]; } - // Ensure a valid width and height if camera ended up with no width and height due to a bug. - width = width > 0 ? width : datasetExtent.width; - height = height > 0 ? height : datasetExtent.height; + width = datasetExtent.width; + height = datasetExtent.height; const to: TweenState = { xPos: position[0], yPos: position[1], From 50ae8b9bb08804caf9d9d55d332aacb08f7e1f79 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 10 Aug 2020 14:07:39 +0200 Subject: [PATCH 010/121] remove reassignment of width and height --- frontend/javascripts/oxalis/controller/camera_controller.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index 5be57a86ee0..1fa3a341abf 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -248,8 +248,6 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { positionOffset[id][2] + flycamPos[2], ]; } - width = datasetExtent.width; - height = datasetExtent.height; const to: TweenState = { xPos: position[0], yPos: position[1], From a118495de7bd121cdbfaaefb65dc246f092cd1ad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 11 Aug 2020 12:07:35 +0200 Subject: [PATCH 011/121] do volume annotation in all resolutions --- frontend/javascripts/messages.js | 1 + frontend/javascripts/oxalis/api/api_latest.js | 4 +-- .../model/bucket_data_handling/data_cube.js | 36 ++++++++++++------- .../oxalis/model/sagas/volumetracing_saga.js | 8 ++--- .../oxalis/model/volumetracing/volumelayer.js | 9 ++++- .../oxalis/model_initialization.js | 8 +++-- 6 files changed, 45 insertions(+), 21 deletions(-) diff --git a/frontend/javascripts/messages.js b/frontend/javascripts/messages.js index 09c8fb398dc..e940e5777cc 100644 --- a/frontend/javascripts/messages.js +++ b/frontend/javascripts/messages.js @@ -165,6 +165,7 @@ instead. Only enable this option if you understand its effect. 
All layers will n "dataset.upload_success": "The dataset was uploaded successfully.", "dataset.add_success": "The dataset was added successfully.", "dataset.add_error": "Could not reach the datastore.", + "dataset.segmentationlayer_not_existing": "This tracing has no segmentation layer.", "dataset.invalid_datasource_json": "The datasource-properties.json on disk is invalid. The values below are guessed by webKnossos. Please review all properties before importing the dataset. You can always go back and change the values later.", "dataset.missing_datasource_json": diff --git a/frontend/javascripts/oxalis/api/api_latest.js b/frontend/javascripts/oxalis/api/api_latest.js index 9c1e76e23ad..e0df6e8e690 100644 --- a/frontend/javascripts/oxalis/api/api_latest.js +++ b/frontend/javascripts/oxalis/api/api_latest.js @@ -1032,13 +1032,13 @@ class DataApi { * @example // Set the segmentation id for some voxels to 1337 * api.data.labelVoxels([[1,1,1], [1,2,1], [2,1,1], [2,2,1]], 1337); */ - labelVoxels(voxels: Array, label: number): void { + labelVoxels(voxels: Array, label: number, zoomStep: number = 0): void { assertVolume(Store.getState().tracing); const segmentationLayer = this.model.getSegmentationLayer(); assertExists(segmentationLayer, "Segmentation layer not found!"); for (const voxel of voxels) { - segmentationLayer.cube.labelVoxel(voxel, label); + segmentationLayer.cube.labelVoxelInResolution(voxel, label, zoomStep); } segmentationLayer.cube.pushQueue.push(); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 4df1110cf73..cb5d9cd0cba 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -316,7 +316,7 @@ class DataCube { if ( Math.sqrt((x - 100) * (x - 100) + (y - 100) * (y - 100) + (z - 100) * (z - 100)) <= 20 ) { - this.labelVoxel([x, y, z], 5); + this.labelVoxelInResolution([x, y, z], 5, 0); } } } @@ -325,30 +325,42 @@ class DataCube { this.trigger("volumeLabeled"); } - labelVoxels(iterator: VoxelIterator, label: number, activeCellId?: ?number = null): void { - while (iterator.hasNext) { - const voxel = iterator.getNext(); - this.labelVoxel(voxel, label, activeCellId); + labelVoxelsInAllResolutions( + iterator: VoxelIterator, + label: number, + activeCellId?: ?number = null, + ): void { + const numberOfResolutions = getResolutions(Store.getState().dataset).length; + for (let zoomStep = 0; zoomStep < numberOfResolutions; ++zoomStep) { + while (iterator.hasNext) { + const voxel = iterator.getNext(); + this.labelVoxelInResolution(voxel, label, zoomStep, activeCellId); + } + iterator.reset(); } - this.pushQueue.push(); this.trigger("volumeLabeled"); } - labelVoxel(voxel: Vector3, label: number, activeCellId: ?number): void { + labelVoxelInResolution( + voxel: Vector3, + label: number, + zoomStep: number, + activeCellId: ?number, + ): void { let voxelInCube = true; for (let i = 0; i <= 2; i++) { voxelInCube = voxelInCube && voxel[i] >= 0 && voxel[i] < this.upperBoundary[i]; } if (voxelInCube) { - const address = this.positionToBaseAddress(voxel); + const address = this.positionToZoomedAddress(voxel, zoomStep); const bucket = this.getOrCreateBucket(address); if (bucket instanceof DataBucket) { - const voxelIndex = this.getVoxelIndex(voxel); + const voxelIndex = this.getVoxelIndex(voxel, zoomStep); let shouldUpdateVoxel = true; if (activeCellId != null) { - const voxelValue = 
this.getMappedDataValue(voxel); + const voxelValue = this.getMappedDataValue(voxel, zoomStep); shouldUpdateVoxel = activeCellId === voxelValue; } @@ -411,12 +423,12 @@ class DataCube { return this.getVoxelIndexByVoxelOffset(voxelOffset); } - positionToZoomedAddress(position: Vector3, resolutionIndex: number = 0): Vector4 { + positionToZoomedAddress(position: Vector3, zoomStep: number = 0): Vector4 { // return the bucket a given voxel lies in return globalPositionToBucketPosition( position, getResolutions(Store.getState().dataset), - resolutionIndex, + zoomStep, ); } diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index dffd7b364ec..c6747ee4cc0 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -166,16 +166,16 @@ function* labelWithIterator(iterator, contourTracingMode): Saga { const { cube } = segmentationLayer; switch (contourTracingMode) { case ContourModeEnum.DRAW_OVERWRITE: - yield* call([cube, cube.labelVoxels], iterator, activeCellId); + yield* call([cube, cube.labelVoxelsInAllResolutions], iterator, activeCellId); break; case ContourModeEnum.DRAW: - yield* call([cube, cube.labelVoxels], iterator, activeCellId, 0); + yield* call([cube, cube.labelVoxelsInAllResolutions], iterator, activeCellId, 0); break; case ContourModeEnum.DELETE_FROM_ACTIVE_CELL: - yield* call([cube, cube.labelVoxels], iterator, 0, activeCellId); + yield* call([cube, cube.labelVoxelsInAllResolutions], iterator, 0, activeCellId); break; case ContourModeEnum.DELETE_FROM_ANY_CELL: - yield* call([cube, cube.labelVoxels], iterator, 0); + yield* call([cube, cube.labelVoxelsInAllResolutions], iterator, 0); break; default: throw new Error("Invalid volume tracing mode."); diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js index 95a5b57915f..3192a58969c 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js @@ -52,6 +52,12 @@ export class VoxelIterator { this.minCoord2d = minCoord2d; this.get3DCoordinate = get3DCoordinate; this.boundingBox = boundingBox; + this.reset(); + } + + reset() { + this.x = 0; + this.y = 0; if (!this.map || !this.map[0]) { this.hasNext = false; } else { @@ -94,9 +100,10 @@ export class VoxelIterator { this.x + this.minCoord2d[0], this.y + this.minCoord2d[1], ]); - // check position for beeing in bounds + // Check if position is in bounds. if (this.isCoordinateInBounds(currentCoordinate)) { this.next = currentCoordinate; + this.hasNext = true; foundNext = true; } } diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index 532b19d2da9..fae6606e07a 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -441,7 +441,11 @@ function setupLayerForVolumeTracing( ): Array { // This method adds/merges the segmentation layers of the tracing into the dataset layers let layers = _.clone(dataset.dataSource.dataLayers); - + const segmentationLayer = layers.find(layer => layer.category === "segmentation"); + if (!segmentationLayer) { + Toast.error(messages["dataset.segmentationlayer_not_existing"]); + throw HANDLED_ERROR; + } // The tracing always contains the layer information for the user segmentation. 
// Two possible cases: // 1) No segmentation exists yet: In that case layers doesn't contain the dataLayer - it needs @@ -459,7 +463,7 @@ function setupLayerForVolumeTracing( largestSegmentId: tracing.largestSegmentId, boundingBox: convertBoundariesToBoundingBox(boundaries), // volume tracing can only be done for the first resolution - resolutions: [[1, 1, 1]], + resolutions: segmentationLayer.resolutions, mappings: fallbackLayer != null && fallbackLayer.mappings != null ? fallbackLayer.mappings : [], // remember the name of the original layer, used to request mappings fallbackLayer: tracing.fallbackLayer, From 618b471ed248122e27ac47ebf8fc248df6c64b4d Mon Sep 17 00:00:00 2001 From: MichaelBuessemeyer <39529669+MichaelBuessemeyer@users.noreply.github.com> Date: Tue, 11 Aug 2020 14:01:48 +0200 Subject: [PATCH 012/121] Update frontend/javascripts/oxalis/controller/camera_controller.js Co-authored-by: Philipp Otto --- frontend/javascripts/oxalis/controller/camera_controller.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index 1fa3a341abf..3cb07e62537 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -210,7 +210,7 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { const paddingFactor = 1.1; width = Math.sqrt( - datasetExtent.width * datasetExtent.width + datasetExtent.height * datasetExtent.height, + datasetExtent.width **2 + datasetExtent.height ** 2, ) * paddingFactor; height = width / aspectRatio; up = [0, 0, -1]; From 83fee8bb76efc591daa36c7c5dafbf2b1434fbcf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 11 Aug 2020 15:34:26 +0200 Subject: [PATCH 013/121] made code pretty --- frontend/javascripts/oxalis/controller/camera_controller.js | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/camera_controller.js b/frontend/javascripts/oxalis/controller/camera_controller.js index 3cb07e62537..5ba50af8492 100644 --- a/frontend/javascripts/oxalis/controller/camera_controller.js +++ b/frontend/javascripts/oxalis/controller/camera_controller.js @@ -208,10 +208,7 @@ export function rotate3DViewTo(id: OrthoView, animate: boolean = true): void { // The camera has no width and height which might be due to a bug or the camera has not been initialized. // Thus we zoom out to show the whole dataset. const paddingFactor = 1.1; - width = - Math.sqrt( - datasetExtent.width **2 + datasetExtent.height ** 2, - ) * paddingFactor; + width = Math.sqrt(datasetExtent.width ** 2 + datasetExtent.height ** 2) * paddingFactor; height = width / aspectRatio; up = [0, 0, -1]; // For very tall datasets that have a very low or high z starting coordinate, the planes might not be visible. 
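Stepping back to the multi-resolution labeling introduced above: labelVoxelsInAllResolutions
re-runs the same voxel iterator once per zoom step, and labelVoxelInResolution resolves each
global position to a bucket address for that zoom step via positionToZoomedAddress. A
self-contained sketch of that addressing idea (not the actual globalPositionToBucketPosition
helper), assuming the usual bucket edge length of 32 voxels (DataLayer.bucketLength in the
backend code above) and an example resolutions array:

    const BUCKET_LENGTH = 32;
    function positionToBucketAddress(position, resolutions, zoomStep) {
      const resolution = resolutions[zoomStep];
      // One bucket covers BUCKET_LENGTH voxels per dimension at the given resolution,
      // so the same global voxel falls into a different bucket for every zoom step.
      return [
        Math.floor(position[0] / (resolution[0] * BUCKET_LENGTH)),
        Math.floor(position[1] / (resolution[1] * BUCKET_LENGTH)),
        Math.floor(position[2] / (resolution[2] * BUCKET_LENGTH)),
        zoomStep,
      ];
    }

For example, with resolutions [[1, 1, 1], [2, 2, 1]], the voxel at [100, 100, 100] lands in
bucket [3, 3, 3, 0] for zoom step 0 and in bucket [1, 1, 3, 1] for zoom step 1.
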
From e9591e71a7d9c3ca3b72ba17d6fdac5698f350aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 11 Aug 2020 15:41:43 +0200 Subject: [PATCH 014/121] reduce max brush size --- frontend/javascripts/libs/user_settings.schema.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/libs/user_settings.schema.js b/frontend/javascripts/libs/user_settings.schema.js index 7db90c7580c..486fe4ecce4 100644 --- a/frontend/javascripts/libs/user_settings.schema.js +++ b/frontend/javascripts/libs/user_settings.schema.js @@ -32,7 +32,7 @@ export const userSettings = { highlightHoveredCellId: { type: "boolean" }, zoom: { type: "number", minimum: 0.005 }, renderMissingDataBlack: { type: "boolean" }, - brushSize: { type: "number", minimum: 1, maximum: 5000 }, + brushSize: { type: "number", minimum: 1, maximum: 300 }, layoutScaleValue: { type: "number", minimum: 1, maximum: 5 }, autoSaveLayouts: { type: "boolean" }, gpuMemoryFactor: { type: "number" }, From 3c3abbf2f09cb86a890d73c57dbaeb71679af6f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 11 Aug 2020 18:14:05 +0200 Subject: [PATCH 015/121] disallow trace tool in higher resolutions --- frontend/javascripts/messages.js | 2 -- .../model/accessors/volumetracing_accessor.js | 12 ++++--- .../reducers/volumetracing_reducer_helpers.js | 7 +++-- .../oxalis/model/sagas/annotation_saga.js | 17 +--------- .../oxalis/model/sagas/root_saga.js | 8 ++--- .../oxalis/model/sagas/volumetracing_saga.js | 25 ++++++++++----- .../view/action-bar/volume_actions_view.js | 31 +++++++++++++++++-- .../stylesheets/trace_view/_tracing_view.less | 3 -- 8 files changed, 62 insertions(+), 43 deletions(-) diff --git a/frontend/javascripts/messages.js b/frontend/javascripts/messages.js index 09c8fb398dc..d2bc0a7c947 100644 --- a/frontend/javascripts/messages.js +++ b/frontend/javascripts/messages.js @@ -94,8 +94,6 @@ instead. Only enable this option if you understand its effect. All layers will n "tracing.branchpoint_set": "Branchpoint set", "tracing.branchpoint_jump_twice": "You didn't add a node after jumping to this branchpoint, do you really want to jump again?", - "tracing.segmentation_zoom_warning": - "Segmentation data and volume annotation is only fully supported at a smaller zoom level.", "tracing.uint64_segmentation_warning": "This is an unsigned 64-bit segmentation. The displayed ids are truncated to 32-bit. Thus, they might not match the ids on the server.", "tracing.segmentation_zoom_warning_agglomerate": diff --git a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js index fc85b8f4a05..9c006193eaf 100644 --- a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js @@ -44,10 +44,14 @@ export function getContourTracingMode(volumeTracing: VolumeTracing): ContourMode return contourTracingMode; } -export function isVolumeTracingDisallowed(state: OxalisState) { - const isVolumeTracing = state.tracing.volume != null; - const isWrongZoomStep = getRequestLogZoomStep(state) > 1; - return isVolumeTracing && isWrongZoomStep; +export function isVolumeTraceToolDisallowed(state: OxalisState) { + if (state.tracing.volume == null) { + return false; + } + // The current resolution is to high for allowing the trace tool + // because to many voxel could be annotated at the same time. 
+ const isZoomStepTooHigh = getRequestLogZoomStep(state) > 1.5; + return isZoomStepTooHigh; } export function isSegmentationMissingForZoomstep( diff --git a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.js b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.js index 893b1b5b47a..f0eaae5b39a 100644 --- a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.js +++ b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.js @@ -10,14 +10,14 @@ import update from "immutability-helper"; import { type ContourMode, type Vector3, type VolumeTool, VolumeToolEnum } from "oxalis/constants"; import type { OxalisState, VolumeTracing, VolumeCell } from "oxalis/store"; -import { isVolumeTracingDisallowed } from "oxalis/model/accessors/volumetracing_accessor"; +import { isVolumeTraceToolDisallowed } from "oxalis/model/accessors/volumetracing_accessor"; import { setDirectionReducer } from "oxalis/model/reducers/flycam_reducer"; export function setToolReducer(state: OxalisState, volumeTracing: VolumeTracing, tool: VolumeTool) { if (tool === volumeTracing.activeTool) { return state; } - if (tool !== VolumeToolEnum.MOVE && isVolumeTracingDisallowed(state)) { + if (tool === VolumeToolEnum.TRACE && isVolumeTraceToolDisallowed(state)) { return state; } @@ -101,7 +101,8 @@ export function addToLayerReducer( position: Vector3, ) { const { allowUpdate } = state.tracing.restrictions; - if (!allowUpdate || isVolumeTracingDisallowed(state)) { + const { activeTool } = volumeTracing; + if (!allowUpdate || (activeTool === VolumeToolEnum.TRACE && isVolumeTraceToolDisallowed(state))) { return state; } diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.js b/frontend/javascripts/oxalis/model/sagas/annotation_saga.js index a94f76c7621..92b1d4ddc33 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.js @@ -8,10 +8,6 @@ import { take, _delay, } from "oxalis/model/sagas/effect-generators"; -import { - isVolumeTracingDisallowed, - isSegmentationMissingForZoomstep, -} from "oxalis/model/accessors/volumetracing_accessor"; import Model from "oxalis/model"; import Store from "oxalis/store"; import Toast from "libs/toast"; @@ -46,23 +42,12 @@ function shouldDisplaySegmentationData(): boolean { return !isSegmentationLayerDisabled; } -export function* warnAboutSegmentationOpacity(): Saga { +export function* warnAboutSegmentationZoom(): Saga { function* warnMaybe(): Saga { const segmentationLayer = Model.getSegmentationLayer(); if (!segmentationLayer) { return; } - const isDisallowed = yield* select(isVolumeTracingDisallowed); - const isSegmentationMissing = yield* select(state => - isSegmentationMissingForZoomstep(state, segmentationLayer.cube.MAX_ZOOM_STEP), - ); - - if (shouldDisplaySegmentationData() && (isDisallowed || isSegmentationMissing)) { - Toast.error(messages["tracing.segmentation_zoom_warning"], { sticky: false, timeout: 3000 }); - } else { - Toast.close(messages["tracing.segmentation_zoom_warning"]); - } - const isAgglomerateMappingEnabled = yield* select( storeState => storeState.temporaryConfiguration.activeMapping.isMappingEnabled && diff --git a/frontend/javascripts/oxalis/model/sagas/root_saga.js b/frontend/javascripts/oxalis/model/sagas/root_saga.js index b8836dfff71..ecd9218f36d 100644 --- a/frontend/javascripts/oxalis/model/sagas/root_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/root_saga.js @@ -4,7 +4,7 @@ import { type 
Saga, _all, _call, _cancel, fork, take } from "oxalis/model/sagas/ import { alert } from "libs/window"; import { editVolumeLayerAsync, - disallowVolumeTracingWarning, + ensureNoTraceToolInLowResolutions, watchVolumeTracingAsync, } from "oxalis/model/sagas/volumetracing_saga"; import { @@ -14,7 +14,7 @@ import { toggleErrorHighlighting, } from "oxalis/model/sagas/save_saga"; import { - warnAboutSegmentationOpacity, + warnAboutSegmentationZoom, watchAnnotationAsync, } from "oxalis/model/sagas/annotation_saga"; import { watchDataRelevantChanges } from "oxalis/model/sagas/prefetch_saga"; @@ -38,14 +38,14 @@ export default function* rootSaga(): Saga { function* restartableSaga(): Saga { try { yield _all([ - _call(warnAboutSegmentationOpacity), + _call(warnAboutSegmentationZoom), _call(watchPushSettingsAsync), _call(watchSkeletonTracingAsync), _call(collectUndoStates), _call(saveTracingAsync), _call(pushAnnotationAsync), _call(editVolumeLayerAsync), - _call(disallowVolumeTracingWarning), + _call(ensureNoTraceToolInLowResolutions), _call(watchVolumeTracingAsync), _call(watchAnnotationAsync), _call(loadHistogramData), diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index dffd7b364ec..b4bae592949 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -5,6 +5,7 @@ import { type CopySegmentationLayerAction, resetContourAction, updateDirectionAction, + setToolAction, } from "oxalis/model/actions/volumetracing_actions"; import { type Saga, @@ -27,7 +28,7 @@ import { V3 } from "libs/mjs"; import type { VolumeTracing, Flycam } from "oxalis/store"; import { enforceVolumeTracing, - isVolumeTracingDisallowed, + isVolumeTraceToolDisallowed, } from "oxalis/model/accessors/volumetracing_accessor"; import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; import { @@ -89,12 +90,13 @@ export function* editVolumeLayerAsync(): Generator { contourTracingMode === ContourModeEnum.DRAW_OVERWRITE || contourTracingMode === ContourModeEnum.DRAW; - // Volume tracing for higher zoomsteps is currently not allowed - if (yield* select(state => isVolumeTracingDisallowed(state))) { + const activeTool = yield* select(state => enforceVolumeTracing(state.tracing).activeTool); + // The trace tool is not allowed for too high zoom steps. + const isZoomStepTooHighForTraceTool = yield* select(isVolumeTraceToolDisallowed); + if (isZoomStepTooHighForTraceTool && activeTool !== VolumeToolEnum.TRACE) { continue; } const currentLayer = yield* call(createVolumeLayer, startEditingAction.planeId); - const activeTool = yield* select(state => enforceVolumeTracing(state.tracing).activeTool); const initialViewport = yield* select(state => state.viewModeData.plane.activeViewport); const activeViewportBounding = yield* call(getBoundingsFromPosition, initialViewport); @@ -255,11 +257,18 @@ export function* finishLayer( yield* put(resetContourAction()); } -export function* disallowVolumeTracingWarning(): Saga<*> { +export function* ensureNoTraceToolInLowResolutions(): Saga<*> { + yield* take("INITIALIZE_VOLUMETRACING"); while (true) { - yield* take(["SET_TOOL", "CYCLE_TOOL"]); - if (yield* select(state => isVolumeTracingDisallowed(state))) { - Toast.warning("Volume tracing is not possible at this zoom level. 
Please zoom in further."); + yield* take(["ZOOM_IN", "ZOOM_OUT", "ZOOM_BY_DELTA", "SET_ZOOM_STEP"]); + const isResolutionToLowForTraceTool = yield* select(state => + isVolumeTraceToolDisallowed(state), + ); + const isTraceToolActive = yield* select( + state => enforceVolumeTracing(state.tracing).activeTool === VolumeToolEnum.TRACE, + ); + if (isResolutionToLowForTraceTool && isTraceToolActive) { + yield* put(setToolAction(VolumeToolEnum.MOVE)); } } } diff --git a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index 173183cbdff..cfb7a419195 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -1,11 +1,14 @@ // @flow -import { Button, Radio } from "antd"; +import { Button, Radio, Tooltip } from "antd"; import { connect } from "react-redux"; import React, { PureComponent } from "react"; import { type VolumeTool, VolumeToolEnum } from "oxalis/constants"; import { document } from "libs/window"; -import { enforceVolumeTracing } from "oxalis/model/accessors/volumetracing_accessor"; +import { + enforceVolumeTracing, + isVolumeTraceToolDisallowed, +} from "oxalis/model/accessors/volumetracing_accessor"; import { setToolAction, createCellAction } from "oxalis/model/actions/volumetracing_actions"; import ButtonComponent from "oxalis/view/components/button_component"; import Store, { type OxalisState } from "oxalis/store"; @@ -17,8 +20,13 @@ const ButtonGroup = Button.Group; type Props = {| activeTool: VolumeTool, + // This component should be updated when the zoom changes. + // eslint-disable-next-line react/no-unused-prop-types + zoomStep: number, |}; +const isZoomStepTooHighForTraceTool = () => isVolumeTraceToolDisallowed(Store.getState()); + class VolumeActionsView extends PureComponent { handleSetTool = (event: { target: { value: VolumeTool } }) => { Store.dispatch(setToolAction(event.target.value)); @@ -29,6 +37,14 @@ class VolumeActionsView extends PureComponent { }; render() { + const isTraceToolDisabled = isZoomStepTooHighForTraceTool(); + const traceToolDisabledTooltip = isTraceToolDisabled + ? "Your zoom is low to use the trace tool. Please zoom in further to use it." + : ""; + // TOO unfiy this with the dataset position view. + const maybeErrorColorForTraceTool = isTraceToolDisabled + ? { color: "rgb(255, 155, 85)", borderColor: "rgb(241, 122, 39)" } + : {}; return (
{ @@ -41,7 +57,15 @@ class VolumeActionsView extends PureComponent { style={{ marginRight: 10 }} > Move - Trace + + + Trace + + Brush @@ -58,6 +82,7 @@ class VolumeActionsView extends PureComponent { function mapStateToProps(state: OxalisState): Props { return { activeTool: enforceVolumeTracing(state.tracing).activeTool, + zoomStep: state.flycam.zoomStep, }; } diff --git a/frontend/stylesheets/trace_view/_tracing_view.less b/frontend/stylesheets/trace_view/_tracing_view.less index 9986f5296ce..ce99f7d7e95 100644 --- a/frontend/stylesheets/trace_view/_tracing_view.less +++ b/frontend/stylesheets/trace_view/_tracing_view.less @@ -86,9 +86,6 @@ text-align: right; } } -.zoomstep-warning { - border: 5px solid red; -} .tracing-layout { padding-top: 4px; From 20038635a3a2a2aa23aedf02169137f6f74f4f67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 11 Aug 2020 18:19:24 +0200 Subject: [PATCH 016/121] enable brushing in high again --- frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 2a0b583aae3..6526610b1e2 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -93,7 +93,7 @@ export function* editVolumeLayerAsync(): Generator { const activeTool = yield* select(state => enforceVolumeTracing(state.tracing).activeTool); // The trace tool is not allowed for too high zoom steps. const isZoomStepTooHighForTraceTool = yield* select(isVolumeTraceToolDisallowed); - if (isZoomStepTooHighForTraceTool && activeTool !== VolumeToolEnum.TRACE) { + if (isZoomStepTooHighForTraceTool && activeTool === VolumeToolEnum.TRACE) { continue; } const currentLayer = yield* call(createVolumeLayer, startEditingAction.planeId); From 255286313a4f8d3496efa1fe477c4ca7a1849898 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 12 Aug 2020 15:04:03 +0200 Subject: [PATCH 017/121] do not block resolution loading --- .../tracingstore/tracings/volume/VolumeTracingLayer.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index 899dd2157c2..6e90f5d51f1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -51,4 +51,5 @@ case class VolumeTracingLayer( val mappings: Option[Set[String]] = None val resolutions: List[Point3D] = List(Point3D(1, 1, 1)) + override def containsResolution(resolution: Point3D) = true } From 283075c02197e1406b141e10482b5285956efcfa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 12 Aug 2020 18:56:57 +0200 Subject: [PATCH 018/121] annotate z many slices at once when z resolution is > 1 --- .../oxalis/model/accessors/flycam_accessor.js | 6 ++ .../oxalis/model/sagas/volumetracing_saga.js | 27 ++++++- .../oxalis/model/volumetracing/volumelayer.js | 75 ++++++++++++++++--- .../view/right-menu/dataset_info_tab_view.js | 17 ++--- 4 files changed, 100 insertions(+), 25 deletions(-) diff --git 
a/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js b/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js index ea0819b2493..d92073f51d2 100644 --- a/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js @@ -239,6 +239,12 @@ export function getRequestLogZoomStep(state: OxalisState): number { return Math.min(zoomStep, maxLogZoomStep); } +export function getCurrentResolution(state: OxalisState): Vector3 { + const resolutions = getResolutions(state.dataset); + const logZoomStep = getRequestLogZoomStep(state); + return resolutions[logZoomStep]; +} + export function getValidZoomRangeForUser(state: OxalisState): [number, number] { const maximumZoomSteps = getMaximumZoomForAllResolutionsFromStore(state); const lastZoomStep = _.last(maximumZoomSteps); diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 6526610b1e2..c15c3f22900 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -30,7 +30,11 @@ import { enforceVolumeTracing, isVolumeTraceToolDisallowed, } from "oxalis/model/accessors/volumetracing_accessor"; -import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; +import { + getPosition, + getRotation, + getCurrentResolution, +} from "oxalis/model/accessors/flycam_accessor"; import { type BoundingBoxType, type ContourMode, @@ -101,9 +105,14 @@ export function* editVolumeLayerAsync(): Generator { const initialViewport = yield* select(state => state.viewModeData.plane.activeViewport); const activeViewportBounding = yield* call(getBoundingsFromPosition, initialViewport); if (activeTool === VolumeToolEnum.BRUSH) { + const currentResolution = yield* select(state => getCurrentResolution(state)); yield* call( labelWithIterator, - currentLayer.getCircleVoxelIterator(startEditingAction.position, activeViewportBounding), + currentLayer.getCircleVoxelIterator( + startEditingAction.position, + currentResolution, + activeViewportBounding, + ), contourTracingMode, ); } @@ -131,9 +140,14 @@ export function* editVolumeLayerAsync(): Generator { } if (activeTool === VolumeToolEnum.BRUSH) { const currentViewportBounding = yield* call(getBoundingsFromPosition, activeViewport); + const currentResolution = yield* select(state => getCurrentResolution(state)); yield* call( labelWithIterator, - currentLayer.getCircleVoxelIterator(addToLayerAction.position, currentViewportBounding), + currentLayer.getCircleVoxelIterator( + addToLayerAction.position, + currentResolution, + currentViewportBounding, + ), contourTracingMode, ); } @@ -250,7 +264,12 @@ export function* finishLayer( } if (activeTool === VolumeToolEnum.TRACE || activeTool === VolumeToolEnum.BRUSH) { - yield* call(labelWithIterator, layer.getVoxelIterator(activeTool), contourTracingMode); + const currentResolution = yield* select(state => getCurrentResolution(state)); + yield* call( + labelWithIterator, + layer.getVoxelIterator(activeTool, currentResolution), + contourTracingMode, + ); } yield* put(updateDirectionAction(layer.getCentroid())); diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js index 3192a58969c..1ba6b32f9da 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js 
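The volumelayer.js hunks below generalize VoxelIterator so that one 2D stroke is written to several consecutive slices. As a rough sketch of the intended behavior (the resolution value here is only an assumed example, not taken from a real dataset):

  // For a stroke drawn in the XY viewport, the third dimension is z (index 2).
  // With an active resolution of [4, 4, 2], every labeled pixel is repeated on
  // activeResolution[2] === 2 consecutive z-slices, so the mag-1 annotation stays gap-free.
  const numberOfSlices = activeResolution[Dimensions.thirdDimensionForPlane(planeId)]; // e.g. 2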
@@ -29,11 +29,14 @@ export class VoxelIterator { height: number; minCoord2d: Vector2; get3DCoordinate: Vector2 => Vector3; + numberOfSlices: number; boundingBox: ?BoundingBoxType; next: Vector3; + currentSlice = 0; + thirdDimensionIndex: number; static finished(): VoxelIterator { - const iterator = new VoxelIterator([], 0, 0, [0, 0], () => [0, 0, 0]); + const iterator = new VoxelIterator([], 0, 0, [0, 0], () => [0, 0, 0], 0); iterator.hasNext = false; return iterator; } @@ -44,6 +47,8 @@ export class VoxelIterator { height: number, minCoord2d: Vector2, get3DCoordinate: Vector2 => Vector3, + thirdDimensionIndex: number, + numberOfSlices: number = 1, boundingBox?: ?BoundingBoxType, ) { this.map = map; @@ -51,17 +56,28 @@ export class VoxelIterator { this.height = height; this.minCoord2d = minCoord2d; this.get3DCoordinate = get3DCoordinate; + this.thirdDimensionIndex = thirdDimensionIndex; this.boundingBox = boundingBox; + this.numberOfSlices = numberOfSlices; this.reset(); } - reset() { + get3DCoordinateWithSliceOffset(position: Vector2): Vector3 { + const threeDPosition = this.get3DCoordinate(position); + threeDPosition[this.thirdDimensionIndex] += this.currentSlice; + return threeDPosition; + } + + reset(resetSliceCount: boolean = true) { this.x = 0; this.y = 0; + if (resetSliceCount) { + this.currentSlice = 0; + } if (!this.map || !this.map[0]) { this.hasNext = false; } else { - const firstCoordinate = this.get3DCoordinate(this.minCoord2d); + const firstCoordinate = this.get3DCoordinateWithSliceOffset(this.minCoord2d); if (this.map[0][0] && this.isCoordinateInBounds(firstCoordinate)) { this.next = firstCoordinate; } else { @@ -84,6 +100,15 @@ export class VoxelIterator { ); } + nextSlice() { + ++this.currentSlice; + if (this.currentSlice < this.numberOfSlices) { + this.reset(false); + return true; + } + return false; + } + getNext(): Vector3 { const res = this.next; let foundNext = false; @@ -93,10 +118,13 @@ export class VoxelIterator { this.y++; } if (this.y === this.height) { - foundNext = true; - this.hasNext = false; + const hasNextSlice = this.nextSlice(); + if (!hasNextSlice) { + foundNext = true; + this.hasNext = false; + } } else if (this.map[this.x][this.y]) { - const currentCoordinate = this.get3DCoordinate([ + const currentCoordinate = this.get3DCoordinateWithSliceOffset([ this.x + this.minCoord2d[0], this.y + this.minCoord2d[1], ]); @@ -156,7 +184,7 @@ class VolumeLayer { return this.getContourList().length === 0; } - getVoxelIterator(mode: VolumeTool): VoxelIterator { + getVoxelIterator(mode: VolumeTool, activeResolution: Vector3): VoxelIterator { if (this.isEmpty()) { return VoxelIterator.finished(); } @@ -206,17 +234,26 @@ class VolumeLayer { this.fillOutsideArea(map, width, height); this.drawOutlineVoxels(setMap, mode); + const numberOfSlices = this.getNumberOfSlicesForResolution(activeResolution); + const thirdDimensionIndex = Dimensions.thirdDimensionForPlane(this.plane); + const iterator = new VoxelIterator( map, width, height, minCoord2d, this.get3DCoordinate.bind(this), + thirdDimensionIndex, + numberOfSlices, ); return iterator; } - getCircleVoxelIterator(position: Vector3, boundings?: ?BoundingBoxType): VoxelIterator { + getCircleVoxelIterator( + position: Vector3, + activeResolution: Vector3, + boundings?: ?BoundingBoxType, + ): VoxelIterator { const state = Store.getState(); const { brushSize } = state.userConfiguration; @@ -226,10 +263,7 @@ class VolumeLayer { const map = new Array(width); for (let x = 0; x < width; x++) { - map[x] = new Array(height); - for 
(let y = 0; y < height; y++) { - map[x][y] = false; - } + map[x] = new Array(height).fill(false); } const floatingCoord2d = this.get2DCoordinate(position); const coord2d = [Math.floor(floatingCoord2d[0]), Math.floor(floatingCoord2d[1])]; @@ -245,12 +279,23 @@ class VolumeLayer { }; Drawing.fillCircle(radius, radius, radius, scaleX, scaleY, setMap); + const numberOfSlices = this.getNumberOfSlicesForResolution(activeResolution); + const thirdDimensionIndex = Dimensions.thirdDimensionForPlane(this.plane); + if ( + boundings != null && + boundings.max[thirdDimensionIndex] - boundings.min[thirdDimensionIndex] < numberOfSlices - 1 + ) { + boundings.max[thirdDimensionIndex] = boundings.min[thirdDimensionIndex] + numberOfSlices - 1; + } + const iterator = new VoxelIterator( map, width, height, minCoord2d, this.get3DCoordinate.bind(this), + thirdDimensionIndex, + numberOfSlices, boundings, ); return iterator; @@ -337,6 +382,12 @@ class VolumeLayer { return this.get3DCoordinate([cx, cy]); } + + getNumberOfSlicesForResolution(activeResolution: Vector3) { + const thirdDimenstionIndex = Dimensions.thirdDimensionForPlane(this.plane); + const numberOfSlices = activeResolution[thirdDimenstionIndex]; + return numberOfSlices; + } } export default VolumeLayer; diff --git a/frontend/javascripts/oxalis/view/right-menu/dataset_info_tab_view.js b/frontend/javascripts/oxalis/view/right-menu/dataset_info_tab_view.js index d61f75979fe..ad784f12d69 100644 --- a/frontend/javascripts/oxalis/view/right-menu/dataset_info_tab_view.js +++ b/frontend/javascripts/oxalis/view/right-menu/dataset_info_tab_view.js @@ -9,12 +9,12 @@ import Markdown from "react-remarkable"; import React from "react"; import { APIAnnotationTypeEnum, type APIDataset, type APIUser } from "admin/api_flow_types"; -import { ControlModeEnum } from "oxalis/constants"; +import { ControlModeEnum, type Vector3 } from "oxalis/constants"; import { convertToHybridTracing } from "admin/admin_rest_api"; import { formatScale } from "libs/format_utils"; import { getBaseVoxel } from "oxalis/model/scaleinfo"; import { getDatasetExtentAsString, getResolutions } from "oxalis/model/accessors/dataset_accessor"; -import { getRequestLogZoomStep } from "oxalis/model/accessors/flycam_accessor"; +import { getCurrentResolution } from "oxalis/model/accessors/flycam_accessor"; import { getStats } from "oxalis/model/accessors/skeletontracing_accessor"; import { location } from "libs/window"; import { @@ -34,7 +34,7 @@ type StateProps = {| dataset: APIDataset, task: ?Task, activeUser: ?APIUser, - logZoomStep: number, + activeResolution: Vector3, |}; type DispatchProps = {| setAnnotationName: string => void, @@ -358,14 +358,13 @@ class DatasetInfoTabView extends React.PureComponent { } render() { + const { dataset, activeResolution } = this.props; const isDatasetViewMode = Store.getState().temporaryConfiguration.controlMode === ControlModeEnum.VIEW; - const extentInVoxel = getDatasetExtentAsString(this.props.dataset, true); - const extentInLength = getDatasetExtentAsString(this.props.dataset, false); - - const resolutions = getResolutions(this.props.dataset); - const activeResolution = resolutions[this.props.logZoomStep]; + const extentInVoxel = getDatasetExtentAsString(dataset, true); + const extentInLength = getDatasetExtentAsString(dataset, false); + const resolutions = getResolutions(dataset); const resolutionInfo = activeResolution != null ? 
( @@ -456,7 +455,7 @@ const mapStateToProps = (state: OxalisState): StateProps => ({ dataset: state.dataset, task: state.task, activeUser: state.activeUser, - logZoomStep: getRequestLogZoomStep(state), + activeResolution: getCurrentResolution(state), }); const mapDispatchToProps = (dispatch: Dispatch<*>) => ({ From bf81003804190f3ff6a63f81a9361452fbf3b58d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 13 Aug 2020 12:22:05 +0200 Subject: [PATCH 019/121] fixing a few tests --- frontend/javascripts/oxalis/api/api_v2.js | 6 ++--- .../test/model/binary/cube.spec.js | 18 ++++++------- .../reducers/volumetracing_reducer.spec.js | 26 +++++++++---------- 3 files changed, 25 insertions(+), 25 deletions(-) diff --git a/frontend/javascripts/oxalis/api/api_v2.js b/frontend/javascripts/oxalis/api/api_v2.js index 152572a8580..841f5c35821 100644 --- a/frontend/javascripts/oxalis/api/api_v2.js +++ b/frontend/javascripts/oxalis/api/api_v2.js @@ -404,7 +404,7 @@ class TracingApi { rotation?: Vector3, ): void { // Let the user still manipulate the "third dimension" during animation - const activeViewport = Store.getState().viewModeData.plane.activeViewport; + const { activeViewport } = Store.getState().viewModeData.plane; const dimensionToSkip = skipDimensions && activeViewport !== OrthoViews.TDView ? dimensions.thirdDimensionForPlane(activeViewport) @@ -622,7 +622,7 @@ class DataApi { * api.data.downloadRawDataCuboid("segmentation", [0,0,0], [100,200,100]); */ downloadRawDataCuboid(layerName: string, topLeft: Vector3, bottomRight: Vector3): Promise { - const dataset = Store.getState().dataset; + const { dataset } = Store.getState(); return doWithToken(token => { const downloadUrl = @@ -656,7 +656,7 @@ class DataApi { assertExists(segmentationLayer, "Segmentation layer not found!"); for (const voxel of voxels) { - segmentationLayer.cube.labelVoxel(voxel, label); + segmentationLayer.cube.labelVoxelInResolution(voxel, label, 0); } segmentationLayer.cube.pushQueue.push(); diff --git a/frontend/javascripts/test/model/binary/cube.spec.js b/frontend/javascripts/test/model/binary/cube.spec.js index 7498b0a2d13..102ee06bc56 100644 --- a/frontend/javascripts/test/model/binary/cube.spec.js +++ b/frontend/javascripts/test/model/binary/cube.spec.js @@ -98,7 +98,7 @@ test("GetBucket should only create one bucket on getOrCreateBucket()", t => { test("Voxel Labeling should request buckets when temporal buckets are created", t => { const { cube, pullQueue } = t.context; - cube.labelVoxel([1, 1, 1], 42); + cube.labelVoxelInResolution([1, 1, 1], 42, 0); t.plan(2); return runAsync([ @@ -116,7 +116,7 @@ test("Voxel Labeling should request buckets when temporal buckets are created", test("Voxel Labeling should push buckets after they were pulled", t => { const { cube, pushQueue } = t.context; - cube.labelVoxel([1, 1, 1], 42); + cube.labelVoxelInResolution([1, 1, 1], 42, 0); t.plan(3); let bucket; @@ -142,7 +142,7 @@ test("Voxel Labeling should push buckets immediately if they are pulled already" bucket.pull(); bucket.receiveData(new Uint8Array(32 * 32 * 32 * 3)); - cube.labelVoxel([0, 0, 0], 42); + cube.labelVoxelInResolution([0, 0, 0], 42, 0); t.plan(1); return runAsync([ @@ -155,9 +155,9 @@ test("Voxel Labeling should push buckets immediately if they are pulled already" test("Voxel Labeling should only create one temporal bucket", t => { const { cube } = t.context; // Creates temporal bucket - cube.labelVoxel([0, 0, 0], 42); + cube.labelVoxelInResolution([0, 0, 0], 42, 0); // Uses existing 
temporal bucket - cube.labelVoxel([1, 0, 0], 43); + cube.labelVoxelInResolution([1, 0, 0], 43, 0); const data = cube.getBucket([0, 0, 0, 0]).getData(); @@ -175,7 +175,7 @@ test("Voxel Labeling should merge incoming buckets", t => { // Second voxel should be merged into new data oldData[1] = 67890; - cube.labelVoxel([0, 0, 0], 424242); + cube.labelVoxelInResolution([0, 0, 0], 424242, 0); bucket.pull(); bucket.receiveData(new Uint8Array(oldData.buffer)); @@ -188,15 +188,15 @@ test("Voxel Labeling should merge incoming buckets", t => { test("getDataValue() should return the raw value without a mapping", t => { const { cube } = t.context; const value = 1 * (1 << 16) + 2 * (1 << 8) + 3; - cube.labelVoxel([0, 0, 0], value); + cube.labelVoxelInResolution([0, 0, 0], value, 0); t.is(cube.getDataValue([0, 0, 0]), value); }); test("getDataValue() should return the mapping value if available", t => { const { cube } = t.context; - cube.labelVoxel([0, 0, 0], 42); - cube.labelVoxel([1, 1, 1], 43); + cube.labelVoxelInResolution([0, 0, 0], 42, 0); + cube.labelVoxelInResolution([1, 1, 1], 43, 0); const mapping = []; mapping[42] = 1; diff --git a/frontend/javascripts/test/reducers/volumetracing_reducer.spec.js b/frontend/javascripts/test/reducers/volumetracing_reducer.spec.js index d9decc094c9..e5dc04425f0 100644 --- a/frontend/javascripts/test/reducers/volumetracing_reducer.spec.js +++ b/frontend/javascripts/test/reducers/volumetracing_reducer.spec.js @@ -250,14 +250,17 @@ test("VolumeTracing should update its lastCentroid", t => { }); }); -test("VolumeTracing should add values to the contourList", t => { +const prepareContourListTest = (t, state) => { const contourList = [[4, 6, 9], [1, 2, 3], [9, 3, 2]]; const addToLayerActionFn = VolumeTracingActions.addToLayerAction; - - // Add positions to the contourList - let newState = VolumeTracingReducer(initialState, addToLayerActionFn(contourList[0])); + let newState = VolumeTracingReducer(state, addToLayerActionFn(contourList[0])); newState = VolumeTracingReducer(newState, addToLayerActionFn(contourList[1])); newState = VolumeTracingReducer(newState, addToLayerActionFn(contourList[2])); + return { newState, contourList }; +}; + +test("VolumeTracing should add values to the contourList", t => { + const { newState, contourList } = prepareContourListTest(t, initialState); t.not(newState, initialState); getVolumeTracing(newState.tracing).map(tracing => { @@ -265,9 +268,7 @@ test("VolumeTracing should add values to the contourList", t => { }); }); -test("VolumeTracing should not add values to the contourList if getRequestLogZoomStep(zoomStep) > 1", t => { - const contourList = [[4, 6, 9], [1, 2, 3], [9, 3, 2]]; - const addToLayerActionFn = VolumeTracingActions.addToLayerAction; +test("VolumeTracing should add values to the contourList even if getRequestLogZoomStep(zoomStep) > 1", t => { const alteredState = update(initialState, { flycam: { zoomStep: { $set: 3 }, @@ -276,12 +277,11 @@ test("VolumeTracing should not add values to the contourList if getRequestLogZoo t.true(getRequestLogZoomStep(alteredState) > 1); - // Try to add positions to the contourList - let newState = VolumeTracingReducer(alteredState, addToLayerActionFn(contourList[0])); - newState = VolumeTracingReducer(newState, addToLayerActionFn(contourList[1])); - newState = VolumeTracingReducer(newState, addToLayerActionFn(contourList[2])); - - t.is(newState, alteredState); + const { newState, contourList } = prepareContourListTest(t, alteredState); + t.not(newState, initialState); + 
getVolumeTracing(newState.tracing).map(tracing => { + t.deepEqual(tracing.contourList, contourList); + }); }); test("VolumeTracing should not add values to the contourList if volumetracing is not allowed", t => { From 23b83c06662a417d783c87b4159a4d4e90de3552 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 13 Aug 2020 14:54:08 +0200 Subject: [PATCH 020/121] hopefully fixed all tests --- .../javascripts/test/sagas/volumetracing_saga.spec.js | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/test/sagas/volumetracing_saga.spec.js b/frontend/javascripts/test/sagas/volumetracing_saga.spec.js index ae93d138d92..486d90425a7 100644 --- a/frontend/javascripts/test/sagas/volumetracing_saga.spec.js +++ b/frontend/javascripts/test/sagas/volumetracing_saga.spec.js @@ -110,6 +110,7 @@ test("VolumeTracingSaga should create a volume layer (saga test)", t => { expectValueDeepEqual(t, saga.next(true), take("START_EDITING")); saga.next(startEditingAction); saga.next(ContourModeEnum.DRAW_OVERWRITE); + saga.next(VolumeToolEnum.BRUSH); const startEditingSaga = execCall(t, saga.next(false)); startEditingSaga.next(); const layer = startEditingSaga.next([1, 1, 1]).value; @@ -123,10 +124,10 @@ test("VolumeTracingSaga should add values to volume layer (saga test)", t => { expectValueDeepEqual(t, saga.next(true), take("START_EDITING")); saga.next(startEditingAction); saga.next(ContourModeEnum.DRAW_OVERWRITE); + saga.next(VolumeToolEnum.TRACE); saga.next(false); const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10); saga.next(volumeLayer); - saga.next(VolumeToolEnum.TRACE); saga.next(OrthoViews.PLANE_XY); saga.next(); saga.next({ addToLayerAction: addToLayerActionFn([1, 2, 3]) }); @@ -146,10 +147,10 @@ test("VolumeTracingSaga should finish a volume layer (saga test)", t => { expectValueDeepEqual(t, saga.next(true), take("START_EDITING")); saga.next(startEditingAction); saga.next(ContourModeEnum.DRAW_OVERWRITE); + saga.next(VolumeToolEnum.TRACE); saga.next(false); const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10); saga.next(volumeLayer); - saga.next(VolumeToolEnum.TRACE); saga.next(OrthoViews.PLANE_XY); saga.next(); saga.next({ addToLayerAction: addToLayerActionFn([1, 2, 3]) }); @@ -169,10 +170,10 @@ test("VolumeTracingSaga should finish a volume layer in delete mode (saga test)" expectValueDeepEqual(t, saga.next(true), take("START_EDITING")); saga.next(startEditingAction); saga.next(ContourModeEnum.DELETE_FROM_ACTIVE_CELL); + saga.next(VolumeToolEnum.TRACE); saga.next(false); const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10); saga.next(volumeLayer); - saga.next(VolumeToolEnum.TRACE); saga.next(OrthoViews.PLANE_XY); saga.next(); saga.next({ addToLayerAction: addToLayerActionFn([1, 2, 3]) }); @@ -189,8 +190,8 @@ test("finishLayer saga should emit resetContourAction and then be done (saga tes // $FlowFixMe const saga = finishLayer(mockedVolumeLayer, VolumeToolEnum.TRACE); saga.next(); + saga.next([1, 1, 1]); saga.next(); - const iterator = saga.next(); - expectValueDeepEqual(t, iterator, put(resetContourAction)); + expectValueDeepEqual(t, saga.next(), put(resetContourAction)); t.true(saga.next().done); }); From bb715b77e37ea64d1505717cc1a1f8bd3a40ea3f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 25 Aug 2020 16:59:58 +0200 Subject: [PATCH 021/121] Add info about annotating multiple slices --- .../view/action-bar/volume_actions_view.js | 31 +++++++++++-------- 1 file 
changed, 18 insertions(+), 13 deletions(-) diff --git a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index a23bda29e90..b854368c41b 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -3,7 +3,7 @@ import { Button, Radio, Tooltip } from "antd"; import { connect } from "react-redux"; import React, { PureComponent } from "react"; -import { type VolumeTool, VolumeToolEnum } from "oxalis/constants"; +import { type VolumeTool, VolumeToolEnum, type Vector3 } from "oxalis/constants"; import { document } from "libs/window"; import { enforceVolumeTracing, @@ -11,6 +11,7 @@ import { } from "oxalis/model/accessors/volumetracing_accessor"; import { setToolAction, createCellAction } from "oxalis/model/actions/volumetracing_actions"; import ButtonComponent from "oxalis/view/components/button_component"; +import { getCurrentResolution } from "oxalis/model/accessors/flycam_accessor"; import Store, { type OxalisState } from "oxalis/store"; // Workaround until github.com/facebook/flow/issues/1113 is fixed @@ -24,6 +25,7 @@ type Props = {| // eslint-disable-next-line react/no-unused-prop-types zoomStep: number, isInMergerMode: boolean, + activeResolution: Vector3, |}; const isZoomStepTooHighForTraceTool = () => isVolumeTraceToolDisallowed(Store.getState()); @@ -44,40 +46,42 @@ class VolumeActionsView extends PureComponent { }; render() { + const { activeTool, activeResolution, isInMergerMode } = this.props; + const hasResolutionWithHigherDimension = activeResolution.some(val => val > 1); + const multiSliceAnnotationInfoIcon = hasResolutionWithHigherDimension ? ( + + place holder + + ) : null; const isTraceToolDisabled = isZoomStepTooHighForTraceTool(); const traceToolDisabledTooltip = isTraceToolDisabled ? "Your zoom is too low to use the trace tool. Please zoom in further to use it." : ""; + return (
{ if (document.activeElement) document.activeElement.blur(); }} > - + Move - Trace + Trace {activeTool === "TRACE" ? multiSliceAnnotationInfoIcon : null} - - Brush + + Brush {activeTool === "BRUSH" ? multiSliceAnnotationInfoIcon : null} @@ -97,6 +101,7 @@ function mapStateToProps(state: OxalisState): Props { activeTool: enforceVolumeTracing(state.tracing).activeTool, zoomStep: state.flycam.zoomStep, isInMergerMode: state.temporaryConfiguration.isMergerModeEnabled, + activeResolution: getCurrentResolution(state), }; } From edfec9c13f16245bc95af59d2391cf414f16f8c9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 27 Aug 2020 16:10:23 +0200 Subject: [PATCH 022/121] limit area that gets autofilled --- frontend/javascripts/messages.js | 2 ++ frontend/javascripts/oxalis/constants.js | 2 ++ .../oxalis/model/volumetracing/volumelayer.js | 27 ++++++++++++++----- 3 files changed, 25 insertions(+), 6 deletions(-) diff --git a/frontend/javascripts/messages.js b/frontend/javascripts/messages.js index cc287de3d15..9de3c2422e5 100644 --- a/frontend/javascripts/messages.js +++ b/frontend/javascripts/messages.js @@ -140,6 +140,8 @@ instead. Only enable this option if you understand its effect. All layers will n "This dataset already contains a segmentation layer provided by its author. If you do not wish to base your work on this original segmentation, you can unlink it by confirming this dialog.", "tracing.confirm_remove_fallback_layer.notes": "Note, that this action cannot be undone. Also note, if you already started with your annotation work based on the original segmentation layer, some small chunks of the segmentation might have already been merged into your annotation for technical reasons.", + "tracing.area_to_fill_is_too_big": + "The area you want to fill is too big. Please annotate the area in multiple strokes.", "layouting.missing_custom_layout_info": "The annotation views are separated into four classes. Each of them has their own layouts. If you can't find your layout please open the annotation in the correct view mode or just add it here manually.", "datastore.unknown_type": "Unknown datastore type:", diff --git a/frontend/javascripts/oxalis/constants.js b/frontend/javascripts/oxalis/constants.js index 1b4a059b753..c3d13b9c92d 100644 --- a/frontend/javascripts/oxalis/constants.js +++ b/frontend/javascripts/oxalis/constants.js @@ -141,6 +141,8 @@ const Constants = { BUCKET_WIDTH: 32, BUCKET_SIZE: 32 ** 3, VIEWPORT_WIDTH, + // About the area the brush reaches at maximum radius (pi * 300 ^ 2). + AUTO_FILL_AREA_LIMIT: 200000, // The amount of buckets which is required per layer can be customized // via the settings. 
The value which we expose for customization is a factor diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js index 1ba6b32f9da..8647a320ec4 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js @@ -5,7 +5,7 @@ import _ from "lodash"; -import { +import Constants, { type BoundingBoxType, type OrthoView, type Vector2, @@ -14,10 +14,13 @@ import { VolumeToolEnum, type VolumeTool, } from "oxalis/constants"; +import { V3 } from "libs/mjs"; import { enforceVolumeTracing } from "oxalis/model/accessors/volumetracing_accessor"; import { getBaseVoxelFactors } from "oxalis/model/scaleinfo"; import Dimensions from "oxalis/model/dimensions"; import Drawing from "libs/drawing"; +import messages from "messages"; +import Toast from "libs/toast"; import Store from "oxalis/store"; export class VoxelIterator { @@ -175,6 +178,15 @@ class VolumeLayer { this.maxCoord = maxCoord; } + getArea(): number { + const [maxCoord, minCoord] = [this.maxCoord, this.minCoord]; + if (maxCoord == null || minCoord == null) { + return 0; + } + const difference = V3.sub(maxCoord, minCoord); + return difference[0] * difference[1] * difference[2]; + } + getContourList() { const volumeTracing = enforceVolumeTracing(Store.getState().tracing); return volumeTracing.contourList; @@ -185,11 +197,7 @@ class VolumeLayer { } getVoxelIterator(mode: VolumeTool, activeResolution: Vector3): VoxelIterator { - if (this.isEmpty()) { - return VoxelIterator.finished(); - } - - if (this.minCoord == null) { + if (this.isEmpty() || this.minCoord == null) { return VoxelIterator.finished(); } const minCoord2d = this.get2DCoordinate(this.minCoord); @@ -199,6 +207,13 @@ class VolumeLayer { } const maxCoord2d = this.get2DCoordinate(this.maxCoord); + // The maximum area is scaled by 3 as the min and maxCoord will always be three slices apart, + // because in lines 171 + 172 a value of 2 is subtracted / added when the values get updated. 
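+    // For example, with AUTO_FILL_AREA_LIMIT = 200000 (roughly the area a maximum-radius
+    // brush stroke covers) and the three-slice-deep bounding box, any contour whose bounding
+    // box volume exceeds 200000 * 3 = 600000 voxels is rejected here.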
+ if (this.getArea() > Constants.AUTO_FILL_AREA_LIMIT * 3) { + Toast.info(messages["tracing.area_to_fill_is_too_big"]); + return VoxelIterator.finished(); + } + const width = maxCoord2d[0] - minCoord2d[0] + 1; const height = maxCoord2d[1] - minCoord2d[1] + 1; From 81e44594bb59c1c413b33a98c5cfb4f7e0efa713 Mon Sep 17 00:00:00 2001 From: Youri K Date: Mon, 31 Aug 2020 16:43:58 +0200 Subject: [PATCH 023/121] [WIP] lazy downscaling of volume buckets --- .../datastore/models/Positions.scala | 6 +- .../volume/VolumeTracingBucketHelper.scala | 131 +++++++++++++++++- .../volume/VolumeTracingService.scala | 2 + 3 files changed, 130 insertions(+), 9 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/Positions.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/Positions.scala index fa5e81c2194..cc131f8db26 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/Positions.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/Positions.scala @@ -41,9 +41,9 @@ class VoxelPosition( } class BucketPosition( - protected val globalX: Int, - protected val globalY: Int, - protected val globalZ: Int, + val globalX: Int, + val globalY: Int, + val globalZ: Int, val resolution: Point3D ) extends GenericPosition { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 024a979de68..cd94f0dea18 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -1,10 +1,13 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume +import java.nio.{ByteBuffer, ByteOrder} + import com.scalableminds.util.geometry.Point3D import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.util.tools.ExtendedTypes._ import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass} +import com.scalableminds.webknossos.datastore.services.DataConverter import com.scalableminds.webknossos.tracingstore.tracings.{ FossilDBClient, KeyValueStoreImplicits, @@ -13,8 +16,12 @@ import com.scalableminds.webknossos.tracingstore.tracings.{ import com.scalableminds.webknossos.wrap.WKWMortonHelper import com.typesafe.scalalogging.LazyLogging import net.jpountz.lz4.{LZ4Compressor, LZ4Factory, LZ4FastDecompressor} +import net.liftweb.common._ +import spire.math.{UByte, UInt, ULong, UShort} +import scala.collection.mutable import scala.concurrent.ExecutionContext.Implicits.global +import scala.reflect.ClassTag trait VolumeBucketCompression extends LazyLogging { @@ -52,7 +59,8 @@ trait VolumeTracingBucketHelper extends WKWMortonHelper with KeyValueStoreImplicits with FoxImplicits - with VolumeBucketCompression { + with VolumeBucketCompression + with DataConverter { implicit def volumeDataStore: FossilDBClient @@ -78,17 +86,128 @@ trait VolumeTracingBucketHelper .get(key, version, mayBeEmpty = Some(true)) .futureBox .map( - _.toOption.map { versionedVolumeBucket => - if (versionedVolumeBucket.value sameElements Array[Byte](0)) Fox.empty - else - Fox.successful( - decompressIfNeeded(versionedVolumeBucket.value, 
expectedUncompressedBucketSizeFor(dataLayer))) + _.toOption match { + case Some(versionedVolumeBucket) => + if (versionedVolumeBucket.value sameElements Array[Byte](0)) + if (bucket.resolution.maxDim == 1) Fox.empty else loadHigherResBuckets(dataLayer, bucket, version) + else + Fox.successful( + decompressIfNeeded(versionedVolumeBucket.value, expectedUncompressedBucketSizeFor(dataLayer))) + case _ => + if (bucket.resolution.maxDim == 1 || bucket.resolution.maxDim > 2) Fox.empty + else loadHigherResBuckets(dataLayer, bucket, version) } ) .toFox .flatten } + def loadHigherResBuckets(dataLayer: VolumeTracingLayer, bucket: BucketPosition, version: Option[Long]) = { + val downScaleFactor = bucket.resolution + def downscale[T: ClassTag](data: Array[Array[T]])(nullElement: T) = { + def downscaleImpl(data: Array[T]) = { + def mode(a: Array[T]) = { + val filtered = a.filterNot(_ == nullElement) + if (filtered.isEmpty) nullElement + else filtered.groupBy(i => i).mapValues(_.length).maxBy(_._2)._1 + } + + val factor = bucket.resolution + val extensions = (bucket.bucketLength, bucket.bucketLength, bucket.bucketLength) + + val xGrouped = data.grouped(factor.x).toArray + val yGroupedMap = xGrouped.zipWithIndex.groupBy(_._2 % (extensions._1 / factor.x)) + val yGrouped = yGroupedMap.values.map(_.map(_._1).grouped(factor.y).map(_.flatten).toArray) + val zGroupedMap = yGrouped.map(_.zipWithIndex.groupBy(_._2 % (extensions._2 / factor.y))) + val zGrouped = zGroupedMap.map(_.values.map(_.map(_._1).grouped(factor.z).map(_.flatten).toArray)) + val downScaled = zGrouped.map(yGrouped => yGrouped.map(xGrouped => xGrouped.map(mode)).toArray).toArray + + val res = mutable.ArrayBuffer[T]() + for { + z <- 0 until (extensions._3 / factor.z) + y <- 0 until (extensions._2 / factor.y) + x <- 0 until (extensions._1 / factor.x) + } { + res += downScaled(x)(y)(z) + } + res.toArray + } + val downScaledData = data.map(downscaleImpl) + val res = mutable.ArrayBuffer[T]() + for { + z <- 0 until 32 + y <- 0 until 32 + x <- 0 until 32 + } { + val numBox = x / 16 + y / 16 * 2 + z / 16 * 4 + val adjustedX = x - (x / 16) * 16 + val adjustedY = y - (y / 16) * 16 + val adjustedZ = z - (z / 16) * 16 + res += downScaledData(numBox)(adjustedX + 16 * adjustedY + 256 * adjustedZ) + } + res.toArray + } + + val buckets = for { + z <- 0 until downScaleFactor.z + y <- 0 until downScaleFactor.y + x <- 0 until downScaleFactor.x + } yield { + new BucketPosition(bucket.globalX + x * bucket.bucketLength, + bucket.globalY + y * bucket.bucketLength, + bucket.globalZ + z * bucket.bucketLength, + Point3D(1, 1, 1)) + } + (for { + dataBoxes <- Fox.serialSequence(buckets.toList)(loadBucket(dataLayer, _, version)) + data = if (dataBoxes.forall(_.isEmpty)) + Array.fill[Byte](bucket.volume * dataLayer.bytesPerElement)(0) + else + dataBoxes.flatMap { + case Full(bytes) => bytes + case _ => + Array.fill[Byte](bucket.volume * dataLayer.bytesPerElement)(0) + }.toArray + downscaledData = if (data.length == bucket.volume * dataLayer.bytesPerElement) data + else + convertData(data, dataLayer.elementClass) match { + case data: Array[UByte] => + downscale[UByte](data.grouped(bucket.volume).toArray)(UByte(0)) + .foldLeft( + ByteBuffer + .allocate( + dataLayer.bytesPerElement * data.length / downScaleFactor.x / downScaleFactor.y / downScaleFactor.z) + .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf put el.toByte) + .array + case data: Array[UShort] => + downscale[UShort](data.grouped(bucket.volume).toArray)(UShort(0)) + .foldLeft( + ByteBuffer + .allocate( + 
dataLayer.bytesPerElement * data.length / downScaleFactor.x / downScaleFactor.y / downScaleFactor.z) + .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf putShort el.toShort) + .array + case data: Array[UInt] => + downscale[UInt](data.grouped(bucket.volume).toArray)(UInt(0)) + .foldLeft( + ByteBuffer + .allocate( + dataLayer.bytesPerElement * data.length / downScaleFactor.x / downScaleFactor.y / downScaleFactor.z) + .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf putInt el.toInt) + .array + case data: Array[ULong] => + downscale[ULong](data.grouped(bucket.volume).toArray)(ULong(0)) + .foldLeft( + ByteBuffer + .allocate( + dataLayer.bytesPerElement * data.length / downScaleFactor.x / downScaleFactor.y / downScaleFactor.z) + .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf putLong el.toLong) + .array + case _ => data + } + } yield downscaledData).toFox + } + def saveBucket(dataLayer: VolumeTracingLayer, bucket: BucketPosition, data: Array[Byte], version: Long): Fox[Unit] = { val key = buildBucketKey(dataLayer.name, bucket) volumeDataStore.put(key, version, compressVolumeBucket(data, expectedUncompressedBucketSizeFor(dataLayer))) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index bde5744fa06..d900307d283 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -39,6 +39,8 @@ import com.scalableminds.webknossos.tracingstore.geometry.{ Point3D => ProtoPoint } +import scala.collection.mutable + class VolumeTracingService @Inject()( tracingDataStore: TracingDataStore, config: TracingStoreConfig, From 8520506b3149ad53f6767ed16b849c4c5d8651c4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 7 Sep 2020 10:02:54 +0200 Subject: [PATCH 024/121] lookup-based live downsampling --- .../volume/VolumeTracingBucketHelper.scala | 59 ++++++------------- 1 file changed, 18 insertions(+), 41 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index fc94b80358c..f9fca32f71f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -109,7 +109,7 @@ trait VolumeTracingBucketHelper ) } case _ => - if (bucket.resolution.maxDim == 1 || bucket.resolution.maxDim > 2) Fox.empty + if (bucket.resolution.maxDim == 1 || bucket.resolution.maxDim > 4) Fox.empty else loadHigherResBuckets(dataLayer, bucket, version) } ) @@ -121,51 +121,27 @@ trait VolumeTracingBucketHelper bucket: BucketPosition, version: Option[Long]): Fox[Array[Byte]] = { val downScaleFactor = bucket.resolution - def downscale[T: ClassTag](data: Array[Array[T]])(nullElement: T) = { - def downscaleImpl(data: Array[T]) = { - def mode(a: Array[T]) = { - val filtered = a.filterNot(_ == nullElement) - if (filtered.isEmpty) nullElement - else filtered.groupBy(i => i).mapValues(_.length).maxBy(_._2)._1 - } - val factor = bucket.resolution - val extensions = (bucket.bucketLength, 
bucket.bucketLength, bucket.bucketLength) - - val xGrouped = data.grouped(factor.x).toArray - val yGroupedMap = xGrouped.zipWithIndex.groupBy(_._2 % (extensions._1 / factor.x)) - val yGrouped = yGroupedMap.values.map(_.map(_._1).grouped(factor.y).map(_.flatten).toArray) - val zGroupedMap = yGrouped.map(_.zipWithIndex.groupBy(_._2 % (extensions._2 / factor.y))) - val zGrouped = zGroupedMap.map(_.values.map(_.map(_._1).grouped(factor.z).map(_.flatten).toArray)) - val downScaled = zGrouped.map(yGrouped => yGrouped.map(xGrouped => xGrouped.map(mode)).toArray).toArray - - val res = mutable.ArrayBuffer[T]() - for { - z <- 0 until (extensions._3 / factor.z) - y <- 0 until (extensions._2 / factor.y) - x <- 0 until (extensions._1 / factor.x) - } { - res += downScaled(x)(y)(z) - } - res.toArray - } - val downScaledData = data.map(downscaleImpl) - val res = mutable.ArrayBuffer[T]() + def downscale[T: ClassTag](data: Array[Array[T]]): Array[T] = { + val result = new Array[T](32 * 32 * 32) for { z <- 0 until 32 y <- 0 until 32 x <- 0 until 32 } { - val numBox = x / 16 + y / 16 * 2 + z / 16 * 4 - val adjustedX = x - (x / 16) * 16 - val adjustedY = y - (y / 16) * 16 - val adjustedZ = z - (z / 16) * 16 - res += downScaledData(numBox)(adjustedX + 16 * adjustedY + 256 * adjustedZ) + val sourceVoxelPosition = Point3D(x * downScaleFactor.x, y * downScaleFactor.y, z * downScaleFactor.z) + val sourceBucketPosition = + Point3D(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) + val sourceVoxelPositionInSourceBucket = + Point3D(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) + val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z + val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 + result(x + y * 32 + z * 32 * 32) = data(sourceBucketIndex)(sourceVoxelIndex) } - res.toArray + result } - val buckets = for { + val buckets: Seq[BucketPosition] = for { z <- 0 until downScaleFactor.z y <- 0 until downScaleFactor.y x <- 0 until downScaleFactor.x @@ -175,6 +151,7 @@ trait VolumeTracingBucketHelper bucket.globalZ + z * bucket.bucketLength, Point3D(1, 1, 1)) } + logger.info(s"downsampling bucket from ${buckets.length} buckets...") (for { dataBoxes <- Fox.serialSequence(buckets.toList)(loadBucket(dataLayer, _, version)) data = if (dataBoxes.forall(_.isEmpty)) @@ -189,7 +166,7 @@ trait VolumeTracingBucketHelper else convertData(data, dataLayer.elementClass) match { case data: Array[UByte] => - downscale[UByte](data.grouped(bucket.volume).toArray)(UByte(0)) + downscale[UByte](data.grouped(bucket.volume).toArray) .foldLeft( ByteBuffer .allocate( @@ -197,7 +174,7 @@ trait VolumeTracingBucketHelper .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf put el.toByte) .array case data: Array[UShort] => - downscale[UShort](data.grouped(bucket.volume).toArray)(UShort(0)) + downscale[UShort](data.grouped(bucket.volume).toArray) .foldLeft( ByteBuffer .allocate( @@ -205,7 +182,7 @@ trait VolumeTracingBucketHelper .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf putShort el.toShort) .array case data: Array[UInt] => - downscale[UInt](data.grouped(bucket.volume).toArray)(UInt(0)) + downscale[UInt](data.grouped(bucket.volume).toArray) .foldLeft( ByteBuffer .allocate( @@ -213,7 +190,7 @@ trait VolumeTracingBucketHelper .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf putInt 
el.toInt) .array case data: Array[ULong] => - downscale[ULong](data.grouped(bucket.volume).toArray)(ULong(0)) + downscale[ULong](data.grouped(bucket.volume).toArray) .foldLeft( ByteBuffer .allocate( From ee3a989b0d63468d8004bc7cd1ebf309829d3701 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 7 Sep 2020 11:49:29 +0200 Subject: [PATCH 025/121] [WIP] downsample volume tracings on upload --- .../datastore/models/Positions.scala | 3 + .../controllers/VolumeTracingController.scala | 3 +- .../volume/VolumeTracingBucketHelper.scala | 105 +++++++++--------- .../volume/VolumeTracingService.scala | 73 ++++++++---- 4 files changed, 113 insertions(+), 71 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/Positions.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/Positions.scala index cc131f8db26..4210bafd8a4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/Positions.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/Positions.scala @@ -82,6 +82,9 @@ class BucketPosition( bucketLength * resolution.x, bucketLength * resolution.y, bucketLength * resolution.z) + + override def hashCode(): Int = + new HashCodeBuilder(17, 31).append(globalX).append(globalY).append(globalZ).append(resolution).toHashCode } class CubePosition( diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 5f9cb7252d5..300bc84fc7e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -58,7 +58,8 @@ class VolumeTracingController @Inject()(val tracingService: VolumeTracingService for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - _ <- tracingService.initializeWithData(tracingId, tracing, initialData) + _ <- tracingService.initializeWithData(tracingId, tracing, initialData).toFox + _ = tracingService.downsample(tracingId: String, tracing: VolumeTracing) } yield Ok(Json.toJson(tracingId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index f9fca32f71f..22d1e29e7c2 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -19,7 +19,6 @@ import net.jpountz.lz4.{LZ4Compressor, LZ4Factory, LZ4FastDecompressor} import net.liftweb.common._ import spire.math.{UByte, UInt, ULong, UShort} -import scala.collection.mutable import scala.concurrent.ExecutionContext.Implicits.global import scala.reflect.ClassTag @@ -66,29 +65,68 @@ trait VolumeBucketCompression extends LazyLogging { } } -trait VolumeTracingBucketHelper - extends WKWMortonHelper - with KeyValueStoreImplicits - with FoxImplicits - with VolumeBucketCompression - with DataConverter { - - implicit def volumeDataStore: FossilDBClient - - private def 
buildKeyPrefix(dataLayerName: String, resolution: Int): String = - s"$dataLayerName/$resolution/" - - private def buildBucketKey(dataLayerName: String, bucket: BucketPosition): String = { +trait BucketKeys + extends WKWMortonHelper { + protected def buildBucketKey(dataLayerName: String, bucket: BucketPosition): String = { val mortonIndex = mortonEncode(bucket.x, bucket.y, bucket.z) s"$dataLayerName/${formatResolution(bucket.resolution)}/$mortonIndex-[${bucket.x},${bucket.y},${bucket.z}]" } - private def formatResolution(resolution: Point3D): String = + protected def formatResolution(resolution: Point3D): String = if (resolution.x == resolution.y && resolution.x == resolution.z) s"${resolution.maxDim}" else s"${resolution.x}-${resolution.y}-${resolution.z}" + protected def buildKeyPrefix(dataLayerName: String, resolution: Int): String = + s"$dataLayerName/$resolution/" + + protected def parseBucketKey(key: String): Option[(String, BucketPosition)] = { + val keyRx = "([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/-?\\d+-\\[(\\d+),(\\d+),(\\d+)]".r + + key match { + case keyRx(name, resolutionStr, xStr, yStr, zStr) => + val resolutionOpt = parseResolution(resolutionStr) + resolutionOpt match { + case Some(resolution) => + val x = xStr.toInt + val y = yStr.toInt + val z = zStr.toInt + val bucket = new BucketPosition(x * resolution.x * DataLayer.bucketLength, + y * resolution.y * DataLayer.bucketLength, + z * resolution.z * DataLayer.bucketLength, + resolution) + Some((name, bucket)) + case _ => None + } + + case _ => + None + } + } + + protected def parseResolution(resolutionStr: String): Option[Point3D] = + resolutionStr.toIntOpt match { + case Some(resolutionInt) => Some(Point3D(resolutionInt, resolutionInt, resolutionInt)) + case None => + val pattern = """(\d+)-(\d+)-(\d+)""".r + resolutionStr match { + case pattern(x, y, z) => Some(Point3D(x.toInt, y.toInt, z.toInt)) + case _ => None + } + } + +} + +trait VolumeTracingBucketHelper + extends KeyValueStoreImplicits + with FoxImplicits + with VolumeBucketCompression + with DataConverter + with BucketKeys { + + implicit def volumeDataStore: FossilDBClient + def loadBucket(dataLayer: VolumeTracingLayer, bucket: BucketPosition, version: Option[Long] = None): Fox[Array[Byte]] = { @@ -227,7 +265,7 @@ class VersionedBucketIterator(prefix: String, expectedUncompressedBucketSize: Int, version: Option[Long] = None) extends Iterator[(BucketPosition, Array[Byte], Long)] - with WKWMortonHelper + with BucketKeys with KeyValueStoreImplicits with VolumeBucketCompression with FoxImplicits { @@ -260,41 +298,6 @@ class VersionedBucketIterator(prefix: String, .get } - private def parseBucketKey(key: String): Option[(String, BucketPosition)] = { - val keyRx = "([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/-?\\d+-\\[(\\d+),(\\d+),(\\d+)]".r - - key match { - case keyRx(name, resolutionStr, xStr, yStr, zStr) => - val resolutionOpt = parseResolution(resolutionStr) - resolutionOpt match { - case Some(resolution) => - val x = xStr.toInt - val y = yStr.toInt - val z = zStr.toInt - val bucket = new BucketPosition(x * resolution.x * DataLayer.bucketLength, - y * resolution.y * DataLayer.bucketLength, - z * resolution.z * DataLayer.bucketLength, - resolution) - Some((name, bucket)) - case _ => None - } - - case _ => - None - } - } - - private def parseResolution(resolutionStr: String): Option[Point3D] = - resolutionStr.toIntOpt match { - case Some(resolutionInt) => Some(Point3D(resolutionInt, resolutionInt, resolutionInt)) - case None => - val pattern = """(\d+)-(\d+)-(\d+)""".r - 
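+        // e.g. an anisotropic key segment such as "2-2-1" parses to Point3D(2, 2, 1); an
+        // isotropic "2" was already handled above and yields Point3D(2, 2, 2).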
resolutionStr match { - case pattern(x, y, z) => Some(Point3D(x.toInt, y.toInt, z.toInt)) - case _ => None - } - } - } class BucketIterator(prefix: String, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index d900307d283..1d6ce3fb3b4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -7,37 +7,30 @@ import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Point3D} import com.scalableminds.webknossos.datastore.dataformats.wkw.{WKWBucketStreamSink, WKWDataFormatHelper} import com.scalableminds.webknossos.datastore.models.BucketPosition -import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, ElementClass, SegmentationLayer} +import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, SegmentationLayer} import com.scalableminds.webknossos.tracingstore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} -import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.models.DataRequestCollection.DataRequestCollection import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest import com.scalableminds.webknossos.datastore.services.BinaryDataService -import com.scalableminds.webknossos.datastore.storage.TemporaryStore -import com.scalableminds.webknossos.tracingstore.SkeletonTracing.SkeletonTracing + +import collection.mutable.HashMap import com.scalableminds.webknossos.tracingstore.{RedisTemporaryStore, TracingStoreConfig} import com.scalableminds.webknossos.wrap.WKWFile import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.libs.Files import play.api.libs.Files.TemporaryFileCreator -import play.api.libs.iteratee.Concurrent.Channel import scala.concurrent.duration._ -import play.api.libs.iteratee.{Concurrent, Enumerator, Input} -import play.api.libs.json.{JsObject, Json} +import play.api.libs.iteratee.Enumerator +import play.api.libs.json.{JsObject, JsValue, Json} -import scala.concurrent.{ExecutionContext, Future} +import scala.concurrent.Future import scala.concurrent.ExecutionContext.Implicits.global -import scala.util.Try -import com.scalableminds.webknossos.tracingstore.geometry.{ - NamedBoundingBox, - BoundingBox => ProtoBox, - Point3D => ProtoPoint -} +import com.scalableminds.webknossos.tracingstore.geometry.NamedBoundingBox import scala.collection.mutable @@ -55,9 +48,9 @@ class VolumeTracingService @Inject()( with FoxImplicits with LazyLogging { - implicit val volumeDataStore = tracingDataStore.volumeData + implicit val volumeDataStore: FossilDBClient = tracingDataStore.volumeData - implicit val tracingCompanion = VolumeTracing + implicit val tracingCompanion: VolumeTracing.type = VolumeTracing implicit val updateActionJsonFormat = VolumeUpdateAction.volumeUpdateActionFormat @@ -204,7 +197,7 @@ class VolumeTracingService @Inject()( case failure: scala.util.Failure[Unit] => logger.debug( s"Failed to send zipped volume data for $tracingId: 
${TextUtils.stackTraceAsString(failure.exception)}") - case success: scala.util.Success[Unit] => logger.debug(s"Successfully sent zipped volume data for $tracingId") + case _: scala.util.Success[Unit] => logger.debug(s"Successfully sent zipped volume data for $tracingId") } zipResult } @@ -241,7 +234,7 @@ class VolumeTracingService @Inject()( def duplicateData(sourceId: String, sourceTracing: VolumeTracing, destinationId: String, - destinationTracing: VolumeTracing) = { + destinationTracing: VolumeTracing): Fox[Unit] = { val sourceDataLayer = volumeTracingLayer(sourceId, sourceTracing) val destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) val buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream(1) @@ -277,7 +270,7 @@ class VolumeTracingService @Inject()( def dataLayerForVolumeTracing(tracingId: String, dataSource: DataSource): Fox[SegmentationLayer] = find(tracingId).map(volumeTracingLayerWithFallback(tracingId, _, dataSource)) - def updateActionLog(tracingId: String) = { + def updateActionLog(tracingId: String): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[CompactVolumeUpdateAction])): JsObject = Json.obj( "version" -> tuple._1, @@ -290,4 +283,46 @@ class VolumeTracingService @Inject()( updateActionGroupsJs = volumeTracings.map(versionedTupleToJson) } yield Json.toJson(updateActionGroupsJs) } + + def downsample(tracingId: String, tracing: VolumeTracing): Unit = { + //TODO: + // - skip if already downsampled + // - figure out which resolutions to create + // - list all keys first, before fetching actual data + val data: List[VersionedKeyValuePair[Array[Byte]]] = tracingDataStore.volumeData.getMultipleKeys(tracingId, Some(tracingId)) + val keyValueMap = new mutable.HashMap[String,Array[Byte]]() { override def default(key:String): Array[Byte] = Array[Byte](0) } + val keys = data.map(_.key) + data.foreach { keyValuePair: VersionedKeyValuePair[Array[Byte]] => + keyValueMap(keyValuePair.key) = keyValuePair.value + } + val originalBucketPositions: Seq[BucketPosition] = keys.flatMap(parseBucketKey).map(_._2) + val originalMag = Point3D(1,1,1) + val requiredMags = Seq(Point3D(2,2,1), Point3D(4,4,1), Point3D(8,8,2)) + requiredMags.foldLeft(originalMag) { + (previousMag, requiredMag) => + logger.info(s"downsampling mag $requiredMag from mag $previousMag...") + val requiredBucketPositions: mutable.HashSet[BucketPosition] = new mutable.HashSet[BucketPosition]() + originalBucketPositions.foreach { bucketPosition: BucketPosition => + val downsampledBucketPosition = new BucketPosition((bucketPosition.globalX / requiredMag.x) * requiredMag.x, + (bucketPosition.globalY / requiredMag.y) * requiredMag.y, + (bucketPosition.globalZ / requiredMag.z) * requiredMag.z, + requiredMag) + requiredBucketPositions.add(downsampledBucketPosition) + } + val downScaleFactor = Point3D(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) + requiredBucketPositions.foreach { bucketPosition => + val sourceBuckets: Seq[BucketPosition] = for { + z <- 0 until downScaleFactor.z + y <- 0 until downScaleFactor.y + x <- 0 until downScaleFactor.x + } yield { + new BucketPosition(bucketPosition.globalX + x * bucketPosition.bucketLength, + bucketPosition.globalY + y * bucketPosition.bucketLength, + bucketPosition.globalZ + z * bucketPosition.bucketLength, + previousMag) + } + } + requiredMag + } + } } From 5380b198b8da7c177f152c236fa33c7c0a022608 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 7 Sep 2020 
14:18:34 +0200 Subject: [PATCH 026/121] [WIP] downscale during upload --- .../controllers/VolumeTracingController.scala | 3 +- .../volume/VolumeTracingBucketHelper.scala | 32 +++--- .../volume/VolumeTracingService.scala | 100 +++++++++++++++--- 3 files changed, 98 insertions(+), 37 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 3705bb8cc98..c72118f8762 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -90,7 +90,8 @@ class VolumeTracingController @Inject()(val tracingService: VolumeTracingService for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - _ <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData) + _ <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData).toFox + - <- tracingService.downsample(tracingId, tracing) } yield Ok(Json.toJson(tracingId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index a339fe0c6c7..06d4654d3a5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -68,8 +68,7 @@ trait VolumeBucketCompression extends LazyLogging { } } -trait BucketKeys - extends WKWMortonHelper { +trait BucketKeys extends WKWMortonHelper { protected def buildBucketKey(dataLayerName: String, bucket: BucketPosition): String = { val mortonIndex = mortonEncode(bucket.x, bucket.y, bucket.z) s"$dataLayerName/${formatResolution(bucket.resolution)}/$mortonIndex-[${bucket.x},${bucket.y},${bucket.z}]" @@ -96,9 +95,9 @@ trait BucketKeys val y = yStr.toInt val z = zStr.toInt val bucket = BucketPosition(x * resolution.x * DataLayer.bucketLength, - y * resolution.y * DataLayer.bucketLength, - z * resolution.z * DataLayer.bucketLength, - resolution) + y * resolution.y * DataLayer.bucketLength, + z * resolution.z * DataLayer.bucketLength, + resolution) Some((name, bucket)) case _ => None } @@ -146,20 +145,15 @@ trait VolumeTracingBucketHelper } dataFox.futureBox .map( - _.toOption match { - case Some(versionedVolumeBucket) => - if (versionedVolumeBucket.value sameElements Array[Byte](0)) - if (bucket.resolution.maxDim == 1) Fox.empty else loadHigherResBuckets(dataLayer, bucket, version) - else { - val debugInfo = - s"key: $key, ${versionedVolumeBucket.value.length} bytes, version ${versionedVolumeBucket.version}" - Fox.successful( - decompressIfNeeded(versionedVolumeBucket.value, expectedUncompressedBucketSizeFor(dataLayer), debugInfo) - ) - } - case _ => - if (bucket.resolution.maxDim == 1 || bucket.resolution.maxDim > 4) Fox.empty - else loadHigherResBuckets(dataLayer, bucket, version) + _.toOption.map { versionedVolumeBucket => + if (versionedVolumeBucket.value sameElements Array[Byte](0)) Fox.empty + else { + val debugInfo = + s"key: $key, 
${versionedVolumeBucket.value.length} bytes, version ${versionedVolumeBucket.version}" + Fox.successful( + decompressIfNeeded(versionedVolumeBucket.value, expectedUncompressedBucketSizeFor(dataLayer), debugInfo) + ) + } } ) .toFox diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 30bdc21c947..4998c3bbcf8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -15,6 +15,7 @@ import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.models.DataRequestCollection.DataRequestCollection import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest + import collection.mutable.HashMap import com.scalableminds.webknossos.tracingstore.{RedisTemporaryStore, TracingStoreConfig} import com.scalableminds.webknossos.datastore.services.{BinaryDataService, DataConverter} @@ -35,6 +36,7 @@ import scala.concurrent.ExecutionContext.Implicits.global import com.scalableminds.webknossos.tracingstore.geometry.NamedBoundingBox import scala.collection.mutable +import scala.reflect.ClassTag class VolumeTracingService @Inject()( tracingDataStore: TracingDataStore, @@ -406,46 +408,110 @@ class VolumeTracingService @Inject()( } yield Json.toJson(updateActionGroupsJs) } - def downsample(tracingId: String, tracing: VolumeTracing): Unit = { + def downsample(tracingId: String, tracing: VolumeTracing): Fox[Unit] = { //TODO: // - skip if already downsampled // - figure out which resolutions to create // - list all keys first, before fetching actual data - val data: List[VersionedKeyValuePair[Array[Byte]]] = tracingDataStore.volumeData.getMultipleKeys(tracingId, Some(tracingId)) - val keyValueMap = new mutable.HashMap[String,Array[Byte]]() { override def default(key:String): Array[Byte] = Array[Byte](0) } - val keys = data.map(_.key) + val dataLayer = volumeTracingLayer(tracingId, tracing) + val elementClass = elementClassFromProto(tracing.elementClass) + + val data: List[VersionedKeyValuePair[Array[Byte]]] = + tracingDataStore.volumeData.getMultipleKeys(tracingId, Some(tracingId)) + val bucketDataMap = new mutable.HashMap[BucketPosition, Array[Byte]]() { + override def default(key: BucketPosition): Array[Byte] = Array[Byte](0) + } data.foreach { keyValuePair: VersionedKeyValuePair[Array[Byte]] => - keyValueMap(keyValuePair.key) = keyValuePair.value + val bucketPosition = parseBucketKey(keyValuePair.key).map(_._2) + bucketPosition.foreach { + bucketDataMap(_) = decompressIfNeeded(keyValuePair.value, + expectedUncompressedBucketSizeFor(dataLayer), + s"bucket $bucketPosition during downsampling") + } } - val originalBucketPositions: Seq[BucketPosition] = keys.flatMap(parseBucketKey).map(_._2) - val originalMag = Point3D(1,1,1) - val requiredMags = Seq(Point3D(2,2,1), Point3D(4,4,1), Point3D(8,8,2)) - requiredMags.foldLeft(originalMag) { - (previousMag, requiredMag) => + val originalBucketPositions: Seq[BucketPosition] = bucketDataMap.keys.toList + + val originalMag = Point3D(1, 1, 1) + val requiredMags = Seq(Point3D(2, 2, 2), Point3D(4, 4, 4), Point3D(8, 8, 8), Point3D(16, 16, 16)) + val bucketVolume = 32 * 32 * 32 + 
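+      // Each required mag is downsampled from the previously created mag (1 -> 2 -> 4 -> 8 -> 16), + // so every step only has to combine a 2x2x2 block of source buckets into one target bucket.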
+ val updatedBuckets = new mutable.HashSet[BucketPosition]() + requiredMags.foldLeft(originalMag) { (previousMag, requiredMag) => logger.info(s"downsampling mag $requiredMag from mag $previousMag...") val requiredBucketPositions: mutable.HashSet[BucketPosition] = new mutable.HashSet[BucketPosition]() originalBucketPositions.foreach { bucketPosition: BucketPosition => - val downsampledBucketPosition = new BucketPosition((bucketPosition.globalX / requiredMag.x) * requiredMag.x, - (bucketPosition.globalY / requiredMag.y) * requiredMag.y, - (bucketPosition.globalZ / requiredMag.z) * requiredMag.z, - requiredMag) + val downsampledBucketPosition = BucketPosition( + (bucketPosition.globalX / requiredMag.x / 32) * requiredMag.x * 32, + (bucketPosition.globalY / requiredMag.y / 32) * requiredMag.y * 32, + (bucketPosition.globalZ / requiredMag.z / 32) * requiredMag.z * 32, + requiredMag + ) requiredBucketPositions.add(downsampledBucketPosition) } - val downScaleFactor = Point3D(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) + val downScaleFactor = + Point3D(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) + logger.info(s"creating buckets $requiredBucketPositions...") requiredBucketPositions.foreach { bucketPosition => val sourceBuckets: Seq[BucketPosition] = for { z <- 0 until downScaleFactor.z y <- 0 until downScaleFactor.y x <- 0 until downScaleFactor.x } yield { - BucketPosition(bucketPosition.globalX + x * bucketPosition.bucketLength, + BucketPosition( + bucketPosition.globalX + x * bucketPosition.bucketLength, bucketPosition.globalY + y * bucketPosition.bucketLength, bucketPosition.globalZ + z * bucketPosition.bucketLength, - previousMag) + previousMag + ) } + val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMap(_)) + val downsampledData: Array[Byte] = + if (sourceData.forall(_.sameElements(Array[Byte](0)))) + Array[Byte](0) + else { + val sourceDataFilled = sourceData.map { sourceBucketData => + if (sourceBucketData.sameElements(Array[Byte](0))) { + Array.fill[Byte](bucketVolume * dataLayer.bytesPerElement)(0) + } else sourceBucketData + } + val sourceDataTyped: Array[UnsignedInteger] = + UnsignedIntegerArray.fromByteArray(sourceDataFilled.toArray.flatten, elementClass) + val dataDownscaledTyped: Array[UnsignedInteger] = + downscale(sourceDataTyped.grouped(bucketVolume).toArray, downScaleFactor) + UnsignedIntegerArray.toByteArray(dataDownscaledTyped, elementClass) + } + bucketDataMap(bucketPosition) = downsampledData + updatedBuckets.add(bucketPosition) + } requiredMag } + for { + _ <- Fox.serialCombined(updatedBuckets.toList) { bucketPosition: BucketPosition => + saveBucket(dataLayer, bucketPosition, bucketDataMap(bucketPosition), tracing.version + 1L, toCache = true) + } + } yield () + // TODO: remove toCache + // TODO: update tracing version + } + + private def downscale[T: ClassTag](data: Array[Array[T]], downScaleFactor: Point3D): Array[T] = { + val result = new Array[T](32 * 32 * 32) + for { + z <- 0 until 32 + y <- 0 until 32 + x <- 0 until 32 + } { + val sourceVoxelPosition = Point3D(x * downScaleFactor.x, y * downScaleFactor.y, z * downScaleFactor.z) + val sourceBucketPosition = + Point3D(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) + val sourceVoxelPositionInSourceBucket = + Point3D(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) + val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * 
downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z + val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 + result(x + y * 32 + z * 32 * 32) = data(sourceBucketIndex)(sourceVoxelIndex) + } + result } def merge(tracings: Seq[VolumeTracing]): VolumeTracing = tracings.reduceLeft(mergeTwo) From fd114f31efbb14ea3c0161325358b0fe5410ce7d Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 7 Sep 2020 15:17:56 +0200 Subject: [PATCH 027/121] fix offsets, switch to mode --- .../volume/VolumeTracingService.scala | 43 ++++++++++++------- 1 file changed, 27 insertions(+), 16 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 4998c3bbcf8..006fb971e4b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -413,6 +413,7 @@ class VolumeTracingService @Inject()( // - skip if already downsampled // - figure out which resolutions to create // - list all keys first, before fetching actual data + // - update tracing version? val dataLayer = volumeTracingLayer(tracingId, tracing) val elementClass = elementClassFromProto(tracing.elementClass) @@ -437,7 +438,7 @@ class VolumeTracingService @Inject()( val updatedBuckets = new mutable.HashSet[BucketPosition]() requiredMags.foldLeft(originalMag) { (previousMag, requiredMag) => - logger.info(s"downsampling mag $requiredMag from mag $previousMag...") + // logger.info(s"downsampling mag $requiredMag from mag $previousMag...") val requiredBucketPositions: mutable.HashSet[BucketPosition] = new mutable.HashSet[BucketPosition]() originalBucketPositions.foreach { bucketPosition: BucketPosition => val downsampledBucketPosition = BucketPosition( @@ -450,7 +451,7 @@ class VolumeTracingService @Inject()( } val downScaleFactor = Point3D(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) - logger.info(s"creating buckets $requiredBucketPositions...") + // logger.info(s"creating buckets $requiredBucketPositions...") requiredBucketPositions.foreach { bucketPosition => val sourceBuckets: Seq[BucketPosition] = for { z <- 0 until downScaleFactor.z @@ -458,12 +459,13 @@ class VolumeTracingService @Inject()( x <- 0 until downScaleFactor.x } yield { BucketPosition( - bucketPosition.globalX + x * bucketPosition.bucketLength, - bucketPosition.globalY + y * bucketPosition.bucketLength, - bucketPosition.globalZ + z * bucketPosition.bucketLength, + bucketPosition.globalX + x * bucketPosition.bucketLength * previousMag.x, + bucketPosition.globalY + y * bucketPosition.bucketLength * previousMag.y, + bucketPosition.globalZ + z * bucketPosition.bucketLength * previousMag.z, previousMag ) } + // logger.info(s"source buckets: $sourceBuckets") val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMap(_)) val downsampledData: Array[Byte] = if (sourceData.forall(_.sameElements(Array[Byte](0)))) @@ -488,11 +490,9 @@ class VolumeTracingService @Inject()( } for { _ <- Fox.serialCombined(updatedBuckets.toList) { bucketPosition: BucketPosition => - saveBucket(dataLayer, bucketPosition, bucketDataMap(bucketPosition), tracing.version + 1L, 
toCache = true) + saveBucket(dataLayer, bucketPosition, bucketDataMap(bucketPosition), tracing.version + 1L) } } yield () - // TODO: remove toCache - // TODO: update tracing version } private def downscale[T: ClassTag](data: Array[Array[T]], downScaleFactor: Point3D): Array[T] = { @@ -502,18 +502,29 @@ class VolumeTracingService @Inject()( y <- 0 until 32 x <- 0 until 32 } { - val sourceVoxelPosition = Point3D(x * downScaleFactor.x, y * downScaleFactor.y, z * downScaleFactor.z) - val sourceBucketPosition = - Point3D(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) - val sourceVoxelPositionInSourceBucket = - Point3D(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) - val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z - val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 - result(x + y * 32 + z * 32 * 32) = data(sourceBucketIndex)(sourceVoxelIndex) + val voxelSourceData: IndexedSeq[T] = for { + z_offset <- 0 until downScaleFactor.z + y_offset <- 0 until downScaleFactor.y + x_offset <- 0 until downScaleFactor.x + } yield { + val sourceVoxelPosition = + Point3D(x * downScaleFactor.x + x_offset, y * downScaleFactor.y + y_offset, z * downScaleFactor.z + z_offset) + val sourceBucketPosition = + Point3D(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) + val sourceVoxelPositionInSourceBucket = + Point3D(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) + val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z + val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 + data(sourceBucketIndex)(sourceVoxelIndex) + } + result(x + y * 32 + z * 32 * 32) = mode(voxelSourceData) } result } + private def mode[T](items: Seq[T]): T = + items.groupBy(i => i).mapValues(_.size).maxBy(_._2)._1 + def merge(tracings: Seq[VolumeTracing]): VolumeTracing = tracings.reduceLeft(mergeTwo) def mergeTwo(tracingA: VolumeTracing, tracingB: VolumeTracing): VolumeTracing = { From 15ca6b025d2b852534c1ffa833eeaa2f7869f6be Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 8 Sep 2020 14:58:07 +0200 Subject: [PATCH 028/121] determine which mags to downsample --- app/controllers/AnnotationIOController.scala | 27 ++- app/controllers/TracingStoreController.scala | 1 + .../WKTracingStoreController.scala | 34 +++- app/models/annotation/nml/NmlParser.scala | 35 ++-- app/models/annotation/nml/NmlResults.scala | 7 +- app/models/annotation/nml/NmlService.scala | 20 +- conf/webknossos.latest.routes | 1 + .../TracingStoreWkRpcClient.scala | 7 + .../volume/VolumeTracingBucketHelper.scala | 85 -------- .../volume/VolumeTracingDownsampling.scala | 192 ++++++++++++++++++ .../tracings/volume/VolumeTracingLayer.scala | 2 +- .../volume/VolumeTracingService.scala | 126 +----------- .../proto/SkeletonTracing.proto | 1 + .../proto/VolumeTracing.proto | 1 + 14 files changed, 279 insertions(+), 260 deletions(-) create mode 100644 webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala diff --git a/app/controllers/AnnotationIOController.scala 
b/app/controllers/AnnotationIOController.scala index 083aa8f2b86..93a213c47cd 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -82,9 +82,7 @@ class AnnotationIOController @Inject()(nmlWriter: NmlWriter, for { _ <- bool2Fox(skeletonTracings.nonEmpty || volumeTracingsWithDataLocations.nonEmpty) ?~> "nml.file.noFile" - dataSet <- findDataSetForUploadedAnnotations(skeletonTracings, - volumeTracingsWithDataLocations.map(_._1), - parseSuccesses) + dataSet <- findDataSetForUploadedAnnotations(skeletonTracings, volumeTracingsWithDataLocations.map(_._1)) tracingStoreClient <- tracingStoreService.clientFor(dataSet) mergedVolumeTracingIdOpt <- Fox.runOptional(volumeTracingsWithDataLocations.headOption) { _ => for { @@ -120,11 +118,10 @@ class AnnotationIOController @Inject()(nmlWriter: NmlWriter, private def findDataSetForUploadedAnnotations( skeletonTracings: List[SkeletonTracing], - volumeTracings: List[VolumeTracing], - parseSuccesses: List[NmlParseResult])(implicit mp: MessagesProvider, ctx: DBAccessContext): Fox[DataSet] = + volumeTracings: List[VolumeTracing])(implicit mp: MessagesProvider, ctx: DBAccessContext): Fox[DataSet] = for { dataSetName <- assertAllOnSameDataSet(skeletonTracings, volumeTracings) ?~> "nml.file.differentDatasets" - organizationNameOpt <- assertAllOnSameOrganization(parseSuccesses.flatMap(s => s.organizationName)) ?~> "nml.file.differentDatasets" + organizationNameOpt <- assertAllOnSameOrganization(skeletonTracings, volumeTracings) ?~> "nml.file.differentDatasets" organizationIdOpt <- Fox.runOptional(organizationNameOpt) { organizationDAO.findOneByName(_)(GlobalAccessContext).map(_._id) } ?~> Messages("organization.notFound", organizationNameOpt.getOrElse("")) ~> NOT_FOUND @@ -172,14 +169,16 @@ class AnnotationIOController @Inject()(nmlWriter: NmlWriter, _ <- bool2Fox(volumes.forall(_.dataSetName == dataSetName)) } yield dataSetName - private def assertAllOnSameOrganization(organizationNames: List[String]): Fox[Option[String]] = - if (organizationNames.isEmpty) Fox.successful(None) - else { - for { - organizationName <- organizationNames.headOption.toFox - _ <- bool2Fox(organizationNames.forall(name => name == organizationName)) - } yield Some(organizationName) - } + private def assertAllOnSameOrganization(skeletons: List[SkeletonTracing], + volumes: List[VolumeTracing]): Fox[Option[String]] = + for { + organizationName: Option[String] <- volumes.headOption + .map(_.organizationName) + .orElse(skeletons.headOption.map(_.organizationName)) + .toFox + _ <- bool2Fox(skeletons.forall(_.organizationName == organizationName)) + _ <- bool2Fox(volumes.forall(_.organizationName == organizationName)) + } yield organizationName private def adaptPropertiesToFallbackLayer(volumeTracing: VolumeTracing, dataSet: DataSet)( implicit ctx: DBAccessContext): Fox[VolumeTracing] = diff --git a/app/controllers/TracingStoreController.scala b/app/controllers/TracingStoreController.scala index 40c7d1098fa..2e24cfb1edf 100644 --- a/app/controllers/TracingStoreController.scala +++ b/app/controllers/TracingStoreController.scala @@ -43,4 +43,5 @@ class TracingStoreController @Inject()(tracingStoreService: TracingStoreService, } yield { Ok(Json.toJson(js)) } } } + } diff --git a/app/controllers/WKTracingStoreController.scala b/app/controllers/WKTracingStoreController.scala index ddca620dec1..69a44dd3c35 100644 --- a/app/controllers/WKTracingStoreController.scala +++ b/app/controllers/WKTracingStoreController.scala @@ -1,21 +1,27 @@ package 
controllers -import com.scalableminds.util.accesscontext.GlobalAccessContext +import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.tools.{Fox, FoxImplicits} import javax.inject.Inject -import models.annotation.{Annotation, AnnotationDAO, TracingStoreDAO, TracingStoreService} +import models.annotation.{Annotation, AnnotationDAO, TracingStoreService} import models.user.time.TimeSpanService import oxalis.security.{WkEnv, WkSilhouetteEnvironment} import com.mohiva.play.silhouette.api.Silhouette import play.api.libs.json.{JsObject, Json} import models.annotation.AnnotationState._ +import models.binary.{DataSetDAO, DataSetService} +import models.team.OrganizationDAO +import play.api.i18n.Messages +import play.api.mvc.{Action, AnyContent} import scala.concurrent.ExecutionContext class WKTracingStoreController @Inject()(tracingStoreService: TracingStoreService, - tracingStoreDAO: TracingStoreDAO, wkSilhouetteEnvironment: WkSilhouetteEnvironment, timeSpanService: TimeSpanService, + dataSetService: DataSetService, + organizationDAO: OrganizationDAO, + dataSetDAO: DataSetDAO, annotationDAO: AnnotationDAO, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext) extends Controller @@ -24,7 +30,7 @@ class WKTracingStoreController @Inject()(tracingStoreService: TracingStoreServic val bearerTokenService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService def handleTracingUpdateReport(name: String) = Action.async(parse.json) { implicit request => - tracingStoreService.validateAccess(name) { dataStore => + tracingStoreService.validateAccess(name) { _ => for { tracingId <- (request.body \ "tracingId").asOpt[String].toFox annotation <- annotationDAO.findOneByTracingId(tracingId)(GlobalAccessContext) @@ -49,4 +55,24 @@ class WKTracingStoreController @Inject()(tracingStoreService: TracingStoreServic private def ensureAnnotationNotFinished(annotation: Annotation) = if (annotation.state == Finished) Fox.failure("annotation already finshed") else Fox.successful(()) + + def dataSource(name: String, organizationName: String, dataSetName: String): Action[AnyContent] = Action.async { + implicit request => + tracingStoreService.validateAccess(name) { _ => + implicit val ctx: DBAccessContext = GlobalAccessContext + val organizationNameOpt = if (organizationName == "") None else Some(organizationName) + for { + organizationIdOpt <- Fox.runOptional(organizationNameOpt) { + organizationDAO.findOneByName(_)(GlobalAccessContext).map(_._id) + } ?~> Messages("organization.notFound", organizationNameOpt.getOrElse("")) ~> NOT_FOUND + organizationId <- Fox.fillOption(organizationIdOpt) { + dataSetDAO.getOrganizationForDataSet(dataSetName)(GlobalAccessContext) + } ?~> Messages("dataSet.noAccess", dataSetName) ~> FORBIDDEN + dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organizationId) ?~> Messages( + "dataSet.noAccess", + dataSetName) ~> FORBIDDEN + dataSource <- dataSetService.dataSourceFor(dataSet) + } yield Ok(Json.toJson(dataSource)) + } + } } diff --git a/app/models/annotation/nml/NmlParser.scala b/app/models/annotation/nml/NmlParser.scala index d525f2af8dd..e30082cfd1c 100755 --- a/app/models/annotation/nml/NmlParser.scala +++ b/app/models/annotation/nml/NmlParser.scala @@ -43,8 +43,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener @SuppressWarnings(Array("TraversableHead")) //We check if volumes are empty before accessing the head def parse(name: String, nmlInputStream: InputStream, 
overwritingDataSetName: Option[String], isTaskUpload: Boolean)( - implicit m: MessagesProvider) - : Box[(Option[SkeletonTracing], Option[(VolumeTracing, String)], String, Option[String])] = + implicit m: MessagesProvider): Box[(Option[SkeletonTracing], Option[(VolumeTracing, String)], String)] = try { val data = XML.load(nmlInputStream) for { @@ -97,7 +96,8 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener 0, zoomLevel, None, - userBoundingBoxes + userBoundingBoxes, + organizationName ), volumes.head.location) ) @@ -106,21 +106,24 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener if (treesSplit.isEmpty) None else Some( - SkeletonTracing(dataSetName, - treesSplit, - time, - taskBoundingBox, - activeNodeId, - editPosition, - editRotation, - zoomLevel, - version = 0, - None, - treeGroupsAfterSplit, - userBoundingBoxes) + SkeletonTracing( + dataSetName, + treesSplit, + time, + taskBoundingBox, + activeNodeId, + editPosition, + editRotation, + zoomLevel, + version = 0, + None, + treeGroupsAfterSplit, + userBoundingBoxes, + organizationName + ) ) - (skeletonTracing, volumeTracingWithDataLocation, description, organizationName) + (skeletonTracing, volumeTracingWithDataLocation, description) } } catch { case e: org.xml.sax.SAXParseException if e.getMessage.startsWith("Premature end of file") => diff --git a/app/models/annotation/nml/NmlResults.scala b/app/models/annotation/nml/NmlResults.scala index 3add9b9e8fd..27320d42cf1 100644 --- a/app/models/annotation/nml/NmlResults.scala +++ b/app/models/annotation/nml/NmlResults.scala @@ -17,8 +17,6 @@ object NmlResults extends LazyLogging { def description: Option[String] = None - def organizationName: Option[String] = None - def succeeded: Boolean def toSuccessFox(implicit ec: ExecutionContext): Fox[NmlParseSuccess] = this match { @@ -36,8 +34,7 @@ object NmlResults extends LazyLogging { case class NmlParseSuccess(fileName: String, skeletonTracing: Option[SkeletonTracing], volumeTracingWithDataLocation: Option[(VolumeTracing, String)], - _description: String, - organizationNameOpt: Option[String]) + _description: String) extends NmlParseResult { def succeeded = true @@ -45,8 +42,6 @@ object NmlResults extends LazyLogging { override def description = Some(_description) - override def organizationName: Option[String] = organizationNameOpt - override def withName(name: String): NmlParseResult = this.copy(fileName = name) } diff --git a/app/models/annotation/nml/NmlService.scala b/app/models/annotation/nml/NmlService.scala index b05bb62356c..b80f3ab21a0 100644 --- a/app/models/annotation/nml/NmlService.scala +++ b/app/models/annotation/nml/NmlService.scala @@ -34,8 +34,8 @@ class NmlService @Inject()(temporaryFileCreator: TemporaryFileCreator)(implicit overwritingDataSetName: Option[String], isTaskUpload: Boolean)(implicit m: MessagesProvider): NmlParseResult = NmlParser.parse(name, inputStream, overwritingDataSetName, isTaskUpload) match { - case Full((skeletonTracing, volumeTracingWithDataLocation, description, organizationNameOpt)) => - NmlParseSuccess(name, skeletonTracing, volumeTracingWithDataLocation, description, organizationNameOpt) + case Full((skeletonTracing, volumeTracingWithDataLocation, description)) => + NmlParseSuccess(name, skeletonTracing, volumeTracingWithDataLocation, description) case Failure(msg, _, chain) => NmlParseFailure(name, msg + chain.map(_ => formatChain(chain)).getOrElse("")) case Empty => NmlParseEmpty(name) } @@ -77,12 +77,8 @@ class NmlService 
@Inject()(temporaryFileCreator: TemporaryFileCreator)(implicit if (parseResults.length > 1) { parseResults.map { - case NmlParseSuccess(name, Some(skeletonTracing), volumeTracingOpt, description, organizationNameOpt) => - NmlParseSuccess(name, - Some(renameTrees(name, skeletonTracing)), - volumeTracingOpt, - description, - organizationNameOpt) + case NmlParseSuccess(name, Some(skeletonTracing), volumeTracingOpt, description) => + NmlParseSuccess(name, Some(renameTrees(name, skeletonTracing)), volumeTracingOpt, description) case r => r } } else { @@ -101,12 +97,8 @@ class NmlService @Inject()(temporaryFileCreator: TemporaryFileCreator)(implicit } parseResults.map { - case NmlParseSuccess(name, Some(skeletonTracing), volumeTracingOpt, description, organizationNameOpt) => - NmlParseSuccess(name, - Some(wrapTreesInGroup(name, skeletonTracing)), - volumeTracingOpt, - description, - organizationNameOpt) + case NmlParseSuccess(name, Some(skeletonTracing), volumeTracingOpt, description) => + NmlParseSuccess(name, Some(wrapTreesInGroup(name, skeletonTracing)), volumeTracingOpt, description) case r => r } } diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index a23ab132409..d8ef5de3424 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -91,6 +91,7 @@ GET /tracingstore POST /tracingstores/:name/handleTracingUpdateReport controllers.WKTracingStoreController.handleTracingUpdateReport(name: String) POST /tracingstores/:name/validateUserAccess controllers.UserTokenController.validateAccessViaTracingstore(name: String, token: Option[String]) PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) +GET /tracingstores/:name/dataSource/:organizationName/:dataSetName controllers.WKTracingStoreController.dataSource(name: String, organizationName: String, dataSetName: String) # User access tokens for datastore authentification POST /userToken/generate controllers.UserTokenController.generateTokenForDataStore diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreWkRpcClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreWkRpcClient.scala index cdec2da727c..0bd93f2bb65 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreWkRpcClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreWkRpcClient.scala @@ -3,6 +3,7 @@ package com.scalableminds.webknossos.tracingstore import com.google.inject.Inject import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, DataSourceLike} import com.scalableminds.webknossos.datastore.services.{ AccessTokenService, UserAccessAnswer, @@ -39,6 +40,12 @@ class TracingStoreWkRpcClient @Inject()( "tracingId" -> tracingId, "userToken" -> userToken)) + def getDataSource(organizationNameOpt: Option[String], dataSetName: String): Fox[DataSourceLike] = + rpc( + s"$webKnossosUrl/api/tracingstores/$tracingStoreName/dataSource/${organizationNameOpt.getOrElse("")}/${dataSetName}") + .addQueryString("key" -> tracingStoreKey) + .getWithJsonResponse[DataSourceLike] + override def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = rpc(s"$webKnossosUrl/api/tracingstores/$tracingStoreName/validateUserAccess") .addQueryString("key" -> tracingStoreKey) diff --git 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 06d4654d3a5..e8566ac164b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -160,91 +160,6 @@ trait VolumeTracingBucketHelper .flatten } - private def loadHigherResBuckets(dataLayer: VolumeTracingLayer, - bucket: BucketPosition, - version: Option[Long]): Fox[Array[Byte]] = { - val downScaleFactor = bucket.resolution - - def downscale[T: ClassTag](data: Array[Array[T]]): Array[T] = { - val result = new Array[T](32 * 32 * 32) - for { - z <- 0 until 32 - y <- 0 until 32 - x <- 0 until 32 - } { - val sourceVoxelPosition = Point3D(x * downScaleFactor.x, y * downScaleFactor.y, z * downScaleFactor.z) - val sourceBucketPosition = - Point3D(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) - val sourceVoxelPositionInSourceBucket = - Point3D(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) - val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z - val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 - result(x + y * 32 + z * 32 * 32) = data(sourceBucketIndex)(sourceVoxelIndex) - } - result - } - - val buckets: Seq[BucketPosition] = for { - z <- 0 until downScaleFactor.z - y <- 0 until downScaleFactor.y - x <- 0 until downScaleFactor.x - } yield { - BucketPosition(bucket.globalX + x * bucket.bucketLength, - bucket.globalY + y * bucket.bucketLength, - bucket.globalZ + z * bucket.bucketLength, - Point3D(1, 1, 1)) - } - logger.info(s"downsampling bucket from ${buckets.length} buckets...") - (for { - dataBoxes <- Fox.serialSequence(buckets.toList)(loadBucket(dataLayer, _, version)) - data = if (dataBoxes.forall(_.isEmpty)) - Array.fill[Byte](bucket.volume * dataLayer.bytesPerElement)(0) - else - dataBoxes.flatMap { - case Full(bytes) => bytes - case _ => - Array.fill[Byte](bucket.volume * dataLayer.bytesPerElement)(0) - }.toArray - downscaledData = if (data.length == bucket.volume * dataLayer.bytesPerElement) data - else - convertData(data, dataLayer.elementClass) match { - case data: Array[UByte] => - downscale[UByte](data.grouped(bucket.volume).toArray) - .foldLeft( - ByteBuffer - .allocate( - dataLayer.bytesPerElement * data.length / downScaleFactor.x / downScaleFactor.y / downScaleFactor.z) - .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf put el.toByte) - .array - case data: Array[UShort] => - downscale[UShort](data.grouped(bucket.volume).toArray) - .foldLeft( - ByteBuffer - .allocate( - dataLayer.bytesPerElement * data.length / downScaleFactor.x / downScaleFactor.y / downScaleFactor.z) - .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf putShort el.toShort) - .array - case data: Array[UInt] => - downscale[UInt](data.grouped(bucket.volume).toArray) - .foldLeft( - ByteBuffer - .allocate( - dataLayer.bytesPerElement * data.length / downScaleFactor.x / downScaleFactor.y / downScaleFactor.z) - .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf putInt el.toInt) - .array - case data: Array[ULong] 
=> - downscale[ULong](data.grouped(bucket.volume).toArray) - .foldLeft( - ByteBuffer - .allocate( - dataLayer.bytesPerElement * data.length / downScaleFactor.x / downScaleFactor.y / downScaleFactor.z) - .order(ByteOrder.LITTLE_ENDIAN))((buf, el) => buf putLong el.toLong) - .array - case _ => data - } - } yield downscaledData).toFox - } - def saveBucket(dataLayer: VolumeTracingLayer, bucket: BucketPosition, data: Array[Byte], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala new file mode 100644 index 00000000000..66af7043aee --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -0,0 +1,192 @@ +package com.scalableminds.webknossos.tracingstore.tracings.volume + +import com.scalableminds.util.geometry.Point3D +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedIntegerArray} +import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, DataSourceLike, ElementClass} +import com.scalableminds.webknossos.tracingstore.TracingStoreWkRpcClient +import com.scalableminds.webknossos.tracingstore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.tracingstore.tracings.{ + KeyValueStoreImplicits, + ProtoGeometryImplicits, + TracingDataStore, + VersionedKeyValuePair +} + +import scala.collection.mutable +import scala.concurrent.ExecutionContext +import scala.reflect.ClassTag + +trait VolumeTracingDownsampling + extends BucketKeys + with ProtoGeometryImplicits + with VolumeBucketCompression + with KeyValueStoreImplicits { + + val tracingDataStore: TracingDataStore + val tracingStoreWkRpcClient: TracingStoreWkRpcClient + def saveBucket(dataLayer: VolumeTracingLayer, + bucket: BucketPosition, + data: Array[Byte], + version: Long, + toCache: Boolean = false): Fox[Unit] + + private def fillMapWithInitialBucketsInplace(bucketDataMap: mutable.HashMap[BucketPosition, Array[Byte]], + tracingId: String, + dataLayer: VolumeTracingLayer): Unit = { + val data: List[VersionedKeyValuePair[Array[Byte]]] = + tracingDataStore.volumeData.getMultipleKeys(tracingId, Some(tracingId)) + data.foreach { keyValuePair: VersionedKeyValuePair[Array[Byte]] => + val bucketPosition = parseBucketKey(keyValuePair.key).map(_._2) + bucketPosition.foreach { + bucketDataMap(_) = decompressIfNeeded(keyValuePair.value, + expectedUncompressedBucketSizeFor(dataLayer), + s"bucket $bucketPosition during downsampling") + } + } + } + + def downsampleWithLayer(tracingId: String, tracing: VolumeTracing, dataLayer: VolumeTracingLayer)( + implicit ec: ExecutionContext): Fox[Unit] = { + //TODO: + // - skip if already downsampled + // - list all keys first, before fetching actual data + // - update tracing version? can the user restore not-downsampled old versions, what happens? 
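+    // Buckets are 32x32x32 voxels. A single zero byte marks missing or reverted buckets, + // both in the stored values and in the in-memory bucket map used below.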
+ val bucketVolume = 32 * 32 * 32 + val originalMag = Point3D(1, 1, 1) + for { + requiredMags <- getRequiredMags(tracing) + elementClass = elementClassFromProto(tracing.elementClass) + bucketDataMap = new mutable.HashMap[BucketPosition, Array[Byte]]() { + override def default(key: BucketPosition): Array[Byte] = Array[Byte](0) + } + _ = fillMapWithInitialBucketsInplace(bucketDataMap, tracingId, dataLayer) + originalBucketPositions: List[BucketPosition] = bucketDataMap.keys.toList + updatedBuckets = new mutable.HashSet[BucketPosition]() + _ = requiredMags.foldLeft(originalMag) { (previousMag, requiredMag) => + downsampleMagFromMag(previousMag, + requiredMag, + originalBucketPositions, + bucketDataMap, + updatedBuckets, + bucketVolume, + elementClass, + dataLayer) + //logger.info(s"bucketDataMap keys: ${bucketDataMap.keys.toList}") + requiredMag + } + _ <- Fox.serialCombined(updatedBuckets.toList) { bucketPosition: BucketPosition => + //logger.info(s"saving bucket $bucketPosition") + saveBucket(dataLayer, bucketPosition, bucketDataMap(bucketPosition), tracing.version) + } + } yield () + } + + private def downsampleMagFromMag(previousMag: Point3D, + requiredMag: Point3D, + originalBucketPositions: List[BucketPosition], + bucketDataMap: mutable.HashMap[BucketPosition, Array[Byte]], + updatedBuckets: mutable.HashSet[BucketPosition], + bucketVolume: Int, + elementClass: ElementClass.Value, + dataLayer: VolumeTracingLayer): Unit = { + //logger.info(s"downsampling volume tracing mag $requiredMag from mag $previousMag...") + val downScaleFactor = + Point3D(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) + downsampledBucketPositions(originalBucketPositions, requiredMag).foreach { downsampledBucketPosition => + val sourceBuckets: Seq[BucketPosition] = + sourceBucketPositionsFor(downsampledBucketPosition, downScaleFactor, previousMag) + //logger.info(s"source buckets for bucket $downsampledBucketPosition: ${sourceBuckets}") + val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMap(_)) + val downsampledData: Array[Byte] = + if (sourceData.forall(_.sameElements(Array[Byte](0)))) + Array[Byte](0) + else { + val sourceDataFilled = fillZeroedIfNeeded(sourceData, bucketVolume, dataLayer.bytesPerElement) + val sourceDataTyped = UnsignedIntegerArray.fromByteArray(sourceDataFilled.toArray.flatten, elementClass) + val dataDownscaledTyped = + downsampleData(sourceDataTyped.grouped(bucketVolume).toArray, downScaleFactor, bucketVolume) + UnsignedIntegerArray.toByteArray(dataDownscaledTyped, elementClass) + } + bucketDataMap(downsampledBucketPosition) = downsampledData + updatedBuckets.add(downsampledBucketPosition) + } + } + + private def downsampledBucketPositions(originalBucketPositions: List[BucketPosition], + requiredMag: Point3D): Set[BucketPosition] = + originalBucketPositions.map { bucketPosition: BucketPosition => + BucketPosition( + (bucketPosition.globalX / requiredMag.x / 32) * requiredMag.x * 32, + (bucketPosition.globalY / requiredMag.y / 32) * requiredMag.y * 32, + (bucketPosition.globalZ / requiredMag.z / 32) * requiredMag.z * 32, + requiredMag + ) + }.toSet + + private def sourceBucketPositionsFor(bucketPosition: BucketPosition, + downScaleFactor: Point3D, + previousMag: Point3D): Seq[BucketPosition] = + for { + z <- 0 until downScaleFactor.z + y <- 0 until downScaleFactor.y + x <- 0 until downScaleFactor.x + } yield { + BucketPosition( + bucketPosition.globalX + x * bucketPosition.bucketLength * previousMag.x, + bucketPosition.globalY + y * 
bucketPosition.bucketLength * previousMag.y, + bucketPosition.globalZ + z * bucketPosition.bucketLength * previousMag.z, + previousMag + ) + } + + private def fillZeroedIfNeeded(sourceData: Seq[Array[Byte]], + bucketVolume: Int, + bytesPerElement: Int): Seq[Array[Byte]] = + // Reverted buckets and missing buckets are represented by a single zero-byte. + // For downsampling, those need to be replaced with the full bucket volume of zero-bytes. + sourceData.map { sourceBucketData => + if (sourceBucketData.sameElements(Array[Byte](0))) { + Array.fill[Byte](bucketVolume * bytesPerElement)(0) + } else sourceBucketData + } + + private def downsampleData[T: ClassTag](data: Array[Array[T]], + downScaleFactor: Point3D, + bucketVolume: Int): Array[T] = { + val result = new Array[T](bucketVolume) + for { + z <- 0 until 32 + y <- 0 until 32 + x <- 0 until 32 + } { + val voxelSourceData: IndexedSeq[T] = for { + z_offset <- 0 until downScaleFactor.z + y_offset <- 0 until downScaleFactor.y + x_offset <- 0 until downScaleFactor.x + } yield { + val sourceVoxelPosition = + Point3D(x * downScaleFactor.x + x_offset, y * downScaleFactor.y + y_offset, z * downScaleFactor.z + z_offset) + val sourceBucketPosition = + Point3D(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) + val sourceVoxelPositionInSourceBucket = + Point3D(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) + val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z + val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 + data(sourceBucketIndex)(sourceVoxelIndex) + } + result(x + y * 32 + z * 32 * 32) = mode(voxelSourceData) + } + result + } + + private def mode[T](items: Seq[T]): T = + items.groupBy(i => i).mapValues(_.size).maxBy(_._2)._1 + + private def getRequiredMags(tracing: VolumeTracing): Fox[Seq[Point3D]] = + for { + dataSource: DataSourceLike <- tracingStoreWkRpcClient.getDataSource(tracing.organizationName, tracing.dataSetName) + mags = dataSource.dataLayers.flatMap(_.resolutions).distinct.sortBy(_.maxDim).filterNot(_.maxDim == 1) + } yield mags + +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index a836d642a2b..993a8285e28 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -93,6 +93,6 @@ case class VolumeTracingLayer( val mappings: Option[Set[String]] = None - val resolutions: List[Point3D] = List(Point3D(1, 1, 1)) + val resolutions: List[Point3D] = List(Point3D(1, 1, 1)) // unused for volume tracings override def containsResolution(resolution: Point3D) = true } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 006fb971e4b..5e1b92c9497 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -6,7 +6,6 @@ import java.nio.file.Paths import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Point3D} import com.scalableminds.webknossos.datastore.dataformats.wkw.{WKWBucketStreamSink, WKWDataFormatHelper} -import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, SegmentationLayer} import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedInteger, UnsignedIntegerArray} import com.scalableminds.webknossos.tracingstore.VolumeTracing.VolumeTracing @@ -16,10 +15,8 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.models.DataRequestCollection.DataRequestCollection import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest -import collection.mutable.HashMap -import com.scalableminds.webknossos.tracingstore.{RedisTemporaryStore, TracingStoreConfig} +import com.scalableminds.webknossos.tracingstore.{RedisTemporaryStore, TracingStoreWkRpcClient} import com.scalableminds.webknossos.datastore.services.{BinaryDataService, DataConverter} -import com.scalableminds.webknossos.tracingstore.RedisTemporaryStore import com.scalableminds.webknossos.tracingstore.VolumeTracing.VolumeTracing.ElementClass import com.scalableminds.webknossos.wrap.WKWFile import com.typesafe.scalalogging.LazyLogging @@ -36,10 +33,10 @@ import scala.concurrent.ExecutionContext.Implicits.global import com.scalableminds.webknossos.tracingstore.geometry.NamedBoundingBox import scala.collection.mutable -import scala.reflect.ClassTag class VolumeTracingService @Inject()( - tracingDataStore: TracingDataStore, + val tracingDataStore: TracingDataStore, + val tracingStoreWkRpcClient: TracingStoreWkRpcClient, implicit val temporaryTracingStore: TemporaryTracingStore[VolumeTracing], implicit val volumeDataCache: TemporaryVolumeDataStore, val handledGroupIdStore: RedisTemporaryStore, @@ -48,6 +45,7 @@ class VolumeTracingService @Inject()( val temporaryFileCreator: TemporaryFileCreator ) extends TracingService[VolumeTracing] with VolumeTracingBucketHelper + with VolumeTracingDownsampling with WKWDataFormatHelper with ProtoGeometryImplicits with FoxImplicits @@ -409,122 +407,10 @@ class VolumeTracingService @Inject()( } def downsample(tracingId: String, tracing: VolumeTracing): Fox[Unit] = { - //TODO: - // - skip if already downsampled - // - figure out which resolutions to create - // - list all keys first, before fetching actual data - // - update tracing version? 
- val dataLayer = volumeTracingLayer(tracingId, tracing) - val elementClass = elementClassFromProto(tracing.elementClass) - - val data: List[VersionedKeyValuePair[Array[Byte]]] = - tracingDataStore.volumeData.getMultipleKeys(tracingId, Some(tracingId)) - val bucketDataMap = new mutable.HashMap[BucketPosition, Array[Byte]]() { - override def default(key: BucketPosition): Array[Byte] = Array[Byte](0) - } - data.foreach { keyValuePair: VersionedKeyValuePair[Array[Byte]] => - val bucketPosition = parseBucketKey(keyValuePair.key).map(_._2) - bucketPosition.foreach { - bucketDataMap(_) = decompressIfNeeded(keyValuePair.value, - expectedUncompressedBucketSizeFor(dataLayer), - s"bucket $bucketPosition during downsampling") - } - } - val originalBucketPositions: Seq[BucketPosition] = bucketDataMap.keys.toList - - val originalMag = Point3D(1, 1, 1) - val requiredMags = Seq(Point3D(2, 2, 2), Point3D(4, 4, 4), Point3D(8, 8, 8), Point3D(16, 16, 16)) - val bucketVolume = 32 * 32 * 32 - - val updatedBuckets = new mutable.HashSet[BucketPosition]() - requiredMags.foldLeft(originalMag) { (previousMag, requiredMag) => - // logger.info(s"downsampling mag $requiredMag from mag $previousMag...") - val requiredBucketPositions: mutable.HashSet[BucketPosition] = new mutable.HashSet[BucketPosition]() - originalBucketPositions.foreach { bucketPosition: BucketPosition => - val downsampledBucketPosition = BucketPosition( - (bucketPosition.globalX / requiredMag.x / 32) * requiredMag.x * 32, - (bucketPosition.globalY / requiredMag.y / 32) * requiredMag.y * 32, - (bucketPosition.globalZ / requiredMag.z / 32) * requiredMag.z * 32, - requiredMag - ) - requiredBucketPositions.add(downsampledBucketPosition) - } - val downScaleFactor = - Point3D(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) - // logger.info(s"creating buckets $requiredBucketPositions...") - requiredBucketPositions.foreach { bucketPosition => - val sourceBuckets: Seq[BucketPosition] = for { - z <- 0 until downScaleFactor.z - y <- 0 until downScaleFactor.y - x <- 0 until downScaleFactor.x - } yield { - BucketPosition( - bucketPosition.globalX + x * bucketPosition.bucketLength * previousMag.x, - bucketPosition.globalY + y * bucketPosition.bucketLength * previousMag.y, - bucketPosition.globalZ + z * bucketPosition.bucketLength * previousMag.z, - previousMag - ) - } - // logger.info(s"source buckets: $sourceBuckets") - val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMap(_)) - val downsampledData: Array[Byte] = - if (sourceData.forall(_.sameElements(Array[Byte](0)))) - Array[Byte](0) - else { - val sourceDataFilled = sourceData.map { sourceBucketData => - if (sourceBucketData.sameElements(Array[Byte](0))) { - Array.fill[Byte](bucketVolume * dataLayer.bytesPerElement)(0) - } else sourceBucketData - } - val sourceDataTyped: Array[UnsignedInteger] = - UnsignedIntegerArray.fromByteArray(sourceDataFilled.toArray.flatten, elementClass) - val dataDownscaledTyped: Array[UnsignedInteger] = - downscale(sourceDataTyped.grouped(bucketVolume).toArray, downScaleFactor) - UnsignedIntegerArray.toByteArray(dataDownscaledTyped, elementClass) - } - bucketDataMap(bucketPosition) = downsampledData - updatedBuckets.add(bucketPosition) - - } - requiredMag - } - for { - _ <- Fox.serialCombined(updatedBuckets.toList) { bucketPosition: BucketPosition => - saveBucket(dataLayer, bucketPosition, bucketDataMap(bucketPosition), tracing.version + 1L) - } - } yield () - } - - private def downscale[T: ClassTag](data: Array[Array[T]], 
downScaleFactor: Point3D): Array[T] = { - val result = new Array[T](32 * 32 * 32) - for { - z <- 0 until 32 - y <- 0 until 32 - x <- 0 until 32 - } { - val voxelSourceData: IndexedSeq[T] = for { - z_offset <- 0 until downScaleFactor.z - y_offset <- 0 until downScaleFactor.y - x_offset <- 0 until downScaleFactor.x - } yield { - val sourceVoxelPosition = - Point3D(x * downScaleFactor.x + x_offset, y * downScaleFactor.y + y_offset, z * downScaleFactor.z + z_offset) - val sourceBucketPosition = - Point3D(sourceVoxelPosition.x / 32, sourceVoxelPosition.y / 32, sourceVoxelPosition.z / 32) - val sourceVoxelPositionInSourceBucket = - Point3D(sourceVoxelPosition.x % 32, sourceVoxelPosition.y % 32, sourceVoxelPosition.z % 32) - val sourceBucketIndex = sourceBucketPosition.x + sourceBucketPosition.y * downScaleFactor.y + sourceBucketPosition.z * downScaleFactor.y * downScaleFactor.z - val sourceVoxelIndex = sourceVoxelPositionInSourceBucket.x + sourceVoxelPositionInSourceBucket.y * 32 + sourceVoxelPositionInSourceBucket.z * 32 * 32 - data(sourceBucketIndex)(sourceVoxelIndex) - } - result(x + y * 32 + z * 32 * 32) = mode(voxelSourceData) - } - result + val volumeLayer = volumeTracingLayer(tracingId, tracing) + downsampleWithLayer(tracingId, tracing, volumeLayer) } - private def mode[T](items: Seq[T]): T = - items.groupBy(i => i).mapValues(_.size).maxBy(_._2)._1 - def merge(tracings: Seq[VolumeTracing]): VolumeTracing = tracings.reduceLeft(mergeTwo) def mergeTwo(tracingA: VolumeTracing, tracingB: VolumeTracing): VolumeTracing = { diff --git a/webknossos-tracingstore/proto/SkeletonTracing.proto b/webknossos-tracingstore/proto/SkeletonTracing.proto index eec59e5a73c..5a59ec20987 100644 --- a/webknossos-tracingstore/proto/SkeletonTracing.proto +++ b/webknossos-tracingstore/proto/SkeletonTracing.proto @@ -63,6 +63,7 @@ message SkeletonTracing { optional BoundingBox userBoundingBox = 10; repeated TreeGroup treeGroups = 11; repeated NamedBoundingBox userBoundingBoxes = 12; + optional string organizationName = 13; } message SkeletonTracingOpt { diff --git a/webknossos-tracingstore/proto/VolumeTracing.proto b/webknossos-tracingstore/proto/VolumeTracing.proto index 9cfa507ecd5..5208b9c25ba 100644 --- a/webknossos-tracingstore/proto/VolumeTracing.proto +++ b/webknossos-tracingstore/proto/VolumeTracing.proto @@ -26,6 +26,7 @@ message VolumeTracing { required double zoomLevel = 11; optional BoundingBox userBoundingBox = 12; repeated NamedBoundingBox userBoundingBoxes = 13; + optional string organizationName = 14; } message VolumeTracingOpt { From b9fb5894e528db9e77995cdf6af9bc5b22c15765 Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 8 Sep 2020 15:21:35 +0200 Subject: [PATCH 029/121] adapt nml unit test suite --- test/backend/NMLUnitTestSuite.scala | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/test/backend/NMLUnitTestSuite.scala b/test/backend/NMLUnitTestSuite.scala index 77f3dd4f290..0f052282f30 100644 --- a/test/backend/NMLUnitTestSuite.scala +++ b/test/backend/NMLUnitTestSuite.scala @@ -27,7 +27,7 @@ class NMLUnitTestSuite extends FlatSpec { def getObjectId = ObjectId.generate def writeAndParseTracing(skeletonTracing: SkeletonTracing) - : Box[(Option[SkeletonTracing], Option[(VolumeTracing, String)], String, Option[String])] = { + : Box[(Option[SkeletonTracing], Option[(VolumeTracing, String)], String)] = { val nmlEnumarator = new NmlWriter().toNmlStream(Some(skeletonTracing), None, None, None, None, "testOrganization", None, None) val arrayFuture = 
Iteratee.flatten(nmlEnumarator |>> Iteratee.consume[Array[Byte]]()).run @@ -36,11 +36,11 @@ class NMLUnitTestSuite extends FlatSpec { } def isParseSuccessful( - parsedTracing: Box[(Option[SkeletonTracing], Option[(VolumeTracing, String)], String, Option[String])]): Boolean = + parsedTracing: Box[(Option[SkeletonTracing], Option[(VolumeTracing, String)], String)]): Boolean = parsedTracing match { case Full(tuple) => tuple match { - case (Some(_), _, _, _) => true + case (Some(_), _, _) => true case _ => false } case _ => false @@ -52,9 +52,8 @@ class NMLUnitTestSuite extends FlatSpec { writeAndParseTracing(dummyTracing) match { case Full(tuple) => tuple match { - case (Some(tracing), _, _, _) => { + case (Some(tracing), _, _) => assert(tracing == dummyTracing) - } case _ => throw new Exception } case _ => throw new Exception From 3f9a38b63d2500521ec1fcd20a241a3bf297033a Mon Sep 17 00:00:00 2001 From: Florian M Date: Tue, 8 Sep 2020 15:29:50 +0200 Subject: [PATCH 030/121] fix backend dummy tracing --- test/backend/Dummies.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/backend/Dummies.scala b/test/backend/Dummies.scala index 5000d97f5ff..8f1198dda19 100644 --- a/test/backend/Dummies.scala +++ b/test/backend/Dummies.scala @@ -48,7 +48,9 @@ object Dummies { 1.0, 0, None, - Seq(treeGroup1, treeGroup2)) + Seq(treeGroup1, treeGroup2), + Seq.empty, + Some("testOrganization")) //tree with two components, from tree1 and tree2 val comp1Nodes = Seq(createDummyNode(10), createDummyNode(11), createDummyNode(12), createDummyNode(13)) From 07e2a0a3e23f6a102984a813b0134b365c6d17cc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 9 Sep 2020 11:50:16 +0200 Subject: [PATCH 031/121] move function to get number of slices to accessor --- .../model/accessors/volumetracing_accessor.js | 9 ++++- .../model/bucket_data_handling/bucket.js | 13 ++++--- .../model/bucket_data_handling/data_cube.js | 5 +-- .../oxalis/model/sagas/volumetracing_saga.js | 36 ++++++++++++++++--- .../oxalis/model/volumetracing/volumelayer.js | 21 ++++------- 5 files changed, 52 insertions(+), 32 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js index 9c006193eaf..e21c9afd7b0 100644 --- a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js @@ -5,7 +5,8 @@ import Maybe from "data.maybe"; import { getRequestLogZoomStep } from "oxalis/model/accessors/flycam_accessor"; import type { Tracing, VolumeTracing, OxalisState } from "oxalis/store"; -import type { VolumeTool, ContourMode } from "oxalis/constants"; +import Dimensions from "oxalis/model/dimensions"; +import type { VolumeTool, ContourMode, Vector3, OrthoView } from "oxalis/constants"; import type { HybridServerTracing, ServerVolumeTracing } from "admin/api_flow_types"; export function getVolumeTracing(tracing: Tracing): Maybe { @@ -60,3 +61,9 @@ export function isSegmentationMissingForZoomstep( ): boolean { return getRequestLogZoomStep(state) > maxZoomStepForSegmentation; } + +export function getNumberOfSlicesForResolution(activeResolution: Vector3, activePlane: OrthoView) { + const thirdDimenstionIndex = Dimensions.thirdDimensionForPlane(activePlane); + const numberOfSlices = activeResolution[thirdDimenstionIndex]; + return numberOfSlices; +} diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js 
b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js index 1cc17d67218..4837fec22e5 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js @@ -137,14 +137,13 @@ export class DataBucket { } getBoundingBox(): BoundingBoxType { - const min = bucketPositionToGlobalAddress( - this.zoomedAddress, - getResolutions(Store.getState().dataset), - ); + const resolutions = getResolutions(Store.getState().dataset); + const min = bucketPositionToGlobalAddress(this.zoomedAddress, resolutions); + const bucketResolution = resolutions[this.zoomedAddress[3]]; const max = [ - min[0] + Constants.BUCKET_WIDTH, - min[1] + Constants.BUCKET_WIDTH, - min[2] + Constants.BUCKET_WIDTH, + min[0] + Constants.BUCKET_WIDTH * bucketResolution[0], + min[1] + Constants.BUCKET_WIDTH * bucketResolution[1], + min[2] + Constants.BUCKET_WIDTH * bucketResolution[2], ]; return { min, max }; } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index a9fd8a1c790..1584a6785ce 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -25,7 +25,6 @@ import PullQueue from "oxalis/model/bucket_data_handling/pullqueue"; import PushQueue from "oxalis/model/bucket_data_handling/pushqueue"; import Store, { type Mapping } from "oxalis/store"; import TemporalBucketManager from "oxalis/model/bucket_data_handling/temporal_bucket_manager"; -import { finishAnnotationStrokeAction } from "oxalis/model/actions/volumetracing_actions"; import type { DimensionMap } from "oxalis/model/dimensions"; import constants, { type Vector2, @@ -411,7 +410,7 @@ class DataCube { if (seedBucket.type === "null") { return; } - const seedVoxelIndex = this.getVoxelIndex(seedVoxel); + const seedVoxelIndex = this.getVoxelIndex(seedVoxel, zoomStep); const sourceCellId = seedBucket.getOrCreateData()[seedVoxelIndex]; if (sourceCellId === cellId) { return; @@ -477,12 +476,10 @@ class DataCube { } bucketsToAddToPushQueue.add(currentBucket); } - Store.dispatch(finishAnnotationStrokeAction()); for (const bucket of bucketsToAddToPushQueue.values()) { this.pushQueue.insert(bucket); bucket.trigger("bucketLabeled"); } - this.triggerPushQueue(); } setBucketData(zoomedAddress: Vector4, data: Uint8Array) { diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index a32db3cfafb..086e1602d08 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -31,11 +31,13 @@ import type { VolumeTracing, Flycam } from "oxalis/store"; import { enforceVolumeTracing, isVolumeTraceToolDisallowed, + getNumberOfSlicesForResolution, } from "oxalis/model/accessors/volumetracing_accessor"; import { getPosition, getRotation, getCurrentResolution, + getRequestLogZoomStep, } from "oxalis/model/accessors/flycam_accessor"; import { type BoundingBoxType, @@ -107,7 +109,13 @@ export function* editVolumeLayerAsync(): Generator { const currentLayer = yield* call(createVolumeLayer, startEditingAction.planeId); const initialViewport = yield* select(state => state.viewModeData.plane.activeViewport); - const activeViewportBounding = yield* call(getBoundingsFromPosition, initialViewport); + let activeResolution = yield* select(state => 
getCurrentResolution(state)); + let numberOfSlices = getNumberOfSlicesForResolution(activeResolution, initialViewport); + const activeViewportBounding = yield* call( + getBoundingsFromPosition, + initialViewport, + numberOfSlices, + ); if (activeTool === VolumeToolEnum.BRUSH) { const currentResolution = yield* select(state => getCurrentResolution(state)); yield* call( @@ -143,7 +151,13 @@ export function* editVolumeLayerAsync(): Generator { currentLayer.addContour(addToLayerAction.position); } if (activeTool === VolumeToolEnum.BRUSH) { - const currentViewportBounding = yield* call(getBoundingsFromPosition, activeViewport); + activeResolution = yield* select(state => getCurrentResolution(state)); + numberOfSlices = getNumberOfSlicesForResolution(activeResolution, initialViewport); + const currentViewportBounding = yield* call( + getBoundingsFromPosition, + activeViewport, + numberOfSlices, + ); const currentResolution = yield* select(state => getCurrentResolution(state)); yield* call( labelWithIterator, @@ -162,14 +176,21 @@ export function* editVolumeLayerAsync(): Generator { } } -function* getBoundingsFromPosition(currentViewport: OrthoView): Saga { +function* getBoundingsFromPosition( + currentViewport: OrthoView, + numberOfSlices: number, +): Saga { const position = Dimensions.roundCoordinate(yield* select(state => getPosition(state.flycam))); const halfViewportExtents = yield* call(getHalfViewportExtents, currentViewport); const halfViewportExtentsUVW = Dimensions.transDim([...halfViewportExtents, 0], currentViewport); - return { + const thirdDimension = Dimensions.thirdDimensionForPlane(currentViewport); + const currentViewportBounding = { min: V3.sub(position, halfViewportExtentsUVW), max: V3.add(position, halfViewportExtentsUVW), }; + currentViewportBounding.max[thirdDimension] = + currentViewportBounding.min[thirdDimension] + numberOfSlices; + return currentViewportBounding; } function* createVolumeLayer(planeId: OrthoView): Saga { @@ -273,7 +294,6 @@ export function* floodFill(): Saga { const segmentationLayer = Model.getSegmentationLayer(); const { cube } = segmentationLayer; const seedVoxel = Dimensions.roundCoordinate(position); - const currentViewportBounding = yield* call(getBoundingsFromPosition, planeId); const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); const dimensionIndices = Dimensions.getIndices(planeId); const get3DAddress = (voxel: Vector2) => { @@ -289,6 +309,9 @@ export function* floodFill(): Saga { voxel[dimensionIndices[0]], voxel[dimensionIndices[1]], ]; + const activeResolution = yield* select(state => getCurrentResolution(state)); + const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); + const currentViewportBounding = yield* call(getBoundingsFromPosition, planeId, 1); cube.floodFill( seedVoxel, activeCellId, @@ -296,7 +319,10 @@ export function* floodFill(): Saga { get2DAddress, dimensionIndices, currentViewportBounding, + activeZoomStep, ); + yield* put(finishAnnotationStrokeAction()); + cube.triggerPushQueue(); } } diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js index b87094b5784..d31b9018ecf 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js @@ -15,7 +15,10 @@ import Constants, { type VolumeTool, } from "oxalis/constants"; import { V3 } from "libs/mjs"; -import { enforceVolumeTracing } from 
"oxalis/model/accessors/volumetracing_accessor"; +import { + enforceVolumeTracing, + getNumberOfSlicesForResolution, +} from "oxalis/model/accessors/volumetracing_accessor"; import { getBaseVoxelFactors } from "oxalis/model/scaleinfo"; import Dimensions from "oxalis/model/dimensions"; import Drawing from "libs/drawing"; @@ -278,7 +281,7 @@ class VolumeLayer { this.fillOutsideArea(map, width, height); this.drawOutlineVoxels(setMap, mode); - const numberOfSlices = this.getNumberOfSlicesForResolution(activeResolution); + const numberOfSlices = getNumberOfSlicesForResolution(activeResolution, this.plane); const thirdDimensionIndex = Dimensions.thirdDimensionForPlane(this.plane); const iterator = new VoxelIterator( @@ -323,14 +326,8 @@ class VolumeLayer { }; Drawing.fillCircle(radius, radius, radius, scaleX, scaleY, setMap); - const numberOfSlices = this.getNumberOfSlicesForResolution(activeResolution); + const numberOfSlices = getNumberOfSlicesForResolution(activeResolution, this.plane); const thirdDimensionIndex = Dimensions.thirdDimensionForPlane(this.plane); - if ( - boundings != null && - boundings.max[thirdDimensionIndex] - boundings.min[thirdDimensionIndex] < numberOfSlices - 1 - ) { - boundings.max[thirdDimensionIndex] = boundings.min[thirdDimensionIndex] + numberOfSlices - 1; - } const iterator = new VoxelIterator( map, @@ -426,12 +423,6 @@ class VolumeLayer { return this.get3DCoordinate([cx, cy]); } - - getNumberOfSlicesForResolution(activeResolution: Vector3) { - const thirdDimenstionIndex = Dimensions.thirdDimensionForPlane(this.plane); - const numberOfSlices = activeResolution[thirdDimenstionIndex]; - return numberOfSlices; - } } export default VolumeLayer; From bd335e138fa1b27c60e7a1459f75b96e417b6eef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 9 Sep 2020 19:28:29 +0200 Subject: [PATCH 032/121] WIP: applying flood fill to all resolutions --- frontend/javascripts/libs/utils.js | 14 ++ .../model/bucket_data_handling/bucket.js | 1 + .../model/bucket_data_handling/data_cube.js | 174 +++++++++++++++++- .../oxalis/model/sagas/save_saga.js | 1 - .../oxalis/model/sagas/volumetracing_saga.js | 36 +++- 5 files changed, 212 insertions(+), 14 deletions(-) diff --git a/frontend/javascripts/libs/utils.js b/frontend/javascripts/libs/utils.js index dff6048f38b..d19a18fc557 100644 --- a/frontend/javascripts/libs/utils.js +++ b/frontend/javascripts/libs/utils.js @@ -34,6 +34,20 @@ export function map3(fn: (A, number) => B, tuple: [A, A, A]): [B, B, B] { return [fn(x, 0), fn(y, 1), fn(z, 2)]; } +export function iterateThroughBounds( + minVoxel: Vector3, + maxVoxel: Vector3, + fn: (number, number, number) => void, +): void { + for (let x = minVoxel[0]; x < maxVoxel[0]; x++) { + for (let y = minVoxel[1]; y < maxVoxel[1]; y++) { + for (let z = minVoxel[2]; z < maxVoxel[2]; z++) { + fn(x, y, z); + } + } + } +} + function swap(arr, a, b) { let tmp; if (arr[a] > arr[b]) { diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js index 4837fec22e5..6f6216e7946 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js @@ -225,6 +225,7 @@ export class DataBucket { throttledTriggerLabeled = _.throttle(() => this.trigger("bucketLabeled"), 10); markAndAddBucketForUndo() { + this.dirty = true; if (!bucketsAlreadyInUndoState.has(this)) { bucketsAlreadyInUndoState.add(this); 
Store.dispatch(addBucketToUndoAction(this.zoomedAddress, this.getCopyOfData())); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 1584a6785ce..9105c3761a0 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -33,7 +33,7 @@ import constants, { type BoundingBoxType, } from "oxalis/constants"; import { type ElementClass } from "admin/api_flow_types"; -import { areBoundingBoxesOverlappingOrTouching } from "libs/utils"; +import { areBoundingBoxesOverlappingOrTouching, map3, iterateThroughBounds } from "libs/utils"; class CubeEntry { data: Map; boundary: Vector3; @@ -44,6 +44,8 @@ class CubeEntry { } } +export type LabeledVoxelsMap = Map; + class DataCube { MAXIMUM_BUCKET_COUNT = 5000; ZOOM_STEP_COUNT: number; @@ -372,6 +374,12 @@ class DataCube { if (shouldUpdateVoxel) { const labelFunc = (data: BucketDataArray): void => { + if (address[3] === 1) + console.log( + `labeled in bucket ${bucket.zoomedAddress.toString()}, voxel ${voxel.toString()}, voxelIndex ${voxelIndex}, with modulo ${voxel.map( + a => Math.floor(a / 2) % 32, + )}`, + ); data[voxelIndex] = label; }; bucket.label(labelFunc); @@ -394,26 +402,27 @@ class DataCube { dimensionIndices: DimensionMap, viewportBoundings: BoundingBoxType, zoomStep: number = 0, - ) { + ): ?LabeledVoxelsMap { // This flood-fill algorithm works in two nested levels and uses a list of buckets to flood fill. // On the inner level a bucket is flood-filled and if the iteration of the buckets data // reaches an neighbour bucket, this bucket is added to this list of buckets to flood fill. // The outer level simply iterates over all buckets in the list and triggers the bucket-wise flood fill. + // Additionally a map is created that saves all labeled voxels for each bucket. This map is returned at the end. // - // Note: It is possible that a bucket is multiple times added to the address. This is intended + // Note: It is possible that a bucket is multiple times added to the list of buckets. This is intended // because a border of the "neighbour volume shape" might leave the neighbour bucket and enter is somewhere else. // If it would not be possible to have the same neighbour bucket in the list multiple times, // not all of the target area in the neighbour bucket might be filled. - const bucketsToAddToPushQueue = new Set(); + const bucketsWithLabeledVoxelsMap: LabeledVoxelsMap = new Map(); const seedBucketAddress = this.positionToZoomedAddress(seedVoxel, zoomStep); const seedBucket = this.getOrCreateBucket(seedBucketAddress); if (seedBucket.type === "null") { - return; + return null; } const seedVoxelIndex = this.getVoxelIndex(seedVoxel, zoomStep); const sourceCellId = seedBucket.getOrCreateData()[seedVoxelIndex]; if (sourceCellId === cellId) { - return; + return null; } const bucketsToFill: Array<[DataBucket, Vector3]> = [ [seedBucket, this.getVoxelOffset(seedVoxel, zoomStep)], @@ -437,8 +446,18 @@ class DataCube { currentBucket.markAndAddBucketForUndo(); // Mark the initial voxel. bucketData[initialVoxelIndex] = cellId; + // Create an array saving the labeled voxel of the current slice for the current bucket, if there isn't already one. 
+ const currentLabeledVoxelMap = + bucketsWithLabeledVoxelsMap.get(currentBucket) || + new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); + const markVoxelOfSliceAsLabeled = ([firstCoord, secondCoord]) => { + currentLabeledVoxelMap[firstCoord * constants.BUCKET_WIDTH + secondCoord] = 1; + }; + // Use a VoxelNeighborStack2D to iterate over the bucket in 2d and using bucket-local addresses and not global addresses. - const neighbourVoxelStack = new VoxelNeighborStack2D(get2DAddress(initialVoxelInBucket)); + const initialVoxelInSlice = get2DAddress(initialVoxelInBucket); + markVoxelOfSliceAsLabeled(initialVoxelInSlice); + const neighbourVoxelStack = new VoxelNeighborStack2D(initialVoxelInSlice); // Iterating over all neighbours from the initialAddress. while (!neighbourVoxelStack.isEmpty()) { const neighbours = neighbourVoxelStack.popVoxelAndGetNeighbors(); @@ -469,19 +488,156 @@ class DataCube { const neighbourVoxelIndex = this.getVoxelIndex(neighbourVoxel3D, zoomStep); if (bucketData[neighbourVoxelIndex] === sourceCellId) { bucketData[neighbourVoxelIndex] = cellId; + markVoxelOfSliceAsLabeled(neighbourVoxel); neighbourVoxelStack.pushVoxel(neighbourVoxel); } } } } - bucketsToAddToPushQueue.add(currentBucket); + bucketsWithLabeledVoxelsMap.set(currentBucket, currentLabeledVoxelMap); + } + for (const bucket of bucketsWithLabeledVoxelsMap.keys()) { + this.pushQueue.insert(bucket); + bucket.trigger("bucketLabeled"); + } + return bucketsWithLabeledVoxelsMap; + } + + applyLabeledVoxelMapToResolution( + labeledVoxelMap: LabeledVoxelsMap, + sourceResolution: Vector3, + sourceZoomStep: number, + goalResolution: Vector3, + goalZoomStep: number, + cellId: number, + thirdDimension: number, + get3DAddress: Vector2 => Vector3, + ) { + const labeledBuckets = new Set(); + const voxelsToLabelInEachDirection = map3( + (sourceVal, index) => Math.ceil(sourceVal / goalResolution[index]), + sourceResolution, + ); + const voxelToGoalResolution = voxelInBucket => + map3( + (value, index) => Math.floor(value * (sourceResolution[index] / goalResolution[index])), + voxelInBucket, + ); + for (const [labeledBucket, voxelMap] of labeledVoxelMap) { + const bucketsOfGoalResolution = this.getBucketsContainingBucket( + labeledBucket, + sourceResolution, + goalResolution, + goalZoomStep, + ); + if (!bucketsOfGoalResolution) { + continue; + } + const labelVoxelInGoalResolution = (x, y, z) => { + const xBucket = Math.floor(x / constants.BUCKET_WIDTH); + const yBucket = Math.floor(y / constants.BUCKET_WIDTH); + const zBucket = Math.floor(z / constants.BUCKET_WIDTH); + x %= constants.BUCKET_WIDTH; + y %= constants.BUCKET_WIDTH; + z %= constants.BUCKET_WIDTH; + const voxelIndex = this.getVoxelIndexByVoxelOffset([x, y, z]); + const bucket = bucketsOfGoalResolution[xBucket][yBucket][zBucket]; + const bucketData = bucket.getOrCreateData(); + bucketData[voxelIndex] = cellId; + labeledBuckets.add(bucket); + console.log( + `labeled in bucket ${bucket.zoomedAddress.toString()}, voxel ${[ + x, + y, + z, + ].toString()}, voxelIndex ${voxelIndex}`, + ); + bucket.markAndAddBucketForUndo(); + }; + for (let x = 0; x < constants.BUCKET_WIDTH; x++) { + for (let y = 0; y < constants.BUCKET_WIDTH; y++) { + if (voxelMap[x * constants.BUCKET_WIDTH + y] === 1) { + // TODO: Label the other buckets + const voxelInBucket = get3DAddress([x, y]); + debugger; + const voxelInGoalResolution = voxelToGoalResolution(voxelInBucket); + // The value of the third dimension was already adjusted by the get3DAddress call. Thus we rewrite this value. 
+ voxelInGoalResolution[thirdDimension] = voxelInBucket[thirdDimension]; + const maxVoxelBoundingInGoalResolution = [ + voxelInGoalResolution[0] + voxelsToLabelInEachDirection[0], + voxelInGoalResolution[1] + voxelsToLabelInEachDirection[1], + voxelInGoalResolution[2] + voxelsToLabelInEachDirection[2], + ]; + iterateThroughBounds( + voxelInGoalResolution, + maxVoxelBoundingInGoalResolution, + labelVoxelInGoalResolution, + ); + } + } + } } - for (const bucket of bucketsToAddToPushQueue.values()) { + for (const bucket of labeledBuckets.keys()) { + console.log(`labeled in bucket ${bucket.zoomedAddress.toString()}`); this.pushQueue.insert(bucket); bucket.trigger("bucketLabeled"); } } + getBucketsContainingBucket( + bucket: DataBucket, + bucketResolution: Vector3, + goalResolution: Vector3, + zoomStep: number, + ): ?Array>> { + const mapToGoalResolution = (value, index) => + Math.floor(value * (bucketResolution[index] / goalResolution[index])); + const bucketMin = [bucket.zoomedAddress[0], bucket.zoomedAddress[1], bucket.zoomedAddress[2]]; + const bucketMax = [bucketMin[0] + 1, bucketMin[1] + 1, bucketMin[2] + 1]; + // If the buckets zoomStep is smaller than the wanted zoom step, + // then the bucket is completely contained by a bucket of the higher goalResolution. + const bucketMinInOtherResolution = map3(mapToGoalResolution, bucketMin); + const bucketMaxInOtherResolution = map3(mapToGoalResolution, bucketMax); + const bucketsInGoalResolution = []; + // Iteration over all three dimensions until all buckets of the goal resolution + // that overlap with the given bucket are added to bucketsInGoalResolution. + // Note: The bucketsInGoalResolution.length === 0 check ensures that the bucket containing the given bucket + // will be added to the array when the goalResolution is lower than the buckets resolution. + for ( + let x = bucketMinInOtherResolution[0]; + x < bucketMaxInOtherResolution[0] || bucketsInGoalResolution.length === 0; + x++ + ) { + const bucketsInYDirection = []; + for ( + let y = bucketMinInOtherResolution[1]; + y < bucketMaxInOtherResolution[1] || bucketsInYDirection.length === 0; + y++ + ) { + const bucketsInZDirection = []; + for ( + let z = bucketMinInOtherResolution[2]; + z < bucketMaxInOtherResolution[2] || bucketsInZDirection.length === 0; + z++ + ) { + const bucketsZoomedAddress = [x, y, z, zoomStep]; + const currentBucketInGoalResolution = this.getOrCreateBucket(bucketsZoomedAddress); + if (currentBucketInGoalResolution.type === "null") { + console.warn( + `The bucket at ${bucket.zoomedAddress.toString()} has not matching bucket` + + ` in resolution ${goalResolution.toString()}. 
The buckets address is ${bucketsZoomedAddress.toString()}`, + ); + return null; + } + bucketsInZDirection.push(currentBucketInGoalResolution); + } + bucketsInYDirection.push(bucketsInZDirection); + } + bucketsInGoalResolution.push(bucketsInYDirection); + } + return bucketsInGoalResolution; + } + setBucketData(zoomedAddress: Vector4, data: Uint8Array) { const bucket = this.getOrCreateBucket(zoomedAddress); if (bucket.type === "null") { diff --git a/frontend/javascripts/oxalis/model/sagas/save_saga.js b/frontend/javascripts/oxalis/model/sagas/save_saga.js index e75eef26a63..4501597fa5c 100644 --- a/frontend/javascripts/oxalis/model/sagas/save_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/save_saga.js @@ -365,7 +365,6 @@ function getRetryWaitTime(retryCount: number) { export function* sendRequestToServer(tracingType: "skeleton" | "volume"): Saga { const fullSaveQueue = yield* select(state => state.save.queue[tracingType]); const saveQueue = sliceAppropriateBatchCount(fullSaveQueue); - let compactedSaveQueue = compactSaveQueue(saveQueue); const { version, type, tracingId } = yield* select(state => Maybe.fromNullable(state.tracing[tracingType]).get(), diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 086e1602d08..0c229f4b74f 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -39,7 +39,8 @@ import { getCurrentResolution, getRequestLogZoomStep, } from "oxalis/model/accessors/flycam_accessor"; -import { +import { getResolutions } from "oxalis/model/accessors/dataset_accessor"; +import Constants, { type BoundingBoxType, type ContourMode, ContourModeEnum, @@ -296,8 +297,9 @@ export function* floodFill(): Saga { const seedVoxel = Dimensions.roundCoordinate(position); const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); const dimensionIndices = Dimensions.getIndices(planeId); + let thirdDimensionValue = seedVoxel[dimensionIndices[2]]; const get3DAddress = (voxel: Vector2) => { - const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], seedVoxel[dimensionIndices[2]]]; + const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], thirdDimensionValue]; const orderedVoxelWithThirdDimension = [ unorderedVoxelWithThirdDimension[dimensionIndices[0]], unorderedVoxelWithThirdDimension[dimensionIndices[1]], @@ -309,10 +311,9 @@ export function* floodFill(): Saga { voxel[dimensionIndices[0]], voxel[dimensionIndices[1]], ]; - const activeResolution = yield* select(state => getCurrentResolution(state)); const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); const currentViewportBounding = yield* call(getBoundingsFromPosition, planeId, 1); - cube.floodFill( + const bucketsWithLabeledVoxelMap = cube.floodFill( seedVoxel, activeCellId, get3DAddress, @@ -321,6 +322,33 @@ export function* floodFill(): Saga { currentViewportBounding, activeZoomStep, ); + if (!bucketsWithLabeledVoxelMap) { + continue; + } + const allResolutions = yield* select(state => getResolutions(state.dataset)); + const activeResolution = allResolutions[activeZoomStep]; + for (let zoomStep = 0; zoomStep < 2; zoomStep++) { + if (zoomStep === activeZoomStep) { + continue; + } + const goalResolution = allResolutions[zoomStep]; + // After flood filling on the current resolution the labeled voxels are down and upscaled through all resolutions. 
+ // For this a get3DAddress function is needed that always has the third dimension within the current bucket ([0,32]). + // This depends on the goalResolution. + thirdDimensionValue = + (seedVoxel[dimensionIndices[2]] / goalResolution[dimensionIndices[2]]) % + Constants.BUCKET_WIDTH; + cube.applyLabeledVoxelMapToResolution( + bucketsWithLabeledVoxelMap, + activeResolution, + activeZoomStep, + goalResolution, + zoomStep, + activeCellId, + dimensionIndices[2], + get3DAddress, + ); + } yield* put(finishAnnotationStrokeAction()); cube.triggerPushQueue(); } From 4d1ece0aa9a950064e05f919b685fa198b2db32f Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 10 Sep 2020 15:15:54 +0200 Subject: [PATCH 033/121] [WIP] track mags in volumetracing object --- app/models/annotation/AnnotationService.scala | 1 + frontend/javascripts/oxalis/model_initialization.js | 11 +++++++---- .../tracings/volume/VolumeTracingDownsampling.scala | 1 + .../tracings/volume/VolumeTracingService.scala | 4 ++++ webknossos-tracingstore/proto/VolumeTracing.proto | 1 + 5 files changed, 14 insertions(+), 4 deletions(-) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index bba864adaa0..45deaf6f3a5 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -119,6 +119,7 @@ class AnnotationService @Inject()(annotationInformationProvider: AnnotationInfor 0, VolumeTracingDefaults.zoomLevel ) + //TODO: add mag list def createTracings( dataSet: DataSet, diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index fae6606e07a..91483b6db5f 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -50,6 +50,7 @@ import { initializeSkeletonTracingAction, } from "oxalis/model/actions/skeletontracing_actions"; import { setDatasetAction } from "oxalis/model/actions/dataset_actions"; +import { getResolutions } from "oxalis/model/accessors/dataset_accessor"; import { setPositionAction, setZoomStepAction, @@ -442,9 +443,11 @@ function setupLayerForVolumeTracing( // This method adds/merges the segmentation layers of the tracing into the dataset layers let layers = _.clone(dataset.dataSource.dataLayers); const segmentationLayer = layers.find(layer => layer.category === "segmentation"); - if (!segmentationLayer) { - Toast.error(messages["dataset.segmentationlayer_not_existing"]); - throw HANDLED_ERROR; + let resolutions; + if (segmentationLayer) { + resolutions = segmentationLayer.resolutions + } else { + resolutions = getResolutions(dataset); } // The tracing always contains the layer information for the user segmentation. // Two possible cases: @@ -463,7 +466,7 @@ function setupLayerForVolumeTracing( largestSegmentId: tracing.largestSegmentId, boundingBox: convertBoundariesToBoundingBox(boundaries), // volume tracing can only be done for the first resolution - resolutions: segmentationLayer.resolutions, + resolutions, mappings: fallbackLayer != null && fallbackLayer.mappings != null ? 
fallbackLayer.mappings : [], // remember the name of the original layer, used to request mappings fallbackLayer: tracing.fallbackLayer, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index 66af7043aee..97d9412b210 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -184,6 +184,7 @@ trait VolumeTracingDownsampling items.groupBy(i => i).mapValues(_.size).maxBy(_._2)._1 private def getRequiredMags(tracing: VolumeTracing): Fox[Seq[Point3D]] = + // TODO: if tracing has fallback layer, use only mags present in that fallback layer for { dataSource: DataSourceLike <- tracingStoreWkRpcClient.getDataSource(tracing.organizationName, tracing.dataSetName) mags = dataSource.dataLayers.flatMap(_.resolutions).distinct.sortBy(_.maxDim).filterNot(_.maxDim == 1) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 5e1b92c9497..cb73fe00715 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -193,6 +193,8 @@ class VolumeTracingService @Inject()( val destinationDataLayer = volumeTracingLayer(tracingId, tracing) mergedVolume.saveTo(destinationDataLayer, tracing.version, toCache = false) + + //TODO: save mag list to tracing } class MergedVolume(elementClass: ElementClass) extends DataConverter { @@ -280,6 +282,8 @@ class VolumeTracingService @Inject()( } } } + + //TODO: save mag list to tracing } private def isAllZero(data: Array[Byte]): Boolean = diff --git a/webknossos-tracingstore/proto/VolumeTracing.proto b/webknossos-tracingstore/proto/VolumeTracing.proto index 5208b9c25ba..38b2f7694ac 100644 --- a/webknossos-tracingstore/proto/VolumeTracing.proto +++ b/webknossos-tracingstore/proto/VolumeTracing.proto @@ -27,6 +27,7 @@ message VolumeTracing { optional BoundingBox userBoundingBox = 12; repeated NamedBoundingBox userBoundingBoxes = 13; optional string organizationName = 14; + repeated Point3D mags = 15; } message VolumeTracingOpt { From e511e652502ad52440cc333fe9f284262f7d8fa8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 10 Sep 2020 15:35:31 +0200 Subject: [PATCH 034/121] fix calculating error during downsampling --- .../model/bucket_data_handling/data_cube.js | 73 ++++++++++++------- .../oxalis/model/sagas/volumetracing_saga.js | 11 +-- 2 files changed, 49 insertions(+), 35 deletions(-) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 9105c3761a0..38c3f5bd74d 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -376,9 +376,9 @@ class DataCube { const labelFunc = (data: BucketDataArray): void => { if (address[3] === 1) console.log( - `labeled in bucket 
${bucket.zoomedAddress.toString()}, voxel ${voxel.toString()}, voxelIndex ${voxelIndex}, with modulo ${voxel.map( - a => Math.floor(a / 2) % 32, - )}`, + `labeled in bucket ${bucket.zoomedAddress.toString()}, voxel ${voxel.toString()}, voxelIndex ${voxelIndex}, with modulo ${voxel + .map(a => Math.floor(a / 2) % 32) + .toString()}`, ); data[voxelIndex] = label; }; @@ -451,6 +451,11 @@ class DataCube { bucketsWithLabeledVoxelsMap.get(currentBucket) || new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); const markVoxelOfSliceAsLabeled = ([firstCoord, secondCoord]) => { + console.log( + `Flood filled ${currentBucket.zoomedAddress.toString()} at ${firstCoord},${secondCoord}, index ${firstCoord * + constants.BUCKET_WIDTH + + secondCoord}`, + ); currentLabeledVoxelMap[firstCoord * constants.BUCKET_WIDTH + secondCoord] = 1; }; @@ -514,15 +519,12 @@ class DataCube { get3DAddress: Vector2 => Vector3, ) { const labeledBuckets = new Set(); - const voxelsToLabelInEachDirection = map3( - (sourceVal, index) => Math.ceil(sourceVal / goalResolution[index]), - sourceResolution, - ); + const isDownsampling = goalZoomStep > sourceZoomStep; + const scaleToSource = map3((val, index) => val / sourceResolution[index], goalResolution); + const scaleToGoal = map3((val, index) => val / goalResolution[index], sourceResolution); + const voxelsToLabelInEachDirection = map3(scaleValue => Math.ceil(scaleValue), scaleToGoal); const voxelToGoalResolution = voxelInBucket => - map3( - (value, index) => Math.floor(value * (sourceResolution[index] / goalResolution[index])), - voxelInBucket, - ); + map3((value, index) => Math.floor(value * scaleToGoal[index]), voxelInBucket); for (const [labeledBucket, voxelMap] of labeledVoxelMap) { const bucketsOfGoalResolution = this.getBucketsContainingBucket( labeledBucket, @@ -534,14 +536,32 @@ class DataCube { continue; } const labelVoxelInGoalResolution = (x, y, z) => { - const xBucket = Math.floor(x / constants.BUCKET_WIDTH); - const yBucket = Math.floor(y / constants.BUCKET_WIDTH); - const zBucket = Math.floor(z / constants.BUCKET_WIDTH); - x %= constants.BUCKET_WIDTH; - y %= constants.BUCKET_WIDTH; - z %= constants.BUCKET_WIDTH; + let bucket = bucketsOfGoalResolution[0][0][0]; + if (isDownsampling) { + // If the annotation given by the voxelMap is downsampled, the labeledBucket can only be within one bucket in the lower resolution. + // It is possible that the labeledBucket does not have the same global origin as the bucket of the lower resolution. Thus an additional offset is needed. + const offset = [0, 0, 0]; + for (let index = 0; index < 3; index++) { + // Scaling the zoomed address of the bucket up to the source resolution and calculate the offset. + const upscaledZoomAddressPart = bucket.zoomedAddress[index] * scaleToSource[index]; + offset[index] = labeledBucket.zoomedAddress[index] - upscaledZoomAddressPart; + offset[index] = offset[index] * constants.BUCKET_WIDTH * scaleToGoal[index]; + } + x += offset[0]; + y += offset[1]; + z += offset[2]; + } else { + // If this method upsamples the labeled voxels, the voxel can be within one out of many buckets. + // As the x, y, z values are already scaled up, the bucket they belong to is calculated and x, y, z are shrunk to be within that bucket.
+ const xBucket = Math.floor(x / constants.BUCKET_WIDTH); + const yBucket = Math.floor(y / constants.BUCKET_WIDTH); + const zBucket = Math.floor(z / constants.BUCKET_WIDTH); + bucket = bucketsOfGoalResolution[xBucket][yBucket][zBucket]; + x %= constants.BUCKET_WIDTH; + y %= constants.BUCKET_WIDTH; + z %= constants.BUCKET_WIDTH; + } const voxelIndex = this.getVoxelIndexByVoxelOffset([x, y, z]); - const bucket = bucketsOfGoalResolution[xBucket][yBucket][zBucket]; const bucketData = bucket.getOrCreateData(); bucketData[voxelIndex] = cellId; labeledBuckets.add(bucket); @@ -554,23 +574,24 @@ class DataCube { ); bucket.markAndAddBucketForUndo(); }; - for (let x = 0; x < constants.BUCKET_WIDTH; x++) { - for (let y = 0; y < constants.BUCKET_WIDTH; y++) { - if (voxelMap[x * constants.BUCKET_WIDTH + y] === 1) { + for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) { + if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { // TODO: Label the other buckets - const voxelInBucket = get3DAddress([x, y]); debugger; + const voxelInBucket = get3DAddress([firstDim, secondDim]); + // As the iteration is only over the first two dimensions the third dimension is not within the labeledBucket. + // Here we adjust the third dimension to be with the source labeledBucket. + voxelInBucket[thirdDimension] %= constants.BUCKET_WIDTH; const voxelInGoalResolution = voxelToGoalResolution(voxelInBucket); - // The value of the third dimension was already adjusted by the get3DAddress call. Thus we rewrite this value. - voxelInGoalResolution[thirdDimension] = voxelInBucket[thirdDimension]; - const maxVoxelBoundingInGoalResolution = [ + const maxVoxelBoundingsInGoalResolution = [ voxelInGoalResolution[0] + voxelsToLabelInEachDirection[0], voxelInGoalResolution[1] + voxelsToLabelInEachDirection[1], voxelInGoalResolution[2] + voxelsToLabelInEachDirection[2], ]; iterateThroughBounds( voxelInGoalResolution, - maxVoxelBoundingInGoalResolution, + maxVoxelBoundingsInGoalResolution, labelVoxelInGoalResolution, ); } diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 0c229f4b74f..7b0c7cafe6d 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -297,9 +297,8 @@ export function* floodFill(): Saga { const seedVoxel = Dimensions.roundCoordinate(position); const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); const dimensionIndices = Dimensions.getIndices(planeId); - let thirdDimensionValue = seedVoxel[dimensionIndices[2]]; const get3DAddress = (voxel: Vector2) => { - const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], thirdDimensionValue]; + const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], seedVoxel[dimensionIndices[2]]]; const orderedVoxelWithThirdDimension = [ unorderedVoxelWithThirdDimension[dimensionIndices[0]], unorderedVoxelWithThirdDimension[dimensionIndices[1]], @@ -327,17 +326,11 @@ export function* floodFill(): Saga { } const allResolutions = yield* select(state => getResolutions(state.dataset)); const activeResolution = allResolutions[activeZoomStep]; - for (let zoomStep = 0; zoomStep < 2; zoomStep++) { + for (let zoomStep = 0; zoomStep < allResolutions.length; zoomStep++) { if (zoomStep === activeZoomStep) { continue; } const goalResolution = allResolutions[zoomStep]; - // 
After flood filling on the current resolution the labeled voxels are down and upscaled through all resolutions. - // For this a get3DAddress function is needed that always has the third dimension within the current bucket ([0,32]). - // This depends on the goalResolution. - thirdDimensionValue = - (seedVoxel[dimensionIndices[2]] / goalResolution[dimensionIndices[2]]) % - Constants.BUCKET_WIDTH; cube.applyLabeledVoxelMapToResolution( bucketsWithLabeledVoxelMap, activeResolution, From 3cdabfbd998a26af64227962499fa29c2397f460 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 10 Sep 2020 18:11:08 +0200 Subject: [PATCH 035/121] fix flood fill for all source resolutions --- .../model/bucket_data_handling/bucket.js | 13 ++++----- .../model/bucket_data_handling/data_cube.js | 29 ++++++++----------- .../oxalis/model/sagas/volumetracing_saga.js | 7 ++++- 3 files changed, 24 insertions(+), 25 deletions(-) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js index 6f6216e7946..5dfdf9cb396 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js @@ -191,17 +191,16 @@ export class DataBucket { const dimension = dimensionIndices[dimensionIndex]; if (voxel[dimensionIndex] < 0) { isVoxelOutside = true; - neighbourBucketAddress[dimension] -= Math.ceil( - -voxel[dimensionIndex] / Constants.BUCKET_WIDTH, - ); + const offset = Math.ceil(-voxel[dimensionIndex] / Constants.BUCKET_WIDTH); + neighbourBucketAddress[dimension] -= offset; // Add a full bucket width to the coordinate below 0 to avoid error's // caused by the modulo operation used in getVoxelOffset. - adjustedVoxel[dimensionIndex] += Constants.BUCKET_WIDTH; + adjustedVoxel[dimensionIndex] += Constants.BUCKET_WIDTH * offset; } else if (voxel[dimensionIndex] >= Constants.BUCKET_WIDTH) { isVoxelOutside = true; - neighbourBucketAddress[dimension] += Math.floor( - voxel[dimensionIndex] / Constants.BUCKET_WIDTH, - ); + const offset = Math.floor(voxel[dimensionIndex] / Constants.BUCKET_WIDTH); + neighbourBucketAddress[dimension] += offset; + adjustedVoxel[dimensionIndex] -= Constants.BUCKET_WIDTH * offset; } } return { isVoxelOutside, neighbourBucketAddress, adjustedVoxel }; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 38c3f5bd74d..3ef118f3dc9 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -397,7 +397,7 @@ class DataCube { floodFill( seedVoxel: Vector3, cellId: number, - get3DAddress: (Vector2, Vector3) => Vector3, + get3DAddress: Vector2 => Vector3, get2DAddress: Vector3 => Vector2, dimensionIndices: DimensionMap, viewportBoundings: BoundingBoxType, @@ -437,7 +437,8 @@ class DataCube { continue; } const bucketData = currentBucket.getOrCreateData(); - const initialVoxelIndex = this.getVoxelIndex(initialVoxelInBucket, zoomStep); + // initialVoxelInBucket + const initialVoxelIndex = this.getVoxelIndexByVoxelOffset(initialVoxelInBucket); if (bucketData[initialVoxelIndex] !== sourceCellId) { // Ignoring neighbour buckets whose cellId at the initial voxel does not match the source cell id. 
continue; @@ -451,11 +452,6 @@ class DataCube { bucketsWithLabeledVoxelsMap.get(currentBucket) || new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); const markVoxelOfSliceAsLabeled = ([firstCoord, secondCoord]) => { - console.log( - `Flood filled ${currentBucket.zoomedAddress.toString()} at ${firstCoord},${secondCoord}, index ${firstCoord * - constants.BUCKET_WIDTH + - secondCoord}`, - ); currentLabeledVoxelMap[firstCoord * constants.BUCKET_WIDTH + secondCoord] = 1; }; @@ -478,25 +474,26 @@ class DataCube { neighbourBucketAddress, adjustedVoxel: adjustedNeighbourVoxel, } = currentBucket.is2DVoxelInsideBucket(neighbourVoxel, dimensionIndices, zoomStep); - const neighbourVoxel3D = get3DAddress(adjustedNeighbourVoxel, seedVoxel); + const neighbourVoxel3D = get3DAddress(adjustedNeighbourVoxel); if (isVoxelOutside) { // Add the bucket to the list of buckets to flood fill. const neighbourBucket = this.getOrCreateBucket(neighbourBucketAddress); if (neighbourBucket.type !== "null") { - bucketsToFill.push([ - neighbourBucket, - this.getVoxelOffset(neighbourVoxel3D, zoomStep), - ]); + bucketsToFill.push([neighbourBucket, neighbourVoxel3D]); } } else { // Label the current neighbour and add it to the neighbourVoxelStack to iterate over its neighbours. - const neighbourVoxelIndex = this.getVoxelIndex(neighbourVoxel3D, zoomStep); + const neighbourVoxelIndex = this.getVoxelIndexByVoxelOffset(neighbourVoxel3D); if (bucketData[neighbourVoxelIndex] === sourceCellId) { bucketData[neighbourVoxelIndex] = cellId; + console.log( + `labeled in bucket ${currentBucket.zoomedAddress.toString()}, voxel ${neighbourVoxel3D.toString()}, voxelIndex ${neighbourVoxelIndex}`, + ); markVoxelOfSliceAsLabeled(neighbourVoxel); neighbourVoxelStack.pushVoxel(neighbourVoxel); } } + debugger; } } bucketsWithLabeledVoxelsMap.set(currentBucket, currentLabeledVoxelMap); @@ -545,7 +542,7 @@ class DataCube { // Scaling the zoomed address of the bucket up to the source resolution and calculate the offset. const upscaledZoomAddressPart = bucket.zoomedAddress[index] * scaleToSource[index]; offset[index] = labeledBucket.zoomedAddress[index] - upscaledZoomAddressPart; - offset[index] = offset[index] * constants.BUCKET_WIDTH * scaleToGoal[index]; + offset[index] = Math.round(offset[index] * constants.BUCKET_WIDTH * scaleToGoal[index]); } x += offset[0]; y += offset[1]; @@ -561,6 +558,7 @@ class DataCube { y %= constants.BUCKET_WIDTH; z %= constants.BUCKET_WIDTH; } + bucket.markAndAddBucketForUndo(); const voxelIndex = this.getVoxelIndexByVoxelOffset([x, y, z]); const bucketData = bucket.getOrCreateData(); bucketData[voxelIndex] = cellId; @@ -572,13 +570,10 @@ class DataCube { z, ].toString()}, voxelIndex ${voxelIndex}`, ); - bucket.markAndAddBucketForUndo(); }; for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) { for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) { if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { - // TODO: Label the other buckets - debugger; const voxelInBucket = get3DAddress([firstDim, secondDim]); // As the iteration is only over the first two dimensions the third dimension is not within the labeledBucket. // Here we adjust the third dimension to be with the source labeledBucket. 
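The projection that applyLabeledVoxelMapToResolution performs between a source and a goal resolution can be summarized in isolation. What follows is a minimal, self-contained JavaScript sketch, separate from the patch itself: the labelVoxel callback and the inlined loops stand in for the real bucket lookup and the iterateThroughBounds helper, and the coordinates are assumed to be bucket-local.

// Sketch: map one labeled voxel from a source mag to a goal mag.
function projectLabeledVoxel(voxelInBucket, sourceResolution, goalResolution, labelVoxel) {
  // Per-axis scale factor from the source mag to the goal mag,
  // e.g. source [2, 2, 1] -> goal [1, 1, 1] gives [2, 2, 1].
  const scaleToGoal = sourceResolution.map((value, index) => value / goalResolution[index]);
  // Each source voxel covers ceil(scale) voxels per axis in the goal mag
  // (one voxel when downsampling, a small block when upsampling).
  const extent = scaleToGoal.map(Math.ceil);
  const min = voxelInBucket.map((value, index) => Math.floor(value * scaleToGoal[index]));
  for (let x = min[0]; x < min[0] + extent[0]; x++) {
    for (let y = min[1]; y < min[1] + extent[1]; y++) {
      for (let z = min[2]; z < min[2] + extent[2]; z++) {
        // The real applyLabeledVoxelMapToResolution additionally resolves which
        // goal-resolution bucket the coordinate falls into before writing the cellId.
        labelVoxel(x, y, z);
      }
    }
  }
}

// Example: upsampling a labeled voxel at bucket-local [5, 7, 3] from mag 2-2-1
// to mag 1-1-1 labels the 2x2x1 block starting at [10, 14, 3].
projectLabeledVoxel([5, 7, 3], [2, 2, 1], [1, 1, 1], (x, y, z) => console.log(x, y, z));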
diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 7b0c7cafe6d..28467a356f6 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -297,8 +297,10 @@ export function* floodFill(): Saga { const seedVoxel = Dimensions.roundCoordinate(position); const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); const dimensionIndices = Dimensions.getIndices(planeId); + // The flood fill method of the cube iterates within the bucket. Thus thirdDimensionValue must also be within a bucket. + let thirdDimensionValue = seedVoxel[dimensionIndices[2]] % Constants.BUCKET_WIDTH; const get3DAddress = (voxel: Vector2) => { - const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], seedVoxel[dimensionIndices[2]]]; + const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], thirdDimensionValue]; const orderedVoxelWithThirdDimension = [ unorderedVoxelWithThirdDimension[dimensionIndices[0]], unorderedVoxelWithThirdDimension[dimensionIndices[1]], @@ -326,6 +328,9 @@ export function* floodFill(): Saga { } const allResolutions = yield* select(state => getResolutions(state.dataset)); const activeResolution = allResolutions[activeZoomStep]; + // The applyLabeledVoxelMapToResolution method of calculates the thirdDimensionValue in the bucket of the goalResolution itself. + // Thus reset the thirdDimensionValue value. + thirdDimensionValue = seedVoxel[dimensionIndices[2]]; for (let zoomStep = 0; zoomStep < allResolutions.length; zoomStep++) { if (zoomStep === activeZoomStep) { continue; From 3751541526525981331510026d4a2efba6dc19a0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 10 Sep 2020 18:57:53 +0200 Subject: [PATCH 036/121] fixed flood fill for all resolutions --- .../model/bucket_data_handling/data_cube.js | 1 - .../oxalis/model/sagas/volumetracing_saga.js | 16 ++++++++-------- 2 files changed, 8 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 3ef118f3dc9..bab941df4e2 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -493,7 +493,6 @@ class DataCube { neighbourVoxelStack.pushVoxel(neighbourVoxel); } } - debugger; } } bucketsWithLabeledVoxelsMap.set(currentBucket, currentLabeledVoxelMap); diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 28467a356f6..55c9f7f6a61 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -297,8 +297,14 @@ export function* floodFill(): Saga { const seedVoxel = Dimensions.roundCoordinate(position); const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); const dimensionIndices = Dimensions.getIndices(planeId); - // The flood fill method of the cube iterates within the bucket. Thus thirdDimensionValue must also be within a bucket. 
- let thirdDimensionValue = seedVoxel[dimensionIndices[2]] % Constants.BUCKET_WIDTH; + const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); + const allResolutions = yield* select(state => getResolutions(state.dataset)); + const activeResolution = allResolutions[activeZoomStep]; + // The flood fill and the applyLabeledVoxelMapToResolution method of the cube iterates within the bucket. + // Thus thirdDimensionValue must also be within the initial bucket in the correct resolution. + const thirdDimensionValue = + Math.floor(seedVoxel[dimensionIndices[2]] / activeResolution[dimensionIndices[2]]) % + Constants.BUCKET_WIDTH; const get3DAddress = (voxel: Vector2) => { const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], thirdDimensionValue]; const orderedVoxelWithThirdDimension = [ @@ -312,7 +318,6 @@ export function* floodFill(): Saga { voxel[dimensionIndices[0]], voxel[dimensionIndices[1]], ]; - const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); const currentViewportBounding = yield* call(getBoundingsFromPosition, planeId, 1); const bucketsWithLabeledVoxelMap = cube.floodFill( seedVoxel, @@ -326,11 +331,6 @@ export function* floodFill(): Saga { if (!bucketsWithLabeledVoxelMap) { continue; } - const allResolutions = yield* select(state => getResolutions(state.dataset)); - const activeResolution = allResolutions[activeZoomStep]; - // The applyLabeledVoxelMapToResolution method of calculates the thirdDimensionValue in the bucket of the goalResolution itself. - // Thus reset the thirdDimensionValue value. - thirdDimensionValue = seedVoxel[dimensionIndices[2]]; for (let zoomStep = 0; zoomStep < allResolutions.length; zoomStep++) { if (zoomStep === activeZoomStep) { continue; From 52f28f2b1cb310c09b3871e7f6355334383bf7c4 Mon Sep 17 00:00:00 2001 From: Florian M Date: Fri, 11 Sep 2020 14:57:57 +0200 Subject: [PATCH 037/121] handle differing resolutions during volume merging + upload --- app/models/annotation/AnnotationService.scala | 6 +- frontend/javascripts/admin/api_flow_types.js | 1 + frontend/javascripts/messages.js | 2 + .../oxalis/model_initialization.js | 41 ++++-- .../controllers/TracingController.scala | 10 +- .../controllers/VolumeTracingController.scala | 14 +- .../tracings/TracingService.scala | 22 ++-- .../skeleton/SkeletonTracingService.scala | 10 +- .../volume/VolumeTracingDownsampling.scala | 44 +++++-- .../volume/VolumeTracingService.scala | 124 ++++++++++++++---- .../proto/VolumeTracing.proto | 2 +- 11 files changed, 203 insertions(+), 73 deletions(-) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 45deaf6f3a5..73d5a118047 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -19,7 +19,7 @@ import com.scalableminds.webknossos.tracingstore.VolumeTracing.{VolumeTracing, V import com.scalableminds.webknossos.tracingstore.geometry.{Color, NamedBoundingBox} import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.{NodeDefaults, SkeletonTracingDefaults} -import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingDefaults +import com.scalableminds.webknossos.tracingstore.tracings.volume.{VolumeTracingDefaults, VolumeTracingDownsampling} import com.typesafe.scalalogging.LazyLogging import javax.inject.Inject import models.annotation.AnnotationState._ @@ -117,9 +117,9 @@ class AnnotationService 
@Inject()(annotationInformationProvider: AnnotationInfor fallbackLayer.map(_.name), fallbackLayer.map(_.largestSegmentId).getOrElse(VolumeTracingDefaults.largestSegmentId), 0, - VolumeTracingDefaults.zoomLevel + VolumeTracingDefaults.zoomLevel, + resolutions = VolumeTracingDownsampling.resolutionsForVolumeTracing(dataSource, fallbackLayer).map(point3DToProto) ) - //TODO: add mag list def createTracings( dataSet: DataSet, diff --git a/frontend/javascripts/admin/api_flow_types.js b/frontend/javascripts/admin/api_flow_types.js index 11c9b7aaa0b..6a7a7c27755 100644 --- a/frontend/javascripts/admin/api_flow_types.js +++ b/frontend/javascripts/admin/api_flow_types.js @@ -615,6 +615,7 @@ export type ServerVolumeTracing = {| elementClass: ElementClass, fallbackLayer?: string, largestSegmentId: number, + resolutions?: Array, |}; export type ServerTracing = ServerSkeletonTracing | ServerVolumeTracing; diff --git a/frontend/javascripts/messages.js b/frontend/javascripts/messages.js index 9de3c2422e5..cb2cd6e52d6 100644 --- a/frontend/javascripts/messages.js +++ b/frontend/javascripts/messages.js @@ -99,6 +99,8 @@ instead. Only enable this option if you understand its effect. All layers will n "You didn't add a node after jumping to this branchpoint, do you really want to jump again?", "tracing.edit_volume_in_merger_mode": "The volume annotation would be changed by this action. This is not allowed while merger mode is active.", + "tracing.volume_resolution_mismatch": + "The volume annotation resolutions do not match the dataset resolutions. Was the dataset edited after creating the annotation? Consider downloading and re-uploading resolution 1 only to adapt the annotation.", "tracing.segmentation_zoom_warning": "Segmentation data and volume annotation is only fully supported at a smaller zoom level.", "tracing.uint64_segmentation_warning": diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index 91483b6db5f..fa14034fb56 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -25,6 +25,7 @@ import { getMostExtensiveResolutions, getSegmentationLayer, isElementClassSupported, + getResolutions, } from "oxalis/model/accessors/dataset_accessor"; import { getSomeServerTracing } from "oxalis/model/accessors/tracing_accessor"; import { @@ -50,7 +51,6 @@ import { initializeSkeletonTracingAction, } from "oxalis/model/actions/skeletontracing_actions"; import { setDatasetAction } from "oxalis/model/actions/dataset_actions"; -import { getResolutions } from "oxalis/model/accessors/dataset_accessor"; import { setPositionAction, setZoomStepAction, @@ -442,13 +442,7 @@ function setupLayerForVolumeTracing( ): Array { // This method adds/merges the segmentation layers of the tracing into the dataset layers let layers = _.clone(dataset.dataSource.dataLayers); - const segmentationLayer = layers.find(layer => layer.category === "segmentation"); - let resolutions; - if (segmentationLayer) { - resolutions = segmentationLayer.resolutions - } else { - resolutions = getResolutions(dataset); - } + // The tracing always contains the layer information for the user segmentation. 
// Two possible cases: // 1) No segmentation exists yet: In that case layers doesn't contain the dataLayer - it needs @@ -459,6 +453,35 @@ function setupLayerForVolumeTracing( const fallbackLayer = layers[fallbackLayerIndex]; const boundaries = getBoundaries(dataset); + console.log(tracing.resolutions); + + const tracingResolutions = tracing.resolutions + ? tracing.resolutions.map(({ x, y, z }) => [x, y, z]) + : [[1, 1, 1]]; + + console.log(tracingResolutions); + const targetResolutions = + fallbackLayer != null ? fallbackLayer.resolutions : getResolutions(dataset); + + const resolutionsAreSubset = (resAs, resBs) => + resAs.every(resA => resBs.some(resB => _.isEqual(resA, resB))); + const doResolutionsMatch = + resolutionsAreSubset(targetResolutions, tracingResolutions) && + resolutionsAreSubset(tracingResolutions, targetResolutions); + + if (!doResolutionsMatch) { + if (tracing.resolutions) { + Toast.warning( + messages["tracing.volume_resolution_mismatch"], + {}, + `Tracing resolutions ${tracingResolutions.toString()} vs dataset resolutions ${targetResolutions.toString()}`, + ); + throw HANDLED_ERROR; + } else { + console.log("Detected legacy tracing with no resolution pyramid."); + } + } + const tracingLayer = { name: tracing.id, elementClass: tracing.elementClass, @@ -466,7 +489,7 @@ function setupLayerForVolumeTracing( largestSegmentId: tracing.largestSegmentId, boundingBox: convertBoundariesToBoundingBox(boundaries), // volume tracing can only be done for the first resolution - resolutions, + resolutions: tracingResolutions, mappings: fallbackLayer != null && fallbackLayer.mappings != null ? fallbackLayer.mappings : [], // remember the name of the original layer, used to request mappings fallbackLayer: tracing.fallbackLayer, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index 16e7026403f..57f0af267a4 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -224,12 +224,12 @@ trait TracingController[T <: GeneratedMessage with Message[T], Ts <: GeneratedMe tracings <- tracingService.findMultiple(request.body, applyUpdates = true) ?~> Messages("tracing.notFound") newId = tracingService.generateTracingId mergedTracing = tracingService.merge(tracings.flatten) - _ <- tracingService.mergeVolumeData(request.body.flatten, - tracings.flatten, - newId, - mergedTracing, - toCache = !persist) _ <- tracingService.save(mergedTracing, Some(newId), version = 0, toCache = !persist) + _ <- tracingService.mergeVolumeDataWithDownsampling(request.body.flatten, + tracings.flatten, + newId, + mergedTracing, + toCache = !persist) } yield { Ok(Json.toJson(newId)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index c72118f8762..bc2bb8616f0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -59,8 +59,11 @@ class VolumeTracingController @Inject()(val tracingService: 
VolumeTracingService for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - _ <- tracingService.initializeWithData(tracingId, tracing, initialData).toFox - _ = tracingService.downsample(tracingId: String, tracing: VolumeTracing) + originalResolutions <- tracingService.initializeWithData(tracingId, tracing, initialData).toFox + filledResolutions <- tracingService.downsample(tracingId: String, + tracing: VolumeTracing, + originalResolutions) + _ <- tracingService.updateResolutionList(tracingId, tracing, filledResolutions) } yield Ok(Json.toJson(tracingId)) } } @@ -90,8 +93,11 @@ class VolumeTracingController @Inject()(val tracingService: VolumeTracingService for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - _ <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData).toFox - - <- tracingService.downsample(tracingId, tracing) + originalResolutions <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData).toFox + filledResolutions <- tracingService.downsample(tracingId: String, + tracing: VolumeTracing, + originalResolutions) + _ <- tracingService.updateResolutionList(tracingId, tracing, filledResolutions) } yield Ok(Json.toJson(tracingId)) } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index e9ef5e1316c..19f5f64aaf6 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -2,7 +2,7 @@ package com.scalableminds.webknossos.tracingstore.tracings import java.util.UUID -import com.scalableminds.util.geometry.BoundingBox +import com.scalableminds.util.geometry.Point3D import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.tracingstore.RedisTemporaryStore import scalapb.{GeneratedMessage, GeneratedMessageCompanion, Message} @@ -58,11 +58,11 @@ trait TracingService[T <: GeneratedMessage with Message[T]] s"transactionBatch___${tracingId}___${transactionidOpt}___${transactionGroupindexOpt}___$version" protected def temporaryIdKey(tracingId: String) = - s"temporaryTracingId___${tracingId}" + s"temporaryTracingId___$tracingId" def currentUncommittedVersion(tracingId: String, transactionIdOpt: Option[String]): Fox[Option[Long]] = transactionIdOpt match { - case Some(transactionId) => + case Some(_) => for { keys <- uncommittedUpdatesStore.keys(s"transactionBatch___${tracingId}___${transactionIdOpt}___*") } yield if (keys.isEmpty) None else Some(keys.flatMap(versionFromTransactionBatchKey).max) @@ -160,13 +160,11 @@ trait TracingService[T <: GeneratedMessage with Message[T]] def saveToHandledGroupIdStore(tracingId: String, transactionIdOpt: Option[String], version: Long): Fox[Unit] = transactionIdOpt match { - case Some(transactionId) => { + case Some(transactionId) => val key = handledGroupKey(tracingId, transactionId, version) handledGroupIdStore.insert(key, "()", Some(handledGroupCacheExpiry)) - } - case _ => { + case _ => Fox.successful(()) - } } def handledGroupIdStoreContains(tracingId: String, transactionId: String, version: Long): Fox[Boolean] = @@ -174,10 
+172,10 @@ trait TracingService[T <: GeneratedMessage with Message[T]] def merge(tracings: Seq[T]): T - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], - tracings: Seq[T], - newId: String, - newTracing: T, - toCache: Boolean): Fox[Unit] + def mergeVolumeDataWithDownsampling(tracingSelectors: Seq[TracingSelector], + tracings: Seq[T], + newId: String, + newTracing: T, + toCache: Boolean): Fox[Unit] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index 3bc7786af95..ebb3620d8e7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -176,11 +176,11 @@ class SkeletonTracingService @Inject()(tracingDataStore: TracingDataStore, ) } - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], - tracings: Seq[SkeletonTracing], - newId: String, - newTracing: SkeletonTracing, - toCache: Boolean): Fox[Unit] = Fox.successful(()) + def mergeVolumeDataWithDownsampling(tracingSelectors: Seq[TracingSelector], + tracings: Seq[SkeletonTracing], + newId: String, + newTracing: SkeletonTracing, + toCache: Boolean): Fox[Unit] = Fox.successful(()) def updateActionLog(tracingId: String) = { def versionedTupleToJson(tuple: (Long, List[SkeletonUpdateAction])): JsObject = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index 97d9412b210..91e4d31b927 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -3,7 +3,13 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.scalableminds.util.geometry.Point3D import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedIntegerArray} -import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, DataSourceLike, ElementClass} +import com.scalableminds.webknossos.datastore.models.datasource.{ + DataLayerLike, + DataSource, + DataSourceLike, + ElementClass, + SegmentationLayerLike +} import com.scalableminds.webknossos.tracingstore.TracingStoreWkRpcClient import com.scalableminds.webknossos.tracingstore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.tracingstore.tracings.{ @@ -12,11 +18,26 @@ import com.scalableminds.webknossos.tracingstore.tracings.{ TracingDataStore, VersionedKeyValuePair } +import com.scalableminds.webknossos.tracingstore.geometry.{Point3D => ProtoPoint3D} import scala.collection.mutable import scala.concurrent.ExecutionContext import scala.reflect.ClassTag +object VolumeTracingDownsampling { + def resolutionsForVolumeTracingByLayerName(dataSource: DataSourceLike, + fallbackLayerName: Option[String]): List[Point3D] = { + val fallbackLayer: Option[DataLayerLike] = + fallbackLayerName.flatMap(name => dataSource.dataLayers.find(_.name == name)) + resolutionsForVolumeTracing(dataSource, fallbackLayer) + } + + def 
resolutionsForVolumeTracing(dataSource: DataSourceLike, fallbackLayer: Option[DataLayerLike]): List[Point3D] = { + val fallBackLayerMags = fallbackLayer.map(_.resolutions) + fallBackLayerMags.getOrElse(dataSource.dataLayers.flatMap(_.resolutions).distinct) + } +} + trait VolumeTracingDownsampling extends BucketKeys with ProtoGeometryImplicits @@ -47,11 +68,9 @@ trait VolumeTracingDownsampling } def downsampleWithLayer(tracingId: String, tracing: VolumeTracing, dataLayer: VolumeTracingLayer)( - implicit ec: ExecutionContext): Fox[Unit] = { + implicit ec: ExecutionContext): Fox[Set[Point3D]] = { //TODO: - // - skip if already downsampled // - list all keys first, before fetching actual data - // - update tracing version? can the user restore not-downsampled old versions, what happens? val bucketVolume = 32 * 32 * 32 val originalMag = Point3D(1, 1, 1) for { @@ -79,7 +98,7 @@ trait VolumeTracingDownsampling //logger.info(s"saving bucket $bucketPosition") saveBucket(dataLayer, bucketPosition, bucketDataMap(bucketPosition), tracing.version) } - } yield () + } yield (requiredMags.toSet + originalMag) } private def downsampleMagFromMag(previousMag: Point3D, @@ -184,10 +203,19 @@ trait VolumeTracingDownsampling items.groupBy(i => i).mapValues(_.size).maxBy(_._2)._1 private def getRequiredMags(tracing: VolumeTracing): Fox[Seq[Point3D]] = - // TODO: if tracing has fallback layer, use only mags present in that fallback layer for { dataSource: DataSourceLike <- tracingStoreWkRpcClient.getDataSource(tracing.organizationName, tracing.dataSetName) - mags = dataSource.dataLayers.flatMap(_.resolutions).distinct.sortBy(_.maxDim).filterNot(_.maxDim == 1) - } yield mags + magsForTracing = VolumeTracingDownsampling.resolutionsForVolumeTracingByLayerName(dataSource, + tracing.fallbackLayer) + magsToCreate = magsForTracing.filterNot(_.maxDim == 1).sortBy(_.maxDim) + } yield magsToCreate + + def resolutionsMatch(tracings: Seq[VolumeTracing]): Boolean = + tracings.headOption.forall { firstTracing => + tracings.forall(t => + resolveLegacyResolutionList(t.resolutions).toSet == resolveLegacyResolutionList(firstTracing.resolutions).toSet) + } + private def resolveLegacyResolutionList(resolutions: Seq[ProtoPoint3D]) = + if (resolutions.isEmpty) Seq(ProtoPoint3D(1, 1, 1)) else resolutions } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index cb73fe00715..0d13bf9d187 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -14,7 +14,6 @@ import com.scalableminds.util.io.{NamedStream, ZipIO} import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.models.DataRequestCollection.DataRequestCollection import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest - import com.scalableminds.webknossos.tracingstore.{RedisTemporaryStore, TracingStoreWkRpcClient} import com.scalableminds.webknossos.datastore.services.{BinaryDataService, DataConverter} import com.scalableminds.webknossos.tracingstore.VolumeTracing.VolumeTracing.ElementClass @@ -147,24 +146,45 @@ class VolumeTracingService @Inject()( Fox.successful(tracing.withUserBoundingBoxes(updateUserBoundingBoxes())) } - 
def initializeWithDataMultiple(tracingId: String, tracing: VolumeTracing, initialData: File): Box[_] = { + def initializeWithDataMultiple(tracingId: String, tracing: VolumeTracing, initialData: File): Fox[Set[Point3D]] = { if (tracing.version != 0L) { return Failure("Tracing has already been edited.") } + + val resolutionSets = new mutable.HashSet[Set[Point3D]]() + ZipIO.withUnziped(initialData) { + case (_, is) => + val resulutionSet = new mutable.HashSet[Point3D]() + ZipIO.withUnziped(is) { + case (fileName, _) => + parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => + resulutionSet.add(bucketPosition.resolution) + } + } + resolutionSets.add(resulutionSet.toSet) + } + val resolutionsMatch = resolutionSets.headOption.forall { head => + resolutionSets.forall(_ == head) + } + val mergedVolume = new MergedVolume(tracing.elementClass) ZipIO.withUnziped(initialData) { case (_, is) => val labelSet: mutable.Set[UnsignedInteger] = scala.collection.mutable.Set() ZipIO.withUnziped(is) { - case (_, is) => + case (fileName, is) => WKWFile.read(is) { case (header, buckets) => if (header.numBlocksPerCube == 1) { - val dataTyped = - UnsignedIntegerArray.fromByteArray(buckets.next(), elementClassFromProto(tracing.elementClass)) - val nonZeroData = UnsignedIntegerArray.filterNonZero(dataTyped) - labelSet ++= nonZeroData + parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => + if (resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1)) { + val dataTyped = + UnsignedIntegerArray.fromByteArray(buckets.next(), elementClassFromProto(tracing.elementClass)) + val nonZeroData = UnsignedIntegerArray.filterNonZero(dataTyped) + labelSet ++= nonZeroData + } + } } } } @@ -181,7 +201,7 @@ class VolumeTracingService @Inject()( if (header.numBlocksPerCube == 1) { parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => val data = buckets.next() - if (!isAllZero(data)) { + if (!isAllZero(data) && (resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1))) { mergedVolume.add(sourceVolumeIndex, bucketPosition, data) } } @@ -192,9 +212,9 @@ class VolumeTracingService @Inject()( } val destinationDataLayer = volumeTracingLayer(tracingId, tracing) - mergedVolume.saveTo(destinationDataLayer, tracing.version, toCache = false) - - //TODO: save mag list to tracing + for { + _ <- mergedVolume.saveTo(destinationDataLayer, tracing.version, toCache = false) + } yield mergedVolume.presentResolutions } class MergedVolume(elementClass: ElementClass) extends DataConverter { @@ -257,16 +277,23 @@ class VolumeTracingService @Inject()( toCache) }.toList) } yield () + + def presentResolutions: Set[Point3D] = + mergedVolume.map { + case (bucketPosition: BucketPosition, _) => bucketPosition.resolution + }.toSet + } - def initializeWithData(tracingId: String, tracing: VolumeTracing, initialData: File): Box[_] = { + def initializeWithData(tracingId: String, tracing: VolumeTracing, initialData: File): Box[Set[Point3D]] = { if (tracing.version != 0L) { return Failure("Tracing has already been edited.") } val dataLayer = volumeTracingLayer(tracingId, tracing) + val savedResolutions = new mutable.HashSet[Point3D]() - ZipIO.withUnziped(initialData) { + val unzipResult = ZipIO.withUnziped(initialData) { case (fileName, is) => WKWFile.read(is) { case (header, buckets) => @@ -276,6 +303,7 @@ class VolumeTracingService @Inject()( if (isAllZero(data)) { Fox.successful(()) } else { + savedResolutions.add(bucket.resolution) saveBucket(dataLayer, bucket, data, 
tracing.version) } } @@ -283,7 +311,9 @@ class VolumeTracingService @Inject()( } } - //TODO: save mag list to tracing + for { + _ <- unzipResult + } yield savedResolutions.toSet } private def isAllZero(data: Array[Byte]): Boolean = @@ -410,9 +440,28 @@ class VolumeTracingService @Inject()( } yield Json.toJson(updateActionGroupsJs) } - def downsample(tracingId: String, tracing: VolumeTracing): Fox[Unit] = { - val volumeLayer = volumeTracingLayer(tracingId, tracing) - downsampleWithLayer(tracingId, tracing, volumeLayer) + def updateResolutionList(tracingId: String, + tracing: VolumeTracing, + filledResolutions: Set[Point3D], + toCache: Boolean = false): Fox[String] = { + if (tracing.version != 0L) { + return Fox.failure("Tracing has already been edited.") + } + save(tracing.copy(resolutions = filledResolutions.map(point3DToProto).toSeq), + Some(tracingId), + tracing.version, + toCache) + } + + def downsample(tracingId: String, tracing: VolumeTracing, originalResolutions: Set[Point3D]): Fox[Set[Point3D]] = { + if (tracing.version != 0L) { + return Failure("Tracing has already been edited.") + } + if (originalResolutions == Set(Point3D(1, 1, 1))) { + // is legacy tracing, needs downsampling + val volumeLayer = volumeTracingLayer(tracingId, tracing) + downsampleWithLayer(tracingId, tracing, volumeLayer) + } else Fox.successful(originalResolutions) } def merge(tracings: Seq[VolumeTracing]): VolumeTracing = tracings.reduceLeft(mergeTwo) @@ -439,12 +488,29 @@ class VolumeTracingService @Inject()( ) } - def mergeVolumeData(tracingSelectors: Seq[TracingSelector], - tracings: Seq[VolumeTracing], - newId: String, - newTracing: VolumeTracing, - toCache: Boolean): Fox[Unit] = { + def mergeVolumeDataWithDownsampling(tracingSelectors: Seq[TracingSelector], + tracings: Seq[VolumeTracing], + newId: String, + newTracing: VolumeTracing, + toCache: Boolean): Fox[Unit] = { val elementClass = tracings.headOption.map(_.elementClass).getOrElse(ElementClass.uint8) + + val resolutionSets = new mutable.HashSet[Set[Point3D]]() + tracingSelectors.zip(tracings).foreach { + case (selector, tracing) => + val resulutionSet = new mutable.HashSet[Point3D]() + val dataLayer = volumeTracingLayer(selector.tracingId, tracing) + val bucketStream: Iterator[(BucketPosition, Array[Byte])] = + dataLayer.bucketProvider.bucketStream(1, Some(tracing.version)) + bucketStream.foreach { + case (bucketPosition, _) => + resulutionSet.add(bucketPosition.resolution) + } + } + val resolutionsMatch = resolutionSets.headOption.forall { head => + resolutionSets.forall(_ == head) + } + val mergedVolume = new MergedVolume(elementClass) tracingSelectors.zip(tracings).foreach { @@ -454,8 +520,8 @@ class VolumeTracingService @Inject()( val bucketStream: Iterator[(BucketPosition, Array[Byte])] = dataLayer.bucketProvider.bucketStream(1, Some(tracing.version)) bucketStream.foreach { - case (_, data) => - if (data.length > 1) { // skip reverted buckets + case (bucketPosition, data) => + if (data.length > 1 && (resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1))) { // skip reverted buckets val dataTyped = UnsignedIntegerArray.fromByteArray(data, elementClass) val nonZeroData: Array[UnsignedInteger] = UnsignedIntegerArray.filterNonZero(dataTyped) labelSet ++= nonZeroData @@ -471,11 +537,17 @@ class VolumeTracingService @Inject()( dataLayer.bucketProvider.bucketStream(1, Some(tracing.version)) bucketStream.foreach { case (bucketPosition, data) => - mergedVolume.add(sourceVolumeIndex, bucketPosition, data) + if (data.length > 1 && 
(resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1))) { + mergedVolume.add(sourceVolumeIndex, bucketPosition, data) + } } } val destinationDataLayer = volumeTracingLayer(newId, newTracing) - mergedVolume.saveTo(destinationDataLayer, newTracing.version, toCache) + for { + _ <- mergedVolume.saveTo(destinationDataLayer, newTracing.version, toCache) + filledResolutions <- downsample(newId, newTracing, mergedVolume.presentResolutions) + _ <- updateResolutionList(newId, newTracing, filledResolutions) + } yield () } } diff --git a/webknossos-tracingstore/proto/VolumeTracing.proto b/webknossos-tracingstore/proto/VolumeTracing.proto index 38b2f7694ac..a640b8a9296 100644 --- a/webknossos-tracingstore/proto/VolumeTracing.proto +++ b/webknossos-tracingstore/proto/VolumeTracing.proto @@ -27,7 +27,7 @@ message VolumeTracing { optional BoundingBox userBoundingBox = 12; repeated NamedBoundingBox userBoundingBoxes = 13; optional string organizationName = 14; - repeated Point3D mags = 15; + repeated Point3D resolutions = 15; } message VolumeTracingOpt { From 387b0dbed5008e64ef570c856b248138e1cc2ff1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 11 Sep 2020 15:13:10 +0200 Subject: [PATCH 038/121] add up and downsampling of LabeledVoxelsMap --- frontend/javascripts/messages.js | 3 + frontend/javascripts/oxalis/constants.js | 2 + .../model/bucket_data_handling/bucket.js | 11 +- .../model/bucket_data_handling/data_cube.js | 2 - .../volume_annotation_sampling.js | 210 ++++++++++++++++++ 5 files changed, 225 insertions(+), 3 deletions(-) create mode 100644 frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js diff --git a/frontend/javascripts/messages.js b/frontend/javascripts/messages.js index 9de3c2422e5..a0adf7b690f 100644 --- a/frontend/javascripts/messages.js +++ b/frontend/javascripts/messages.js @@ -1,5 +1,6 @@ // @flow import _ from "lodash"; +import type { Vector4 } from "oxalis/constants"; export const settings = { clippingDistance: "Clipping Distance", @@ -79,6 +80,8 @@ In order to restore the current window, a reload is necessary.`, "data.disabled_render_missing_data_black": `You just disabled the option to render missing data black. This means that in case of missing data, data of lower quality is rendered instead. Only enable this option if you understand its effect. All layers will now be reloaded.`, + "sampling.could_not_get_or_create_bucket": (zoomedAddress: Vector4) => + `While sampling could not get or create bucket at address ${zoomedAddress.toString()}.`, "tracing.unhandled_initialization_error": "Initialization error. Please refresh the page to retry. 
If the error persists, please contact an administrator.", "tracing.out_of_dataset_bounds": diff --git a/frontend/javascripts/oxalis/constants.js b/frontend/javascripts/oxalis/constants.js index c3d13b9c92d..f68a26f19b4 100644 --- a/frontend/javascripts/oxalis/constants.js +++ b/frontend/javascripts/oxalis/constants.js @@ -127,6 +127,8 @@ export const Unicode = { MultiplicationSymbol: "×", }; +export type LabeledVoxelsMap = Map; + const Constants = { ARBITRARY_VIEW: 4, diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js index 5dfdf9cb396..8b7c83a1f67 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js @@ -16,7 +16,12 @@ import { getResolutions } from "oxalis/model/accessors/dataset_accessor"; import DataCube from "oxalis/model/bucket_data_handling/data_cube"; import Store from "oxalis/store"; import TemporalBucketManager from "oxalis/model/bucket_data_handling/temporal_bucket_manager"; -import Constants, { type Vector2, type Vector4, type BoundingBoxType } from "oxalis/constants"; +import Constants, { + type Vector2, + type Vector3, + type Vector4, + type BoundingBoxType, +} from "oxalis/constants"; import type { DimensionMap } from "oxalis/model/dimensions"; import window from "libs/window"; import { type ElementClass } from "admin/api_flow_types"; @@ -178,6 +183,10 @@ export class DataBucket { return this.state === BucketStateEnum.MISSING; } + getAddress(): Vector3 { + return [this.zoomedAddress[0], this.zoomedAddress[1], this.zoomedAddress[2]]; + } + is2DVoxelInsideBucket = (voxel: Vector2, dimensionIndices: DimensionMap, zoomStep: number) => { const neighbourBucketAddress = [ this.zoomedAddress[0], diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index bab941df4e2..31056b8d27a 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -44,8 +44,6 @@ class CubeEntry { } } -export type LabeledVoxelsMap = Map; - class DataCube { MAXIMUM_BUCKET_COUNT = 5000; ZOOM_STEP_COUNT: number; diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js new file mode 100644 index 00000000000..44c6f1f7d7f --- /dev/null +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -0,0 +1,210 @@ +// @flow + +import constants, { type Vector2, type Vector3, type LabeledVoxelsMap } from "oxalis/constants"; +import { map3 } from "libs/utils"; +import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; +import messages from "messages"; + +export function upsampleVoxelMap( + labeledVoxelMap: LabeledVoxelsMap, + dataCube: DataCube, + sourceResolution: Vector3, + sourceZoomStep: number, + goalResolution: Vector3, + goalZoomStep: number, + dimensionIndices: Vector2, +): LabeledVoxelsMap { + // TODO: Add comment + if (sourceZoomStep <= goalZoomStep) { + throw new Error("Trying to upsample a LabeledVoxelMap with the down sample function."); + } + const labeledVoxelMapInGoalResolution: LabeledVoxelsMap = new Map(); + const scaleToSource = map3((val, index) => val / sourceResolution[index], goalResolution); + // This array serves multiple purposes. 
It has a name / variable for each purpose. + const scaleToGoal = map3((val, index) => val / goalResolution[index], sourceResolution); + const numberOfBucketWithSourceBucket = scaleToGoal; + const singleVoxelBoundsInGoalResolution = scaleToGoal; + const boundsOfGoalBucketWithinSourceBucket = map3( + value => Math.ceil(value * constants.BUCKET_WIDTH), + scaleToSource, + ); + for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { + const labeledBucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); + if (labeledBucket.type === "null") { + console.warn(messages["sampling.could_not_get_or_create_bucket"](labeledBucketZoomedAddress)); + continue; + } + const goalBaseBucketAddress = map3( + (value, index) => Math.floor(value * scaleToGoal[index]), + labeledBucket.getAddress(), + ); + for ( + let firstDimBucketOffset = 0; + firstDimBucketOffset < numberOfBucketWithSourceBucket[dimensionIndices[0]]; + firstDimBucketOffset++ + ) { + for ( + let secondDimBucketOffset = 0; + secondDimBucketOffset < numberOfBucketWithSourceBucket[dimensionIndices[1]]; + secondDimBucketOffset++ + ) { + const currentGoalBucketAddress = [...goalBaseBucketAddress]; + currentGoalBucketAddress[dimensionIndices[0]] += firstDimBucketOffset; + currentGoalBucketAddress[dimensionIndices[1]] += secondDimBucketOffset; + // The inner bucket of whose the voxelMap will be created. + const currentGoalBucket = dataCube.getOrCreateBucket([ + ...currentGoalBucketAddress, + goalZoomStep, + ]); + if (currentGoalBucket.type === "null") { + console.warn( + messages["sampling.could_not_get_or_create_bucket"]([ + ...currentGoalBucketAddress, + goalZoomStep, + ]), + ); + continue; + } + const currentGoalVoxelMap = new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); + const firstDimVoxelOffset = + boundsOfGoalBucketWithinSourceBucket[dimensionIndices[0]] * firstDimBucketOffset; + const secondDimVoxelOffset = + boundsOfGoalBucketWithinSourceBucket[dimensionIndices[1]] * secondDimBucketOffset; + // Iterate over the part of voxelMap that covers the currentGoalBucket with an upscaling kernel. + for ( + let kernelLeft = 0; + kernelLeft < boundsOfGoalBucketWithinSourceBucket[dimensionIndices[0]]; + kernelLeft++ + ) { + for ( + let kernelTop = 0; + kernelTop < boundsOfGoalBucketWithinSourceBucket[dimensionIndices[1]]; + kernelTop++ + ) { + if ( + voxelMap[ + (kernelLeft + firstDimVoxelOffset) * constants.BUCKET_WIDTH + + kernelTop + + secondDimVoxelOffset + ] === 1 + ) { + const kernelTopLeftVoxelInGoalResolution = [ + kernelLeft * singleVoxelBoundsInGoalResolution[dimensionIndices[0]], + kernelTop * singleVoxelBoundsInGoalResolution[dimensionIndices[1]], + ]; + // The labeled voxel is upscaled. 
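+                    // Each labeled voxel of the coarser source map covers a block of
+                    // singleVoxelBoundsInGoalResolution[dimensionIndices[0]] x singleVoxelBoundsInGoalResolution[dimensionIndices[1]]
+                    // voxels in the finer goal bucket; the nested loops below label that whole block.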
+ for ( + let firstKernelOffset = 0; + firstKernelOffset < singleVoxelBoundsInGoalResolution[dimensionIndices[0]]; + firstKernelOffset++ + ) { + for ( + let secondKernelOffset = 0; + secondKernelOffset < singleVoxelBoundsInGoalResolution[dimensionIndices[1]]; + secondKernelOffset++ + ) { + currentGoalVoxelMap[ + (kernelTopLeftVoxelInGoalResolution[0] + firstKernelOffset) * + constants.BUCKET_WIDTH + + kernelTopLeftVoxelInGoalResolution[1] + + secondKernelOffset + ] = 1; + } + } + } + } + } + labeledVoxelMapInGoalResolution.set(currentGoalBucket.zoomedAddress, currentGoalVoxelMap); + } + } + } + return labeledVoxelMapInGoalResolution; +} + +export function downsampleVoxelMap( + labeledVoxelMap: LabeledVoxelsMap, + dataCube: DataCube, + sourceResolution: Vector3, + sourceZoomStep: number, + goalResolution: Vector3, + goalZoomStep: number, + dimensionIndices: Vector2, +): LabeledVoxelsMap { + if (goalZoomStep <= sourceZoomStep) { + throw new Error("Trying to downsample a LabeledVoxelMap with the down sample function."); + } + const labeledVoxelMapInGoalResolution: LabeledVoxelsMap = new Map(); + const scaleToSource = map3((val, index) => val / sourceResolution[index], goalResolution); + const scaleToGoal = map3((val, index) => val / goalResolution[index], sourceResolution); + for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { + const labeledBucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); + if (labeledBucket.type === "null") { + console.warn(messages["sampling.could_not_get_or_create_bucket"](labeledBucketZoomedAddress)); + continue; + } + const goalBucketAddress = map3( + (value, index) => Math.floor(value * scaleToGoal[index]), + labeledBucket.getAddress(), + ); + const goalBucket = dataCube.getOrCreateBucket([...goalBucketAddress, goalZoomStep]); + if (goalBucket.type === "null") { + console.warn( + messages["sampling.could_not_get_or_create_bucket"]([...goalBucketAddress, goalZoomStep]), + ); + continue; + } + // Scale the bucket address back to the source scale to calculate the offset the source bucket has to the goalBucket. + const goalBucketAddressUpscaled = map3( + (value, index) => value * scaleToSource[index], + goalBucketAddress, + ); + const bucketOffset = map3( + (value, index) => labeledBucket.zoomedAddress[index] - value, + goalBucketAddressUpscaled, + ); + // Calculate the offset in voxel the source bucket has to the goal bucket. + const voxelOffset = map3( + (value, index) => value * constants.BUCKET_WIDTH * scaleToGoal[index], + bucketOffset, + ); + const goalVoxelMap = new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); + // Iterate over the voxelMap in the goal resolution and search in each voxel for a labeled voxel (kernel-wise iteration). 
+ const kernelSize = map3(scaleValue => Math.ceil(scaleValue), scaleToGoal); + for ( + let firstVoxelDim = 0; + firstVoxelDim < constants.BUCKET_WIDTH; + firstVoxelDim += kernelSize[dimensionIndices[0]] + ) { + for ( + let secondVoxelDim = 0; + secondVoxelDim < constants.BUCKET_WIDTH; + secondVoxelDim += kernelSize[dimensionIndices[1]] + ) { + let foundVoxel = false; + for ( + let firstKernelDim = 0; + firstKernelDim < kernelSize[dimensionIndices[0]] && !foundVoxel; + firstKernelDim++ + ) { + for ( + let secondKernelDim = 0; + secondKernelDim < kernelSize[dimensionIndices[1]] && !foundVoxel; + secondKernelDim++ + ) { + const firstDim = firstVoxelDim + firstKernelDim + voxelOffset[dimensionIndices[0]]; + const secondDim = secondVoxelDim + secondKernelDim + voxelOffset[dimensionIndices[1]]; + if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { + goalVoxelMap[ + firstDim * scaleToGoal[dimensionIndices[0]] * constants.BUCKET_WIDTH + + secondDim * scaleToGoal[dimensionIndices[1]] + ] = 1; + foundVoxel = true; + } + } + } + } + } + labeledVoxelMapInGoalResolution.set(goalBucket.zoomedAddress, goalVoxelMap); + } + return labeledVoxelMapInGoalResolution; +} From 7192375ca7537f0cf21ee6fc6036724040fc91c7 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 14 Sep 2020 11:12:45 +0200 Subject: [PATCH 039/121] add unlinkFallback route --- app/controllers/AnnotationController.scala | 16 ++++++++++++++++ .../annotation/TracingStoreRpcClient.scala | 18 +++++++++++++----- conf/messages | 2 ++ conf/webknossos.latest.routes | 1 + .../controllers/VolumeTracingController.scala | 19 +++++++++++++++++++ .../volume/VolumeTracingService.scala | 11 ++++++++++- ...alableminds.webknossos.tracingstore.routes | 1 + 7 files changed, 62 insertions(+), 6 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index c242675fc77..e29056d8ecb 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -174,6 +174,22 @@ class AnnotationController @Inject()( } } + def unlinkFallback(typ: String, id: String) = sil.SecuredAction.async { implicit request => + for { + _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.unlinkFallback.explorationalsOnly" + annotation <- provider.provideAnnotation(typ, id, request.identity) + volumeTracingId <- annotation.volumeTracingId.toFox ?~> "annotation.unlinkFallback.noVolume" + dataSet <- dataSetDAO + .findOne(annotation._dataSet)(GlobalAccessContext) ?~> "dataSet.notFoundForAnnotation" ~> NOT_FOUND + dataSource <- dataSetService.dataSourceFor(dataSet).flatMap(_.toUsable) ?~> "dataSet.notImported" + tracingStoreClient <- tracingStoreService.clientFor(dataSet) + newTracingId <- tracingStoreClient.unlinkFallback(volumeTracingId, dataSource) + _ <- annotationDAO.updateVolumeTracingId(annotation._id, newTracingId) + updatedAnnotation <- provider.provideAnnotation(typ, id, request.identity) + js <- annotationService.publicWrites(updatedAnnotation, Some(request.identity)) + } yield JsonOk(js) + } + private def finishAnnotation(typ: String, id: String, issuingUser: User, timestamp: Long)( implicit ctx: DBAccessContext): Fox[(Annotation, String)] = for { diff --git a/app/models/annotation/TracingStoreRpcClient.scala b/app/models/annotation/TracingStoreRpcClient.scala index a6c7381942e..a26498d7eb4 100644 --- a/app/models/annotation/TracingStoreRpcClient.scala +++ b/app/models/annotation/TracingStoreRpcClient.scala @@ -1,11 +1,11 @@ package 
models.annotation -import java.io.{BufferedOutputStream, File, FileOutputStream} +import java.io.File -import akka.stream.scaladsl.{Source, StreamConverters} +import akka.stream.scaladsl.Source import akka.util.ByteString import com.scalableminds.util.geometry.BoundingBox -import com.scalableminds.util.io.{NamedEnumeratorStream, ZipIO} +import com.scalableminds.util.io.ZipIO import com.scalableminds.webknossos.tracingstore.SkeletonTracing.{SkeletonTracing, SkeletonTracings} import com.scalableminds.webknossos.tracingstore.VolumeTracing.{VolumeTracing, VolumeTracings} import com.scalableminds.webknossos.tracingstore.tracings.TracingSelector @@ -13,11 +13,10 @@ import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.util.tools.JsonHelper.boxFormat import com.scalableminds.util.tools.JsonHelper.optionFormat import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, DataSourceLike} import com.typesafe.scalalogging.LazyLogging import models.binary.{DataSet, DataStoreRpcClient} import net.liftweb.common.Box -import play.api.libs.iteratee.Enumerator -import play.api.libs.json.JsObject import scala.concurrent.ExecutionContext @@ -198,4 +197,13 @@ class TracingStoreRpcClient(tracingStore: TracingStore, dataSet: DataSet, rpc: R } yield data } + def unlinkFallback(tracingId: String, dataSource: DataSourceLike): Fox[String] = { + logger.debug(s"Called to unlink fallback segmentation for tracing $tracingId." + baseInfo) + for { + newId: String <- rpc(s"${tracingStore.url}/tracings/volume/$tracingId/unlinkFallback") + .addQueryString("token" -> TracingStoreRpcClient.webKnossosToken) + .postWithJsonResponse[DataSourceLike, String](dataSource) + } yield newId + } + } diff --git a/conf/messages b/conf/messages index 5ad7c7692e9..24b17232402 100644 --- a/conf/messages +++ b/conf/messages @@ -223,6 +223,8 @@ annotation.timelogging.read.failed=Time annotation.write.failed=Could not convert annotation to json annotation.noSkeleton=No skeleton tracing found for this annotation annotation.needsEitherSkeletonOrVolume=Annotation needs at least one of skeleton or volume +annotation.unlinkFallback.explorationalsOnly=Could not unlink fallback segmentation (only allowed for explorational annotations). +annotation.unlinkFallback.noVolume=Could not unlink fallback segmentation (only allowed for annotations with volume layer). annotation.makeHybrid.explorationalsOnly=Could not convert annotation to hybrid annotation because it is only allowed for explorational annotations. annotation.makeHybrid.failed=Could not convert to hybrid. annotation.makeHybrid.alreadyHybrid=Could not convert annotation to hybrid annotation because it is already a hybrid annotation. 
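For illustration, a minimal sketch of how a client could call the new endpoint once the route below is in place. The helper name, the use of plain fetch, and the /api prefix (mirroring the existing annotation routes) are assumptions and not part of these patches:

    // Hypothetical helper -- not part of this patch set. The endpoint is the PATCH route
    // registered below; AnnotationController.unlinkFallback responds with the updated annotation JSON.
    async function unlinkFallbackSegmentation(annotationType: string, annotationId: string) {
      const response = await fetch(`/api/annotations/${annotationType}/${annotationId}/unlinkFallback`, {
        method: "PATCH",
      });
      if (!response.ok) {
        throw new Error(`Unlinking the fallback segmentation failed (HTTP ${response.status}).`);
      }
      return response.json();
    }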
diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index d8ef5de3424..ad44e2eb150 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -109,6 +109,7 @@ PATCH /annotations/:typ/:id/transfer GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Long) PATCH /annotations/:typ/:id/makeHybrid controllers.AnnotationController.makeHybrid(typ: String, id: String) +PATCH /annotations/:typ/:id/unlinkFallback controllers.AnnotationController.unlinkFallback(typ: String, id: String) DELETE /annotations/:typ/:id controllers.AnnotationController.cancel(typ: String, id: String) POST /annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.merge(typ: String, id: String, mergedTyp: String, mergedId: String) GET /annotations/:typ/:id/download controllers.AnnotationIOController.download(typ: String, id: String, skeletonVersion: Option[Long], volumeVersion: Option[Long], skipVolumeData: Option[Boolean]) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index bc2bb8616f0..d20e23c57a1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -19,6 +19,7 @@ import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import com.scalableminds.util.tools.JsonHelper.boxFormat import com.scalableminds.util.tools.JsonHelper.optionFormat +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceLike import com.scalableminds.webknossos.datastore.storage.TemporaryStore import com.scalableminds.webknossos.tracingstore.slacknotification.SlackNotificationService import play.api.http.HttpEntity @@ -174,6 +175,24 @@ class VolumeTracingController @Inject()(val tracingService: VolumeTracingService } } + def unlinkFallback(tracingId: String) = Action.async(validateJson[DataSourceLike]) { implicit request => + log { + logTime(slackNotificationService.reportUnusalRequest) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { + AllowRemoteOrigin { + for { + tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + updatedTracing = tracingService.unlinkFallback(tracing, request.body) + newId <- tracingService.save(updatedTracing, None, 0L) + } yield { + Ok(Json.toJson(newId)) + } + } + } + } + } + } + def updateActionLog(tracingId: String) = Action.async { implicit request => log { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId)) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 0d13bf9d187..912a3e49d27 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -6,7 +6,7 @@ import java.nio.file.Paths import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Point3D} 
import com.scalableminds.webknossos.datastore.dataformats.wkw.{WKWBucketStreamSink, WKWDataFormatHelper} -import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, SegmentationLayer} +import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, DataSourceLike, SegmentationLayer} import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedInteger, UnsignedIntegerArray} import com.scalableminds.webknossos.tracingstore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.tracingstore.tracings.{TracingType, _} @@ -361,6 +361,15 @@ class VolumeTracingService @Inject()( data <- binaryDataService.handleDataRequests(requests) } yield data + def unlinkFallback(tracing: VolumeTracing, dataSource: DataSourceLike): VolumeTracing = + tracing.copy( + activeSegmentId = None, + largestSegmentId = 0L, + fallbackLayer = None, + version = 0L, + resolutions = VolumeTracingDownsampling.resolutionsForVolumeTracing(dataSource, None).map(point3DToProto) + ) + @SuppressWarnings(Array("OptionGet")) //We suppress this warning because we check the option beforehand def duplicate(tracingId: String, tracing: VolumeTracing, diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 3f349558914..94c356e4187 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -16,6 +16,7 @@ GET /volume/:tracingId/allData @com.scalablemin GET /volume/:tracingId/allDataBlocking @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataBlocking(tracingId: String, version: Option[Long]) POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(tracingId: String, fromTask: Option[Boolean]) +POST /volume/:tracingId/unlinkFallback @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.unlinkFallback(tracingId: String) GET /volume/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateActionLog(tracingId: String) POST /volume/getMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getMultiple POST /volume/mergedFromIds @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.mergedFromIds(persist: Boolean) From a662d0f41c4f5f05977fde80f6f57761c2d72cc1 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 14 Sep 2020 11:27:33 +0200 Subject: [PATCH 040/121] changelog --- CHANGELOG.unreleased.md | 3 ++- MIGRATIONS.unreleased.md | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index a9e0f1e0af9..b3c9ab3e93f 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -16,9 +16,10 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Added the possibility to remove the fallback segmentation layer from a hybrid/volume tracing. Accessible by a minus button next to the layer's settings. [#4741](https://github.com/scalableminds/webknossos/pull/4766) - Added the possibility to undo and redo volume annotation strokes. 
[#4771](https://github.com/scalableminds/webknossos/pull/4771) - Added the possibility to navigate to the preceding/subsequent node by pressing "ctrl + ," or "ctrl + ." in a skeleton tracing. [#4147](https://github.com/scalableminds/webknossos/pull/4784) +- Added multi-resolution volume annotations. [#4755](https://github.com/scalableminds/webknossos/pull/4755) ### Changed -- +- New volume/hybrid annotations are now automatically multi-resolution volume annotations. [#4755](https://github.com/scalableminds/webknossos/pull/4755) ### Fixed - Improved the data loading behavior for flight and oblique mode. [#4800](https://github.com/scalableminds/webknossos/pull/4800) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index f4b6114cff5..dd635d0fcc8 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -6,7 +6,7 @@ This project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`. User-facing changes are documented in the [changelog](CHANGELOG.released.md). ## Unreleased -- +- To convert individual legacy volume annotations to multi-resolution volume annotations, download and re-upload them (upload may take a while). ### Postgres Evolutions: -- +- From 04cb83e6f956a9c85d637ec833e37b19e7515749 Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 14 Sep 2020 11:57:12 +0200 Subject: [PATCH 041/121] cleanup backend code --- .../volume/VolumeTracingDownsampling.scala | 18 +++------------- .../tracings/volume/VolumeTracingLayer.scala | 3 ++- .../volume/VolumeTracingService.scala | 21 ------------------- 3 files changed, 5 insertions(+), 37 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index 91e4d31b927..83f42034368 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -3,13 +3,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import com.scalableminds.util.geometry.Point3D import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedIntegerArray} -import com.scalableminds.webknossos.datastore.models.datasource.{ - DataLayerLike, - DataSource, - DataSourceLike, - ElementClass, - SegmentationLayerLike -} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceLike, ElementClass} import com.scalableminds.webknossos.tracingstore.TracingStoreWkRpcClient import com.scalableminds.webknossos.tracingstore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.tracingstore.tracings.{ @@ -69,8 +63,6 @@ trait VolumeTracingDownsampling def downsampleWithLayer(tracingId: String, tracing: VolumeTracing, dataLayer: VolumeTracingLayer)( implicit ec: ExecutionContext): Fox[Set[Point3D]] = { - //TODO: - // - list all keys first, before fetching actual data val bucketVolume = 32 * 32 * 32 val originalMag = Point3D(1, 1, 1) for { @@ -91,14 +83,12 @@ trait VolumeTracingDownsampling bucketVolume, elementClass, dataLayer) - //logger.info(s"bucketDataMap keys: ${bucketDataMap.keys.toList}") requiredMag } _ <- Fox.serialCombined(updatedBuckets.toList) { bucketPosition: BucketPosition => - 
//logger.info(s"saving bucket $bucketPosition") saveBucket(dataLayer, bucketPosition, bucketDataMap(bucketPosition), tracing.version) } - } yield (requiredMags.toSet + originalMag) + } yield requiredMags.toSet + originalMag } private def downsampleMagFromMag(previousMag: Point3D, @@ -109,13 +99,11 @@ trait VolumeTracingDownsampling bucketVolume: Int, elementClass: ElementClass.Value, dataLayer: VolumeTracingLayer): Unit = { - //logger.info(s"downsampling volume tracing mag $requiredMag from mag $previousMag...") val downScaleFactor = Point3D(requiredMag.x / previousMag.x, requiredMag.y / previousMag.y, requiredMag.z / previousMag.z) downsampledBucketPositions(originalBucketPositions, requiredMag).foreach { downsampledBucketPosition => val sourceBuckets: Seq[BucketPosition] = sourceBucketPositionsFor(downsampledBucketPosition, downScaleFactor, previousMag) - //logger.info(s"source buckets for bucket $downsampledBucketPosition: ${sourceBuckets}") val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMap(_)) val downsampledData: Array[Byte] = if (sourceData.forall(_.sameElements(Array[Byte](0)))) @@ -162,7 +150,7 @@ trait VolumeTracingDownsampling private def fillZeroedIfNeeded(sourceData: Seq[Array[Byte]], bucketVolume: Int, bytesPerElement: Int): Seq[Array[Byte]] = - // Reverted buckets and missing buckets arer epresented by a single zero-byte. + // Reverted buckets and missing buckets are represented by a single zero-byte. // For downsampling, those need to be replaced with the full bucket volume of zero-bytes. sourceData.map { sourceBucketData => if (sourceBucketData.sameElements(Array[Byte](0))) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index 993a8285e28..cbc7ecba7d0 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -94,5 +94,6 @@ case class VolumeTracingLayer( val mappings: Option[Set[String]] = None val resolutions: List[Point3D] = List(Point3D(1, 1, 1)) // unused for volume tracings - override def containsResolution(resolution: Point3D) = true + override def containsResolution(resolution: Point3D) = + true // allow requesting buckets of all resolutions. database takes care of missing. 
} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 912a3e49d27..6ac7a7c8d4c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -414,27 +414,6 @@ class VolumeTracingService @Inject()( tracing.largestSegmentId, isTemporaryTracing) - private def volumeTracingLayerWithFallback(tracingId: String, - tracing: VolumeTracing, - dataSource: DataSource): SegmentationLayer = { - val dataLayer = volumeTracingLayer(tracingId, tracing) - tracing.fallbackLayer - .flatMap(dataSource.getDataLayer) - .map { - case layer: SegmentationLayer if dataLayer.elementClass == layer.elementClass => - new FallbackLayerAdapter(dataLayer, layer) - case _ => - logger.error( - s"Fallback layer is not a segmentation layer and thus being ignored. " + - s"DataSource: ${dataSource.id}. FallbackLayer: ${tracing.fallbackLayer}.") - dataLayer - } - .getOrElse(dataLayer) - } - - def dataLayerForVolumeTracing(tracingId: String, dataSource: DataSource): Fox[SegmentationLayer] = - find(tracingId).map(volumeTracingLayerWithFallback(tracingId, _, dataSource)) - def updateActionLog(tracingId: String): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[CompactVolumeUpdateAction])): JsObject = Json.obj( From 3dfd131b0376bd3f7c2bcff507dadee22c185db8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 14 Sep 2020 14:08:08 +0200 Subject: [PATCH 042/121] fixed up and downsampling and added tests --- .../volume_annotation_sampling.js | 76 +++- .../volume_annotation_sampling.spec.js | 392 ++++++++++++++++++ 2 files changed, 454 insertions(+), 14 deletions(-) create mode 100644 frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 44c6f1f7d7f..7f830f1b292 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -1,18 +1,19 @@ // @flow -import constants, { type Vector2, type Vector3, type LabeledVoxelsMap } from "oxalis/constants"; +import constants, { type Vector3, type LabeledVoxelsMap } from "oxalis/constants"; import { map3 } from "libs/utils"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; import messages from "messages"; +import type { DimensionMap } from "oxalis/model/dimensions"; -export function upsampleVoxelMap( +function upsampleVoxelMap( labeledVoxelMap: LabeledVoxelsMap, dataCube: DataCube, sourceResolution: Vector3, sourceZoomStep: number, goalResolution: Vector3, goalZoomStep: number, - dimensionIndices: Vector2, + dimensionIndices: DimensionMap, ): LabeledVoxelsMap { // TODO: Add comment if (sourceZoomStep <= goalZoomStep) { @@ -52,6 +53,7 @@ export function upsampleVoxelMap( currentGoalBucketAddress[dimensionIndices[0]] += firstDimBucketOffset; currentGoalBucketAddress[dimensionIndices[1]] += secondDimBucketOffset; // The inner bucket of whose the voxelMap will be created. 
+ let annotatedAtleastOneVoxel = false; const currentGoalBucket = dataCube.getOrCreateBucket([ ...currentGoalBucketAddress, goalZoomStep, @@ -111,27 +113,30 @@ export function upsampleVoxelMap( ] = 1; } } + annotatedAtleastOneVoxel = true; } } } - labeledVoxelMapInGoalResolution.set(currentGoalBucket.zoomedAddress, currentGoalVoxelMap); + if (annotatedAtleastOneVoxel) { + labeledVoxelMapInGoalResolution.set(currentGoalBucket.zoomedAddress, currentGoalVoxelMap); + } } } } return labeledVoxelMapInGoalResolution; } -export function downsampleVoxelMap( +function downsampleVoxelMap( labeledVoxelMap: LabeledVoxelsMap, dataCube: DataCube, sourceResolution: Vector3, sourceZoomStep: number, goalResolution: Vector3, goalZoomStep: number, - dimensionIndices: Vector2, + dimensionIndices: DimensionMap, ): LabeledVoxelsMap { if (goalZoomStep <= sourceZoomStep) { - throw new Error("Trying to downsample a LabeledVoxelMap with the down sample function."); + throw new Error("Trying to upsample a LabeledVoxelMap with the down sample function."); } const labeledVoxelMapInGoalResolution: LabeledVoxelsMap = new Map(); const scaleToSource = map3((val, index) => val / sourceResolution[index], goalResolution); @@ -164,12 +169,15 @@ export function downsampleVoxelMap( ); // Calculate the offset in voxel the source bucket has to the goal bucket. const voxelOffset = map3( - (value, index) => value * constants.BUCKET_WIDTH * scaleToGoal[index], + (value, index) => Math.floor(value * constants.BUCKET_WIDTH * scaleToGoal[index]), bucketOffset, ); - const goalVoxelMap = new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); + const goalVoxelMap = + labeledVoxelMapInGoalResolution.get(goalBucket.zoomedAddress) || + new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); // Iterate over the voxelMap in the goal resolution and search in each voxel for a labeled voxel (kernel-wise iteration). - const kernelSize = map3(scaleValue => Math.ceil(scaleValue), scaleToGoal); + const kernelSize = map3(scaleValue => Math.ceil(scaleValue), scaleToSource); + // The next two for loops move the kernel. for ( let firstVoxelDim = 0; firstVoxelDim < constants.BUCKET_WIDTH; @@ -180,6 +188,7 @@ export function downsampleVoxelMap( secondVoxelDim < constants.BUCKET_WIDTH; secondVoxelDim += kernelSize[dimensionIndices[1]] ) { + // The next two for loops iterate within the kernel. 
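+          // As soon as one labeled source voxel is found inside the kernel, the corresponding
+          // voxel of the coarser goal map is set to 1; foundVoxel short-circuits both kernel loops.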
let foundVoxel = false; for ( let firstKernelDim = 0; @@ -191,12 +200,17 @@ export function downsampleVoxelMap( secondKernelDim < kernelSize[dimensionIndices[1]] && !foundVoxel; secondKernelDim++ ) { - const firstDim = firstVoxelDim + firstKernelDim + voxelOffset[dimensionIndices[0]]; - const secondDim = secondVoxelDim + secondKernelDim + voxelOffset[dimensionIndices[1]]; + const firstDim = firstVoxelDim + firstKernelDim; + const secondDim = secondVoxelDim + secondKernelDim; if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { + const firstDimInGoalBucket = + Math.floor(firstDim * scaleToGoal[dimensionIndices[0]]) + + voxelOffset[dimensionIndices[0]]; + const secondDimInGoalBucket = + Math.floor(secondDim * scaleToGoal[dimensionIndices[1]]) + + voxelOffset[dimensionIndices[1]]; goalVoxelMap[ - firstDim * scaleToGoal[dimensionIndices[0]] * constants.BUCKET_WIDTH + - secondDim * scaleToGoal[dimensionIndices[1]] + firstDimInGoalBucket * constants.BUCKET_WIDTH + secondDimInGoalBucket ] = 1; foundVoxel = true; } @@ -208,3 +222,37 @@ export function downsampleVoxelMap( } return labeledVoxelMapInGoalResolution; } + +export default function sampleVoxelMapToResolution( + labeledVoxelMap: LabeledVoxelsMap, + dataCube: DataCube, + sourceResolution: Vector3, + sourceZoomStep: number, + goalResolution: Vector3, + goalZoomStep: number, + dimensionIndices: DimensionMap, +): LabeledVoxelsMap { + if (sourceZoomStep < goalZoomStep) { + return downsampleVoxelMap( + labeledVoxelMap, + dataCube, + sourceResolution, + sourceZoomStep, + goalResolution, + goalZoomStep, + dimensionIndices, + ); + } else if (goalZoomStep < sourceZoomStep) { + return upsampleVoxelMap( + labeledVoxelMap, + dataCube, + sourceResolution, + sourceZoomStep, + goalResolution, + goalZoomStep, + dimensionIndices, + ); + } else { + return labeledVoxelMap; + } +} diff --git a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js new file mode 100644 index 00000000000..32a92873ad7 --- /dev/null +++ b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js @@ -0,0 +1,392 @@ +/* + * cube.spec.js + * @flow + */ +import _ from "lodash"; + +import { tracing as skeletontracingServerObject } from "test/fixtures/skeletontracing_server_objects"; +import sampleVoxelMapToResolution from "oxalis/model/volumetracing/volume_annotation_sampling"; +import Constants, { type Vector4 } from "oxalis/constants"; +import anyTest, { type TestInterface } from "ava"; +import datasetServerObject from "test/fixtures/dataset_server_object"; +import mockRequire from "mock-require"; +import sinon from "sinon"; + +mockRequire.stopAll(); + +const StoreMock = { + getState: () => ({ + dataset: datasetServerObject, + tracing: { skeleton: skeletontracingServerObject }, + datasetConfiguration: { fourBit: false }, + }), + dispatch: sinon.stub(), + subscribe: sinon.stub(), +}; + +mockRequire("oxalis/store", StoreMock); + +mockRequire("oxalis/model/sagas/root_saga", function*() { + yield; +}); + +type LabeledVoxelsMapAsArray = Array<[Vector4, Uint8Array]>; +// Avoid node caching and make sure all mockRequires are applied +const Cube = mockRequire.reRequire("oxalis/model/bucket_data_handling/data_cube").default; + +// Ava's recommendation for Flow types +// https://github.com/avajs/ava/blob/master/docs/recipes/flow.md#typing-tcontext +const test: TestInterface<{ + cube: Cube, +}> = (anyTest: any); + +test.beforeEach(t => { + 
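+  // The mocked layer exposes a full resolution pyramid and the pull/push queues are stubbed,
+  // so buckets can be created for the zoom steps used in the sampling tests below.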
const mockedLayer = { + resolutions: [[1, 1, 1], [2, 2, 2], [4, 4, 4], [8, 8, 8], [16, 16, 16], [32, 32, 32]], + }; + const cube = new Cube([100, 100, 100], 3, "uint32", mockedLayer); + const pullQueue = { + add: sinon.stub(), + pull: sinon.stub(), + }; + const pushQueue = { + insert: sinon.stub(), + push: sinon.stub(), + }; + cube.initializeWithQueues(pullQueue, pushQueue); + + t.context = { + cube, + }; +}); + +function getEmptyVoxelMap() { + return new Uint8Array(Constants.BUCKET_WIDTH ** 2).fill(0); +} + +function labelVoxelInVoxelMap(firstDim: number, secondDim: number, voxelMap: Uint8Array) { + voxelMap[firstDim * Constants.BUCKET_WIDTH + secondDim] = 1; +} + +function getVoxelMapEntry(firstDim: number, secondDim: number, voxelMap: Uint8Array) { + return voxelMap[firstDim * Constants.BUCKET_WIDTH + secondDim]; +} + +test("Upsampling an annotation should work in the top left part of a bucket", t => { + const { cube } = t.context; + const sourceVoxelMap = getEmptyVoxelMap(); + [[5, 5], [5, 6], [6, 5], [6, 6]].forEach(([firstDim, secondDim]) => + labelVoxelInVoxelMap(firstDim, secondDim, sourceVoxelMap), + ); + const goalVoxelMap = getEmptyVoxelMap(); + [[10, 10], [10, 11], [10, 12], [10, 13], [11, 10], [11, 11], [11, 12], [11, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + [[12, 10], [12, 11], [12, 12], [12, 13], [13, 10], [13, 11], [13, 12], [13, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + const bucket = cube.getOrCreateBucket([0, 0, 0, 1]); + const labeledVoxelsMap = new Map([[bucket.zoomedAddress, sourceVoxelMap]]); + const upsampledVoxelMapPerBucket = sampleVoxelMapToResolution( + labeledVoxelsMap, + cube, + [2, 2, 2], + 1, + [1, 1, 1], + 0, + [0, 1, 2], + ); + const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); + const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; + const upsampledVoxelMap = upsampledVoxelMapAsArray[0][1]; + t.deepEqual( + bucketZoomedAddress, + [0, 0, 0, 0], + "The bucket of the upsampled map should be correct.", + ); + for (let firstDim = 0; firstDim < 32; firstDim++) { + for (let secondDim = 0; secondDim < 32; secondDim++) { + t.is( + getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), + getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), + "The labeled voxels of the upsampled voxel map should match the expected labels", + ); + } + } +}); + +test("Upsampling an annotation should work in the top right part of a bucket", t => { + const { cube } = t.context; + const sourceVoxelMap = getEmptyVoxelMap(); + [[21, 5], [21, 6], [22, 5], [22, 6]].forEach(([firstDim, secondDim]) => + labelVoxelInVoxelMap(firstDim, secondDim, sourceVoxelMap), + ); + const goalVoxelMap = getEmptyVoxelMap(); + [[10, 10], [10, 11], [10, 12], [10, 13], [11, 10], [11, 11], [11, 12], [11, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + [[12, 10], [12, 11], [12, 12], [12, 13], [13, 10], [13, 11], [13, 12], [13, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + const bucket = cube.getOrCreateBucket([0, 0, 0, 1]); + const labeledVoxelsMap = new Map([[bucket.zoomedAddress, sourceVoxelMap]]); + const upsampledVoxelMapPerBucket = sampleVoxelMapToResolution( + labeledVoxelsMap, + cube, + [2, 2, 2], + 1, + [1, 1, 1], + 0, + [0, 1, 2], + ); + const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = 
Array.from(upsampledVoxelMapPerBucket); + const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; + const upsampledVoxelMap = upsampledVoxelMapAsArray[0][1]; + t.deepEqual( + bucketZoomedAddress, + [1, 0, 0, 0], + "The bucket of the upsampled map should be correct.", + ); + for (let firstDim = 0; firstDim < 32; firstDim++) { + for (let secondDim = 0; secondDim < 32; secondDim++) { + t.is( + getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), + getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), + "The labeled voxels of the upsampled voxel map should match the expected labels", + ); + } + } +}); + +test("Upsampling an annotation should work in the bottom left part of a bucket", t => { + const { cube } = t.context; + const sourceVoxelMap = getEmptyVoxelMap(); + [[5, 21], [6, 21], [5, 22], [6, 22]].forEach(([firstDim, secondDim]) => + labelVoxelInVoxelMap(firstDim, secondDim, sourceVoxelMap), + ); + const goalVoxelMap = getEmptyVoxelMap(); + [[10, 10], [10, 11], [10, 12], [10, 13], [11, 10], [11, 11], [11, 12], [11, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + [[12, 10], [12, 11], [12, 12], [12, 13], [13, 10], [13, 11], [13, 12], [13, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + const bucket = cube.getOrCreateBucket([0, 0, 0, 1]); + const labeledVoxelsMap = new Map([[bucket.zoomedAddress, sourceVoxelMap]]); + const upsampledVoxelMapPerBucket = sampleVoxelMapToResolution( + labeledVoxelsMap, + cube, + [2, 2, 2], + 1, + [1, 1, 1], + 0, + [0, 1, 2], + ); + const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); + const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; + const upsampledVoxelMap = upsampledVoxelMapAsArray[0][1]; + t.deepEqual( + bucketZoomedAddress, + [0, 1, 0, 0], + "The bucket of the upsampled map should be correct.", + ); + for (let firstDim = 0; firstDim < 32; firstDim++) { + for (let secondDim = 0; secondDim < 32; secondDim++) { + t.is( + getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), + getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), + "The labeled voxels of the upsampled voxel map should match the expected labels", + ); + } + } +}); + +test("Upsampling an annotation should work in the bottom right part of a bucket", t => { + const { cube } = t.context; + const sourceVoxelMap = getEmptyVoxelMap(); + [[21, 21], [22, 21], [21, 22], [22, 22]].forEach(([firstDim, secondDim]) => + labelVoxelInVoxelMap(firstDim, secondDim, sourceVoxelMap), + ); + const goalVoxelMap = getEmptyVoxelMap(); + [[10, 10], [10, 11], [10, 12], [10, 13], [11, 10], [11, 11], [11, 12], [11, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + [[12, 10], [12, 11], [12, 12], [12, 13], [13, 10], [13, 11], [13, 12], [13, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + const bucket = cube.getOrCreateBucket([0, 0, 0, 1]); + const labeledVoxelsMap = new Map([[bucket.zoomedAddress, sourceVoxelMap]]); + const upsampledVoxelMapPerBucket = sampleVoxelMapToResolution( + labeledVoxelsMap, + cube, + [2, 2, 2], + 1, + [1, 1, 1], + 0, + [0, 1, 2], + ); + const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); + const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; + const upsampledVoxelMap = upsampledVoxelMapAsArray[0][1]; + t.deepEqual( + bucketZoomedAddress, + [1, 1, 0, 
0], + "The bucket of the upsampled map should be correct.", + ); + for (let firstDim = 0; firstDim < 32; firstDim++) { + for (let secondDim = 0; secondDim < 32; secondDim++) { + t.is( + getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), + getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), + "The labeled voxels of the upsampled voxel map should match the expected labels", + ); + } + } +}); + +test("Upsampling an annotation should work across more than one resolution", t => { + const { cube } = t.context; + const sourceVoxelMap = getEmptyVoxelMap(); + [[10, 10], [10, 11], [11, 10], [11, 11]].forEach(([firstDim, secondDim]) => + labelVoxelInVoxelMap(firstDim, secondDim, sourceVoxelMap), + ); + const goalVoxelMap = getEmptyVoxelMap(); + // scaling [10,10],[11,11] up: 10 -> 20 -> 40 (mod 32) -> 8; 11 -> 23 -> 47 (mod 32) -> 15; + for (let firstDim = 8; firstDim <= 15; firstDim++) { + for (let secondDim = 8; secondDim <= 15; secondDim++) { + labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap); + } + } + const bucket = cube.getOrCreateBucket([0, 0, 0, 2]); + const labeledVoxelsMap = new Map([[bucket.zoomedAddress, sourceVoxelMap]]); + const upsampledVoxelMapPerBucket = sampleVoxelMapToResolution( + labeledVoxelsMap, + cube, + [4, 4, 4], + 2, + [1, 1, 1], + 0, + [0, 1, 2], + ); + const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); + const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; + const upsampledVoxelMap = upsampledVoxelMapAsArray[0][1]; + t.deepEqual( + bucketZoomedAddress, + [1, 1, 0, 0], + "The bucket of the upsampled map should be correct.", + ); + for (let firstDim = 0; firstDim < 32; firstDim++) { + for (let secondDim = 0; secondDim < 32; secondDim++) { + t.is( + getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), + getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), + "The labeled voxels of the upsampled voxel map should match the expected labels", + ); + } + } +}); + +test("Downsampling annotation of neighbour buckets should result in one downsampled voxelMap", t => { + const { cube } = t.context; + const labeledVoxelsMap = new Map(); + [[0, 0, 0], [1, 0, 0], [0, 1, 0], [1, 1, 0]].forEach(zoomedAddress => { + const voxelMap = getEmptyVoxelMap(); + [[10, 10], [10, 11], [10, 12], [10, 13], [11, 10], [11, 11], [11, 12], [11, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, voxelMap), + ); + [[12, 10], [12, 11], [12, 12], [12, 13], [13, 10], [13, 11], [13, 12], [13, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, voxelMap), + ); + const bucket = cube.getOrCreateBucket([...zoomedAddress, 0]); + labeledVoxelsMap.set(bucket.zoomedAddress, voxelMap); + }); + const goalVoxelMap = getEmptyVoxelMap(); + [[0, 0], [16, 0], [0, 16], [16, 16]].forEach(([firstOffset, secondOffset]) => { + [[5, 5], [5, 6], [6, 5], [6, 6]].forEach(([firstDim, secondDim]) => { + labelVoxelInVoxelMap(firstDim + firstOffset, secondDim + secondOffset, goalVoxelMap); + }); + }); + const upsampledVoxelMapPerBucket = sampleVoxelMapToResolution( + labeledVoxelsMap, + cube, + [1, 1, 1], + 0, + [2, 2, 2], + 1, + [0, 1, 2], + ); + const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); + const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; + const upsampledVoxelMap = upsampledVoxelMapAsArray[0][1]; + t.deepEqual( + bucketZoomedAddress, + [0, 0, 0, 1], + "The bucket of the downsampled map should be correct.", + ); + for (let firstDim = 0; 
firstDim < 32; firstDim++) { + for (let secondDim = 0; secondDim < 32; secondDim++) { + t.is( + getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), + getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), + `The labeled voxels of the downsampled voxel map should match the expected labels: ${firstDim}, ${secondDim}, got ${getVoxelMapEntry( + firstDim, + secondDim, + upsampledVoxelMap, + )} , expected ${getVoxelMapEntry(firstDim, secondDim, goalVoxelMap)}.`, + ); + } + } +}); + +test("Downsampling annotation should work across more than one resolution", t => { + const { cube } = t.context; + const labeledVoxelsMap = new Map(); + [[0, 0, 0], [1, 0, 0], [0, 1, 0], [1, 1, 0]].forEach(zoomedAddress => { + const voxelMap = getEmptyVoxelMap(); + [[10, 10], [10, 11], [10, 12], [10, 13], [11, 10], [11, 11], [11, 12], [11, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, voxelMap), + ); + [[12, 10], [12, 11], [12, 12], [12, 13], [13, 10], [13, 11], [13, 12], [13, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, voxelMap), + ); + const bucket = cube.getOrCreateBucket([...zoomedAddress, 0]); + labeledVoxelsMap.set(bucket.zoomedAddress, voxelMap); + }); + const goalVoxelMap = getEmptyVoxelMap(); + [[0, 0], [8, 0], [0, 8], [8, 8]].forEach(([firstOffset, secondOffset]) => { + [[2, 2], [2, 3], [3, 2], [3, 3]].forEach(([firstDim, secondDim]) => { + labelVoxelInVoxelMap(firstDim + firstOffset, secondDim + secondOffset, goalVoxelMap); + }); + }); + const upsampledVoxelMapPerBucket = sampleVoxelMapToResolution( + labeledVoxelsMap, + cube, + [1, 1, 1], + 0, + [4, 4, 4], + 2, + [0, 1, 2], + ); + const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); + const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; + const upsampledVoxelMap = upsampledVoxelMapAsArray[0][1]; + t.deepEqual( + bucketZoomedAddress, + [0, 0, 0, 2], + "The bucket of the downsampled map should be correct.", + ); + for (let firstDim = 0; firstDim < 32; firstDim++) { + for (let secondDim = 0; secondDim < 32; secondDim++) { + t.is( + getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), + getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), + `The labeled voxels of the downsampled voxel map should match the expected labels: ${firstDim}, ${secondDim}, got ${getVoxelMapEntry( + firstDim, + secondDim, + upsampledVoxelMap, + )} , expected ${getVoxelMapEntry(firstDim, secondDim, goalVoxelMap)}.`, + ); + } + } +}); From 163da0483d653db9ca115cd45363bc95630a2145 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 14 Sep 2020 14:42:41 +0200 Subject: [PATCH 043/121] added method to apply a voxel map --- .../volume_annotation_sampling.js | 29 ++++++- .../volume_annotation_sampling.spec.js | 75 ++++++++++++++----- 2 files changed, 86 insertions(+), 18 deletions(-) diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 7f830f1b292..5ddef22226b 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -1,6 +1,6 @@ // @flow -import constants, { type Vector3, type LabeledVoxelsMap } from "oxalis/constants"; +import constants, { type Vector2, type Vector3, type LabeledVoxelsMap } from "oxalis/constants"; import { map3 } from "libs/utils"; import type DataCube from 
"oxalis/model/bucket_data_handling/data_cube"; import messages from "messages"; @@ -256,3 +256,30 @@ export default function sampleVoxelMapToResolution( return labeledVoxelMap; } } + +export function applyVoxelMap( + labeledVoxelMap: LabeledVoxelsMap, + dataCube: DataCube, + cellId: number, + get3DAddress: Vector2 => Vector3, +) { + for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { + const bucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); + if (bucket.type === "null") { + continue; + } + bucket.markAndAddBucketForUndo(); + const zoomedStep = bucket.zoomedAddress[3]; + const data = bucket.getOrCreateData(); + for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) { + if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { + const voxelToLabel = get3DAddress([firstDim, secondDim]); + const voxelAddress = dataCube.getVoxelIndex(voxelToLabel, zoomedStep); + data[voxelAddress] = cellId; + } + } + } + bucket.trigger("bucketLabeled"); + } +} diff --git a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js index 32a92873ad7..d2136f8e4b3 100644 --- a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js +++ b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js @@ -5,8 +5,10 @@ import _ from "lodash"; import { tracing as skeletontracingServerObject } from "test/fixtures/skeletontracing_server_objects"; -import sampleVoxelMapToResolution from "oxalis/model/volumetracing/volume_annotation_sampling"; -import Constants, { type Vector4 } from "oxalis/constants"; +import sampleVoxelMapToResolution, { + applyVoxelMap, +} from "oxalis/model/volumetracing/volume_annotation_sampling"; +import Constants, { type Vector2, type Vector4 } from "oxalis/constants"; import anyTest, { type TestInterface } from "ava"; import datasetServerObject from "test/fixtures/dataset_server_object"; import mockRequire from "mock-require"; @@ -104,8 +106,8 @@ test("Upsampling an annotation should work in the top left part of a bucket", t [0, 0, 0, 0], "The bucket of the upsampled map should be correct.", ); - for (let firstDim = 0; firstDim < 32; firstDim++) { - for (let secondDim = 0; secondDim < 32; secondDim++) { + for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { t.is( getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), @@ -147,8 +149,8 @@ test("Upsampling an annotation should work in the top right part of a bucket", t [1, 0, 0, 0], "The bucket of the upsampled map should be correct.", ); - for (let firstDim = 0; firstDim < 32; firstDim++) { - for (let secondDim = 0; secondDim < 32; secondDim++) { + for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { t.is( getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), @@ -190,8 +192,8 @@ test("Upsampling an annotation should work in the bottom left part of a bucket", [0, 1, 0, 0], "The bucket of the upsampled map should be correct.", ); - for (let firstDim = 0; firstDim < 32; firstDim++) { - for (let secondDim = 0; secondDim < 32; secondDim++) { + for (let firstDim = 0; 
firstDim < Constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { t.is( getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), @@ -233,8 +235,8 @@ test("Upsampling an annotation should work in the bottom right part of a bucket" [1, 1, 0, 0], "The bucket of the upsampled map should be correct.", ); - for (let firstDim = 0; firstDim < 32; firstDim++) { - for (let secondDim = 0; secondDim < 32; secondDim++) { + for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { t.is( getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), @@ -251,7 +253,7 @@ test("Upsampling an annotation should work across more than one resolution", t = labelVoxelInVoxelMap(firstDim, secondDim, sourceVoxelMap), ); const goalVoxelMap = getEmptyVoxelMap(); - // scaling [10,10],[11,11] up: 10 -> 20 -> 40 (mod 32) -> 8; 11 -> 23 -> 47 (mod 32) -> 15; + // scaling [10,10],[11,11] up: 10 -> 20 -> 40 (mod Constants.BUCKET_WIDTH) -> 8; 11 -> 23 -> 47 (mod Constants.BUCKET_WIDTH) -> 15; for (let firstDim = 8; firstDim <= 15; firstDim++) { for (let secondDim = 8; secondDim <= 15; secondDim++) { labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap); @@ -276,8 +278,8 @@ test("Upsampling an annotation should work across more than one resolution", t = [1, 1, 0, 0], "The bucket of the upsampled map should be correct.", ); - for (let firstDim = 0; firstDim < 32; firstDim++) { - for (let secondDim = 0; secondDim < 32; secondDim++) { + for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { t.is( getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), @@ -324,8 +326,8 @@ test("Downsampling annotation of neighbour buckets should result in one downsamp [0, 0, 0, 1], "The bucket of the downsampled map should be correct.", ); - for (let firstDim = 0; firstDim < 32; firstDim++) { - for (let secondDim = 0; secondDim < 32; secondDim++) { + for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { t.is( getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), @@ -376,8 +378,8 @@ test("Downsampling annotation should work across more than one resolution", t => [0, 0, 0, 2], "The bucket of the downsampled map should be correct.", ); - for (let firstDim = 0; firstDim < 32; firstDim++) { - for (let secondDim = 0; secondDim < 32; secondDim++) { + for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { t.is( getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), @@ -390,3 +392,42 @@ test("Downsampling annotation should work across more than one resolution", t => } } }); + +test("A labeledVoxelMap should be applied correctly", t => { + const { cube } = t.context; + const bucket = cube.getOrCreateBucket([0, 0, 0, 0]); + const labeledVoxelsMap = new Map(); + const voxelMap = getEmptyVoxelMap(); + const voxelsToLabel = [ + [10, 10], + [10, 11], + [10, 12], + [10, 13], + [11, 10], + [11, 11], + [11, 12], + [11, 13], + ]; + voxelsToLabel.forEach(([firstDim, 
secondDim]) => + labelVoxelInVoxelMap(firstDim, secondDim, voxelMap), + ); + labeledVoxelsMap.set(bucket.zoomedAddress, voxelMap); + const get3DAddress = (voxel2D: Vector2) => [...voxel2D, 5]; + const expectedBucketData = new Uint32Array(Constants.BUCKET_SIZE).fill(0); + voxelsToLabel.forEach(([firstDim, secondDim]) => { + const addr = cube.getVoxelIndex([firstDim, secondDim, 5], 0); + expectedBucketData[addr] = 1; + }); + applyVoxelMap(labeledVoxelsMap, cube, 1, get3DAddress); + const labeledBucketData = bucket.getOrCreateData(); + for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { + const addr = cube.getVoxelIndex([firstDim, secondDim, 5], 0); + t.is( + labeledBucketData[addr], + expectedBucketData[addr], + `Did not apply voxel map at ${[firstDim, secondDim, 5, 1].toString()} correctly.`, + ); + } + } +}); From 0f009a69ae0aa1e4d61cb3fa15e9b8cb72e0a00c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 14 Sep 2020 18:21:14 +0200 Subject: [PATCH 044/121] fixed upsampling and added regression test --- .../model/bucket_data_handling/data_cube.js | 169 +----------------- .../oxalis/model/sagas/volumetracing_saga.js | 66 +++++-- .../volume_annotation_sampling.js | 20 ++- .../volume_annotation_sampling.spec.js | 53 +++++- 4 files changed, 124 insertions(+), 184 deletions(-) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 31056b8d27a..b9d0fc664ec 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -31,6 +31,7 @@ import constants, { type Vector3, type Vector4, type BoundingBoxType, + type LabeledVoxelsMap, } from "oxalis/constants"; import { type ElementClass } from "admin/api_flow_types"; import { areBoundingBoxesOverlappingOrTouching, map3, iterateThroughBounds } from "libs/utils"; @@ -372,12 +373,6 @@ class DataCube { if (shouldUpdateVoxel) { const labelFunc = (data: BucketDataArray): void => { - if (address[3] === 1) - console.log( - `labeled in bucket ${bucket.zoomedAddress.toString()}, voxel ${voxel.toString()}, voxelIndex ${voxelIndex}, with modulo ${voxel - .map(a => Math.floor(a / 2) % 32) - .toString()}`, - ); data[voxelIndex] = label; }; bucket.label(labelFunc); @@ -399,7 +394,7 @@ class DataCube { get2DAddress: Vector3 => Vector2, dimensionIndices: DimensionMap, viewportBoundings: BoundingBoxType, - zoomStep: number = 0, + zoomStep: number, ): ?LabeledVoxelsMap { // This flood-fill algorithm works in two nested levels and uses a list of buckets to flood fill. // On the inner level a bucket is flood-filled and if the iteration of the buckets data @@ -447,7 +442,7 @@ class DataCube { bucketData[initialVoxelIndex] = cellId; // Create an array saving the labeled voxel of the current slice for the current bucket, if there isn't already one. 
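  // After this change, each entry of the resulting LabeledVoxelsMap is keyed by the
  // bucket's zoomedAddress and holds a Uint8Array of BUCKET_WIDTH ** 2 flags, where
  // a 1 at index firstDim * constants.BUCKET_WIDTH + secondDim marks a voxel of the
  // current 2D slice that was labeled in that bucket.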
const currentLabeledVoxelMap = - bucketsWithLabeledVoxelsMap.get(currentBucket) || + bucketsWithLabeledVoxelsMap.get(currentBucket.zoomedAddress) || new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); const markVoxelOfSliceAsLabeled = ([firstCoord, secondCoord]) => { currentLabeledVoxelMap[firstCoord * constants.BUCKET_WIDTH + secondCoord] = 1; @@ -484,171 +479,23 @@ class DataCube { const neighbourVoxelIndex = this.getVoxelIndexByVoxelOffset(neighbourVoxel3D); if (bucketData[neighbourVoxelIndex] === sourceCellId) { bucketData[neighbourVoxelIndex] = cellId; - console.log( - `labeled in bucket ${currentBucket.zoomedAddress.toString()}, voxel ${neighbourVoxel3D.toString()}, voxelIndex ${neighbourVoxelIndex}`, - ); markVoxelOfSliceAsLabeled(neighbourVoxel); neighbourVoxelStack.pushVoxel(neighbourVoxel); } } } } - bucketsWithLabeledVoxelsMap.set(currentBucket, currentLabeledVoxelMap); - } - for (const bucket of bucketsWithLabeledVoxelsMap.keys()) { - this.pushQueue.insert(bucket); - bucket.trigger("bucketLabeled"); + bucketsWithLabeledVoxelsMap.set(currentBucket.zoomedAddress, currentLabeledVoxelMap); } - return bucketsWithLabeledVoxelsMap; - } - - applyLabeledVoxelMapToResolution( - labeledVoxelMap: LabeledVoxelsMap, - sourceResolution: Vector3, - sourceZoomStep: number, - goalResolution: Vector3, - goalZoomStep: number, - cellId: number, - thirdDimension: number, - get3DAddress: Vector2 => Vector3, - ) { - const labeledBuckets = new Set(); - const isDownsampling = goalZoomStep > sourceZoomStep; - const scaleToSource = map3((val, index) => val / sourceResolution[index], goalResolution); - const scaleToGoal = map3((val, index) => val / goalResolution[index], sourceResolution); - const voxelsToLabelInEachDirection = map3(scaleValue => Math.ceil(scaleValue), scaleToGoal); - const voxelToGoalResolution = voxelInBucket => - map3((value, index) => Math.floor(value * scaleToGoal[index]), voxelInBucket); - for (const [labeledBucket, voxelMap] of labeledVoxelMap) { - const bucketsOfGoalResolution = this.getBucketsContainingBucket( - labeledBucket, - sourceResolution, - goalResolution, - goalZoomStep, - ); - if (!bucketsOfGoalResolution) { + for (const bucketZoomedAddress of bucketsWithLabeledVoxelsMap.keys()) { + const bucket = this.getBucket(bucketZoomedAddress); + if (bucket.type === "null") { continue; } - const labelVoxelInGoalResolution = (x, y, z) => { - let bucket = bucketsOfGoalResolution[0][0][0]; - if (isDownsampling) { - // If the annotation given by the voxelMap will be downsampled, the labeledBucket can only be within one bucket in the lower resolution. - // It is possible that the labeledBucket is does not have the same global origin as the bucket of the lower resolution. Thus an additional offset is needed. - const offset = [0, 0, 0]; - for (let index = 0; index < 3; index++) { - // Scaling the zoomed address of the bucket up to the source resolution and calculate the offset. - const upscaledZoomAddressPart = bucket.zoomedAddress[index] * scaleToSource[index]; - offset[index] = labeledBucket.zoomedAddress[index] - upscaledZoomAddressPart; - offset[index] = Math.round(offset[index] * constants.BUCKET_WIDTH * scaleToGoal[index]); - } - x += offset[0]; - y += offset[1]; - z += offset[2]; - } else { - // If this method upsamples the labeled voxels, the voxel can be within one out of many buckets. - // As the x, y, z values are already scaled up, the bucket the belong to is calculated and x, y, z get shrinked to be within that bucket. 
- const xBucket = Math.floor(x / constants.BUCKET_WIDTH); - const yBucket = Math.floor(y / constants.BUCKET_WIDTH); - const zBucket = Math.floor(z / constants.BUCKET_WIDTH); - bucket = bucketsOfGoalResolution[xBucket][yBucket][zBucket]; - x %= constants.BUCKET_WIDTH; - y %= constants.BUCKET_WIDTH; - z %= constants.BUCKET_WIDTH; - } - bucket.markAndAddBucketForUndo(); - const voxelIndex = this.getVoxelIndexByVoxelOffset([x, y, z]); - const bucketData = bucket.getOrCreateData(); - bucketData[voxelIndex] = cellId; - labeledBuckets.add(bucket); - console.log( - `labeled in bucket ${bucket.zoomedAddress.toString()}, voxel ${[ - x, - y, - z, - ].toString()}, voxelIndex ${voxelIndex}`, - ); - }; - for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) { - for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) { - if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { - const voxelInBucket = get3DAddress([firstDim, secondDim]); - // As the iteration is only over the first two dimensions the third dimension is not within the labeledBucket. - // Here we adjust the third dimension to be with the source labeledBucket. - voxelInBucket[thirdDimension] %= constants.BUCKET_WIDTH; - const voxelInGoalResolution = voxelToGoalResolution(voxelInBucket); - const maxVoxelBoundingsInGoalResolution = [ - voxelInGoalResolution[0] + voxelsToLabelInEachDirection[0], - voxelInGoalResolution[1] + voxelsToLabelInEachDirection[1], - voxelInGoalResolution[2] + voxelsToLabelInEachDirection[2], - ]; - iterateThroughBounds( - voxelInGoalResolution, - maxVoxelBoundingsInGoalResolution, - labelVoxelInGoalResolution, - ); - } - } - } - } - for (const bucket of labeledBuckets.keys()) { - console.log(`labeled in bucket ${bucket.zoomedAddress.toString()}`); this.pushQueue.insert(bucket); bucket.trigger("bucketLabeled"); } - } - - getBucketsContainingBucket( - bucket: DataBucket, - bucketResolution: Vector3, - goalResolution: Vector3, - zoomStep: number, - ): ?Array>> { - const mapToGoalResolution = (value, index) => - Math.floor(value * (bucketResolution[index] / goalResolution[index])); - const bucketMin = [bucket.zoomedAddress[0], bucket.zoomedAddress[1], bucket.zoomedAddress[2]]; - const bucketMax = [bucketMin[0] + 1, bucketMin[1] + 1, bucketMin[2] + 1]; - // If the buckets zoomStep is smaller than the wanted zoom step, - // then the bucket is completely contained by a bucket of the higher goalResolution. - const bucketMinInOtherResolution = map3(mapToGoalResolution, bucketMin); - const bucketMaxInOtherResolution = map3(mapToGoalResolution, bucketMax); - const bucketsInGoalResolution = []; - // Iteration over all three dimensions until all buckets of the goal resolution - // that overlap with the given bucket are added to bucketsInGoalResolution. - // Note: The bucketsInGoalResolution.length === 0 check ensures that the bucket containing the given bucket - // will be added to the array when the goalResolution is lower than the buckets resolution. 
- for ( - let x = bucketMinInOtherResolution[0]; - x < bucketMaxInOtherResolution[0] || bucketsInGoalResolution.length === 0; - x++ - ) { - const bucketsInYDirection = []; - for ( - let y = bucketMinInOtherResolution[1]; - y < bucketMaxInOtherResolution[1] || bucketsInYDirection.length === 0; - y++ - ) { - const bucketsInZDirection = []; - for ( - let z = bucketMinInOtherResolution[2]; - z < bucketMaxInOtherResolution[2] || bucketsInZDirection.length === 0; - z++ - ) { - const bucketsZoomedAddress = [x, y, z, zoomStep]; - const currentBucketInGoalResolution = this.getOrCreateBucket(bucketsZoomedAddress); - if (currentBucketInGoalResolution.type === "null") { - console.warn( - `The bucket at ${bucket.zoomedAddress.toString()} has not matching bucket` + - ` in resolution ${goalResolution.toString()}. The buckets address is ${bucketsZoomedAddress.toString()}`, - ); - return null; - } - bucketsInZDirection.push(currentBucketInGoalResolution); - } - bucketsInYDirection.push(bucketsInZDirection); - } - bucketsInGoalResolution.push(bucketsInYDirection); - } - return bucketsInGoalResolution; + return bucketsWithLabeledVoxelsMap; } setBucketData(zoomedAddress: Vector4, data: Uint8Array) { diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 55c9f7f6a61..4311f5996f9 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -20,6 +20,9 @@ import { select, take, } from "oxalis/model/sagas/effect-generators"; +import sampleVoxelMapToResolution, { + applyVoxelMap, +} from "oxalis/model/volumetracing/volume_annotation_sampling"; import { type UpdateAction, updateVolumeTracing, @@ -49,6 +52,7 @@ import Constants, { type Vector2, type Vector3, VolumeToolEnum, + type LabeledVoxelsMap, } from "oxalis/constants"; import Dimensions from "oxalis/model/dimensions"; import Model from "oxalis/model"; @@ -159,12 +163,11 @@ export function* editVolumeLayerAsync(): Generator { activeViewport, numberOfSlices, ); - const currentResolution = yield* select(state => getCurrentResolution(state)); yield* call( labelWithIterator, currentLayer.getCircleVoxelIterator( addToLayerAction.position, - currentResolution, + activeResolution, currentViewportBounding, ), contourTracingMode, @@ -300,9 +303,9 @@ export function* floodFill(): Saga { const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); const allResolutions = yield* select(state => getResolutions(state.dataset)); const activeResolution = allResolutions[activeZoomStep]; - // The flood fill and the applyLabeledVoxelMapToResolution method of the cube iterates within the bucket. + // The floodfill and applyVoxelMap methods of iterates within the bucket. // Thus thirdDimensionValue must also be within the initial bucket in the correct resolution. 
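  // Illustrative example with hypothetical values: for BUCKET_WIDTH === 32, a seed at
  // z === 300 in an active resolution of [4, 4, 4] lies at Math.floor(300 / 4) === 75
  // in that magnification, i.e. at slice 75 % 32 === 11 within its bucket.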
- const thirdDimensionValue = + let thirdDimensionValue = Math.floor(seedVoxel[dimensionIndices[2]] / activeResolution[dimensionIndices[2]]) % Constants.BUCKET_WIDTH; const get3DAddress = (voxel: Vector2) => { @@ -319,7 +322,7 @@ export function* floodFill(): Saga { voxel[dimensionIndices[1]], ]; const currentViewportBounding = yield* call(getBoundingsFromPosition, planeId, 1); - const bucketsWithLabeledVoxelMap = cube.floodFill( + const labeledVoxelMapFromFloodFill = cube.floodFill( seedVoxel, activeCellId, get3DAddress, @@ -328,24 +331,53 @@ export function* floodFill(): Saga { currentViewportBounding, activeZoomStep, ); - if (!bucketsWithLabeledVoxelMap) { + if (labeledVoxelMapFromFloodFill == null) { continue; } - for (let zoomStep = 0; zoomStep < allResolutions.length; zoomStep++) { - if (zoomStep === activeZoomStep) { - continue; - } + let currentLabeledVoxelMapFromFloodFill: LabeledVoxelsMap = labeledVoxelMapFromFloodFill; + // debugger; + // First upscale the voxel map and apply it to all higher resolutions. + for (let zoomStep = activeZoomStep - 1; zoomStep >= 0; zoomStep--) { + const goalResolution = allResolutions[zoomStep]; + const sourceResolution = allResolutions[zoomStep + 1]; + currentLabeledVoxelMapFromFloodFill = sampleVoxelMapToResolution( + currentLabeledVoxelMapFromFloodFill, + cube, + sourceResolution, + zoomStep + 1, + goalResolution, + zoomStep, + dimensionIndices, + seedVoxel[dimensionIndices[2]], + ); + // Adjust thirdDimensionValue so get3DAddress returns the third dimension value + // in the goal resolution to apply the voxelMap correctly. + thirdDimensionValue = + Math.floor(seedVoxel[dimensionIndices[2]] / goalResolution[dimensionIndices[2]]) % + Constants.BUCKET_WIDTH; + applyVoxelMap(currentLabeledVoxelMapFromFloodFill, cube, activeCellId, get3DAddress); + } + currentLabeledVoxelMapFromFloodFill = labeledVoxelMapFromFloodFill; + // Next we downscale the annotation and apply it. + for (let zoomStep = activeZoomStep + 1; zoomStep < allResolutions.length; zoomStep++) { const goalResolution = allResolutions[zoomStep]; - cube.applyLabeledVoxelMapToResolution( - bucketsWithLabeledVoxelMap, - activeResolution, - activeZoomStep, + const sourceResolution = allResolutions[zoomStep - 1]; + currentLabeledVoxelMapFromFloodFill = sampleVoxelMapToResolution( + currentLabeledVoxelMapFromFloodFill, + cube, + sourceResolution, + zoomStep - 1, goalResolution, zoomStep, - activeCellId, - dimensionIndices[2], - get3DAddress, + dimensionIndices, + seedVoxel[dimensionIndices[2]], ); + // Adjust thirdDimensionValue so get3DAddress returns the third dimension value + // in the goal resolution to apply the voxelMap correctly. 
+ thirdDimensionValue = + Math.floor(seedVoxel[dimensionIndices[2]] / goalResolution[dimensionIndices[2]]) % + Constants.BUCKET_WIDTH; + applyVoxelMap(currentLabeledVoxelMapFromFloodFill, cube, activeCellId, get3DAddress); } yield* put(finishAnnotationStrokeAction()); cube.triggerPushQueue(); diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 5ddef22226b..6d807ae9752 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -14,6 +14,7 @@ function upsampleVoxelMap( goalResolution: Vector3, goalZoomStep: number, dimensionIndices: DimensionMap, + thirdDimensionVoxelValue: number, ): LabeledVoxelsMap { // TODO: Add comment if (sourceZoomStep <= goalZoomStep) { @@ -23,12 +24,16 @@ function upsampleVoxelMap( const scaleToSource = map3((val, index) => val / sourceResolution[index], goalResolution); // This array serves multiple purposes. It has a name / variable for each purpose. const scaleToGoal = map3((val, index) => val / goalResolution[index], sourceResolution); - const numberOfBucketWithSourceBucket = scaleToGoal; + const numberOfBucketWithinSourceBucket = scaleToGoal; const singleVoxelBoundsInGoalResolution = scaleToGoal; const boundsOfGoalBucketWithinSourceBucket = map3( value => Math.ceil(value * constants.BUCKET_WIDTH), scaleToSource, ); + // This is the buckets zoomed address part of the third dimension. + const thirdDimensionBucketValue = Math.floor( + thirdDimensionVoxelValue / goalResolution[dimensionIndices[2]] / constants.BUCKET_WIDTH, + ); for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { const labeledBucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); if (labeledBucket.type === "null") { @@ -39,14 +44,15 @@ function upsampleVoxelMap( (value, index) => Math.floor(value * scaleToGoal[index]), labeledBucket.getAddress(), ); + goalBaseBucketAddress[dimensionIndices[2]] = thirdDimensionBucketValue; for ( let firstDimBucketOffset = 0; - firstDimBucketOffset < numberOfBucketWithSourceBucket[dimensionIndices[0]]; + firstDimBucketOffset < numberOfBucketWithinSourceBucket[dimensionIndices[0]]; firstDimBucketOffset++ ) { for ( let secondDimBucketOffset = 0; - secondDimBucketOffset < numberOfBucketWithSourceBucket[dimensionIndices[1]]; + secondDimBucketOffset < numberOfBucketWithinSourceBucket[dimensionIndices[1]]; secondDimBucketOffset++ ) { const currentGoalBucketAddress = [...goalBaseBucketAddress]; @@ -231,6 +237,7 @@ export default function sampleVoxelMapToResolution( goalResolution: Vector3, goalZoomStep: number, dimensionIndices: DimensionMap, + thirdDimensionVoxelValue: number, ): LabeledVoxelsMap { if (sourceZoomStep < goalZoomStep) { return downsampleVoxelMap( @@ -251,6 +258,7 @@ export default function sampleVoxelMapToResolution( goalResolution, goalZoomStep, dimensionIndices, + thirdDimensionVoxelValue, ); } else { return labeledVoxelMap; @@ -275,11 +283,13 @@ export function applyVoxelMap( for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) { if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { const voxelToLabel = get3DAddress([firstDim, secondDim]); - const voxelAddress = dataCube.getVoxelIndex(voxelToLabel, zoomedStep); - data[voxelAddress] = cellId; + // The voxelToLabel is already within the bucket and in the correct resolution. 
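  // get3DAddress is expected to map a 2D voxel-map coordinate to a bucket-local 3D
  // address with the slice's third-dimension value already included; the accompanying
  // spec, for instance, uses voxel2D => [...voxel2D, 5] for a slice at z === 5.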
+ const voxelAddress = dataCube.getVoxelIndexByVoxelOffset(voxelToLabel); + if (zoomedStep === 0) data[voxelAddress] = cellId; } } } + dataCube.pushQueue.insert(bucket); bucket.trigger("bucketLabeled"); } } diff --git a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js index d2136f8e4b3..59d7ef714e4 100644 --- a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js +++ b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js @@ -46,7 +46,7 @@ test.beforeEach(t => { const mockedLayer = { resolutions: [[1, 1, 1], [2, 2, 2], [4, 4, 4], [8, 8, 8], [16, 16, 16], [32, 32, 32]], }; - const cube = new Cube([100, 100, 100], 3, "uint32", mockedLayer); + const cube = new Cube([1024, 1024, 1024], 3, "uint32", mockedLayer); const pullQueue = { add: sinon.stub(), pull: sinon.stub(), @@ -97,6 +97,7 @@ test("Upsampling an annotation should work in the top left part of a bucket", t [1, 1, 1], 0, [0, 1, 2], + 5, ); const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; @@ -140,6 +141,7 @@ test("Upsampling an annotation should work in the top right part of a bucket", t [1, 1, 1], 0, [0, 1, 2], + 5, ); const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; @@ -183,6 +185,7 @@ test("Upsampling an annotation should work in the bottom left part of a bucket", [1, 1, 1], 0, [0, 1, 2], + 5, ); const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; @@ -226,6 +229,7 @@ test("Upsampling an annotation should work in the bottom right part of a bucket" [1, 1, 1], 0, [0, 1, 2], + 5, ); const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; @@ -246,6 +250,50 @@ test("Upsampling an annotation should work in the bottom right part of a bucket" } }); +test("Upsampling an annotation where the annotation slice is in the lower part of the bucket should upsample to the correct bucket", t => { + const { cube } = t.context; + const sourceVoxelMap = getEmptyVoxelMap(); + [[5, 5], [5, 6], [6, 5], [6, 6]].forEach(([firstDim, secondDim]) => + labelVoxelInVoxelMap(firstDim, secondDim, sourceVoxelMap), + ); + const goalVoxelMap = getEmptyVoxelMap(); + [[10, 10], [10, 11], [10, 12], [10, 13], [11, 10], [11, 11], [11, 12], [11, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + [[12, 10], [12, 11], [12, 12], [12, 13], [13, 10], [13, 11], [13, 12], [13, 13]].forEach( + ([firstDim, secondDim]) => labelVoxelInVoxelMap(firstDim, secondDim, goalVoxelMap), + ); + const bucket = cube.getOrCreateBucket([0, 0, 0, 1]); + const labeledVoxelsMap = new Map([[bucket.zoomedAddress, sourceVoxelMap]]); + const upsampledVoxelMapPerBucket = sampleVoxelMapToResolution( + labeledVoxelsMap, + cube, + [2, 2, 2], + 1, + [1, 1, 1], + 0, + [0, 1, 2], + 40, + ); + const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); + const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; + const upsampledVoxelMap = upsampledVoxelMapAsArray[0][1]; + t.deepEqual( + bucketZoomedAddress, + [0, 0, 1, 0], + "The 
bucket of the upsampled map should be correct.", + ); + for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { + t.is( + getVoxelMapEntry(firstDim, secondDim, upsampledVoxelMap), + getVoxelMapEntry(firstDim, secondDim, goalVoxelMap), + "The labeled voxels of the upsampled voxel map should match the expected labels", + ); + } + } +}); + test("Upsampling an annotation should work across more than one resolution", t => { const { cube } = t.context; const sourceVoxelMap = getEmptyVoxelMap(); @@ -269,6 +317,7 @@ test("Upsampling an annotation should work across more than one resolution", t = [1, 1, 1], 0, [0, 1, 2], + 5, ); const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; @@ -317,6 +366,7 @@ test("Downsampling annotation of neighbour buckets should result in one downsamp [2, 2, 2], 1, [0, 1, 2], + 5, ); const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; @@ -369,6 +419,7 @@ test("Downsampling annotation should work across more than one resolution", t => [4, 4, 4], 2, [0, 1, 2], + 5, ); const upsampledVoxelMapAsArray: LabeledVoxelsMapAsArray = Array.from(upsampledVoxelMapPerBucket); const bucketZoomedAddress = upsampledVoxelMapAsArray[0][0]; From e58fafb7f4ef5b75cb37acd6c68bb2026a50d95a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 14 Sep 2020 18:43:12 +0200 Subject: [PATCH 045/121] add comments to sampling methods --- .../model/volumetracing/volume_annotation_sampling.js | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 6d807ae9752..f13b15df695 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -16,7 +16,10 @@ function upsampleVoxelMap( dimensionIndices: DimensionMap, thirdDimensionVoxelValue: number, ): LabeledVoxelsMap { - // TODO: Add comment + // This method upsamples a given LabeledVoxelsMap. For each bucket in the LabeledVoxelsMap this function + // iterating over the buckets in the higher resolution that are covered by the bucket. + // For each covered bucket all labeled voxel entries are upsampled with a kernel an marked in an array for the covered bucket. + // Therefore all covered buckets with their marked array build the upsampled version of the given LabeledVoxelsMap. if (sourceZoomStep <= goalZoomStep) { throw new Error("Trying to upsample a LabeledVoxelMap with the down sample function."); } @@ -100,7 +103,7 @@ function upsampleVoxelMap( kernelLeft * singleVoxelBoundsInGoalResolution[dimensionIndices[0]], kernelTop * singleVoxelBoundsInGoalResolution[dimensionIndices[1]], ]; - // The labeled voxel is upscaled. + // The labeled voxel is upscaled using a kernel. for ( let firstKernelOffset = 0; firstKernelOffset < singleVoxelBoundsInGoalResolution[dimensionIndices[0]]; @@ -141,6 +144,10 @@ function downsampleVoxelMap( goalZoomStep: number, dimensionIndices: DimensionMap, ): LabeledVoxelsMap { + // This method downsamples a LabeledVoxelsMap. 
For each bucket of the LabeledVoxelsMap + // the matching bucket the lower resolution is determined and all the labeledVoxels + // are downsampled to the lower resolution bucket. The downsampling uses a kernel to skip + // checking whether to label a downsampled voxels if already one labeled voxel matching the downsampled voxel is found. if (goalZoomStep <= sourceZoomStep) { throw new Error("Trying to upsample a LabeledVoxelMap with the down sample function."); } From 21d2428b5274858b97e9dfad90962ca0ef21379c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 14 Sep 2020 21:42:40 +0200 Subject: [PATCH 046/121] make copylayer work in multi resolutions --- .../oxalis/model/sagas/volumetracing_saga.js | 163 ++++++++++++------ 1 file changed, 113 insertions(+), 50 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 4311f5996f9..4506e0ea1bf 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -42,6 +42,7 @@ import { getCurrentResolution, getRequestLogZoomStep, } from "oxalis/model/accessors/flycam_accessor"; +import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; import { getResolutions } from "oxalis/model/accessors/dataset_accessor"; import Constants, { type BoundingBoxType, @@ -54,7 +55,7 @@ import Constants, { VolumeToolEnum, type LabeledVoxelsMap, } from "oxalis/constants"; -import Dimensions from "oxalis/model/dimensions"; +import Dimensions, { type DimensionMap } from "oxalis/model/dimensions"; import Model from "oxalis/model"; import Toast from "libs/toast"; import VolumeLayer from "oxalis/model/volumetracing/volumelayer"; @@ -239,6 +240,10 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga } const segmentationLayer = yield* call([Model, Model.getSegmentationLayer]); + const { cube } = segmentationLayer; + const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); + const allResolutions = yield* select(state => getResolutions(state.dataset)); + const dimensionIndices = Dimensions.getIndices(activeViewport); const position = Dimensions.roundCoordinate(yield* select(state => getPosition(state.flycam))); const [halfViewportExtentX, halfViewportExtentY] = yield* call( getHalfViewportExtents, @@ -246,6 +251,7 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga ); const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); + const labeledVoxelMapOfCopiedVoxel: LabeledVoxelsMap = new Map(); function copyVoxelLabel(voxelTemplateAddress, voxelTargetAddress) { const templateLabelValue = segmentationLayer.cube.getDataValue(voxelTemplateAddress); @@ -256,7 +262,26 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga // Do not overwrite already labelled voxels if (currentLabelValue === 0) { - api.data.labelVoxels([voxelTargetAddress], templateLabelValue); + console.log( + `labeling at ${voxelTargetAddress.toString()} with ${templateLabelValue} in zoomStep ${activeZoomStep}`, + ); + api.data.labelVoxels([voxelTargetAddress], templateLabelValue, activeZoomStep); + const bucket = cube.getBucket( + cube.positionToZoomedAddress(voxelTargetAddress, activeZoomStep), + ); + if (bucket.type === "null") { + return; + } + const labeledVoxelInBucket = cube.getVoxelOffset(voxelTargetAddress, activeZoomStep); + const labelMapOfBucket = + 
labeledVoxelMapOfCopiedVoxel.get(bucket.zoomedAddress) || + new Uint8Array(Constants.BUCKET_WIDTH ** 2).fill(0); + const labeledVoxel2D = [ + labeledVoxelInBucket[dimensionIndices[0]], + labeledVoxelInBucket[dimensionIndices[1]], + ]; + labelMapOfBucket[labeledVoxel2D[0] * Constants.BUCKET_WIDTH + labeledVoxel2D[1]] = 1; + labeledVoxelMapOfCopiedVoxel.set(bucket.zoomedAddress, labelMapOfBucket); } } } @@ -268,8 +293,7 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga ); if (useDynamicSpaceDirection) { const spaceDirectionOrtho = yield* select(state => state.flycam.spaceDirectionOrtho); - const dim = Dimensions.getIndices(activeViewport)[2]; - direction = spaceDirectionOrtho[dim]; + direction = spaceDirectionOrtho[dimensionIndices[2]]; } const [tx, ty, tz] = Dimensions.transDim(position, activeViewport); @@ -282,6 +306,15 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga ); } } + applyLabeledVoxelMapToAllMissingResolutions( + labeledVoxelMapOfCopiedVoxel, + activeZoomStep, + dimensionIndices, + allResolutions, + cube, + activeCellId, + z, + ); yield* put(finishAnnotationStrokeAction()); } @@ -305,7 +338,7 @@ export function* floodFill(): Saga { const activeResolution = allResolutions[activeZoomStep]; // The floodfill and applyVoxelMap methods of iterates within the bucket. // Thus thirdDimensionValue must also be within the initial bucket in the correct resolution. - let thirdDimensionValue = + const thirdDimensionValue = Math.floor(seedVoxel[dimensionIndices[2]] / activeResolution[dimensionIndices[2]]) % Constants.BUCKET_WIDTH; const get3DAddress = (voxel: Vector2) => { @@ -334,56 +367,86 @@ export function* floodFill(): Saga { if (labeledVoxelMapFromFloodFill == null) { continue; } - let currentLabeledVoxelMapFromFloodFill: LabeledVoxelsMap = labeledVoxelMapFromFloodFill; - // debugger; - // First upscale the voxel map and apply it to all higher resolutions. - for (let zoomStep = activeZoomStep - 1; zoomStep >= 0; zoomStep--) { - const goalResolution = allResolutions[zoomStep]; - const sourceResolution = allResolutions[zoomStep + 1]; - currentLabeledVoxelMapFromFloodFill = sampleVoxelMapToResolution( - currentLabeledVoxelMapFromFloodFill, - cube, - sourceResolution, - zoomStep + 1, - goalResolution, - zoomStep, - dimensionIndices, - seedVoxel[dimensionIndices[2]], - ); - // Adjust thirdDimensionValue so get3DAddress returns the third dimension value - // in the goal resolution to apply the voxelMap correctly. - thirdDimensionValue = - Math.floor(seedVoxel[dimensionIndices[2]] / goalResolution[dimensionIndices[2]]) % - Constants.BUCKET_WIDTH; - applyVoxelMap(currentLabeledVoxelMapFromFloodFill, cube, activeCellId, get3DAddress); - } - currentLabeledVoxelMapFromFloodFill = labeledVoxelMapFromFloodFill; - // Next we downscale the annotation and apply it. - for (let zoomStep = activeZoomStep + 1; zoomStep < allResolutions.length; zoomStep++) { - const goalResolution = allResolutions[zoomStep]; - const sourceResolution = allResolutions[zoomStep - 1]; - currentLabeledVoxelMapFromFloodFill = sampleVoxelMapToResolution( - currentLabeledVoxelMapFromFloodFill, - cube, - sourceResolution, - zoomStep - 1, - goalResolution, - zoomStep, - dimensionIndices, - seedVoxel[dimensionIndices[2]], - ); - // Adjust thirdDimensionValue so get3DAddress returns the third dimension value - // in the goal resolution to apply the voxelMap correctly. 
- thirdDimensionValue = - Math.floor(seedVoxel[dimensionIndices[2]] / goalResolution[dimensionIndices[2]]) % - Constants.BUCKET_WIDTH; - applyVoxelMap(currentLabeledVoxelMapFromFloodFill, cube, activeCellId, get3DAddress); - } + applyLabeledVoxelMapToAllMissingResolutions( + labeledVoxelMapFromFloodFill, + activeZoomStep, + dimensionIndices, + allResolutions, + cube, + activeCellId, + seedVoxel[dimensionIndices[2]], + ); yield* put(finishAnnotationStrokeAction()); cube.triggerPushQueue(); } } +function applyLabeledVoxelMapToAllMissingResolutions( + labeledVoxelMapToApply: LabeledVoxelsMap, + activeZoomStep: number, + dimensionIndices: DimensionMap, + allResolutions: Array, + segmentationCube: DataCube, + cellId: number, + thirdDimensionOfSlice: number, +): void { + let currentLabeledVoxelMap: LabeledVoxelsMap = labeledVoxelMapToApply; + let thirdDimensionValue = thirdDimensionOfSlice; + const get3DAddress = (voxel: Vector2) => { + const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], thirdDimensionValue]; + const orderedVoxelWithThirdDimension = [ + unorderedVoxelWithThirdDimension[dimensionIndices[0]], + unorderedVoxelWithThirdDimension[dimensionIndices[1]], + unorderedVoxelWithThirdDimension[dimensionIndices[2]], + ]; + return orderedVoxelWithThirdDimension; + }; + // debugger; + // First upscale the voxel map and apply it to all higher resolutions. + for (let zoomStep = activeZoomStep - 1; zoomStep >= 0; zoomStep--) { + const goalResolution = allResolutions[zoomStep]; + const sourceResolution = allResolutions[zoomStep + 1]; + currentLabeledVoxelMap = sampleVoxelMapToResolution( + currentLabeledVoxelMap, + segmentationCube, + sourceResolution, + zoomStep + 1, + goalResolution, + zoomStep, + dimensionIndices, + thirdDimensionOfSlice, + ); + // Adjust thirdDimensionValue so get3DAddress returns the third dimension value + // in the goal resolution to apply the voxelMap correctly. + thirdDimensionValue = + Math.floor(thirdDimensionOfSlice / goalResolution[dimensionIndices[2]]) % + Constants.BUCKET_WIDTH; + applyVoxelMap(currentLabeledVoxelMap, segmentationCube, cellId, get3DAddress); + } + currentLabeledVoxelMap = labeledVoxelMapToApply; + // Next we downscale the annotation and apply it. + for (let zoomStep = activeZoomStep + 1; zoomStep < allResolutions.length; zoomStep++) { + const goalResolution = allResolutions[zoomStep]; + const sourceResolution = allResolutions[zoomStep - 1]; + currentLabeledVoxelMap = sampleVoxelMapToResolution( + currentLabeledVoxelMap, + segmentationCube, + sourceResolution, + zoomStep - 1, + goalResolution, + zoomStep, + dimensionIndices, + thirdDimensionOfSlice, + ); + // Adjust thirdDimensionValue so get3DAddress returns the third dimension value + // in the goal resolution to apply the voxelMap correctly. 
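  // Illustration with hypothetical values: for thirdDimensionOfSlice === 70 the
  // bucket-local value is 70 % 32 === 6 in mag [1, 1, 1], Math.floor(70 / 2) % 32 === 3
  // in mag [2, 2, 2] and Math.floor(70 / 4) % 32 === 17 in mag [4, 4, 4].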
+ thirdDimensionValue = + Math.floor(thirdDimensionOfSlice / goalResolution[dimensionIndices[2]]) % + Constants.BUCKET_WIDTH; + applyVoxelMap(currentLabeledVoxelMap, segmentationCube, cellId, get3DAddress); + } +} + export function* finishLayer( layer: VolumeLayer, activeTool: VolumeTool, From 8ae88ba1a79b89ec7414a64b6c2c266cd269030e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 14 Sep 2020 21:59:35 +0200 Subject: [PATCH 047/121] while upsampling annotate mutliple slices --- .../oxalis/model/sagas/volumetracing_saga.js | 26 ++++++++++++- .../volume_annotation_sampling.js | 39 ++++++++++++++----- 2 files changed, 53 insertions(+), 12 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 4506e0ea1bf..001eb2c21e1 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -314,6 +314,7 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga cube, activeCellId, z, + false, ); yield* put(finishAnnotationStrokeAction()); } @@ -375,6 +376,7 @@ export function* floodFill(): Saga { cube, activeCellId, seedVoxel[dimensionIndices[2]], + true, ); yield* put(finishAnnotationStrokeAction()); cube.triggerPushQueue(); @@ -389,6 +391,7 @@ function applyLabeledVoxelMapToAllMissingResolutions( segmentationCube: DataCube, cellId: number, thirdDimensionOfSlice: number, + shouldOverwrite: boolean, ): void { let currentLabeledVoxelMap: LabeledVoxelsMap = labeledVoxelMapToApply; let thirdDimensionValue = thirdDimensionOfSlice; @@ -421,7 +424,18 @@ function applyLabeledVoxelMapToAllMissingResolutions( thirdDimensionValue = Math.floor(thirdDimensionOfSlice / goalResolution[dimensionIndices[2]]) % Constants.BUCKET_WIDTH; - applyVoxelMap(currentLabeledVoxelMap, segmentationCube, cellId, get3DAddress); + const numberOfSlices = Math.ceil( + allResolutions[activeZoomStep][dimensionIndices[2]] / goalResolution[dimensionIndices[2]], + ); + applyVoxelMap( + currentLabeledVoxelMap, + segmentationCube, + cellId, + get3DAddress, + numberOfSlices, + dimensionIndices[2], + shouldOverwrite, + ); } currentLabeledVoxelMap = labeledVoxelMapToApply; // Next we downscale the annotation and apply it. 
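  // Note on the hunk above: when an upsampled voxel map is applied to a finer
  // magnification, one annotated slice of the source mag covers several slices of the
  // goal mag, hence numberOfSlices = Math.ceil(allResolutions[activeZoomStep][dimensionIndices[2]] /
  // goalResolution[dimensionIndices[2]]); e.g. applying a map drawn in mag [4, 4, 4]
  // to mag [1, 1, 1] labels 4 consecutive slices, while downsampling only needs 1.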
@@ -443,7 +457,15 @@ function applyLabeledVoxelMapToAllMissingResolutions( thirdDimensionValue = Math.floor(thirdDimensionOfSlice / goalResolution[dimensionIndices[2]]) % Constants.BUCKET_WIDTH; - applyVoxelMap(currentLabeledVoxelMap, segmentationCube, cellId, get3DAddress); + applyVoxelMap( + currentLabeledVoxelMap, + segmentationCube, + cellId, + get3DAddress, + 1, + dimensionIndices[2], + shouldOverwrite, + ); } } diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index f13b15df695..7106da50343 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -277,22 +277,41 @@ export function applyVoxelMap( dataCube: DataCube, cellId: number, get3DAddress: Vector2 => Vector3, + numberOfSlicesToApply: number, + thirdDimensionIndex: 0 | 1 | 2, + shouldOverwrite: boolean = true, ) { for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { - const bucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); + let bucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); if (bucket.type === "null") { continue; } bucket.markAndAddBucketForUndo(); - const zoomedStep = bucket.zoomedAddress[3]; - const data = bucket.getOrCreateData(); - for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) { - for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) { - if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { - const voxelToLabel = get3DAddress([firstDim, secondDim]); - // The voxelToLabel is already within the bucket and in the correct resolution. - const voxelAddress = dataCube.getVoxelIndexByVoxelOffset(voxelToLabel); - if (zoomedStep === 0) data[voxelAddress] = cellId; + let data = bucket.getOrCreateData(); + const thirdDimensionValueInBucket = get3DAddress([0, 0])[2]; + for (let sliceCount = 0; sliceCount < numberOfSlicesToApply; sliceCount++) { + if (thirdDimensionValueInBucket + sliceCount === constants.BUCKET_WIDTH) { + // The current slice is in the next bucket in the third direction. + const nextBucketZoomedAddress = [...labeledBucketZoomedAddress]; + nextBucketZoomedAddress[thirdDimensionIndex]++; + bucket = dataCube.getOrCreateBucket(nextBucketZoomedAddress); + if (bucket.type === "null") { + continue; + } + data = bucket.getOrCreateData(); + } + for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) { + for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) { + if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { + const voxelToLabel = get3DAddress([firstDim, secondDim]); + voxelToLabel[thirdDimensionIndex] = + (voxelToLabel[thirdDimensionIndex] + sliceCount) % constants.BUCKET_WIDTH; + // The voxelToLabel is already within the bucket and in the correct resolution. 
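  // shouldOverwrite === false preserves voxels that already carry a label (only voxels
  // that are still 0 are written), which is how the copySegmentationLayer path calls
  // this; the flood fill passes true and overwrites unconditionally.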
+ const voxelAddress = dataCube.getVoxelIndexByVoxelOffset(voxelToLabel); + if (shouldOverwrite || (!shouldOverwrite && data[voxelAddress] === 0)) { + data[voxelAddress] = cellId; + } + } } } } From bf65a243de291aeb3970d363308814a4a222e37d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 15 Sep 2020 11:28:22 +0200 Subject: [PATCH 048/121] Add todo about refactoring labeling voxels --- .../oxalis/model/bucket_data_handling/data_cube.js | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index b9d0fc664ec..4a885859579 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -337,6 +337,11 @@ class DataCube { label: number, activeCellId?: ?number = null, ): void { + // TODO: use segmentation layer resolutions. + // TODO: Do not label voxel in higher resolutions multiple times. + // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. + // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used + // to apply the annotation to all needed resolutions, without labeling voxels multiple times. const numberOfResolutions = getResolutions(Store.getState().dataset).length; for (let zoomStep = 0; zoomStep < numberOfResolutions; ++zoomStep) { while (iterator.hasNext) { From 4b5adae9c5a7225e8579a370758f5ab0c0643504 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 15 Sep 2020 14:11:52 +0200 Subject: [PATCH 049/121] adjusted voxel labeling method to label in all resolutions the segmentation layer has --- .../model/accessors/dataset_accessor.js | 23 ++++++++++++++ .../model/bucket_data_handling/data_cube.js | 31 ++++++++++++++----- 2 files changed, 46 insertions(+), 8 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index d1b81285675..9a6c1087c4c 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -27,6 +27,8 @@ import { aggregateBoundingBox } from "libs/utils"; import { formatExtentWithLength, formatNumberToLength } from "libs/format_utils"; import messages from "messages"; +export type ResolutionsMap = Map; + export function getMostExtensiveResolutions(dataset: APIDataset): Array { return _.chain(dataset.dataSource.dataLayers) .map(dataLayer => dataLayer.resolutions) @@ -73,6 +75,27 @@ export function getDataLayers(dataset: APIDataset): DataLayerType[] { return dataset.dataSource.dataLayers; } +function _getResolutionMapOfSegmentationLayer(dataset: APIDataset): ResolutionsMap { + const segmentationLayer = getSegmentationLayer(dataset); + if (!segmentationLayer) { + return new Map(); + } + const resolutionsObject = new Map(); + const colorLayerResolutions = getResolutions(dataset); + colorLayerResolutions.forEach((resolution, zoomStep) => { + if ( + segmentationLayer.resolutions.some(segmentationLayerResolution => + _.isEqual(resolution, segmentationLayerResolution), + ) + ) { + resolutionsObject.set(zoomStep, [...resolution]); + } + }); + return resolutionsObject; +} + +export const getResolutionMapOfSegmentationLayer = memoizeOne(_getResolutionMapOfSegmentationLayer); + export function getLayerByName(dataset: APIDataset, 
layerName: string): DataLayerType { const dataLayers = getDataLayers(dataset); const hasUniqueNames = _.uniqBy(dataLayers, "name").length === dataLayers.length; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 4a885859579..905759280d5 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -15,7 +15,10 @@ import { type BucketDataArray, } from "oxalis/model/bucket_data_handling/bucket"; import { type VoxelIterator, VoxelNeighborStack2D } from "oxalis/model/volumetracing/volumelayer"; -import { getResolutions } from "oxalis/model/accessors/dataset_accessor"; +import { + getResolutions, + getResolutionMapOfSegmentationLayer, +} from "oxalis/model/accessors/dataset_accessor"; import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; @@ -34,7 +37,7 @@ import constants, { type LabeledVoxelsMap, } from "oxalis/constants"; import { type ElementClass } from "admin/api_flow_types"; -import { areBoundingBoxesOverlappingOrTouching, map3, iterateThroughBounds } from "libs/utils"; +import { areBoundingBoxesOverlappingOrTouching } from "libs/utils"; class CubeEntry { data: Map; boundary: Vector3; @@ -107,10 +110,22 @@ class DataCube { ]; this.arbitraryCube = new ArbitraryCubeAdapter(this, _.clone(cubeBoundary)); - - const resolutions = getResolutions(Store.getState().dataset); - for (let i = 0; i < this.ZOOM_STEP_COUNT; i++) { - const resolution = resolutions[i]; + // TODO: for segmentation add special case + const { dataset } = Store.getState(); + const colorLayerResolutionsArray = getResolutions(dataset); + let segmentationLayer = new Map(); + if (isSegmentation) { + segmentationLayer = getResolutionMapOfSegmentationLayer(dataset); + } + let numberOfSkippedResolutions = 0; + for (let i = 0; i < this.ZOOM_STEP_COUNT + numberOfSkippedResolutions; i++) { + const resolution = isSegmentation ? segmentationLayer.get(i) : colorLayerResolutionsArray[i]; + if (resolution == null) { + // As segmentation layer might be sparse, not all resolutions might exist. + // Skipping zoomStep + numberOfSkippedResolutions++; + continue; + } const zoomedCubeBoundary = [ Math.ceil(cubeBoundary[0] / resolution[0]) + 1, Math.ceil(cubeBoundary[1] / resolution[1]) + 1, @@ -342,8 +357,8 @@ class DataCube { // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used // to apply the annotation to all needed resolutions, without labeling voxels multiple times. 
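To make the sparse mapping above concrete, here is a standalone sketch of the zoomStep-to-resolution map that getResolutionMapOfSegmentationLayer builds; the magnification values are only examples and plain arrays stand in for the dataset and layer objects:

const datasetResolutions = [[1, 1, 1], [2, 2, 1], [4, 4, 2], [8, 8, 4]]; // example values
const segmentationResolutions = [[1, 1, 1], [4, 4, 2]]; // sparse segmentation layer (example)
const resolutionMap = new Map();
datasetResolutions.forEach((resolution, zoomStep) => {
  const existsInSegmentationLayer = segmentationResolutions.some(segRes =>
    segRes.every((dim, i) => dim === resolution[i]),
  );
  if (existsInSegmentationLayer) {
    resolutionMap.set(zoomStep, resolution);
  }
});
// resolutionMap is Map { 0 => [1, 1, 1], 2 => [4, 4, 2] }: zoom steps 1 and 3 do not exist
// for this segmentation layer, so the labeling loop iterates over the map's keys instead of
// assuming a dense list of resolutions.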
- const numberOfResolutions = getResolutions(Store.getState().dataset).length; - for (let zoomStep = 0; zoomStep < numberOfResolutions; ++zoomStep) { + const resolutions = getResolutionMapOfSegmentationLayer(Store.getState().dataset); + for (const zoomStep of resolutions.keys()) { while (iterator.hasNext) { const voxel = iterator.getNext(); this.labelVoxelInResolution(voxel, label, zoomStep, activeCellId); From 97676a2494b414cde2719899aa3fbc02c17c6480 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 15 Sep 2020 14:17:25 +0200 Subject: [PATCH 050/121] added some comments with todos --- frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js | 3 +++ 1 file changed, 3 insertions(+) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 001eb2c21e1..48d4105efda 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -335,6 +335,7 @@ export function* floodFill(): Saga { const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); const dimensionIndices = Dimensions.getIndices(planeId); const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); + // use getResolutionMapOfSegmentationLayer const allResolutions = yield* select(state => getResolutions(state.dataset)); const activeResolution = allResolutions[activeZoomStep]; // The floodfill and applyVoxelMap methods of iterates within the bucket. @@ -383,6 +384,8 @@ export function* floodFill(): Saga { } } +// TODO: Iterate over all resolutions of the segmentation layer, not the resolutions of the color layers. +// To get all segmentation layer resolutions, use: getResolutionMapOfSegmentationLayer function applyLabeledVoxelMapToAllMissingResolutions( labeledVoxelMapToApply: LabeledVoxelsMap, activeZoomStep: number, From bc8e97e88af94ff1655e58ea2decf88adccc2d4f Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 23 Sep 2020 16:26:01 +0200 Subject: [PATCH 051/121] Update frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js --- .../oxalis/model/accessors/volumetracing_accessor.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js index e21c9afd7b0..32875284c36 100644 --- a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js @@ -51,7 +51,7 @@ export function isVolumeTraceToolDisallowed(state: OxalisState) { } // The current resolution is to high for allowing the trace tool // because to many voxel could be annotated at the same time. 
- const isZoomStepTooHigh = getRequestLogZoomStep(state) > 1.5; + const isZoomStepTooHigh = getRequestLogZoomStep(state) > 1; return isZoomStepTooHigh; } From 14c1b95580c826721f9ddcc90292b0da3eb34662 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 23 Sep 2020 16:37:25 +0200 Subject: [PATCH 052/121] always label in all resolutions when using api; show stack-icon when multi-slice-annotating --- frontend/javascripts/oxalis/api/api_latest.js | 4 ++-- frontend/javascripts/oxalis/api/api_v2.js | 2 +- .../model/bucket_data_handling/data_cube.js | 15 +++++++++++++++ .../javascripts/oxalis/model_initialization.js | 3 +++ .../oxalis/view/action-bar/volume_actions_view.js | 4 ++-- 5 files changed, 23 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/oxalis/api/api_latest.js b/frontend/javascripts/oxalis/api/api_latest.js index 093c00ee39a..b404627ea19 100644 --- a/frontend/javascripts/oxalis/api/api_latest.js +++ b/frontend/javascripts/oxalis/api/api_latest.js @@ -1040,13 +1040,13 @@ class DataApi { * @example // Set the segmentation id for some voxels to 1337 * api.data.labelVoxels([[1,1,1], [1,2,1], [2,1,1], [2,2,1]], 1337); */ - labelVoxels(voxels: Array, label: number, zoomStep: number = 0): void { + labelVoxels(voxels: Array, label: number): void { assertVolume(Store.getState().tracing); const segmentationLayer = this.model.getSegmentationLayer(); assertExists(segmentationLayer, "Segmentation layer not found!"); for (const voxel of voxels) { - segmentationLayer.cube.labelVoxelInResolution(voxel, label, zoomStep); + segmentationLayer.cube.labelVoxelInAllResolutions(voxel, label); } segmentationLayer.cube.pushQueue.push(); diff --git a/frontend/javascripts/oxalis/api/api_v2.js b/frontend/javascripts/oxalis/api/api_v2.js index afe58264ff8..bc0e572d669 100644 --- a/frontend/javascripts/oxalis/api/api_v2.js +++ b/frontend/javascripts/oxalis/api/api_v2.js @@ -656,7 +656,7 @@ class DataApi { assertExists(segmentationLayer, "Segmentation layer not found!"); for (const voxel of voxels) { - segmentationLayer.cube.labelVoxelInResolution(voxel, label, 0); + segmentationLayer.cube.labelVoxelInAllResolutions(voxel, label); } segmentationLayer.cube.pushQueue.push(); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 905759280d5..a2df0371a8a 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -369,6 +369,21 @@ class DataCube { this.triggerPushQueue(); } + labelVoxelInAllResolutions(voxel: Vector3, label: number, activeCellId: ?number) { + // Also see labelVoxelsInAllResolutions + // TODO: use segmentation layer resolutions. + // TODO: Do not label voxel in higher resolutions multiple times. + // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. + // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used + // to apply the annotation to all needed resolutions, without labeling voxels multiple times. 
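As a rough illustration of what labeling a single voxel "in all resolutions" means: the mag-1 position is scaled down by the magnification factors before the label is written into the coarser cube. A sketch with an assumed helper, not code from this patch:

// Assumed helper for illustration only: map a mag-1 voxel position into a coarser magnification.
function scalePositionToResolution(position, resolution) {
  return [
    Math.floor(position[0] / resolution[0]),
    Math.floor(position[1] / resolution[1]),
    Math.floor(position[2] / resolution[2]),
  ];
}
console.log(scalePositionToResolution([513, 255, 17], [4, 4, 2])); // [128, 63, 8]
// Roughly speaking, labelVoxelInAllResolutions writes the label once per existing magnification
// of the segmentation layer, at a correspondingly scaled-down address.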
+ const resolutions = getResolutionMapOfSegmentationLayer(Store.getState().dataset); + for (const zoomStep of resolutions.keys()) { + this.labelVoxelInResolution(voxel, label, zoomStep, activeCellId); + } + + this.triggerPushQueue(); + } + labelVoxelInResolution( voxel: Vector3, label: number, diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index fa14034fb56..08dd7f154e5 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -455,6 +455,9 @@ function setupLayerForVolumeTracing( console.log(tracing.resolutions); + // Legacy tracings don't have the `tracing.resolutions` property + // since they were created before WK started to maintain multiple resolution + // in volume annotations. Therefore, this code falls back to mag1 const tracingResolutions = tracing.resolutions ? tracing.resolutions.map(({ x, y, z }) => [x, y, z]) : [[1, 1, 1]]; diff --git a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index b854368c41b..79f4f1fb73a 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -49,8 +49,8 @@ class VolumeActionsView extends PureComponent { const { activeTool, activeResolution, isInMergerMode } = this.props; const hasResolutionWithHigherDimension = activeResolution.some(val => val > 1); const multiSliceAnnotationInfoIcon = hasResolutionWithHigherDimension ? ( - - place holder + + ) : null; const isTraceToolDisabled = isZoomStepTooHighForTraceTool(); From 367810fa3ed06ba843a68160de2fcad1a8a4d51f Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 23 Sep 2020 16:39:23 +0200 Subject: [PATCH 053/121] more comments --- frontend/javascripts/admin/api_flow_types.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/frontend/javascripts/admin/api_flow_types.js b/frontend/javascripts/admin/api_flow_types.js index 6a7a7c27755..184b324933e 100644 --- a/frontend/javascripts/admin/api_flow_types.js +++ b/frontend/javascripts/admin/api_flow_types.js @@ -615,6 +615,10 @@ export type ServerVolumeTracing = {| elementClass: ElementClass, fallbackLayer?: string, largestSegmentId: number, + // `resolutions` will be undefined for legacy annotations + // which were created before the multi-resolution capabilities + // were added to volume tracings. 
Also see: + // https://github.com/scalableminds/webknossos/pull/4755 resolutions?: Array, |}; From 13eefc273de559ddd786a6ecef7093e8374b67b9 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 23 Sep 2020 17:47:49 +0200 Subject: [PATCH 054/121] replace old unlinkFallbackLayer with new API for that (keep old UpdateAction for backwards compatibility) --- frontend/javascripts/admin/admin_rest_api.js | 9 ++++++++ frontend/javascripts/messages.js | 4 ++-- frontend/javascripts/oxalis/api/api_latest.js | 11 ++++++++++ frontend/javascripts/oxalis/api/api_v2.js | 11 ++++++++++ .../model/actions/volumetracing_actions.js | 9 +------- .../model/reducers/volumetracing_reducer.js | 5 ----- .../reducers/volumetracing_reducer_helpers.js | 10 --------- .../oxalis/model/sagas/update_actions.js | 3 +++ .../oxalis/model/sagas/volumetracing_saga.js | 6 ++--- .../view/settings/dataset_settings_view.js | 22 ++++++++++++------- 10 files changed, 54 insertions(+), 36 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.js b/frontend/javascripts/admin/admin_rest_api.js index ada9f6207fe..3adafccd31e 100644 --- a/frontend/javascripts/admin/admin_rest_api.js +++ b/frontend/javascripts/admin/admin_rest_api.js @@ -711,6 +711,15 @@ export async function downloadNml( saveAs(blob, filename); } +export async function unlinkFallbackSegmentation( + annotationId: string, + annotationType: APIAnnotationType, +): Promise { + await Request.receiveJSON(`/api/annotations/${annotationType}/${annotationId}/unlinkFallback`, { + method: "PATCH", + }); +} + // ### Datasets export async function getDatasets( isUnreported: ?boolean, diff --git a/frontend/javascripts/messages.js b/frontend/javascripts/messages.js index bbbbf43a4c2..9d474ef4709 100644 --- a/frontend/javascripts/messages.js +++ b/frontend/javascripts/messages.js @@ -140,11 +140,11 @@ instead. Only enable this option if you understand its effect. All layers will n 'Click with "CTRL + Left Mouse" on the desired cell to load it\'s isosurface.', ], "tracing.confirm_remove_fallback_layer.title": - "Are you sure you want to unlink the dataset's original segmentation layer from this tracing?", + "Are you sure you want to unlink the dataset's original segmentation layer?", "tracing.confirm_remove_fallback_layer.explanation": "This dataset already contains a segmentation layer provided by its author. If you do not wish to base your work on this original segmentation, you can unlink it by confirming this dialog.", "tracing.confirm_remove_fallback_layer.notes": - "Note, that this action cannot be undone. Also note, if you already started with your annotation work based on the original segmentation layer, some small chunks of the segmentation might have already been merged into your annotation for technical reasons.", + "Note that this action also removes segments which were already annotated manually. This step cannot be undone.", "tracing.area_to_fill_is_too_big": "The area you want to fill is too big. Please annotate the area in multiple strokes.", "layouting.missing_custom_layout_info": diff --git a/frontend/javascripts/oxalis/api/api_latest.js b/frontend/javascripts/oxalis/api/api_latest.js index b404627ea19..68fdf8636d7 100644 --- a/frontend/javascripts/oxalis/api/api_latest.js +++ b/frontend/javascripts/oxalis/api/api_latest.js @@ -509,6 +509,17 @@ class TracingApi { UrlManager.updateUnthrottled(); } + /** + * Reload tracing + * + * @example + * api.tracing.hardReload().then(() => ... 
); + */ + async hardReload() { + await Model.ensureSavedState(); + location.reload(); + } + // SKELETONTRACING API /** diff --git a/frontend/javascripts/oxalis/api/api_v2.js b/frontend/javascripts/oxalis/api/api_v2.js index bc0e572d669..da5db302c30 100644 --- a/frontend/javascripts/oxalis/api/api_v2.js +++ b/frontend/javascripts/oxalis/api/api_v2.js @@ -324,6 +324,17 @@ class TracingApi { UrlManager.updateUnthrottled(); } + /** + * Reload tracing + * + * @example + * api.tracing.hardReload().then(() => ... ); + */ + async hardReload() { + await Model.ensureSavedState(); + location.reload(); + } + // SKELETONTRACING API /** diff --git a/frontend/javascripts/oxalis/model/actions/volumetracing_actions.js b/frontend/javascripts/oxalis/model/actions/volumetracing_actions.js index cda3ea0eaad..e965411f0a6 100644 --- a/frontend/javascripts/oxalis/model/actions/volumetracing_actions.js +++ b/frontend/javascripts/oxalis/model/actions/volumetracing_actions.js @@ -44,7 +44,6 @@ export type InferSegmentationInViewportAction = { type: "INFER_SEGMENT_IN_VIEWPORT", position: Vector3, }; -export type RemoveFallbackLayerAction = { type: "REMOVE_FALLBACK_LAYER" }; export type VolumeTracingAction = | InitializeVolumeTracingAction @@ -64,15 +63,13 @@ export type VolumeTracingAction = | CopySegmentationLayerAction | InferSegmentationInViewportAction | SetContourTracingModeAction - | AddBucketToUndoAction - | RemoveFallbackLayerAction; + | AddBucketToUndoAction; export const VolumeTracingSaveRelevantActions = [ "CREATE_CELL", "SET_ACTIVE_CELL", "SET_USER_BOUNDING_BOXES", "ADD_USER_BOUNDING_BOXES", - "REMOVE_FALLBACK_LAYER", ]; export const VolumeTracingUndoRelevantActions = ["START_EDITING", "COPY_SEGMENTATION_LAYER"]; @@ -167,7 +164,3 @@ export const inferSegmentationInViewportAction = ( type: "INFER_SEGMENT_IN_VIEWPORT", position, }); - -export const removeFallbackLayerAction = (): RemoveFallbackLayerAction => ({ - type: "REMOVE_FALLBACK_LAYER", -}); diff --git a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.js b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.js index fec760d5ca0..9b88f237dda 100644 --- a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.js +++ b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.js @@ -21,7 +21,6 @@ import { resetContourReducer, hideBrushReducer, setContourTracingModeReducer, - removeFallbackLayerReducer, } from "oxalis/model/reducers/volumetracing_reducer_helpers"; function VolumeTracingReducer(state: OxalisState, action: VolumeTracingAction): OxalisState { @@ -106,10 +105,6 @@ function VolumeTracingReducer(state: OxalisState, action: VolumeTracingAction): return setContourTracingModeReducer(state, action.mode); } - case "REMOVE_FALLBACK_LAYER": { - return removeFallbackLayerReducer(state); - } - default: return state; } diff --git a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.js b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.js index f94cfe9db82..f0eaae5b39a 100644 --- a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.js +++ b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.js @@ -140,13 +140,3 @@ export function setContourTracingModeReducer(state: OxalisState, mode: ContourMo }, }); } - -export function removeFallbackLayerReducer(state: OxalisState) { - return update(state, { - tracing: { - volume: { - $unset: ["fallbackLayer"], - }, - }, - }); -} diff --git 
a/frontend/javascripts/oxalis/model/sagas/update_actions.js b/frontend/javascripts/oxalis/model/sagas/update_actions.js index f3f5ca77aba..6236f44ce0e 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.js +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.js @@ -136,6 +136,9 @@ export type RevertToVersionUpdateAction = {| sourceVersion: number, |}, |}; + +// This action is not dispatched by our code, anymore, +// but we still need to keep it for backwards compatibility. export type RemoveFallbackLayerAction = {| name: "removeFallbackLayer", value: {}, diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 48d4105efda..7bdc8668119 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -254,18 +254,18 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga const labeledVoxelMapOfCopiedVoxel: LabeledVoxelsMap = new Map(); function copyVoxelLabel(voxelTemplateAddress, voxelTargetAddress) { - const templateLabelValue = segmentationLayer.cube.getDataValue(voxelTemplateAddress); + const templateLabelValue = cube.getDataValue(voxelTemplateAddress); // Only copy voxels from the previous layer which belong to the current cell if (templateLabelValue === activeCellId) { - const currentLabelValue = segmentationLayer.cube.getDataValue(voxelTargetAddress); + const currentLabelValue = cube.getDataValue(voxelTargetAddress); // Do not overwrite already labelled voxels if (currentLabelValue === 0) { console.log( `labeling at ${voxelTargetAddress.toString()} with ${templateLabelValue} in zoomStep ${activeZoomStep}`, ); - api.data.labelVoxels([voxelTargetAddress], templateLabelValue, activeZoomStep); + cube.labelVoxelInResolution([voxelTargetAddress], templateLabelValue, activeZoomStep); const bucket = cube.getBucket( cube.positionToZoomedAddress(voxelTargetAddress, activeZoomStep), ); diff --git a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js index b1de3dc2bc8..82ccaf69daa 100644 --- a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js +++ b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js @@ -20,7 +20,11 @@ import { DropdownSetting, ColorSetting, } from "oxalis/view/settings/setting_input_views"; -import { findDataPositionForLayer, clearCache } from "admin/admin_rest_api"; +import { + findDataPositionForLayer, + clearCache, + unlinkFallbackSegmentation, +} from "admin/admin_rest_api"; import { getGpuFactorsWithLabels } from "oxalis/model/bucket_data_handling/data_rendering_logic"; import { getMaxZoomValueForResolution } from "oxalis/model/accessors/flycam_accessor"; import { @@ -34,7 +38,6 @@ import { updateLayerSettingAction, updateUserSettingAction, } from "oxalis/model/actions/settings_actions"; -import { removeFallbackLayerAction } from "oxalis/model/actions/volumetracing_actions"; import Model from "oxalis/model"; import Store, { type DatasetConfiguration, @@ -74,7 +77,7 @@ type DatasetSettingsProps = {| onSetPosition: Vector3 => void, onZoomToResolution: Vector3 => number, onChangeUser: (key: $Keys, value: any) => void, - onRemoveFallbackLayer: () => void, + onUnlinkFallbackLayer: Tracing => Promise, tracing: Tracing, |}; @@ -161,12 +164,13 @@ class DatasetSettings extends React.PureComponent { content: (

{messages["tracing.confirm_remove_fallback_layer.explanation"]}

-

{messages["tracing.confirm_remove_fallback_layer.notes"]}

+

+ {messages["tracing.confirm_remove_fallback_layer.notes"]} +

), onOk: async () => { - this.props.onRemoveFallbackLayer(); - this.reloadLayerData(layerName); + this.props.onUnlinkFallbackLayer(this.props.tracing); }, width: 600, }); @@ -595,8 +599,10 @@ const mapDispatchToProps = (dispatch: Dispatch<*>) => ({ dispatch(setZoomStepAction(targetZoomValue)); return targetZoomValue; }, - onRemoveFallbackLayer() { - dispatch(removeFallbackLayerAction()); + async onUnlinkFallbackLayer(tracing: Tracing) { + const { annotationId, annotationType } = tracing; + await unlinkFallbackSegmentation(annotationId, annotationType); + await api.tracing.hardReload(); }, }); From 120a5881580e04900174c19d9523d956120a9838 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 24 Sep 2020 12:05:41 +0200 Subject: [PATCH 055/121] fix wrong parameter format --- .../oxalis/model/sagas/volumetracing_saga.js | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 7bdc8668119..2e1e289aa76 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -1,6 +1,7 @@ // @flow import _ from "lodash"; +import DataLayer from "oxalis/model/data_layer"; import { type CopySegmentationLayerAction, resetContourAction, @@ -239,7 +240,7 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga return; } - const segmentationLayer = yield* call([Model, Model.getSegmentationLayer]); + const segmentationLayer: DataLayer = yield* call([Model, Model.getSegmentationLayer]); const { cube } = segmentationLayer; const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); const allResolutions = yield* select(state => getResolutions(state.dataset)); @@ -262,10 +263,10 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga // Do not overwrite already labelled voxels if (currentLabelValue === 0) { - console.log( - `labeling at ${voxelTargetAddress.toString()} with ${templateLabelValue} in zoomStep ${activeZoomStep}`, - ); - cube.labelVoxelInResolution([voxelTargetAddress], templateLabelValue, activeZoomStep); + // console.log( + // `labeling at ${voxelTargetAddress.toString()} with ${templateLabelValue} in zoomStep ${activeZoomStep}`, + // ); + cube.labelVoxelInResolution(voxelTargetAddress, templateLabelValue, activeZoomStep); const bucket = cube.getBucket( cube.positionToZoomedAddress(voxelTargetAddress, activeZoomStep), ); From 0e922ef58303f2819de17e9f408d25442761a1a0 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 24 Sep 2020 16:17:03 +0200 Subject: [PATCH 056/121] refactor resolution handling to deal with sparse resolutions better (WIP/1) --- frontend/javascripts/admin/api_flow_types.js | 4 +- .../model/accessors/dataset_accessor.js | 124 +++++++++++++++--- .../model/bucket_data_handling/bucket.js | 1 - .../model/bucket_data_handling/data_cube.js | 54 ++++---- .../javascripts/oxalis/model/data_layer.js | 10 +- .../model/helpers/position_converter.js | 19 +++ .../oxalis/model/sagas/isosurface_saga.js | 36 ++--- .../oxalis/model/sagas/volumetracing_saga.js | 3 +- .../oxalis/model_initialization.js | 64 ++++----- .../view/right-menu/mapping_info_view.js | 10 +- 10 files changed, 223 insertions(+), 102 deletions(-) diff --git a/frontend/javascripts/admin/api_flow_types.js b/frontend/javascripts/admin/api_flow_types.js index 184b324933e..a5c3c2f2371 100644 --- a/frontend/javascripts/admin/api_flow_types.js +++ 
b/frontend/javascripts/admin/api_flow_types.js @@ -41,7 +41,7 @@ export type APIMapping = { type APIDataLayerBase = {| +name: string, +boundingBox: BoundingBoxObject, - +resolutions: Array, + +new_resolutions: Array, +elementClass: ElementClass, |}; @@ -619,7 +619,7 @@ export type ServerVolumeTracing = {| // which were created before the multi-resolution capabilities // were added to volume tracings. Also see: // https://github.com/scalableminds/webknossos/pull/4755 - resolutions?: Array, + new_resolutions?: Array, |}; export type ServerTracing = ServerSkeletonTracing | ServerVolumeTracing; diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 9a6c1087c4c..f93c4de4fcb 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -29,6 +29,103 @@ import messages from "messages"; export type ResolutionsMap = Map; +export class ResolutionInfo { + resolutions: Array; + resolutionMap: Map; + + constructor(resolutions: Array) { + this.resolutions = resolutions; + this._buildResolutionMap(); + } + + _buildResolutionMap() { + // Each resolution entry can be characterized by it's greatest resolution dimension. + // E.g., the resolution array [[1, 1, 1], [2, 2, 1], [4, 4, 2]] defines that + // a zoomstep of 2 corresponds to the resolution [2, 2, 1] (and not [4, 4, 2]). + // Therefore, the largest dim for each resolution has to be unique across all resolutions. + + // This function creates a map which maps from powerOfTwo (2**index) to resolution. + + const resolutions = this.resolutions; + + if (resolutions.length !== _.uniqBy(resolutions.map(_.max)).length) { + throw new Error("Max dimension in resolutions is not unique."); + } + + this.resolutionMap = new Map(); + for (const resolution of resolutions) { + this.resolutionMap.set(_.max(resolution), resolution); + } + } + + getResolutionsWithIndices(): Array<[number, Vector3]> { + return Array.from(this.resolutionMap.entries()).map(entry => { + const [powerOfTwo, resolution] = entry; + const resolutionIndex = Math.log2(powerOfTwo); + return [resolutionIndex, resolution]; + }); + } + + indexToPowerOf2(index: number): number { + return 2 ** index; + } + + hasIndex(index: number): boolean { + const powerOfTwo = this.indexToPowerOf2(index); + return this.resolutionMap.has(powerOfTwo); + } + + getResolutionByIndex(index: number): ?Vector3 { + const powerOfTwo = this.indexToPowerOf2(index); + return this.getResolutionByPowerOf2(powerOfTwo); + } + + getResolutionByIndexWithFallback(index: number): Vector3 { + const resolutionMaybe = this.getResolutionByIndex(index); + if (resolutionMaybe) { + return resolutionMaybe; + } else { + const powerOf2 = this.indexToPowerOf2(index); + return [powerOf2, powerOf2, powerOf2]; + } + } + + getResolutionByPowerOf2(powerOfTwo: number): ?Vector3 { + return this.resolutionMap.get(powerOfTwo); + } + + getHighestResolutionIndex(): number { + return Math.log2(this.getHighestResolutionPowerOf2()); + } + + getHighestResolutionPowerOf2(): number { + return _.max(Array.from(this.resolutionMap.keys())); + } + + getClosestExistingIndex(index: number): number { + if (this.hasIndex(index)) { + return index; + } + + const indices = this.getResolutionsWithIndices().map(entry => entry[0]); + const indicesWithDistances = indices.map(_index => { + const distance = Math.abs(index - _index); + return [_index, distance]; + }); + + const 
bestIndexWithDistance = _.head(_.sortBy(indicesWithDistances, entry => entry[1])); + return bestIndexWithDistance[0]; + } +} + +function _getResolutionInfo(resolutions: Array): ResolutionInfo { + return new ResolutionInfo(resolutions); +} + +// Don't use memoizeOne here, since we want to cache the resolutions for all layers +// (which are not that many). +export const getResolutionInfo = _.memoize(_getResolutionInfo); + export function getMostExtensiveResolutions(dataset: APIDataset): Array { return _.chain(dataset.dataSource.dataLayers) .map(dataLayer => dataLayer.resolutions) @@ -56,6 +153,12 @@ function _getResolutions(dataset: APIDataset): Vector3[] { // we memoize _getResolutions, as well. export const getResolutions = memoizeOne(_getResolutions); +function _getDatasetResolutionInfo(dataset: APIDataset): ResolutionInfo { + return new ResolutionInfo(getResolutions(dataset)); +} + +export const getDatasetResolutionInfo = memoizeOne(_getDatasetResolutionInfo); + function _getMaxZoomStep(maybeDataset: ?APIDataset): number { const minimumZoomStepCount = 1; const maxZoomstep = Maybe.fromNullable(maybeDataset) @@ -75,26 +178,17 @@ export function getDataLayers(dataset: APIDataset): DataLayerType[] { return dataset.dataSource.dataLayers; } -function _getResolutionMapOfSegmentationLayer(dataset: APIDataset): ResolutionsMap { +function _getResolutionInfoOfSegmentationLayer(dataset: APIDataset): ResolutionInfo { const segmentationLayer = getSegmentationLayer(dataset); if (!segmentationLayer) { - return new Map(); + return new ResolutionInfo([]); } - const resolutionsObject = new Map(); - const colorLayerResolutions = getResolutions(dataset); - colorLayerResolutions.forEach((resolution, zoomStep) => { - if ( - segmentationLayer.resolutions.some(segmentationLayerResolution => - _.isEqual(resolution, segmentationLayerResolution), - ) - ) { - resolutionsObject.set(zoomStep, [...resolution]); - } - }); - return resolutionsObject; + return getResolutionInfo(segmentationLayer.new_resolutions); } -export const getResolutionMapOfSegmentationLayer = memoizeOne(_getResolutionMapOfSegmentationLayer); +export const getResolutionInfoOfSegmentationLayer = memoizeOne( + _getResolutionInfoOfSegmentationLayer, +); export function getLayerByName(dataset: APIDataset, layerName: string): DataLayerType { const dataLayers = getDataLayers(dataset); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js index 8b7c83a1f67..d5b293d3656 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js @@ -226,7 +226,6 @@ export class DataBucket { const bucketData = this.getOrCreateData(); this.markAndAddBucketForUndo(); labelFunc(bucketData); - this.dirty = true; this.throttledTriggerLabeled(); } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index a2df0371a8a..124661b7755 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -17,7 +17,8 @@ import { import { type VoxelIterator, VoxelNeighborStack2D } from "oxalis/model/volumetracing/volumelayer"; import { getResolutions, - getResolutionMapOfSegmentationLayer, + getResolutionInfoOfSegmentationLayer, + ResolutionInfo, } from "oxalis/model/accessors/dataset_accessor"; import { getSomeTracing } from 
"oxalis/model/accessors/tracing_accessor"; import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; @@ -50,7 +51,6 @@ class CubeEntry { class DataCube { MAXIMUM_BUCKET_COUNT = 5000; - ZOOM_STEP_COUNT: number; arbitraryCube: ArbitraryCubeAdapter; upperBoundary: Vector3; buckets: Array; @@ -85,7 +85,7 @@ class DataCube { constructor( upperBoundary: Vector3, - resolutionsLength: number, + resolutionInfo: ResolutionInfo, elementClass: ElementClass, isSegmentation: boolean, ) { @@ -93,9 +93,7 @@ class DataCube { this.elementClass = elementClass; this.isSegmentation = isSegmentation; - this.ZOOM_STEP_COUNT = resolutionsLength; - - this.MAX_ZOOM_STEP = this.ZOOM_STEP_COUNT - 1; + this.MAX_ZOOM_STEP = resolutionInfo.getHighestResolutionIndex(); _.extend(this, BackboneEvents); @@ -110,28 +108,24 @@ class DataCube { ]; this.arbitraryCube = new ArbitraryCubeAdapter(this, _.clone(cubeBoundary)); - // TODO: for segmentation add special case const { dataset } = Store.getState(); - const colorLayerResolutionsArray = getResolutions(dataset); - let segmentationLayer = new Map(); - if (isSegmentation) { - segmentationLayer = getResolutionMapOfSegmentationLayer(dataset); - } - let numberOfSkippedResolutions = 0; - for (let i = 0; i < this.ZOOM_STEP_COUNT + numberOfSkippedResolutions; i++) { - const resolution = isSegmentation ? segmentationLayer.get(i) : colorLayerResolutionsArray[i]; - if (resolution == null) { - // As segmentation layer might be sparse, not all resolutions might exist. - // Skipping zoomStep - numberOfSkippedResolutions++; - continue; - } + + // let resolutionInfo; + // if (isSegmentation) { + // // TODO (1): check this could simply use the resolution info of the layer. + // // Then, the cube map would be sparse, though. Is this a problem? + // resolutionInfo = getResolutionInfoOfSegmentationLayer(dataset); + // } else { + // resolutionInfo = getDatasetResolutionInfo(dataset); + // } + + for (const [resolutionIndex, resolution] of resolutionInfo.getResolutionsWithIndices()) { const zoomedCubeBoundary = [ Math.ceil(cubeBoundary[0] / resolution[0]) + 1, Math.ceil(cubeBoundary[1] / resolution[1]) + 1, Math.ceil(cubeBoundary[2] / resolution[2]) + 1, ]; - this.cubes[i] = new CubeEntry(zoomedCubeBoundary); + this.cubes[resolutionIndex] = new CubeEntry(zoomedCubeBoundary); } const shouldBeRestrictedByTracingBoundingBox = () => { @@ -208,7 +202,7 @@ class DataCube { } isWithinBounds([x, y, z, zoomStep]: Vector4): boolean { - if (zoomStep >= this.ZOOM_STEP_COUNT) { + if (zoomStep >= this.MAX_ZOOM_STEP + 1) { return false; } @@ -352,16 +346,15 @@ class DataCube { label: number, activeCellId?: ?number = null, ): void { - // TODO: use segmentation layer resolutions. // TODO: Do not label voxel in higher resolutions multiple times. // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used // to apply the annotation to all needed resolutions, without labeling voxels multiple times. 
- const resolutions = getResolutionMapOfSegmentationLayer(Store.getState().dataset); - for (const zoomStep of resolutions.keys()) { + const resolutionInfo = getResolutionInfoOfSegmentationLayer(Store.getState().dataset); + for (const [resolutionIndex, _resolution] of resolutionInfo.getResolutionsWithIndices()) { while (iterator.hasNext) { const voxel = iterator.getNext(); - this.labelVoxelInResolution(voxel, label, zoomStep, activeCellId); + this.labelVoxelInResolution(voxel, label, resolutionIndex, activeCellId); } iterator.reset(); } @@ -371,14 +364,13 @@ class DataCube { labelVoxelInAllResolutions(voxel: Vector3, label: number, activeCellId: ?number) { // Also see labelVoxelsInAllResolutions - // TODO: use segmentation layer resolutions. // TODO: Do not label voxel in higher resolutions multiple times. // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used // to apply the annotation to all needed resolutions, without labeling voxels multiple times. - const resolutions = getResolutionMapOfSegmentationLayer(Store.getState().dataset); - for (const zoomStep of resolutions.keys()) { - this.labelVoxelInResolution(voxel, label, zoomStep, activeCellId); + const resolutionInfo = getResolutionInfoOfSegmentationLayer(Store.getState().dataset); + for (const [resolutionIndex, _resolution] of resolutionInfo.getResolutionsWithIndices()) { + this.labelVoxelInResolution(voxel, label, resolutionIndex, activeCellId); } this.triggerPushQueue(); diff --git a/frontend/javascripts/oxalis/model/data_layer.js b/frontend/javascripts/oxalis/model/data_layer.js index b91009632f0..4499cbc0927 100644 --- a/frontend/javascripts/oxalis/model/data_layer.js +++ b/frontend/javascripts/oxalis/model/data_layer.js @@ -2,7 +2,7 @@ import type { ProgressCallback } from "libs/progress_callback"; import type { Vector3 } from "oxalis/constants"; -import { getLayerBoundaries } from "oxalis/model/accessors/dataset_accessor"; +import { getLayerBoundaries, getResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; import ConnectionInfo from "oxalis/model/data_connection_info"; import DataCube from "oxalis/model/bucket_data_handling/data_cube"; import ErrorHandling from "libs/error_handling"; @@ -23,7 +23,7 @@ class DataLayer { activeMapping: ?string; activeMappingType: MappingType = "JSON"; layerRenderingManager: LayerRenderingManager; - resolutions: Array; + new_resolutions: Array; fallbackLayer: ?string; constructor( @@ -35,16 +35,16 @@ class DataLayer { this.connectionInfo = connectionInfo; this.name = layerInfo.name; this.fallbackLayer = layerInfo.fallbackLayer != null ? 
layerInfo.fallbackLayer : null; - this.resolutions = layerInfo.resolutions; + this.new_resolutions = layerInfo.new_resolutions; const { dataset } = Store.getState(); const isSegmentation = layerInfo.category === "segmentation"; - ErrorHandling.assert(this.resolutions.length > 0, "Resolutions for layer cannot be empty"); + ErrorHandling.assert(this.new_resolutions.length > 0, "Resolutions for layer cannot be empty"); this.cube = new DataCube( getLayerBoundaries(dataset, this.name).upperBoundary, - this.resolutions.length, + getResolutionInfo(this.new_resolutions), layerInfo.elementClass, isSegmentation, ); diff --git a/frontend/javascripts/oxalis/model/helpers/position_converter.js b/frontend/javascripts/oxalis/model/helpers/position_converter.js index 2b2abfa9802..cb055f6eb0b 100644 --- a/frontend/javascripts/oxalis/model/helpers/position_converter.js +++ b/frontend/javascripts/oxalis/model/helpers/position_converter.js @@ -1,6 +1,7 @@ // @flow import constants, { type Vector3, type Vector4 } from "oxalis/constants"; +import { ResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; export function globalPositionToBaseBucket(pos: Vector3): Vector4 { return globalPositionToBucketPosition(pos, [[1, 1, 1]], 0); @@ -74,6 +75,7 @@ export function getResolutionsFactors(resolutionA: Vector3, resolutionB: Vector3 ]; } +// TODO (1): zoomedAddressToAnotherZoomStep usages should be converted to zoomedAddressToAnotherZoomStepWithInfo export function zoomedAddressToAnotherZoomStep( [x, y, z, resolutionIndex]: Vector4, resolutions: Array, @@ -91,6 +93,23 @@ export function zoomedAddressToAnotherZoomStep( ]; } +export function zoomedAddressToAnotherZoomStepWithInfo( + [x, y, z, resolutionIndex]: Vector4, + resolutionInfo: ResolutionInfo, + targetResolutionIndex: number, +): Vector4 { + const currentResolution = resolutionInfo.getResolutionByIndexWithFallback(resolutionIndex); + const targetResolution = resolutionInfo.getResolutionByIndexWithFallback(targetResolutionIndex); + const factors = getResolutionsFactors(currentResolution, targetResolution); + + return [ + Math.floor(x * factors[0]), + Math.floor(y * factors[1]), + Math.floor(z * factors[2]), + targetResolutionIndex, + ]; +} + export function getBucketExtent(resolutions: Vector3[], resolutionIndex: number): Vector3 { return bucketPositionToGlobalAddress([1, 1, 1, resolutionIndex], resolutions); } diff --git a/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js b/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js index ea0d265352d..5d3655c2300 100644 --- a/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js @@ -2,6 +2,7 @@ import { saveAs } from "file-saver"; import type { APIDataset } from "admin/api_flow_types"; +import { ResolutionInfo, getResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; import { changeActiveIsosurfaceCellAction, type ChangeActiveIsosurfaceCellAction, @@ -24,7 +25,7 @@ import { stlIsosurfaceConstants } from "oxalis/view/right-menu/meshes_view"; import { computeIsosurface } from "admin/admin_rest_api"; import { getFlooredPosition } from "oxalis/model/accessors/flycam_accessor"; import { setImportingMeshStateAction } from "oxalis/model/actions/ui_actions"; -import { zoomedAddressToAnotherZoomStep } from "oxalis/model/helpers/position_converter"; +import { zoomedAddressToAnotherZoomStepWithInfo } from "oxalis/model/helpers/position_converter"; import DataLayer from "oxalis/model/data_layer"; import Model from 
"oxalis/model"; import ThreeDMap from "libs/ThreeDMap"; @@ -61,8 +62,12 @@ function removeMapForSegment(segmentId: number): void { isosurfacesMap.delete(segmentId); } -function getZoomedCubeSize(zoomStep: number, resolutions: Array): Vector3 { - const [x, y, z] = zoomedAddressToAnotherZoomStep([...cubeSize, 0], resolutions, zoomStep); +function getZoomedCubeSize(zoomStep: number, resolutionInfo: ResolutionInfo): Vector3 { + const [x, y, z] = zoomedAddressToAnotherZoomStepWithInfo( + [...cubeSize, 0], + resolutionInfo, + zoomStep, + ); // Drop the last element of the Vector4; return [x, y, z]; } @@ -70,9 +75,9 @@ function getZoomedCubeSize(zoomStep: number, resolutions: Array): Vecto function clipPositionToCubeBoundary( position: Vector3, zoomStep: number, - resolutions: Array, + resolutionInfo: ResolutionInfo, ): Vector3 { - const zoomedCubeSize = getZoomedCubeSize(zoomStep, resolutions); + const zoomedCubeSize = getZoomedCubeSize(zoomStep, resolutionInfo); const currentCube = Utils.map3((el, idx) => Math.floor(el / zoomedCubeSize[idx]), position); const clippedPosition = Utils.map3((el, idx) => el * zoomedCubeSize[idx], currentCube); return clippedPosition; @@ -82,12 +87,12 @@ function getNeighborPosition( clippedPosition: Vector3, neighborId: number, zoomStep: number, - resolutions: Array, + resolutionInfo: ResolutionInfo, ): Vector3 { // front_xy, front_xz, front_yz, back_xy, back_xz, back_yz const neighborLookup = [[0, 0, -1], [0, -1, 0], [-1, 0, 0], [0, 0, 1], [0, 1, 0], [1, 0, 0]]; - const zoomedCubeSize = getZoomedCubeSize(zoomStep, resolutions); + const zoomedCubeSize = getZoomedCubeSize(zoomStep, resolutionInfo); const neighborMultiplier = neighborLookup[neighborId]; const neighboringPosition = [ clippedPosition[0] + neighborMultiplier[0] * zoomedCubeSize[0], @@ -149,11 +154,12 @@ function* ensureSuitableIsosurface( } const position = seedPosition != null ? seedPosition : yield* select(state => getFlooredPosition(state.flycam)); - const { resolutions } = layer; + const resolutionInfo = getResolutionInfo(layer.new_resolutions); + const preferredZoomStep = window.__isosurfaceZoomStep != null ? 
window.__isosurfaceZoomStep : 1; - const zoomStep = Math.min(preferredZoomStep, resolutions.length - 1); + const zoomStep = resolutionInfo.getClosestExistingIndex(preferredZoomStep); - const clippedPosition = clipPositionToCubeBoundary(position, zoomStep, resolutions); + const clippedPosition = clipPositionToCubeBoundary(position, zoomStep, resolutionInfo); batchCounterPerSegment[segmentId] = 0; yield* call( @@ -163,7 +169,7 @@ function* ensureSuitableIsosurface( segmentId, clippedPosition, zoomStep, - resolutions, + resolutionInfo, ); } @@ -173,7 +179,7 @@ function* loadIsosurfaceWithNeighbors( segmentId: number, clippedPosition: Vector3, zoomStep: number, - resolutions: Array, + resolutionInfo: ResolutionInfo, ): Saga { let positionsToRequest = [clippedPosition]; @@ -186,7 +192,7 @@ function* loadIsosurfaceWithNeighbors( segmentId, position, zoomStep, - resolutions, + resolutionInfo, ); positionsToRequest = positionsToRequest.concat(neighbors); } @@ -204,7 +210,7 @@ function* maybeLoadIsosurface( segmentId: number, clippedPosition: Vector3, zoomStep: number, - resolutions: Array, + resolutionInfo: ResolutionInfo, ): Saga> { const threeDMap = getMapForSegment(segmentId); @@ -245,7 +251,7 @@ function* maybeLoadIsosurface( getSceneController().addIsosurfaceFromVertices(vertices, segmentId); return neighbors.map(neighbor => - getNeighborPosition(clippedPosition, neighbor, zoomStep, resolutions), + getNeighborPosition(clippedPosition, neighbor, zoomStep, resolutionInfo), ); } diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 2e1e289aa76..0cd432b8922 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -207,6 +207,7 @@ function* createVolumeLayer(planeId: OrthoView): Saga { function* labelWithIterator(iterator, contourTracingMode): Saga { const allowUpdate = yield* select(state => state.tracing.restrictions.allowUpdate); + // todo: labelWithIterator (1) if (!allowUpdate) return; const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); @@ -385,7 +386,7 @@ export function* floodFill(): Saga { } } -// TODO: Iterate over all resolutions of the segmentation layer, not the resolutions of the color layers. +// TODO: (1) Iterate over all resolutions of the segmentation layer, not the resolutions of the color layers. 
// To get all segmentation layer resolutions, use: getResolutionMapOfSegmentationLayer function applyLabeledVoxelMapToAllMissingResolutions( labeledVoxelMapToApply: LabeledVoxelsMap, diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index 08dd7f154e5..e8bb1b8b516 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -320,28 +320,32 @@ function initializeDataset( dataset.dataSource.dataLayers = newDataLayers; }); - ensureDenseLayerResolutions(dataset); - ensureMatchingLayerResolutions(dataset); + // ensureDenseLayerResolutions(dataset); + // ensureMatchingLayerResolutions(dataset); Store.dispatch(setDatasetAction(dataset)); } -export function ensureDenseLayerResolutions(dataset: APIDataset) { - const mostExtensiveResolutions = convertToDenseResolution(getMostExtensiveResolutions(dataset)); - for (const layer of dataset.dataSource.dataLayers) { - layer.resolutions = convertToDenseResolution(layer.resolutions, mostExtensiveResolutions); - } -} - -export function ensureMatchingLayerResolutions(dataset: APIDataset): void { - const mostExtensiveResolutions = getMostExtensiveResolutions(dataset); - for (const layer of dataset.dataSource.dataLayers) { - for (const resolution of layer.resolutions) { - if (mostExtensiveResolutions.find(element => _.isEqual(resolution, element)) == null) { - Toast.error(messages["dataset.resolution_mismatch"], { sticky: true }); - } - } - } -} +// export function ensureDenseLayerResolutions(dataset: APIDataset) { +// const mostExtensiveResolutions = convertToDenseResolution(getMostExtensiveResolutions(dataset)); +// for (const layer of dataset.dataSource.dataLayers) { +// // For segmentation layer +// if (layer.category === "color") { +// layer.resolutions = convertToDenseResolution(layer.resolutions, mostExtensiveResolutions); +// } +// } +// } + +// TODO: (1) restore ensureMatchingLayerResolutions +// export function ensureMatchingLayerResolutions(dataset: APIDataset): void { +// const mostExtensiveResolutions = getMostExtensiveResolutions(dataset); +// for (const layer of dataset.dataSource.dataLayers) { +// for (const resolution of layer.resolutions) { +// if (mostExtensiveResolutions.find(element => _.isEqual(resolution, element)) == null) { +// Toast.error(messages["dataset.resolution_mismatch"], { sticky: true }); +// } +// } +// } +// } export function convertToDenseResolution( resolutions: Array, @@ -453,18 +457,19 @@ function setupLayerForVolumeTracing( const fallbackLayer = layers[fallbackLayerIndex]; const boundaries = getBoundaries(dataset); - console.log(tracing.resolutions); + console.log(tracing.new_resolutions); - // Legacy tracings don't have the `tracing.resolutions` property + // Legacy tracings don't have the `tracing.new_resolutions` property // since they were created before WK started to maintain multiple resolution - // in volume annotations. Therefore, this code falls back to mag1 - const tracingResolutions = tracing.resolutions - ? tracing.resolutions.map(({ x, y, z }) => [x, y, z]) + // in volume annotations. Therefore, this code falls back to mag (1, 1, 1) for + // that case. + const tracingResolutions = tracing.new_resolutions + ? tracing.new_resolutions.map(({ x, y, z }) => [x, y, z]) : [[1, 1, 1]]; console.log(tracingResolutions); const targetResolutions = - fallbackLayer != null ? fallbackLayer.resolutions : getResolutions(dataset); + fallbackLayer != null ? 
fallbackLayer.new_resolutions : getResolutions(dataset); const resolutionsAreSubset = (resAs, resBs) => resAs.every(resA => resBs.some(resB => _.isEqual(resA, resB))); @@ -473,11 +478,11 @@ function setupLayerForVolumeTracing( resolutionsAreSubset(tracingResolutions, targetResolutions); if (!doResolutionsMatch) { - if (tracing.resolutions) { + if (tracing.new_resolutions) { Toast.warning( messages["tracing.volume_resolution_mismatch"], {}, - `Tracing resolutions ${tracingResolutions.toString()} vs dataset resolutions ${targetResolutions.toString()}`, + `The magnifications of the volume tracing (${tracingResolutions.toString()}) don't match the dataset's magnifications (${targetResolutions.toString()}). This can happen when the magnification of the dataset was changed after this tracing was created. Note that there might be rendering issues for this reason.`, ); throw HANDLED_ERROR; } else { @@ -491,15 +496,14 @@ function setupLayerForVolumeTracing( category: "segmentation", largestSegmentId: tracing.largestSegmentId, boundingBox: convertBoundariesToBoundingBox(boundaries), - // volume tracing can only be done for the first resolution - resolutions: tracingResolutions, + new_resolutions: tracingResolutions, mappings: fallbackLayer != null && fallbackLayer.mappings != null ? fallbackLayer.mappings : [], // remember the name of the original layer, used to request mappings fallbackLayer: tracing.fallbackLayer, }; if (fallbackLayer != null) { - // Replace the orginal tracing layer + // Replace the original tracing layer layers[fallbackLayerIndex] = tracingLayer; } else { // Remove other segmentation layers, since we are adding a new one. diff --git a/frontend/javascripts/oxalis/view/right-menu/mapping_info_view.js b/frontend/javascripts/oxalis/view/right-menu/mapping_info_view.js index 36b1718ec2d..e0cc21b3eec 100644 --- a/frontend/javascripts/oxalis/view/right-menu/mapping_info_view.js +++ b/frontend/javascripts/oxalis/view/right-menu/mapping_info_view.js @@ -16,7 +16,11 @@ import type { OxalisState, Mapping, MappingType } from "oxalis/store"; import { calculateGlobalPos } from "oxalis/controller/viewmodes/plane_controller"; import { getMappingsForDatasetLayer, getAgglomeratesForDatasetLayer } from "admin/admin_rest_api"; import { getPosition, getRequestLogZoomStep } from "oxalis/model/accessors/flycam_accessor"; -import { getSegmentationLayer, getResolutions } from "oxalis/model/accessors/dataset_accessor"; +import { + getSegmentationLayer, + getResolutions, + getResolutionInfoOfSegmentationLayer, +} from "oxalis/model/accessors/dataset_accessor"; import { getVolumeTracing } from "oxalis/model/accessors/volumetracing_accessor"; import { setLayerMappingsAction } from "oxalis/model/actions/dataset_actions"; import { @@ -176,7 +180,9 @@ class MappingInfoView extends React.Component { : getNextUsableZoomStepForPosition(globalMousePosition); const getResolutionOfZoomStepAsString = usedZoomStep => { - const usedResolution = segmentationLayer ? segmentationLayer.resolutions[usedZoomStep] : null; + const usedResolution = getResolutionInfoOfSegmentationLayer(dataset).getResolutionByIndex( + usedZoomStep, + ); return usedResolution ? 
`${usedResolution[0]}-${usedResolution[1]}-${usedResolution[2]}` : "Not available"; From c0863537dc83764bd9030db41003676f1beef2d9 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 24 Sep 2020 17:33:15 +0200 Subject: [PATCH 057/121] undo temporary new_resolutions change --- frontend/javascripts/admin/api_flow_types.js | 4 ++-- .../oxalis/model/accessors/dataset_accessor.js | 2 +- frontend/javascripts/oxalis/model/data_layer.js | 8 ++++---- .../oxalis/model/sagas/isosurface_saga.js | 2 +- .../oxalis/model/sagas/volumetracing_saga.js | 1 - .../javascripts/oxalis/model_initialization.js | 14 +++++++------- 6 files changed, 15 insertions(+), 16 deletions(-) diff --git a/frontend/javascripts/admin/api_flow_types.js b/frontend/javascripts/admin/api_flow_types.js index a5c3c2f2371..184b324933e 100644 --- a/frontend/javascripts/admin/api_flow_types.js +++ b/frontend/javascripts/admin/api_flow_types.js @@ -41,7 +41,7 @@ export type APIMapping = { type APIDataLayerBase = {| +name: string, +boundingBox: BoundingBoxObject, - +new_resolutions: Array, + +resolutions: Array, +elementClass: ElementClass, |}; @@ -619,7 +619,7 @@ export type ServerVolumeTracing = {| // which were created before the multi-resolution capabilities // were added to volume tracings. Also see: // https://github.com/scalableminds/webknossos/pull/4755 - new_resolutions?: Array, + resolutions?: Array, |}; export type ServerTracing = ServerSkeletonTracing | ServerVolumeTracing; diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index f93c4de4fcb..adacb089554 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -183,7 +183,7 @@ function _getResolutionInfoOfSegmentationLayer(dataset: APIDataset): ResolutionI if (!segmentationLayer) { return new ResolutionInfo([]); } - return getResolutionInfo(segmentationLayer.new_resolutions); + return getResolutionInfo(segmentationLayer.resolutions); } export const getResolutionInfoOfSegmentationLayer = memoizeOne( diff --git a/frontend/javascripts/oxalis/model/data_layer.js b/frontend/javascripts/oxalis/model/data_layer.js index 4499cbc0927..49b8c69e1ee 100644 --- a/frontend/javascripts/oxalis/model/data_layer.js +++ b/frontend/javascripts/oxalis/model/data_layer.js @@ -23,7 +23,7 @@ class DataLayer { activeMapping: ?string; activeMappingType: MappingType = "JSON"; layerRenderingManager: LayerRenderingManager; - new_resolutions: Array; + resolutions: Array; fallbackLayer: ?string; constructor( @@ -35,16 +35,16 @@ class DataLayer { this.connectionInfo = connectionInfo; this.name = layerInfo.name; this.fallbackLayer = layerInfo.fallbackLayer != null ? 
layerInfo.fallbackLayer : null; - this.new_resolutions = layerInfo.new_resolutions; + this.resolutions = layerInfo.resolutions; const { dataset } = Store.getState(); const isSegmentation = layerInfo.category === "segmentation"; - ErrorHandling.assert(this.new_resolutions.length > 0, "Resolutions for layer cannot be empty"); + ErrorHandling.assert(this.resolutions.length > 0, "Resolutions for layer cannot be empty"); this.cube = new DataCube( getLayerBoundaries(dataset, this.name).upperBoundary, - getResolutionInfo(this.new_resolutions), + getResolutionInfo(this.resolutions), layerInfo.elementClass, isSegmentation, ); diff --git a/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js b/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js index 5d3655c2300..1af1a460e82 100644 --- a/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js @@ -154,7 +154,7 @@ function* ensureSuitableIsosurface( } const position = seedPosition != null ? seedPosition : yield* select(state => getFlooredPosition(state.flycam)); - const resolutionInfo = getResolutionInfo(layer.new_resolutions); + const resolutionInfo = getResolutionInfo(layer.resolutions); const preferredZoomStep = window.__isosurfaceZoomStep != null ? window.__isosurfaceZoomStep : 1; const zoomStep = resolutionInfo.getClosestExistingIndex(preferredZoomStep); diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 0cd432b8922..d0af8cd2da9 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -207,7 +207,6 @@ function* createVolumeLayer(planeId: OrthoView): Saga { function* labelWithIterator(iterator, contourTracingMode): Saga { const allowUpdate = yield* select(state => state.tracing.restrictions.allowUpdate); - // todo: labelWithIterator (1) if (!allowUpdate) return; const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index e8bb1b8b516..0e97fe1b084 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -457,19 +457,19 @@ function setupLayerForVolumeTracing( const fallbackLayer = layers[fallbackLayerIndex]; const boundaries = getBoundaries(dataset); - console.log(tracing.new_resolutions); + console.log(tracing.resolutions); - // Legacy tracings don't have the `tracing.new_resolutions` property + // Legacy tracings don't have the `tracing.resolutions` property // since they were created before WK started to maintain multiple resolution // in volume annotations. Therefore, this code falls back to mag (1, 1, 1) for // that case. - const tracingResolutions = tracing.new_resolutions - ? tracing.new_resolutions.map(({ x, y, z }) => [x, y, z]) + const tracingResolutions = tracing.resolutions + ? tracing.resolutions.map(({ x, y, z }) => [x, y, z]) : [[1, 1, 1]]; console.log(tracingResolutions); const targetResolutions = - fallbackLayer != null ? fallbackLayer.new_resolutions : getResolutions(dataset); + fallbackLayer != null ? 
fallbackLayer.resolutions : getResolutions(dataset); const resolutionsAreSubset = (resAs, resBs) => resAs.every(resA => resBs.some(resB => _.isEqual(resA, resB))); @@ -478,7 +478,7 @@ function setupLayerForVolumeTracing( resolutionsAreSubset(tracingResolutions, targetResolutions); if (!doResolutionsMatch) { - if (tracing.new_resolutions) { + if (tracing.resolutions) { Toast.warning( messages["tracing.volume_resolution_mismatch"], {}, @@ -496,7 +496,7 @@ function setupLayerForVolumeTracing( category: "segmentation", largestSegmentId: tracing.largestSegmentId, boundingBox: convertBoundariesToBoundingBox(boundaries), - new_resolutions: tracingResolutions, + resolutions: tracingResolutions, mappings: fallbackLayer != null && fallbackLayer.mappings != null ? fallbackLayer.mappings : [], // remember the name of the original layer, used to request mappings fallbackLayer: tracing.fallbackLayer, From 56a229de1f88e2026039ff7deba7759990379eac Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 25 Sep 2020 10:27:55 +0200 Subject: [PATCH 058/121] continue refactoring resolution handling to deal with sparse resolutions better (WIP/2) --> brush, paint-bucket and copy-segmentation tool seem to work --- .../model/accessors/dataset_accessor.js | 21 +++- .../oxalis/model/sagas/volumetracing_saga.js | 118 +++++++++++------- .../volume_annotation_sampling.js | 79 ++++++------ 3 files changed, 135 insertions(+), 83 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index adacb089554..1035608fc3e 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -80,6 +80,14 @@ export class ResolutionInfo { return this.getResolutionByPowerOf2(powerOfTwo); } + getResolutionByIndexOrThrow(index: number): Vector3 { + const resolution = this.getResolutionByIndex(index); + if (!resolution) { + throw new Error(`Resolution with in index {index} does not exist`); + } + return resolution; + } + getResolutionByIndexWithFallback(index: number): Vector3 { const resolutionMaybe = this.getResolutionByIndex(index); if (resolutionMaybe) { @@ -109,8 +117,17 @@ export class ResolutionInfo { const indices = this.getResolutionsWithIndices().map(entry => entry[0]); const indicesWithDistances = indices.map(_index => { - const distance = Math.abs(index - _index); - return [_index, distance]; + const distance = index - _index; + if (distance >= 0) { + // The candidate _index is smaller than the requested index. + // Since webKnossos only supports rendering from higher mags, + // when a mag is missing, we want to prioritize "higher" mags + // when looking for a substitute. Therefore, we artificially + // downrank the smaller mag _index. 
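// (Illustration, not part of the original patch: with mags [[1, 1, 1], [4, 4, 4]],
// that is, existing indices 0 and 2, a requested index of 1 yields the candidate
// pairs [0, 1.5] and [2, 1], so getClosestExistingIndex(1) returns 2, the coarser mag.)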
+ return [_index, distance + 0.5]; + } else { + return [_index, Math.abs(distance)]; + } }); const bestIndexWithDistance = _.head(_.sortBy(indicesWithDistances, entry => entry[1])); diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index d0af8cd2da9..310ea416bb5 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -44,7 +44,10 @@ import { getRequestLogZoomStep, } from "oxalis/model/accessors/flycam_accessor"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; -import { getResolutions } from "oxalis/model/accessors/dataset_accessor"; +import { + getResolutionInfoOfSegmentationLayer, + ResolutionInfo, +} from "oxalis/model/accessors/dataset_accessor"; import Constants, { type BoundingBoxType, type ContourMode, @@ -243,7 +246,12 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga const segmentationLayer: DataLayer = yield* call([Model, Model.getSegmentationLayer]); const { cube } = segmentationLayer; const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); - const allResolutions = yield* select(state => getResolutions(state.dataset)); + const resolutionInfo = yield* select(state => + getResolutionInfoOfSegmentationLayer(state.dataset), + ); + const labeledZoomStep = resolutionInfo.getClosestExistingIndex(activeZoomStep); + console.log("labeledZoomStep", labeledZoomStep); + const dimensionIndices = Dimensions.getIndices(activeViewport); const position = Dimensions.roundCoordinate(yield* select(state => getPosition(state.flycam))); const [halfViewportExtentX, halfViewportExtentY] = yield* call( @@ -255,25 +263,22 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga const labeledVoxelMapOfCopiedVoxel: LabeledVoxelsMap = new Map(); function copyVoxelLabel(voxelTemplateAddress, voxelTargetAddress) { - const templateLabelValue = cube.getDataValue(voxelTemplateAddress); + const templateLabelValue = cube.getDataValue(voxelTemplateAddress, null, labeledZoomStep); // Only copy voxels from the previous layer which belong to the current cell if (templateLabelValue === activeCellId) { - const currentLabelValue = cube.getDataValue(voxelTargetAddress); + const currentLabelValue = cube.getDataValue(voxelTargetAddress, null, labeledZoomStep); // Do not overwrite already labelled voxels if (currentLabelValue === 0) { - // console.log( - // `labeling at ${voxelTargetAddress.toString()} with ${templateLabelValue} in zoomStep ${activeZoomStep}`, - // ); - cube.labelVoxelInResolution(voxelTargetAddress, templateLabelValue, activeZoomStep); + cube.labelVoxelInResolution(voxelTargetAddress, templateLabelValue, labeledZoomStep); const bucket = cube.getBucket( - cube.positionToZoomedAddress(voxelTargetAddress, activeZoomStep), + cube.positionToZoomedAddress(voxelTargetAddress, labeledZoomStep), ); if (bucket.type === "null") { return; } - const labeledVoxelInBucket = cube.getVoxelOffset(voxelTargetAddress, activeZoomStep); + const labeledVoxelInBucket = cube.getVoxelOffset(voxelTargetAddress, labeledZoomStep); const labelMapOfBucket = labeledVoxelMapOfCopiedVoxel.get(bucket.zoomedAddress) || new Uint8Array(Constants.BUCKET_WIDTH ** 2).fill(0); @@ -309,9 +314,9 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga } applyLabeledVoxelMapToAllMissingResolutions( labeledVoxelMapOfCopiedVoxel, - activeZoomStep, + labeledZoomStep, 
dimensionIndices, - allResolutions, + resolutionInfo, cube, activeCellId, z, @@ -336,13 +341,16 @@ export function* floodFill(): Saga { const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); const dimensionIndices = Dimensions.getIndices(planeId); const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); - // use getResolutionMapOfSegmentationLayer - const allResolutions = yield* select(state => getResolutions(state.dataset)); - const activeResolution = allResolutions[activeZoomStep]; + const resolutionInfo = yield* select(state => + getResolutionInfoOfSegmentationLayer(state.dataset), + ); + const labeledZoomStep = resolutionInfo.getClosestExistingIndex(activeZoomStep); + + const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); // The floodfill and applyVoxelMap methods of iterates within the bucket. // Thus thirdDimensionValue must also be within the initial bucket in the correct resolution. const thirdDimensionValue = - Math.floor(seedVoxel[dimensionIndices[2]] / activeResolution[dimensionIndices[2]]) % + Math.floor(seedVoxel[dimensionIndices[2]] / labeledResolution[dimensionIndices[2]]) % Constants.BUCKET_WIDTH; const get3DAddress = (voxel: Vector2) => { const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], thirdDimensionValue]; @@ -365,16 +373,16 @@ export function* floodFill(): Saga { get2DAddress, dimensionIndices, currentViewportBounding, - activeZoomStep, + labeledZoomStep, ); if (labeledVoxelMapFromFloodFill == null) { continue; } applyLabeledVoxelMapToAllMissingResolutions( labeledVoxelMapFromFloodFill, - activeZoomStep, + labeledZoomStep, dimensionIndices, - allResolutions, + resolutionInfo, cube, activeCellId, seedVoxel[dimensionIndices[2]], @@ -385,13 +393,17 @@ export function* floodFill(): Saga { } } -// TODO: (1) Iterate over all resolutions of the segmentation layer, not the resolutions of the color layers. -// To get all segmentation layer resolutions, use: getResolutionMapOfSegmentationLayer +function* pairwise(arr: Array): Generator<[T, T], *, *> { + for (let i = 0; i < arr.length - 1; i++) { + yield [arr[i], arr[i + 1]]; + } +} + function applyLabeledVoxelMapToAllMissingResolutions( labeledVoxelMapToApply: LabeledVoxelsMap, - activeZoomStep: number, + labeledZoomStep: number, dimensionIndices: DimensionMap, - allResolutions: Array, + resolutionInfo: ResolutionInfo, segmentationCube: DataCube, cellId: number, thirdDimensionOfSlice: number, @@ -408,28 +420,45 @@ function applyLabeledVoxelMapToAllMissingResolutions( ]; return orderedVoxelWithThirdDimension; }; - // debugger; - // First upscale the voxel map and apply it to all higher resolutions. - for (let zoomStep = activeZoomStep - 1; zoomStep >= 0; zoomStep--) { - const goalResolution = allResolutions[zoomStep]; - const sourceResolution = allResolutions[zoomStep + 1]; + + const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); + // Get all available resolutions and divide the list into two parts. + const allResolutionsWithIndices = resolutionInfo.getResolutionsWithIndices(); + // The pivotIndex is the index within allResolutionsWithIndices which refers to + // the labeled resolution. 
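// (Illustration, not part of the original patch: with existing indices [0, 2, 3]
// and labeledZoomStep === 2, the pivotIndex computed below is 1, downsampleSequence
// holds the entries for indices [2, 3] and upsampleSequence the entries for
// [2, 0], in that reversed order.)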
+ const pivotIndex = allResolutionsWithIndices.findIndex( + ([index, resolution]) => index === labeledZoomStep, + ); + // `downsampleSequence` contains the current mag and all higher mags (to which + // the annotation should be downsampled) + const downsampleSequence = allResolutionsWithIndices.slice(pivotIndex); + // `upsampleSequence` contains the current mag and all lower mags (to which + // the annotation should be upsampled) + const upsampleSequence = allResolutionsWithIndices.slice(0, pivotIndex + 1).reverse(); + + // First upsample the voxel map and apply it to all finer resolutions. + // sourceZoomStep will be higher than targetZoomStep + for (const [source, target] of pairwise(upsampleSequence)) { + const [sourceZoomStep, sourceResolution] = source; + const [targetZoomStep, targetResolution] = target; + currentLabeledVoxelMap = sampleVoxelMapToResolution( currentLabeledVoxelMap, segmentationCube, sourceResolution, - zoomStep + 1, - goalResolution, - zoomStep, + sourceZoomStep, + targetResolution, + targetZoomStep, dimensionIndices, thirdDimensionOfSlice, ); // Adjust thirdDimensionValue so get3DAddress returns the third dimension value - // in the goal resolution to apply the voxelMap correctly. + // in the target resolution to apply the voxelMap correctly. thirdDimensionValue = - Math.floor(thirdDimensionOfSlice / goalResolution[dimensionIndices[2]]) % + Math.floor(thirdDimensionOfSlice / targetResolution[dimensionIndices[2]]) % Constants.BUCKET_WIDTH; const numberOfSlices = Math.ceil( - allResolutions[activeZoomStep][dimensionIndices[2]] / goalResolution[dimensionIndices[2]], + labeledResolution[dimensionIndices[2]] / targetResolution[dimensionIndices[2]], ); applyVoxelMap( currentLabeledVoxelMap, @@ -442,24 +471,27 @@ function applyLabeledVoxelMapToAllMissingResolutions( ); } currentLabeledVoxelMap = labeledVoxelMapToApply; - // Next we downscale the annotation and apply it. - for (let zoomStep = activeZoomStep + 1; zoomStep < allResolutions.length; zoomStep++) { - const goalResolution = allResolutions[zoomStep]; - const sourceResolution = allResolutions[zoomStep - 1]; + + // Next we downsample the annotation and apply it. + // sourceZoomStep will be lower than targetZoomStep + for (const [source, target] of pairwise(downsampleSequence)) { + const [sourceZoomStep, sourceResolution] = source; + const [targetZoomStep, targetResolution] = target; + currentLabeledVoxelMap = sampleVoxelMapToResolution( currentLabeledVoxelMap, segmentationCube, sourceResolution, - zoomStep - 1, - goalResolution, - zoomStep, + sourceZoomStep, + targetResolution, + targetZoomStep, dimensionIndices, thirdDimensionOfSlice, ); // Adjust thirdDimensionValue so get3DAddress returns the third dimension value - // in the goal resolution to apply the voxelMap correctly. + // in the target resolution to apply the voxelMap correctly.
thirdDimensionValue = - Math.floor(thirdDimensionOfSlice / goalResolution[dimensionIndices[2]]) % + Math.floor(thirdDimensionOfSlice / targetResolution[dimensionIndices[2]]) % Constants.BUCKET_WIDTH; applyVoxelMap( currentLabeledVoxelMap, diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 7106da50343..8ab7e3bc106 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -11,8 +11,8 @@ function upsampleVoxelMap( dataCube: DataCube, sourceResolution: Vector3, sourceZoomStep: number, - goalResolution: Vector3, - goalZoomStep: number, + targetResolution: Vector3, + targetZoomStep: number, dimensionIndices: DimensionMap, thirdDimensionVoxelValue: number, ): LabeledVoxelsMap { @@ -20,22 +20,22 @@ function upsampleVoxelMap( // iterating over the buckets in the higher resolution that are covered by the bucket. // For each covered bucket all labeled voxel entries are upsampled with a kernel an marked in an array for the covered bucket. // Therefore all covered buckets with their marked array build the upsampled version of the given LabeledVoxelsMap. - if (sourceZoomStep <= goalZoomStep) { + if (sourceZoomStep <= targetZoomStep) { throw new Error("Trying to upsample a LabeledVoxelMap with the down sample function."); } - const labeledVoxelMapInGoalResolution: LabeledVoxelsMap = new Map(); - const scaleToSource = map3((val, index) => val / sourceResolution[index], goalResolution); + const labeledVoxelMapInTargetResolution: LabeledVoxelsMap = new Map(); + const scaleToSource = map3((val, index) => val / sourceResolution[index], targetResolution); // This array serves multiple purposes. It has a name / variable for each purpose. - const scaleToGoal = map3((val, index) => val / goalResolution[index], sourceResolution); + const scaleToGoal = map3((val, index) => val / targetResolution[index], sourceResolution); const numberOfBucketWithinSourceBucket = scaleToGoal; - const singleVoxelBoundsInGoalResolution = scaleToGoal; + const singleVoxelBoundsInTargetResolution = scaleToGoal; const boundsOfGoalBucketWithinSourceBucket = map3( value => Math.ceil(value * constants.BUCKET_WIDTH), scaleToSource, ); // This is the buckets zoomed address part of the third dimension. 
const thirdDimensionBucketValue = Math.floor( - thirdDimensionVoxelValue / goalResolution[dimensionIndices[2]] / constants.BUCKET_WIDTH, + thirdDimensionVoxelValue / targetResolution[dimensionIndices[2]] / constants.BUCKET_WIDTH, ); for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { const labeledBucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); @@ -65,13 +65,13 @@ function upsampleVoxelMap( let annotatedAtleastOneVoxel = false; const currentGoalBucket = dataCube.getOrCreateBucket([ ...currentGoalBucketAddress, - goalZoomStep, + targetZoomStep, ]); if (currentGoalBucket.type === "null") { console.warn( messages["sampling.could_not_get_or_create_bucket"]([ ...currentGoalBucketAddress, - goalZoomStep, + targetZoomStep, ]), ); continue; @@ -99,25 +99,25 @@ function upsampleVoxelMap( secondDimVoxelOffset ] === 1 ) { - const kernelTopLeftVoxelInGoalResolution = [ - kernelLeft * singleVoxelBoundsInGoalResolution[dimensionIndices[0]], - kernelTop * singleVoxelBoundsInGoalResolution[dimensionIndices[1]], + const kernelTopLeftVoxelInTargetResolution = [ + kernelLeft * singleVoxelBoundsInTargetResolution[dimensionIndices[0]], + kernelTop * singleVoxelBoundsInTargetResolution[dimensionIndices[1]], ]; // The labeled voxel is upscaled using a kernel. for ( let firstKernelOffset = 0; - firstKernelOffset < singleVoxelBoundsInGoalResolution[dimensionIndices[0]]; + firstKernelOffset < singleVoxelBoundsInTargetResolution[dimensionIndices[0]]; firstKernelOffset++ ) { for ( let secondKernelOffset = 0; - secondKernelOffset < singleVoxelBoundsInGoalResolution[dimensionIndices[1]]; + secondKernelOffset < singleVoxelBoundsInTargetResolution[dimensionIndices[1]]; secondKernelOffset++ ) { currentGoalVoxelMap[ - (kernelTopLeftVoxelInGoalResolution[0] + firstKernelOffset) * + (kernelTopLeftVoxelInTargetResolution[0] + firstKernelOffset) * constants.BUCKET_WIDTH + - kernelTopLeftVoxelInGoalResolution[1] + + kernelTopLeftVoxelInTargetResolution[1] + secondKernelOffset ] = 1; } @@ -127,12 +127,15 @@ function upsampleVoxelMap( } } if (annotatedAtleastOneVoxel) { - labeledVoxelMapInGoalResolution.set(currentGoalBucket.zoomedAddress, currentGoalVoxelMap); + labeledVoxelMapInTargetResolution.set( + currentGoalBucket.zoomedAddress, + currentGoalVoxelMap, + ); } } } } - return labeledVoxelMapInGoalResolution; + return labeledVoxelMapInTargetResolution; } function downsampleVoxelMap( @@ -140,20 +143,20 @@ function downsampleVoxelMap( dataCube: DataCube, sourceResolution: Vector3, sourceZoomStep: number, - goalResolution: Vector3, - goalZoomStep: number, + targetResolution: Vector3, + targetZoomStep: number, dimensionIndices: DimensionMap, ): LabeledVoxelsMap { // This method downsamples a LabeledVoxelsMap. For each bucket of the LabeledVoxelsMap // the matching bucket the lower resolution is determined and all the labeledVoxels // are downsampled to the lower resolution bucket. The downsampling uses a kernel to skip // checking whether to label a downsampled voxels if already one labeled voxel matching the downsampled voxel is found. 
- if (goalZoomStep <= sourceZoomStep) { + if (targetZoomStep <= sourceZoomStep) { throw new Error("Trying to upsample a LabeledVoxelMap with the down sample function."); } - const labeledVoxelMapInGoalResolution: LabeledVoxelsMap = new Map(); - const scaleToSource = map3((val, index) => val / sourceResolution[index], goalResolution); - const scaleToGoal = map3((val, index) => val / goalResolution[index], sourceResolution); + const labeledVoxelMapInTargetResolution: LabeledVoxelsMap = new Map(); + const scaleToSource = map3((val, index) => val / sourceResolution[index], targetResolution); + const scaleToGoal = map3((val, index) => val / targetResolution[index], sourceResolution); for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { const labeledBucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); if (labeledBucket.type === "null") { @@ -164,10 +167,10 @@ function downsampleVoxelMap( (value, index) => Math.floor(value * scaleToGoal[index]), labeledBucket.getAddress(), ); - const goalBucket = dataCube.getOrCreateBucket([...goalBucketAddress, goalZoomStep]); + const goalBucket = dataCube.getOrCreateBucket([...goalBucketAddress, targetZoomStep]); if (goalBucket.type === "null") { console.warn( - messages["sampling.could_not_get_or_create_bucket"]([...goalBucketAddress, goalZoomStep]), + messages["sampling.could_not_get_or_create_bucket"]([...goalBucketAddress, targetZoomStep]), ); continue; } @@ -186,7 +189,7 @@ function downsampleVoxelMap( bucketOffset, ); const goalVoxelMap = - labeledVoxelMapInGoalResolution.get(goalBucket.zoomedAddress) || + labeledVoxelMapInTargetResolution.get(goalBucket.zoomedAddress) || new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); // Iterate over the voxelMap in the goal resolution and search in each voxel for a labeled voxel (kernel-wise iteration). 
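// (Illustration, not part of the original patch: when downsampling from a source mag
// of [2, 2, 1] to a target mag of [4, 4, 2], scaleToSource is [2, 2, 2], so the kernel
// computed below covers 2x2 source voxels for every voxel of the target map.)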
const kernelSize = map3(scaleValue => Math.ceil(scaleValue), scaleToSource); @@ -231,9 +234,9 @@ function downsampleVoxelMap( } } } - labeledVoxelMapInGoalResolution.set(goalBucket.zoomedAddress, goalVoxelMap); + labeledVoxelMapInTargetResolution.set(goalBucket.zoomedAddress, goalVoxelMap); } - return labeledVoxelMapInGoalResolution; + return labeledVoxelMapInTargetResolution; } export default function sampleVoxelMapToResolution( @@ -241,29 +244,29 @@ export default function sampleVoxelMapToResolution( dataCube: DataCube, sourceResolution: Vector3, sourceZoomStep: number, - goalResolution: Vector3, - goalZoomStep: number, + targetResolution: Vector3, + targetZoomStep: number, dimensionIndices: DimensionMap, thirdDimensionVoxelValue: number, ): LabeledVoxelsMap { - if (sourceZoomStep < goalZoomStep) { + if (sourceZoomStep < targetZoomStep) { return downsampleVoxelMap( labeledVoxelMap, dataCube, sourceResolution, sourceZoomStep, - goalResolution, - goalZoomStep, + targetResolution, + targetZoomStep, dimensionIndices, ); - } else if (goalZoomStep < sourceZoomStep) { + } else if (targetZoomStep < sourceZoomStep) { return upsampleVoxelMap( labeledVoxelMap, dataCube, sourceResolution, sourceZoomStep, - goalResolution, - goalZoomStep, + targetResolution, + targetZoomStep, dimensionIndices, thirdDimensionVoxelValue, ); From dfdd94429bd08b09f2e59b5a5ace048e100be2ef Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 25 Sep 2020 11:12:45 +0200 Subject: [PATCH 059/121] fix picking segment color when being in a not-existing segmentation mag --- .../combinations/volumetracing_plane_controller.js | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/controller/combinations/volumetracing_plane_controller.js b/frontend/javascripts/oxalis/controller/combinations/volumetracing_plane_controller.js index e3334607dd4..d2d9f1eee0a 100644 --- a/frontend/javascripts/oxalis/controller/combinations/volumetracing_plane_controller.js +++ b/frontend/javascripts/oxalis/controller/combinations/volumetracing_plane_controller.js @@ -28,6 +28,7 @@ import { setActiveCellAction, } from "oxalis/model/actions/volumetracing_actions"; import { getPosition, getRequestLogZoomStep } from "oxalis/model/accessors/flycam_accessor"; +import { getResolutionInfoOfSegmentationLayer } from "oxalis/model/accessors/dataset_accessor"; import { getVolumeTool, getContourTracingMode, @@ -163,9 +164,14 @@ export function getPlaneMouseControls(_planeId: OrthoView): * { if (!segmentation) { return; } + const storeState = Store.getState(); + const logZoomStep = getRequestLogZoomStep(storeState); + const resolutionInfo = getResolutionInfoOfSegmentationLayer(storeState.dataset); + const existingZoomStep = resolutionInfo.getClosestExistingIndex(logZoomStep); + const cellId = segmentation.cube.getMappedDataValue( calculateGlobalPos(pos), - getRequestLogZoomStep(Store.getState()), + existingZoomStep, ); if (cellId > 0) { Store.dispatch(setActiveCellAction(cellId)); From 0f6699ac20a8b7f502ca251ac05133014f34cdfe Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 25 Sep 2020 15:08:15 +0200 Subject: [PATCH 060/121] ensure that prefetch saga, bucket picker and pull queue don't try to load buckets for not existing mags --- .../oxalis/model/accessors/dataset_accessor.js | 1 + .../oxalis/model/bucket_data_handling/data_cube.js | 2 +- .../bucket_data_handling/layer_rendering_manager.js | 10 ++++++++-- .../prefetch_strategy_arbitrary.js | 5 ++++- .../bucket_data_handling/prefetch_strategy_plane.js | 4 +++- 
.../javascripts/oxalis/model/sagas/prefetch_saga.js | 13 +++++++++++-- 6 files changed, 28 insertions(+), 7 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 1035608fc3e..0099320cb72 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -110,6 +110,7 @@ export class ResolutionInfo { return _.max(Array.from(this.resolutionMap.keys())); } + // todo: consider to only go upwards instead of both directions. getClosestExistingIndex(index: number): number { if (this.hasIndex(index)) { return index; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 124661b7755..7fbac927065 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -202,7 +202,7 @@ class DataCube { } isWithinBounds([x, y, z, zoomStep]: Vector4): boolean { - if (zoomStep >= this.MAX_ZOOM_STEP + 1) { + if (this.cubes[zoomStep] == null) { return false; } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js index 15aba4c03f1..e7f87dcdbbe 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js @@ -19,6 +19,8 @@ import { getByteCount, getElementClass, isLayerVisible, + getLayerByName, + getResolutionInfo, } from "oxalis/model/accessors/dataset_accessor"; import AsyncBucketPickerWorker from "oxalis/workers/async_bucket_picker.worker"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; @@ -163,7 +165,11 @@ export default class LayerRenderingManager { const { dataset, datasetConfiguration } = state; const isAnchorPointNew = this.maybeUpdateAnchorPoint(position, logZoomStep); - if (logZoomStep > this.cube.MAX_ZOOM_STEP) { + const layer = getLayerByName(dataset, this.name); + const resolutionInfo = getResolutionInfo(layer.resolutions); + const maximumResolutionIndex = resolutionInfo.getHighestResolutionIndex(); + + if (logZoomStep > maximumResolutionIndex) { // Don't render anything if the zoomStep is too high this.textureBucketManager.setActiveBuckets([], this.cachedAnchorPoint, isAnchorPointNew); return this.cachedAnchorPoint; @@ -244,7 +250,7 @@ export default class LayerRenderingManager { // In general, pull buckets which are not available but should be sent to the GPU const missingBuckets = bucketsWithPriorities .filter(({ bucket }) => !bucket.hasData()) - .filter(({ bucket }) => bucket.zoomedAddress[3] <= this.cube.MAX_ZOOM_STEP) + .filter(({ bucket }) => resolutionInfo.hasIndex(bucket.zoomedAddress[3])) .map(({ bucket, priority }) => ({ bucket: bucket.zoomedAddress, priority })); this.pullQueue.addAll(missingBuckets); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.js b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.js index 41a762f8e87..ac01845c9dc 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.js @@ -6,6 +6,7 @@ import { M4x4, type 
Matrix4x4, V3 } from "libs/mjs"; import type { PullQueueItem } from "oxalis/model/bucket_data_handling/pullqueue"; import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; import PolyhedronRasterizer from "oxalis/model/bucket_data_handling/polyhedron_rasterizer"; +import { ResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; export class PrefetchStrategyArbitrary extends AbstractPrefetchStrategy { velocityRangeStart = 0; @@ -54,11 +55,13 @@ export class PrefetchStrategyArbitrary extends AbstractPrefetchStrategy { prefetch( matrix: Matrix4x4, - zoomStep: number, + activeZoomStep: number, position: Vector3, resolutions: Array, + resolutionInfo: ResolutionInfo, ): Array { const pullQueue = []; + const zoomStep = resolutionInfo.getClosestExistingIndex(activeZoomStep); const matrix0 = M4x4.clone(matrix); this.modifyMatrixForPoly(matrix0, zoomStep); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js index 888ba65cdad..ac9e2965c91 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js @@ -3,6 +3,7 @@ import _ from "lodash"; import type { Area } from "oxalis/model/accessors/flycam_accessor"; +import { ResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; import type { PullQueueItem } from "oxalis/model/bucket_data_handling/pullqueue"; import { zoomedAddressToAnotherZoomStep } from "oxalis/model/helpers/position_converter"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; @@ -83,8 +84,9 @@ export class PrefetchStrategy extends AbstractPrefetchStrategy { activePlane: OrthoView, areas: OrthoViewMap, resolutions: Vector3[], + resolutionInfo: ResolutionInfo, ): Array { - const zoomStep = Math.min(currentZoomStep, cube.MAX_ZOOM_STEP); + const zoomStep = resolutionInfo.getClosestExistingIndex(currentZoomStep); const zoomStepDiff = currentZoomStep - zoomStep; const queueItemsForCurrentZoomStep = this.prefetchImpl( diff --git a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.js b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.js index 3b654dfc982..7e3331d5ef4 100644 --- a/frontend/javascripts/oxalis/model/sagas/prefetch_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/prefetch_saga.js @@ -13,7 +13,11 @@ import { getRequestLogZoomStep, getAreasFromState, } from "oxalis/model/accessors/flycam_accessor"; -import { getResolutions, isLayerVisible } from "oxalis/model/accessors/dataset_accessor"; +import { + getResolutions, + isLayerVisible, + getResolutionInfo, +} from "oxalis/model/accessors/dataset_accessor"; import DataLayer from "oxalis/model/data_layer"; import Model from "oxalis/model"; import constants, { type Vector3 } from "oxalis/constants"; @@ -96,6 +100,8 @@ function getTracingTypes(state: OxalisState) { export function* prefetchForPlaneMode(layer: DataLayer, previousProperties: Object): Saga { const position = yield* select(state => getPosition(state.flycam)); const zoomStep = yield* select(state => getRequestLogZoomStep(state)); + const resolutionInfo = getResolutionInfo(layer.resolutions); + const activePlane = yield* select(state => state.viewModeData.plane.activeViewport); const tracingTypes = yield* select(getTracingTypes); const { lastPosition, lastDirection, lastZoomStep, lastBucketPickerTick } = previousProperties; @@ -126,6 +132,7 @@ export 
function* prefetchForPlaneMode(layer: DataLayer, previousProperties: Obje activePlane, areas, resolutions, + resolutionInfo, ); if (bucketDebuggingFlags.visualizePrefetchedBuckets) { for (const item of buckets) { @@ -156,6 +163,8 @@ export function* prefetchForArbitraryMode( const matrix = yield* select(state => state.flycam.currentMatrix); const zoomStep = yield* select(state => getRequestLogZoomStep(state)); const tracingTypes = yield* select(getTracingTypes); + const resolutionInfo = getResolutionInfo(layer.resolutions); + const resolutions = yield* select(state => getResolutions(state.dataset)); const layerRenderingManager = yield* call( [Model, Model.getLayerRenderingManagerByName], @@ -175,7 +184,7 @@ export function* prefetchForArbitraryMode( strategy.inVelocityRange(connectionInfo.bandwidth) && strategy.inRoundTripTimeRange(connectionInfo.roundTripTime) ) { - const buckets = strategy.prefetch(matrix, zoomStep, position, resolutions); + const buckets = strategy.prefetch(matrix, zoomStep, position, resolutions, resolutionInfo); if (bucketDebuggingFlags.visualizePrefetchedBuckets) { for (const item of buckets) { const bucket = cube.getOrCreateBucket(item.bucket); From 3bccd0e647fedf26d2abba9cc3faf5ee50c74b28 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 25 Sep 2020 16:05:15 +0200 Subject: [PATCH 061/121] ensure legacy getResolutions method provides dense resolution set; clean up --- .../materials/plane_material_factory.js | 2 +- .../model/accessors/dataset_accessor.js | 34 +++++++++- .../model/bucket_data_handling/data_cube.js | 31 +++------- .../layer_rendering_manager.js | 10 ++- .../prefetch_strategy_plane.js | 3 +- .../oxalis/model/sagas/volumetracing_saga.js | 5 +- .../oxalis/model_initialization.js | 62 ++++--------------- .../view/settings/dataset_settings_view.js | 8 +-- .../volume_annotation_sampling.spec.js | 2 - 9 files changed, 68 insertions(+), 89 deletions(-) diff --git a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js index 15b748d114c..3d24de4da50 100644 --- a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js +++ b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js @@ -212,7 +212,7 @@ class PlaneMaterialFactory { const layerName = sanitizeName(dataLayer.name); this.uniforms[`${layerName}_maxZoomStep`] = { type: "f", - value: dataLayer.cube.MAX_ZOOM_STEP, + value: dataLayer.cube.resolutionInfo.getHighestResolutionIndex(), }; this.uniforms[`${layerName}_alpha`] = { type: "f", diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 0099320cb72..66c31414113 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -83,7 +83,7 @@ export class ResolutionInfo { getResolutionByIndexOrThrow(index: number): Vector3 { const resolution = this.getResolutionByIndex(index); if (!resolution) { - throw new Error(`Resolution with in index {index} does not exist`); + throw new Error(`Resolution with in index ${index} does not exist`); } return resolution; } @@ -152,13 +152,43 @@ export function getMostExtensiveResolutions(dataset: APIDataset): Array .valueOf(); } +export function convertToDenseResolution( + resolutions: Array, + fallbackDenseResolutions?: Array, +): Array { + // Each resolution entry can be characterized by it's greatest 
resolution dimension. + // E.g., the resolution array [[1, 1, 1], [2, 2, 1], [4, 4, 2]] defines that + // a log zoomstep of 2 corresponds to the resolution [2, 2, 1] (and not [4, 4, 2]). + // Therefore, the largest dim for each resolution has to be unique across all resolutions. + + // This function returns an array of resolutions, for which each index will + // hold a resolution with highest_dim === 2**index. + + if (resolutions.length !== _.uniqBy(resolutions.map(_.max)).length) { + throw new Error("Max dimension in resolutions is not unique."); + } + const paddedResolutionCount = 1 + Math.log2(_.max(resolutions.map(v => _.max(v)))); + const resolutionsLookUp = _.keyBy(resolutions, _.max); + const fallbackResolutionsLookUp = _.keyBy(fallbackDenseResolutions || [], _.max); + + return _.range(0, paddedResolutionCount).map(exp => { + const resPower = 2 ** exp; + // If the resolution does not exist, use either the given fallback resolution or an isotropic fallback + const fallback = fallbackResolutionsLookUp[resPower] || [resPower, resPower, resPower]; + return resolutionsLookUp[resPower] || fallback; + }); +} + function _getResolutions(dataset: APIDataset): Vector3[] { // Different layers can have different resolutions. At the moment, // unequal resolutions will result in undefined behavior. // However, if resolutions are subset of each other, everything should be fine. // For that case, returning the longest resolutions array should suffice - const mostExtensiveResolutions = getMostExtensiveResolutions(dataset); + // In the long term, getResolutions should not be used anymore. + // Instead, all the code should use the ResolutionInfo class which represents + // exactly which resolutions exist. + const mostExtensiveResolutions = convertToDenseResolution(getMostExtensiveResolutions(dataset)); if (!mostExtensiveResolutions) { return []; } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 7fbac927065..86ac29c6ac9 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -15,11 +15,7 @@ import { type BucketDataArray, } from "oxalis/model/bucket_data_handling/bucket"; import { type VoxelIterator, VoxelNeighborStack2D } from "oxalis/model/volumetracing/volumelayer"; -import { - getResolutions, - getResolutionInfoOfSegmentationLayer, - ResolutionInfo, -} from "oxalis/model/accessors/dataset_accessor"; +import { getResolutions, ResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; @@ -56,7 +52,6 @@ class DataCube { buckets: Array; bucketIterator: number = 0; bucketCount: number = 0; - MAX_ZOOM_STEP: number; cubes: Array; boundingBox: BoundingBox; pullQueue: PullQueue; @@ -68,6 +63,7 @@ class DataCube { trigger: Function; on: Function; off: Function; + resolutionInfo: ResolutionInfo; // The cube stores the buckets in a separate array for each zoomStep. For each // zoomStep the cube-array contains the boundaries and an array holding the buckets. 
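For illustration (a minimal sketch, not part of the diff itself), the dense conversion introduced in dataset_accessor.js above behaves as follows, assuming the convertToDenseResolution signature shown in this commit:
import { convertToDenseResolution } from "oxalis/model/accessors/dataset_accessor";
// No mag has a largest dimension of 1, so an isotropic [1, 1, 1] is padded in and
// each index i ends up holding the mag whose largest dimension is 2 ** i.
const dense = convertToDenseResolution([[2, 2, 1], [4, 4, 2]]);
// dense === [[1, 1, 1], [2, 2, 1], [4, 4, 2]]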
@@ -92,8 +88,7 @@ class DataCube { this.upperBoundary = upperBoundary; this.elementClass = elementClass; this.isSegmentation = isSegmentation; - - this.MAX_ZOOM_STEP = resolutionInfo.getHighestResolutionIndex(); + this.resolutionInfo = resolutionInfo; _.extend(this, BackboneEvents); @@ -108,16 +103,6 @@ class DataCube { ]; this.arbitraryCube = new ArbitraryCubeAdapter(this, _.clone(cubeBoundary)); - const { dataset } = Store.getState(); - - // let resolutionInfo; - // if (isSegmentation) { - // // TODO (1): check this could simply use the resolution info of the layer. - // // Then, the cube map would be sparse, though. Is this a problem? - // resolutionInfo = getResolutionInfoOfSegmentationLayer(dataset); - // } else { - // resolutionInfo = getDatasetResolutionInfo(dataset); - // } for (const [resolutionIndex, resolution] of resolutionInfo.getResolutionsWithIndices()) { const zoomedCubeBoundary = [ @@ -350,8 +335,8 @@ class DataCube { // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used // to apply the annotation to all needed resolutions, without labeling voxels multiple times. - const resolutionInfo = getResolutionInfoOfSegmentationLayer(Store.getState().dataset); - for (const [resolutionIndex, _resolution] of resolutionInfo.getResolutionsWithIndices()) { + + for (const [resolutionIndex] of this.resolutionInfo.getResolutionsWithIndices()) { while (iterator.hasNext) { const voxel = iterator.getNext(); this.labelVoxelInResolution(voxel, label, resolutionIndex, activeCellId); @@ -368,8 +353,8 @@ class DataCube { // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used // to apply the annotation to all needed resolutions, without labeling voxels multiple times. 
- const resolutionInfo = getResolutionInfoOfSegmentationLayer(Store.getState().dataset); - for (const [resolutionIndex, _resolution] of resolutionInfo.getResolutionsWithIndices()) { + + for (const [resolutionIndex] of this.resolutionInfo.getResolutionsWithIndices()) { this.labelVoxelInResolution(voxel, label, resolutionIndex, activeCellId); } @@ -575,7 +560,7 @@ class DataCube { getVoxelOffset(voxel: Vector3, zoomStep: number = 0): Vector3 { // No `map` for performance reasons const voxelOffset = [0, 0, 0]; - const resolution = getResolutions(Store.getState().dataset)[zoomStep]; + const resolution = this.resolutionInfo.getResolutionByIndexWithFallback(zoomStep); for (let i = 0; i < 3; i++) { voxelOffset[i] = Math.floor(voxel[i] / resolution[i]) % constants.BUCKET_WIDTH; } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js index e7f87dcdbbe..eab83d43ae6 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js @@ -176,7 +176,10 @@ export default class LayerRenderingManager { } const resolutions = getResolutions(dataset); - const subBucketLocality = getSubBucketLocality(position, resolutions[logZoomStep]); + const subBucketLocality = getSubBucketLocality( + position, + resolutionInfo.getResolutionByIndexWithFallback(logZoomStep), + ); const areas = getAreasFromState(state); const matrix = getZoomedMatrix(state.flycam); @@ -269,8 +272,9 @@ export default class LayerRenderingManager { maybeUpdateAnchorPoint(position: Vector3, logZoomStep: number): boolean { const state = Store.getState(); - const resolutions = getResolutions(state.dataset); - const resolution = resolutions[logZoomStep]; + const layer = getLayerByName(state.dataset, this.name); + const resolutionInfo = getResolutionInfo(layer.resolutions); + const resolution = resolutionInfo.getResolutionByIndexWithFallback(logZoomStep); const addressSpaceDimensions = getAddressSpaceDimensions( state.temporaryConfiguration.gpuSetup.initializedGpuFactor, ); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js index ac9e2965c91..52324483bf7 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js @@ -87,6 +87,7 @@ export class PrefetchStrategy extends AbstractPrefetchStrategy { resolutionInfo: ResolutionInfo, ): Array { const zoomStep = resolutionInfo.getClosestExistingIndex(currentZoomStep); + const maxZoomStep = resolutionInfo.getHighestResolutionIndex(); const zoomStepDiff = currentZoomStep - zoomStep; const queueItemsForCurrentZoomStep = this.prefetchImpl( @@ -102,7 +103,7 @@ export class PrefetchStrategy extends AbstractPrefetchStrategy { ); let queueItemsForFallbackZoomStep = []; - const fallbackZoomStep = Math.min(cube.MAX_ZOOM_STEP, currentZoomStep + 1); + const fallbackZoomStep = Math.min(maxZoomStep, currentZoomStep + 1); if (fallbackZoomStep > zoomStep) { queueItemsForFallbackZoomStep = this.prefetchImpl( cube, diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 310ea416bb5..d5eb8b18737 100644 --- 
a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -63,7 +63,6 @@ import Dimensions, { type DimensionMap } from "oxalis/model/dimensions"; import Model from "oxalis/model"; import Toast from "libs/toast"; import VolumeLayer from "oxalis/model/volumetracing/volumelayer"; -import api from "oxalis/api/internal_api"; import inferSegmentInViewport, { getHalfViewportExtents, } from "oxalis/model/sagas/automatic_brush_saga"; @@ -426,9 +425,7 @@ function applyLabeledVoxelMapToAllMissingResolutions( const allResolutionsWithIndices = resolutionInfo.getResolutionsWithIndices(); // The pivotIndex is the index within allResolutionsWithIndices which refers to // the labeled resolution. - const pivotIndex = allResolutionsWithIndices.findIndex( - ([index, resolution]) => index === labeledZoomStep, - ); + const pivotIndex = allResolutionsWithIndices.findIndex(([index]) => index === labeledZoomStep); // `downsampleSequence` contains the current mag and all higher mags (to which // should be downsampled) const downsampleSequence = allResolutionsWithIndices.slice(pivotIndex); diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index 0e97fe1b084..90dc3704816 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -26,6 +26,7 @@ import { getSegmentationLayer, isElementClassSupported, getResolutions, + convertToDenseResolution, } from "oxalis/model/accessors/dataset_accessor"; import { getSomeServerTracing } from "oxalis/model/accessors/tracing_accessor"; import { @@ -65,7 +66,7 @@ import Store, { type TraceOrViewCommand, type AnnotationType } from "oxalis/stor import Toast from "libs/toast"; import UrlManager, { type UrlManagerState } from "oxalis/controller/url_manager"; import * as Utils from "libs/utils"; -import constants, { ControlModeEnum, type Vector3 } from "oxalis/constants"; +import constants, { ControlModeEnum } from "oxalis/constants"; import messages from "messages"; import window from "libs/window"; @@ -320,58 +321,21 @@ function initializeDataset( dataset.dataSource.dataLayers = newDataLayers; }); - // ensureDenseLayerResolutions(dataset); - // ensureMatchingLayerResolutions(dataset); + ensureMatchingLayerResolutions(dataset); Store.dispatch(setDatasetAction(dataset)); } -// export function ensureDenseLayerResolutions(dataset: APIDataset) { -// const mostExtensiveResolutions = convertToDenseResolution(getMostExtensiveResolutions(dataset)); -// for (const layer of dataset.dataSource.dataLayers) { -// // For segmentation layer -// if (layer.category === "color") { -// layer.resolutions = convertToDenseResolution(layer.resolutions, mostExtensiveResolutions); -// } -// } -// } - -// TODO: (1) restore ensureMatchingLayerResolutions -// export function ensureMatchingLayerResolutions(dataset: APIDataset): void { -// const mostExtensiveResolutions = getMostExtensiveResolutions(dataset); -// for (const layer of dataset.dataSource.dataLayers) { -// for (const resolution of layer.resolutions) { -// if (mostExtensiveResolutions.find(element => _.isEqual(resolution, element)) == null) { -// Toast.error(messages["dataset.resolution_mismatch"], { sticky: true }); -// } -// } -// } -// } - -export function convertToDenseResolution( - resolutions: Array, - fallbackDenseResolutions?: Array, -): Array { - // Each resolution entry can be characterized by it's greatest resolution dimension. 
- // E.g., the resolution array [[1, 1, 1], [2, 2, 1], [4, 4, 2]] defines that - // a log zoomstep of 2 corresponds to the resolution [2, 2, 1] (and not [4, 4, 2]). - // Therefore, the largest dim for each resolution has to be unique across all resolutions. - - // This function returns an array of resolutions, for which each index will - // hold a resolution with highest_dim === 2**index. - - if (resolutions.length !== _.uniqBy(resolutions.map(_.max)).length) { - throw new Error("Max dimension in resolutions is not unique."); +export function ensureMatchingLayerResolutions(dataset: APIDataset): void { + const mostExtensiveResolutions = convertToDenseResolution(getMostExtensiveResolutions(dataset)); + + for (const layer of dataset.dataSource.dataLayers) { + const denseResolutions = convertToDenseResolution(layer.resolutions, mostExtensiveResolutions); + for (const resolution of denseResolutions) { + if (mostExtensiveResolutions.find(element => _.isEqual(resolution, element)) == null) { + Toast.error(messages["dataset.resolution_mismatch"], { sticky: true }); + } + } } - const paddedResolutionCount = 1 + Math.log2(_.max(resolutions.map(v => _.max(v)))); - const resolutionsLookUp = _.keyBy(resolutions, _.max); - const fallbackResolutionsLookUp = _.keyBy(fallbackDenseResolutions || [], _.max); - - return _.range(0, paddedResolutionCount).map(exp => { - const resPower = 2 ** exp; - // If the resolution does not exist, use either the given fallback resolution or an isotropic fallback - const fallback = fallbackResolutionsLookUp[resPower] || [resPower, resPower, resPower]; - return resolutionsLookUp[resPower] || fallback; - }); } function initializeSettings(initialUserSettings: Object, initialDatasetSettings: Object): void { diff --git a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js index 82ccaf69daa..5268420fe19 100644 --- a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js +++ b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js @@ -141,12 +141,12 @@ class DatasetSettings extends React.PureComponent { ); }; - getDeleteButton = (layerName: string) => ( + getDeleteButton = () => ( { - this.removeFallbackLayer(layerName); + this.removeFallbackLayer(); }} style={{ position: "absolute", @@ -158,7 +158,7 @@ class DatasetSettings extends React.PureComponent { ); - removeFallbackLayer = (layerName: string) => { + removeFallbackLayer = () => { Modal.confirm({ title: messages["tracing.confirm_remove_fallback_layer.title"], content: ( @@ -297,7 +297,7 @@ class DatasetSettings extends React.PureComponent { {hasHistogram ? this.getEditMinMaxButton(layerName, isInEditMode) : null} {this.getFindDataButton(layerName, isDisabled, isColorLayer)} {this.getReloadDataButton(layerName)} - {isFallbackLayer ? this.getDeleteButton(layerName) : null} + {isFallbackLayer ? 
this.getDeleteButton() : null} ); diff --git a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js index 59d7ef714e4..1b594be474f 100644 --- a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js +++ b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js @@ -2,8 +2,6 @@ * cube.spec.js * @flow */ -import _ from "lodash"; - import { tracing as skeletontracingServerObject } from "test/fixtures/skeletontracing_server_objects"; import sampleVoxelMapToResolution, { applyVoxelMap, From f5c6160b802578bd35fd049513690114c297d670 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 25 Sep 2020 16:30:57 +0200 Subject: [PATCH 062/121] fix initialization and volume annotation sampling spec --- .../oxalis/model/accessors/dataset_accessor.js | 4 ++-- frontend/javascripts/test/model/model.spec.js | 12 ------------ .../volumetracing/volume_annotation_sampling.spec.js | 12 ++++++------ 3 files changed, 8 insertions(+), 20 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 66c31414113..9263f17b476 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -188,12 +188,12 @@ function _getResolutions(dataset: APIDataset): Vector3[] { // In the long term, getResolutions should not be used anymore. // Instead, all the code should use the ResolutionInfo class which represents // exactly which resolutions exist. - const mostExtensiveResolutions = convertToDenseResolution(getMostExtensiveResolutions(dataset)); + const mostExtensiveResolutions = getMostExtensiveResolutions(dataset); if (!mostExtensiveResolutions) { return []; } - return mostExtensiveResolutions; + return convertToDenseResolution(mostExtensiveResolutions); } // _getResolutions itself is not very performance intensive, but other functions which rely diff --git a/frontend/javascripts/test/model/model.spec.js b/frontend/javascripts/test/model/model.spec.js index b1a382d4ff6..0aa85cb271d 100644 --- a/frontend/javascripts/test/model/model.spec.js +++ b/frontend/javascripts/test/model/model.spec.js @@ -73,18 +73,6 @@ test.beforeEach(t => { User.prototype.fetch.returns(Promise.resolve()); }); -// TODO: fix for Store-based model -// describe("Successful initialization", () => { -// it("should resolve", (done) => { -// model.fetch() -// .then(done) -// .catch((error) => { -// fail(error); -// done(); -// }); -// }); -// }); - test("Model Initialization: should throw a model.HANDLED_ERROR for missing data layers", t => { t.plan(1); const { model } = t.context; diff --git a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js index 1b594be474f..509bfa8c431 100644 --- a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js +++ b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js @@ -1,7 +1,5 @@ -/* - * cube.spec.js - * @flow - */ +// @flow +import { ResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; import { tracing as skeletontracingServerObject } from "test/fixtures/skeletontracing_server_objects"; import sampleVoxelMapToResolution, { applyVoxelMap, @@ -44,7 +42,9 @@ test.beforeEach(t => { const mockedLayer = { 
resolutions: [[1, 1, 1], [2, 2, 2], [4, 4, 4], [8, 8, 8], [16, 16, 16], [32, 32, 32]], }; - const cube = new Cube([1024, 1024, 1024], 3, "uint32", mockedLayer); + const resolutionInfo = new ResolutionInfo(mockedLayer.resolutions); + + const cube = new Cube([1024, 1024, 1024], resolutionInfo, "uint32", false); const pullQueue = { add: sinon.stub(), pull: sinon.stub(), @@ -467,7 +467,7 @@ test("A labeledVoxelMap should be applied correctly", t => { const addr = cube.getVoxelIndex([firstDim, secondDim, 5], 0); expectedBucketData[addr] = 1; }); - applyVoxelMap(labeledVoxelsMap, cube, 1, get3DAddress); + applyVoxelMap(labeledVoxelsMap, cube, 1, get3DAddress, 1, 2, true); const labeledBucketData = bucket.getOrCreateData(); for (let firstDim = 0; firstDim < Constants.BUCKET_WIDTH; firstDim++) { for (let secondDim = 0; secondDim < Constants.BUCKET_WIDTH; secondDim++) { From af7fc168e9b8ac5e03fb11f8701b6f9cc8278b53 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 25 Sep 2020 16:51:27 +0200 Subject: [PATCH 063/121] fix CI --- .../oxalis/model/sagas/volumetracing_saga.js | 3 +-- frontend/javascripts/test/model/binary/cube.spec.js | 4 +++- .../test/model/model_resolutions.spec.js | 13 +++++++------ .../test/sagas/volumetracing_saga.spec.js | 3 +++ 4 files changed, 14 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index d5eb8b18737..e789ffef6eb 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -126,12 +126,11 @@ export function* editVolumeLayerAsync(): Generator { numberOfSlices, ); if (activeTool === VolumeToolEnum.BRUSH) { - const currentResolution = yield* select(state => getCurrentResolution(state)); yield* call( labelWithIterator, currentLayer.getCircleVoxelIterator( startEditingAction.position, - currentResolution, + activeResolution, activeViewportBounding, ), contourTracingMode, diff --git a/frontend/javascripts/test/model/binary/cube.spec.js b/frontend/javascripts/test/model/binary/cube.spec.js index 102ee06bc56..ef7613e9977 100644 --- a/frontend/javascripts/test/model/binary/cube.spec.js +++ b/frontend/javascripts/test/model/binary/cube.spec.js @@ -10,6 +10,7 @@ import datasetServerObject from "test/fixtures/dataset_server_object"; import mockRequire from "mock-require"; import runAsync from "test/helpers/run-async"; import sinon from "sinon"; +import { ResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; mockRequire.stopAll(); @@ -54,7 +55,8 @@ test.beforeEach(t => { const mockedLayer = { resolutions: [[1, 1, 1], [2, 2, 2], [4, 4, 4], [8, 8, 8], [16, 16, 16], [32, 32, 32]], }; - const cube = new Cube([100, 100, 100], 3, "uint32", mockedLayer); + const resolutionInfo = new ResolutionInfo(mockedLayer.resolutions); + const cube = new Cube([100, 100, 100], resolutionInfo, "uint32", false); const pullQueue = { add: sinon.stub(), pull: sinon.stub(), diff --git a/frontend/javascripts/test/model/model_resolutions.spec.js b/frontend/javascripts/test/model/model_resolutions.spec.js index 8ebfce87460..54524feaf46 100644 --- a/frontend/javascripts/test/model/model_resolutions.spec.js +++ b/frontend/javascripts/test/model/model_resolutions.spec.js @@ -1,11 +1,11 @@ // @noflow import test from "ava"; +import { ensureMatchingLayerResolutions } from "oxalis/model_initialization"; import { - ensureDenseLayerResolutions, - ensureMatchingLayerResolutions, 
convertToDenseResolution, -} from "oxalis/model_initialization"; + getMostExtensiveResolutions, +} from "oxalis/model/accessors/dataset_accessor"; test("Simple convertToDenseResolution", t => { const denseResolutions = convertToDenseResolution([[2, 2, 1], [4, 4, 2]]); @@ -25,7 +25,6 @@ test("Complex convertToDenseResolution", t => { ], }, }; - ensureDenseLayerResolutions(dataset); ensureMatchingLayerResolutions(dataset); const expectedResolutions = [ [1, 1, 1], @@ -35,7 +34,9 @@ test("Complex convertToDenseResolution", t => { [16, 16, 2], [32, 32, 4], ]; + const mostExtensiveResolutions = convertToDenseResolution(getMostExtensiveResolutions(dataset)); + const densify = layer => convertToDenseResolution(layer.resolutions, mostExtensiveResolutions); - t.deepEqual(dataset.dataSource.dataLayers[0].resolutions, expectedResolutions); - t.deepEqual(dataset.dataSource.dataLayers[1].resolutions, expectedResolutions); + t.deepEqual(densify(dataset.dataSource.dataLayers[0]), expectedResolutions); + t.deepEqual(densify(dataset.dataSource.dataLayers[1]), expectedResolutions); }); diff --git a/frontend/javascripts/test/sagas/volumetracing_saga.spec.js b/frontend/javascripts/test/sagas/volumetracing_saga.spec.js index 486d90425a7..ac08299c140 100644 --- a/frontend/javascripts/test/sagas/volumetracing_saga.spec.js +++ b/frontend/javascripts/test/sagas/volumetracing_saga.spec.js @@ -129,6 +129,7 @@ test("VolumeTracingSaga should add values to volume layer (saga test)", t => { const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10); saga.next(volumeLayer); saga.next(OrthoViews.PLANE_XY); + saga.next([1, 1, 1]); saga.next(); saga.next({ addToLayerAction: addToLayerActionFn([1, 2, 3]) }); saga.next(OrthoViews.PLANE_XY); @@ -152,6 +153,7 @@ test("VolumeTracingSaga should finish a volume layer (saga test)", t => { const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10); saga.next(volumeLayer); saga.next(OrthoViews.PLANE_XY); + saga.next([1, 1, 1]); saga.next(); saga.next({ addToLayerAction: addToLayerActionFn([1, 2, 3]) }); saga.next(OrthoViews.PLANE_XY); @@ -175,6 +177,7 @@ test("VolumeTracingSaga should finish a volume layer in delete mode (saga test)" const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10); saga.next(volumeLayer); saga.next(OrthoViews.PLANE_XY); + saga.next([1, 1, 1]); saga.next(); saga.next({ addToLayerAction: addToLayerActionFn([1, 2, 3]) }); saga.next(OrthoViews.PLANE_XY); From f1f6eb97582ec1073e2780a1434acdabc5aaace9 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 25 Sep 2020 18:03:49 +0200 Subject: [PATCH 064/121] update snapshots --- .../annotations.e2e.js.md | 54 ++++++++++++++++++ .../annotations.e2e.js.snap | Bin 8850 -> 9115 bytes 2 files changed, 54 insertions(+) diff --git a/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md b/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md index 4fb38e7e940..69e4b1caedc 100644 --- a/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md @@ -1612,6 +1612,33 @@ Generated by [AVA](https://ava.li). 
elementClass: 'uint32', id: 'id', largestSegmentId: 0, + resolutions: [ + { + x: 1, + y: 1, + z: 1, + }, + { + x: 2, + y: 2, + z: 2, + }, + { + x: 4, + y: 4, + z: 4, + }, + { + x: 8, + y: 8, + z: 8, + }, + { + x: 16, + y: 16, + z: 16, + }, + ], userBoundingBoxes: [], version: 0, zoomLevel: 1, @@ -1646,6 +1673,33 @@ Generated by [AVA](https://ava.li). elementClass: 'uint32', id: 'id', largestSegmentId: 0, + resolutions: [ + { + x: 1, + y: 1, + z: 1, + }, + { + x: 2, + y: 2, + z: 2, + }, + { + x: 4, + y: 4, + z: 4, + }, + { + x: 8, + y: 8, + z: 8, + }, + { + x: 16, + y: 16, + z: 16, + }, + ], userBoundingBoxes: [], version: 0, zoomLevel: 1, diff --git a/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap b/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap index cc84ad7e511104e87d85f66df4744fd95bfc3c51..97889ecbd8ed76a1cdc2ebe898679d9570dbf5a5 100644 GIT binary patch literal 9115 zcmYj$Wl)?=(>1n>yGxL;xU;xB1b25?+&u)BkO09#uqC(!hhRY$UtEHQz~TfCuHof= zeth*^Kc>2By62pkt7^LEw5*{ly`G1Sm#>qjAAJBn777y0%;3@F={c1OSfr}em}-kI z0R!oOAL?-)W{}?C$}o$@Y*~m}J zXaEI$GJuqn$<@|dLi3cWwu1OWe{yn}&iJT(e?>I%?q|49PG>sQFOuNx;l(3uzvy!? z*wF{F8@dj5Trq+flZPehQAZ*4)~O=x=#i@ z@!z5le|$K65bg@}%x~;jngwmWN$Lx`O#P53$=6_ea2?-bv7Q(bJd#>O&GG^~>@k(VXB3SX8qwf7TvNX(P%081iY2w^TYok z_-r;G|5TMrn>;kiG(p*_JE@^HQ>ofCclLS%RV^p{TDq>h{AHblFl%*q-w{{oG%JyJ zJQpwoHjD_I#>EvS+3z*4CoL#ka!w)WGiGN+(}PLWV;v!RMoP<=G1>j0mnEdC!+2#W zVKBW+g7wGF+?+;F0j#K279Y!8(k=%lIS5iLv2>jy!)nR>P8xM+{O+x3^5xI(Un~EJ zD&9d7G_qhMd{~3_#({78jtAW!3BAk2)nW;Svk=}~=rBd9LS_~hscEjp^m;+_mtbkt zk1-UN9(2deCR>KN(^xQ5X5-`Gd@u1@n0Hl+zQmS|rk>3LwJJ{3S89Kfz zGu3A1e$JX-rR=tG9j`d4JnLpyJOn>BVi@KQ77%NRA5fS2RMi>OFXNQynp!o_mo>kU zPTqYR4UahV|1K}xdLEG1e`Md<&z{@z)B6%{s(8WYVP?xix!;Yf2G5{AKVGk6el@u7 zF!@A6ctzWnxSfvCr_>!%s-G}ZYkC>*z;BRYP@kepI1j8UT(TBef~a!w4cHTNGS}!F z8Q9G8@}j#T@kH@Ny#CxUSy*Be6LgK0v{fxOWjiye&uWoxYiFm}#IU9FNS29NuA-=L zz>lGhpglvojSq8Vz0-`n7gqU);?Boyj<4FUlFC~*@0W*L4$R=Nwia9>-Zfdq&|Pl3 zVYo$m#H*d=6~fSuet>a%(Zs}n1DJTDORd2{q)<$j=EUs-!dJ+c`^2&(m{5J}H*CgA zr4y9`N1`g0z`b75)Wj=PQ!ix@+e#Gmg`D%$JXO-}p9Q`QrKSZww3vZe@fGU{;aqiI z+ZGWLZv{e+dduXm)~tee5>^#WPVPVWPxpSmCsS;jsh*=68#kmrW;Y-!KJ-oC&#h@R zx;aT}G4e1qrP5R2jg4Y^P5SXF^%@gsR9%#5*Rv^+hqqODgZ-(61DI2uYfm*;>zydI z#xq(-p;sA*YsBDPdtdOrhvttEbSWdMySI8TZZF0*VWEVB+D0hLZ9(GwWJuC$b~Lq0 zYr>?UI_;==^TG*XD!(0yj}>eJ<`{YkAKq44uI);7$Q4#lmW~06t--lRnL~oQ4eg^P z!>zcyQ}uLuO2mp;ldlUxFE7_Gxtr;RB=`pOZLe+MFE>YaTezQRpsZ(o60aF&INC;usaiPaLnIjQeR{@>RQ?6aOV>-aNH>SxW#XIxF&<6Kc(lArtj(^N2XlQSf?ee|Gf&o$9>%2VfM^W#m= zG%TxOc+Iy>rpLsg2~I;R3hB!V8Z58pwN;c*7>y~8Mdw{c^s7wqVa)Z|M~s`S>|yv( z$*@HP4xut}13zb6gVVHR+)t6XIzA9%BqeA0xiLHbIv!lp3(Lutatz1_lRM+Q<^Pfu zztv6nY{w;wwNJGd%)}*G3tEv14*uk_@Ga+qDh;y&Ip3hQ23zW*W#X!;n2)k`F9$d? 
z=lF!4vax@BP^terJIueRj3a{rx70z1hirp5#JKV;e7D#+EYUC}%e9{zq33M2;cnN#A*!eyinz+1X-_!mLo(xZn=_o6bb4|64-jxg1 zF`YUZFG7w}ZPIatlUFJKQa^f>FIi!_8lQl_;h7xKoZH*pCflAEVS~fcGOhy!sF4I(V%8Fi$eHqu zL`NueI>E15q|q^5hm840T$Q%pK+PjcXgGo4<>!Xb=i{?~YtCfmYx$W{ppbGU>gwXZ zU~(r7(|46ORKe=4lO09Fv3Kmtx3D_{%E*py>JLe0EMa;MD0as(0a1G*>*IFNo@vJG zSe5c5GzWyCmvj(GC-P+Eg6hyLw~Ls&u6F61;hjqPG1d>i71?D!Q-!=a=fdFY4|A6H zVyE!$Z!FWDTSGL;UR`9UCt%w1bF~)21*mbxQ4V1j2wyK+KNn+ucg~ONM?BevD3OG# zxB-#)k1TUxvD^65RKEV$0q=?^ys!WbayR zd{s~I_btB`WSTpYcPmJDULRwgTLB7W3xS-{oLhRC=7e6RqS&&pGj8lnZxhYGS$T~U^Zu5xBXW9T%5UMS(STvQTh3N)m;j{`9!P<% zSY;Y$)&y=%R%Vw#SMGD5E4hNA&Fh277;?X&8LYoOLsyL+NYhm7agM6pHIMttpr*xRm9Ze+GUQjX`D5JWyWI-zTtO(E=w}*7RMXEzyFW*x z*~?qzl{MY1rgwgJf0?Lk_COFdqnw&F2fs5+Qou2FZGZTD6<_yr1^rvXSJMO~YX<@G zGMxt34YR2#sw}(1Ql)Stv3deGX1|}=s!rMBrU_HIV2nxdCpMqD@vs_o!^<_?6KlsrW$@wlK8V;lhYk7G^b+J(2 zEB@${nRj~!#s7zXRE#g(=Jx4;L!}$Ap>q~c{0Q+-lHZ zqD3IrV|SuXMptbx+;mkmBrsIiW==@iW|Bv72Z(Z0cYp+)Bzv__Q^#ujlgwN0KANv} zfKtcuag)q_2nsI&ru9f}C_ItrxC!Q_jEyRP$=l6QWvM6t-(XbFsbkU%!N@eoREfkE zkILM!De6cUy-H_|&m%h`pmLe-JtL)Hv`Xvh=Me`X&;?BBC15y(26R})@Yqq9 zs)nG;v8}(?#Ijgfh~buZmCH1_6~T}9eCpbl_7lZ}vlYnF_|!a4JD`E=B1x+~uq;qW zethpFeeQX6giIH~8^hAoD`E|F?X^5ZjVJ6U;>FqTRp%MY67Lh8yrOIQUJ<^eYd%%4 zNa#vbR^Iliht+2Mt*4E8e>7>VQgvrgLATBr?Q+GAnS*@Mm+Q6SFFO87rjUn^rY)9f z6+6FwD03Aue&by`IQr*JVw(=3$4*3@gSphV%JL=LOSAsNcKWbXl=*M~$rL3eH(SMm zA6qj95x-MT@bb+UBa~NY^mX^eG(HlJ(vd=eY9?~ykNCnh}YXYAl)~0gyhUh+_x!>}2usu3qNmF^>{`G2WJ(Is;zzYY40UAL1 z7^U8dju^VKLKHFsUtP6%Y@M{}V}YCLG>%U7l5$wMQiH?A2(9VJIN%JlP8x%VubvfE ztrBX5{6GQHioAhQN=GW|IeEe4qxX7Q5ct!tY7uiAOUFf( za@VFHIF`iOH(lhl{$9t6@XQ3^-1fvtbrQ6SivtG8`R95uv@s0@ISE7}Ha#Ox6~Slw zO^@g(TlCpDkZ3LhX?SFWbpA6s1%Xwp`t{z0qr6uh_{-VW1y7`X>_cu!cj!Ojlt*-P z%=~Z)-Uz128~WUL{|YYe@SUQ@_HK4<9-x*vt4G#;P2n>^{!QY&T_-p|5hH`qAIpXq zkSGA+LoVZL?#%3|gK_>`jyTNuC_8O^(GDBYZW}6Nl$My=zZ)dq_86A&@uKJSYd`zm zjp&jj`?_TN7G4H3c2$vn%KT-}M*9|0HU(~iI_?IC2hKR83l)vM4qSDf3N1d^|8Vgz2t(p)3$67JK%+Ls{tC>GZ2Bjz_<1XXsI{Z-iRoEw-VRG{`|UW*+Xdz14^h%D z<%-)~7ES5<;X(tiZ2tX_#70yTs17#ys0#;)s-o{?E8Ac`PR8=dnjZ$`M&3__L}eaZ zPrW5OGVi*-1OugP1#}czdKmALtK#hO2yQ6eCXTjOs9cGGP9umTDjy}OFiDMWn^BFT zai+;}bl~^o{3kKw$`&=`?CuWT_cl)1^gnQG=NK&IAig`ig)w5W2Y-N}N%JWPI`X57 z?3EK#F70-jVheUS-@V+9SK{ke4{_q56=9po8Bc+N8^vuk)4KK9Lo0p=gNpxh$?f=E zgof0mf#Q3yK99eHWwHXF7QB4Cl|3c1unl!Y$}8}YLSbQUzBW0v8Yu#BD?!AYpErKIsZA^ zEvbSXk`}7)oqtBrM2dc7NFImkPu1a%X%g0Gfnw;Ft-bj4ai12bPQGzxYpq%^3HPtd zO)adkSeBh16L<799PARG8d>P)8=p6JYXdSf=gqlpsJG{uGH1scD;zqj-47Cu94ZF} zFY6-h5L(Z^4n#oZd{Q`1E9j}+4qP;=TpX!IbhNiP^*y;q_oyv zBZNf81$J=yYXVwPa?Zp86XmdUYE%%Uoyc_DZPJxYYFpN`Q9nK5)~{)wGx7|f zX1cSC7P&QQ#6G^Wv*wE0?SK5uzw}QqlH2o5hBhX6TRKnMLYq8Ff0J#2-u^b}>{-kk z6uvcmfAKbSal*q#G}LXt2vUg~PVl*qHT6)M@9fc$OB zws|N|+12iyRZ!CmWl21?$ABRCuK-kg(R)L~lVSu_Od8vBb64&qvm@6b&oF4jnJrVR zBVohlWLDp^_T3Gbn+w&UVpp~f^DLNP!{s&iwAbeJ*f)O1MxTUrN<4b$(X$~RLc^m^ zahyou##Z;nVv*DhISW_jIsTG%IW6d|$>}G*qO0T{@`J!W5_$JLt80i>_z7-z^w*lX zHAJE4N_8SexqK~j0xDc(O0c8*8Huz zM*?iX)AM^mFBH$TY;B{!;lTT0zgZG+Fem8byrf>28(Y<~dA1Hi_7WKN1@3%_dji6& z=*6hOEXzx|9?gZSvGk}oNX79YY1|*V;tIw?8GZ8Mit~ml|L`o)*R@4kFF7Ox;`qFi z{~p5B&v=*6!vK{HiAjYW5f>&!cPX4G9KAn`r#PHxQ%Jp&AdIKqt zvm5P(zd}3{YWA+zOeDbvja{uxlY^hc97)6v)x%#~{R(-Y`TOK{`s7^AkxXsb)D@^X zalKZ0YmEFQf(54R9d_{zC(@2}1d#scdX4M$9|rvOnU+_~sF}}s3MA24##39N^+t1h z74n|A716k|*3oj)r)c&r=ktz#-5ritti^YxN-6f~CLz*_C*Vf%Mr{yQCw8~kw+43SEUkV6+v$t?u=#n&wuzcdR1p zi9Hc*%}asl0W9q;`g}FEIu%>(n7OQH;(XfBt{O6x`c>hd#h)Bln4v1zxGqP~@4{d< zeZ=TxQ1N!KP}>V}?RQ>DWUe70++>fVoHY7H3)mEQ;rz_!-6hlF``qauU@;WjLHLVd z8FLM275nkk@4uihv)KC&~PXU-qa*| zB}+?z&uJ~bg#Hg#Oo8`{e7dRF=jQ5$<gpu 
zcSvYa9;eqXxcbwnJ+6as#kK$VLpK8V#^$orsVzQBOb|L-vvF>>O;yV}Qynkott$W+YCG0h?z(LE8aUl#Xnz)WuY!wnLN`3JjjGOis z5Zb-{^&Y*Be*@;s6u?UX^RhcueBXwjW4Y0rt&ycKVjl)=QhI{Ez{mP>h9JC7tTlAy z-Zy6nNBNx>)o{VfzRBYHTZOh)`mK7u*MpAm%kZrUHmJKoO;#SN5b&bwBn>?(aW@5-(64 zd1t%MC1V!5D$W|O4YeBSuve%)fuuU+gZ?h~twV^;J(Cy4BT7*Y@!q)*v#&D)b_Hyj z!v;Z>f_WcF4a?f4HNa`l`+}8=qKH|6)&zTH4KP+#UE49??<37}b3MT_mvTA5Fz=K= zGjIwOY5!dZ!_pO4r(1WniuBQ-ZXaz7*VaQ8T_ig)^%?_E?wbv<(UuA3h^l&E^Vx2(_Q{XQhBsSQd#{Y z+OHJwipJ=hPKDv-mgyPjwjg?a5nW6ET-?bUFf_;USldL24!7scRlV~U!k1}8J}TAq z$V)m0t8^pnRSHC=GY8sNGmadpluU>^|0k@-l&;Dr#lNRt2I3(FFxkQB@zqh(VA8!+ zMx)YQ5!wHh_XB>mZoJdcgD)>CIw-Kgh!9*O3jWOiN3rYuZ7m)|4MnRF2QQ!}9;=Uo zd?DW$vQZZz@nKp8JM)R9$Xv!q3sxHAC0>wgxpN{B;7WiEC%HOs4%UzGKXV|rEn{?4 z^2dFo^1_{t41N)@@!sKYcJMDWbMX0+-W21-+JkuYoP{-i;G{kDYO)_szFCjkmOle$ zlzGqIx>Bm4nJf}H<7=8?z|nZ~ROi|kOfcc5XE?Wjf$~l2UN2sJH?*lTB5*b|TR!EH zI>yhgT9^WVH^KmcB!c<*_lIn^Np)qF9}x@)p?c&_G`#36fmK<=kbrF&xg&%fl<}g^ zQ{7@IT*{!M+V%R?7oqd{zH)@HfI48Z1eue@>=c-zFtIb5Tamo1KU3LvxM@8y9XLZn z$kfKD0g-kxd2r(gbzA2A2b~95evJJJKUS7JeE947Sk?F5vzhe4xbHMd0Z{$CL1WN< znqrBt_be=hB*KUN)nxNzUJ56=crRwb$p=rQyI!DSRkpyBn-Ba6?28S0>m|N~Ed>aVEU|2B zl@&E!O32C{$zDc*2b=-5uOQW^6lfoNC9TPWnuPhl+29KXqg7dSoC$oc* z(r^*MXVgx71g%J{XsyvTh}g)1an8wc9hGjB!e|2F=vD1u8GQ4Q0y#H5+!Hxig?g*fKxzJYh?go-4k9;zU-rQk;S-chkQB!tMXb?lbR1V z=a02u5c)gxP0t8N6c1o)l%OzyxLT07miz^c2nx~r-e%xdB)|>j0k$U}pVtvAzE`NT zK4Q$`g`I^f6d{SACJ_AGP!&fc&JiTOC=bS=fRWYXzXL4su5J|&=;JU3oa`NQM(;|U ziQJOKkRvRSqG32Myo(~1OeTKaA?0YnL{q$p?HD6)S8>0hTScsUz7C+hydr3vI1`hO z(H~{1H9$>V;TL{b_HWff)tq_{EqeOwcRKd3dY3(6ve~}wRVF6+`tQKw@vr9C*ziWO zm4!YzGZSog7n{PQ>ez+m`f1YzIgZ6*lBwpvLEx|69CFy5d+MA-DZ{arkI?w zbJ6W*v_PH02Mc|+nzp~4+zE!BUfjHfv1~b4`>AVOjO5P_+uPUU-UKz2YaMk@5hmQq2fpLe31Ae%M0hC>ew&M^)k<2o6BR#UJNUfoA1+|WMnyG zrYdcm9wUm8oecTkZg44&CW0hVCYIjm+qn-t>L9x*sTI2qy+>J2JQY}hNYITyjtrNK zbG?0RUND@ya&^LLJ(D^h$3sF3VHUhX z5fdWiGeTR}GD2JBFGtfq45+8{9THrCaN^p7YhAuiv`ME-+^YSLfBCP~;K{24Q)BHh zQ|>3?SvFUO5;C&3CR#G?Wa`GPTcov=*ez<;9!r7|$h*#$)f;4l?D}F} z{=gkE%2MLO3+vqf&PjL$qOXZ#4Th*!8-d??C={eFUqt*WHGJIS20c4MX~_a-2~>^)KG&p2=s9ii-opKygRCKfuJH^>CN z>lK~EELOPixAoMWYCCzNcWf$QXdA7(3xCO8_vEDC#DQKCn1TM+LHK_i6$van?O-j< zf1;paSn%|>_z_=`U?8a78pK+DZm!vllqUYt?8+Ps@jZ`Dp#hf59+A90?T3rYr(pq6 zxiW5cwwoBT@oABg^@61}{{ca#QF<~pOy?4sYqV*a2r=GIqv&2fV32{eXi&^3y3hIl zu5{$E@^e|JL?J~F^9T}PP(kWrdZwH%GFcR^zU2tMTFMas9hR&w6_>7jK#_oMA=CY7 zWca+F2xR*5-Yhx)3w?(U(u!*BUDTI~r29gzb4clm?{lRf^&y>HjvrYaRjuL?5@sB= zO?XBmqQ2AWbq-E>#5Bc7^e2z+jDHDWLn^hF%5W9EGgYBQ3F&>%w;X=~eZ+XJ=Eayv z(4L)s6KzkBg;S;%%p}bz`_gnms?%wYO{a_A25EohUnu)LQuA1k_J*4kja1)q5Kb=J zh~tJlr57yt;_X8LMgHrcAQhb+7+Zo)f#RnxmHq;mjlU4ZQu$7ZC7e199~MYIF0<`> zYlE4lSwNR1v(xJ1!vO-ruI%d?hJ$x!mZGDq`}d+u_MvJ%PAe)+@Za!-}SI*X{C8w|X=^-nOX z;qqVT;jh*UJ=K6Vue7FxIiu=7C3Z^_#`s_Ut~~~n9YrP7+ zIYd+zpN74X@OL{{)OZrhI#~Ky#}m+FNha`LP;~jH?g#&~pEH3ycwhhj@E5P#dw<^f VhDxi&mQT<4p*Pghd4G|R{tpKFS3Lj# literal 8850 zcma)Bg;$i%_Xd_-nnjUrL^^f}=|)6ax>@P&Zg!*-|bLKrW_j&G}Gk2c%j-rtwtG>r8FJC85Kh^+YVmzGH`p*;Ilc<~SLK7mK zsIDI@NJ58HCQINjj(y&gu;2wEKR)wyD%4a3xrRMB6@lpKt*8I*s%dwR&UQ9Pj zvzBtRmNs)k66=I_S*sD%ol$QNF#UK^2=4gAvg-Kiu)F-Nr@=Z|f}*b@wt^$Tdwq9j zYrZ;1rfXLPq0>4;CLH7a_TOgY>vmfL%!U@PzAyfck%*6coF@`+jJb1vvD8&J9DD9L z$IOr(uIV7%ed993X_}11YVUkl|0m%RIz7}IL15Qj7DJavHqg-1{zZAL2^cEeZ*uoh zpTWMpteyZu{ZgWJMqK2!CR&A^D;^OjiX)5xzSEXU@-tNAzX#*C5_;hU7X zhr54%Co=V&4Dsc!_cZA26}y?1{xHay2=E)`7%D0-kPENscJJu0v1azEJEOl$$T1{y%L6Aw)*`kJa0 z@Y|_;^HX%q94$|2YHHG?9<&q41Y=Q6s~-s_fCc75P8Q>1Drw7JW-{zWTg}f4@nuq?nDlJ=f`EBx}nPGqXbnup4cq z`3@7MSa)x%(1u?qZ-yj}P|Yn;=?M-=_Tb8-^hJ)jX%n9W|anKRZ!P 
zs3s}a8NM@wK)`PJLeY@`Q$kxyCAIq0p7}SBzov#oi*>mI`V#VkpFhVtY*D6SsjGU99+jq>#_`?uX`$n%8pT0%S`)x!Ot+(v zmN3I#Is9TcNr^43Ekc1R2k={7x+T5I9m{-b(@u2xEUmg0HD5h%rUT;4H>_sX z>|5<~@QYEx$3D=Szw>=+d}`ttq5H`?*~?kmO2h&1f#86`9mbk* zplbGkan^cNjN>CdsdeR~{)mUGyT^UgA%#-s_nP?ltwl&3u~>{-PVLn{)}ron zS`28nsUG|2ZKb7CG;P$CPrjSxbsDRcR>i)QVC$Q+ zWD#r=n{Hb!v}9W-Vw7aclqlu{gY*7=F~}v&oXz&d?tR;?aJ83dE#rM>QI`FX^G$fb zt2G>wY3DbjN?mK!Eq?QPW6qyU>cq`g`e!IY)#brp_u{ie&W&F_`W1t|U%(%x?3{N!0db$#calxVbAJezqJ)m6yBvih~N@fJ>nRf3-0Q0$r{^FR(Iq4%x2TPmS(}96#C7F&uP3~ z6=Cec3tfywl#h~&QPG`?@B()d_TL-0O=V}8$(nr5S@@!UvF@{XEcN76S3}uvx8hu{ z&)DfrYwLSMXVOe~Eg7)wU9|qotyC|2`fYyc;TvRYA;geSby%|POGHlOu=bqrfWcJO zld=T?rDh`tE&phxx@+8$!KO+}6lbXgr1UW=zShKA&-ue;HKUgQq{*G!!=k)XpCr>$ zGBZN^m>Ss@5fXxZK21=Pr1b=|fJoCR_^nU&f`~X5S0DFdX~XRFbn}e#+`J;*M+d@4 z=T~n=E%K=&!vSVtVE68Sd+1Ir7FULjA2ViQKBU{^2`pPrb$-xR4MXJe^T=aSk`LdCOVDYiYBDxT^m0q4qMR-;dUm@YtE}ZRAxk zn~PG~=KK9c2Q zniE)_V~i_AV8~v=zl0~p!PN%9r1^Kv5mN82fN)lZbLKdZSwP%<^&tW|aA)gK@=CE3 zo4U+_8E^T~t-LcT&~g;#a&-p?O@I&cdWUG8H+6s!l-DXMAW$Ol4CXCCR-DrzRV z+NfzBPV(_q&h>Z?e^1oYbXWG*Jgnc(SpxokTrhPBBg{FHue`+J07v&>eD9!b$kgh* zR~WFz35$L{`3s;4dBD7A<>f1V&H8VH-#5R|kpK!6iunX9x4Tu`nSvhn5_mlc5TKnZ zfwnc5ys2?Nd#Eo>;RkK=3>*4xHt(CT_Q~Ao5MYmpn=8eb z&}m+J7}S|{?mx4awRP(=s0J&LP$TLwR9GU$Vv(%{Qql zs8wQF#hHDV@=&ek)E1MdyOp(guVBI$ultzPwQp5(7 z)<#ZN;G`wMa2N2cE|0qkqBce2Pj|ZfqFI57XUAO=F?MVVN9(>f_-VE!z}SIslnR6f zTR-aS9e@o59Cdy7(7rQC-RYn;~~W%qzX07?^*OT=BY(>DR!Pf^qpmyL;Cl%V0R+tsY=?jC_>{(k0m zP}Nrs&Evm8-nLJJd~ElR>qb6)oJB9z z=>_~XJV&R8ACC2(lq9J6)VT|SG_WC%H%zI&@Q!2u9Pv_>PM!jsw8BZ(2KSB^1WFRs zXA7j8nbWi|l*4~^IHp*3v30>7*dw*6ZWiDCl@!javIjjZDX&9)+Fp@lva zJcwEst4V$IOqSK;Aj>_#b(izOqYcLG4RG)1Q1{8as94~wT zr7}IZt1bti#NXQ>>{fFpZe+vp{r*~AbY|^O%_UZa6E}88B%|0kA{oMj^3d^e;{%T& z%xN_+!3AQgiygvihO;{`=(90fIyUL1g>CoXA1z8U*nQrwRhr{+UA6d#m>)1vy~KlwLbauD>K>9SVxN5;D*lOjy!?! 
z)4?U{fO8I0$*X|RPFQ)|I($#Kelj=K5Ek|Lq5SZJgewt0qZv%h#;Tq71md?zaLhdA z1*7kA`c2658xGlnEhc;LrycxYf4w$Z7CN`5hDHlQd2E68mlBThG9S0R3Kkxc)kRd~ zAJ{9^;!3p8)=I2xkLFpj*&*Yu}v zE4V#&4CKDXOx>vdF&O_P)WPR1e?H;X;J7XEmHz~LR64^cpi=ehsPj|Nf$Hqrjgz}a z5Qbm4>$qaf#jXmt)i|yw5e(%W75m3am4d+FTMO%x0gJXRsjh}4;oPDNJ#%(w7B@}e zT$o?t^k;9iC$SoI=S-@_NhWoQh#%>LW~>?U+l-!iZ>(vAeFFhToVj0~EXMN55U=4> zMHQ$LUK3}Mj?N|<_i(BuI$UkPtnrqU3}zv{(+BZogm@0|Mt{tENEd6evE?4}b&cW%^n!zW3`8&;2&A&~lULr0J( zmg7`-U=qjEFL#OCiSv`*kIbPMRA@^!x^K$AIYky~;i7J16yWmG!`s`eo6|g>cXPYE zQYxL*SHx9vYfQ{E+phE?qL{M2Va~PmqF2KuJ7Eq#rDurzq|^16s;6(A=Xd|)%;ohd ze={+gL`zv++otu8lNW^sR9t$Y@C2Bk2|T4 zGO!CKe1{MEnR3WK=pj#wL(dzGxf@pKR{ywxGyCPeqZ4Jf3eMh18Ue$f$ ziQBe7&WbvQ25-9!KXw1-;>u;&5n&ihDsf}<8Gq@*x;xz3^Q`OmKrp1;=x=}Qrc<+U zb6Dqjf~ll4ootJk3RE_a3qU&SCD0UbdujHC`-x~Yt``1f9XK1o|7r- z+MLCIdF|r8rq;`VNq%=Tbu$-j5A{mz{PeW^vM<`Bs`g(dM!yPtrS=KeJVzRPwo+n~ zam^~f$|;P0|MB~qa|p4Yrg4Mw-6FF%+|c9GkLPH%6Z`T|@R-)$J7RP(D=)Du*J5jC zziPc^G%J7wyf;@*w9>dJ7jN$O z%zf2t#_=7Rn)-=D@XB)uM8GSu_LUJ0P{Rw(&4q z%m;Z{PyUAaei}v?V;Ns}$4fy&@U+hGf`+DM&$O zs7JbZ6U47t<~?a~m3G8L{=v|Nd2fTtpNas>zxNRrC>j8jexl{`Jd> zQl8g+J>lB*K!E9(BeCR_2z_f>&6m^(pWgP99NV#s7)TaQGgIhefvO+E9<8-&M57*F zv|&u_Mt*+jFpg@kUZn~5<)uuKGb)aCcq3$J31I;P1xmJC$`T1q&*OMtCgMGb0`Y=j zco0M(2Im^QTjIbU2Wo3A$n{Ru#PyBp{ACm-e#Ga6+Zx>=Lo~sPD=5QSS z-(xZaf#R}b#^}`tTTPhDXO^e%zmR>OM+H*dM!+gBo=&WjHPD#$unBX7?B4`lrR)(cLK*2i!a z%R6osrMvDWS*qB0t9CN&5_wb>g@{{=2Qc+D*>H$Q@v?$&vZBSjk8U*HiMJEMpEC}$ z^s@AV)&i60B_29lcQ&EVhqT##UwL2VC<%|egT^p~uP#u8*gk$6#+b8xJ-f}@n0lQh z{*A#Lgbs(vE|6bF{lkc*d6lOy%zR@AWl4|+%H#R_4oN-b66d_`Zn6on?Mn^A=_E=7 z&M45`3}!qGu{ETa00c&5{x>`I^_HAWW=!JRVnn$dVRBTlf*px)XK&I?r`Qd<3uKd* zwv*Zpev9FRw8xlJZ*|NVyqhWE22sHTI{|h02QS8YR`uT(CXZfHYY%UgQ3nhUe&VRs z+^|pO<2(H+oEaqP5#V3(>q9#4S}x*y(5QbNqfnQ2;Af2wGeM*DWJnmIl zdP{o(6|Fty>dukw&N|4Hf0VSoi9CRisz442-yUhjvBS6cJRR>6^gL!LM+>#J{3=(| zxGQ7}xhu3hpHvjCs&~)Om|GS}ler&GxQDp%bm*IFbXiWDjU7_fj}|hgPX~BBt`KW| zJ1zH3RsF9y!=Copl2wtk<@8+cGS`xNm+YL;n0ruK#_|M*PgTr|udT~z<*knQZ|-4v ztMBrQz>t#Ft*)7q%daaAZP`Ax&O%!O!J#}If0xpwW!WmUUSv%hacVeUacMXkGL=g+ zt)U~T?hkCWBy>trV#=|7f^BL6AY%+mum=GS(Q7j!CCq!Pj6CPyxq|@)mu`QC5;|5UKzF-Awg2qwuC|Bc~ zqQ0oy|A|_aKnFL-RP+-VWe^LN)aC=HgPE^Gh(a202p)>k_M$DotM7r#5hNZcO>lTS zyl2fNm_vZkDJ5HCi4}NcCiqV+lqWcR9WIr9C>ROcdxWz{D&e`92PdHG_k7Tq1d*~{ z>J54?fhjl;MiRsv2L9O%ovUYn`ifmukjL0Pg1mGN)b3_X`-}GAJw(3%E1$ASXjIe` zh#pA|Jye+USIHZCLBg6#=%D8;Wh_<8YD!Q9l#lLwWPzKALxSQwu^@(~kLF6^jYdZG zO566K@gd+kP#lVmw1?Ma5VKv1Kh)!3Sr8LI5K@l4vhC?5Ulu3vjE1W*K2A+?^>XY%JRxzet zj|#>)9frId9{Nmy5d9^@E8t6bV6+U44#jS^#zf-I#0p*l`3m_ZViHx%+C)?2t_z~R zVt3&5c4${>rtBrk=%YKp0Yxi$2`vED_O&K7wSPvs z6LEGA7D$t$&RA>YZ5K-(t&p{!b2%gyx))x1c#BZ#|Z~q=-N0w@!SD9#o_`Mh{>ZGa2aA986QQRzWV$X&|A1QwE`}}S&u&X z5k!rGK}clqix{h;de$tDteLL!7hH55e)e&&BeSsQh;tUDl%_-C?h1QX zf8V&k-X-af)Zh2o?wbOAo+H=eH|KK)wbqqeR~jDG^z1QHr<;|~ZcDiVJ36BU)> z%~w?`nR#zSSUV(XADh7HNHfJWG&GKj-XbIEB)eoqe1%1Pc0WFCfkEkb>t~TRc9&0X zKG2ASTA?V7Drg*xawg{XGmI6WN91F&UOL$?_YOq;FwD%!6hLy`p}s2cc^-uqZoj&!grqld`Z1$) z#`iP2S0oj@DSv=f0EI$}2<@Qlw5ujaXrvzw^4p3N^7jt~>J9083?Wc6Z4Zs;oK1ll z1&-5iBDF&8wY5W({#EBrxb# z*Ve&Kk<`I!(|drE-?;ok1;6hRH1JFlYZ=_3{SQZMur*%?cY7MDmRGv2F5c6d=-#xJ z)I|gTJnJ^s{f>g)0|t}vdpWN1ThM{+s)%sISV=fV=R*3p@2#rbTiv)1{Wc|oD|B0j z6b3cBr|ogSKK<)X$d^!_sKFd)jrQPf{{zLG6j^YmrAu39DKkGF1Ls>B@WZ`Q?=35} z%lp)Sbzh`UkJEPGC-%b)PEY71<0>Xi08N0d^n=Z{|GO#C@`8^od!OEycq<`U_{~)m z!J*o6cxg>2{G-xS@`5lC4r!DI#+UbaWOP!=$0|e1p5LDLt@Yq2fwC|oAx6kxEjPU( z+77TlvQ}{_Sb!DfkdHbsr#I=-OQXcF zI~l5NN*2ff(kvjv883qcxvcR=Eny-HI$M2`H^ZrnKzT%tv&vdrdSEmMkQ7AEdptb4 zqtr|xLs(RDdRu_iQdYa3OSsNCg7XO(A12Br6kX294`Qh4jq=Q5T?EU42vkz|Uw5@PPRUiQVic_Aj7W`3iFABzr 
zQ*((MNwr&?-*~pN`0PTP`A^hh5f2+v$HWT4m^i?Qa7;L_(9ip?5jYP9gHn+RE1yUI zDhm^80!3m*fYLXR%T8yB-Bk7hz1~j3^~E9jFS;e+-6 z5nJ8yiwZ^SpiSQoWY0g(_`v{NMU@Db4y4S}C_Rq4fsxIwPaXs+o;{Kv)qeSp*Qx;m zGr^IHS_73@e>os2$tC26NzOKGT`a!~Chf)RFZn|d{7=&97KdmnsC+yjf~kRp_zQOl z0kS%xZ@7bQRdso~s9&RdO603-xbYN~{(bKUoBz^`G4r8&Hf%iqy$|+{NGn9Gw3HD6r9d From ff1a4dee0262e0c9102fd4054d5da2364b88cabd Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 28 Sep 2020 16:03:37 +0200 Subject: [PATCH 065/121] tune warning text --- .../javascripts/oxalis/view/action-bar/volume_actions_view.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index 79f4f1fb73a..d8ec8d97948 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -49,7 +49,7 @@ class VolumeActionsView extends PureComponent { const { activeTool, activeResolution, isInMergerMode } = this.props; const hasResolutionWithHigherDimension = activeResolution.some(val => val > 1); const multiSliceAnnotationInfoIcon = hasResolutionWithHigherDimension ? ( - + ) : null; From 4074ea84aa314e0e854cc9aba01ab4492f66f06d Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 28 Sep 2020 17:01:07 +0200 Subject: [PATCH 066/121] make merger mode work in higher resolutions --- frontend/javascripts/oxalis/api/api_latest.js | 27 ++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/oxalis/api/api_latest.js b/frontend/javascripts/oxalis/api/api_latest.js index 68fdf8636d7..71fa0884c94 100644 --- a/frontend/javascripts/oxalis/api/api_latest.js +++ b/frontend/javascripts/oxalis/api/api_latest.js @@ -43,7 +43,11 @@ import { getTreeGroupsMap, } from "oxalis/model/accessors/skeletontracing_accessor"; import { getActiveCellId, getVolumeTool } from "oxalis/model/accessors/volumetracing_accessor"; -import { getLayerBoundaries, getLayerByName } from "oxalis/model/accessors/dataset_accessor"; +import { + getLayerBoundaries, + getLayerByName, + getResolutionInfo, +} from "oxalis/model/accessors/dataset_accessor"; import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; import { parseNml } from "oxalis/model/helpers/nml_helpers"; import { overwriteAction } from "oxalis/model/helpers/overwrite_action_middleware"; @@ -845,7 +849,11 @@ class DataApi { } /** - * Returns raw binary data for a given layer, position and zoom level. + * Returns raw binary data for a given layer, position and zoom level. If the zoom + * level is not provided, the first magnification will be used. If this + * magnification does not exist, the next existing magnification will be used. + * If the zoom level is provided and points to a not existent magnification, + * 0 will be returned. 
* * @example // Return the greyscale value for a bucket * const position = [123, 123, 123]; @@ -857,7 +865,20 @@ class DataApi { * @example // Get the segmentation id for a segmentation layer * const segmentId = await api.data.getDataValue("segmentation", position); */ - async getDataValue(layerName: string, position: Vector3, zoomStep: number = 0): Promise { + async getDataValue( + layerName: string, + position: Vector3, + _zoomStep: ?number = null, + ): Promise { + let zoomStep; + if (_zoomStep != null) { + zoomStep = _zoomStep; + } else { + const layer = getLayerByName(Store.getState().dataset, layerName); + const resolutionInfo = getResolutionInfo(layer.resolutions); + zoomStep = resolutionInfo.getClosestExistingIndex(0); + } + const cube = this.model.getCubeByLayerName(layerName); const pullQueue = this.model.getPullQueueByLayerName(layerName); const bucketAddress = cube.positionToZoomedAddress(position, zoomStep); From b293c2ac4d468122c2986479caf3e26a8c956f7f Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 29 Sep 2020 12:28:57 +0200 Subject: [PATCH 067/121] show a status indicator in the viewports if a layer cannot be rendered due to the zoom step --- frontend/javascripts/libs/react_helpers.js | 25 +++++++++++ .../model/accessors/accessor_helpers.js | 39 +++++++++++++++++ .../model/accessors/dataset_accessor.js | 19 ++++++++ .../oxalis/view/action_bar_view.js | 3 +- .../javascripts/oxalis/view/input_catcher.js | 2 + .../oxalis/view/viewport_status_indicator.js | 43 +++++++++++++++++++ package.json | 1 + yarn.lock | 10 ++++- 8 files changed, 140 insertions(+), 2 deletions(-) create mode 100644 frontend/javascripts/oxalis/model/accessors/accessor_helpers.js create mode 100644 frontend/javascripts/oxalis/view/viewport_status_indicator.js diff --git a/frontend/javascripts/libs/react_helpers.js b/frontend/javascripts/libs/react_helpers.js index d1cc95c927f..c10c951d76d 100644 --- a/frontend/javascripts/libs/react_helpers.js +++ b/frontend/javascripts/libs/react_helpers.js @@ -1,6 +1,9 @@ // @flow import { useState, useEffect, useRef } from "react"; +import { useStore } from "react-redux"; + +import type { OxalisState } from "oxalis/store"; // From https://overreacted.io/making-setinterval-declarative-with-react-hooks/ export function useInterval(callback: Function, delay: ?number) { @@ -42,4 +45,26 @@ export function useFetch( return value; } +/* + Instead of recomputing derived values on every store change, + this hook throttles such computations at a given interval. + This is done by checking whether the store changed in a polling + manner. + Only use this if your component doesn't need high frequency + updates. + */ +export function usePolledState(callback: OxalisState => void, interval: number = 1000) { + const store = useStore(); + const oldState = useRef(null); + + useInterval(() => { + const state = store.getState(); + if (oldState.current === state) { + return; + } + oldState.current = state; + callback(state); + }, interval); +} + export default {}; diff --git a/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js b/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js new file mode 100644 index 00000000000..71906fc36ee --- /dev/null +++ b/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js @@ -0,0 +1,39 @@ +// @flow +import _ from "lodash"; + +/* + Wraps a given function so that it returns the same instance on consecutive + calls if the new and old return value are equal (given a equality function). 
+ + Motivation: + Most of our selectors are memoized which ensures that passing the same input, + produces the same outputs. As a result, instances are shared and shallow + comparisons minimize potential re-renders. + However, some selectors depend on the entire store state since they derive + complex computations. Every small change to the store, will produce a new + output even if it's equal to the last output. + So, instead of carefully decomposing all selectors to ensure maximum + instance-reusages, the `reuseInstanceOnEquality` can be used. + + As a rule of thumb, this wrapper should be used for selectors which need + the entire store state. + */ +export function reuseInstanceOnEquality) => R>( + fn: F, + equalityFunction: (R, R) => boolean = _.isEqual, +): F { + let lastResult: R; + + // $FlowIgnore This function has the same interface as F. + return (...args: Array): R => { + const result = fn(...args); + if (result === lastResult || equalityFunction(result, lastResult)) { + return lastResult; + } else { + lastResult = result; + return lastResult; + } + }; +} + +export default {}; diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 9263f17b476..6113e5a755a 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -10,11 +10,13 @@ import type { APISegmentationLayer, ElementClass, } from "admin/api_flow_types"; +import { getRequestLogZoomStep } from "oxalis/model/accessors/flycam_accessor"; import type { Settings, DataLayerType, DatasetConfiguration, BoundingBoxObject, + OxalisState, } from "oxalis/store"; import ErrorHandling from "libs/error_handling"; import constants, { @@ -26,6 +28,7 @@ import constants, { import { aggregateBoundingBox } from "libs/utils"; import { formatExtentWithLength, formatNumberToLength } from "libs/format_utils"; import messages from "messages"; +import { reuseInstanceOnEquality } from "oxalis/model/accessors/accessor_helpers"; export type ResolutionsMap = Map; @@ -515,6 +518,22 @@ export function getEnabledLayers( }); } +function _getMissingLayersNames(state: OxalisState) { + const { dataset } = state; + const zoomStep = getRequestLogZoomStep(state); + + const missingLayerNames = getEnabledLayers(dataset, state.datasetConfiguration) + .map((layer: DataLayerType) => ({ + name: layer.category === "segmentation" ? "segmentation" : layer.name, + resolutionInfo: getResolutionInfo(layer.resolutions), + })) + .filter(({ resolutionInfo }) => !resolutionInfo.hasIndex(zoomStep)) + .map(({ name }) => name); + return missingLayerNames; +} + +export const getMissingLayersNames = reuseInstanceOnEquality(_getMissingLayersNames); + export function getThumbnailURL(dataset: APIDataset): string { const datasetName = dataset.name; const organizationName = dataset.owningOrganization; diff --git a/frontend/javascripts/oxalis/view/action_bar_view.js b/frontend/javascripts/oxalis/view/action_bar_view.js index 091e5b441c6..ec69cd69ecb 100644 --- a/frontend/javascripts/oxalis/view/action_bar_view.js +++ b/frontend/javascripts/oxalis/view/action_bar_view.js @@ -1,5 +1,5 @@ // @flow -import { Alert, Dropdown } from "antd"; +import { Alert, Icon, Dropdown } from "antd"; import { connect } from "react-redux"; import * as React from "react"; @@ -181,6 +181,7 @@ class ActionBarView extends React.PureComponent { )} {showVersionRestore ? 
VersionRestoreWarning : null} + {!isReadOnly && hasVolume && isVolumeSupported ? : null} {isArbitrarySupported ? : null} {isTraceMode ? null : this.renderStartTracingButton()} diff --git a/frontend/javascripts/oxalis/view/input_catcher.js b/frontend/javascripts/oxalis/view/input_catcher.js index aa8ecb92a5f..57a178976a6 100644 --- a/frontend/javascripts/oxalis/view/input_catcher.js +++ b/frontend/javascripts/oxalis/view/input_catcher.js @@ -5,6 +5,7 @@ import * as React from "react"; import { ArbitraryViewport, type Rect, type Viewport } from "oxalis/constants"; import { setInputCatcherRects } from "oxalis/model/actions/view_mode_actions"; import Scalebar from "oxalis/view/scalebar"; +import ViewportStatusIndicator from "oxalis/view/viewport_status_indicator"; import Store from "oxalis/store"; import makeRectRelativeToCanvas from "oxalis/view/layouting/layout_canvas_adapter"; @@ -89,6 +90,7 @@ class InputCatcher extends React.PureComponent { className={`inputcatcher ${viewportID}`} style={{ position: "relative" }} > + {this.props.displayScalebars && viewportID !== "arbitraryViewport" ? ( ) : null} diff --git a/frontend/javascripts/oxalis/view/viewport_status_indicator.js b/frontend/javascripts/oxalis/view/viewport_status_indicator.js new file mode 100644 index 00000000000..6eb020dc689 --- /dev/null +++ b/frontend/javascripts/oxalis/view/viewport_status_indicator.js @@ -0,0 +1,43 @@ +// @flow + +import * as React from "react"; + +import { Icon, Tooltip } from "antd"; +import { getMissingLayersNames } from "oxalis/model/accessors/dataset_accessor"; +import { usePolledState } from "libs/react_helpers"; + +const { useState } = React; + +export default function ViewportStatusIndicator() { + const [missingLayerNames, setMissingLayerNames] = useState([]); + usePolledState(state => { + const newMissingLayersNames = getMissingLayersNames(state); + setMissingLayerNames(newMissingLayersNames); + }); + + if (missingLayerNames.length === 0) { + return null; + } + const pluralS = missingLayerNames.length > 1 ? "s" : ""; + const pronounAndVerb = missingLayerNames.length > 1 ? "they don't" : "it doesn't"; + console.log("rerendering WarningIndicator"); + + return ( + + The layer{pluralS} {missingLayerNames.join(", ")} cannot be rendered because{" "} + {pronounAndVerb} exist in the current magnification. Please adjust the zoom level to + change the active magnification.{" "} +
+ } + > +
+ +
+ + ); +} diff --git a/package.json b/package.json index 5ab4d15d954..846839968a5 100644 --- a/package.json +++ b/package.json @@ -140,6 +140,7 @@ "autodll-webpack-plugin": "^0.4.2", "backbone-events-standalone": "^0.2.7", "base64-js": "^1.2.1", + "beautiful-react-hooks": "^0.30.5", "classnames": "^2.2.5", "clipboard-js": "^0.2.0", "comlink": "^4.3.0", diff --git a/yarn.lock b/yarn.lock index f9389f136f1..009341448f6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2290,6 +2290,14 @@ bcrypt-pbkdf@^1.0.0: dependencies: tweetnacl "^0.14.3" +beautiful-react-hooks@^0.30.5: + version "0.30.5" + resolved "https://registry.yarnpkg.com/beautiful-react-hooks/-/beautiful-react-hooks-0.30.5.tgz#84fc22134da7cc474abece430febffbd0b3e6bec" + integrity sha512-5GDhcCqoHpOUZiqtLMs8U+zR6d8TQNk8XMRDh9H8g+LJpJPzJFUpt+YSKgCI0r5zE+ZdGRuiNZ6iZ4TLCsKk/Q== + dependencies: + lodash.debounce "^4.0.8" + lodash.throttle "^4.1.1" + big.js@^3.1.3: version "3.2.0" resolved "https://registry.yarnpkg.com/big.js/-/big.js-3.2.0.tgz#a5fc298b81b9e0dca2e458824784b65c52ba588e" @@ -7512,7 +7520,7 @@ lodash.sortby@^4.7.0: resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= -lodash.throttle@^4.0.0, lodash.throttle@^4.0.1: +lodash.throttle@^4.0.0, lodash.throttle@^4.0.1, lodash.throttle@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/lodash.throttle/-/lodash.throttle-4.1.1.tgz#c23e91b710242ac70c37f1e1cda9274cc39bf2f4" integrity sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ= From 4005e7dfd71bb9a3688e4dd81ce7f8ea8f010caa Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 29 Sep 2020 12:41:25 +0200 Subject: [PATCH 068/121] reduce tooltip rerenderings by making styles constant --- .../model/accessors/accessor_helpers.js | 3 +++ .../view/action-bar/dataset_position_view.js | 24 +++++++++++++++---- 2 files changed, 22 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js b/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js index 71906fc36ee..b72e4135462 100644 --- a/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js +++ b/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js @@ -17,6 +17,9 @@ import _ from "lodash"; As a rule of thumb, this wrapper should be used for selectors which need the entire store state. + + Note that this function isn't of any use if the return type of the passed + function is a primitive value. 
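
   For illustration, a hedged usage sketch (the selector below is invented for this
   example; getEnabledLayers is the dataset accessor already used elsewhere in this PR):

     import { getEnabledLayers } from "oxalis/model/accessors/dataset_accessor";

     const getEnabledLayerNames = reuseInstanceOnEquality(state =>
       getEnabledLayers(state.dataset, state.datasetConfiguration).map(layer => layer.name),
     );
     // As long as consecutive calls produce deep-equal name lists, the identical
     // array instance is returned, so downstream shallow comparisons see "no change"
     // and connected components are not re-rendered.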
*/ export function reuseInstanceOnEquality) => R>( fn: F, diff --git a/frontend/javascripts/oxalis/view/action-bar/dataset_position_view.js b/frontend/javascripts/oxalis/view/action-bar/dataset_position_view.js index 476d0cbe931..22abd343f23 100644 --- a/frontend/javascripts/oxalis/view/action-bar/dataset_position_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/dataset_position_view.js @@ -23,6 +23,17 @@ type Props = {| |}; const positionIconStyle = { transform: "rotate(-45deg)" }; +const warningColors = { color: "rgb(255, 155, 85)", borderColor: "rgb(241, 122, 39)" }; +const copyPositionDefaultStyle = { padding: "0 10px" }; +const copyPositionErrorStyle = { + ...copyPositionDefaultStyle, + ...warningColors, +}; +const positionInputDefaultStyle = { textAlign: "center" }; +const positionInputErrorStyle = { + ...positionInputDefaultStyle, + ...warningColors, +}; class DatasetPositionView extends PureComponent { copyPositionToClipboard = async () => { @@ -72,10 +83,13 @@ class DatasetPositionView extends PureComponent { render() { const position = V3.floor(getPosition(this.props.flycam)); const { isOutOfDatasetBounds, isOutOfTaskBounds } = this.isPositionOutOfBounds(position); - const maybeErrorColor = + const copyPositionStyle = + isOutOfDatasetBounds || isOutOfTaskBounds ? copyPositionErrorStyle : copyPositionDefaultStyle; + const positionInputStyle = isOutOfDatasetBounds || isOutOfTaskBounds - ? { color: "rgb(255, 155, 85)", borderColor: "rgb(241, 122, 39)" } - : {}; + ? positionInputErrorStyle + : positionInputDefaultStyle; + let maybeErrorMessage = null; if (isOutOfDatasetBounds) { maybeErrorMessage = message["tracing.out_of_dataset_bounds"]; @@ -91,7 +105,7 @@ class DatasetPositionView extends PureComponent { @@ -101,7 +115,7 @@ class DatasetPositionView extends PureComponent { value={position} onChange={this.handleChangePosition} autosize - style={{ textAlign: "center", ...maybeErrorColor }} + style={positionInputStyle} /> {isArbitraryMode ? ( From b6442cdc8d879a6e5c91088617d72d325282bb7e Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 29 Sep 2020 15:22:22 +0200 Subject: [PATCH 069/121] fix enabled 'Render Missing Data Black' for missing mags --- .../materials/plane_material_factory.js | 24 ++++++ .../model/accessors/dataset_accessor.js | 33 ++++++-- .../oxalis/model/helpers/listener_helpers.js | 1 - .../oxalis/shaders/main_data_fragment.glsl.js | 78 +++++++++++-------- .../oxalis/view/viewport_status_indicator.js | 19 +++-- 5 files changed, 107 insertions(+), 48 deletions(-) diff --git a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js index 3d24de4da50..64cf4ac0208 100644 --- a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js +++ b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js @@ -25,6 +25,7 @@ import { getElementClass, getBoundaries, getEnabledLayers, + getMissingLayersForCurrentZoom, } from "oxalis/model/accessors/dataset_accessor"; import { getRequestLogZoomStep, getZoomValue } from "oxalis/model/accessors/flycam_accessor"; import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; @@ -218,6 +219,13 @@ class PlaneMaterialFactory { type: "f", value: 1, }; + // If the `_missing` uniform is true, the layer + // cannot (and should not) be rendered in the + // current mag. 
+ this.uniforms[`${layerName}_missing`] = { + type: "f", + value: 0, + }; } for (const name of getSanitizedColorLayerNames()) { @@ -333,6 +341,22 @@ class PlaneMaterialFactory { ), ); + this.storePropertyUnsubscribers.push( + listenToStoreProperty( + storeState => getMissingLayersForCurrentZoom(storeState), + missingLayers => { + const missingLayerNames = missingLayers.map(l => l.name); + for (const dataLayer of Model.getAllLayers()) { + const sanitizedName = sanitizeName(dataLayer.name); + this.uniforms[`${sanitizedName}_missing`].value = missingLayerNames.includes( + dataLayer.name, + ); + } + }, + true, + ), + ); + this.storePropertyUnsubscribers.push( listenToStoreProperty( storeState => storeState.userConfiguration.sphericalCapRadius, diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 6113e5a755a..707db859cb9 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -3,6 +3,7 @@ import Maybe from "data.maybe"; import _ from "lodash"; import memoizeOne from "memoize-one"; +import { getMaxZoomStepDiff } from "oxalis/model/bucket_data_handling/loading_strategy_logic"; import type { APIAllowedMode, APIDataset, @@ -518,21 +519,39 @@ export function getEnabledLayers( }); } -function _getMissingLayersNames(state: OxalisState) { +function _getMissingLayersForCurrentZoom(state: OxalisState) { const { dataset } = state; const zoomStep = getRequestLogZoomStep(state); - const missingLayerNames = getEnabledLayers(dataset, state.datasetConfiguration) + const { renderMissingDataBlack } = state.datasetConfiguration; + const maxZoomStepDiff = getMaxZoomStepDiff(state.datasetConfiguration.loadingStrategy); + + const missingLayers = getEnabledLayers(dataset, state.datasetConfiguration) .map((layer: DataLayerType) => ({ - name: layer.category === "segmentation" ? "segmentation" : layer.name, + layer, resolutionInfo: getResolutionInfo(layer.resolutions), })) - .filter(({ resolutionInfo }) => !resolutionInfo.hasIndex(zoomStep)) - .map(({ name }) => name); - return missingLayerNames; + .filter(({ resolutionInfo }) => { + const isMissing = !resolutionInfo.hasIndex(zoomStep); + if (!isMissing || renderMissingDataBlack) { + return isMissing; + } + + // The current magnification is missing and fallback rendering + // is activated. Thus, check whether one of the fallback + // zoomSteps can be rendered. 
+ return !_.range(1, maxZoomStepDiff + 1).some(diff => { + const fallbackZoomstep = zoomStep + diff; + return resolutionInfo.hasIndex(fallbackZoomstep); + }); + }) + .map(({ layer }) => layer); + return missingLayers; } -export const getMissingLayersNames = reuseInstanceOnEquality(_getMissingLayersNames); +export const getMissingLayersForCurrentZoom = reuseInstanceOnEquality( + _getMissingLayersForCurrentZoom, +); export function getThumbnailURL(dataset: APIDataset): string { const datasetName = dataset.name; diff --git a/frontend/javascripts/oxalis/model/helpers/listener_helpers.js b/frontend/javascripts/oxalis/model/helpers/listener_helpers.js index 61639103325..0a77fff007b 100644 --- a/frontend/javascripts/oxalis/model/helpers/listener_helpers.js +++ b/frontend/javascripts/oxalis/model/helpers/listener_helpers.js @@ -20,7 +20,6 @@ export function listenToStoreProperty( callHandlerOnSubscribe: ?boolean = false, ): () => void { let currentValue; - function handleChange() { const nextValue = select(Store.getState()); // When callHandlerOnSubscribe is used, the initial value can be 0. In that case, diff --git a/frontend/javascripts/oxalis/shaders/main_data_fragment.glsl.js b/frontend/javascripts/oxalis/shaders/main_data_fragment.glsl.js index 051af3e7625..68a83253988 100644 --- a/frontend/javascripts/oxalis/shaders/main_data_fragment.glsl.js +++ b/frontend/javascripts/oxalis/shaders/main_data_fragment.glsl.js @@ -64,6 +64,7 @@ const int dataTextureCountPerLayer = <%= dataTextureCountPerLayer %>; uniform float <%= name %>_data_texture_width; uniform float <%= name %>_maxZoomStep; uniform float <%= name %>_alpha; + uniform float <%= name %>_missing; <% }) %> <% if (hasSegmentation) { %> @@ -148,47 +149,58 @@ void main() { } <% if (hasSegmentation) { %> - vec4 id = getSegmentationId(worldCoordUVW); + vec4 id = vec4(0.); + vec4 cellIdUnderMouse = vec4(0.); + float <%= segmentationName%>_effective_alpha = <%= segmentationName %>_alpha * (1. - <%= segmentationName %>_missing); - vec3 flooredMousePosUVW = transDim(floor(globalMousePosition)); + if (<%= segmentationName%>_effective_alpha > 0.) { + id = getSegmentationId(worldCoordUVW); + + vec3 flooredMousePosUVW = transDim(floor(globalMousePosition)); + + // When hovering an isosurface in the 3D viewport, the hoveredIsosurfaceId contains + // the hovered cell id. Otherwise, we use the mouse position to look up the active cell id. + // Passing the mouse position from the 3D viewport is not an option here, since that position + // isn't on the orthogonal planes necessarily. + cellIdUnderMouse = length(hoveredIsosurfaceId) > 0.1 ? hoveredIsosurfaceId : getSegmentationId(flooredMousePosUVW); + } else { + + } - // When hovering an isosurface in the 3D viewport, the hoveredIsosurfaceId contains - // the hovered cell id. Otherwise, we use the mouse position to look up the active cell id. - // Passing the mouse position from the 3D viewport is not an option here, since that position - // isn't on the orthogonal planes necessarily. - vec4 cellIdUnderMouse = length(hoveredIsosurfaceId) > 0.1 ? 
hoveredIsosurfaceId : getSegmentationId(flooredMousePosUVW); <% } %> // Get Color Value(s) vec3 data_color = vec3(0.0); vec3 color_value = vec3(0.0); <% _.each(colorLayerNames, function(name, layerIndex){ %> - - // Get grayscale value for <%= name %> - color_value = - getMaybeFilteredColorOrFallback( - <%= name %>_lookup_texture, - <%= formatNumberAsGLSLFloat(layerIndex) %>, - <%= name %>_data_texture_width, - <%= formatNumberAsGLSLFloat(packingDegreeLookup[name]) %>, - worldCoordUVW, - false, - fallbackGray - ).xyz; - - <% if (packingDegreeLookup[name] === 2.0) { %> - // Workaround for 16-bit color layers - color_value = vec3(color_value.g * 256.0 + color_value.r); - <% } %> - // Keep the color in bounds of min and max - color_value = clamp(color_value, <%= name %>_min, <%= name %>_max); - // Scale the color value according to the histogram settings - color_value = (color_value - <%= name %>_min) / (<%= name %>_max - <%= name %>_min); - - // Maybe invert the color using the inverting_factor - color_value = abs(color_value - <%= name %>_is_inverted); - // Multiply with color and alpha for <%= name %> - data_color += color_value * <%= name %>_alpha * <%= name %>_color; + float <%= name %>_effective_alpha = <%= name %>_alpha * (1. - <%= name %>_missing); + if (<%= name %>_effective_alpha > 0.) { + // Get grayscale value for <%= name %> + color_value = + getMaybeFilteredColorOrFallback( + <%= name %>_lookup_texture, + <%= formatNumberAsGLSLFloat(layerIndex) %>, + <%= name %>_data_texture_width, + <%= formatNumberAsGLSLFloat(packingDegreeLookup[name]) %>, + worldCoordUVW, + false, + fallbackGray + ).xyz; + + <% if (packingDegreeLookup[name] === 2.0) { %> + // Workaround for 16-bit color layers + color_value = vec3(color_value.g * 256.0 + color_value.r); + <% } %> + // Keep the color in bounds of min and max + color_value = clamp(color_value, <%= name %>_min, <%= name %>_max); + // Scale the color value according to the histogram settings + color_value = (color_value - <%= name %>_min) / (<%= name %>_max - <%= name %>_min); + + // Maybe invert the color using the inverting_factor + color_value = abs(color_value - <%= name %>_is_inverted); + // Multiply with color and alpha for <%= name %> + data_color += color_value * <%= name %>_alpha * <%= name %>_color; + } <% }) %> data_color = clamp(data_color, 0.0, 1.0); diff --git a/frontend/javascripts/oxalis/view/viewport_status_indicator.js b/frontend/javascripts/oxalis/view/viewport_status_indicator.js index 6eb020dc689..edd8f0a9433 100644 --- a/frontend/javascripts/oxalis/view/viewport_status_indicator.js +++ b/frontend/javascripts/oxalis/view/viewport_status_indicator.js @@ -3,16 +3,21 @@ import * as React from "react"; import { Icon, Tooltip } from "antd"; -import { getMissingLayersNames } from "oxalis/model/accessors/dataset_accessor"; +import { getMissingLayersForCurrentZoom } from "oxalis/model/accessors/dataset_accessor"; import { usePolledState } from "libs/react_helpers"; const { useState } = React; export default function ViewportStatusIndicator() { const [missingLayerNames, setMissingLayerNames] = useState([]); + usePolledState(state => { - const newMissingLayersNames = getMissingLayersNames(state); - setMissingLayerNames(newMissingLayersNames); + const newMissingLayersNames = getMissingLayersForCurrentZoom(state); + setMissingLayerNames( + newMissingLayersNames.map(layer => + layer.category === "segmentation" ? 
"Segmentation" : layer.name, + ), + ); }); if (missingLayerNames.length === 0) { @@ -20,15 +25,15 @@ export default function ViewportStatusIndicator() { } const pluralS = missingLayerNames.length > 1 ? "s" : ""; const pronounAndVerb = missingLayerNames.length > 1 ? "they don't" : "it doesn't"; - console.log("rerendering WarningIndicator"); return ( - The layer{pluralS} {missingLayerNames.join(", ")} cannot be rendered because{" "} - {pronounAndVerb} exist in the current magnification. Please adjust the zoom level to - change the active magnification.{" "} + The layer{pluralS} {missingLayerNames.map(name => `"${name}"`).join(", ")} cannot be + rendered because {pronounAndVerb} exist in the current magnification. Please adjust the + zoom level to change the active magnification. Also consider disabling the option + “Render Missing Data Black” if this is not already the case.
} > From adc1c94a8f5c940c337bacc7c0afff3e6b18f50a Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 29 Sep 2020 15:49:05 +0200 Subject: [PATCH 070/121] add issue links to todo comments --- .../javascripts/oxalis/model/accessors/dataset_accessor.js | 1 - .../oxalis/model/bucket_data_handling/data_cube.js | 4 ++-- .../javascripts/oxalis/model/helpers/position_converter.js | 3 ++- .../oxalis/model/volumetracing/volume_annotation_sampling.js | 4 ++-- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 707db859cb9..964caeb7bdd 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -114,7 +114,6 @@ export class ResolutionInfo { return _.max(Array.from(this.resolutionMap.keys())); } - // todo: consider to only go upwards instead of both directions. getClosestExistingIndex(index: number): number { if (this.hasIndex(index)) { return index; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 86ac29c6ac9..9f6ca1bfbba 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -331,7 +331,7 @@ class DataCube { label: number, activeCellId?: ?number = null, ): void { - // TODO: Do not label voxel in higher resolutions multiple times. + // TODO: Do not label voxel in higher resolutions multiple times (also see https://github.com/scalableminds/webknossos/issues/4838) // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used // to apply the annotation to all needed resolutions, without labeling voxels multiple times. @@ -349,7 +349,7 @@ class DataCube { labelVoxelInAllResolutions(voxel: Vector3, label: number, activeCellId: ?number) { // Also see labelVoxelsInAllResolutions - // TODO: Do not label voxel in higher resolutions multiple times. + // TODO: Do not label voxel in higher resolutions multiple times (also see https://github.com/scalableminds/webknossos/issues/4838) // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used // to apply the annotation to all needed resolutions, without labeling voxels multiple times. 
diff --git a/frontend/javascripts/oxalis/model/helpers/position_converter.js b/frontend/javascripts/oxalis/model/helpers/position_converter.js index cb055f6eb0b..57c4147717e 100644 --- a/frontend/javascripts/oxalis/model/helpers/position_converter.js +++ b/frontend/javascripts/oxalis/model/helpers/position_converter.js @@ -75,7 +75,8 @@ export function getResolutionsFactors(resolutionA: Vector3, resolutionB: Vector3 ]; } -// TODO (1): zoomedAddressToAnotherZoomStep usages should be converted to zoomedAddressToAnotherZoomStepWithInfo +// TODO: zoomedAddressToAnotherZoomStep usages should be converted to zoomedAddressToAnotherZoomStepWithInfo +// See: https://github.com/scalableminds/webknossos/issues/4838 export function zoomedAddressToAnotherZoomStep( [x, y, z, resolutionIndex]: Vector4, resolutions: Array, diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 8ab7e3bc106..3d54809b452 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -148,9 +148,9 @@ function downsampleVoxelMap( dimensionIndices: DimensionMap, ): LabeledVoxelsMap { // This method downsamples a LabeledVoxelsMap. For each bucket of the LabeledVoxelsMap - // the matching bucket the lower resolution is determined and all the labeledVoxels + // the matching bucket of the lower resolution is determined and all the labeledVoxels // are downsampled to the lower resolution bucket. The downsampling uses a kernel to skip - // checking whether to label a downsampled voxels if already one labeled voxel matching the downsampled voxel is found. + // checking whether to label a downsampled voxel if already one labeled voxel matching the downsampled voxel is found. if (targetZoomStep <= sourceZoomStep) { throw new Error("Trying to upsample a LabeledVoxelMap with the down sample function."); } From 1e2a47477642c134c444fa53bf411b07dc510d92 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 30 Sep 2020 15:37:51 +0200 Subject: [PATCH 071/121] implement some feedback --- CHANGELOG.unreleased.md | 2 +- frontend/javascripts/oxalis/api/api_latest.js | 4 ++-- frontend/javascripts/oxalis/api/api_v2.js | 4 ++-- frontend/javascripts/oxalis/constants.js | 7 +++++-- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index b3c9ab3e93f..57063b3e53e 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -16,7 +16,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Added the possibility to remove the fallback segmentation layer from a hybrid/volume tracing. Accessible by a minus button next to the layer's settings. [#4741](https://github.com/scalableminds/webknossos/pull/4766) - Added the possibility to undo and redo volume annotation strokes. [#4771](https://github.com/scalableminds/webknossos/pull/4771) - Added the possibility to navigate to the preceding/subsequent node by pressing "ctrl + ," or "ctrl + ." in a skeleton tracing. [#4147](https://github.com/scalableminds/webknossos/pull/4784) -- Added multi-resolution volume annotations. [#4755](https://github.com/scalableminds/webknossos/pull/4755) +- Added multi-resolution volume annotations. Note that already existing volume tracings will still only contain data in the first magnification. 
If you want to migrate an old volume tracing, you can download and re-import it. [#4755](https://github.com/scalableminds/webknossos/pull/4755) ### Changed - New volume/hybrid annotations are now automatically multi-resolution volume annotations. [#4755](https://github.com/scalableminds/webknossos/pull/4755) diff --git a/frontend/javascripts/oxalis/api/api_latest.js b/frontend/javascripts/oxalis/api/api_latest.js index 71fa0884c94..1c310e366f4 100644 --- a/frontend/javascripts/oxalis/api/api_latest.js +++ b/frontend/javascripts/oxalis/api/api_latest.js @@ -514,10 +514,10 @@ class TracingApi { } /** - * Reload tracing + * Reload tracing by reloading the entire page. * * @example - * api.tracing.hardReload().then(() => ... ); + * api.tracing.hardReload() */ async hardReload() { await Model.ensureSavedState(); diff --git a/frontend/javascripts/oxalis/api/api_v2.js b/frontend/javascripts/oxalis/api/api_v2.js index da5db302c30..ff4adf71949 100644 --- a/frontend/javascripts/oxalis/api/api_v2.js +++ b/frontend/javascripts/oxalis/api/api_v2.js @@ -325,10 +325,10 @@ class TracingApi { } /** - * Reload tracing + * Reload tracing by reloading the entire page. * * @example - * api.tracing.hardReload().then(() => ... ); + * api.tracing.hardReload() */ async hardReload() { await Model.ensureSavedState(); diff --git a/frontend/javascripts/oxalis/constants.js b/frontend/javascripts/oxalis/constants.js index f68a26f19b4..dd52afa4cdc 100644 --- a/frontend/javascripts/oxalis/constants.js +++ b/frontend/javascripts/oxalis/constants.js @@ -143,8 +143,11 @@ const Constants = { BUCKET_WIDTH: 32, BUCKET_SIZE: 32 ** 3, VIEWPORT_WIDTH, - // About the area the brush reaches at maximum radius (pi * 300 ^ 2). - AUTO_FILL_AREA_LIMIT: 200000, + // The area of the maximum radius (pi * 300 ^ 2) is 282690. + // We multiply this with 5, since the labeling is not done + // during mouse movement, but afterwards. So, a bit of a + // waiting time should be acceptable. + AUTO_FILL_AREA_LIMIT: 5 * 282690, // The amount of buckets which is required per layer can be customized // via the settings. 
The value which we expose for customization is a factor From b0906434d9c8f05cbd88c76660af4375a2e13063 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 30 Sep 2020 15:47:58 +0200 Subject: [PATCH 072/121] Apply suggestions from code review Co-authored-by: Daniel --- .../oxalis/model/accessors/dataset_accessor.js | 2 +- .../oxalis/model/accessors/volumetracing_accessor.js | 8 ++++---- .../oxalis/model/bucket_data_handling/data_cube.js | 4 ++-- .../oxalis/view/action-bar/volume_actions_view.js | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 964caeb7bdd..5db97c93802 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -87,7 +87,7 @@ export class ResolutionInfo { getResolutionByIndexOrThrow(index: number): Vector3 { const resolution = this.getResolutionByIndex(index); if (!resolution) { - throw new Error(`Resolution with in index ${index} does not exist`); + throw new Error(`Resolution with index ${index} does not exist.`); } return resolution; } diff --git a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js index 32875284c36..450cdf793dd 100644 --- a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js @@ -49,8 +49,8 @@ export function isVolumeTraceToolDisallowed(state: OxalisState) { if (state.tracing.volume == null) { return false; } - // The current resolution is to high for allowing the trace tool - // because to many voxel could be annotated at the same time. + // The current resolution is too high to allow the trace tool + // because too many voxels could be annotated at the same time. const isZoomStepTooHigh = getRequestLogZoomStep(state) > 1; return isZoomStepTooHigh; } @@ -63,7 +63,7 @@ export function isSegmentationMissingForZoomstep( } export function getNumberOfSlicesForResolution(activeResolution: Vector3, activePlane: OrthoView) { - const thirdDimenstionIndex = Dimensions.thirdDimensionForPlane(activePlane); - const numberOfSlices = activeResolution[thirdDimenstionIndex]; + const thirdDimensionIndex = Dimensions.thirdDimensionForPlane(activePlane); + const numberOfSlices = activeResolution[thirdDimensionIndex]; return numberOfSlices; } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 9f6ca1bfbba..667bd52ae6f 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -317,7 +317,7 @@ class DataCube { if ( Math.sqrt((x - 100) * (x - 100) + (y - 100) * (y - 100) + (z - 100) * (z - 100)) <= 20 ) { - this.labelVoxelInResolution([x, y, z], 5, 0); + this.labelVoxelInResolution([x, y, z], 0, 5); } } } @@ -415,7 +415,7 @@ class DataCube { // Additionally a map is created that saves all labeled voxels for each bucket. This map is returned at the end. // // Note: It is possible that a bucket is multiple times added to the list of buckets. This is intended - // because a border of the "neighbour volume shape" might leave the neighbour bucket and enter is somewhere else. 
+ // because a border of the "neighbour volume shape" might leave the neighbour bucket and enter it somewhere else. // If it would not be possible to have the same neighbour bucket in the list multiple times, // not all of the target area in the neighbour bucket might be filled. const bucketsWithLabeledVoxelsMap: LabeledVoxelsMap = new Map(); diff --git a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index d8ec8d97948..088093c4d90 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -55,7 +55,7 @@ class VolumeActionsView extends PureComponent { ) : null; const isTraceToolDisabled = isZoomStepTooHighForTraceTool(); const traceToolDisabledTooltip = isTraceToolDisabled - ? "Your zoom is low to use the trace tool. Please zoom in further to use it." + ? "Your zoom is too low to use the trace tool. Please zoom in further to use it." : ""; return ( From 1b888e08c7fa4d81d2708e786b6c2c1a876f4366 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 30 Sep 2020 15:48:14 +0200 Subject: [PATCH 073/121] more PR feedback --- .../javascripts/oxalis/model/accessors/dataset_accessor.js | 6 ++---- .../oxalis/model/bucket_data_handling/data_cube.js | 1 - .../javascripts/oxalis/model/volumetracing/volumelayer.js | 2 +- frontend/javascripts/oxalis/model_initialization.js | 4 +--- 4 files changed, 4 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 964caeb7bdd..e41540f40f9 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -204,12 +204,10 @@ function _getResolutions(dataset: APIDataset): Vector3[] { // we memoize _getResolutions, as well. export const getResolutions = memoizeOne(_getResolutions); -function _getDatasetResolutionInfo(dataset: APIDataset): ResolutionInfo { - return new ResolutionInfo(getResolutions(dataset)); +export function getDatasetResolutionInfo(dataset: APIDataset): ResolutionInfo { + return getResolutionInfo(getResolutions(dataset)); } -export const getDatasetResolutionInfo = memoizeOne(_getDatasetResolutionInfo); - function _getMaxZoomStep(maybeDataset: ?APIDataset): number { const minimumZoomStepCount = 1; const maxZoomstep = Maybe.fromNullable(maybeDataset) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 9f6ca1bfbba..0879992c004 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -442,7 +442,6 @@ class DataCube { continue; } const bucketData = currentBucket.getOrCreateData(); - // initialVoxelInBucket const initialVoxelIndex = this.getVoxelIndexByVoxelOffset(initialVoxelInBucket); if (bucketData[initialVoxelIndex] !== sourceCellId) { // Ignoring neighbour buckets whose cellId at the initial voxel does not match the source cell id. 
diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js index d31b9018ecf..6489a9f3536 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js @@ -240,7 +240,7 @@ class VolumeLayer { const maxCoord2d = this.get2DCoordinate(this.maxCoord); // The maximum area is scaled by 3 as the min and maxCoord will always be three slices apart, - // because in lines 171 + 172 a value of 2 is subtracted / added when the values get updated. + // because in `updateArea` a value of 2 is subtracted / added when the values get updated. if (this.getArea() > Constants.AUTO_FILL_AREA_LIMIT * 3) { Toast.info(messages["tracing.area_to_fill_is_too_big"]); return VoxelIterator.finished(); diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index 90dc3704816..d7e208d90d0 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -421,8 +421,6 @@ function setupLayerForVolumeTracing( const fallbackLayer = layers[fallbackLayerIndex]; const boundaries = getBoundaries(dataset); - console.log(tracing.resolutions); - // Legacy tracings don't have the `tracing.resolutions` property // since they were created before WK started to maintain multiple resolution // in volume annotations. Therefore, this code falls back to mag (1, 1, 1) for @@ -431,7 +429,7 @@ function setupLayerForVolumeTracing( ? tracing.resolutions.map(({ x, y, z }) => [x, y, z]) : [[1, 1, 1]]; - console.log(tracingResolutions); + console.log("Volume tracing resolutions:", tracingResolutions); const targetResolutions = fallbackLayer != null ? 
fallbackLayer.resolutions : getResolutions(dataset); From ebc10aa7e4bb0eb8df1a829b7e11933f3b9d7eb2 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 30 Sep 2020 16:22:44 +0200 Subject: [PATCH 074/121] also implement getIndexOrClosestHigherIndex for ResolutionInfo and use where appropriate --- .../oxalis/model/accessors/dataset_accessor.js | 15 +++++++++++++++ .../prefetch_strategy_arbitrary.js | 7 ++++++- .../prefetch_strategy_plane.js | 7 ++++++- 3 files changed, 27 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index d6f612f69bb..ab5dec5e04a 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -137,6 +137,21 @@ export class ResolutionInfo { const bestIndexWithDistance = _.head(_.sortBy(indicesWithDistances, entry => entry[1])); return bestIndexWithDistance[0]; } + + getIndexOrClosestHigherIndex(requestedIndex: number): ?number { + if (this.hasIndex(requestedIndex)) { + return requestedIndex; + } + + const indices = this.getResolutionsWithIndices().map(entry => entry[0]); + for (const index of indices) { + if (index > requestedIndex) { + // Return the first existing index which is higher than the requestedIndex + return index; + } + } + return null; + } } function _getResolutionInfo(resolutions: Array): ResolutionInfo { diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.js b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.js index ac01845c9dc..8f57ccb7ce6 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_arbitrary.js @@ -61,7 +61,12 @@ export class PrefetchStrategyArbitrary extends AbstractPrefetchStrategy { resolutionInfo: ResolutionInfo, ): Array { const pullQueue = []; - const zoomStep = resolutionInfo.getClosestExistingIndex(activeZoomStep); + const zoomStep = resolutionInfo.getIndexOrClosestHigherIndex(activeZoomStep); + if (zoomStep == null) { + // The layer cannot be rendered at this zoom step, as necessary magnifications + // are missing. Don't prefetch anything. + return pullQueue; + } const matrix0 = M4x4.clone(matrix); this.modifyMatrixForPoly(matrix0, zoomStep); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js index 52324483bf7..bb3c0d98d29 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/prefetch_strategy_plane.js @@ -86,7 +86,12 @@ export class PrefetchStrategy extends AbstractPrefetchStrategy { resolutions: Vector3[], resolutionInfo: ResolutionInfo, ): Array { - const zoomStep = resolutionInfo.getClosestExistingIndex(currentZoomStep); + const zoomStep = resolutionInfo.getIndexOrClosestHigherIndex(currentZoomStep); + if (zoomStep == null) { + // The layer cannot be rendered at this zoom step, as necessary magnifications + // are missing. Don't prefetch anything. 
+ return []; + } const maxZoomStep = resolutionInfo.getHighestResolutionIndex(); const zoomStepDiff = currentZoomStep - zoomStep; From f16777ee6daf1c05f73424f20d3996c6cd288d82 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 30 Sep 2020 16:31:38 +0200 Subject: [PATCH 075/121] return true in isVolumeTraceToolDisallowed if no volume tracing exists --- .../oxalis/model/accessors/volumetracing_accessor.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js index 450cdf793dd..54b45c2bd4a 100644 --- a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js @@ -47,7 +47,7 @@ export function getContourTracingMode(volumeTracing: VolumeTracing): ContourMode export function isVolumeTraceToolDisallowed(state: OxalisState) { if (state.tracing.volume == null) { - return false; + return true; } // The current resolution is too high to allow the trace tool // because too many voxels could be annotated at the same time. From 3fe1160181d3b961c977cbf7945980ca4d402e50 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 30 Sep 2020 16:35:00 +0200 Subject: [PATCH 076/121] fix front-end merge conflicts --- .../oxalis/model/sagas/isosurface_saga.js | 24 ------------------ .../oxalis/model/sagas/volumetracing_saga.js | 25 ------------------- 2 files changed, 49 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js b/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js index f8fe8999298..d81ef48e9dc 100644 --- a/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/isosurface_saga.js @@ -172,14 +172,8 @@ function* ensureSuitableIsosurface( segmentId, clippedPosition, zoomStep, -<<<<<<< HEAD resolutionInfo, -||||||| 983a4d2e7 - resolutions, -======= - resolutions, removeExistingIsosurface, ->>>>>>> ffc659b255cff6929cd7666b297e6a452caa8db5 ); } @@ -189,14 +183,8 @@ function* loadIsosurfaceWithNeighbors( segmentId: number, clippedPosition: Vector3, zoomStep: number, -<<<<<<< HEAD resolutionInfo: ResolutionInfo, -||||||| 983a4d2e7 - resolutions: Array, -======= - resolutions: Array, removeExistingIsosurface: boolean, ->>>>>>> ffc659b255cff6929cd7666b297e6a452caa8db5 ): Saga { let isInitialRequest = true; let positionsToRequest = [clippedPosition]; @@ -209,14 +197,8 @@ function* loadIsosurfaceWithNeighbors( segmentId, position, zoomStep, -<<<<<<< HEAD resolutionInfo, -||||||| 983a4d2e7 - resolutions, -======= - resolutions, removeExistingIsosurface && isInitialRequest, ->>>>>>> ffc659b255cff6929cd7666b297e6a452caa8db5 ); isInitialRequest = false; positionsToRequest = positionsToRequest.concat(neighbors); @@ -235,14 +217,8 @@ function* maybeLoadIsosurface( segmentId: number, clippedPosition: Vector3, zoomStep: number, -<<<<<<< HEAD resolutionInfo: ResolutionInfo, -||||||| 983a4d2e7 - resolutions: Array, -======= - resolutions: Array, removeExistingIsosurface: boolean, ->>>>>>> ffc659b255cff6929cd7666b297e6a452caa8db5 ): Saga> { const threeDMap = getMapForSegment(segmentId); diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 7b198334d97..c1d7d2dca44 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ 
-37,19 +37,10 @@ import { isVolumeTraceToolDisallowed, getNumberOfSlicesForResolution, } from "oxalis/model/accessors/volumetracing_accessor"; -<<<<<<< HEAD -||||||| 983a4d2e7 -import { getPosition, getRotation } from "oxalis/model/accessors/flycam_accessor"; -======= import { getPosition, getFlooredPosition, getRotation, -} from "oxalis/model/accessors/flycam_accessor"; ->>>>>>> ffc659b255cff6929cd7666b297e6a452caa8db5 -import { - getPosition, - getRotation, getCurrentResolution, getRequestLogZoomStep, } from "oxalis/model/accessors/flycam_accessor"; @@ -193,19 +184,11 @@ export function* editVolumeLayerAsync(): Generator { } } -<<<<<<< HEAD function* getBoundingsFromPosition( currentViewport: OrthoView, numberOfSlices: number, ): Saga { - const position = Dimensions.roundCoordinate(yield* select(state => getPosition(state.flycam))); -||||||| 983a4d2e7 -function* getBoundingsFromPosition(currentViewport: OrthoView): Saga { - const position = Dimensions.roundCoordinate(yield* select(state => getPosition(state.flycam))); -======= -function* getBoundingsFromPosition(currentViewport: OrthoView): Saga { const position = yield* select(state => getFlooredPosition(state.flycam)); ->>>>>>> ffc659b255cff6929cd7666b297e6a452caa8db5 const halfViewportExtents = yield* call(getHalfViewportExtents, currentViewport); const halfViewportExtentsUVW = Dimensions.transDim([...halfViewportExtents, 0], currentViewport); const thirdDimension = Dimensions.thirdDimensionForPlane(currentViewport); @@ -259,7 +242,6 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga return; } -<<<<<<< HEAD const segmentationLayer: DataLayer = yield* call([Model, Model.getSegmentationLayer]); const { cube } = segmentationLayer; const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); @@ -270,14 +252,7 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga console.log("labeledZoomStep", labeledZoomStep); const dimensionIndices = Dimensions.getIndices(activeViewport); - const position = Dimensions.roundCoordinate(yield* select(state => getPosition(state.flycam))); -||||||| 983a4d2e7 - const segmentationLayer = yield* call([Model, Model.getSegmentationLayer]); - const position = Dimensions.roundCoordinate(yield* select(state => getPosition(state.flycam))); -======= - const segmentationLayer = yield* call([Model, Model.getSegmentationLayer]); const position = yield* select(state => getFlooredPosition(state.flycam)); ->>>>>>> ffc659b255cff6929cd7666b297e6a452caa8db5 const [halfViewportExtentX, halfViewportExtentY] = yield* call( getHalfViewportExtents, activeViewport, From 74aa5fd13a4b3c09505060da66e3876faf73d494 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 30 Sep 2020 17:15:55 +0200 Subject: [PATCH 077/121] reduce usages of getResolutionByIndexWithFallback and make that function correct (remove isotropic fallback in favor of failing for unclear cases --- .../model/accessors/dataset_accessor.js | 23 +++++++++++++++---- .../model/bucket_data_handling/data_cube.js | 12 +++++++++- .../layer_rendering_manager.js | 11 +++++++-- .../model/helpers/position_converter.js | 13 +++++++++-- 4 files changed, 49 insertions(+), 10 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 30b799c4f58..fd769cdbc86 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -92,14 +92,27 
@@ export class ResolutionInfo { return resolution; } - getResolutionByIndexWithFallback(index: number): Vector3 { - const resolutionMaybe = this.getResolutionByIndex(index); + getResolutionByIndexWithFallback( + index: number, + fallbackResolutionInfo: ?ResolutionInfo, + ): Vector3 { + let resolutionMaybe = this.getResolutionByIndex(index); if (resolutionMaybe) { return resolutionMaybe; - } else { - const powerOf2 = this.indexToPowerOf2(index); - return [powerOf2, powerOf2, powerOf2]; } + + resolutionMaybe = + fallbackResolutionInfo != null ? fallbackResolutionInfo.getResolutionByIndex(index) : null; + if (resolutionMaybe) { + return resolutionMaybe; + } + + if (index === 0) { + // If the index is 0, only mag 1-1-1 can be meant. + return [1, 1, 1]; + } + + throw new Error(""); } getResolutionByPowerOf2(powerOfTwo: number): ?Vector3 { diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index e5caa98ab37..163b85ed791 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -424,6 +424,11 @@ class DataCube { if (seedBucket.type === "null") { return null; } + if (!this.resolutionInfo.hasIndex(zoomStep)) { + throw new Error( + `DataCube.floodFill was called with a zoomStep of ${zoomStep} which does not exist for the current magnification.`, + ); + } const seedVoxelIndex = this.getVoxelIndex(seedVoxel, zoomStep); const sourceCellId = seedBucket.getOrCreateData()[seedVoxelIndex]; if (sourceCellId === cellId) { @@ -528,6 +533,11 @@ class DataCube { } getDataValue(voxel: Vector3, mapping: ?Mapping, zoomStep: number = 0): number { + if (!this.resolutionInfo.hasIndex(zoomStep)) { + throw new Error( + `DataCube.getDataValue was called with a zoomStep of ${zoomStep} which does not exist for the current layer.`, + ); + } const bucket = this.getBucket(this.positionToZoomedAddress(voxel, zoomStep)); const voxelIndex = this.getVoxelIndex(voxel, zoomStep); @@ -559,7 +569,7 @@ class DataCube { getVoxelOffset(voxel: Vector3, zoomStep: number = 0): Vector3 { // No `map` for performance reasons const voxelOffset = [0, 0, 0]; - const resolution = this.resolutionInfo.getResolutionByIndexWithFallback(zoomStep); + const resolution = this.resolutionInfo.getResolutionByIndexOrThrow(zoomStep); for (let i = 0; i < 3; i++) { voxelOffset[i] = Math.floor(voxel[i] / resolution[i]) % constants.BUCKET_WIDTH; } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js index eab83d43ae6..0791732e903 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.js @@ -21,6 +21,7 @@ import { isLayerVisible, getLayerByName, getResolutionInfo, + getDatasetResolutionInfo, } from "oxalis/model/accessors/dataset_accessor"; import AsyncBucketPickerWorker from "oxalis/workers/async_bucket_picker.worker"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; @@ -167,6 +168,7 @@ export default class LayerRenderingManager { const layer = getLayerByName(dataset, this.name); const resolutionInfo = getResolutionInfo(layer.resolutions); + const datasetResolutionInfo = getDatasetResolutionInfo(dataset); const maximumResolutionIndex = resolutionInfo.getHighestResolutionIndex(); if (logZoomStep 
> maximumResolutionIndex) { @@ -178,7 +180,7 @@ export default class LayerRenderingManager { const resolutions = getResolutions(dataset); const subBucketLocality = getSubBucketLocality( position, - resolutionInfo.getResolutionByIndexWithFallback(logZoomStep), + resolutionInfo.getResolutionByIndexWithFallback(logZoomStep, datasetResolutionInfo), ); const areas = getAreasFromState(state); @@ -274,7 +276,12 @@ export default class LayerRenderingManager { const state = Store.getState(); const layer = getLayerByName(state.dataset, this.name); const resolutionInfo = getResolutionInfo(layer.resolutions); - const resolution = resolutionInfo.getResolutionByIndexWithFallback(logZoomStep); + const datasetResolutionInfo = getDatasetResolutionInfo(state.dataset); + + const resolution = resolutionInfo.getResolutionByIndexWithFallback( + logZoomStep, + datasetResolutionInfo, + ); const addressSpaceDimensions = getAddressSpaceDimensions( state.temporaryConfiguration.gpuSetup.initializedGpuFactor, ); diff --git a/frontend/javascripts/oxalis/model/helpers/position_converter.js b/frontend/javascripts/oxalis/model/helpers/position_converter.js index 57c4147717e..81dd1516a8c 100644 --- a/frontend/javascripts/oxalis/model/helpers/position_converter.js +++ b/frontend/javascripts/oxalis/model/helpers/position_converter.js @@ -76,6 +76,8 @@ export function getResolutionsFactors(resolutionA: Vector3, resolutionB: Vector3 } // TODO: zoomedAddressToAnotherZoomStep usages should be converted to zoomedAddressToAnotherZoomStepWithInfo +// Note that this is not trivial since zoomedAddressToAnotherZoomStepWithInfo will throw on not existing +// resolution indices (in contrast to zoomedAddressToAnotherZoomStep). // See: https://github.com/scalableminds/webknossos/issues/4838 export function zoomedAddressToAnotherZoomStep( [x, y, z, resolutionIndex]: Vector4, @@ -94,13 +96,20 @@ export function zoomedAddressToAnotherZoomStep( ]; } +/* + Please note that this function will fail if the passed resolutionIndex or + targetResolutionIndex don't exist in the resolutionInfo. 
+ */ export function zoomedAddressToAnotherZoomStepWithInfo( [x, y, z, resolutionIndex]: Vector4, resolutionInfo: ResolutionInfo, targetResolutionIndex: number, ): Vector4 { - const currentResolution = resolutionInfo.getResolutionByIndexWithFallback(resolutionIndex); - const targetResolution = resolutionInfo.getResolutionByIndexWithFallback(targetResolutionIndex); + const currentResolution = resolutionInfo.getResolutionByIndexWithFallback(resolutionIndex, null); + const targetResolution = resolutionInfo.getResolutionByIndexWithFallback( + targetResolutionIndex, + null, + ); const factors = getResolutionsFactors(currentResolution, targetResolution); return [ From a5e7987c2d386e193035f090276a969f5a95d873 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 30 Sep 2020 17:41:21 +0200 Subject: [PATCH 078/121] simplify is2DVoxelInsideBucket method in Bucket --- .../model/bucket_data_handling/bucket.js | 21 ++++++++----------- .../model/bucket_data_handling/data_cube.js | 5 +++++ 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js index d5b293d3656..86db17d2859 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.js @@ -7,6 +7,7 @@ import BackboneEvents from "backbone-events-standalone"; import * as THREE from "three"; import _ from "lodash"; +import { mod } from "libs/utils"; import { bucketPositionToGlobalAddress, zoomedAddressToAnotherZoomStep, @@ -195,22 +196,18 @@ export class DataBucket { zoomStep, ]; let isVoxelOutside = false; - const adjustedVoxel = voxel; + const adjustedVoxel = [voxel[0], voxel[1]]; for (let dimensionIndex = 0; dimensionIndex < 2; ++dimensionIndex) { const dimension = dimensionIndices[dimensionIndex]; - if (voxel[dimensionIndex] < 0) { + if (voxel[dimensionIndex] < 0 || voxel[dimensionIndex] >= Constants.BUCKET_WIDTH) { isVoxelOutside = true; - const offset = Math.ceil(-voxel[dimensionIndex] / Constants.BUCKET_WIDTH); - neighbourBucketAddress[dimension] -= offset; - // Add a full bucket width to the coordinate below 0 to avoid error's - // caused by the modulo operation used in getVoxelOffset. 
- adjustedVoxel[dimensionIndex] += Constants.BUCKET_WIDTH * offset; - } else if (voxel[dimensionIndex] >= Constants.BUCKET_WIDTH) { - isVoxelOutside = true; - const offset = Math.floor(voxel[dimensionIndex] / Constants.BUCKET_WIDTH); - neighbourBucketAddress[dimension] += offset; - adjustedVoxel[dimensionIndex] -= Constants.BUCKET_WIDTH * offset; + const sign = Math.sign(voxel[dimensionIndex]); + const offset = Math.ceil(Math.abs(voxel[dimensionIndex]) / Constants.BUCKET_WIDTH); + // If the voxel coordinate is below 0, sign is negative and will lower the neighbor + // bucket address + neighbourBucketAddress[dimension] += sign * offset; } + adjustedVoxel[dimensionIndex] = mod(adjustedVoxel[dimensionIndex], Constants.BUCKET_WIDTH); } return { isVoxelOutside, neighbourBucketAddress, adjustedVoxel }; }; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 163b85ed791..da272c0ea80 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -566,6 +566,11 @@ class DataCube { return x + y * constants.BUCKET_WIDTH + z * constants.BUCKET_WIDTH ** 2; } + /* + Given a global coordinate `voxel`, this method returns the coordinate + within the bucket to which `voxel` belongs. + So, the returned value for x, y and z will be between 0 and 32. + */ getVoxelOffset(voxel: Vector3, zoomStep: number = 0): Vector3 { // No `map` for performance reasons const voxelOffset = [0, 0, 0]; From 717fe2e01173c4f7bd28084aa47327bae89dbb18 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 2 Oct 2020 10:14:19 +0200 Subject: [PATCH 079/121] Apply suggestions from code review Co-authored-by: Daniel --- .../oxalis/model/sagas/volumetracing_saga.js | 10 +++++----- .../model/volumetracing/volume_annotation_sampling.js | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index c1d7d2dca44..d72bb42b55c 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -346,7 +346,7 @@ export function* floodFill(): Saga { const labeledZoomStep = resolutionInfo.getClosestExistingIndex(activeZoomStep); const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); - // The floodfill and applyVoxelMap methods of iterates within the bucket. + // The floodfill and applyVoxelMap methods iterate within the bucket. // Thus thirdDimensionValue must also be within the initial bucket in the correct resolution. const thirdDimensionValue = Math.floor(seedVoxel[dimensionIndices[2]] / labeledResolution[dimensionIndices[2]]) % @@ -430,7 +430,7 @@ function applyLabeledVoxelMapToAllMissingResolutions( // should be downsampled) const downsampleSequence = allResolutionsWithIndices.slice(pivotIndex); // `upsampleSequence` contains the current mag and all lower mags (to which - // should be downsampled) + // should be upsampled) const upsampleSequence = allResolutionsWithIndices.slice(0, pivotIndex + 1).reverse(); // First upsample the voxel map and apply it to all better resolutions. @@ -469,7 +469,7 @@ function applyLabeledVoxelMapToAllMissingResolutions( } currentLabeledVoxelMap = labeledVoxelMapToApply; - // Next we downsamplesample the annotation and apply it. 
+ // Next we downsample the annotation and apply it. // sourceZoomStep will be lower than targetZoomStep for (const [source, target] of pairwise(downsampleSequence)) { const [sourceZoomStep, sourceResolution] = source; @@ -528,13 +528,13 @@ export function* ensureNoTraceToolInLowResolutions(): Saga<*> { yield* take("INITIALIZE_VOLUMETRACING"); while (true) { yield* take(["ZOOM_IN", "ZOOM_OUT", "ZOOM_BY_DELTA", "SET_ZOOM_STEP"]); - const isResolutionToLowForTraceTool = yield* select(state => + const isResolutionTooLowForTraceTool = yield* select(state => isVolumeTraceToolDisallowed(state), ); const isTraceToolActive = yield* select( state => enforceVolumeTracing(state.tracing).activeTool === VolumeToolEnum.TRACE, ); - if (isResolutionToLowForTraceTool && isTraceToolActive) { + if (isResolutionTooLowForTraceTool && isTraceToolActive) { yield* put(setToolAction(VolumeToolEnum.MOVE)); } } diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 3d54809b452..404fc20ebca 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -17,8 +17,8 @@ function upsampleVoxelMap( thirdDimensionVoxelValue: number, ): LabeledVoxelsMap { // This method upsamples a given LabeledVoxelsMap. For each bucket in the LabeledVoxelsMap this function - // iterating over the buckets in the higher resolution that are covered by the bucket. - // For each covered bucket all labeled voxel entries are upsampled with a kernel an marked in an array for the covered bucket. + // iterates over the buckets in the higher resolution that are covered by the bucket. + // For each covered bucket all labeled voxel entries are upsampled with a kernel and marked in an array for the covered bucket. // Therefore all covered buckets with their marked array build the upsampled version of the given LabeledVoxelsMap. if (sourceZoomStep <= targetZoomStep) { throw new Error("Trying to upsample a LabeledVoxelMap with the down sample function."); From 9bad842e47a7be1b3cf1ca137243c3293aee50f5 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 2 Oct 2020 10:10:32 +0200 Subject: [PATCH 080/121] integrate more PR feedback; resolve merge conflict in changelog; rename missing to unrenderable etc --- CHANGELOG.unreleased.md | 17 +---------- .../materials/plane_material_factory.js | 10 +++---- .../model/accessors/dataset_accessor.js | 30 ++++++++++++++----- .../oxalis/model/sagas/volumetracing_saga.js | 1 - .../oxalis/shaders/main_data_fragment.glsl.js | 8 ++--- .../oxalis/view/action_bar_view.js | 1 - .../oxalis/view/viewport_status_indicator.js | 16 +++++----- 7 files changed, 39 insertions(+), 44 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 6315884281a..4f30938f4f0 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -11,22 +11,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released [Commits](https://github.com/scalableminds/webknossos/compare/20.10.0...HEAD) ### Added -<<<<<<< HEAD -- Added a tool to initiate a flood fill in a volume tracing with the active cell id. [#4780](https://github.com/scalableminds/webknossos/pull/4780) -- Added the possibility to merge volume tracings both via file upload (zip of zips) and when viewing projects/tasks as compound annotations. 
[#4709](https://github.com/scalableminds/webknossos/pull/4709) -- Added the possibility to remove the fallback segmentation layer from a hybrid/volume tracing. Accessible by a minus button next to the layer's settings. [#4741](https://github.com/scalableminds/webknossos/pull/4766) -- Added the possibility to undo and redo volume annotation strokes. [#4771](https://github.com/scalableminds/webknossos/pull/4771) -- Added the possibility to navigate to the preceding/subsequent node by pressing "ctrl + ," or "ctrl + ." in a skeleton tracing. [#4147](https://github.com/scalableminds/webknossos/pull/4784) -- Added multi-resolution volume annotations. Note that already existing volume tracings will still only contain data in the first magnification. If you want to migrate an old volume tracing, you can download and re-import it. [#4755](https://github.com/scalableminds/webknossos/pull/4755) -||||||| 983a4d2e7 -- Added a tool to initiate a flood fill in a volume tracing with the active cell id. [#4780](https://github.com/scalableminds/webknossos/pull/4780) -- Added the possibility to merge volume tracings both via file upload (zip of zips) and when viewing projects/tasks as compound annotations. [#4709](https://github.com/scalableminds/webknossos/pull/4709) -- Added the possibility to remove the fallback segmentation layer from a hybrid/volume tracing. Accessible by a minus button next to the layer's settings. [#4741](https://github.com/scalableminds/webknossos/pull/4766) -- Added the possibility to undo and redo volume annotation strokes. [#4771](https://github.com/scalableminds/webknossos/pull/4771) -- Added the possibility to navigate to the preceding/subsequent node by pressing "ctrl + ," or "ctrl + ." in a skeleton tracing. [#4147](https://github.com/scalableminds/webknossos/pull/4784) -======= -- ->>>>>>> ffc659b255cff6929cd7666b297e6a452caa8db5 +- Added multi-resolution volume annotations. Note that already existing volume tracings will still only contain data in the first magnification. If you want to migrate an old volume tracing, you can download and re-import it. [#4755](https://github.com/scalableminds/webknossos/pull/4755) ### Changed - New volume/hybrid annotations are now automatically multi-resolution volume annotations. 
[#4755](https://github.com/scalableminds/webknossos/pull/4755) diff --git a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js index 64cf4ac0208..afa2bcf805e 100644 --- a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js +++ b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js @@ -25,7 +25,7 @@ import { getElementClass, getBoundaries, getEnabledLayers, - getMissingLayersForCurrentZoom, + getUnrenderableLayersForCurrentZoom, } from "oxalis/model/accessors/dataset_accessor"; import { getRequestLogZoomStep, getZoomValue } from "oxalis/model/accessors/flycam_accessor"; import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; @@ -343,12 +343,12 @@ class PlaneMaterialFactory { this.storePropertyUnsubscribers.push( listenToStoreProperty( - storeState => getMissingLayersForCurrentZoom(storeState), - missingLayers => { - const missingLayerNames = missingLayers.map(l => l.name); + storeState => getUnrenderableLayersForCurrentZoom(storeState), + unrenderableLayers => { + const unrenderableLayerNames = unrenderableLayers.map(l => l.name); for (const dataLayer of Model.getAllLayers()) { const sanitizedName = sanitizeName(dataLayer.name); - this.uniforms[`${sanitizedName}_missing`].value = missingLayerNames.includes( + this.uniforms[`${sanitizedName}_unrenderable`].value = unrenderableLayerNames.includes( dataLayer.name, ); } diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index fd769cdbc86..17f0b1d31e5 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -112,7 +112,7 @@ export class ResolutionInfo { return [1, 1, 1]; } - throw new Error(""); + throw new Error(`Resolution could not be determined for index ${index}`); } getResolutionByPowerOf2(powerOfTwo: number): ?Vector3 { @@ -544,22 +544,36 @@ export function getEnabledLayers( }); } -function _getMissingLayersForCurrentZoom(state: OxalisState) { +/* + This function returns layers which cannot be rendered (since + the current magnification is missing), even though they should + be rendered (since they are enabled). The function takes fallback + magnifications into account if renderMissingDataBlack is disabled. + */ +function _getUnrenderableLayersForCurrentZoom(state: OxalisState) { const { dataset } = state; const zoomStep = getRequestLogZoomStep(state); const { renderMissingDataBlack } = state.datasetConfiguration; const maxZoomStepDiff = getMaxZoomStepDiff(state.datasetConfiguration.loadingStrategy); - const missingLayers = getEnabledLayers(dataset, state.datasetConfiguration) + const unrenderableLayers = getEnabledLayers(dataset, state.datasetConfiguration) .map((layer: DataLayerType) => ({ layer, resolutionInfo: getResolutionInfo(layer.resolutions), })) .filter(({ resolutionInfo }) => { const isMissing = !resolutionInfo.hasIndex(zoomStep); - if (!isMissing || renderMissingDataBlack) { - return isMissing; + if (!isMissing) { + // The layer exists. Thus, it is not unrenderable. + return false; + } + + if (renderMissingDataBlack) { + // We already know that the layer is missing. Since `renderMissingDataBlack` + // is enabled, the fallback magnifications don't matter. The layer cannot be + // rendered. 
+ return true; } // The current magnification is missing and fallback rendering @@ -571,11 +585,11 @@ function _getMissingLayersForCurrentZoom(state: OxalisState) { }); }) .map(({ layer }) => layer); - return missingLayers; + return unrenderableLayers; } -export const getMissingLayersForCurrentZoom = reuseInstanceOnEquality( - _getMissingLayersForCurrentZoom, +export const getUnrenderableLayersForCurrentZoom = reuseInstanceOnEquality( + _getUnrenderableLayersForCurrentZoom, ); export function getThumbnailURL(dataset: APIDataset): string { diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index c1d7d2dca44..27107f2577c 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -249,7 +249,6 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga getResolutionInfoOfSegmentationLayer(state.dataset), ); const labeledZoomStep = resolutionInfo.getClosestExistingIndex(activeZoomStep); - console.log("labeledZoomStep", labeledZoomStep); const dimensionIndices = Dimensions.getIndices(activeViewport); const position = yield* select(state => getFlooredPosition(state.flycam)); diff --git a/frontend/javascripts/oxalis/shaders/main_data_fragment.glsl.js b/frontend/javascripts/oxalis/shaders/main_data_fragment.glsl.js index 68a83253988..7753b29e578 100644 --- a/frontend/javascripts/oxalis/shaders/main_data_fragment.glsl.js +++ b/frontend/javascripts/oxalis/shaders/main_data_fragment.glsl.js @@ -64,7 +64,7 @@ const int dataTextureCountPerLayer = <%= dataTextureCountPerLayer %>; uniform float <%= name %>_data_texture_width; uniform float <%= name %>_maxZoomStep; uniform float <%= name %>_alpha; - uniform float <%= name %>_missing; + uniform float <%= name %>_unrenderable; <% }) %> <% if (hasSegmentation) { %> @@ -151,7 +151,7 @@ void main() { <% if (hasSegmentation) { %> vec4 id = vec4(0.); vec4 cellIdUnderMouse = vec4(0.); - float <%= segmentationName%>_effective_alpha = <%= segmentationName %>_alpha * (1. - <%= segmentationName %>_missing); + float <%= segmentationName%>_effective_alpha = <%= segmentationName %>_alpha * (1. - <%= segmentationName %>_unrenderable); if (<%= segmentationName%>_effective_alpha > 0.) { id = getSegmentationId(worldCoordUVW); @@ -163,8 +163,6 @@ void main() { // Passing the mouse position from the 3D viewport is not an option here, since that position // isn't on the orthogonal planes necessarily. cellIdUnderMouse = length(hoveredIsosurfaceId) > 0.1 ? hoveredIsosurfaceId : getSegmentationId(flooredMousePosUVW); - } else { - } <% } %> @@ -173,7 +171,7 @@ void main() { vec3 data_color = vec3(0.0); vec3 color_value = vec3(0.0); <% _.each(colorLayerNames, function(name, layerIndex){ %> - float <%= name %>_effective_alpha = <%= name %>_alpha * (1. - <%= name %>_missing); + float <%= name %>_effective_alpha = <%= name %>_alpha * (1. - <%= name %>_unrenderable); if (<%= name %>_effective_alpha > 0.) { // Get grayscale value for <%= name %> color_value = diff --git a/frontend/javascripts/oxalis/view/action_bar_view.js b/frontend/javascripts/oxalis/view/action_bar_view.js index ec69cd69ecb..c03f4d79d2e 100644 --- a/frontend/javascripts/oxalis/view/action_bar_view.js +++ b/frontend/javascripts/oxalis/view/action_bar_view.js @@ -181,7 +181,6 @@ class ActionBarView extends React.PureComponent { )} {showVersionRestore ? 
VersionRestoreWarning : null} - {!isReadOnly && hasVolume && isVolumeSupported ? : null} {isArbitrarySupported ? : null} {isTraceMode ? null : this.renderStartTracingButton()} diff --git a/frontend/javascripts/oxalis/view/viewport_status_indicator.js b/frontend/javascripts/oxalis/view/viewport_status_indicator.js index edd8f0a9433..fdc5d090a8b 100644 --- a/frontend/javascripts/oxalis/view/viewport_status_indicator.js +++ b/frontend/javascripts/oxalis/view/viewport_status_indicator.js @@ -3,34 +3,34 @@ import * as React from "react"; import { Icon, Tooltip } from "antd"; -import { getMissingLayersForCurrentZoom } from "oxalis/model/accessors/dataset_accessor"; +import { getUnrenderableLayersForCurrentZoom } from "oxalis/model/accessors/dataset_accessor"; import { usePolledState } from "libs/react_helpers"; const { useState } = React; export default function ViewportStatusIndicator() { - const [missingLayerNames, setMissingLayerNames] = useState([]); + const [unrenderableLayerNames, setUnrenderableLayerNames] = useState([]); usePolledState(state => { - const newMissingLayersNames = getMissingLayersForCurrentZoom(state); - setMissingLayerNames( + const newMissingLayersNames = getUnrenderableLayersForCurrentZoom(state); + setUnrenderableLayerNames( newMissingLayersNames.map(layer => layer.category === "segmentation" ? "Segmentation" : layer.name, ), ); }); - if (missingLayerNames.length === 0) { + if (unrenderableLayerNames.length === 0) { return null; } - const pluralS = missingLayerNames.length > 1 ? "s" : ""; - const pronounAndVerb = missingLayerNames.length > 1 ? "they don't" : "it doesn't"; + const pluralS = unrenderableLayerNames.length > 1 ? "s" : ""; + const pronounAndVerb = unrenderableLayerNames.length > 1 ? "they don't" : "it doesn't"; return ( - The layer{pluralS} {missingLayerNames.map(name => `"${name}"`).join(", ")} cannot be + The layer{pluralS} {unrenderableLayerNames.map(name => `"${name}"`).join(", ")} cannot be rendered because {pronounAndVerb} exist in the current magnification. Please adjust the zoom level to change the active magnification. Also consider disabling the option “Render Missing Data Black” if this is not already the case. 
From d01caceee97bbeaf8929dffeb690323bc72ce988 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 2 Oct 2020 11:26:28 +0200 Subject: [PATCH 081/121] refactor applyLabeledVoxelMapToAllMissingResolutions --- frontend/javascripts/oxalis/constants.js | 2 + .../javascripts/oxalis/model/dimensions.js | 4 ++ .../oxalis/model/sagas/volumetracing_saga.js | 55 ++++++++++--------- 3 files changed, 35 insertions(+), 26 deletions(-) diff --git a/frontend/javascripts/oxalis/constants.js b/frontend/javascripts/oxalis/constants.js index dd52afa4cdc..d5fe59bc620 100644 --- a/frontend/javascripts/oxalis/constants.js +++ b/frontend/javascripts/oxalis/constants.js @@ -127,6 +127,8 @@ export const Unicode = { MultiplicationSymbol: "×", }; +// Maps from a bucket address to a Buffer which contains +// the labeled voxels export type LabeledVoxelsMap = Map; const Constants = { diff --git a/frontend/javascripts/oxalis/model/dimensions.js b/frontend/javascripts/oxalis/model/dimensions.js index dd6ad817470..09afd2ba999 100644 --- a/frontend/javascripts/oxalis/model/dimensions.js +++ b/frontend/javascripts/oxalis/model/dimensions.js @@ -32,6 +32,10 @@ const Dimensions = { return [array[ind[0]], array[ind[1]], array[ind[2]]]; }, + transDimWithIndices(array: Vector3, indices: DimensionMap): Vector3 { + return [array[indices[0]], array[indices[1]], array[indices[2]]]; + }, + planeForThirdDimension(dim: DimensionIndices): OrthoView { // Return the plane in which dim is always the same switch (dim) { diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index ad9acc787a3..16cc4aefa4e 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -398,25 +398,28 @@ function* pairwise(arr: Array): Generator<[T, T], *, *> { } function applyLabeledVoxelMapToAllMissingResolutions( - labeledVoxelMapToApply: LabeledVoxelsMap, + inputLabeledVoxelMap: LabeledVoxelsMap, labeledZoomStep: number, dimensionIndices: DimensionMap, resolutionInfo: ResolutionInfo, segmentationCube: DataCube, cellId: number, - thirdDimensionOfSlice: number, + thirdDimensionOfSlice: number, // this value is specified in global (mag1) coords shouldOverwrite: boolean, ): void { - let currentLabeledVoxelMap: LabeledVoxelsMap = labeledVoxelMapToApply; - let thirdDimensionValue = thirdDimensionOfSlice; - const get3DAddress = (voxel: Vector2) => { - const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], thirdDimensionValue]; - const orderedVoxelWithThirdDimension = [ - unorderedVoxelWithThirdDimension[dimensionIndices[0]], - unorderedVoxelWithThirdDimension[dimensionIndices[1]], - unorderedVoxelWithThirdDimension[dimensionIndices[2]], - ]; - return orderedVoxelWithThirdDimension; + const get3DAddressCreator = (targetResolution: Vector3) => { + const sampledThirdDimensionValue = + Math.floor(thirdDimensionOfSlice / targetResolution[dimensionIndices[2]]) % + Constants.BUCKET_WIDTH; + + return (voxel: Vector2) => { + const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], sampledThirdDimensionValue]; + const orderedVoxelWithThirdDimension = Dimensions.transDimWithIndices( + unorderedVoxelWithThirdDimension, + dimensionIndices, + ); + return orderedVoxelWithThirdDimension; + }; }; const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); @@ -432,6 +435,9 @@ function applyLabeledVoxelMapToAllMissingResolutions( // should be upsampled) const upsampleSequence = 
allResolutionsWithIndices.slice(0, pivotIndex + 1).reverse(); + // On each sampling step, a new LabeledVoxelMap is acquired + // which is used as the input for the next down-/upsampling + let currentLabeledVoxelMap: LabeledVoxelsMap = inputLabeledVoxelMap; // First upsample the voxel map and apply it to all better resolutions. // sourceZoomStep will be higher than targetZoomStep for (const [source, target] of pairwise(upsampleSequence)) { @@ -448,11 +454,7 @@ function applyLabeledVoxelMapToAllMissingResolutions( dimensionIndices, thirdDimensionOfSlice, ); - // Adjust thirdDimensionValue so get3DAddress returns the third dimension value - // in the target resolution to apply the voxelMap correctly. - thirdDimensionValue = - Math.floor(thirdDimensionOfSlice / targetResolution[dimensionIndices[2]]) % - Constants.BUCKET_WIDTH; + const numberOfSlices = Math.ceil( labeledResolution[dimensionIndices[2]] / targetResolution[dimensionIndices[2]], ); @@ -460,13 +462,16 @@ function applyLabeledVoxelMapToAllMissingResolutions( currentLabeledVoxelMap, segmentationCube, cellId, - get3DAddress, + get3DAddressCreator(targetResolution), numberOfSlices, dimensionIndices[2], shouldOverwrite, ); } - currentLabeledVoxelMap = labeledVoxelMapToApply; + + // Reset currentLabeledVoxelMap to start downsampling + // from the input LabeledVoxelsMap + currentLabeledVoxelMap = inputLabeledVoxelMap; // Next we downsample the annotation and apply it. // sourceZoomStep will be lower than targetZoomStep @@ -484,17 +489,15 @@ function applyLabeledVoxelMapToAllMissingResolutions( dimensionIndices, thirdDimensionOfSlice, ); - // Adjust thirdDimensionValue so get3DAddress returns the third dimension value - // in the target resolution to apply the voxelMap correctly. - thirdDimensionValue = - Math.floor(thirdDimensionOfSlice / targetResolution[dimensionIndices[2]]) % - Constants.BUCKET_WIDTH; + + const numberOfSlices = 1; + applyVoxelMap( currentLabeledVoxelMap, segmentationCube, cellId, - get3DAddress, - 1, + get3DAddressCreator(targetResolution), + numberOfSlices, dimensionIndices[2], shouldOverwrite, ); From a313fc3093256abc94b4ebf8078442aa183efc34 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 2 Oct 2020 11:35:28 +0200 Subject: [PATCH 082/121] further refactoring of applyLabeledVoxelMapToAllMissingResolutions --- .../oxalis/model/sagas/volumetracing_saga.js | 116 ++++++++---------- 1 file changed, 49 insertions(+), 67 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 16cc4aefa4e..77f888a6e7d 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -407,10 +407,15 @@ function applyLabeledVoxelMapToAllMissingResolutions( thirdDimensionOfSlice: number, // this value is specified in global (mag1) coords shouldOverwrite: boolean, ): void { + const thirdDim = dimensionIndices[2]; + + // This function creates a `get3DAddress` function which maps from + // a 2D vector address to the corresponding 3D vector address. + // The input address is local to a slice in the LabeledVoxelsMap (that's + // why it's 2D). The output address is local to the corresponding bucket. 
const get3DAddressCreator = (targetResolution: Vector3) => { const sampledThirdDimensionValue = - Math.floor(thirdDimensionOfSlice / targetResolution[dimensionIndices[2]]) % - Constants.BUCKET_WIDTH; + Math.floor(thirdDimensionOfSlice / targetResolution[thirdDim]) % Constants.BUCKET_WIDTH; return (voxel: Vector2) => { const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], sampledThirdDimensionValue]; @@ -422,86 +427,63 @@ function applyLabeledVoxelMapToAllMissingResolutions( }; }; - const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); // Get all available resolutions and divide the list into two parts. - const allResolutionsWithIndices = resolutionInfo.getResolutionsWithIndices(); // The pivotIndex is the index within allResolutionsWithIndices which refers to // the labeled resolution. - const pivotIndex = allResolutionsWithIndices.findIndex(([index]) => index === labeledZoomStep); // `downsampleSequence` contains the current mag and all higher mags (to which // should be downsampled) - const downsampleSequence = allResolutionsWithIndices.slice(pivotIndex); // `upsampleSequence` contains the current mag and all lower mags (to which // should be upsampled) + const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); + const allResolutionsWithIndices = resolutionInfo.getResolutionsWithIndices(); + const pivotIndex = allResolutionsWithIndices.findIndex(([index]) => index === labeledZoomStep); + const downsampleSequence = allResolutionsWithIndices.slice(pivotIndex); const upsampleSequence = allResolutionsWithIndices.slice(0, pivotIndex + 1).reverse(); - // On each sampling step, a new LabeledVoxelMap is acquired - // which is used as the input for the next down-/upsampling - let currentLabeledVoxelMap: LabeledVoxelsMap = inputLabeledVoxelMap; - // First upsample the voxel map and apply it to all better resolutions. - // sourceZoomStep will be higher than targetZoomStep - for (const [source, target] of pairwise(upsampleSequence)) { - const [sourceZoomStep, sourceResolution] = source; - const [targetZoomStep, targetResolution] = target; - - currentLabeledVoxelMap = sampleVoxelMapToResolution( - currentLabeledVoxelMap, - segmentationCube, - sourceResolution, - sourceZoomStep, - targetResolution, - targetZoomStep, - dimensionIndices, - thirdDimensionOfSlice, - ); + // Given a sequence of resolutions, the inputLabeledVoxelMap is applied + // over all these resolutions. 
+ function processSamplingSequence(samplingSequence, getNumberOfSlices) { + // On each sampling step, a new LabeledVoxelMap is acquired + // which is used as the input for the next down-/upsampling + let currentLabeledVoxelMap: LabeledVoxelsMap = inputLabeledVoxelMap; + + for (const [source, target] of pairwise(samplingSequence)) { + const [sourceZoomStep, sourceResolution] = source; + const [targetZoomStep, targetResolution] = target; + + currentLabeledVoxelMap = sampleVoxelMapToResolution( + currentLabeledVoxelMap, + segmentationCube, + sourceResolution, + sourceZoomStep, + targetResolution, + targetZoomStep, + dimensionIndices, + thirdDimensionOfSlice, + ); - const numberOfSlices = Math.ceil( - labeledResolution[dimensionIndices[2]] / targetResolution[dimensionIndices[2]], - ); - applyVoxelMap( - currentLabeledVoxelMap, - segmentationCube, - cellId, - get3DAddressCreator(targetResolution), - numberOfSlices, - dimensionIndices[2], - shouldOverwrite, - ); + const numberOfSlices = getNumberOfSlices(targetResolution); + applyVoxelMap( + currentLabeledVoxelMap, + segmentationCube, + cellId, + get3DAddressCreator(targetResolution), + numberOfSlices, + thirdDim, + shouldOverwrite, + ); + } } - // Reset currentLabeledVoxelMap to start downsampling - // from the input LabeledVoxelsMap - currentLabeledVoxelMap = inputLabeledVoxelMap; + // First upsample the voxel map and apply it to all better resolutions. + // sourceZoomStep will be higher than targetZoomStep + processSamplingSequence(upsampleSequence, targetResolution => + Math.ceil(labeledResolution[thirdDim] / targetResolution[thirdDim]), + ); // Next we downsample the annotation and apply it. // sourceZoomStep will be lower than targetZoomStep - for (const [source, target] of pairwise(downsampleSequence)) { - const [sourceZoomStep, sourceResolution] = source; - const [targetZoomStep, targetResolution] = target; - - currentLabeledVoxelMap = sampleVoxelMapToResolution( - currentLabeledVoxelMap, - segmentationCube, - sourceResolution, - sourceZoomStep, - targetResolution, - targetZoomStep, - dimensionIndices, - thirdDimensionOfSlice, - ); - - const numberOfSlices = 1; - - applyVoxelMap( - currentLabeledVoxelMap, - segmentationCube, - cellId, - get3DAddressCreator(targetResolution), - numberOfSlices, - dimensionIndices[2], - shouldOverwrite, - ); - } + processSamplingSequence(downsampleSequence, _targetResolution => 1); } export function* finishLayer( From 043af121a4f65f43a2e2658ec27fc1b92470e34f Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 2 Oct 2020 11:51:12 +0200 Subject: [PATCH 083/121] remove unused import --- frontend/javascripts/oxalis/view/action_bar_view.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/view/action_bar_view.js b/frontend/javascripts/oxalis/view/action_bar_view.js index c03f4d79d2e..091e5b441c6 100644 --- a/frontend/javascripts/oxalis/view/action_bar_view.js +++ b/frontend/javascripts/oxalis/view/action_bar_view.js @@ -1,5 +1,5 @@ // @flow -import { Alert, Icon, Dropdown } from "antd"; +import { Alert, Dropdown } from "antd"; import { connect } from "react-redux"; import * as React from "react"; From c91432cf72118c4be860a45e3646a5aa3f21dde4 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 2 Oct 2020 11:55:29 +0200 Subject: [PATCH 084/121] fix flow after upgrade --- .../javascripts/oxalis/model/accessors/accessor_helpers.js | 2 +- .../javascripts/oxalis/model/volumetracing/volumelayer.js | 6 +++--- .../model/volumetracing/volume_annotation_sampling.spec.js | 2 
+- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js b/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js index b72e4135462..0931faae88a 100644 --- a/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js +++ b/frontend/javascripts/oxalis/model/accessors/accessor_helpers.js @@ -27,7 +27,7 @@ export function reuseInstanceOnEquality) => R>( ): F { let lastResult: R; - // $FlowIgnore This function has the same interface as F. + // $FlowFixMe[incompatible-return] This function has the same interface as F. return (...args: Array): R => { const result = fn(...args); if (result === lastResult || equalityFunction(result, lastResult)) { diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js index 6489a9f3536..a04905d1b9e 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js @@ -20,7 +20,7 @@ import { getNumberOfSlicesForResolution, } from "oxalis/model/accessors/volumetracing_accessor"; import { getBaseVoxelFactors } from "oxalis/model/scaleinfo"; -import Dimensions from "oxalis/model/dimensions"; +import Dimensions, { type DimensionIndices } from "oxalis/model/dimensions"; import Drawing from "libs/drawing"; import messages from "messages"; import Toast from "libs/toast"; @@ -39,7 +39,7 @@ export class VoxelIterator { boundingBox: ?BoundingBoxType; next: Vector3; currentSlice = 0; - thirdDimensionIndex: number; + thirdDimensionIndex: DimensionIndices; static finished(): VoxelIterator { const iterator = new VoxelIterator([], 0, 0, [0, 0], () => [0, 0, 0], 0); @@ -53,7 +53,7 @@ export class VoxelIterator { height: number, minCoord2d: Vector2, get3DCoordinate: Vector2 => Vector3, - thirdDimensionIndex: number, + thirdDimensionIndex: DimensionIndices, numberOfSlices: number = 1, boundingBox?: ?BoundingBoxType, ) { diff --git a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js index 509bfa8c431..332e44bdd85 100644 --- a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js +++ b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js @@ -35,7 +35,7 @@ const Cube = mockRequire.reRequire("oxalis/model/bucket_data_handling/data_cube" // Ava's recommendation for Flow types // https://github.com/avajs/ava/blob/master/docs/recipes/flow.md#typing-tcontext const test: TestInterface<{ - cube: Cube, + cube: typeof Cube, }> = (anyTest: any); test.beforeEach(t => { From f5b62bedbed46d06d2634bc8d256975901db6ea4 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 2 Oct 2020 11:58:21 +0200 Subject: [PATCH 085/121] improve comment for LabeledVoxelsMap --- frontend/javascripts/oxalis/constants.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/constants.js b/frontend/javascripts/oxalis/constants.js index d5fe59bc620..b81f96bd53b 100644 --- a/frontend/javascripts/oxalis/constants.js +++ b/frontend/javascripts/oxalis/constants.js @@ -127,8 +127,10 @@ export const Unicode = { MultiplicationSymbol: "×", }; -// Maps from a bucket address to a Buffer which contains -// the labeled voxels +// A LabeledVoxelsMap maps from a bucket address +// to a 2D slice of labeled voxels. 
These labeled voxels +// are stored in a Uint8Array in a binary way (which cell +// id the voxels should be changed to is not encoded). export type LabeledVoxelsMap = Map; const Constants = { From 6cd2187110f28f786dd970322d17a4b788b807a7 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 2 Oct 2020 12:01:28 +0200 Subject: [PATCH 086/121] fix uniform definion --- .../oxalis/geometries/materials/plane_material_factory.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js index 88f8ab9541f..32e1ae4e2ca 100644 --- a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js +++ b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.js @@ -219,10 +219,10 @@ class PlaneMaterialFactory { type: "f", value: 1, }; - // If the `_missing` uniform is true, the layer + // If the `_unrenderable` uniform is true, the layer // cannot (and should not) be rendered in the // current mag. - this.uniforms[`${layerName}_missing`] = { + this.uniforms[`${layerName}_unrenderable`] = { type: "f", value: 0, }; From 942c4babefcfd82b5e16f85c0de662c153241835 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 2 Oct 2020 12:10:14 +0200 Subject: [PATCH 087/121] fix getDataValue for missing resolution --- .../oxalis/model/bucket_data_handling/data_cube.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index da272c0ea80..d97002a0069 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -534,9 +534,7 @@ class DataCube { getDataValue(voxel: Vector3, mapping: ?Mapping, zoomStep: number = 0): number { if (!this.resolutionInfo.hasIndex(zoomStep)) { - throw new Error( - `DataCube.getDataValue was called with a zoomStep of ${zoomStep} which does not exist for the current layer.`, - ); + return 0; } const bucket = this.getBucket(this.positionToZoomedAddress(voxel, zoomStep)); const voxelIndex = this.getVoxelIndex(voxel, zoomStep); From a2167293eb3290602c900795cb88a356eb2d13bf Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 14:12:19 +0200 Subject: [PATCH 088/121] Improve performance of volume annotation tools (#4848) * tmp: use LabeledVoxelsMap for brushing and TypedArray for brushing map (first measurements yield 20x better performance) * restore all of the brush functionality; create LabeledVoxelMap in current mag; further performance optimizations * clean up VoxelIterator a bit * more clean up * clean up and fix CI * further clean up * remove todo * use unzoomed centroid when updating direction * fix contour drawing in mag > 1 * fix flow * fix circle-drawing for anisotropic mags * update changelog * rename VoxelIterator to VoxelBuffer2D * optimize draw circle method by comparing squared dist with squared radius * rename variables and change comment for bx and by * remove obsolete comment * add another comment to fillCircle call --- CHANGELOG.unreleased.md | 1 + frontend/javascripts/libs/drawing.js | 3 +- .../model/accessors/volumetracing_accessor.js | 9 +- .../bucket_data_handling/bounding_box.js | 61 +++- .../model/bucket_data_handling/data_cube.js | 38 +-- .../model/helpers/position_converter.js | 37 +++ 
.../oxalis/model/sagas/volumetracing_saga.js | 199 +++++++---- .../volume_annotation_sampling.js | 16 +- .../oxalis/model/volumetracing/volumelayer.js | 314 ++++++++---------- frontend/javascripts/oxalis/store.js | 1 + .../volume_annotation_sampling.spec.js | 8 +- .../test/sagas/volumetracing_saga.spec.js | 20 +- 12 files changed, 399 insertions(+), 308 deletions(-) diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 1a227b0515f..faa450ca045 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -16,6 +16,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released ### Changed - New volume/hybrid annotations are now automatically multi-resolution volume annotations. [#4755](https://github.com/scalableminds/webknossos/pull/4755) - The position input of tracings now accepts decimal input. When losing focus the values are cut off at the comma. [#4803](https://github.com/scalableminds/webknossos/pull/4803) +- Improved performance of volume annotations (brush and trace tool). [#4848](https://github.com/scalableminds/webknossos/pull/4848) - webknossos.org only: Accounts associated with new organizations can now be created even when a datastore is unreachable. The necessary folders are created lazily when needed. [#4846](https://github.com/scalableminds/webknossos/pull/4846) ### Fixed diff --git a/frontend/javascripts/libs/drawing.js b/frontend/javascripts/libs/drawing.js index ed6cc9ba8e9..e17ecbd4fbe 100644 --- a/frontend/javascripts/libs/drawing.js +++ b/frontend/javascripts/libs/drawing.js @@ -184,9 +184,10 @@ class Drawing { scaleY: number, paint: (number, number) => void, ) { + const squaredRadius = radius ** 2; for (let posX = x - radius; posX < x + radius; posX++) { for (let posY = y - radius; posY < y + radius; posY++) { - if (Math.sqrt(((posX - x) / scaleX) ** 2 + ((posY - y) / scaleY) ** 2) < radius) { + if (((posX - x) / scaleX) ** 2 + ((posY - y) / scaleY) ** 2 < squaredRadius) { paint(posX, posY); } } diff --git a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js index 54b45c2bd4a..3b9007279b6 100644 --- a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.js @@ -5,8 +5,7 @@ import Maybe from "data.maybe"; import { getRequestLogZoomStep } from "oxalis/model/accessors/flycam_accessor"; import type { Tracing, VolumeTracing, OxalisState } from "oxalis/store"; -import Dimensions from "oxalis/model/dimensions"; -import type { VolumeTool, ContourMode, Vector3, OrthoView } from "oxalis/constants"; +import type { VolumeTool, ContourMode } from "oxalis/constants"; import type { HybridServerTracing, ServerVolumeTracing } from "admin/api_flow_types"; export function getVolumeTracing(tracing: Tracing): Maybe { @@ -61,9 +60,3 @@ export function isSegmentationMissingForZoomstep( ): boolean { return getRequestLogZoomStep(state) > maxZoomStepForSegmentation; } - -export function getNumberOfSlicesForResolution(activeResolution: Vector3, activePlane: OrthoView) { - const thirdDimensionIndex = Dimensions.thirdDimensionForPlane(activePlane); - const numberOfSlices = activeResolution[thirdDimensionIndex]; - return numberOfSlices; -} diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bounding_box.js b/frontend/javascripts/oxalis/model/bucket_data_handling/bounding_box.js index 6f2ae571b7f..c850aad8c0e 100644 --- 
a/frontend/javascripts/oxalis/model/bucket_data_handling/bounding_box.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bounding_box.js @@ -5,8 +5,8 @@ import _ from "lodash"; +import { V3 } from "libs/mjs"; import { getResolutions } from "oxalis/model/accessors/dataset_accessor"; -import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; import Store from "oxalis/store"; import constants, { type BoundingBoxType, @@ -17,17 +17,17 @@ import constants, { class BoundingBox { boundingBox: ?BoundingBoxType; - cube: DataCube; min: Vector3; max: Vector3; - constructor(boundingBox: ?BoundingBoxType, cube: DataCube) { + // If maxRestriction is provided, the passed boundingBox is automatically + // clipped to maxRestriction + constructor(boundingBox: ?BoundingBoxType, maxRestriction?: Vector3) { this.boundingBox = boundingBox; - this.cube = cube; // Min is including this.min = [0, 0, 0]; // Max is excluding - this.max = _.clone(this.cube.upperBoundary); + this.max = maxRestriction != null ? _.clone(maxRestriction) : [Infinity, Infinity, Infinity]; if (boundingBox != null) { for (const i of Vector3Indicies) { @@ -66,6 +66,57 @@ class BoundingBox { min[0] < x && x < max[0] - 1 && min[1] < y && y < max[1] - 1 && min[2] < z && z < max[2] - 1 ); } + + intersectedWith(other: BoundingBox): BoundingBox { + const newMin = [ + Math.max(this.min[0], other.min[0]), + Math.max(this.min[1], other.min[1]), + Math.max(this.min[2], other.min[2]), + ]; + const newMax = [ + Math.min(this.max[0], other.max[0]), + Math.min(this.max[1], other.max[1]), + Math.min(this.max[2], other.max[2]), + ]; + + return new BoundingBox({ min: newMin, max: newMax }); + } + + chunkIntoBuckets() { + const size = V3.sub(this.max, this.min); + const start = [...this.min]; + const chunkSize = [32, 32, 32]; + const chunkBorderAlignments = [32, 32, 32]; + + // Move the start to be aligned correctly. This doesn't actually change + // the start of the first chunk, because we'll intersect with `self`, + // but it'll lead to all chunk borders being aligned correctly. 
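// Illustrative example (made-up numbers): for a bounding box with min = [70, 0, 0]
// and max = [130, 32, 32], startAdjust below becomes [70 % 32, 0, 0] = [6, 0, 0],
// so the x-loop runs over 64, 96 and 128. Intersecting each candidate chunk with
// this bounding box then yields the x-ranges [70, 96), [96, 128) and [128, 130):
// the outer chunks are clipped, but every inner border stays on the 32-voxel grid.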
+ const startAdjust = [ + start[0] % chunkBorderAlignments[0], + start[1] % chunkBorderAlignments[1], + start[2] % chunkBorderAlignments[2], + ]; + + const boxes = []; + + for (const x of _.range(start[0] - startAdjust[0], start[0] + size[0], chunkSize[0])) { + for (const y of _.range(start[1] - startAdjust[1], start[1] + size[1], chunkSize[1])) { + for (const z of _.range(start[2] - startAdjust[2], start[2] + size[2], chunkSize[2])) { + const newMin = [x, y, z]; + boxes.push( + this.intersectedWith( + new BoundingBox({ + min: newMin, + max: V3.add(newMin, chunkSize), + }), + ), + ); + } + } + } + + return boxes; + } } export default BoundingBox; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index d97002a0069..939614451f7 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -14,7 +14,7 @@ import { NullBucket, type BucketDataArray, } from "oxalis/model/bucket_data_handling/bucket"; -import { type VoxelIterator, VoxelNeighborStack2D } from "oxalis/model/volumetracing/volumelayer"; +import { VoxelNeighborStack2D } from "oxalis/model/volumetracing/volumelayer"; import { getResolutions, ResolutionInfo } from "oxalis/model/accessors/dataset_accessor"; import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; @@ -122,14 +122,14 @@ class DataCube { shouldBeRestrictedByTracingBoundingBox() ? getSomeTracing(Store.getState().tracing).boundingBox : null, - this, + this.upperBoundary, ); listenToStoreProperty( state => getSomeTracing(state.tracing).boundingBox, boundingBox => { if (shouldBeRestrictedByTracingBoundingBox()) { - this.boundingBox = new BoundingBox(boundingBox, this); + this.boundingBox = new BoundingBox(boundingBox, this.upperBoundary); } }, ); @@ -326,34 +326,10 @@ class DataCube { this.trigger("volumeLabeled"); } - labelVoxelsInAllResolutions( - iterator: VoxelIterator, - label: number, - activeCellId?: ?number = null, - ): void { - // TODO: Do not label voxel in higher resolutions multiple times (also see https://github.com/scalableminds/webknossos/issues/4838) - // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. - // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used - // to apply the annotation to all needed resolutions, without labeling voxels multiple times. - - for (const [resolutionIndex] of this.resolutionInfo.getResolutionsWithIndices()) { - while (iterator.hasNext) { - const voxel = iterator.getNext(); - this.labelVoxelInResolution(voxel, label, resolutionIndex, activeCellId); - } - iterator.reset(); - } - - this.triggerPushQueue(); - } - labelVoxelInAllResolutions(voxel: Vector3, label: number, activeCellId: ?number) { - // Also see labelVoxelsInAllResolutions - // TODO: Do not label voxel in higher resolutions multiple times (also see https://github.com/scalableminds/webknossos/issues/4838) - // -> Instead of using a voxel iterator, create a LabeledVoxelsMap for the brush stroke / trace tool. - // If this LabeledVoxelsMap exists, the up and downsampling methods can easily be used - // to apply the annotation to all needed resolutions, without labeling voxels multiple times. 
- + // This function is only provided for the wK front-end api and should not be used internally, + // since it only operates on one voxel and therefore is not performance-optimized. + // Please make use of a LabeledVoxelsMap instead. for (const [resolutionIndex] of this.resolutionInfo.getResolutionsWithIndices()) { this.labelVoxelInResolution(voxel, label, resolutionIndex, activeCellId); } @@ -560,7 +536,7 @@ class DataCube { return this.getDataValue(voxel, this.isMappingEnabled() ? this.getMapping() : null, zoomStep); } - getVoxelIndexByVoxelOffset([x, y, z]: Vector3): number { + getVoxelIndexByVoxelOffset([x, y, z]: Vector3 | Float32Array): number { return x + y * constants.BUCKET_WIDTH + z * constants.BUCKET_WIDTH ** 2; } diff --git a/frontend/javascripts/oxalis/model/helpers/position_converter.js b/frontend/javascripts/oxalis/model/helpers/position_converter.js index 81dd1516a8c..3f8cffec23c 100644 --- a/frontend/javascripts/oxalis/model/helpers/position_converter.js +++ b/frontend/javascripts/oxalis/model/helpers/position_converter.js @@ -25,6 +25,31 @@ export function globalPositionToBucketPosition( ]; } +export function scaleGlobalPositionWithResolution( + [x, y, z]: Vector3, + resolution: Vector3, +): Vector3 { + return [ + Math.floor(x / resolution[0]), + Math.floor(y / resolution[1]), + Math.floor(z / resolution[2]), + ]; +} + +export function zoomedPositionToGlobalPosition( + [x, y, z]: Vector3, + currentResolution: Vector3, +): Vector3 { + return [x * currentResolution[0], y * currentResolution[1], z * currentResolution[2]]; +} + +export function scaleGlobalPositionWithResolutionFloat( + [x, y, z]: Vector3, + resolution: Vector3, +): Vector3 { + return [x / resolution[0], y / resolution[1], z / resolution[2]]; +} + export function globalPositionToBucketPositionFloat( [x, y, z]: Vector3, resolutions: Array, @@ -75,6 +100,18 @@ export function getResolutionsFactors(resolutionA: Vector3, resolutionB: Vector3 ]; } +export function zoomedPositionToZoomedAddress( + [x, y, z]: Vector3, + resolutionIndex: number, +): Vector4 { + return [ + Math.floor(x / constants.BUCKET_WIDTH), + Math.floor(y / constants.BUCKET_WIDTH), + Math.floor(z / constants.BUCKET_WIDTH), + resolutionIndex, + ]; +} + // TODO: zoomedAddressToAnotherZoomStep usages should be converted to zoomedAddressToAnotherZoomStepWithInfo // Note that this is not trivial since zoomedAddressToAnotherZoomStepWithInfo will throw on not existing // resolution indices (in contrast to zoomedAddressToAnotherZoomStep). 
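The new converter helpers make the chain from global (mag 1) coordinates over zoomed
coordinates to bucket addresses explicit. A minimal sketch of how they compose; the
resolution [4, 4, 2], its index 2 and the positions are only illustrative:

import {
  scaleGlobalPositionWithResolution,
  zoomedPositionToGlobalPosition,
  zoomedPositionToZoomedAddress,
} from "oxalis/model/helpers/position_converter";

const globalPosition = [100, 200, 60];
const labeledResolution = [4, 4, 2];
const labeledZoomStep = 2;

// Global (mag 1) voxel -> voxel in the labeled mag: [25, 50, 30]
const zoomedPosition = scaleGlobalPositionWithResolution(globalPosition, labeledResolution);

// Voxel in the labeled mag -> address of its 32³ bucket: [0, 1, 0, 2]
const bucketAddress = zoomedPositionToZoomedAddress(zoomedPosition, labeledZoomStep);

// Back to mag 1 (the corner of the zoomed voxel): [100, 200, 60]
const backToGlobal = zoomedPositionToGlobalPosition(zoomedPosition, labeledResolution);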
diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 77f888a6e7d..9e96b363b47 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -35,13 +35,11 @@ import type { VolumeTracing, Flycam } from "oxalis/store"; import { enforceVolumeTracing, isVolumeTraceToolDisallowed, - getNumberOfSlicesForResolution, } from "oxalis/model/accessors/volumetracing_accessor"; import { getPosition, getFlooredPosition, getRotation, - getCurrentResolution, getRequestLogZoomStep, } from "oxalis/model/accessors/flycam_accessor"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; @@ -60,6 +58,7 @@ import Constants, { VolumeToolEnum, type LabeledVoxelsMap, } from "oxalis/constants"; +import BoundingBox from "oxalis/model/bucket_data_handling/bounding_box"; import Dimensions, { type DimensionMap } from "oxalis/model/dimensions"; import Model from "oxalis/model"; import Toast from "libs/toast"; @@ -67,6 +66,7 @@ import VolumeLayer from "oxalis/model/volumetracing/volumelayer"; import inferSegmentInViewport, { getHalfViewportExtents, } from "oxalis/model/sagas/automatic_brush_saga"; +import { zoomedPositionToZoomedAddress } from "oxalis/model/helpers/position_converter"; export function* watchVolumeTracingAsync(): Saga { yield* take("WK_READY"); @@ -119,21 +119,10 @@ export function* editVolumeLayerAsync(): Generator { const currentLayer = yield* call(createVolumeLayer, startEditingAction.planeId); const initialViewport = yield* select(state => state.viewModeData.plane.activeViewport); - let activeResolution = yield* select(state => getCurrentResolution(state)); - let numberOfSlices = getNumberOfSlicesForResolution(activeResolution, initialViewport); - const activeViewportBounding = yield* call( - getBoundingsFromPosition, - initialViewport, - numberOfSlices, - ); if (activeTool === VolumeToolEnum.BRUSH) { yield* call( - labelWithIterator, - currentLayer.getCircleVoxelIterator( - startEditingAction.position, - activeResolution, - activeViewportBounding, - ), + labelWithVoxelBuffer2D, + currentLayer.getCircleVoxelBuffer2D(startEditingAction.position), contourTracingMode, ); } @@ -160,20 +149,9 @@ export function* editVolumeLayerAsync(): Generator { currentLayer.addContour(addToLayerAction.position); } if (activeTool === VolumeToolEnum.BRUSH) { - activeResolution = yield* select(state => getCurrentResolution(state)); - numberOfSlices = getNumberOfSlicesForResolution(activeResolution, initialViewport); - const currentViewportBounding = yield* call( - getBoundingsFromPosition, - activeViewport, - numberOfSlices, - ); yield* call( - labelWithIterator, - currentLayer.getCircleVoxelIterator( - addToLayerAction.position, - activeResolution, - currentViewportBounding, - ), + labelWithVoxelBuffer2D, + currentLayer.getCircleVoxelBuffer2D(addToLayerAction.position), contourTracingMode, ); } @@ -204,32 +182,131 @@ function* getBoundingsFromPosition( function* createVolumeLayer(planeId: OrthoView): Saga { const position = yield* select(state => getFlooredPosition(state.flycam)); const thirdDimValue = position[Dimensions.thirdDimensionForPlane(planeId)]; - return new VolumeLayer(planeId, thirdDimValue); + + const labeledResolution = yield* select(state => { + const resolutionInfo = getResolutionInfoOfSegmentationLayer(state.dataset); + const requestedZoomStep = getRequestLogZoomStep(state); + const labeledZoomStep = 
resolutionInfo.getClosestExistingIndex(requestedZoomStep); + return resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); + }); + + return new VolumeLayer(planeId, thirdDimValue, labeledResolution); } -function* labelWithIterator(iterator, contourTracingMode): Saga { +function* labelWithVoxelBuffer2D(voxelBuffer, contourTracingMode): Saga { const allowUpdate = yield* select(state => state.tracing.restrictions.allowUpdate); if (!allowUpdate) return; const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); const segmentationLayer = yield* call([Model, Model.getSegmentationLayer]); const { cube } = segmentationLayer; - switch (contourTracingMode) { - case ContourModeEnum.DRAW_OVERWRITE: - yield* call([cube, cube.labelVoxelsInAllResolutions], iterator, activeCellId); - break; - case ContourModeEnum.DRAW: - yield* call([cube, cube.labelVoxelsInAllResolutions], iterator, activeCellId, 0); - break; - case ContourModeEnum.DELETE_FROM_ACTIVE_CELL: - yield* call([cube, cube.labelVoxelsInAllResolutions], iterator, 0, activeCellId); - break; - case ContourModeEnum.DELETE_FROM_ANY_CELL: - yield* call([cube, cube.labelVoxelsInAllResolutions], iterator, 0); - break; - default: - throw new Error("Invalid volume tracing mode."); + + const currentLabeledVoxelMap: LabeledVoxelsMap = new Map(); + + const activeViewport = yield* select(state => state.viewModeData.plane.activeViewport); + const dimensionIndices = Dimensions.getIndices(activeViewport); + + const resolutionInfo = yield* select(state => + getResolutionInfoOfSegmentationLayer(state.dataset), + ); + + const requestedZoomStep = yield* select(state => getRequestLogZoomStep(state)); + const labeledZoomStep = resolutionInfo.getClosestExistingIndex(requestedZoomStep); + const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); + + const get3DCoordinateFromLocal2D = ([x, y]) => + voxelBuffer.get3DCoordinate([x + voxelBuffer.minCoord2d[0], y + voxelBuffer.minCoord2d[1]]); + const topLeft3DCoord = get3DCoordinateFromLocal2D([0, 0]); + const bottomRight3DCoord = get3DCoordinateFromLocal2D([voxelBuffer.width, voxelBuffer.height]); + // Since the bottomRight3DCoord is exclusive for the described bounding box, + // the third dimension has to be increased by one (otherwise, the volume of the bounding + // box would be empty) + bottomRight3DCoord[dimensionIndices[2]]++; + + const outerBoundingBox = new BoundingBox({ + min: topLeft3DCoord, + max: bottomRight3DCoord, + }); + + const bucketBoundingBoxes = outerBoundingBox.chunkIntoBuckets(); + + for (const boundingBoxChunk of bucketBoundingBoxes) { + const { min, max } = boundingBoxChunk; + const bucketZoomedAddress = zoomedPositionToZoomedAddress(min, labeledZoomStep); + + if (currentLabeledVoxelMap.get(bucketZoomedAddress)) { + throw new Error("When iterating over the buckets, we shouldn't visit the same bucket twice"); + } + + const labelMapOfBucket = new Uint8Array(Constants.BUCKET_WIDTH ** 2); + currentLabeledVoxelMap.set(bucketZoomedAddress, labelMapOfBucket); + + // globalA (first dim) and globalB (secondB) are global coordinates + // which can be used to index into the 2D slice of the VoxelBuffer2D (when subtracting the minCoord2d) + // and the LabeledVoxelMap + for (let globalA = min[dimensionIndices[0]]; globalA < max[dimensionIndices[0]]; globalA++) { + for (let globalB = min[dimensionIndices[1]]; globalB < max[dimensionIndices[1]]; globalB++) { + if ( + voxelBuffer.map[ + voxelBuffer.linearizeIndex( + globalA - 
voxelBuffer.minCoord2d[0], + globalB - voxelBuffer.minCoord2d[1], + ) + ] + ) { + labelMapOfBucket[ + (globalA % Constants.BUCKET_WIDTH) * Constants.BUCKET_WIDTH + + (globalB % Constants.BUCKET_WIDTH) + ] = 1; + } + } + } } + + const shouldOverwrite = [ + ContourModeEnum.DRAW_OVERWRITE, + ContourModeEnum.DELETE_FROM_ANY_CELL, + ].includes(contourTracingMode); + + // Since the LabeledVoxelMap is created in the current magnification, + // we only need to annotate one slice in this mag. + // `applyLabeledVoxelMapToAllMissingResolutions` will take care of + // annotating multiple slices + const numberOfSlices = 1; + const thirdDim = dimensionIndices[2]; + + const isDeleting = [ + ContourModeEnum.DELETE_FROM_ACTIVE_CELL, + ContourModeEnum.DELETE_FROM_ANY_CELL, + ].includes(contourTracingMode); + const newCellIdValue = isDeleting ? 0 : activeCellId; + const overwritableValue = isDeleting ? activeCellId : 0; + + applyVoxelMap( + currentLabeledVoxelMap, + cube, + newCellIdValue, + voxelBuffer.getFast3DCoordinate, + numberOfSlices, + thirdDim, + shouldOverwrite, + overwritableValue, + ); + + // thirdDimensionOfSlice needs to be provided in global coordinates + const thirdDimensionOfSlice = + topLeft3DCoord[dimensionIndices[2]] * labeledResolution[dimensionIndices[2]]; + applyLabeledVoxelMapToAllMissingResolutions( + currentLabeledVoxelMap, + labeledZoomStep, + dimensionIndices, + resolutionInfo, + cube, + newCellIdValue, + thirdDimensionOfSlice, + shouldOverwrite, + overwritableValue, + ); } function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga { @@ -244,11 +321,11 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga const segmentationLayer: DataLayer = yield* call([Model, Model.getSegmentationLayer]); const { cube } = segmentationLayer; - const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); + const requestedZoomStep = yield* select(state => getRequestLogZoomStep(state)); const resolutionInfo = yield* select(state => getResolutionInfoOfSegmentationLayer(state.dataset), ); - const labeledZoomStep = resolutionInfo.getClosestExistingIndex(activeZoomStep); + const labeledZoomStep = resolutionInfo.getClosestExistingIndex(requestedZoomStep); const dimensionIndices = Dimensions.getIndices(activeViewport); const position = yield* select(state => getFlooredPosition(state.flycam)); @@ -338,11 +415,11 @@ export function* floodFill(): Saga { const seedVoxel = Dimensions.roundCoordinate(position); const activeCellId = yield* select(state => enforceVolumeTracing(state.tracing).activeCellId); const dimensionIndices = Dimensions.getIndices(planeId); - const activeZoomStep = yield* select(state => getRequestLogZoomStep(state)); + const requestedZoomStep = yield* select(state => getRequestLogZoomStep(state)); const resolutionInfo = yield* select(state => getResolutionInfoOfSegmentationLayer(state.dataset), ); - const labeledZoomStep = resolutionInfo.getClosestExistingIndex(activeZoomStep); + const labeledZoomStep = resolutionInfo.getClosestExistingIndex(requestedZoomStep); const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); // The floodfill and applyVoxelMap methods iterate within the bucket. @@ -405,7 +482,10 @@ function applyLabeledVoxelMapToAllMissingResolutions( segmentationCube: DataCube, cellId: number, thirdDimensionOfSlice: number, // this value is specified in global (mag1) coords + // if shouldOverwrite is false, a voxel is only overwritten if + // its old value is equal to overwritableValue. 
shouldOverwrite: boolean, + overwritableValue: number = 0, ): void { const thirdDim = dimensionIndices[2]; @@ -417,13 +497,10 @@ function applyLabeledVoxelMapToAllMissingResolutions( const sampledThirdDimensionValue = Math.floor(thirdDimensionOfSlice / targetResolution[thirdDim]) % Constants.BUCKET_WIDTH; - return (voxel: Vector2) => { - const unorderedVoxelWithThirdDimension = [voxel[0], voxel[1], sampledThirdDimensionValue]; - const orderedVoxelWithThirdDimension = Dimensions.transDimWithIndices( - unorderedVoxelWithThirdDimension, - dimensionIndices, - ); - return orderedVoxelWithThirdDimension; + return (x: number, y: number, out: Vector3 | Float32Array) => { + out[dimensionIndices[0]] = x; + out[dimensionIndices[1]] = y; + out[dimensionIndices[2]] = sampledThirdDimensionValue; }; }; @@ -471,6 +548,7 @@ function applyLabeledVoxelMapToAllMissingResolutions( numberOfSlices, thirdDim, shouldOverwrite, + overwritableValue, ); } } @@ -496,15 +574,10 @@ export function* finishLayer( } if (activeTool === VolumeToolEnum.TRACE || activeTool === VolumeToolEnum.BRUSH) { - const currentResolution = yield* select(state => getCurrentResolution(state)); - yield* call( - labelWithIterator, - layer.getVoxelIterator(activeTool, currentResolution), - contourTracingMode, - ); + yield* call(labelWithVoxelBuffer2D, layer.getVoxelBuffer2D(activeTool), contourTracingMode); } - yield* put(updateDirectionAction(layer.getCentroid())); + yield* put(updateDirectionAction(layer.getUnzoomedCentroid())); yield* put(resetContourAction()); } diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 404fc20ebca..16f3b8d72a0 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -1,6 +1,6 @@ // @flow -import constants, { type Vector2, type Vector3, type LabeledVoxelsMap } from "oxalis/constants"; +import constants, { type Vector3, type LabeledVoxelsMap } from "oxalis/constants"; import { map3 } from "libs/utils"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; import messages from "messages"; @@ -279,10 +279,13 @@ export function applyVoxelMap( labeledVoxelMap: LabeledVoxelsMap, dataCube: DataCube, cellId: number, - get3DAddress: Vector2 => Vector3, + get3DAddress: (number, number, Vector3 | Float32Array) => void, numberOfSlicesToApply: number, thirdDimensionIndex: 0 | 1 | 2, + // if shouldOverwrite is false, a voxel is only overwritten if + // its old value is equal to overwritableValue. shouldOverwrite: boolean = true, + overwritableValue: number = 0, ) { for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { let bucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); @@ -291,7 +294,9 @@ export function applyVoxelMap( } bucket.markAndAddBucketForUndo(); let data = bucket.getOrCreateData(); - const thirdDimensionValueInBucket = get3DAddress([0, 0])[2]; + const out = new Float32Array(3); + get3DAddress(0, 0, out); + const thirdDimensionValueInBucket = out[2]; for (let sliceCount = 0; sliceCount < numberOfSlicesToApply; sliceCount++) { if (thirdDimensionValueInBucket + sliceCount === constants.BUCKET_WIDTH) { // The current slice is in the next bucket in the third direction. 
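Together with the new overwritableValue parameter, the per-voxel write rule applied by
the loop further below can be summarized by this simplified sketch (mayWrite is a
hypothetical helper, not part of the module): when erasing with the "delete from active
cell" mode, cellId is 0 and overwritableValue is the active cell id, so only voxels that
currently belong to the active cell are cleared.

const mayWrite = (oldValue: number, shouldOverwrite: boolean, overwritableValue: number) =>
  shouldOverwrite || oldValue === overwritableValue;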
@@ -306,12 +311,13 @@ export function applyVoxelMap( for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) { for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) { if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { - const voxelToLabel = get3DAddress([firstDim, secondDim]); + get3DAddress(firstDim, secondDim, out); + const voxelToLabel = out; voxelToLabel[thirdDimensionIndex] = (voxelToLabel[thirdDimensionIndex] + sliceCount) % constants.BUCKET_WIDTH; // The voxelToLabel is already within the bucket and in the correct resolution. const voxelAddress = dataCube.getVoxelIndexByVoxelOffset(voxelToLabel); - if (shouldOverwrite || (!shouldOverwrite && data[voxelAddress] === 0)) { + if (shouldOverwrite || (!shouldOverwrite && data[voxelAddress] === overwritableValue)) { data[voxelAddress] = cellId; } } diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js index a04905d1b9e..8fac0f8b31f 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js @@ -5,8 +5,13 @@ import _ from "lodash"; +import { + scaleGlobalPositionWithResolution, + scaleGlobalPositionWithResolutionFloat, + zoomedPositionToGlobalPosition, +} from "oxalis/model/helpers/position_converter"; import Constants, { - type BoundingBoxType, + OrthoViews, type OrthoView, type Vector2, type Vector3, @@ -15,134 +20,50 @@ import Constants, { type VolumeTool, } from "oxalis/constants"; import { V3 } from "libs/mjs"; -import { - enforceVolumeTracing, - getNumberOfSlicesForResolution, -} from "oxalis/model/accessors/volumetracing_accessor"; +import { enforceVolumeTracing } from "oxalis/model/accessors/volumetracing_accessor"; import { getBaseVoxelFactors } from "oxalis/model/scaleinfo"; -import Dimensions, { type DimensionIndices } from "oxalis/model/dimensions"; +import Dimensions from "oxalis/model/dimensions"; import Drawing from "libs/drawing"; import messages from "messages"; import Toast from "libs/toast"; import Store from "oxalis/store"; -export class VoxelIterator { - hasNext: boolean = true; - map: boolean[][]; - x = 0; - y = 0; +/* + A VoxelBuffer2D instance holds a two dimensional slice + of painted (binary) voxels. It is used by the + VolumeLayer class to describe how volume operations + should be applied. 
+ */ +export class VoxelBuffer2D { + map: Uint8Array; width: number; height: number; minCoord2d: Vector2; get3DCoordinate: Vector2 => Vector3; - numberOfSlices: number; - boundingBox: ?BoundingBoxType; - next: Vector3; - currentSlice = 0; - thirdDimensionIndex: DimensionIndices; - - static finished(): VoxelIterator { - const iterator = new VoxelIterator([], 0, 0, [0, 0], () => [0, 0, 0], 0); - iterator.hasNext = false; - return iterator; + getFast3DCoordinate: (number, number, Vector3 | Float32Array) => void; + + static empty(): VoxelBuffer2D { + return new VoxelBuffer2D(new Uint8Array(0), 0, 0, [0, 0], () => [0, 0, 0], () => {}); } constructor( - map: boolean[][], + map: Uint8Array, width: number, height: number, minCoord2d: Vector2, get3DCoordinate: Vector2 => Vector3, - thirdDimensionIndex: DimensionIndices, - numberOfSlices: number = 1, - boundingBox?: ?BoundingBoxType, + getFast3DCoordinate: (number, number, Vector3 | Float32Array) => void, ) { this.map = map; this.width = width; this.height = height; this.minCoord2d = minCoord2d; this.get3DCoordinate = get3DCoordinate; - this.thirdDimensionIndex = thirdDimensionIndex; - this.boundingBox = boundingBox; - this.numberOfSlices = numberOfSlices; - this.reset(); - } - - get3DCoordinateWithSliceOffset(position: Vector2): Vector3 { - const threeDPosition = this.get3DCoordinate(position); - threeDPosition[this.thirdDimensionIndex] += this.currentSlice; - return threeDPosition; - } - - reset(resetSliceCount: boolean = true) { - this.x = 0; - this.y = 0; - if (resetSliceCount) { - this.currentSlice = 0; - } - if (!this.map || !this.map[0]) { - this.hasNext = false; - } else { - const firstCoordinate = this.get3DCoordinateWithSliceOffset(this.minCoord2d); - if (this.map[0][0] && this.isCoordinateInBounds(firstCoordinate)) { - this.next = firstCoordinate; - } else { - this.getNext(); - } - } - } - - isCoordinateInBounds(coor: Vector3): boolean { - if (!this.boundingBox) { - return true; - } - return ( - coor[0] >= this.boundingBox.min[0] && - coor[0] <= this.boundingBox.max[0] && - coor[1] >= this.boundingBox.min[1] && - coor[1] <= this.boundingBox.max[1] && - coor[2] >= this.boundingBox.min[2] && - coor[2] <= this.boundingBox.max[2] - ); + this.getFast3DCoordinate = getFast3DCoordinate; } - nextSlice() { - ++this.currentSlice; - if (this.currentSlice < this.numberOfSlices) { - this.reset(false); - return true; - } - return false; - } - - getNext(): Vector3 { - const res = this.next; - let foundNext = false; - while (!foundNext) { - this.x = (this.x + 1) % this.width; - if (this.x === 0) { - this.y++; - } - if (this.y === this.height) { - const hasNextSlice = this.nextSlice(); - if (!hasNextSlice) { - foundNext = true; - this.hasNext = false; - } - } else if (this.map[this.x][this.y]) { - const currentCoordinate = this.get3DCoordinateWithSliceOffset([ - this.x + this.minCoord2d[0], - this.y + this.minCoord2d[1], - ]); - // Check if position is in bounds. - if (this.isCoordinateInBounds(currentCoordinate)) { - this.next = currentCoordinate; - this.hasNext = true; - foundNext = true; - } - } - } - return res; + linearizeIndex(x: number, y: number): number { + return x * this.height + y; } } @@ -176,24 +97,36 @@ export class VoxelNeighborStack2D { } class VolumeLayer { + /* + From the outside, the VolumeLayer accepts only global positions. Internally, + these are converted to the actual used resolution (activeResolution). + Therefore, members of this class are in the resolution space of + `activeResolution`. 
+ */ + plane: OrthoView; thirdDimensionValue: number; contourList: Array; maxCoord: ?Vector3; minCoord: ?Vector3; + activeResolution: Vector3; - constructor(plane: OrthoView, thirdDimensionValue: number) { + constructor(plane: OrthoView, thirdDimensionValue: number, activeResolution: Vector3) { this.plane = plane; - this.thirdDimensionValue = thirdDimensionValue; this.maxCoord = null; this.minCoord = null; + this.activeResolution = activeResolution; + + const thirdDim = Dimensions.thirdDimensionForPlane(this.plane); + this.thirdDimensionValue = Math.floor(thirdDimensionValue / this.activeResolution[thirdDim]); } - addContour(pos: Vector3): void { - this.updateArea(pos); + addContour(globalPos: Vector3): void { + this.updateArea(globalPos); } - updateArea(pos: Vector3): void { + updateArea(globalPos: Vector3): void { + const pos = scaleGlobalPositionWithResolution(globalPos, this.activeResolution); let [maxCoord, minCoord] = [this.maxCoord, this.minCoord]; if (maxCoord == null || minCoord == null) { @@ -221,21 +154,25 @@ class VolumeLayer { getContourList() { const volumeTracing = enforceVolumeTracing(Store.getState().tracing); - return volumeTracing.contourList; + const globalContourList = volumeTracing.contourList; + + return globalContourList.map(point => + scaleGlobalPositionWithResolutionFloat(point, this.activeResolution), + ); } isEmpty(): boolean { return this.getContourList().length === 0; } - getVoxelIterator(mode: VolumeTool, activeResolution: Vector3): VoxelIterator { + getVoxelBuffer2D(mode: VolumeTool): VoxelBuffer2D { if (this.isEmpty() || this.minCoord == null) { - return VoxelIterator.finished(); + return VoxelBuffer2D.empty(); } const minCoord2d = this.get2DCoordinate(this.minCoord); if (this.maxCoord == null) { - return VoxelIterator.finished(); + return VoxelBuffer2D.empty(); } const maxCoord2d = this.get2DCoordinate(this.maxCoord); @@ -243,26 +180,20 @@ class VolumeLayer { // because in `updateArea` a value of 2 is subtracted / added when the values get updated. if (this.getArea() > Constants.AUTO_FILL_AREA_LIMIT * 3) { Toast.info(messages["tracing.area_to_fill_is_too_big"]); - return VoxelIterator.finished(); + return VoxelBuffer2D.empty(); } const width = maxCoord2d[0] - minCoord2d[0] + 1; const height = maxCoord2d[1] - minCoord2d[1] + 1; - const map = new Array(width); - for (let x = 0; x < width; x++) { - map[x] = new Array(height); - for (let y = 0; y < height; y++) { - map[x][y] = true; - } - } + const map = new Uint8Array(width * height).fill(1); - const setMap = (x: number, y: number, value = true) => { + const setMap = (x: number, y: number, value: number = 1) => { x = Math.floor(x); y = Math.floor(y); // Leave a 1px border in order for fillOutsideArea to work if (x > minCoord2d[0] && x < maxCoord2d[0] && y > minCoord2d[1] && y < maxCoord2d[1]) { - map[x - minCoord2d[0]][y - minCoord2d[1]] = value; + map[(x - minCoord2d[0]) * height + (y - minCoord2d[1])] = value; } }; @@ -277,44 +208,39 @@ class VolumeLayer { // area if you consider narrow shapes. // Also, it will be very clear where to start the filling // algorithm. 
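// Layout note: the flat Uint8Array is addressed as map[localX * height + localY],
// which is exactly what VoxelBuffer2D.linearizeIndex(x, y) computes. For example, in
// a 4 x 3 buffer the voxel at local coordinate (2, 1) lives at index 2 * 3 + 1 = 7.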
- this.drawOutlineVoxels((x, y) => setMap(x, y, false), mode); + this.drawOutlineVoxels((x, y) => setMap(x, y, 0), mode); this.fillOutsideArea(map, width, height); this.drawOutlineVoxels(setMap, mode); - const numberOfSlices = getNumberOfSlicesForResolution(activeResolution, this.plane); - const thirdDimensionIndex = Dimensions.thirdDimensionForPlane(this.plane); - - const iterator = new VoxelIterator( + const buffer2D = new VoxelBuffer2D( map, width, height, minCoord2d, this.get3DCoordinate.bind(this), - thirdDimensionIndex, - numberOfSlices, + this.getFast3DCoordinateFunction(), ); - return iterator; + return buffer2D; } - getCircleVoxelIterator( - position: Vector3, - activeResolution: Vector3, - boundings?: ?BoundingBoxType, - ): VoxelIterator { + getCircleVoxelBuffer2D(position: Vector3): VoxelBuffer2D { const state = Store.getState(); const { brushSize } = state.userConfiguration; + const dimIndices = Dimensions.getIndices(this.plane); - const radius = Math.round(brushSize / 2); - const width = 2 * radius; - const height = 2 * radius; + const unzoomedRadius = Math.round(brushSize / 2); + const width = Math.floor((2 * unzoomedRadius) / this.activeResolution[dimIndices[0]]); + const height = Math.floor((2 * unzoomedRadius) / this.activeResolution[dimIndices[1]]); - const map = new Array(width); - for (let x = 0; x < width; x++) { - map[x] = new Array(height).fill(false); - } - const floatingCoord2d = this.get2DCoordinate(position); - const coord2d = [Math.floor(floatingCoord2d[0]), Math.floor(floatingCoord2d[1])]; - const minCoord2d = [coord2d[0] - radius, coord2d[1] - radius]; + const map = new Uint8Array(width * height).fill(0); + + const floatingCoord2d = this.get2DCoordinate( + scaleGlobalPositionWithResolutionFloat(position, this.activeResolution), + ); + const minCoord2d = [ + Math.floor(floatingCoord2d[0] - width / 2), + Math.floor(floatingCoord2d[1] - height / 2), + ]; // Use the baseVoxelFactors to scale the circle, otherwise it'll become an ellipse const [scaleX, scaleY] = this.get2DCoordinate( @@ -322,29 +248,34 @@ class VolumeLayer { ); const setMap = (x, y) => { - map[x][y] = true; + map[x * height + y] = 1; }; - Drawing.fillCircle(radius, radius, radius, scaleX, scaleY, setMap); - - const numberOfSlices = getNumberOfSlicesForResolution(activeResolution, this.plane); - const thirdDimensionIndex = Dimensions.thirdDimensionForPlane(this.plane); + Drawing.fillCircle( + Math.floor(unzoomedRadius / this.activeResolution[dimIndices[0]]), + Math.floor(unzoomedRadius / this.activeResolution[dimIndices[1]]), + // the unzoomedRadius is adapted to the correct resolution by the + // following scale parameters + unzoomedRadius, + scaleX / this.activeResolution[dimIndices[0]], + scaleY / this.activeResolution[dimIndices[1]], + setMap, + ); - const iterator = new VoxelIterator( + const buffer2D = new VoxelBuffer2D( map, width, height, minCoord2d, this.get3DCoordinate.bind(this), - thirdDimensionIndex, - numberOfSlices, - boundings, + this.getFast3DCoordinateFunction(), ); - return iterator; + return buffer2D; } drawOutlineVoxels(setMap: (number, number) => void, mode: VolumeTool): void { const contourList = this.getContourList(); const state = Store.getState(); + const dimIndices = Dimensions.getIndices(this.plane); const [scaleX, scaleY] = this.get2DCoordinate( getBaseVoxelFactors(state.dataset.dataSource.scale), ); @@ -359,16 +290,23 @@ class VolumeLayer { p2 = this.get2DCoordinate(contourList[(i + 1) % contourList.length]); Drawing.drawLine2d(p1[0], p1[1], p2[0], p2[1], setMap); } 
else if (mode === VolumeToolEnum.BRUSH) { - Drawing.fillCircle(p1[0], p1[1], radius, scaleX, scaleY, setMap); + Drawing.fillCircle( + p1[0], + p1[1], + radius, + scaleX / this.activeResolution[dimIndices[0]], + scaleY / this.activeResolution[dimIndices[1]], + setMap, + ); } } } - fillOutsideArea(map: boolean[][], width: number, height: number): void { + fillOutsideArea(map: Uint8Array, width: number, height: number): void { const setMap = (x, y) => { - map[x][y] = false; + map[x * height + y] = 0; }; - const isEmpty = (x, y) => map[x][y] === true; + const isEmpty = (x, y) => map[x * height + y] === 1; // Fill everything BUT the cell Drawing.fillArea(0, 0, width, height, false, isEmpty, setMap); @@ -376,33 +314,45 @@ class VolumeLayer { get2DCoordinate(coord3d: Vector3): Vector2 { // Throw out 'thirdCoordinate' which is equal anyways - const result = []; - for (let i = 0; i <= 2; i++) { - if (i !== Dimensions.thirdDimensionForPlane(this.plane)) { - result.push(coord3d[i]); - } - } - return [result[0], result[1]]; + const transposed = Dimensions.transDim(coord3d, this.plane); + return [transposed[0], transposed[1]]; } get3DCoordinate(coord2d: Vector2): Vector3 { - // Put thirdCoordinate back in - const index = Dimensions.thirdDimensionForPlane(this.plane); - let index2d = 0; - const res = [0, 0, 0]; - - for (let i = 0; i <= 2; i++) { - if (i !== index) { - res[i] = coord2d[index2d++]; - } else { - res[i] = this.thirdDimensionValue; + return Dimensions.transDim([coord2d[0], coord2d[1], this.thirdDimensionValue], this.plane); + } + + getFast3DCoordinateFunction(): ( + coordX: number, + coordY: number, + out: Vector3 | Float32Array, + ) => void { + switch (this.plane) { + case OrthoViews.PLANE_XY: + return (coordX, coordY, out) => { + out[0] = coordX; + out[1] = coordY; + out[2] = this.thirdDimensionValue; + }; + case OrthoViews.PLANE_YZ: + return (coordX, coordY, out) => { + out[0] = this.thirdDimensionValue; + out[1] = coordY; + out[2] = coordX; + }; + case OrthoViews.PLANE_XZ: + return (coordX, coordY, out) => { + out[0] = coordX; + out[1] = this.thirdDimensionValue; + out[2] = coordY; + }; + default: { + throw new Error("Unknown plane id"); } } - - return res; } - getCentroid(): Vector3 { + getUnzoomedCentroid(): Vector3 { // Formula: // https://en.wikipedia.org/wiki/Centroid#Centroid_of_polygon @@ -421,7 +371,9 @@ class VolumeLayer { const cx = sumCx / 6 / area; const cy = sumCy / 6 / area; - return this.get3DCoordinate([cx, cy]); + const zoomedPosition = this.get3DCoordinate([cx, cy]); + const pos = zoomedPositionToGlobalPosition(zoomedPosition, this.activeResolution); + return pos; } } diff --git a/frontend/javascripts/oxalis/store.js b/frontend/javascripts/oxalis/store.js index db20afe9fc7..c6f2c211782 100644 --- a/frontend/javascripts/oxalis/store.js +++ b/frontend/javascripts/oxalis/store.js @@ -222,6 +222,7 @@ export type VolumeTracing = {| +activeCellId: number, +lastCentroid: ?Vector3, +contourTracingMode: ContourMode, + // Stores points of the currently drawn region in global coordinates +contourList: Array, +cells: VolumeCellMap, +fallbackLayer?: string, diff --git a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js index 332e44bdd85..68ddbf8086d 100644 --- a/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js +++ b/frontend/javascripts/test/model/volumetracing/volume_annotation_sampling.spec.js @@ -4,7 +4,7 @@ import { tracing as 
skeletontracingServerObject } from "test/fixtures/skeletontr import sampleVoxelMapToResolution, { applyVoxelMap, } from "oxalis/model/volumetracing/volume_annotation_sampling"; -import Constants, { type Vector2, type Vector4 } from "oxalis/constants"; +import Constants, { type Vector3, type Vector4 } from "oxalis/constants"; import anyTest, { type TestInterface } from "ava"; import datasetServerObject from "test/fixtures/dataset_server_object"; import mockRequire from "mock-require"; @@ -461,7 +461,11 @@ test("A labeledVoxelMap should be applied correctly", t => { labelVoxelInVoxelMap(firstDim, secondDim, voxelMap), ); labeledVoxelsMap.set(bucket.zoomedAddress, voxelMap); - const get3DAddress = (voxel2D: Vector2) => [...voxel2D, 5]; + const get3DAddress = (x: number, y: number, out: Vector3 | Float32Array) => { + out[0] = x; + out[1] = y; + out[2] = 5; + }; const expectedBucketData = new Uint32Array(Constants.BUCKET_SIZE).fill(0); voxelsToLabel.forEach(([firstDim, secondDim]) => { const addr = cube.getVoxelIndex([firstDim, secondDim, 5], 0); diff --git a/frontend/javascripts/test/sagas/volumetracing_saga.spec.js b/frontend/javascripts/test/sagas/volumetracing_saga.spec.js index fa1a543a8b7..0999071cebd 100644 --- a/frontend/javascripts/test/sagas/volumetracing_saga.spec.js +++ b/frontend/javascripts/test/sagas/volumetracing_saga.spec.js @@ -19,8 +19,8 @@ import { withoutUpdateTracing } from "../helpers/saveHelpers"; const mockedVolumeLayer = { isEmpty: () => false, finish: _.noop, - getVoxelIterator: _.noop, - getCentroid: _.noop, + getVoxelBuffer2D: _.noop, + getUnzoomedCentroid: _.noop, }; mockRequire("app", { currentUser: { firstName: "SCM", lastName: "Boy" } }); @@ -113,6 +113,9 @@ test("VolumeTracingSaga should create a volume layer (saga test)", t => { saga.next(VolumeToolEnum.BRUSH); const startEditingSaga = execCall(t, saga.next(false)); startEditingSaga.next(); + // Pass position + startEditingSaga.next([1, 1, 1]); + // Pass active resolution const layer = startEditingSaga.next([1, 1, 1]).value; t.is(layer.plane, OrthoViews.PLANE_XY); }); @@ -126,11 +129,9 @@ test("VolumeTracingSaga should add values to volume layer (saga test)", t => { saga.next(ContourModeEnum.DRAW_OVERWRITE); saga.next(VolumeToolEnum.TRACE); saga.next(false); - const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10); + const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10, [1, 1, 1]); saga.next(volumeLayer); saga.next(OrthoViews.PLANE_XY); - saga.next([1, 1, 1]); - saga.next(); saga.next({ addToLayerAction: addToLayerActionFn([1, 2, 3]) }); saga.next(OrthoViews.PLANE_XY); saga.next({ addToLayerAction: addToLayerActionFn([2, 3, 4]) }); @@ -150,11 +151,9 @@ test("VolumeTracingSaga should finish a volume layer (saga test)", t => { saga.next(ContourModeEnum.DRAW_OVERWRITE); saga.next(VolumeToolEnum.TRACE); saga.next(false); - const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10); + const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10, [1, 1, 1]); saga.next(volumeLayer); saga.next(OrthoViews.PLANE_XY); - saga.next([1, 1, 1]); - saga.next(); saga.next({ addToLayerAction: addToLayerActionFn([1, 2, 3]) }); saga.next(OrthoViews.PLANE_XY); // Validate that finishLayer was called @@ -174,11 +173,9 @@ test("VolumeTracingSaga should finish a volume layer in delete mode (saga test)" saga.next(ContourModeEnum.DELETE_FROM_ACTIVE_CELL); saga.next(VolumeToolEnum.TRACE); saga.next(false); - const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10); + const volumeLayer = new 
VolumeLayer(OrthoViews.PLANE_XY, 10, [1, 1, 1]); saga.next(volumeLayer); saga.next(OrthoViews.PLANE_XY); - saga.next([1, 1, 1]); - saga.next(); saga.next({ addToLayerAction: addToLayerActionFn([1, 2, 3]) }); saga.next(OrthoViews.PLANE_XY); // Validate that finishLayer was called @@ -193,7 +190,6 @@ test("finishLayer saga should emit resetContourAction and then be done (saga tes // $FlowFixMe[incompatible-call] const saga = finishLayer(mockedVolumeLayer, VolumeToolEnum.TRACE); saga.next(); - saga.next([1, 1, 1]); saga.next(); expectValueDeepEqual(t, saga.next(), put(resetContourAction)); t.true(saga.next().done); From 01d8061db535fccea21a5aa7edbdd6892027c7cd Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 14:27:03 +0200 Subject: [PATCH 089/121] ensure that rendered volume magnification is also the mag in which is annotated - also ensure that the volume toolbar is disabled properly when the segmentation cannot be rendered for some reason --- .../volumetracing_plane_controller.js | 3 + .../model/accessors/dataset_accessor.js | 67 +++++++++++++++++-- .../oxalis/model/sagas/volumetracing_saga.js | 48 ++++++++----- .../view/action-bar/volume_actions_view.js | 21 ++++-- .../test/sagas/volumetracing_saga.spec.js | 37 ++++------ 5 files changed, 125 insertions(+), 51 deletions(-) diff --git a/frontend/javascripts/oxalis/controller/combinations/volumetracing_plane_controller.js b/frontend/javascripts/oxalis/controller/combinations/volumetracing_plane_controller.js index f7553056b2a..6f59d3d65c5 100644 --- a/frontend/javascripts/oxalis/controller/combinations/volumetracing_plane_controller.js +++ b/frontend/javascripts/oxalis/controller/combinations/volumetracing_plane_controller.js @@ -26,6 +26,7 @@ import { copySegmentationLayerAction, inferSegmentationInViewportAction, setActiveCellAction, + resetContourAction, } from "oxalis/model/actions/volumetracing_actions"; import { getPosition, getRequestLogZoomStep } from "oxalis/model/accessors/flycam_accessor"; import { getResolutionInfoOfSegmentationLayer } from "oxalis/model/accessors/dataset_accessor"; @@ -117,6 +118,7 @@ export function getPlaneMouseControls(_planeId: OrthoView): * { if (tool === VolumeToolEnum.TRACE || tool === VolumeToolEnum.BRUSH) { Store.dispatch(finishEditingAction()); + Store.dispatch(resetContourAction()); } }, @@ -156,6 +158,7 @@ export function getPlaneMouseControls(_planeId: OrthoView): * { if (tool === VolumeToolEnum.TRACE || tool === VolumeToolEnum.BRUSH) { Store.dispatch(finishEditingAction()); Store.dispatch(setContourTracingModeAction(ContourModeEnum.IDLE)); + Store.dispatch(resetContourAction()); } }, diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 407fa75d8ab..97547439867 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -34,8 +34,8 @@ import { reuseInstanceOnEquality } from "oxalis/model/accessors/accessor_helpers export type ResolutionsMap = Map; export class ResolutionInfo { - resolutions: Array; - resolutionMap: Map; + resolutions: $ReadOnlyArray; + resolutionMap: $ReadOnlyMap; constructor(resolutions: Array) { this.resolutions = resolutions; @@ -578,14 +578,73 @@ function _getUnrenderableLayersForCurrentZoom(state: OxalisState) { // is activated. Thus, check whether one of the fallback // zoomSteps can be rendered. 
return !_.range(1, maxZoomStepDiff + 1).some(diff => { - const fallbackZoomstep = zoomStep + diff; - return resolutionInfo.hasIndex(fallbackZoomstep); + const fallbackZoomStep = zoomStep + diff; + return resolutionInfo.hasIndex(fallbackZoomStep); }); }) .map(({ layer }) => layer); return unrenderableLayers; } +/* + This function returns the resolution and zoom step in which the segmentation + layer is currently rendered (if it is rendered). These properties should be used + when labeling volume data. + */ +export function getRenderableResolutionForSegmentation( + state: OxalisState, +): ?{ resolution: Vector3, zoomStep: number } { + const { dataset } = state; + const requestedZoomStep = getRequestLogZoomStep(state); + const { renderMissingDataBlack } = state.datasetConfiguration; + const maxZoomStepDiff = getMaxZoomStepDiff(state.datasetConfiguration.loadingStrategy); + const resolutionInfo = getResolutionInfoOfSegmentationLayer(state.dataset); + const segmentationLayer = getSegmentationLayer(dataset); + + if (!segmentationLayer) { + return null; + } + + // Check whether the segmentation layer is enabled + const segmentationSettings = state.datasetConfiguration.layers[segmentationLayer.name]; + if (segmentationSettings.isDisabled) { + return null; + } + + // Check whether the requested zoom step exists + if (resolutionInfo.hasIndex(requestedZoomStep)) { + return { + zoomStep: requestedZoomStep, + resolution: resolutionInfo.getResolutionByIndexOrThrow(requestedZoomStep), + }; + } + + // Since `renderMissingDataBlack` is enabled, the fallback magnifications + // should not be considered. + // rendered. + if (renderMissingDataBlack) { + return null; + } + + // The current magnification is missing and fallback rendering + // is activated. Thus, check whether one of the fallback + // zoomSteps can be rendered. 
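// Illustrative example: with requestedZoomStep = 3, maxZoomStepDiff = 1 and a
// segmentation layer that only has mags for zoom steps 0, 1, 2 and 4, the loop below
// checks step 4 and returns { zoomStep: 4, resolution: <the mag of step 4> }; with
// maxZoomStepDiff = 0 the loop body never runs and the function returns null.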
+ for ( + let fallbackZoomStep = requestedZoomStep + 1; + fallbackZoomStep <= requestedZoomStep + maxZoomStepDiff; + fallbackZoomStep++ + ) { + if (resolutionInfo.hasIndex(fallbackZoomStep)) { + return { + zoomStep: fallbackZoomStep, + resolution: resolutionInfo.getResolutionByIndexOrThrow(fallbackZoomStep), + }; + } + } + + return null; +} + export const getUnrenderableLayersForCurrentZoom = reuseInstanceOnEquality( _getUnrenderableLayersForCurrentZoom, ); diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 9e96b363b47..b685af6e07b 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -4,7 +4,6 @@ import _ from "lodash"; import DataLayer from "oxalis/model/data_layer"; import { type CopySegmentationLayerAction, - resetContourAction, updateDirectionAction, setToolAction, finishAnnotationStrokeAction, @@ -46,6 +45,7 @@ import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; import { getResolutionInfoOfSegmentationLayer, ResolutionInfo, + getRenderableResolutionForSegmentation, } from "oxalis/model/accessors/dataset_accessor"; import Constants, { type BoundingBoxType, @@ -116,7 +116,24 @@ export function* editVolumeLayerAsync(): Generator { if (isZoomStepTooHighForTraceTool && activeTool === VolumeToolEnum.TRACE) { continue; } - const currentLayer = yield* call(createVolumeLayer, startEditingAction.planeId); + + const maybeLabeledResolutionWithZoomStep = yield* select( + getRenderableResolutionForSegmentation, + ); + if (!maybeLabeledResolutionWithZoomStep) { + // Volume data is currently not rendered. Don't annotate anything. + continue; + } + + const { + zoomStep: labeledZoomStep, + resolution: labeledResolution, + } = maybeLabeledResolutionWithZoomStep; + const currentLayer = yield* call( + createVolumeLayer, + startEditingAction.planeId, + labeledResolution, + ); const initialViewport = yield* select(state => state.viewModeData.plane.activeViewport); if (activeTool === VolumeToolEnum.BRUSH) { @@ -124,6 +141,7 @@ export function* editVolumeLayerAsync(): Generator { labelWithVoxelBuffer2D, currentLayer.getCircleVoxelBuffer2D(startEditingAction.position), contourTracingMode, + labeledZoomStep, ); } @@ -153,11 +171,12 @@ export function* editVolumeLayerAsync(): Generator { labelWithVoxelBuffer2D, currentLayer.getCircleVoxelBuffer2D(addToLayerAction.position), contourTracingMode, + labeledZoomStep, ); } } - yield* call(finishLayer, currentLayer, activeTool, contourTracingMode); + yield* call(finishLayer, currentLayer, activeTool, contourTracingMode, labeledZoomStep); yield* put(finishAnnotationStrokeAction()); } } @@ -179,21 +198,14 @@ function* getBoundingsFromPosition( return currentViewportBounding; } -function* createVolumeLayer(planeId: OrthoView): Saga { +function* createVolumeLayer(planeId: OrthoView, labeledResolution: Vector3): Saga { const position = yield* select(state => getFlooredPosition(state.flycam)); const thirdDimValue = position[Dimensions.thirdDimensionForPlane(planeId)]; - const labeledResolution = yield* select(state => { - const resolutionInfo = getResolutionInfoOfSegmentationLayer(state.dataset); - const requestedZoomStep = getRequestLogZoomStep(state); - const labeledZoomStep = resolutionInfo.getClosestExistingIndex(requestedZoomStep); - return resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); - }); - return new VolumeLayer(planeId, thirdDimValue, 
labeledResolution); } -function* labelWithVoxelBuffer2D(voxelBuffer, contourTracingMode): Saga { +function* labelWithVoxelBuffer2D(voxelBuffer, contourTracingMode, labeledZoomStep): Saga { const allowUpdate = yield* select(state => state.tracing.restrictions.allowUpdate); if (!allowUpdate) return; @@ -209,9 +221,6 @@ function* labelWithVoxelBuffer2D(voxelBuffer, contourTracingMode): Saga { const resolutionInfo = yield* select(state => getResolutionInfoOfSegmentationLayer(state.dataset), ); - - const requestedZoomStep = yield* select(state => getRequestLogZoomStep(state)); - const labeledZoomStep = resolutionInfo.getClosestExistingIndex(requestedZoomStep); const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); const get3DCoordinateFromLocal2D = ([x, y]) => @@ -568,17 +577,22 @@ export function* finishLayer( layer: VolumeLayer, activeTool: VolumeTool, contourTracingMode: ContourMode, + labeledZoomStep: number, ): Saga { if (layer == null || layer.isEmpty()) { return; } if (activeTool === VolumeToolEnum.TRACE || activeTool === VolumeToolEnum.BRUSH) { - yield* call(labelWithVoxelBuffer2D, layer.getVoxelBuffer2D(activeTool), contourTracingMode); + yield* call( + labelWithVoxelBuffer2D, + layer.getVoxelBuffer2D(activeTool), + contourTracingMode, + labeledZoomStep, + ); } yield* put(updateDirectionAction(layer.getUnzoomedCentroid())); - yield* put(resetContourAction()); } export function* ensureNoTraceToolInLowResolutions(): Saga<*> { diff --git a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index 088093c4d90..85f02560966 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -11,7 +11,7 @@ import { } from "oxalis/model/accessors/volumetracing_accessor"; import { setToolAction, createCellAction } from "oxalis/model/actions/volumetracing_actions"; import ButtonComponent from "oxalis/view/components/button_component"; -import { getCurrentResolution } from "oxalis/model/accessors/flycam_accessor"; +import { getRenderableResolutionForSegmentation } from "oxalis/model/accessors/dataset_accessor"; import Store, { type OxalisState } from "oxalis/store"; // Workaround until github.com/facebook/flow/issues/1113 is fixed @@ -25,7 +25,7 @@ type Props = {| // eslint-disable-next-line react/no-unused-prop-types zoomStep: number, isInMergerMode: boolean, - activeResolution: Vector3, + labeledResolution: ?Vector3, |}; const isZoomStepTooHighForTraceTool = () => isVolumeTraceToolDisallowed(Store.getState()); @@ -46,8 +46,9 @@ class VolumeActionsView extends PureComponent { }; render() { - const { activeTool, activeResolution, isInMergerMode } = this.props; - const hasResolutionWithHigherDimension = activeResolution.some(val => val > 1); + const { activeTool, labeledResolution, isInMergerMode } = this.props; + const isLabelingPossible = labeledResolution != null; + const hasResolutionWithHigherDimension = (labeledResolution || []).some(val => val > 1); const multiSliceAnnotationInfoIcon = hasResolutionWithHigherDimension ? ( @@ -75,12 +76,15 @@ class VolumeActionsView extends PureComponent { Trace {activeTool === "TRACE" ? multiSliceAnnotationInfoIcon : null} - + Brush {activeTool === "BRUSH" ? 
multiSliceAnnotationInfoIcon : null} @@ -97,11 +101,14 @@ class VolumeActionsView extends PureComponent { } function mapStateToProps(state: OxalisState): Props { + const maybeResolutionWithZoomStep = getRenderableResolutionForSegmentation(state); + const labeledResolution = + maybeResolutionWithZoomStep != null ? maybeResolutionWithZoomStep.resolution : null; return { activeTool: enforceVolumeTracing(state.tracing).activeTool, zoomStep: state.flycam.zoomStep, isInMergerMode: state.temporaryConfiguration.isMergerModeEnabled, - activeResolution: getCurrentResolution(state), + labeledResolution, }; } diff --git a/frontend/javascripts/test/sagas/volumetracing_saga.spec.js b/frontend/javascripts/test/sagas/volumetracing_saga.spec.js index 0999071cebd..cd93611392c 100644 --- a/frontend/javascripts/test/sagas/volumetracing_saga.spec.js +++ b/frontend/javascripts/test/sagas/volumetracing_saga.spec.js @@ -2,7 +2,6 @@ import "test/sagas/volumetracing_saga.mock.js"; import { take, put, call } from "redux-saga/effects"; -import _ from "lodash"; import update from "immutability-helper"; import { OrthoViews, VolumeToolEnum, ContourModeEnum } from "oxalis/constants"; @@ -16,13 +15,6 @@ import test from "ava"; import { expectValueDeepEqual, execCall } from "../helpers/sagaHelpers"; import { withoutUpdateTracing } from "../helpers/saveHelpers"; -const mockedVolumeLayer = { - isEmpty: () => false, - finish: _.noop, - getVoxelBuffer2D: _.noop, - getUnzoomedCentroid: _.noop, -}; - mockRequire("app", { currentUser: { firstName: "SCM", lastName: "Boy" } }); mockRequire("oxalis/model/sagas/root_saga", function*() { yield; @@ -66,7 +58,6 @@ const setActiveCellAction = VolumeTracingActions.setActiveCellAction(ACTIVE_CELL const startEditingAction = VolumeTracingActions.startEditingAction([0, 0, 0], OrthoViews.PLANE_XY); const addToLayerActionFn = VolumeTracingActions.addToLayerAction; const finishEditingAction = VolumeTracingActions.finishEditingAction(); -const resetContourAction = VolumeTracingActions.resetContourAction(); test("VolumeTracingSaga shouldn't do anything if unchanged (saga test)", t => { const saga = saveTracingTypeAsync("volume"); @@ -111,11 +102,11 @@ test("VolumeTracingSaga should create a volume layer (saga test)", t => { saga.next(startEditingAction); saga.next(ContourModeEnum.DRAW_OVERWRITE); saga.next(VolumeToolEnum.BRUSH); - const startEditingSaga = execCall(t, saga.next(false)); + saga.next(false); + // pass labeled resolution + const startEditingSaga = execCall(t, saga.next({ resolution: [1, 1, 1], zoomStep: 0 })); startEditingSaga.next(); // Pass position - startEditingSaga.next([1, 1, 1]); - // Pass active resolution const layer = startEditingSaga.next([1, 1, 1]).value; t.is(layer.plane, OrthoViews.PLANE_XY); }); @@ -129,6 +120,7 @@ test("VolumeTracingSaga should add values to volume layer (saga test)", t => { saga.next(ContourModeEnum.DRAW_OVERWRITE); saga.next(VolumeToolEnum.TRACE); saga.next(false); + saga.next({ resolution: [1, 1, 1], zoomStep: 0 }); // pass labeled resolution const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10, [1, 1, 1]); saga.next(volumeLayer); saga.next(OrthoViews.PLANE_XY); @@ -151,6 +143,7 @@ test("VolumeTracingSaga should finish a volume layer (saga test)", t => { saga.next(ContourModeEnum.DRAW_OVERWRITE); saga.next(VolumeToolEnum.TRACE); saga.next(false); + saga.next({ resolution: [1, 1, 1], zoomStep: 0 }); // pass labeled resolution const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10, [1, 1, 1]); saga.next(volumeLayer); 
saga.next(OrthoViews.PLANE_XY); @@ -160,7 +153,7 @@ test("VolumeTracingSaga should finish a volume layer (saga test)", t => { expectValueDeepEqual( t, saga.next({ finishEditingAction }), - call(finishLayer, volumeLayer, VolumeToolEnum.TRACE, ContourModeEnum.DRAW_OVERWRITE), + call(finishLayer, volumeLayer, VolumeToolEnum.TRACE, ContourModeEnum.DRAW_OVERWRITE, 0), ); }); @@ -173,6 +166,7 @@ test("VolumeTracingSaga should finish a volume layer in delete mode (saga test)" saga.next(ContourModeEnum.DELETE_FROM_ACTIVE_CELL); saga.next(VolumeToolEnum.TRACE); saga.next(false); + saga.next({ resolution: [1, 1, 1], zoomStep: 0 }); // pass labeled resolution const volumeLayer = new VolumeLayer(OrthoViews.PLANE_XY, 10, [1, 1, 1]); saga.next(volumeLayer); saga.next(OrthoViews.PLANE_XY); @@ -182,15 +176,12 @@ test("VolumeTracingSaga should finish a volume layer in delete mode (saga test)" expectValueDeepEqual( t, saga.next({ finishEditingAction }), - call(finishLayer, volumeLayer, VolumeToolEnum.TRACE, ContourModeEnum.DELETE_FROM_ACTIVE_CELL), + call( + finishLayer, + volumeLayer, + VolumeToolEnum.TRACE, + ContourModeEnum.DELETE_FROM_ACTIVE_CELL, + 0, + ), ); }); - -test("finishLayer saga should emit resetContourAction and then be done (saga test)", t => { - // $FlowFixMe[incompatible-call] - const saga = finishLayer(mockedVolumeLayer, VolumeToolEnum.TRACE); - saga.next(); - saga.next(); - expectValueDeepEqual(t, saga.next(), put(resetContourAction)); - t.true(saga.next().done); -}); From 9cfc5224dbb6b57317ca241457a238de6f987c3e Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 14:30:35 +0200 Subject: [PATCH 090/121] Update frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js Co-authored-by: Daniel --- frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index b685af6e07b..f995daa97a8 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -250,7 +250,7 @@ function* labelWithVoxelBuffer2D(voxelBuffer, contourTracingMode, labeledZoomSte const labelMapOfBucket = new Uint8Array(Constants.BUCKET_WIDTH ** 2); currentLabeledVoxelMap.set(bucketZoomedAddress, labelMapOfBucket); - // globalA (first dim) and globalB (secondB) are global coordinates + // globalA (first dim) and globalB (second dim) are global coordinates // which can be used to index into the 2D slice of the VoxelBuffer2D (when subtracting the minCoord2d) // and the LabeledVoxelMap for (let globalA = min[dimensionIndices[0]]; globalA < max[dimensionIndices[0]]; globalA++) { From 185b5d98331b15a7f3eeb66390e9efc3a67f3aeb Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 15:02:09 +0200 Subject: [PATCH 091/121] fix issues from merging master --- .../oxalis/model/sagas/volumetracing_saga.js | 10 +++++++--- .../oxalis/model/volumetracing/volumelayer.js | 8 ++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 4ef9b5f3461..2f8f58dea3a 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -169,13 +169,17 @@ export function* editVolumeLayerAsync(): Generator { 
currentLayer.addContour(addToLayerAction.position); } if (activeTool === VolumeToolEnum.BRUSH) { - const rectangleIterator = currentLayer.getRectangleVoxelIterator( + const rectangleIterator = currentLayer.getRectangleVoxelBuffer2D( lastPosition, addToLayerAction.position, - currentViewportBounding, ); if (rectangleIterator) { - yield* call(labelWithIterator, rectangleIterator, contourTracingMode); + yield* call( + labelWithVoxelBuffer2D, + rectangleIterator, + contourTracingMode, + labeledZoomStep, + ); } yield* call( labelWithVoxelBuffer2D, diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js index bb31213f1a0..27c14608e97 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js @@ -303,11 +303,7 @@ class VolumeLayer { return [xa, ya, xb, yb, xc, yc, xd, yd]; } - getRectangleVoxelBuffer2D( - lastPosition: Vector3, - position: Vector3, - boundings?: ?BoundingBoxType, - ): ?VoxelBuffer2D { + getRectangleVoxelBuffer2D(lastPosition: Vector3, position: Vector3): ?VoxelBuffer2D { const state = Store.getState(); const { brushSize } = state.userConfiguration; @@ -357,7 +353,7 @@ class VolumeLayer { height, minCoord2d, this.get3DCoordinate.bind(this), - boundings, + this.getFast3DCoordinateFunction(), ); return voxelBuffer2D; } From e4d22fa96e4edfece5a9b24e566dec697928a138 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 7 Oct 2020 15:06:21 +0200 Subject: [PATCH 092/121] fix upload/download --- conf/messages | 2 +- .../dataformats/BucketProvider.scala | 2 +- .../dataformats/wkw/WKWDataFormatHelper.scala | 31 +++++++++++---- .../volume/VolumeTracingBucketHelper.scala | 39 ++++++------------- .../tracings/volume/VolumeTracingLayer.scala | 19 ++++----- .../volume/VolumeTracingService.scala | 12 +++--- 6 files changed, 51 insertions(+), 54 deletions(-) diff --git a/conf/messages b/conf/messages index c12ef3308a3..66129c1a4c8 100644 --- a/conf/messages +++ b/conf/messages @@ -193,7 +193,7 @@ tracing=Annotation tracing.notFound=Tracing couldn’t be found annotation.create.failed=Failed to create annotation. -annotation.volume.uint64=Creating volume tracings on a uint64 fallback layer is not allowed. +annotation.volume.uint64=Creating volume annotations with uint64 fallback layer is not supported by wK yet. annotation.notFound=Annotation couldn’t be found annotation.notFound.considerLoggingIn=Annotation couldn’t be found. If the annotation is not public, you need to log in to see it. 
annotation.invalid=Invalid annotation diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala index 88f34a52f12..b38ee21e17f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala @@ -47,6 +47,6 @@ trait BucketProvider extends FoxImplicits with LazyLogging { _.cutOutBucket(readInstruction.dataLayer, readInstruction.bucket)) } - def bucketStream(resolution: Int, version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] = + def bucketStream(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] = Iterator.empty } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala index e4fc2df036c..1aef0a3db10 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala @@ -7,6 +7,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, Data import com.scalableminds.webknossos.datastore.models.{BucketPosition, CubePosition} import com.scalableminds.webknossos.wrap.VoxelType import net.liftweb.common.{Box, Failure, Full} +import com.scalableminds.util.tools.ExtendedTypes._ trait WKWDataFormatHelper { @@ -43,19 +44,35 @@ trait WKWDataFormatHelper { .resolve(s"header.${dataFileExtension}") def parseWKWFilePath(path: String): Option[BucketPosition] = { - val CubeRx = s".*(\\d+)/z(\\d+)/y(\\d+)/x(\\d+).${dataFileExtension}".r + val CubeRx = s"(|.*/)(\\d+|\\d+-\\d+-\\d+)/z(\\d+)/y(\\d+)/x(\\d+).${dataFileExtension}".r path match { - case CubeRx(res, z, y, x) => - Some( - BucketPosition(x.toInt * DataLayer.bucketLength, - y.toInt * DataLayer.bucketLength, - z.toInt * DataLayer.bucketLength, - Point3D(res.toInt, res.toInt, res.toInt))) + case CubeRx(_, resolutionStr, z, y, x) => + val resolutionOpt = parseResolution(resolutionStr) + resolutionOpt match { + case Some(resolution) => + Some( + BucketPosition(x.toInt * resolution.x * DataLayer.bucketLength, + y.toInt * resolution.y * DataLayer.bucketLength, + z.toInt * resolution.z * DataLayer.bucketLength, + resolution)) + case _ => None + } case _ => None } } + protected def parseResolution(resolutionStr: String): Option[Point3D] = + resolutionStr.toIntOpt match { + case Some(resolutionInt) => Some(Point3D(resolutionInt, resolutionInt, resolutionInt)) + case None => + val pattern = """(\d+)-(\d+)-(\d+)""".r + resolutionStr match { + case pattern(x, y, z) => Some(Point3D(x.toInt, y.toInt, z.toInt)) + case _ => None + } + } + def voxelTypeToElementClass(voxelType: VoxelType.Value, voxelSize: Int): Box[ElementClass.Value] = (voxelType, voxelSize) match { case (VoxelType.UInt8, 1) => Full(ElementClass.uint8) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index e8566ac164b..f11e3404270 100644 --- 
a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -1,10 +1,8 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume -import java.nio.{ByteBuffer, ByteOrder} - import com.scalableminds.util.geometry.Point3D import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.util.tools.ExtendedTypes._ +import com.scalableminds.webknossos.datastore.dataformats.wkw.WKWDataFormatHelper import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass} import com.scalableminds.webknossos.datastore.services.DataConverter @@ -17,13 +15,11 @@ import com.scalableminds.webknossos.tracingstore.tracings.{ } import com.scalableminds.webknossos.wrap.WKWMortonHelper import com.typesafe.scalalogging.LazyLogging + import scala.concurrent.duration._ import net.jpountz.lz4.{LZ4Compressor, LZ4Factory, LZ4FastDecompressor} -import net.liftweb.common._ -import spire.math.{UByte, UInt, ULong, UShort} import scala.concurrent.ExecutionContext.Implicits.global -import scala.reflect.ClassTag trait VolumeBucketCompression extends LazyLogging { @@ -68,7 +64,7 @@ trait VolumeBucketCompression extends LazyLogging { } } -trait BucketKeys extends WKWMortonHelper { +trait BucketKeys extends WKWMortonHelper with WKWDataFormatHelper with LazyLogging { protected def buildBucketKey(dataLayerName: String, bucket: BucketPosition): String = { val mortonIndex = mortonEncode(bucket.x, bucket.y, bucket.z) s"$dataLayerName/${formatResolution(bucket.resolution)}/$mortonIndex-[${bucket.x},${bucket.y},${bucket.z}]" @@ -80,8 +76,8 @@ trait BucketKeys extends WKWMortonHelper { else s"${resolution.x}-${resolution.y}-${resolution.z}" - protected def buildKeyPrefix(dataLayerName: String, resolution: Int): String = - s"$dataLayerName/$resolution/" + protected def buildKeyPrefix(dataLayerName: String): String = + s"$dataLayerName/" protected def parseBucketKey(key: String): Option[(String, BucketPosition)] = { val keyRx = "([0-9a-z-]+)/(\\d+|\\d+-\\d+-\\d+)/-?\\d+-\\[(\\d+),(\\d+),(\\d+)]".r @@ -107,17 +103,6 @@ trait BucketKeys extends WKWMortonHelper { } } - protected def parseResolution(resolutionStr: String): Option[Point3D] = - resolutionStr.toIntOpt match { - case Some(resolutionInt) => Some(Point3D(resolutionInt, resolutionInt, resolutionInt)) - case None => - val pattern = """(\d+)-(\d+)-(\d+)""".r - resolutionStr match { - case pattern(x, y, z) => Some(Point3D(x.toInt, y.toInt, z.toInt)) - case _ => None - } - } - } trait VolumeTracingBucketHelper @@ -139,6 +124,7 @@ trait VolumeTracingBucketHelper bucket: BucketPosition, version: Option[Long] = None): Fox[Array[Byte]] = { val key = buildBucketKey(dataLayer.name, bucket) + val dataFox = loadBucketFromCache(key) match { case Some(data) => Fox.successful(data) case None => volumeDataStore.get(key, version, mayBeEmpty = Some(true)) @@ -176,22 +162,19 @@ trait VolumeTracingBucketHelper } } - def bucketStream(dataLayer: VolumeTracingLayer, - resolution: Int, - version: Option[Long]): Iterator[(BucketPosition, Array[Byte])] = { - val key = buildKeyPrefix(dataLayer.name, resolution) + def bucketStream(dataLayer: VolumeTracingLayer, version: Option[Long]): Iterator[(BucketPosition, Array[Byte])] = { + val key = buildKeyPrefix(dataLayer.name) new BucketIterator(key, volumeDataStore, 
expectedUncompressedBucketSizeFor(dataLayer), version) } def bucketStreamWithVersion(dataLayer: VolumeTracingLayer, - resolution: Int, version: Option[Long]): Iterator[(BucketPosition, Array[Byte], Long)] = { - val key = buildKeyPrefix(dataLayer.name, resolution) + val key = buildKeyPrefix(dataLayer.name) new VersionedBucketIterator(key, volumeDataStore, expectedUncompressedBucketSizeFor(dataLayer), version) } - def bucketStreamFromCache(dataLayer: VolumeTracingLayer, resolution: Int): Iterator[(BucketPosition, Array[Byte])] = { - val keyPrefix = buildKeyPrefix(dataLayer.name, resolution) + def bucketStreamFromCache(dataLayer: VolumeTracingLayer): Iterator[(BucketPosition, Array[Byte])] = { + val keyPrefix = buildKeyPrefix(dataLayer.name) val keyValuePairs = volumeDataCache.findAllConditionalWithKey(key => key.startsWith(keyPrefix)) keyValuePairs.flatMap { case (bucketKey, data) => diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index cbc7ecba7d0..4d2800ef088 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -20,8 +20,7 @@ import scala.concurrent.duration.FiniteDuration trait AbstractVolumeTracingBucketProvider extends BucketProvider with VolumeTracingBucketHelper with FoxImplicits { - def bucketStreamWithVersion(resolution: Int, - version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte], Long)] + def bucketStreamWithVersion(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte], Long)] } class VolumeTracingBucketProvider(layer: VolumeTracingLayer) extends AbstractVolumeTracingBucketProvider { @@ -33,12 +32,11 @@ class VolumeTracingBucketProvider(layer: VolumeTracingLayer) extends AbstractVol implicit ec: ExecutionContext): Fox[Array[Byte]] = loadBucket(layer, readInstruction.bucket, readInstruction.version) - override def bucketStream(resolution: Int, version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] = - bucketStream(layer, resolution, version) + override def bucketStream(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] = + bucketStream(layer, version) - def bucketStreamWithVersion(resolution: Int, - version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte], Long)] = - bucketStreamWithVersion(layer, resolution, version) + def bucketStreamWithVersion(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte], Long)] = + bucketStreamWithVersion(layer, version) } class TemporaryVolumeTracingBucketProvider(layer: VolumeTracingLayer) extends AbstractVolumeTracingBucketProvider { @@ -59,11 +57,10 @@ class TemporaryVolumeTracingBucketProvider(layer: VolumeTracingLayer) extends Ab _ <- bool2Fox(temporaryTracingStore.contains(layer.name)) ?~> "Temporary Volume Tracing expired" } yield () - override def bucketStream(resolution: Int, version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] = - bucketStreamFromCache(layer, resolution) + override def bucketStream(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] = + bucketStreamFromCache(layer) - def bucketStreamWithVersion(resolution: Int, - version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte], Long)] = + def 
bucketStreamWithVersion(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte], Long)] = throw new NotImplementedException // Temporary Volume Tracings do not support versioning } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index e6a83955b0c..acffbcbe924 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -127,7 +127,7 @@ class VolumeTracingService @Inject()( tracing: VolumeTracing): Fox[VolumeTracing] = { val sourceTracing = find(tracingId, Some(sourceVersion)) val dataLayer = volumeTracingLayer(tracingId, tracing) - val bucketStream = dataLayer.volumeBucketProvider.bucketStreamWithVersion(1) + val bucketStream = dataLayer.volumeBucketProvider.bucketStreamWithVersion() bucketStream.foreach { case (bucketPosition, _, version) => @@ -342,7 +342,7 @@ class VolumeTracingService @Inject()( private def allDataToOutputStream(tracingId: String, tracing: VolumeTracing, os: OutputStream): Future[Unit] = { val dataLayer = volumeTracingLayer(tracingId, tracing) val buckets: Iterator[NamedStream] = - new WKWBucketStreamSink(dataLayer)(dataLayer.bucketProvider.bucketStream(1, Some(tracing.version))) + new WKWBucketStreamSink(dataLayer)(dataLayer.bucketProvider.bucketStream(Some(tracing.version))) val zipResult = ZipIO.zip(buckets, os) @@ -404,7 +404,7 @@ class VolumeTracingService @Inject()( for { isTemporaryTracing <- isTemporaryTracing(sourceId) sourceDataLayer = volumeTracingLayer(sourceId, sourceTracing, isTemporaryTracing) - buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream(1) + buckets: Iterator[(BucketPosition, Array[Byte])] = sourceDataLayer.bucketProvider.bucketStream() destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) _ <- Fox.combined(buckets.map { case (bucketPosition, bucketData) => @@ -513,7 +513,7 @@ class VolumeTracingService @Inject()( val resulutionSet = new mutable.HashSet[Point3D]() val dataLayer = volumeTracingLayer(selector.tracingId, tracing) val bucketStream: Iterator[(BucketPosition, Array[Byte])] = - dataLayer.bucketProvider.bucketStream(1, Some(tracing.version)) + dataLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketStream.foreach { case (bucketPosition, _) => resulutionSet.add(bucketPosition.resolution) @@ -530,7 +530,7 @@ class VolumeTracingService @Inject()( val dataLayer = volumeTracingLayer(selector.tracingId, tracing) val labelSet: mutable.Set[UnsignedInteger] = scala.collection.mutable.Set() val bucketStream: Iterator[(BucketPosition, Array[Byte])] = - dataLayer.bucketProvider.bucketStream(1, Some(tracing.version)) + dataLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketStream.foreach { case (bucketPosition, data) => if (data.length > 1 && (resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1))) { // skip reverted buckets @@ -546,7 +546,7 @@ class VolumeTracingService @Inject()( case ((selector, tracing), sourceVolumeIndex) => val dataLayer = volumeTracingLayer(selector.tracingId, tracing) val bucketStream: Iterator[(BucketPosition, Array[Byte])] = - dataLayer.bucketProvider.bucketStream(1, Some(tracing.version)) + 
dataLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketStream.foreach { case (bucketPosition, data) => if (data.length > 1 && (resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1))) { From b662143967f8193fefa992a46dfca9a2e6d42072 Mon Sep 17 00:00:00 2001 From: Florian M Date: Wed, 7 Oct 2020 15:22:38 +0200 Subject: [PATCH 093/121] fix issues after merge --- .../tracingstore/tracings/volume/VolumeTracingService.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 350a4c9cf0e..b67eb82d6d5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -491,7 +491,7 @@ class VolumeTracingService @Inject()( for { tracing <- find(tracingId) ?~> "tracing.notFound" volumeLayer = volumeTracingLayer(tracingId, tracing) - bucketStream = volumeLayer.bucketProvider.bucketStream(1, Some(tracing.version)) + bucketStream = volumeLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketPosOpt = if (bucketStream.hasNext) { val bucket = bucketStream.next() val bucketPos = bucket._1 @@ -608,7 +608,7 @@ class VolumeTracingService @Inject()( } mergedVolume.addLabelSet(importLabelSet) - volumeLayer.bucketProvider.bucketStream(1).foreach { + volumeLayer.bucketProvider.bucketStream().foreach { case (position, bytes) => if (!isAllZero(bytes)) { mergedVolume.add(0, position, bytes) From 35dd31e861f90524f79e53288fc09399cd3e8299 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 15:50:29 +0200 Subject: [PATCH 094/121] disable continuous drawing for now since its performance needs to be adapted to the multi-resolution feature --- frontend/javascripts/libs/drawing.js | 8 +++--- .../oxalis/model/sagas/volumetracing_saga.js | 25 ++++++++++--------- .../volume_annotation_sampling.js | 23 +++++++++-------- .../oxalis/model/volumetracing/volumelayer.js | 10 +++++++- 4 files changed, 38 insertions(+), 28 deletions(-) diff --git a/frontend/javascripts/libs/drawing.js b/frontend/javascripts/libs/drawing.js index 576bde48b9f..b414a162637 100644 --- a/frontend/javascripts/libs/drawing.js +++ b/frontend/javascripts/libs/drawing.js @@ -139,8 +139,8 @@ class Drawing { y1: number, x2: number, y2: number, - bufX0: Array, - bufX1: Array, + bufX0: Float32Array, + bufX1: Float32Array, minX: number, maxX: number, ): void { @@ -183,8 +183,8 @@ class Drawing { const [minX, maxX] = [Math.min(xa, xb, xc, xd), Math.max(xa, xb, xc, xd)]; const [minY, maxY] = [Math.min(ya, yb, yc, yd), Math.max(ya, yb, yc, yd)]; - const bufX0 = new Array(maxY); - const bufX1 = new Array(maxY); + const bufX0 = new Float32Array(maxY); + const bufX1 = new Float32Array(maxY); this.paintBorder(xa, ya, xb, yb, bufX0, bufX1, minX, maxX); this.paintBorder(xb, yb, xc, yc, bufX0, bufX1, minX, maxX); diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index a4381ef244e..cb979873dab 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -169,18 +169,19 @@ export function* editVolumeLayerAsync(): Generator { 
currentLayer.addContour(addToLayerAction.position); } if (activeTool === VolumeToolEnum.BRUSH) { - const rectangleIterator = currentLayer.getRectangleVoxelBuffer2D( - lastPosition, - addToLayerAction.position, - ); - if (rectangleIterator) { - yield* call( - labelWithVoxelBuffer2D, - rectangleIterator, - contourTracingMode, - labeledZoomStep, - ); - } + // Disable continuous drawing for performance reasons + // const rectangleVoxelBuffer2D = currentLayer.getRectangleVoxelBuffer2D( + // lastPosition, + // addToLayerAction.position, + // ); + // if (rectangleVoxelBuffer2D) { + // yield* call( + // labelWithVoxelBuffer2D, + // rectangleVoxelBuffer2D, + // contourTracingMode, + // labeledZoomStep, + // ); + // } yield* call( labelWithVoxelBuffer2D, currentLayer.getCircleVoxelBuffer2D(addToLayerAction.position), diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 16f3b8d72a0..9fb0c6cc6e4 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -37,10 +37,13 @@ function upsampleVoxelMap( const thirdDimensionBucketValue = Math.floor( thirdDimensionVoxelValue / targetResolution[dimensionIndices[2]] / constants.BUCKET_WIDTH, ); + const warnAboutCouldNotCreate = _.once(zoomedAddress => { + console.warn(messages["sampling.could_not_get_or_create_bucket"](zoomedAddress)); + }); for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { const labeledBucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); if (labeledBucket.type === "null") { - console.warn(messages["sampling.could_not_get_or_create_bucket"](labeledBucketZoomedAddress)); + warnAboutCouldNotCreate(labeledBucket); continue; } const goalBaseBucketAddress = map3( @@ -68,12 +71,7 @@ function upsampleVoxelMap( targetZoomStep, ]); if (currentGoalBucket.type === "null") { - console.warn( - messages["sampling.could_not_get_or_create_bucket"]([ - ...currentGoalBucketAddress, - targetZoomStep, - ]), - ); + console.warn(warnAboutCouldNotCreate([...currentGoalBucketAddress, targetZoomStep])); continue; } const currentGoalVoxelMap = new Uint8Array(constants.BUCKET_WIDTH ** 2).fill(0); @@ -157,10 +155,15 @@ function downsampleVoxelMap( const labeledVoxelMapInTargetResolution: LabeledVoxelsMap = new Map(); const scaleToSource = map3((val, index) => val / sourceResolution[index], targetResolution); const scaleToGoal = map3((val, index) => val / targetResolution[index], sourceResolution); + + const warnAboutCouldNotCreate = _.once(zoomedAddress => { + console.warn(messages["sampling.could_not_get_or_create_bucket"](zoomedAddress)); + }); + for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { const labeledBucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); if (labeledBucket.type === "null") { - console.warn(messages["sampling.could_not_get_or_create_bucket"](labeledBucketZoomedAddress)); + warnAboutCouldNotCreate(labeledBucketZoomedAddress); continue; } const goalBucketAddress = map3( @@ -169,9 +172,7 @@ function downsampleVoxelMap( ); const goalBucket = dataCube.getOrCreateBucket([...goalBucketAddress, targetZoomStep]); if (goalBucket.type === "null") { - console.warn( - messages["sampling.could_not_get_or_create_bucket"]([...goalBucketAddress, targetZoomStep]), - ); + warnAboutCouldNotCreate([...goalBucketAddress, targetZoomStep]); continue; } // Scale the 
bucket address back to the source scale to calculate the offset the source bucket has to the goalBucket. diff --git a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js index 27c14608e97..28991c7b46c 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volumelayer.js @@ -303,7 +303,15 @@ class VolumeLayer { return [xa, ya, xb, yb, xc, yc, xd, yd]; } - getRectangleVoxelBuffer2D(lastPosition: Vector3, position: Vector3): ?VoxelBuffer2D { + getRectangleVoxelBuffer2D( + lastUnzoomedPosition: Vector3, + unzoomedPosition: Vector3, + ): ?VoxelBuffer2D { + const lastPosition = scaleGlobalPositionWithResolution( + lastUnzoomedPosition, + this.activeResolution, + ); + const position = scaleGlobalPositionWithResolution(unzoomedPosition, this.activeResolution); const state = Store.getState(); const { brushSize } = state.userConfiguration; From b5ceaed296e168538e029fac1b8424de4e7f68a7 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 15:58:24 +0200 Subject: [PATCH 095/121] fix linting --- frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js | 4 ++-- .../oxalis/model/volumetracing/volume_annotation_sampling.js | 1 + 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index cb979873dab..a3a0c0aac53 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -145,7 +145,7 @@ export function* editVolumeLayerAsync(): Generator { ); } - let lastPosition = startEditingAction.position; + // let lastPosition = startEditingAction.position; while (true) { const { addToLayerAction, finishEditingAction } = yield* race({ addToLayerAction: _take("ADD_TO_LAYER"), @@ -189,7 +189,7 @@ export function* editVolumeLayerAsync(): Generator { labeledZoomStep, ); } - lastPosition = addToLayerAction.position; + // lastPosition = addToLayerAction.position; } yield* call(finishLayer, currentLayer, activeTool, contourTracingMode, labeledZoomStep); diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 9fb0c6cc6e4..3d6b8474bc8 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -1,5 +1,6 @@ // @flow +import _ from "lodash"; import constants, { type Vector3, type LabeledVoxelsMap } from "oxalis/constants"; import { map3 } from "libs/utils"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; From 0d11203d4f129ae8656866c59381aa87b80d4d6b Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 15:58:34 +0200 Subject: [PATCH 096/121] adapt headline in import modal --- frontend/javascripts/oxalis/view/nml_upload_zone_container.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/view/nml_upload_zone_container.js b/frontend/javascripts/oxalis/view/nml_upload_zone_container.js index 4c9dcb95421..cc5854305bd 100644 --- a/frontend/javascripts/oxalis/view/nml_upload_zone_container.js +++ b/frontend/javascripts/oxalis/view/nml_upload_zone_container.js @@ -187,7 +187,7 @@ class NmlUploadZoneContainer extends React.PureComponent { 
const pluralS = this.state.files.length > 1 ? "s" : ""; return ( 0} onCancel={() => this.setState({ files: [] })} footer={ From 7b5831ba9f0a7ca7b195f93e1e2c5b7633aa83b4 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 16:01:16 +0200 Subject: [PATCH 097/121] change tracing to annotation --- frontend/javascripts/oxalis/view/nml_upload_zone_container.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/view/nml_upload_zone_container.js b/frontend/javascripts/oxalis/view/nml_upload_zone_container.js index cc5854305bd..e2c70cdfc1a 100644 --- a/frontend/javascripts/oxalis/view/nml_upload_zone_container.js +++ b/frontend/javascripts/oxalis/view/nml_upload_zone_container.js @@ -187,7 +187,7 @@ class NmlUploadZoneContainer extends React.PureComponent { const pluralS = this.state.files.length > 1 ? "s" : ""; return ( 0} onCancel={() => this.setState({ files: [] })} footer={ From b6fb784461a912457d2262a4a579528c6ca7e097 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 16:14:53 +0200 Subject: [PATCH 098/121] change magnification to resolution in user-facing strings and some internal ones --- .../admin/tasktype/task_type_create_view.js | 8 ++++---- frontend/javascripts/messages.js | 2 +- frontend/javascripts/oxalis/api/api_latest.js | 6 +++--- .../oxalis/model/accessors/dataset_accessor.js | 12 ++++++------ .../oxalis/model/bucket_data_handling/data_cube.js | 2 +- frontend/javascripts/oxalis/model_initialization.js | 2 +- .../oxalis/view/settings/dataset_settings_view.js | 2 +- .../oxalis/view/viewport_status_indicator.js | 6 +++--- 8 files changed, 20 insertions(+), 20 deletions(-) diff --git a/frontend/javascripts/admin/tasktype/task_type_create_view.js b/frontend/javascripts/admin/tasktype/task_type_create_view.js index b7881bb1b10..07a6a3f2c49 100644 --- a/frontend/javascripts/admin/tasktype/task_type_create_view.js +++ b/frontend/javascripts/admin/tasktype/task_type_create_view.js @@ -51,7 +51,7 @@ function isValidMagnification(rule, value, callback) { if (value === "" || value == null || (Math.log(value) / Math.log(2)) % 1 === 0) { callback(); } else { - callback("The magnification must be stated as a power of two (e.g., 1 or 2 or 4 or 8 ...)"); + callback("The resolution must be stated as a power of two (e.g., 1 or 2 or 4 or 8 ...)"); } } @@ -64,7 +64,7 @@ function getMagnificationAdaptedSettings(rawSettings) { allowedMagnifications.max != null && allowedMagnifications.min > allowedMagnifications.max ) { - Toast.error("Minimum magnification must not be greater than maximum magnification."); + Toast.error("Minimum resolution must not be greater than maximum resolution."); return null; } @@ -301,9 +301,9 @@ class TaskTypeCreateView extends React.PureComponent { valuePropName: "checked", })( - Restrict Magnifications{" "} + Restrict Resolutions{" "} diff --git a/frontend/javascripts/messages.js b/frontend/javascripts/messages.js index 7d378950308..2d21097d245 100644 --- a/frontend/javascripts/messages.js +++ b/frontend/javascripts/messages.js @@ -111,7 +111,7 @@ instead. Only enable this option if you understand its effect. All layers will n "tracing.uint64_segmentation_warning": "This is an unsigned 64-bit segmentation. The displayed ids are truncated to 32-bit. Thus, they might not match the ids on the server.", "tracing.segmentation_zoom_warning_agglomerate": - "Segmentation data which is mapped using an agglomerate file cannot be rendered in this magnification. 
Please zoom in further.", + "Segmentation data which is mapped using an agglomerate file cannot be rendered in this resolution. Please zoom in further.", "tracing.no_access": "You are not allowed to access this annotation.", "tracing.no_allowed_mode": "There was no valid allowed annotation mode specified.", "tracing.volume_missing_segmentation": "Volume is allowed, but segmentation does not exist.", diff --git a/frontend/javascripts/oxalis/api/api_latest.js b/frontend/javascripts/oxalis/api/api_latest.js index 85dc927d136..bee2d7fc99d 100644 --- a/frontend/javascripts/oxalis/api/api_latest.js +++ b/frontend/javascripts/oxalis/api/api_latest.js @@ -855,9 +855,9 @@ class DataApi { /** * Returns raw binary data for a given layer, position and zoom level. If the zoom - * level is not provided, the first magnification will be used. If this - * magnification does not exist, the next existing magnification will be used. - * If the zoom level is provided and points to a not existent magnification, + * level is not provided, the first resolution will be used. If this + * resolution does not exist, the next existing resolution will be used. + * If the zoom level is provided and points to a not existent resolution, * 0 will be returned. * * @example // Return the greyscale value for a bucket diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 97547439867..fe86ff7daa0 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -544,9 +544,9 @@ export function getEnabledLayers( /* This function returns layers which cannot be rendered (since - the current magnification is missing), even though they should + the current resolution is missing), even though they should be rendered (since they are enabled). The function takes fallback - magnifications into account if renderMissingDataBlack is disabled. + resolutions into account if renderMissingDataBlack is disabled. */ function _getUnrenderableLayersForCurrentZoom(state: OxalisState) { const { dataset } = state; @@ -569,12 +569,12 @@ function _getUnrenderableLayersForCurrentZoom(state: OxalisState) { if (renderMissingDataBlack) { // We already know that the layer is missing. Since `renderMissingDataBlack` - // is enabled, the fallback magnifications don't matter. The layer cannot be + // is enabled, the fallback resolutions don't matter. The layer cannot be // rendered. return true; } - // The current magnification is missing and fallback rendering + // The current resolution is missing and fallback rendering // is activated. Thus, check whether one of the fallback // zoomSteps can be rendered. return !_.range(1, maxZoomStepDiff + 1).some(diff => { @@ -619,14 +619,14 @@ export function getRenderableResolutionForSegmentation( }; } - // Since `renderMissingDataBlack` is enabled, the fallback magnifications + // Since `renderMissingDataBlack` is enabled, the fallback resolutions // should not be considered. // rendered. if (renderMissingDataBlack) { return null; } - // The current magnification is missing and fallback rendering + // The current resolution is missing and fallback rendering // is activated. Thus, check whether one of the fallback // zoomSteps can be rendered. 
for ( diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js index 939614451f7..7109024a641 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.js @@ -402,7 +402,7 @@ class DataCube { } if (!this.resolutionInfo.hasIndex(zoomStep)) { throw new Error( - `DataCube.floodFill was called with a zoomStep of ${zoomStep} which does not exist for the current magnification.`, + `DataCube.floodFill was called with a zoomStep of ${zoomStep} which does not exist for the current resolution.`, ); } const seedVoxelIndex = this.getVoxelIndex(seedVoxel, zoomStep); diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index 2146ab16ff0..ccfb1027281 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -446,7 +446,7 @@ function setupLayerForVolumeTracing( Toast.warning( messages["tracing.volume_resolution_mismatch"], {}, - `The magnifications of the volume tracing (${tracingResolutions.toString()}) don't match the dataset's magnifications (${targetResolutions.toString()}). This can happen when the magnification of the dataset was changed after this tracing was created. Note that there might be rendering issues for this reason.`, + `The resolutions of the volume tracing (${tracingResolutions.toString()}) don't match the dataset's resolutions (${targetResolutions.toString()}). This can happen when the resolution of the dataset was changed after this tracing was created. Note that there might be rendering issues for this reason.`, ); throw HANDLED_ERROR; } else { diff --git a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js index 593bbfa7d01..7cb19858ab8 100644 --- a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js +++ b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js @@ -562,7 +562,7 @@ class DatasetSettings extends React.PureComponent { label={ {settings.renderMissingDataBlack}{" "} - + diff --git a/frontend/javascripts/oxalis/view/viewport_status_indicator.js b/frontend/javascripts/oxalis/view/viewport_status_indicator.js index fdc5d090a8b..b0302f76c04 100644 --- a/frontend/javascripts/oxalis/view/viewport_status_indicator.js +++ b/frontend/javascripts/oxalis/view/viewport_status_indicator.js @@ -31,9 +31,9 @@ export default function ViewportStatusIndicator() { title={
The layer{pluralS} {unrenderableLayerNames.map(name => `"${name}"`).join(", ")} cannot be - rendered because {pronounAndVerb} exist in the current magnification. Please adjust the - zoom level to change the active magnification. Also consider disabling the option - “Render Missing Data Black” if this is not already the case. + rendered because {pronounAndVerb} exist in the current resolution. Please adjust the zoom + level to change the active resolution. Also consider disabling the option “Render + Missing Data Black” if this is not already the case.
} > From 8c9d8e6c5648ec01cba52641a8597c5b150c913e Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 16:24:56 +0200 Subject: [PATCH 099/121] make resolution-warning less verbose --- .../oxalis/view/viewport_status_indicator.js | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/oxalis/view/viewport_status_indicator.js b/frontend/javascripts/oxalis/view/viewport_status_indicator.js index b0302f76c04..65da9593419 100644 --- a/frontend/javascripts/oxalis/view/viewport_status_indicator.js +++ b/frontend/javascripts/oxalis/view/viewport_status_indicator.js @@ -4,12 +4,15 @@ import * as React from "react"; import { Icon, Tooltip } from "antd"; import { getUnrenderableLayersForCurrentZoom } from "oxalis/model/accessors/dataset_accessor"; +import { getCurrentResolution } from "oxalis/model/accessors/flycam_accessor"; import { usePolledState } from "libs/react_helpers"; const { useState } = React; export default function ViewportStatusIndicator() { const [unrenderableLayerNames, setUnrenderableLayerNames] = useState([]); + const [renderMissingDataBlack, setRenderMissingDataBlack] = useState(true); + const [currentResolution, setCurrentResolution] = useState([1, 1, 1]); usePolledState(state => { const newMissingLayersNames = getUnrenderableLayersForCurrentZoom(state); @@ -18,6 +21,10 @@ export default function ViewportStatusIndicator() { layer.category === "segmentation" ? "Segmentation" : layer.name, ), ); + + setRenderMissingDataBlack(state.datasetConfiguration.renderMissingDataBlack); + + setCurrentResolution(getCurrentResolution(state)); }); if (unrenderableLayerNames.length === 0) { @@ -26,14 +33,17 @@ export default function ViewportStatusIndicator() { const pluralS = unrenderableLayerNames.length > 1 ? "s" : ""; const pronounAndVerb = unrenderableLayerNames.length > 1 ? "they don't" : "it doesn't"; + const renderMissingDataBlackHint = renderMissingDataBlack + ? ` Also consider disabling the option "Render Missing Data Black".` + : null; + return ( - The layer{pluralS} {unrenderableLayerNames.map(name => `"${name}"`).join(", ")} cannot be - rendered because {pronounAndVerb} exist in the current resolution. Please adjust the zoom - level to change the active resolution. Also consider disabling the option “Render - Missing Data Black” if this is not already the case. + The layer{pluralS} {unrenderableLayerNames.map(name => `"${name}"`).join(", ")}{" "} + {pronounAndVerb} exist in the current resolution {currentResolution.join("-")}. 
Adjust the + zoom level to change the active resolution.{renderMissingDataBlackHint} } > From 0893310cb8a967f9dce98c25183b884db80a4070 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 16:53:58 +0200 Subject: [PATCH 100/121] try to fix e.trigger is not a function --- .../volume_annotation_sampling.js | 40 +++++++++++++++---- 1 file changed, 32 insertions(+), 8 deletions(-) diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 3d6b8474bc8..cd9ca1f3d89 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -4,6 +4,7 @@ import _ from "lodash"; import constants, { type Vector3, type LabeledVoxelsMap } from "oxalis/constants"; import { map3 } from "libs/utils"; import type DataCube from "oxalis/model/bucket_data_handling/data_cube"; +import { type Bucket } from "oxalis/model/bucket_data_handling/bucket"; import messages from "messages"; import type { DimensionMap } from "oxalis/model/dimensions"; @@ -44,7 +45,7 @@ function upsampleVoxelMap( for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { const labeledBucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); if (labeledBucket.type === "null") { - warnAboutCouldNotCreate(labeledBucket); + warnAboutCouldNotCreate(labeledBucket.getAddress()); continue; } const goalBaseBucketAddress = map3( @@ -289,27 +290,49 @@ export function applyVoxelMap( shouldOverwrite: boolean = true, overwritableValue: number = 0, ) { + function preprocessBucket(bucket) { + if (bucket.type === "null") { + return; + } + bucket.markAndAddBucketForUndo(); + } + + function postprocessBucket(bucket) { + if (bucket.type === "null") { + return; + } + dataCube.pushQueue.insert(bucket); + bucket.trigger("bucketLabeled"); + } + for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { - let bucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); + let bucket: Bucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); if (bucket.type === "null") { continue; } - bucket.markAndAddBucketForUndo(); - let data = bucket.getOrCreateData(); + preprocessBucket(bucket); const out = new Float32Array(3); get3DAddress(0, 0, out); const thirdDimensionValueInBucket = out[2]; for (let sliceCount = 0; sliceCount < numberOfSlicesToApply; sliceCount++) { - if (thirdDimensionValueInBucket + sliceCount === constants.BUCKET_WIDTH) { + const newThirdDimValue = thirdDimensionValueInBucket + sliceCount; + if (newThirdDimValue > 0 && newThirdDimValue % constants.BUCKET_WIDTH === 0) { // The current slice is in the next bucket in the third direction. 
const nextBucketZoomedAddress = [...labeledBucketZoomedAddress]; nextBucketZoomedAddress[thirdDimensionIndex]++; + + postprocessBucket(bucket); + bucket = dataCube.getOrCreateBucket(nextBucketZoomedAddress); if (bucket.type === "null") { continue; } - data = bucket.getOrCreateData(); + preprocessBucket(bucket); } + if (bucket.type === "null") { + continue; + } + const data = bucket.getOrCreateData(); for (let firstDim = 0; firstDim < constants.BUCKET_WIDTH; firstDim++) { for (let secondDim = 0; secondDim < constants.BUCKET_WIDTH; secondDim++) { if (voxelMap[firstDim * constants.BUCKET_WIDTH + secondDim] === 1) { @@ -326,7 +349,8 @@ export function applyVoxelMap( } } } - dataCube.pushQueue.insert(bucket); - bucket.trigger("bucketLabeled"); + + // Post-processing: add to pushQueue and notify about labeling + postprocessBucket(bucket); } } From 4abf801f6a95522155610e76bb32160439bdff4e Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 16:55:16 +0200 Subject: [PATCH 101/121] fix linting --- frontend/javascripts/oxalis/view/viewport_status_indicator.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/view/viewport_status_indicator.js b/frontend/javascripts/oxalis/view/viewport_status_indicator.js index 65da9593419..da91130c143 100644 --- a/frontend/javascripts/oxalis/view/viewport_status_indicator.js +++ b/frontend/javascripts/oxalis/view/viewport_status_indicator.js @@ -34,7 +34,7 @@ export default function ViewportStatusIndicator() { const pronounAndVerb = unrenderableLayerNames.length > 1 ? "they don't" : "it doesn't"; const renderMissingDataBlackHint = renderMissingDataBlack - ? ` Also consider disabling the option "Render Missing Data Black".` + ? " Also consider disabling the option “Render Missing Data Black”." : null; return ( From b739ceedc96303a7e9ebd25b90fa139246d76852 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 17:04:09 +0200 Subject: [PATCH 102/121] fix bug in applyLabeledVoxelMap --- .../oxalis/model/volumetracing/volume_annotation_sampling.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index cd9ca1f3d89..7d86ba6abb3 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -45,7 +45,7 @@ function upsampleVoxelMap( for (const [labeledBucketZoomedAddress, voxelMap] of labeledVoxelMap) { const labeledBucket = dataCube.getOrCreateBucket(labeledBucketZoomedAddress); if (labeledBucket.type === "null") { - warnAboutCouldNotCreate(labeledBucket.getAddress()); + warnAboutCouldNotCreate(labeledBucketZoomedAddress); continue; } const goalBaseBucketAddress = map3( @@ -316,7 +316,7 @@ export function applyVoxelMap( const thirdDimensionValueInBucket = out[2]; for (let sliceCount = 0; sliceCount < numberOfSlicesToApply; sliceCount++) { const newThirdDimValue = thirdDimensionValueInBucket + sliceCount; - if (newThirdDimValue > 0 && newThirdDimValue % constants.BUCKET_WIDTH === 0) { + if (sliceCount > 0 && newThirdDimValue % constants.BUCKET_WIDTH === 0) { // The current slice is in the next bucket in the third direction. 
const nextBucketZoomedAddress = [...labeledBucketZoomedAddress]; nextBucketZoomedAddress[thirdDimensionIndex]++; From b724847ef00912530330dc2375cd38971b46282f Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 17:07:24 +0200 Subject: [PATCH 103/121] fix flow --- .../model/volumetracing/volume_annotation_sampling.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js index 7d86ba6abb3..4e4646a7546 100644 --- a/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js +++ b/frontend/javascripts/oxalis/model/volumetracing/volume_annotation_sampling.js @@ -290,14 +290,14 @@ export function applyVoxelMap( shouldOverwrite: boolean = true, overwritableValue: number = 0, ) { - function preprocessBucket(bucket) { + function preprocessBucket(bucket: Bucket) { if (bucket.type === "null") { return; } bucket.markAndAddBucketForUndo(); } - function postprocessBucket(bucket) { + function postprocessBucket(bucket: Bucket) { if (bucket.type === "null") { return; } @@ -322,12 +322,12 @@ export function applyVoxelMap( nextBucketZoomedAddress[thirdDimensionIndex]++; postprocessBucket(bucket); - bucket = dataCube.getOrCreateBucket(nextBucketZoomedAddress); + preprocessBucket(bucket); + if (bucket.type === "null") { continue; } - preprocessBucket(bucket); } if (bucket.type === "null") { continue; From 74fc76393532b51a56546b9c4373fb2515ce1463 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Wed, 7 Oct 2020 17:36:56 +0200 Subject: [PATCH 104/121] show fallback-not-included warning only when there is fallback data --- frontend/javascripts/admin/admin_rest_api.js | 4 ++-- .../oxalis/view/action-bar/tracing_actions_view.js | 6 +++--- frontend/javascripts/oxalis/view/action_bar_view.js | 5 ++++- frontend/javascripts/oxalis/view/version_list.js | 4 ++-- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.js b/frontend/javascripts/admin/admin_rest_api.js index 41e6427e88c..059437b0bca 100644 --- a/frontend/javascripts/admin/admin_rest_api.js +++ b/frontend/javascripts/admin/admin_rest_api.js @@ -703,14 +703,14 @@ export function convertToHybridTracing(annotationId: string): Promise { export async function downloadNml( annotationId: string, annotationType: APIAnnotationType, - showVolumeDownloadWarning?: boolean = false, + showVolumeFallbackDownloadWarning?: boolean = false, versions?: Versions = {}, ) { const possibleVersionString = Object.entries(versions) // $FlowIssue[incompatible-type] Flow returns val as mixed here due to the use of Object.entries .map(([key, val]) => `${key}Version=${val}`) .join("&"); - if (showVolumeDownloadWarning) { + if (showVolumeFallbackDownloadWarning) { Toast.info(messages["annotation.no_fallback_data_included"], { timeout: 12000 }); } const downloadUrl = `/api/annotations/${annotationType}/${annotationId}/download?${possibleVersionString}`; diff --git a/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.js index 2b80fff98b8..481bdc7683d 100644 --- a/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.js @@ -35,7 +35,7 @@ import features from "features"; type OwnProps = {| layoutMenu: React.Node, - hasVolume: boolean, + hasVolumeFallback: boolean, |}; type 
StateProps = {| annotationType: APIAnnotationType, @@ -278,8 +278,8 @@ class TracingActionsView extends React.PureComponent { handleDownload = async () => { await Model.ensureSavedState(); - const { annotationId, annotationType, hasVolume } = this.props; - downloadNml(annotationId, annotationType, hasVolume); + const { annotationId, annotationType, hasVolumeFallback } = this.props; + downloadNml(annotationId, annotationType, hasVolumeFallback); }; handleFinishAndGetNextTask = async () => { diff --git a/frontend/javascripts/oxalis/view/action_bar_view.js b/frontend/javascripts/oxalis/view/action_bar_view.js index 091e5b441c6..36c7c6b80a1 100644 --- a/frontend/javascripts/oxalis/view/action_bar_view.js +++ b/frontend/javascripts/oxalis/view/action_bar_view.js @@ -47,6 +47,7 @@ type StateProps = {| viewMode: ViewMode, controlMode: ControlMode, hasVolume: boolean, + hasVolumeFallback: boolean, hasSkeleton: boolean, showVersionRestore: boolean, isReadOnly: boolean, @@ -147,6 +148,7 @@ class ActionBarView extends React.PureComponent { render() { const { hasVolume, + hasVolumeFallback, isReadOnly, dataset, showVersionRestore, @@ -175,7 +177,7 @@ class ActionBarView extends React.PureComponent {
{isTraceMode && !showVersionRestore ? ( - + ) : ( )} @@ -209,6 +211,7 @@ const mapStateToProps = (state: OxalisState): StateProps => ({ controlMode: state.temporaryConfiguration.controlMode, showVersionRestore: state.uiInformation.showVersionRestore, hasVolume: state.tracing.volume != null, + hasVolumeFallback: state.tracing.volume != null && state.tracing.volume.fallbackLayer != null, hasSkeleton: state.tracing.skeleton != null, isReadOnly: !state.tracing.restrictions.allowUpdate, }); diff --git a/frontend/javascripts/oxalis/view/version_list.js b/frontend/javascripts/oxalis/view/version_list.js index 0280337b83e..1d9a1709da1 100644 --- a/frontend/javascripts/oxalis/view/version_list.js +++ b/frontend/javascripts/oxalis/view/version_list.js @@ -108,8 +108,8 @@ class VersionList extends React.Component { Store.dispatch(setAnnotationAllowUpdateAction(true)); } else { const { annotationType, annotationId, volume } = Store.getState().tracing; - const includesVolumeData = volume != null; - downloadNml(annotationId, annotationType, includesVolumeData, { + const includesVolumeFallbackData = volume != null && volume.fallbackLayer != null; + downloadNml(annotationId, annotationType, includesVolumeFallbackData, { // $FlowIssue[invalid-computed-prop] See https://github.com/facebook/flow/issues/8299 [this.props.tracingType]: version, }); From 2d8c9dbb28c41eae43f4cb53956caf4c5abce72f Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 8 Oct 2020 12:21:09 +0200 Subject: [PATCH 105/121] fix anisotropic bucket adresses --- .../tracings/volume/VolumeTracingLayer.scala | 6 +++-- .../volume/VolumeTracingService.scala | 27 ++++++++++++++----- 2 files changed, 24 insertions(+), 9 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index 4d2800ef088..4a1f061600b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -70,7 +70,8 @@ case class VolumeTracingLayer( elementClass: ElementClass.Value, largestSegmentId: Long, isTemporaryTracing: Boolean = false, - defaultViewConfiguration: Option[SegmentationLayerViewConfiguration] = None + defaultViewConfiguration: Option[SegmentationLayerViewConfiguration] = None, + volumeResolutions: List[Point3D] = List.empty )(implicit val volumeDataStore: FossilDBClient, implicit val volumeDataCache: TemporaryVolumeDataStore, implicit val temporaryTracingStore: TemporaryTracingStore[VolumeTracing]) @@ -90,7 +91,8 @@ case class VolumeTracingLayer( val mappings: Option[Set[String]] = None - val resolutions: List[Point3D] = List(Point3D(1, 1, 1)) // unused for volume tracings + val resolutions: List[Point3D] = if (volumeResolutions.nonEmpty) volumeResolutions else List(Point3D(1, 1, 1)) + override def containsResolution(resolution: Point3D) = true // allow requesting buckets of all resolutions. database takes care of missing. 
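  // Returning true keeps bucket requests from being filtered by magnification; magnifications
  // for which nothing was ever labeled simply resolve to missing data in the store.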
} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index b67eb82d6d5..64d94f55e0a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -89,9 +89,9 @@ class VolumeTracingService @Inject()( case Full(t) => action match { case a: UpdateBucketVolumeAction => - val resolution = math.pow(2, a.zoomStep).toInt + val resolution = lookUpVolumeResolution(t, a.zoomStep) val bucket = - BucketPosition(a.position.x, a.position.y, a.position.z, Point3D(resolution, resolution, resolution)) + BucketPosition(a.position.x, a.position.y, a.position.z, resolution) saveBucket(volumeTracingLayer(tracingId, t), bucket, a.data, updateGroup.version).map(_ => t) case a: UpdateTracingVolumeAction => Fox.successful( @@ -123,6 +123,16 @@ class VolumeTracingService @Inject()( updateGroup.actions.map(_.addTimestamp(updateGroup.timestamp)).map(_.transformToCompact)) } yield Fox.successful(()) + private def lookUpVolumeResolution(tracing: VolumeTracing, zoomStep: Int): Point3D = + if (tracing.resolutions.nonEmpty) { + if (tracing.resolutions.length >= zoomStep) { + tracing.resolutions(zoomStep) + } else Point3D(0, 0, 0) + } else { + val isotropicResolution = math.pow(2, zoomStep).toInt + Point3D(isotropicResolution, isotropicResolution, isotropicResolution) + } + private def revertToVolumeVersion(tracingId: String, sourceVersion: Long, newVersion: Long, @@ -426,11 +436,14 @@ class VolumeTracingService @Inject()( private def volumeTracingLayer(tracingId: String, tracing: VolumeTracing, isTemporaryTracing: Boolean = false): VolumeTracingLayer = - VolumeTracingLayer(tracingId, - tracing.boundingBox, - tracing.elementClass, - tracing.largestSegmentId, - isTemporaryTracing) + VolumeTracingLayer( + tracingId, + tracing.boundingBox, + tracing.elementClass, + tracing.largestSegmentId, + isTemporaryTracing, + volumeResolutions = tracing.resolutions.map(point3DFromProto).toList + ) def updateActionLog(tracingId: String): Fox[JsValue] = { def versionedTupleToJson(tuple: (Long, List[CompactVolumeUpdateAction])): JsObject = From 43efca6ccc1c1bfbbe065b84d812dcf6c5c9c170 Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 8 Oct 2020 13:27:54 +0200 Subject: [PATCH 106/121] treat empty resolution list as no resolution list --- frontend/javascripts/oxalis/model_initialization.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index ccfb1027281..3976fe04a61 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -423,11 +423,13 @@ function setupLayerForVolumeTracing( const fallbackLayer = layers[fallbackLayerIndex]; const boundaries = getBoundaries(dataset); + const tracingHasResolutionList = Boolean(tracing.resolutions) && tracing.resolutions.length > 0; + // Legacy tracings don't have the `tracing.resolutions` property // since they were created before WK started to maintain multiple resolution // in volume annotations. Therefore, this code falls back to mag (1, 1, 1) for // that case. 
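  // Illustrative example (values assumed): tracing.resolutions = [{x: 1, y: 1, z: 1}, {x: 2, y: 2, z: 1}]
  // yields tracingResolutions = [[1, 1, 1], [2, 2, 1]]; a missing or empty list falls back to [[1, 1, 1]].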
- const tracingResolutions = tracing.resolutions + const tracingResolutions = tracingHasResolutionList ? tracing.resolutions.map(({ x, y, z }) => [x, y, z]) : [[1, 1, 1]]; @@ -442,7 +444,7 @@ function setupLayerForVolumeTracing( resolutionsAreSubset(tracingResolutions, targetResolutions); if (!doResolutionsMatch) { - if (tracing.resolutions) { + if (tracingHasResolutionList) { Toast.warning( messages["tracing.volume_resolution_mismatch"], {}, From b69d062cc48fe2545d3aa84be1446b7b229eb23f Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 8 Oct 2020 13:33:06 +0200 Subject: [PATCH 107/121] change Boolean() to != null --- frontend/javascripts/oxalis/model_initialization.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index 3976fe04a61..23679816e55 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -423,7 +423,7 @@ function setupLayerForVolumeTracing( const fallbackLayer = layers[fallbackLayerIndex]; const boundaries = getBoundaries(dataset); - const tracingHasResolutionList = Boolean(tracing.resolutions) && tracing.resolutions.length > 0; + const tracingHasResolutionList = tracing.resolutions != null && tracing.resolutions.length > 0; // Legacy tracings don't have the `tracing.resolutions` property // since they were created before WK started to maintain multiple resolution From 20e10fdc197f0e86a2b6d710997ac4e2db6767e5 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 8 Oct 2020 13:36:06 +0200 Subject: [PATCH 108/121] fix flow in model initialization --- frontend/javascripts/oxalis/model_initialization.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index 23679816e55..8c03b700c30 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -423,14 +423,15 @@ function setupLayerForVolumeTracing( const fallbackLayer = layers[fallbackLayerIndex]; const boundaries = getBoundaries(dataset); - const tracingHasResolutionList = tracing.resolutions != null && tracing.resolutions.length > 0; + const resolutions = tracing.resolutions || []; + const tracingHasResolutionList = resolutions.length > 0; // Legacy tracings don't have the `tracing.resolutions` property // since they were created before WK started to maintain multiple resolution // in volume annotations. Therefore, this code falls back to mag (1, 1, 1) for // that case. const tracingResolutions = tracingHasResolutionList - ? tracing.resolutions.map(({ x, y, z }) => [x, y, z]) + ? 
resolutions.map(({ x, y, z }) => [x, y, z]) : [[1, 1, 1]]; console.log("Volume tracing resolutions:", tracingResolutions); From 76f0e51e86ef3a317336ce2995154507f91b2e48 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 8 Oct 2020 13:59:27 +0200 Subject: [PATCH 109/121] ensure volume layer is saved before reloading; fixes #4857 --- frontend/javascripts/oxalis/api/api_latest.js | 11 ++++++++--- .../oxalis/view/right-menu/trees_tab_view.js | 2 +- .../oxalis/view/settings/dataset_settings_view.js | 4 ++-- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/frontend/javascripts/oxalis/api/api_latest.js b/frontend/javascripts/oxalis/api/api_latest.js index bee2d7fc99d..c6a6ab83608 100644 --- a/frontend/javascripts/oxalis/api/api_latest.js +++ b/frontend/javascripts/oxalis/api/api_latest.js @@ -738,13 +738,18 @@ class DataApi { /** * Invalidates all downloaded buckets of the given layer so that they are reloaded. */ - reloadBuckets(layerName: string): void { - _.forEach(this.model.dataLayers, dataLayer => { + async reloadBuckets(layerName: string): Promise { + await Promise.all(Object.keys(this.model.dataLayers).map(async (currentLayerName) => { + const dataLayer = this.model.dataLayers[currentLayerName] + if (dataLayer.cube.isSegmentation) { + await Model.ensureSavedState(); + } + if (dataLayer.name === layerName) { dataLayer.cube.collectAllBuckets(); dataLayer.layerRenderingManager.refresh(); } - }); + })); } /** diff --git a/frontend/javascripts/oxalis/view/right-menu/trees_tab_view.js b/frontend/javascripts/oxalis/view/right-menu/trees_tab_view.js index 310d05d3a15..13416d64541 100644 --- a/frontend/javascripts/oxalis/view/right-menu/trees_tab_view.js +++ b/frontend/javascripts/oxalis/view/right-menu/trees_tab_view.js @@ -218,7 +218,7 @@ export async function importTracingFiles(files: Array, createGroupForEachF Store.dispatch(setVersionNumberAction(oldVolumeTracing.version + 1, "volume")); Store.dispatch(setMaxCellAction(newLargestSegmentId)); await clearCache(dataset, oldVolumeTracing.tracingId); - api.data.reloadBuckets(oldVolumeTracing.tracingId); + await api.data.reloadBuckets(oldVolumeTracing.tracingId); window.needsRerender = true; } } diff --git a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js index 7cb19858ab8..3c1468f0d2b 100644 --- a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js +++ b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js @@ -450,7 +450,7 @@ class DatasetSettings extends React.PureComponent { reloadLayerData = async (layerName: string): Promise => { await clearCache(this.props.dataset, layerName); - api.data.reloadBuckets(layerName); + await api.data.reloadBuckets(layerName); window.needsRerender = true; Toast.success(`Successfully reloaded data of layer ${layerName}.`); }; @@ -466,7 +466,7 @@ class DatasetSettings extends React.PureComponent { const { layers } = this.props.datasetConfiguration; const reloadAllLayersPromises = Object.keys(layers).map(async layerName => { await clearCache(this.props.dataset, layerName); - api.data.reloadBuckets(layerName); + await api.data.reloadBuckets(layerName); }); await Promise.all(reloadAllLayersPromises); window.needsRerender = true; From a3785b2c379728257ba408c5eb3434267cbe0254 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 8 Oct 2020 14:20:14 +0200 Subject: [PATCH 110/121] format --- frontend/javascripts/oxalis/api/api_latest.js | 22 ++++++++++--------- 1 file changed, 12 
insertions(+), 10 deletions(-) diff --git a/frontend/javascripts/oxalis/api/api_latest.js b/frontend/javascripts/oxalis/api/api_latest.js index c6a6ab83608..39240e2cd1a 100644 --- a/frontend/javascripts/oxalis/api/api_latest.js +++ b/frontend/javascripts/oxalis/api/api_latest.js @@ -739,17 +739,19 @@ class DataApi { * Invalidates all downloaded buckets of the given layer so that they are reloaded. */ async reloadBuckets(layerName: string): Promise { - await Promise.all(Object.keys(this.model.dataLayers).map(async (currentLayerName) => { - const dataLayer = this.model.dataLayers[currentLayerName] - if (dataLayer.cube.isSegmentation) { - await Model.ensureSavedState(); - } + await Promise.all( + Object.keys(this.model.dataLayers).map(async currentLayerName => { + const dataLayer = this.model.dataLayers[currentLayerName]; + if (dataLayer.cube.isSegmentation) { + await Model.ensureSavedState(); + } - if (dataLayer.name === layerName) { - dataLayer.cube.collectAllBuckets(); - dataLayer.layerRenderingManager.refresh(); - } - })); + if (dataLayer.name === layerName) { + dataLayer.cube.collectAllBuckets(); + dataLayer.layerRenderingManager.refresh(); + } + }), + ); } /** From 1a34d5010d169999c77420b60f5a67de929adcc3 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 8 Oct 2020 14:39:46 +0200 Subject: [PATCH 111/121] disable copy-segmentation feature for higher mags due to performance --- .../oxalis/model/sagas/volumetracing_saga.js | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index a3a0c0aac53..b6109d1db0e 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -344,6 +344,18 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga return; } + // Disable copy-segmentation for the same zoom steps where the trace tool is forbidden, too, + // to avoid large performance lags. + // This restriction should be soften'ed when https://github.com/scalableminds/webknossos/issues/4639 + // is solved. + const isResolutionTooLow = yield* select(state => isVolumeTraceToolDisallowed(state)); + if (isResolutionTooLow) { + Toast.warning( + 'The "copy segmentation"-feature is not supported at this zoom level. 
Please zoom in further.', + ); + return; + } + const segmentationLayer: DataLayer = yield* call([Model, Model.getSegmentationLayer]); const { cube } = segmentationLayer; const requestedZoomStep = yield* select(state => getRequestLogZoomStep(state)); @@ -372,7 +384,7 @@ function* copySegmentationLayer(action: CopySegmentationLayerAction): Saga // Do not overwrite already labelled voxels if (currentLabelValue === 0) { cube.labelVoxelInResolution(voxelTargetAddress, templateLabelValue, labeledZoomStep); - const bucket = cube.getBucket( + const bucket = cube.getOrCreateBucket( cube.positionToZoomedAddress(voxelTargetAddress, labeledZoomStep), ); if (bucket.type === "null") { From 9f7ef92fb536425ca00d7f067cfc323b53274c7c Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 22 Oct 2020 16:47:35 +0200 Subject: [PATCH 112/121] fix some merge-related problems --- .../oxalis/model/sagas/volumetracing_saga.js | 13 +++---------- .../oxalis/view/action-bar/volume_actions_view.js | 4 +--- 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js index 7f5e9349172..3544e13dc5b 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.js @@ -52,7 +52,6 @@ import Constants, { type ContourMode, type OverwriteMode, ContourModeEnum, - OverwriteModeEnum, type OrthoView, type VolumeTool, type Vector2, @@ -142,8 +141,8 @@ export function* editVolumeLayerAsync(): Generator { labelWithVoxelBuffer2D, currentLayer.getCircleVoxelBuffer2D(startEditingAction.position), contourTracingMode, - labeledZoomStep, overwriteMode, + labeledZoomStep, ); } @@ -304,10 +303,7 @@ function* labelWithVoxelBuffer2D( } } - const shouldOverwrite = [ - ContourModeEnum.DRAW_OVERWRITE, - ContourModeEnum.DELETE_FROM_ANY_CELL, - ].includes(contourTracingMode); + const shouldOverwrite = contourTracingMode === ContourModeEnum.DRAW; // Since the LabeledVoxelMap is created in the current magnification, // we only need to annotate one slice in this mag. @@ -316,10 +312,7 @@ function* labelWithVoxelBuffer2D( const numberOfSlices = 1; const thirdDim = dimensionIndices[2]; - const isDeleting = [ - ContourModeEnum.DELETE_FROM_ACTIVE_CELL, - ContourModeEnum.DELETE_FROM_ANY_CELL, - ].includes(contourTracingMode); + const isDeleting = contourTracingMode === ContourModeEnum.DELETE; const newCellIdValue = isDeleting ? 0 : activeCellId; const overwritableValue = isDeleting ? activeCellId : 0; diff --git a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index 42252646ad1..3879b67057f 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -11,7 +11,6 @@ import { VolumeToolEnum, type OverwriteMode, OverwriteModeEnum, - type Vector3, } from "oxalis/constants"; import { document } from "libs/window"; import { @@ -184,7 +183,6 @@ export default function VolumeActionsView() { state => state.temporaryConfiguration.activeMapping.mappingColors, ); - const zoomStep = useSelector(state => state.flycam.zoomStep); const maybeResolutionWithZoomStep = useSelector(getRenderableResolutionForSegmentation); const labeledResolution = maybeResolutionWithZoomStep != null ? 
maybeResolutionWithZoomStep.resolution : null; @@ -274,7 +272,7 @@ export default function VolumeActionsView() { Date: Fri, 23 Oct 2020 15:57:13 +0200 Subject: [PATCH 113/121] fix styling of multi-slice-icon --- .../oxalis/view/action-bar/volume_actions_view.js | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index 3879b67057f..f5cf9fba09a 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -102,7 +102,7 @@ function toggleOverwriteMode(overwriteMode) { const narrowButtonStyle = { paddingLeft: 10, - width: 38, + paddingRight: 8, }; const handleSetTool = (event: { target: { value: VolumeTool } }) => { @@ -191,7 +191,7 @@ export default function VolumeActionsView() { const hasResolutionWithHigherDimension = (labeledResolution || []).some(val => val > 1); const multiSliceAnnotationInfoIcon = hasResolutionWithHigherDimension ? ( - + ) : null; const isTraceToolDisabled = isZoomStepTooHighForTraceTool(); @@ -283,7 +283,6 @@ export default function VolumeActionsView() { className="svg-gray-to-highlighted-blue" style={{ opacity: isInMergerMode ? 0.5 : 1 }} /> - {multiSliceAnnotationInfoIcon} {adaptedActiveTool === "TRACE" ? multiSliceAnnotationInfoIcon : null} Date: Fri, 23 Oct 2020 16:12:00 +0200 Subject: [PATCH 114/121] throw an exception if the resolution could not be looked up instead of defaulting to 0,0,0 --- .../tracingstore/tracings/volume/VolumeTracingService.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 3ea8ef53126..a904b0d1ad3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -128,7 +128,7 @@ class VolumeTracingService @Inject()( if (tracing.resolutions.nonEmpty) { if (tracing.resolutions.length >= zoomStep) { tracing.resolutions(zoomStep) - } else Point3D(0, 0, 0) + } else throw new Exception(s"Received bucket with zoomStep (${zoomStep}), could not look up that resolution tracing.resolutions (${tracing.resolutions})") } else { val isotropicResolution = math.pow(2, zoomStep).toInt Point3D(isotropicResolution, isotropicResolution, isotropicResolution) From 675e74e57886acb8ea411d3ad5679d8ff614db15 Mon Sep 17 00:00:00 2001 From: Florian M Date: Fri, 23 Oct 2020 16:25:04 +0200 Subject: [PATCH 115/121] look up volume bucket resolution by zoom step ** 2 rather than index --- .../tracings/volume/VolumeTracingService.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 3ea8ef53126..b1d2174f380 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala 
@@ -126,9 +126,10 @@ class VolumeTracingService @Inject()( private def lookUpVolumeResolution(tracing: VolumeTracing, zoomStep: Int): Point3D = if (tracing.resolutions.nonEmpty) { - if (tracing.resolutions.length >= zoomStep) { - tracing.resolutions(zoomStep) - } else Point3D(0, 0, 0) + tracing.resolutions.find(r => r.maxDim == math.pow(2, zoomStep)).getOrElse { + throw new Exception( + s"Received bucket with zoomStep ($zoomStep), could not look up that resolution tracing.resolutions (${tracing.resolutions})") + } } else { val isotropicResolution = math.pow(2, zoomStep).toInt Point3D(isotropicResolution, isotropicResolution, isotropicResolution) From ba4772aee64a05203ad385bedc11c360372fbc13 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 23 Oct 2020 16:32:38 +0200 Subject: [PATCH 116/121] fix that toolbar rerendered on every state change --- .../oxalis/model/accessors/dataset_accessor.js | 10 +++++++--- .../oxalis/model/accessors/flycam_accessor.js | 2 +- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index fe86ff7daa0..0f8a9a58c7d 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -586,12 +586,16 @@ function _getUnrenderableLayersForCurrentZoom(state: OxalisState) { return unrenderableLayers; } +export const getUnrenderableLayersForCurrentZoom = reuseInstanceOnEquality( + _getUnrenderableLayersForCurrentZoom, +); + /* This function returns the resolution and zoom step in which the segmentation layer is currently rendered (if it is rendered). These properties should be used when labeling volume data. */ -export function getRenderableResolutionForSegmentation( +function _getRenderableResolutionForSegmentation( state: OxalisState, ): ?{ resolution: Vector3, zoomStep: number } { const { dataset } = state; @@ -645,8 +649,8 @@ export function getRenderableResolutionForSegmentation( return null; } -export const getUnrenderableLayersForCurrentZoom = reuseInstanceOnEquality( - _getUnrenderableLayersForCurrentZoom, +export const getRenderableResolutionForSegmentation = reuseInstanceOnEquality( + _getRenderableResolutionForSegmentation, ); export function getThumbnailURL(dataset: APIDataset): string { diff --git a/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js b/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js index fd7df52bfce..8ad1ba00993 100644 --- a/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js @@ -249,7 +249,7 @@ export function getRequestLogZoomStep(state: OxalisState): number { export function getCurrentResolution(state: OxalisState): Vector3 { const resolutions = getResolutions(state.dataset); const logZoomStep = getRequestLogZoomStep(state); - return resolutions[logZoomStep]; + return resolutions[logZoomStep] || [1, 1, 1]; } export function getValidZoomRangeForUser(state: OxalisState): [number, number] { From 13224ede88497d9d06562da25cc1a9d1eb175d97 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 23 Oct 2020 16:44:10 +0200 Subject: [PATCH 117/121] improve handling of volume-is-disabled case in toolbar --- .../view/action-bar/volume_actions_view.js | 23 +++++++++++-------- .../oxalis/view/viewport_status_indicator.js | 8 +++---- 2 files changed, 17 insertions(+), 14 deletions(-) diff --git 
a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index f5cf9fba09a..6d0086f51bd 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -189,13 +189,15 @@ export default function VolumeActionsView() { const isLabelingPossible = labeledResolution != null; const hasResolutionWithHigherDimension = (labeledResolution || []).some(val => val > 1); + const multiSliceAnnotationInfoIcon = hasResolutionWithHigherDimension ? ( ) : null; - const isTraceToolDisabled = isZoomStepTooHighForTraceTool(); - const traceToolDisabledTooltip = isTraceToolDisabled + const isTraceToolImpossible = isZoomStepTooHighForTraceTool(); + const isTraceToolDisabled = isInMergerMode || isTraceToolImpossible || !isLabelingPossible; + const traceToolDisabledTooltip = isTraceToolImpossible ? "Your zoom is too low to use the trace tool. Please zoom in further to use it." : ""; @@ -204,10 +206,10 @@ export default function VolumeActionsView() { // the tools via the w shortcut. In that case, the effect-hook is re-executed // and the tool is switched to MOVE. useEffect(() => { - if (isInMergerMode) { + if (isInMergerMode || !isLabelingPossible) { Store.dispatch(setToolAction(VolumeToolEnum.MOVE)); } - }, [isInMergerMode, activeTool]); + }, [isInMergerMode, activeTool, isLabelingPossible]); const isShiftPressed = useKeyPress("Shift"); const isControlPressed = useKeyPress("Control"); @@ -230,8 +232,9 @@ export default function VolumeActionsView() { : null; const previousMoveToolHint = usePrevious(moveToolHint); - const disabledVolumeExplanation = - "Volume annotation is disabled while the merger mode is active."; + const disabledVolumeExplanation = isLabelingPossible + ? "Volume annotation is disabled while the merger mode is active." + : "Volume annotation is disabled since no segmentation data can be shown at the current magnification. Please adjust the zoom level."; const moveToolDescription = `Pointer – Use left-click to move around${ hasSkeleton ? " and right-click to create new skeleton nodes" : "" @@ -273,7 +276,7 @@ export default function VolumeActionsView() { @@ -281,14 +284,14 @@ export default function VolumeActionsView() { src="/assets/images/lasso.svg" alt="Trace Tool Icon" className="svg-gray-to-highlighted-blue" - style={{ opacity: isInMergerMode ? 0.5 : 1 }} + style={{ opacity: isTraceToolDisabled ? 0.5 : 1 }} /> {adaptedActiveTool === "TRACE" ? multiSliceAnnotationInfoIcon : null} @@ -297,7 +300,7 @@ export default function VolumeActionsView() { diff --git a/frontend/javascripts/oxalis/view/viewport_status_indicator.js b/frontend/javascripts/oxalis/view/viewport_status_indicator.js index da91130c143..a2d34bb7626 100644 --- a/frontend/javascripts/oxalis/view/viewport_status_indicator.js +++ b/frontend/javascripts/oxalis/view/viewport_status_indicator.js @@ -31,7 +31,7 @@ export default function ViewportStatusIndicator() { return null; } const pluralS = unrenderableLayerNames.length > 1 ? "s" : ""; - const pronounAndVerb = unrenderableLayerNames.length > 1 ? "they don't" : "it doesn't"; + const dontVerb = unrenderableLayerNames.length > 1 ? "don't" : "doesn't"; const renderMissingDataBlackHint = renderMissingDataBlack ? " Also consider disabling the option “Render Missing Data Black”." 
@@ -41,9 +41,9 @@ export default function ViewportStatusIndicator() { - The layer{pluralS} {unrenderableLayerNames.map(name => `"${name}"`).join(", ")}{" "} - {pronounAndVerb} exist in the current resolution {currentResolution.join("-")}. Adjust the - zoom level to change the active resolution.{renderMissingDataBlackHint} + The layer{pluralS} {unrenderableLayerNames.map(name => `"${name}"`).join(", ")} {dontVerb}{" "} + exist in the current resolution {currentResolution.join("-")}. Adjust the zoom level to + change the active resolution.{renderMissingDataBlackHint}
} > From afe796a8715dafaecfa0b97230231bbdfb4087bd Mon Sep 17 00:00:00 2001 From: Florian M Date: Mon, 26 Oct 2020 10:05:42 +0100 Subject: [PATCH 118/121] replace exception by fox.failure if volume resolution lookup fails --- .../volume/VolumeTracingService.scala | 27 ++++++++++++------- 1 file changed, 17 insertions(+), 10 deletions(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index b1d2174f380..3c253640a2e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -89,10 +89,7 @@ class VolumeTracingService @Inject()( case Full(t) => action match { case a: UpdateBucketVolumeAction => - val resolution = lookUpVolumeResolution(t, a.zoomStep) - val bucket = - BucketPosition(a.position.x, a.position.y, a.position.z, resolution) - saveBucket(volumeTracingLayer(tracingId, t), bucket, a.data, updateGroup.version).map(_ => t) + updateBucket(tracingId, t, a, updateGroup.version) case a: UpdateTracingVolumeAction => Fox.successful( t.copy( @@ -124,15 +121,25 @@ class VolumeTracingService @Inject()( updateGroup.actions.map(_.addTimestamp(updateGroup.timestamp)).map(_.transformToCompact)) } yield Fox.successful(()) - private def lookUpVolumeResolution(tracing: VolumeTracing, zoomStep: Int): Point3D = + private def updateBucket(tracingId: String, + volumeTracing: VolumeTracing, + action: UpdateBucketVolumeAction, + updateGroupVersion: Long): Fox[VolumeTracing] = + for { + resolution <- lookUpVolumeResolution(volumeTracing, action.zoomStep) + bucket = BucketPosition(action.position.x, action.position.y, action.position.z, resolution) + _ <- saveBucket(volumeTracingLayer(tracingId, volumeTracing), bucket, action.data, updateGroupVersion) + } yield volumeTracing + + private def lookUpVolumeResolution(tracing: VolumeTracing, zoomStep: Int): Fox[Point3D] = if (tracing.resolutions.nonEmpty) { - tracing.resolutions.find(r => r.maxDim == math.pow(2, zoomStep)).getOrElse { - throw new Exception( - s"Received bucket with zoomStep ($zoomStep), could not look up that resolution tracing.resolutions (${tracing.resolutions})") - } + tracing.resolutions + .find(r => r.maxDim == math.pow(2, zoomStep)) + .map(point3DFromProto) + .toFox ?~> s"Received bucket with zoomStep ($zoomStep), no matching resolution found in tracing (has ${tracing.resolutions})" } else { val isotropicResolution = math.pow(2, zoomStep).toInt - Point3D(isotropicResolution, isotropicResolution, isotropicResolution) + Fox.successful(Point3D(isotropicResolution, isotropicResolution, isotropicResolution)) } private def revertToVolumeVersion(tracingId: String, From 1d963a2142f2a1091605cd315826506ba77281f1 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 26 Oct 2020 15:31:49 +0100 Subject: [PATCH 119/121] show data type and resolutions of layer in tooltip next to layer name (therefore, the dtype tag is removed) --- .../model/accessors/dataset_accessor.js | 4 +++ .../view/settings/dataset_settings_view.js | 31 ++++++++++++++++--- 2 files changed, 31 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js index 0f8a9a58c7d..a8c114fd2fa 100644 --- 
a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.js @@ -62,6 +62,10 @@ export class ResolutionInfo { } } + getResolutionList(): Array { + return Array.from(this.resolutionMap.entries()).map(entry => entry[1]); + } + getResolutionsWithIndices(): Array<[number, Vector3]> { return Array.from(this.resolutionMap.entries()).map(entry => { const [powerOfTwo, resolution] = entry; diff --git a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js index 67d178ad5b9..0dcdff5ba14 100644 --- a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js +++ b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js @@ -3,7 +3,7 @@ * @flow */ -import { Col, Collapse, Icon, Row, Select, Switch, Tag, Tooltip, Modal } from "antd"; +import { Col, Collapse, Icon, Row, Select, Switch, Tooltip, Modal } from "antd"; import type { Dispatch } from "redux"; import { connect } from "react-redux"; import * as React from "react"; @@ -32,6 +32,8 @@ import { getElementClass, getLayerBoundaries, getDefaultIntensityRangeOfLayer, + getLayerByName, + getResolutionInfo, } from "oxalis/model/accessors/dataset_accessor"; import { setPositionAction, setZoomStepAction } from "oxalis/model/actions/flycam_actions"; import { @@ -257,11 +259,11 @@ class DatasetSettings extends React.PureComponent { this.props.onChangeLayer(layerName, "isDisabled", !isVisible); }; const onChange = (value, event) => { - if (!event.ctrlKey) { + if (!event.ctrlKey && !event.altKey && !event.shiftKey) { setSingleLayerVisibility(value); return; } - // If ctrl is pressed, toggle between "all layers visible" and + // If a modifier is pressed, toggle between "all layers visible" and // "only selected layer visible". if (this.isLayerExclusivelyVisible(layerName)) { this.setVisibilityForAllLayers(true); @@ -272,6 +274,9 @@ class DatasetSettings extends React.PureComponent { }; const hasHistogram = this.props.histogramData[layerName] != null; + const layer = getLayerByName(this.props.dataset, layerName); + const resolutions = getResolutionInfo(layer.resolutions).getResolutionList(); + return ( @@ -279,7 +284,25 @@ class DatasetSettings extends React.PureComponent { {!isColorLayer && isVolumeTracing ? "Volume Annotation" : layerName} - {elementClass} + + + Data Type: {elementClass} +
+ Available resolutions: +
    + {resolutions.map(r => ( +
+                  {r.join("-")}
+                ))}
+ + } + placement="left" + > + +
+ {hasHistogram ? this.getEditMinMaxButton(layerName, isInEditMode) : null} {this.getFindDataButton(layerName, isDisabled, isColorLayer)} {this.getReloadDataButton(layerName)} From 8370cfeae0e31c4c22431fe830c0b23e65f62360 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 3 Nov 2020 14:20:03 +0100 Subject: [PATCH 120/121] fix syntax error --- .../tracingstore/tracings/volume/VolumeTracingLayer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index 1095cd6affc..785eb8f83c3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -72,7 +72,7 @@ case class VolumeTracingLayer( largestSegmentId: Long, isTemporaryTracing: Boolean = false, defaultViewConfiguration: Option[LayerViewConfiguration] = None, - adminViewConfiguration: Option[LayerViewConfiguration] = None + adminViewConfiguration: Option[LayerViewConfiguration] = None, volumeResolutions: List[Point3D] = List.empty )(implicit val volumeDataStore: FossilDBClient, implicit val volumeDataCache: TemporaryVolumeDataStore, From c9e7531215f1c1647c6a7a95cfc4421a176f39bf Mon Sep 17 00:00:00 2001 From: Florian M Date: Thu, 5 Nov 2020 16:18:43 +0100 Subject: [PATCH 121/121] Enforce TaskType Resolution Restrictions for Multi-Res Volume Tasks (#4891) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * [WIP] enforce task type magnification settings in tasks * pass allowedMagnifications to rpc methods * pass magnifications as query string * respect resolution restrictions for volume tracings. * add option to downsample existing volume tracing * remove outdated assertion, update resolution list after downsampling * sleep 10s * remove sleep and bump dev-proxy-timeout to 5 min * fix task creation with resolution restrictions * use inclusive check * when uploading zips with differing resolution sets, fail * remove unused import * refresh snapshots (tracings contain organization name) * write header.wkw to every mag in volume download, fix anisotropic directory names * add resolution-samenss assertion in volume dnd import. fix flow types * fix restriction passing in initial data * Add button to trigger downsampling of volume annotation. The reload button next to the active-resolution-indicator only appears for explorative annotations with volume data. It opens a modal which explains the action and its consequences. While the operation is running, the modal blocks further usages of webKnossos. After the operation has completed, the complete page is reloaded. * fix parameter passing in duplicate rpc * avoid undefined behavior of bucket iterator by checking hasNext. do not relabel when “merging” only one volume * make tasktype resolution restrictions immutable. 
sort resolution list * update migration guide * remove debug output * improve initiate-volume-downsampling modal and move its button to the layer settings * show toast when mag-restriction is violated instead of prohibiting the zoom change in the first place * don't allow editing mag-restrictions in already created tasktype * remove some more debug output, unify variable names * forbid creating new nodes or using volume tools when the current mag was forbidden by the task type * clean up * Update MIGRATIONS.unreleased.md Co-authored-by: Philipp Otto * fix styling of button-link * use ListBuffer instead of HashSet as per pr feedback * integrate some feedback * Apply suggestions from code review Co-authored-by: Daniel * remove unnecessary SET_TOOL dependence * do map ids if initialLargestSegmentId != 0 Co-authored-by: Philipp Otto Co-authored-by: Philipp Otto Co-authored-by: Daniel --- MIGRATIONS.unreleased.md | 8 +- app/controllers/AnnotationController.scala | 15 +- app/controllers/TaskController.scala | 75 ++++-- app/controllers/TaskTypeController.scala | 1 + .../WKTracingStoreController.scala | 11 +- app/models/annotation/AnnotationService.scala | 124 ++++++--- .../annotation/AnnotationSettings.scala | 38 ++- .../annotation/TracingStoreRpcClient.scala | 14 +- app/models/task/TaskType.scala | 12 +- conf/messages | 5 + conf/webknossos.latest.routes | 3 +- frontend/javascripts/admin/admin_rest_api.js | 14 +- .../admin/tasktype/task_type_create_view.js | 6 +- frontend/javascripts/libs/utils.js | 4 +- frontend/javascripts/oxalis/api/api_latest.js | 25 ++ .../skeletontracing_plane_controller.js | 16 +- .../oxalis/model/accessors/flycam_accessor.js | 24 +- .../javascripts/oxalis/model/data_layer.js | 4 + .../oxalis/model/sagas/root_saga.js | 3 +- .../oxalis/model/sagas/task_saga.js | 63 ++++- .../oxalis/model_initialization.js | 25 +- .../view/action-bar/volume_actions_view.js | 35 ++- .../view/settings/dataset_settings_view.js | 156 ++++++++++- .../annotations.e2e.js.md | 6 + .../annotations.e2e.js.snap | Bin 9167 -> 9196 bytes frontend/javascripts/types/api_flow_types.js | 4 + tools/proxy/proxy.js | 5 +- .../scalableminds/util/geometry/Point3D.scala | 3 + .../com/scalableminds/util/tools/Fox.scala | 3 + .../dataformats/wkw/WKWBucketProvider.scala | 4 +- .../dataformats/wkw/WKWBucketStreamSink.scala | 18 +- .../dataformats/wkw/WKWDataFormatHelper.scala | 17 +- .../webknossos/datastore/rpc/RPCRequest.scala | 4 +- .../TracingStoreWkRpcClient.scala | 4 +- .../controllers/TracingController.scala | 10 +- .../controllers/VolumeTracingController.scala | 55 ++-- .../tracings/TracingService.scala | 10 +- .../skeleton/SkeletonTracingService.scala | 10 +- .../volume/VolumeTracingDownsampling.scala | 130 ++++++---- .../volume/VolumeTracingService.scala | 242 +++++++++++------- ...alableminds.webknossos.tracingstore.routes | 4 +- .../proto/SkeletonTracing.proto | 2 +- .../proto/VolumeTracing.proto | 2 +- 43 files changed, 872 insertions(+), 342 deletions(-) diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index 90e4674f5e4..dfef61ad2f8 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -6,7 +6,13 @@ This project adheres to [Calendar Versioning](http://calver.org/) `0Y.0M.MICRO`. User-facing changes are documented in the [changelog](CHANGELOG.released.md). ## Unreleased -- To convert individual legacy volume annotations to multi-resolution volume annotations, download and re-upload them (upload may take a while). 
+- As volume annotations in arbitrary magnifications are now supported and the behavior of magnification restrictions of tasks has changed (allow full zoom, but disable tools unless in correct magnification), you may want to restrict all volume and hybrid task types to mag 1 to achieve the old behavior (mag1-only): +``` +update webknossos.tasktypes +set settings_allowedmagnifications = '{"min":1,"max":1,"shouldRestrict":true}' +where (tracingtype = 'volume' or tracingtype = 'hybrid') +and (settings_allowedmagnifications is null or settings_allowedmagnifications::json->>'shouldRestrict'='false'); +``` ### Postgres Evolutions: - [057-add-layer-specific-view-configs.sql](conf/evolutions/056-add-layer-specific-view-configs.sql) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index e29056d8ecb..04d3e8a2184 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -166,7 +166,20 @@ class AnnotationController @Inject()( for { _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.makeHybrid.explorationalsOnly" annotation <- provider.provideAnnotation(typ, id, request.identity) - _ <- annotationService.makeAnnotationHybrid(annotation) ?~> "annotation.makeHybrid.failed" + organization <- organizationDAO.findOne(request.identity._organization) + _ <- annotationService.makeAnnotationHybrid(annotation, organization.name) ?~> "annotation.makeHybrid.failed" + updated <- provider.provideAnnotation(typ, id, request.identity) + json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" + } yield { + JsonOk(json) + } + } + + def downsample(typ: String, id: String) = sil.SecuredAction.async { implicit request => + for { + _ <- bool2Fox(AnnotationType.Explorational.toString == typ) ?~> "annotation.downsample.explorationalsOnly" + annotation <- provider.provideAnnotation(typ, id, request.identity) + _ <- annotationService.downsampleAnnotation(annotation) ?~> "annotation.downsample.failed" updated <- provider.provideAnnotation(typ, id, request.identity) json <- annotationService.publicWrites(updated, Some(request.identity)) ?~> "annotation.write.failed" } yield { diff --git a/app/controllers/TaskController.scala b/app/controllers/TaskController.scala index a78843be2be..9f5ed11040c 100755 --- a/app/controllers/TaskController.scala +++ b/app/controllers/TaskController.scala @@ -9,13 +9,13 @@ import com.scalableminds.util.geometry.{BoundingBox, Point3D, Vector3D} import com.scalableminds.util.mvc.ResultBox import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.tracingstore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} -import com.scalableminds.webknossos.tracingstore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} +import com.scalableminds.webknossos.tracingstore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions import com.scalableminds.webknossos.tracingstore.tracings.{ProtoGeometryImplicits, TracingType} import javax.inject.Inject -import models.annotation.nml.NmlResults.NmlParseResult import models.annotation.nml.NmlService import models.annotation._ -import models.binary.{DataSet, DataSetDAO, DataSetService} +import models.binary.{DataSet, DataSetDAO} import models.project.{Project, ProjectDAO} import models.task._ import models.team.{Team, TeamDAO} @@ -74,7 +74,6 @@ class TaskController 
@Inject()(annotationDAO: AnnotationDAO, dataSetDAO: DataSetDAO, userTeamRolesDAO: UserTeamRolesDAO, userService: UserService, - dataSetService: DataSetService, tracingStoreService: TracingStoreService, teamDAO: TeamDAO, taskDAO: TaskDAO, @@ -88,7 +87,7 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, with ProtoGeometryImplicits with FoxImplicits { - val MAX_OPEN_TASKS = conf.WebKnossos.Tasks.maxOpenPerUser + private val MAX_OPEN_TASKS: Int = conf.WebKnossos.Tasks.maxOpenPerUser def read(taskId: String) = sil.SecuredAction.async { implicit request => for { @@ -114,8 +113,9 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, } yield result } - def duplicateAllBaseTracings(taskParametersList: List[TaskParameters], - organizationId: ObjectId)(implicit ctx: DBAccessContext, m: MessagesProvider) = + private def duplicateAllBaseTracings(taskParametersList: List[TaskParameters], organizationId: ObjectId)( + implicit ctx: DBAccessContext, + m: MessagesProvider): Fox[List[TaskParameters]] = Fox.serialCombined(taskParametersList)( params => Fox @@ -141,9 +141,10 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, annotation: Annotation, params: TaskParameters, tracingStoreClient: TracingStoreRpcClient, - organizationId: ObjectId)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[String] = + organizationId: ObjectId, + resolutionRestrictions: ResolutionRestrictions)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[String] = annotation.volumeTracingId - .map(id => tracingStoreClient.duplicateVolumeTracing(id)) + .map(id => tracingStoreClient.duplicateVolumeTracing(id, resolutionRestrictions = resolutionRestrictions)) .getOrElse( annotationService .createVolumeTracingBase( @@ -152,13 +153,15 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, params.boundingBox, params.editPosition, params.editRotation, - false + volumeShowFallbackLayer = false, + resolutionRestrictions = resolutionRestrictions ) - .flatMap(tracingStoreClient.saveVolumeTracing(_))) + .flatMap(tracingStoreClient.saveVolumeTracing(_, resolutionRestrictions = resolutionRestrictions))) - def duplicateBaseTracings(baseAnnotation: BaseAnnotation, taskParameters: TaskParameters, organizationId: ObjectId)( - implicit ctx: DBAccessContext, - m: MessagesProvider) = { + private def duplicateBaseTracings( + baseAnnotation: BaseAnnotation, + taskParameters: TaskParameters, + organizationId: ObjectId)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[BaseAnnotation] = { @SuppressWarnings(Array("TraversableHead")) // We check if nonCancelledTaskAnnotations are empty before so head always works def checkForTask(taskId: ObjectId): Fox[Annotation] = @@ -196,13 +199,16 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, duplicateSkeletonTracingOrCreateSkeletonTracingBase(annotation, taskParameters, tracingStoreClient).map(Some(_)) else Fox.successful(None) newVolumeId <- if (taskType.tracingType == TracingType.volume || taskType.tracingType == TracingType.hybrid) - duplicateVolumeTracingOrCreateVolumeTracingBase(annotation, taskParameters, tracingStoreClient, organizationId) - .map(Some(_)) + duplicateVolumeTracingOrCreateVolumeTracingBase(annotation, + taskParameters, + tracingStoreClient, + organizationId, + taskType.settings.resolutionRestrictions).map(Some(_)) else Fox.successful(None) } yield BaseAnnotation(baseAnnotationIdValidated.id, newSkeletonId, newVolumeId) } - def createTaskSkeletonTracingBases(paramsList: List[TaskParameters])( + private def 
createTaskSkeletonTracingBases(paramsList: List[TaskParameters])( implicit ctx: DBAccessContext, m: MessagesProvider): Fox[List[Option[SkeletonTracing]]] = Fox.serialCombined(paramsList) { params => @@ -221,7 +227,7 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, } yield skeletonTracingOpt } - def createTaskVolumeTracingBases(paramsList: List[TaskParameters], organizationId: ObjectId)( + private def createTaskVolumeTracingBases(paramsList: List[TaskParameters], organizationId: ObjectId)( implicit ctx: DBAccessContext, m: MessagesProvider): Fox[List[Option[(VolumeTracing, Option[File])]]] = Fox.serialCombined(paramsList) { params => @@ -236,7 +242,8 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, params.boundingBox, params.editPosition, params.editRotation, - volumeShowFallbackLayer = false + volumeShowFallbackLayer = false, + resolutionRestrictions = taskType.settings.resolutionRestrictions ) .map(v => Some((v, None))) } else Fox.successful(None) @@ -296,7 +303,8 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, params.boundingBox, params.editPosition, params.editRotation, - volumeShowFallbackLayer = false + volumeShowFallbackLayer = false, + resolutionRestrictions = taskType.settings.resolutionRestrictions ) .map(v => (v, None))) @@ -449,6 +457,7 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, bool2Fox(fullTasks.forall(tuple => tuple._1.baseAnnotation.isDefined || tuple._2.isDefined || tuple._3.isDefined)) def assertAllOnSameDataset(firstDatasetName: String): Fox[String] = { + @scala.annotation.tailrec def allOnSameDatasetIter( requestedTasksRest: List[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])], dataSetName: String): Boolean = @@ -490,12 +499,8 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, case _ => savedId } } - volumeTracingIds: List[Box[Option[String]]] <- Fox.sequence(requestedTasks.map(_.map(_._3)).map { - case Full(Some((tracing, initialFile))) => - tracingStoreClient.saveVolumeTracing(tracing, initialFile).map(Some(_)) - case f: Failure => box2Fox(f) - case _ => Fox.successful(None) - }) + volumeTracingIds: List[Box[Option[String]]] <- Fox.sequence( + requestedTasks.map(requestedTask => saveVolumeTracingIfPresent(requestedTask, tracingStoreClient))) skeletonTracingsIdsMerged = mergeTracingIds((requestedTasks.map(_.map(_._1)), skeletonTracingIds).zipped.toList, isSkeletonId = true) volumeTracingsIdsMerged = mergeTracingIds((requestedTasks.map(_.map(_._1)), volumeTracingIds).zipped.toList, @@ -526,6 +531,24 @@ class TaskController @Inject()(annotationDAO: AnnotationDAO, } yield Ok(Json.toJson(result)) } + private def saveVolumeTracingIfPresent( + requestedTaskBox: Box[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])], + tracingStoreClient: TracingStoreRpcClient)(implicit ctx: DBAccessContext): Fox[Option[String]] = + requestedTaskBox.map { tuple => + (tuple._1, tuple._3) + } match { + case Full((params: TaskParameters, Some((tracing, initialFile)))) => + for { + taskTypeIdValidated <- ObjectId.parse(params.taskTypeId) ?~> "taskType.id.invalid" + taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" + saveResult <- tracingStoreClient + .saveVolumeTracing(tracing, initialFile, resolutionRestrictions = taskType.settings.resolutionRestrictions) + .map(Some(_)) + } yield saveResult + case f: Failure => box2Fox(f) + case _ => Fox.successful(None) + } + private def warnIfTeamHasNoAccess(requestedTasks: 
List[TaskParameters], dataSet: DataSet)( implicit ctx: DBAccessContext): Fox[List[String]] = { val projectNames = requestedTasks.map(_.projectName).distinct diff --git a/app/controllers/TaskTypeController.scala b/app/controllers/TaskTypeController.scala index 456e7fa774d..cb13f8eb9db 100755 --- a/app/controllers/TaskTypeController.scala +++ b/app/controllers/TaskTypeController.scala @@ -66,6 +66,7 @@ class TaskTypeController @Inject()(taskTypeDAO: TaskTypeDAO, taskTypeIdValidated <- ObjectId.parse(taskTypeId) ?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" ~> NOT_FOUND _ <- bool2Fox(taskTypeFromForm.tracingType == taskType.tracingType) ?~> "taskType.tracingTypeImmutable" + _ <- bool2Fox(taskTypeFromForm.settings.allowedMagnifications == taskType.settings.allowedMagnifications) ?~> "taskType.allowedMagnificationsImmutable" updatedTaskType = taskTypeFromForm.copy(_id = taskType._id) _ <- Fox.assertTrue(userService.isTeamManagerOrAdminOf(request.identity, taskType._team)) ?~> "notAllowed" ~> FORBIDDEN _ <- Fox diff --git a/app/controllers/WKTracingStoreController.scala b/app/controllers/WKTracingStoreController.scala index 69a44dd3c35..093d1bca72d 100644 --- a/app/controllers/WKTracingStoreController.scala +++ b/app/controllers/WKTracingStoreController.scala @@ -56,15 +56,14 @@ class WKTracingStoreController @Inject()(tracingStoreService: TracingStoreServic if (annotation.state == Finished) Fox.failure("annotation already finshed") else Fox.successful(()) - def dataSource(name: String, organizationName: String, dataSetName: String): Action[AnyContent] = Action.async { - implicit request => + def dataSource(name: String, organizationName: Option[String], dataSetName: String): Action[AnyContent] = + Action.async { implicit request => tracingStoreService.validateAccess(name) { _ => implicit val ctx: DBAccessContext = GlobalAccessContext - val organizationNameOpt = if (organizationName == "") None else Some(organizationName) for { - organizationIdOpt <- Fox.runOptional(organizationNameOpt) { + organizationIdOpt <- Fox.runOptional(organizationName) { organizationDAO.findOneByName(_)(GlobalAccessContext).map(_._id) - } ?~> Messages("organization.notFound", organizationNameOpt.getOrElse("")) ~> NOT_FOUND + } ?~> Messages("organization.notFound", organizationName.getOrElse("")) ~> NOT_FOUND organizationId <- Fox.fillOption(organizationIdOpt) { dataSetDAO.getOrganizationForDataSet(dataSetName)(GlobalAccessContext) } ?~> Messages("dataSet.noAccess", dataSetName) ~> FORBIDDEN @@ -74,5 +73,5 @@ class WKTracingStoreController @Inject()(tracingStoreService: TracingStoreServic dataSource <- dataSetService.dataSourceFor(dataSet) } yield Ok(Json.toJson(dataSource)) } - } + } } diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 73d5a118047..7758d4c97d1 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -19,7 +19,11 @@ import com.scalableminds.webknossos.tracingstore.VolumeTracing.{VolumeTracing, V import com.scalableminds.webknossos.tracingstore.geometry.{Color, NamedBoundingBox} import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.skeleton.{NodeDefaults, SkeletonTracingDefaults} -import com.scalableminds.webknossos.tracingstore.tracings.volume.{VolumeTracingDefaults, VolumeTracingDownsampling} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + 
ResolutionRestrictions, + VolumeTracingDefaults, + VolumeTracingDownsampling +} import com.typesafe.scalalogging.LazyLogging import javax.inject.Inject import models.annotation.AnnotationState._ @@ -101,31 +105,42 @@ class AnnotationService @Inject()(annotationInformationProvider: AnnotationInfor private def createVolumeTracing( dataSource: DataSource, + organizationName: String, fallbackLayer: Option[SegmentationLayer], boundingBox: Option[BoundingBox] = None, startPosition: Option[Point3D] = None, - startRotation: Option[Vector3D] = None - ): VolumeTracing = - VolumeTracing( - None, - boundingBoxToProto(boundingBox.getOrElse(dataSource.boundingBox)), - System.currentTimeMillis(), - dataSource.id.name, - point3DToProto(startPosition.getOrElse(dataSource.center)), - vector3DToProto(startRotation.getOrElse(vector3DFromProto(VolumeTracingDefaults.editRotation))), - elementClassToProto(fallbackLayer.map(layer => layer.elementClass).getOrElse(VolumeTracingDefaults.elementClass)), - fallbackLayer.map(_.name), - fallbackLayer.map(_.largestSegmentId).getOrElse(VolumeTracingDefaults.largestSegmentId), - 0, - VolumeTracingDefaults.zoomLevel, - resolutions = VolumeTracingDownsampling.resolutionsForVolumeTracing(dataSource, fallbackLayer).map(point3DToProto) - ) + startRotation: Option[Vector3D] = None, + resolutionRestrictions: ResolutionRestrictions = ResolutionRestrictions.empty + ): Fox[VolumeTracing] = { + val resolutions = VolumeTracingDownsampling.resolutionsForVolumeTracing(dataSource, fallbackLayer) + val resolutionsRestricted = resolutionRestrictions.filterAllowed(resolutions) + for { + _ <- bool2Fox(resolutionsRestricted.nonEmpty) ?~> "annotation.volume.resolutionRestrictionsTooTight" + } yield + VolumeTracing( + None, + boundingBoxToProto(boundingBox.getOrElse(dataSource.boundingBox)), + System.currentTimeMillis(), + dataSource.id.name, + point3DToProto(startPosition.getOrElse(dataSource.center)), + vector3DToProto(startRotation.getOrElse(vector3DFromProto(VolumeTracingDefaults.editRotation))), + elementClassToProto( + fallbackLayer.map(layer => layer.elementClass).getOrElse(VolumeTracingDefaults.elementClass)), + fallbackLayer.map(_.name), + fallbackLayer.map(_.largestSegmentId).getOrElse(VolumeTracingDefaults.largestSegmentId), + 0, + VolumeTracingDefaults.zoomLevel, + organizationName = Some(organizationName), + resolutions = resolutionsRestricted.map(point3DToProto) + ) + } - def createTracings( + def createTracingsForExplorational( dataSet: DataSet, dataSource: DataSource, tracingType: TracingType.Value, withFallback: Boolean, + organizationName: String, oldTracingId: Option[String] = None)(implicit ctx: DBAccessContext): Fox[(Option[String], Option[String])] = { def getFallbackLayer(): Option[SegmentationLayer] = if (withFallback) { @@ -143,17 +158,21 @@ class AnnotationService @Inject()(annotationInformationProvider: AnnotationInfor userBBoxOpt = oldTracingOpt.flatMap(_._1.userBoundingBox).map(NamedBoundingBox(0, None, None, None, _)) userBBoxes = oldTracingOpt.map(_._1.userBoundingBoxes ++ userBBoxOpt) skeletonTracingId <- client.saveSkeletonTracing( - SkeletonTracingDefaults.createInstance.copy(dataSetName = dataSet.name, - editPosition = dataSource.center, - userBoundingBox = None, - userBoundingBoxes = userBBoxes.getOrElse(Seq.empty))) + SkeletonTracingDefaults.createInstance.copy( + dataSetName = dataSet.name, + editPosition = dataSource.center, + userBoundingBox = None, + organizationName = Some(organizationName), + userBoundingBoxes = userBBoxes.getOrElse(Seq.empty) + )) } 
yield (Some(skeletonTracingId), None) case TracingType.volume => for { client <- tracingStoreService.clientFor(dataSet) fallbackLayer = getFallbackLayer() _ <- bool2Fox(fallbackLayer.forall(_.elementClass != ElementClass.uint64)) ?~> "annotation.volume.uint64" - volumeTracingId <- client.saveVolumeTracing(createVolumeTracing(dataSource, fallbackLayer)) + volumeTracing <- createVolumeTracing(dataSource, organizationName, fallbackLayer) + volumeTracingId <- client.saveVolumeTracing(volumeTracing) } yield (None, Some(volumeTracingId)) case TracingType.hybrid => for { @@ -161,8 +180,11 @@ class AnnotationService @Inject()(annotationInformationProvider: AnnotationInfor fallbackLayer = getFallbackLayer() _ <- bool2Fox(fallbackLayer.forall(_.elementClass != ElementClass.uint64)) ?~> "annotation.volume.uint64" skeletonTracingId <- client.saveSkeletonTracing( - SkeletonTracingDefaults.createInstance.copy(dataSetName = dataSet.name, editPosition = dataSource.center)) - volumeTracingId <- client.saveVolumeTracing(createVolumeTracing(dataSource, fallbackLayer)) + SkeletonTracingDefaults.createInstance.copy(dataSetName = dataSet.name, + editPosition = dataSource.center, + organizationName = Some(organizationName))) + volumeTracing <- createVolumeTracing(dataSource, organizationName, fallbackLayer) + volumeTracingId <- client.saveVolumeTracing(volumeTracing) } yield (Some(skeletonTracingId), Some(volumeTracingId)) } } @@ -173,8 +195,13 @@ class AnnotationService @Inject()(annotationInformationProvider: AnnotationInfor for { dataSet <- dataSetDAO.findOne(_dataSet) ?~> "dataSet.noAccessById" dataSource <- dataSetService.dataSourceFor(dataSet) + organization <- organizationDAO.findOne(user._organization) usableDataSource <- dataSource.toUsable ?~> Messages("dataSet.notImported", dataSource.id.name) - tracingIds <- createTracings(dataSet, usableDataSource, tracingType, withFallback) + tracingIds <- createTracingsForExplorational(dataSet, + usableDataSource, + tracingType, + withFallback, + organization.name) teamId <- selectSuitableTeam(user, dataSet) annotation = Annotation( ObjectId.generate, @@ -190,15 +217,21 @@ class AnnotationService @Inject()(annotationInformationProvider: AnnotationInfor annotation } - def makeAnnotationHybrid(annotation: Annotation)(implicit ctx: DBAccessContext) = { + def makeAnnotationHybrid(annotation: Annotation, organizationName: String)( + implicit ctx: DBAccessContext): Fox[Unit] = { def createNewTracings(dataSet: DataSet, dataSource: DataSource) = annotation.tracingType match { case TracingType.skeleton => - createTracings(dataSet, dataSource, TracingType.volume, true).flatMap { + createTracingsForExplorational(dataSet, dataSource, TracingType.volume, true, organizationName).flatMap { case (_, Some(volumeId)) => annotationDAO.updateVolumeTracingId(annotation._id, volumeId) case _ => Fox.failure("unexpectedReturn") } case TracingType.volume => - createTracings(dataSet, dataSource, TracingType.skeleton, false, annotation.volumeTracingId).flatMap { + createTracingsForExplorational(dataSet, + dataSource, + TracingType.skeleton, + false, + organizationName, + annotation.volumeTracingId).flatMap { case (Some(skeletonId), _) => annotationDAO.updateSkeletonTracingId(annotation._id, skeletonId) case _ => Fox.failure("unexpectedReturn") } @@ -213,6 +246,17 @@ class AnnotationService @Inject()(annotationInformationProvider: AnnotationInfor } + def downsampleAnnotation(annotation: Annotation)(implicit ctx: DBAccessContext): Fox[Unit] = + for { + dataSet <- 
dataSetDAO.findOne(annotation._dataSet) ?~> "dataSet.notFoundForAnnotation" + originalVolumeTracingId <- annotation.volumeTracingId ?~> "annotation.downsample.volumeOnly" + rpcClient <- tracingStoreService.clientFor(dataSet) + newVolumeTracingId <- rpcClient.duplicateVolumeTracing(originalVolumeTracingId, downsample = true) + _ = logger.info( + s"Replacing volume tracing $originalVolumeTracingId by downsampled copy $newVolumeTracingId for annotation ${annotation._id}.") + _ <- annotationDAO.updateVolumeTracingId(annotation._id, newVolumeTracingId) + } yield () + // WARNING: needs to be repeatable, might be called multiple times for an annotation def finish(annotation: Annotation, user: User, restrictions: AnnotationRestrictions)( implicit ctx: DBAccessContext): Fox[String] = { @@ -336,14 +380,16 @@ class AnnotationService @Inject()(annotationInformationProvider: AnnotationInfor ) } - def createVolumeTracingBase( - dataSetName: String, - organizationId: ObjectId, - boundingBox: Option[BoundingBox], - startPosition: Point3D, - startRotation: Vector3D, - volumeShowFallbackLayer: Boolean)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[VolumeTracing] = + def createVolumeTracingBase(dataSetName: String, + organizationId: ObjectId, + boundingBox: Option[BoundingBox], + startPosition: Point3D, + startRotation: Vector3D, + volumeShowFallbackLayer: Boolean, + resolutionRestrictions: ResolutionRestrictions)(implicit ctx: DBAccessContext, + m: MessagesProvider): Fox[VolumeTracing] = for { + organization <- organizationDAO.findOne(organizationId) dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organizationId) ?~> Messages("dataset.notFound", dataSetName) dataSource <- dataSetService.dataSourceFor(dataSet).flatMap(_.toUsable) @@ -356,14 +402,16 @@ class AnnotationService @Inject()(annotationInformationProvider: AnnotationInfor } else None _ <- bool2Fox(fallbackLayer.forall(_.elementClass != ElementClass.uint64)) ?~> "annotation.volume.uint64" - volumeTracing = createVolumeTracing( + volumeTracing <- createVolumeTracing( dataSource, + organization.name, fallbackLayer = fallbackLayer, boundingBox = boundingBox.flatMap { box => if (box.isEmpty) None else Some(box) }, startPosition = Some(startPosition), - startRotation = Some(startRotation) + startRotation = Some(startRotation), + resolutionRestrictions = resolutionRestrictions ) } yield volumeTracing diff --git a/app/models/annotation/AnnotationSettings.scala b/app/models/annotation/AnnotationSettings.scala index fc30880a9fd..27254448b00 100755 --- a/app/models/annotation/AnnotationSettings.scala +++ b/app/models/annotation/AnnotationSettings.scala @@ -1,18 +1,31 @@ package models.annotation import com.scalableminds.webknossos.tracingstore.tracings.TracingType +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ResolutionRestrictions, VolumeTracingDownsampling} import models.annotation.AnnotationSettings._ -import play.api.data.validation.ValidationError +import models.binary.DataSet import play.api.libs.json._ +object AllowedMagnifications { + implicit val format: Format[AllowedMagnifications] = Json.format[AllowedMagnifications] +} + case class AnnotationSettings( allowedModes: List[String] = SKELETON_MODES, preferredMode: Option[String] = None, branchPointsAllowed: Boolean = true, somaClickingAllowed: Boolean = true, mergerMode: Boolean = false, - allowedMagnifications: Option[JsValue] = None -) + allowedMagnifications: Option[AllowedMagnifications] = None +) { + def resolutionRestrictions: 
ResolutionRestrictions = + allowedMagnifications match { + case None => ResolutionRestrictions.empty + case Some(allowedMags) => + if (allowedMags.shouldRestrict) ResolutionRestrictions(Some(allowedMags.min), Some(allowedMags.max)) + else ResolutionRestrictions.empty + } +} object AnnotationSettings { val ORTHOGONAL = "orthogonal" @@ -22,9 +35,9 @@ object AnnotationSettings { val SKELETON_MODES = List(ORTHOGONAL, OBLIQUE, FLIGHT) val VOLUME_MODES = List(VOLUME) - val ALL_MODES = SKELETON_MODES ::: VOLUME_MODES + val ALL_MODES: List[String] = SKELETON_MODES ::: VOLUME_MODES - def defaultFor(tracingType: TracingType.Value) = tracingType match { + def defaultFor(tracingType: TracingType.Value): AnnotationSettings = tracingType match { case TracingType.skeleton => AnnotationSettings(allowedModes = SKELETON_MODES) case TracingType.volume => @@ -33,9 +46,9 @@ object AnnotationSettings { AnnotationSettings(allowedModes = ALL_MODES) } - implicit val annotationSettingsWrites = Json.writes[AnnotationSettings] + implicit val annotationSettingsWrites: OWrites[AnnotationSettings] = Json.writes[AnnotationSettings] - implicit val annotationSettingsReads = + implicit val annotationSettingsReads: Reads[AnnotationSettings] = Json .reads[AnnotationSettings] .filter(JsonValidationError("annotation.preferedMode.invalid")) { a => @@ -45,3 +58,14 @@ object AnnotationSettings { a.allowedModes.forall(ALL_MODES.contains) } } + +case class AllowedMagnifications( + shouldRestrict: Boolean, + min: Int, + max: Int +) { + def toQueryString: String = + if (shouldRestrict) + s"minResolution=$min&maxResolution=$max" + else "" +} diff --git a/app/models/annotation/TracingStoreRpcClient.scala b/app/models/annotation/TracingStoreRpcClient.scala index a26498d7eb4..38e6b846834 100644 --- a/app/models/annotation/TracingStoreRpcClient.scala +++ b/app/models/annotation/TracingStoreRpcClient.scala @@ -14,6 +14,7 @@ import com.scalableminds.util.tools.JsonHelper.boxFormat import com.scalableminds.util.tools.JsonHelper.optionFormat import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, DataSourceLike} +import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions import com.typesafe.scalalogging.LazyLogging import models.binary.{DataSet, DataStoreRpcClient} import net.liftweb.common.Box @@ -87,11 +88,16 @@ class TracingStoreRpcClient(tracingStore: TracingStore, dataSet: DataSet, rpc: R def duplicateVolumeTracing(volumeTracingId: String, fromTask: Boolean = false, - dataSetBoundingBox: Option[BoundingBox] = None): Fox[String] = { + dataSetBoundingBox: Option[BoundingBox] = None, + resolutionRestrictions: ResolutionRestrictions = ResolutionRestrictions.empty, + downsample: Boolean = false): Fox[String] = { logger.debug("Called to duplicate VolumeTracing." 
+ baseInfo) rpc(s"${tracingStore.url}/tracings/volume/${volumeTracingId}/duplicate") .addQueryString("token" -> TracingStoreRpcClient.webKnossosToken) .addQueryString("fromTask" -> fromTask.toString) + .addQueryStringOptional("minResolution", resolutionRestrictions.minStr) + .addQueryStringOptional("maxResolution", resolutionRestrictions.maxStr) + .addQueryString("downsample" -> downsample.toString) .postWithJsonResponse[Option[BoundingBox], String](dataSetBoundingBox) } @@ -138,7 +144,9 @@ class TracingStoreRpcClient(tracingStore: TracingStore, dataSet: DataSet, rpc: R private def packVolumeDataZips(files: List[File]): File = ZipIO.zipToTempFile(files) - def saveVolumeTracing(tracing: VolumeTracing, initialData: Option[File] = None): Fox[String] = { + def saveVolumeTracing(tracing: VolumeTracing, + initialData: Option[File] = None, + resolutionRestrictions: ResolutionRestrictions = ResolutionRestrictions.empty): Fox[String] = { logger.debug("Called to create VolumeTracing." + baseInfo) for { tracingId <- rpc(s"${tracingStore.url}/tracings/volume/save") @@ -148,6 +156,8 @@ class TracingStoreRpcClient(tracingStore: TracingStore, dataSet: DataSet, rpc: R case Some(file) => rpc(s"${tracingStore.url}/tracings/volume/${tracingId}/initialData") .addQueryString("token" -> TracingStoreRpcClient.webKnossosToken) + .addQueryStringOptional("minResolution", resolutionRestrictions.minStr) + .addQueryStringOptional("maxResolution", resolutionRestrictions.maxStr) .post(file) case _ => Fox.successful(()) diff --git a/app/models/task/TaskType.scala b/app/models/task/TaskType.scala index 1d49d06fb28..e9c2aad25f7 100755 --- a/app/models/task/TaskType.scala +++ b/app/models/task/TaskType.scala @@ -1,11 +1,12 @@ package models.task import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} -import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.tracingstore.tracings.TracingType import com.scalableminds.webknossos.schema.Tables._ +import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions import javax.inject.Inject -import models.annotation.AnnotationSettings +import models.annotation.{AllowedMagnifications, AnnotationSettings} import models.team.TeamDAO import slick.jdbc.PostgresProfile.api._ import play.api.libs.json._ @@ -24,7 +25,7 @@ case class TaskType( tracingType: TracingType.Value = TracingType.skeleton, created: Long = System.currentTimeMillis(), isDeleted: Boolean = false -) extends FoxImplicits {} +) class TaskTypeService @Inject()(teamDAO: TeamDAO)(implicit ec: ExecutionContext) { @@ -76,7 +77,10 @@ class TaskTypeDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext) r.settingsBranchpointsallowed, r.settingsSomaclickingallowed, r.settingsMergermode, - r.settingsAllowedmagnifications.map(Json.parse) + r.settingsAllowedmagnifications + .map(Json.parse) + .map(_.validate[AllowedMagnifications]) + .flatMap(JsonHelper.jsResultToOpt) ), r.recommendedconfiguration.map(Json.parse), tracingType, diff --git a/conf/messages b/conf/messages index e946076f860..53c8f10a378 100644 --- a/conf/messages +++ b/conf/messages @@ -198,6 +198,8 @@ tracing.notFound=Tracing couldn’t be found annotation.create.failed=Failed to create annotation. annotation.volume.uint64=Creating volume annotations with uint64 fallback layer is not supported by wK yet. 
+annotation.volume.resolutionRestrictionsTooTight=Task type resolution restrictions are too tight, resulting annotation has no resolutions. +annotation.volume.resolutionsDoNotMatch=Could not merge volume annotations, as their resolutions differ. Please ensure each annotation has the same set of resolutions. annotation.notFound=Annotation couldn’t be found annotation.notFound.considerLoggingIn=Annotation couldn’t be found. If the annotation is not public, you need to log in to see it. annotation.invalid=Invalid annotation @@ -232,6 +234,8 @@ annotation.unlinkFallback.noVolume=Could not unlink fallback segmentation (only annotation.makeHybrid.explorationalsOnly=Could not convert annotation to hybrid annotation because it is only allowed for explorational annotations. annotation.makeHybrid.failed=Could not convert to hybrid. annotation.makeHybrid.alreadyHybrid=Could not convert annotation to hybrid annotation because it is already a hybrid annotation. +annotation.downsample.explorationalsOnly=Could not downsample annotation because it is only allowed for explorational annotations. +annotation.downsample.volumeOnly=Downsampling is only possible for volume or hybrid annotations. annotationType.notFound=Could not find the specified annotation type. annotation.cantMergeVolumes=This annotation collection contains volume tracings. Merging volume tracings is not yet supported. Please use download instead. annotation.reopen.tooLate=The annotation cannot be reopened anymore, since it's been finished for too long. @@ -276,6 +280,7 @@ taskType.deleteFailure=Task type “{0}” deletion failed taskType.noAnnotations=We couldn’t find finished annotations for this task type taskType.id.invalid=The provided task type id is invalid. taskType.tracingTypeImmutable=Tracing types of task types are immutable. Consider creating a new task type. +taskType.allowedMagnificationsImmutable=Resolution restrictions of task types are immutable. Consider creating a new task type. taskType.mismatch=The tracing type of the task type is {0}, but found a {1} tracing as base. Consider choosing another task type or correct the NMLs. tracingType.invalid=The provided tracing type is invalid. 
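As a rough illustration of the data flow these hunks introduce (not part of the patch set itself): the sketch below shows how a task type's allowedMagnifications setting could translate into resolution restrictions and then into the minResolution/maxResolution query parameters that the tracingstore requests receive. SimpleAllowedMagnifications, SimpleResolutionRestrictions and toRestrictions are hypothetical stand-ins for the real AllowedMagnifications and ResolutionRestrictions types added in these diffs.

// Illustrative sketch only; simplified stand-ins for the types used in the patches above.
object ResolutionRestrictionSketch {
  case class SimpleAllowedMagnifications(shouldRestrict: Boolean, min: Int, max: Int)

  case class SimpleResolutionRestrictions(min: Option[Int], max: Option[Int]) {
    def minStr: Option[String] = min.map(_.toString)
    def maxStr: Option[String] = max.map(_.toString)
  }

  // Mirrors the mapping from a task type's allowedMagnifications setting to restrictions.
  def toRestrictions(mags: Option[SimpleAllowedMagnifications]): SimpleResolutionRestrictions =
    mags match {
      case Some(m) if m.shouldRestrict => SimpleResolutionRestrictions(Some(m.min), Some(m.max))
      case _                           => SimpleResolutionRestrictions(None, None)
    }

  def main(args: Array[String]): Unit = {
    // Example: a task type restricted to magnifications 1 through 4.
    val restrictions =
      toRestrictions(Some(SimpleAllowedMagnifications(shouldRestrict = true, min = 1, max = 4)))
    // Only defined bounds become query parameters, analogous to addQueryStringOptional.
    val query = List("minResolution" -> restrictions.minStr, "maxResolution" -> restrictions.maxStr)
      .collect { case (key, Some(value)) => s"$key=$value" }
      .mkString("&")
    println(query) // prints: minResolution=1&maxResolution=4
  }
}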
diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index c8b68df7690..a40fda51918 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -91,7 +91,7 @@ GET /tracingstore POST /tracingstores/:name/handleTracingUpdateReport controllers.WKTracingStoreController.handleTracingUpdateReport(name: String) POST /tracingstores/:name/validateUserAccess controllers.UserTokenController.validateAccessViaTracingstore(name: String, token: Option[String]) PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) -GET /tracingstores/:name/dataSource/:organizationName/:dataSetName controllers.WKTracingStoreController.dataSource(name: String, organizationName: String, dataSetName: String) +GET /tracingstores/:name/dataSource/:dataSetName controllers.WKTracingStoreController.dataSource(name: String, organizationName: Option[String], dataSetName: String) # User access tokens for datastore authentification POST /userToken/generate controllers.UserTokenController.generateTokenForDataStore @@ -109,6 +109,7 @@ PATCH /annotations/:typ/:id/transfer GET /annotations/:typ/:id/info controllers.AnnotationController.info(typ: String, id: String, timestamp: Long) PATCH /annotations/:typ/:id/makeHybrid controllers.AnnotationController.makeHybrid(typ: String, id: String) +PATCH /annotations/:typ/:id/downsample controllers.AnnotationController.downsample(typ: String, id: String) PATCH /annotations/:typ/:id/unlinkFallback controllers.AnnotationController.unlinkFallback(typ: String, id: String) DELETE /annotations/:typ/:id controllers.AnnotationController.cancel(typ: String, id: String) POST /annotations/:typ/:id/merge/:mergedTyp/:mergedId controllers.AnnotationController.merge(typ: String, id: String, mergedTyp: String, mergedId: String) diff --git a/frontend/javascripts/admin/admin_rest_api.js b/frontend/javascripts/admin/admin_rest_api.js index cc2d4435639..3e5b68cf28a 100644 --- a/frontend/javascripts/admin/admin_rest_api.js +++ b/frontend/javascripts/admin/admin_rest_api.js @@ -742,6 +742,18 @@ export async function unlinkFallbackSegmentation( }); } +// When the annotation is open, please use the corresponding method +// in api_latest.js. It will take care of saving the annotation and +// reloading it. +export async function downsampleSegmentation( + annotationId: string, + annotationType: APIAnnotationType, +): Promise { + await Request.receiveJSON(`/api/annotations/${annotationType}/${annotationId}/downsample`, { + method: "PATCH", + }); +} + // ### Datasets export async function getDatasets( isUnreported: ?boolean, @@ -1202,7 +1214,7 @@ export function updateUserConfiguration(userConfiguration: Object): Object { }); } -// ### TimeTracking +// ### Time Tracking export async function getTimeTrackingForUserByMonth( userEmail: string, day: moment$Moment, diff --git a/frontend/javascripts/admin/tasktype/task_type_create_view.js b/frontend/javascripts/admin/tasktype/task_type_create_view.js index 83fac6db69b..2779c4f40de 100644 --- a/frontend/javascripts/admin/tasktype/task_type_create_view.js +++ b/frontend/javascripts/admin/tasktype/task_type_create_view.js @@ -300,7 +300,7 @@ class TaskTypeCreateView extends React.PureComponent { {getFieldDecorator("settings.allowedMagnifications.shouldRestrict", { valuePropName: "checked", })( - + Restrict Resolutions{" "} { Minimum:{" "} {getFieldDecorator("settings.allowedMagnifications.min", { rules: [{ validator: isValidMagnification }], - })()} + })()}
@@ -335,7 +335,7 @@ class TaskTypeCreateView extends React.PureComponent { Maximum:{" "} {getFieldDecorator("settings.allowedMagnifications.max", { rules: [{ validator: isValidMagnification }], - })()} + })()}
diff --git a/frontend/javascripts/libs/utils.js b/frontend/javascripts/libs/utils.js index 3dd4ce14c7c..c66a365c2e0 100644 --- a/frontend/javascripts/libs/utils.js +++ b/frontend/javascripts/libs/utils.js @@ -119,8 +119,8 @@ export function jsonStringify(json: Object) { return JSON.stringify(json, null, " "); } -export function clamp(a: number, x: number, b: number): number { - return Math.max(a, Math.min(b, x)); +export function clamp(min: number, value: number, max: number): number { + return Math.max(min, Math.min(max, value)); } export function zeroPad(num: number, zeros: number = 0): string { diff --git a/frontend/javascripts/oxalis/api/api_latest.js b/frontend/javascripts/oxalis/api/api_latest.js index 3902259e4e3..9457e589d49 100644 --- a/frontend/javascripts/oxalis/api/api_latest.js +++ b/frontend/javascripts/oxalis/api/api_latest.js @@ -32,6 +32,7 @@ import { finishAnnotation, getMappingsForDatasetLayer, requestTask, + downsampleSegmentation, } from "admin/admin_rest_api"; import { findTreeByNodeId, @@ -744,6 +745,30 @@ class TracingApi { } Store.dispatch(setToolAction(tool)); } + + /** + * Use this method to create a complete resolution pyramid by downsampling the lowest present mag (e.g., mag 1). + This method will save the current changes and then reload the page after the downsampling + has finished. + This function can only be used for non-tasks. + + Note that invoking this method will not block the UI. Thus, user actions can be performed during the + downsampling. The caller should prohibit this (e.g., by showing a non-closable modal during the process). + */ + async downsampleSegmentation() { + const state = Store.getState(); + const { annotationId, annotationType, volume } = state.tracing; + if (state.task != null) { + throw new Error("Cannot downsample segmentation for a task."); + } + if (volume == null) { + throw new Error("Cannot downsample segmentation for annotation without volume data."); + } + + await this.save(); + await downsampleSegmentation(annotationId, annotationType); + await this.hardReload(); + } } /** diff --git a/frontend/javascripts/oxalis/controller/combinations/skeletontracing_plane_controller.js b/frontend/javascripts/oxalis/controller/combinations/skeletontracing_plane_controller.js index 2fd471583a1..df920e44447 100644 --- a/frontend/javascripts/oxalis/controller/combinations/skeletontracing_plane_controller.js +++ b/frontend/javascripts/oxalis/controller/combinations/skeletontracing_plane_controller.js @@ -26,6 +26,7 @@ import { getPosition, getRotationOrtho, getRequestLogZoomStep, + isMagRestrictionViolated, } from "oxalis/model/accessors/flycam_accessor"; import { setActiveNodeAction, @@ -85,7 +86,20 @@ export function getPlaneMouseControls(planeView: PlaneView) { leftClick: (pos: Point2, plane: OrthoView, event: MouseEvent, isTouch: boolean) => onClick(planeView, pos, event.shiftKey, event.altKey, event.ctrlKey, plane, isTouch, event), rightClick: (pos: Point2, plane: OrthoView, event: MouseEvent) => { - const { volume } = Store.getState().tracing; + const state = Store.getState(); + if (isMagRestrictionViolated(state)) { + // The current zoom value violates the specified magnification-restriction in the + // task type. Therefore, we abort the action here. + // Actually, one would need to handle more skeleton actions (e.g., deleting a node), + // but not all (e.g., deleting a tree from the tree tab should be allowed). Therefore, + // this solution is a bit of a shortcut. 
However, it should cover 90% of the use case + // for restricting the rendered magnification. + // See https://github.com/scalableminds/webknossos/pull/4891 for context and + // https://github.com/scalableminds/webknossos/issues/4838 for the follow-up issue. + return; + } + + const { volume } = state.tracing; if (!volume || volume.activeTool === VolumeToolEnum.MOVE) { // We avoid creating nodes when in brushing mode. setWaypoint(calculateGlobalPos(pos), event.ctrlKey); diff --git a/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js b/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js index 9c448c60149..e076832a756 100644 --- a/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js +++ b/frontend/javascripts/oxalis/model/accessors/flycam_accessor.js @@ -289,11 +289,14 @@ export function getZoomValue(flycam: Flycam): number { return flycam.zoomStep; } -function getValidTaskZoomRange(state: OxalisState): [number, number] { +export function getValidTaskZoomRange( + state: OxalisState, + respectRestriction: boolean = false, +): [number, number] { const defaultRange = [userSettings.zoom.minimum, Infinity]; const { allowedMagnifications } = state.tracing.restrictions; - if (!allowedMagnifications || !allowedMagnifications.shouldRestrict) { + if (!respectRestriction || !allowedMagnifications || !allowedMagnifications.shouldRestrict) { return defaultRange; } @@ -315,6 +318,23 @@ function getValidTaskZoomRange(state: OxalisState): [number, number] { return [min, max]; } +export function isMagRestrictionViolated(state: OxalisState): boolean { + const { allowedMagnifications } = state.tracing.restrictions; + + if (!allowedMagnifications || !allowedMagnifications.shouldRestrict) { + return false; + } + + const zoomStep = getRequestLogZoomStep(state); + if (allowedMagnifications.min != null && zoomStep < Math.log2(allowedMagnifications.min)) { + return true; + } + if (allowedMagnifications.max != null && zoomStep > Math.log2(allowedMagnifications.max)) { + return true; + } + return false; +} + export function getPlaneScalingFactor( state: OxalisState, flycam: Flycam, diff --git a/frontend/javascripts/oxalis/model/data_layer.js b/frontend/javascripts/oxalis/model/data_layer.js index 49b8c69e1ee..4e02c3297c0 100644 --- a/frontend/javascripts/oxalis/model/data_layer.js +++ b/frontend/javascripts/oxalis/model/data_layer.js @@ -25,6 +25,7 @@ class DataLayer { layerRenderingManager: LayerRenderingManager; resolutions: Array; fallbackLayer: ?string; + fallbackLayerInfo: ?DataLayerType; constructor( layerInfo: DataLayerType, @@ -35,6 +36,9 @@ class DataLayer { this.connectionInfo = connectionInfo; this.name = layerInfo.name; this.fallbackLayer = layerInfo.fallbackLayer != null ? layerInfo.fallbackLayer : null; + this.fallbackLayerInfo = + layerInfo.fallbackLayerInfo != null ? 
layerInfo.fallbackLayerInfo : null; + this.resolutions = layerInfo.resolutions; const { dataset } = Store.getState(); diff --git a/frontend/javascripts/oxalis/model/sagas/root_saga.js b/frontend/javascripts/oxalis/model/sagas/root_saga.js index cd8c920bc8b..ededf55dc62 100644 --- a/frontend/javascripts/oxalis/model/sagas/root_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/root_saga.js @@ -25,7 +25,7 @@ import handleMeshChanges from "oxalis/model/sagas/handle_mesh_changes"; import isosurfaceSaga from "oxalis/model/sagas/isosurface_saga"; import { watchMaximumRenderableLayers } from "oxalis/model/sagas/dataset_saga"; import watchPushSettingsAsync from "oxalis/model/sagas/settings_saga"; -import watchTasksAsync from "oxalis/model/sagas/task_saga"; +import watchTasksAsync, { warnAboutMagRestriction } from "oxalis/model/sagas/task_saga"; import loadHistogramData from "oxalis/model/sagas/load_histogram_data_saga"; export default function* rootSaga(): Saga { @@ -40,6 +40,7 @@ function* restartableSaga(): Saga { try { yield _all([ _call(warnAboutSegmentationZoom), + _call(warnAboutMagRestriction), _call(watchPushSettingsAsync), _call(watchSkeletonTracingAsync), _call(collectUndoStates), diff --git a/frontend/javascripts/oxalis/model/sagas/task_saga.js b/frontend/javascripts/oxalis/model/sagas/task_saga.js index 04d05390de6..23f734528a2 100644 --- a/frontend/javascripts/oxalis/model/sagas/task_saga.js +++ b/frontend/javascripts/oxalis/model/sagas/task_saga.js @@ -1,24 +1,31 @@ // @flow import React from "react"; import _ from "lodash"; +import { Button } from "antd"; import type { APITaskType } from "types/api_flow_types"; -import { type Saga, call, put, select, take } from "oxalis/model/sagas/effect-generators"; +import { type Saga, call, put, select, _delay, take } from "oxalis/model/sagas/effect-generators"; +import { clamp } from "libs/utils"; +import { + getValidTaskZoomRange, + isMagRestrictionViolated, +} from "oxalis/model/accessors/flycam_accessor"; +import { setActiveUserAction } from "oxalis/model/actions/user_actions"; +import { setMergerModeEnabledAction } from "oxalis/model/actions/skeletontracing_actions"; import { setZoomStepAction } from "oxalis/model/actions/flycam_actions"; import { updateDatasetSettingAction, updateUserSettingAction, updateLayerSettingAction, } from "oxalis/model/actions/settings_actions"; -import { setActiveUserAction } from "oxalis/model/actions/user_actions"; -import { setMergerModeEnabledAction } from "oxalis/model/actions/skeletontracing_actions"; import { updateLastTaskTypeIdOfUser } from "admin/admin_rest_api"; +import Model from "oxalis/model"; import NewTaskDescriptionModal from "oxalis/view/new_task_description_modal"; import RecommendedConfigurationModal from "oxalis/view/recommended_configuration_modal"; +import Store from "oxalis/store"; import Toast from "libs/toast"; import messages from "messages"; import renderIndependently from "libs/render_independently"; -import Model from "oxalis/model"; function* maybeShowNewTaskTypeModal(taskType: APITaskType): Saga { // Users can acquire new tasks directly in the tracing view. 
Occasionally, @@ -124,3 +131,51 @@ export default function* watchTasksAsync(): Saga { yield* put(setActiveUserAction(fullUser)); } } + +export function* warnAboutMagRestriction(): Saga { + function* warnMaybe(): Saga { + const { allowUpdate } = yield* select(state => state.tracing.restrictions); + if (!allowUpdate) { + // If updates are not allowed in general, we return here, since we don't + // want to show any warnings when the user cannot edit the annotation in the first + // place (e.g., when viewing the annotation of another user). + return; + } + + const isViolated = yield* select(isMagRestrictionViolated); + const toastConfig = { sticky: true, key: "mag-restriction-warning" }; + + if (isViolated) { + const [min, max] = yield* select(storeState => getValidTaskZoomRange(storeState, true)); + const clampZoom = () => { + const currentZoomStep = Store.getState().flycam.zoomStep; + const newZoomValue = clamp(min, currentZoomStep, max); + Store.dispatch(setZoomStepAction(newZoomValue)); + }; + const message = ( + + Annotating data is restricted to a certain zoom range. Please adapt the zoom value so that + it is between {min.toFixed(2)} and {max.toFixed(2)}. Alternatively, click{" "} + {" "} + to adjust the zoom accordingly. + + ); + + Toast.error(message, toastConfig); + } else { + Toast.close(toastConfig.key); + } + } + + yield* take("WK_READY"); + // Wait before showing the initial warning. Due to initialization lag it may only be visible very briefly, otherwise. + yield _delay(5000); + yield* warnMaybe(); + + while (true) { + yield* take(["ZOOM_IN", "ZOOM_OUT", "ZOOM_BY_DELTA", "SET_ZOOM_STEP", "SET_STORED_LAYOUTS"]); + yield* warnMaybe(); + } +} diff --git a/frontend/javascripts/oxalis/model_initialization.js b/frontend/javascripts/oxalis/model_initialization.js index 274570a9881..b44721e1971 100644 --- a/frontend/javascripts/oxalis/model_initialization.js +++ b/frontend/javascripts/oxalis/model_initialization.js @@ -25,7 +25,6 @@ import { getMostExtensiveResolutions, getSegmentationLayer, isElementClassSupported, - getResolutions, convertToDenseResolution, } from "oxalis/model/accessors/dataset_accessor"; import { getSomeServerTracing } from "oxalis/model/accessors/tracing_accessor"; @@ -444,29 +443,6 @@ function setupLayerForVolumeTracing( ? resolutions.map(({ x, y, z }) => [x, y, z]) : [[1, 1, 1]]; - console.log("Volume tracing resolutions:", tracingResolutions); - const targetResolutions = - fallbackLayer != null ? fallbackLayer.resolutions : getResolutions(dataset); - - const resolutionsAreSubset = (resAs, resBs) => - resAs.every(resA => resBs.some(resB => _.isEqual(resA, resB))); - const doResolutionsMatch = - resolutionsAreSubset(targetResolutions, tracingResolutions) && - resolutionsAreSubset(tracingResolutions, targetResolutions); - - if (!doResolutionsMatch) { - if (tracingHasResolutionList) { - Toast.warning( - messages["tracing.volume_resolution_mismatch"], - {}, - `The resolutions of the volume tracing (${tracingResolutions.toString()}) don't match the dataset's resolutions (${targetResolutions.toString()}). This can happen when the resolution of the dataset was changed after this tracing was created. 
Note that there might be rendering issues for this reason.`, - ); - throw HANDLED_ERROR; - } else { - console.log("Detected legacy tracing with no resolution pyramid."); - } - } - const tracingLayer = { name: tracing.id, elementClass: tracing.elementClass, @@ -477,6 +453,7 @@ function setupLayerForVolumeTracing( mappings: fallbackLayer != null && fallbackLayer.mappings != null ? fallbackLayer.mappings : [], // remember the name of the original layer, used to request mappings fallbackLayer: tracing.fallbackLayer, + fallbackLayerInfo: fallbackLayer, }; if (fallbackLayer != null) { diff --git a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js index 6d0086f51bd..f6c56f6cd7b 100644 --- a/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js +++ b/frontend/javascripts/oxalis/view/action-bar/volume_actions_view.js @@ -17,6 +17,7 @@ import { enforceVolumeTracing, isVolumeTraceToolDisallowed, } from "oxalis/model/accessors/volumetracing_accessor"; +import { isMagRestrictionViolated } from "oxalis/model/accessors/flycam_accessor"; import { setToolAction, createCellAction, @@ -169,6 +170,26 @@ const mapId = id => { return cube.mapId(id); }; +const getExplanationForDisabledVolume = ( + isInMergerMode, + isLabelingPossible, + isZoomInvalidForTracing, +) => { + if (isZoomInvalidForTracing) { + return "Volume annotation is disabled since the current zoom value is not in the required range. Please adjust the zoom level."; + } + + if (isInMergerMode) { + return "Volume annotation is disabled while the merger mode is active."; + } + + if (!isLabelingPossible) { + return "Volume annotation is disabled since no segmentation data can be shown at the current magnification. Please adjust the zoom level."; + } + + return "Volume annotation is currently disabled."; +}; + export default function VolumeActionsView() { const hasSkeleton = useSelector(state => state.tracing.skeleton != null); const activeTool = useSelector(state => enforceVolumeTracing(state.tracing).activeTool); @@ -188,6 +209,8 @@ export default function VolumeActionsView() { maybeResolutionWithZoomStep != null ? maybeResolutionWithZoomStep.resolution : null; const isLabelingPossible = labeledResolution != null; + const isZoomInvalidForTracing = useSelector(isMagRestrictionViolated); + const hasResolutionWithHigherDimension = (labeledResolution || []).some(val => val > 1); const multiSliceAnnotationInfoIcon = hasResolutionWithHigherDimension ? ( @@ -206,10 +229,10 @@ export default function VolumeActionsView() { // the tools via the w shortcut. In that case, the effect-hook is re-executed // and the tool is switched to MOVE. useEffect(() => { - if (isInMergerMode || !isLabelingPossible) { + if (isInMergerMode || !isLabelingPossible || isZoomInvalidForTracing) { Store.dispatch(setToolAction(VolumeToolEnum.MOVE)); } - }, [isInMergerMode, activeTool, isLabelingPossible]); + }, [isInMergerMode, activeTool, isLabelingPossible, isZoomInvalidForTracing]); const isShiftPressed = useKeyPress("Shift"); const isControlPressed = useKeyPress("Control"); @@ -232,9 +255,11 @@ export default function VolumeActionsView() { : null; const previousMoveToolHint = usePrevious(moveToolHint); - const disabledVolumeExplanation = isLabelingPossible - ? "Volume annotation is disabled while the merger mode is active." - : "Volume annotation is disabled since no segmentation data can be shown at the current magnification. 
Please adjust the zoom level."; + const disabledVolumeExplanation = getExplanationForDisabledVolume( + isInMergerMode, + isLabelingPossible, + isZoomInvalidForTracing, + ); const moveToolDescription = `Pointer – Use left-click to move around${ hasSkeleton ? " and right-click to create new skeleton nodes" : "" diff --git a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js index 4dc973230ea..6f603560d97 100644 --- a/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js +++ b/frontend/javascripts/oxalis/view/settings/dataset_settings_view.js @@ -6,35 +6,34 @@ import { Col, Collapse, Icon, Row, Select, Switch, Tooltip, Modal } from "antd"; import type { Dispatch } from "redux"; import { connect } from "react-redux"; -import * as React from "react"; +import React, { useState } from "react"; import _ from "lodash"; -import { V3 } from "libs/mjs"; -import api from "oxalis/api/internal_api"; -import messages, { settings } from "messages"; import type { APIDataset } from "types/api_flow_types"; -import { AsyncIconButton } from "components/async_clickables"; +import { AsyncButton, AsyncIconButton } from "components/async_clickables"; import { SwitchSetting, NumberSliderSetting, DropdownSetting, ColorSetting, } from "oxalis/view/settings/setting_input_views"; +import { V3 } from "libs/mjs"; import { findDataPositionForLayer, clearCache, findDataPositionForVolumeTracing, unlinkFallbackSegmentation, } from "admin/admin_rest_api"; -import { getGpuFactorsWithLabels } from "oxalis/model/bucket_data_handling/data_rendering_logic"; -import { getMaxZoomValueForResolution } from "oxalis/model/accessors/flycam_accessor"; import { + getDefaultIntensityRangeOfLayer, getElementClass, getLayerBoundaries, - getDefaultIntensityRangeOfLayer, getLayerByName, getResolutionInfo, + getResolutions, } from "oxalis/model/accessors/dataset_accessor"; +import { getGpuFactorsWithLabels } from "oxalis/model/bucket_data_handling/data_rendering_logic"; +import { getMaxZoomValueForResolution } from "oxalis/model/accessors/flycam_accessor"; import { setPositionAction, setZoomStepAction } from "oxalis/model/actions/flycam_actions"; import { updateDatasetSettingAction, @@ -49,10 +48,13 @@ import Store, { type UserConfiguration, type HistogramDataForAllLayers, type Tracing, + type Task, } from "oxalis/store"; import Toast from "libs/toast"; import * as Utils from "libs/utils"; +import api from "oxalis/api/internal_api"; import constants, { type ViewMode, type Vector3 } from "oxalis/constants"; +import messages, { settings } from "messages"; import Histogram, { isHistogramSupported } from "./histogram_view"; @@ -76,9 +78,73 @@ type DatasetSettingsProps = {| onChangeUser: (key: $Keys, value: any) => void, onUnlinkFallbackLayer: Tracing => Promise, tracing: Tracing, + task: ?Task, |}; -class DatasetSettings extends React.PureComponent { +function DownsampleVolumeModal({ visible, hideDownsampleVolumeModal, magsToDownsample }) { + const [isDownsampling, setIsDownsampling] = useState(false); + + const handleTriggerDownsampling = async () => { + setIsDownsampling(true); + await api.tracing.downsampleSegmentation(); + setIsDownsampling(false); + }; + + return ( + +

+    <Modal visible={visible} onCancel={hideDownsampleVolumeModal} footer={null}>
+      <p>
+        This annotation does not have volume annotation data in all resolutions. Consequently,
+        annotation data cannot be rendered at all zoom values. By clicking "Downsample",
+        webKnossos will use the best resolution of the volume data to create all dependent
+        resolutions.
+      </p>
+      <p>
+        The following resolutions will be added when clicking "Downsample":{" "}
+        {magsToDownsample.map(mag => mag.join("-")).join(", ")}.
+      </p>
+      <p>The cause for the missing resolutions can be one of the following:</p>
+      <ul>
+        <li>
+          The annotation was created before webKnossos supported multi-resolution volume tracings.
+        </li>
+        <li>An old annotation was uploaded which did not include all resolutions.</li>
+        <li>The annotation was created in a task that was restricted to certain resolutions.</li>
+        <li>The dataset was mutated to have more resolutions.</li>
+      </ul>
+      <p>
+        Note that this action might take a few minutes. Afterwards, the annotation is reloaded.
+        Also, the version history of the volume data will be reset.
+      </p>
+      <AsyncButton onClick={handleTriggerDownsampling} type="primary">
+        Downsample
+      </AsyncButton>
+    </Modal>
+ ); +} + +type State = {| + isDownsampleVolumeModalVisible: boolean, +|}; + +class DatasetSettings extends React.PureComponent { + state = { + isDownsampleVolumeModalVisible: false, + }; + getFindDataButton = (layerName: string, isDisabled: boolean, isColorLayer: boolean) => { let tooltipText = isDisabled ? "You cannot search for data when the layer is disabled." @@ -303,6 +369,8 @@ class DatasetSettings extends React.PureComponent {
+ {isColorLayer ? null : this.getOptionalDownsampleVolumeIcon()} + {hasHistogram ? this.getEditMinMaxButton(layerName, isInEditMode) : null} {this.getFindDataButton(layerName, isDisabled, isColorLayer)} {this.getReloadDataButton(layerName)} @@ -508,6 +576,70 @@ class DatasetSettings extends React.PureComponent { "Layers" ); + getVolumeMagsToDownsample = (): Array => { + if (this.props.task != null) { + return []; + } + const volumeTracing = this.props.tracing.volume; + if (volumeTracing == null) { + return []; + } + const segmentationLayer = Model.getSegmentationLayer(); + const fallbackLayerInfo = segmentationLayer.fallbackLayerInfo; + const volumeTargetResolutions = + fallbackLayerInfo != null + ? fallbackLayerInfo.resolutions + : getResolutions(this.props.dataset); + + const getMaxDim = resolution => Math.max(...resolution); + + const volumeTracingResolutions = segmentationLayer.resolutions; + + const sourceMag = _.minBy(volumeTracingResolutions, getMaxDim); + const possibleMags = volumeTargetResolutions.filter( + resolution => getMaxDim(resolution) >= getMaxDim(sourceMag), + ); + + const magsToDownsample = _.differenceWith(possibleMags, volumeTracingResolutions, _.isEqual); + return magsToDownsample; + }; + + getOptionalDownsampleVolumeIcon = () => { + const magsToDownsample = this.getVolumeMagsToDownsample(); + const hasExtensiveResolutions = magsToDownsample.length === 0; + + if (hasExtensiveResolutions) { + return null; + } + + return ( + + + Resolution Icon + + + ); + }; + + showDownsampleVolumeModal = () => { + this.setState({ isDownsampleVolumeModalVisible: true }); + }; + + hideDownsampleVolumeModal = () => { + this.setState({ isDownsampleVolumeModalVisible: false }); + }; + render() { const { layers } = this.props.datasetConfiguration; const segmentationLayerName = Model.getSegmentationLayerName(); @@ -594,6 +726,11 @@ class DatasetSettings extends React.PureComponent { onChange={this.onChangeRenderMissingDataBlack} /> + ); } @@ -606,6 +743,7 @@ const mapStateToProps = (state: OxalisState) => ({ histogramData: state.temporaryConfiguration.histogramData, dataset: state.dataset, tracing: state.tracing, + task: state.task, }); const mapDispatchToProps = (dispatch: Dispatch<*>) => ({ diff --git a/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md b/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md index bcb1293959f..a2aa66b5036 100644 --- a/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md @@ -20,6 +20,7 @@ Generated by [AVA](https://ava.li). z: 0, }, id: 'id', + organizationName: 'Organization_X', treeGroups: [ { children: [ @@ -1570,6 +1571,7 @@ Generated by [AVA](https://ava.li). z: 0, }, id: 'id', + organizationName: 'Organization_X', treeGroups: [], trees: [], userBoundingBoxes: [], @@ -1594,6 +1596,7 @@ Generated by [AVA](https://ava.li). z: 0, }, id: 'id', + organizationName: 'Organization_X', treeGroups: [], trees: [], userBoundingBoxes: [], @@ -1626,6 +1629,7 @@ Generated by [AVA](https://ava.li). elementClass: 'uint32', id: 'id', largestSegmentId: 0, + organizationName: 'Organization_X', resolutions: [ { x: 1, @@ -1687,6 +1691,7 @@ Generated by [AVA](https://ava.li). 
elementClass: 'uint32', id: 'id', largestSegmentId: 0, + organizationName: 'Organization_X', resolutions: [ { x: 1, @@ -1735,6 +1740,7 @@ Generated by [AVA](https://ava.li). z: 3, }, id: 'id', + organizationName: 'Organization_X', treeGroups: [], trees: [], userBoundingBoxes: [], diff --git a/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap b/frontend/javascripts/test/snapshots/public/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap index a7026c8b7a89fd4e65d7c86d49eb8786a08cb2b4..78ce4ff0398a6243d46726664d41706d31182e84 100644 GIT binary patch
+agglomerates?: Array, +fallbackLayer?: ?string, + // eslint-disable-next-line no-use-before-define + +fallbackLayerInfo?: APIDataLayer, |}; export type APIDataLayer = APIColorLayer | APISegmentationLayer; @@ -608,6 +610,7 @@ export type ServerSkeletonTracing = {| boundingBox?: ServerBoundingBox, trees: Array, treeGroups: ?Array, + organizationName?: string, |}; export type ServerVolumeTracing = {| @@ -622,6 +625,7 @@ // were added to volume tracings. Also see: // https://github.com/scalableminds/webknossos/pull/4755 resolutions?: Array, + organizationName?: string, |}; export type ServerTracing = ServerSkeletonTracing | ServerVolumeTracing; diff --git a/tools/proxy/proxy.js b/tools/proxy/proxy.js index d8e5aff7ef6..122e4915924 100644 --- a/tools/proxy/proxy.js +++ b/tools/proxy/proxy.js @@ -5,7 +5,10 @@ const { spawn } = require("child_process"); const path = require("path"); const prefixLines = require("prefix-stream-lines"); -const proxy = httpProxy.createProxyServer(); +const proxy = httpProxy.createProxyServer({ + proxyTimeout: 5 * 60 * 1000, // 5 min + timeout: 5 * 60 * 1000, // 5 min +}); const app = express(); const ROOT = path.resolve(path.join(__dirname, "..", "..")); diff --git a/util/src/main/scala/com/scalableminds/util/geometry/Point3D.scala b/util/src/main/scala/com/scalableminds/util/geometry/Point3D.scala index f1cafcbe52f..d4c789cf07e 100644 --- a/util/src/main/scala/com/scalableminds/util/geometry/Point3D.scala +++ b/util/src/main/scala/com/scalableminds/util/geometry/Point3D.scala @@ -25,6 +25,9 @@ case class Point3D(x: Int, y: Int, z: Int) { def hasGreaterCoordinateAs(other: Point3D) = x > other.x || y > other.y || z > other.z + def isIsotropic: Boolean = + x == y && y == z + override def toString = "(%d, %d, %d)".format(x, y, z) def toList = List(x, y, z) diff --git a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala index 509db8b37f1..acad36356cf 100644 --- a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala +++ b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala @@ -157,6 +157,9 @@ object Fox extends FoxImplicits { Fox.successful(None) } + def runIfOptionTrue[B](condition: Option[Boolean])(f: => Fox[B])(implicit ec: ExecutionContext): Fox[Option[B]] = + runIf(condition.getOrElse(false))(f) + def fillOption[A](input: Option[A])(f: => Fox[A])(implicit ec: ExecutionContext): Fox[A] = input match { case Some(a) => Fox.successful(a) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala index 82a06af5431..e64b96091c7 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketProvider.scala @@ -38,7 +38,7 @@ class
WKWBucketProvider(layer: WKWLayer) Some(readInstruction.dataSource.id), Some(readInstruction.dataLayer.name), readInstruction.baseDir, - resolutionAsTriple = false + resolutionAsTriple = Some(false) ).toFile if (wkwFile.exists()) { @@ -49,7 +49,7 @@ class WKWBucketProvider(layer: WKWLayer) Some(readInstruction.dataSource.id), Some(readInstruction.dataLayer.name), readInstruction.baseDir, - resolutionAsTriple = true + resolutionAsTriple = Some(true) ).toFile if (wkwFileAnisotropic.exists) { WKWFile(wkwFileAnisotropic).map(new WKWCube(_)) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketStreamSink.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketStreamSink.scala index 7cb0548f899..5f1fd832752 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketStreamSink.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWBucketStreamSink.scala @@ -2,11 +2,13 @@ package com.scalableminds.webknossos.datastore.dataformats.wkw import java.io.DataOutputStream +import com.scalableminds.util.geometry.Point3D import com.scalableminds.webknossos.datastore.models.BucketPosition import com.scalableminds.webknossos.datastore.models.datasource.DataLayer import com.scalableminds.util.io.{NamedFunctionStream, NamedStream} import com.scalableminds.webknossos.wrap.{BlockType, WKWFile, WKWHeader} +import scala.collection.mutable import scala.concurrent.Future class WKWBucketStreamSink(val layer: DataLayer) extends WKWDataFormatHelper { @@ -14,14 +16,18 @@ class WKWBucketStreamSink(val layer: DataLayer) extends WKWDataFormatHelper { def apply(bucketStream: Iterator[(BucketPosition, Array[Byte])]): Iterator[NamedStream] = { val (voxelType, numChannels) = WKWDataFormat.elementClassToVoxelType(layer.elementClass) val header = WKWHeader(1, DataLayer.bucketLength, BlockType.LZ4, voxelType, numChannels) + var resolutions = new mutable.HashSet[Point3D]() bucketStream.map { case (bucket, data) => val filePath = wkwFilePath(bucket.toCube(bucket.bucketLength)).toString - NamedFunctionStream(filePath, os => { - Future.successful(WKWFile.write(os, header, Array(data).toIterator)) - }) - } ++ Seq( - NamedFunctionStream(wkwHeaderFilePath(1).toString, - os => Future.successful(header.writeTo(new DataOutputStream(os), true)))) + resolutions += bucket.resolution + NamedFunctionStream( + filePath, + os => Future.successful(WKWFile.write(os, header, Array(data).toIterator)) + ) + } ++ resolutions.toSeq.map { resolution => + NamedFunctionStream(wkwHeaderFilePath(resolution).toString, + os => Future.successful(header.writeTo(new DataOutputStream(os), isHeaderFile = true))) + } } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala index 1aef0a3db10..0f936b3dae4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/wkw/WKWDataFormatHelper.scala @@ -18,20 +18,27 @@ trait WKWDataFormatHelper { dataSourceId: Option[DataSourceId] = None, dataLayerName: Option[String] = None, baseDir: Path = Paths.get(""), - resolutionAsTriple: Boolean = false + resolutionAsTriple: Option[Boolean] = None ): Path = baseDir 
.resolve(dataSourceId.map(_.team).getOrElse("")) .resolve(dataSourceId.map(_.name).getOrElse("")) .resolve(dataLayerName.getOrElse("")) - .resolve(if (resolutionAsTriple) s"${cube.resolution.x}-${cube.resolution.y}-${cube.resolution.z}" - else cube.resolution.maxDim.toString) + .resolve(formatResolution(cube.resolution, resolutionAsTriple)) .resolve(s"z${cube.z}") .resolve(s"y${cube.y}") .resolve(s"x${cube.x}.${dataFileExtension}") + private def formatResolution(resolution: Point3D, resolutionAsTripleOpt: Option[Boolean] = None): String = + resolutionAsTripleOpt.map { resolutionAsTriple => + if (resolutionAsTriple) s"${resolution.x}-${resolution.y}-${resolution.z}" + else resolution.maxDim.toString + }.getOrElse { + if (resolution.isIsotropic) resolution.maxDim.toString else s"${resolution.x}-${resolution.y}-${resolution.z}" + } + def wkwHeaderFilePath( - resolution: Int, + resolution: Point3D, dataSourceId: Option[DataSourceId] = None, dataLayerName: Option[String] = None, baseDir: Path = Paths.get("") @@ -40,7 +47,7 @@ trait WKWDataFormatHelper { .resolve(dataSourceId.map(_.team).getOrElse("")) .resolve(dataSourceId.map(_.name).getOrElse("")) .resolve(dataLayerName.getOrElse("")) - .resolve(resolution.toString) + .resolve(formatResolution(resolution)) .resolve(s"header.${dataFileExtension}") def parseWKWFilePath(path: String): Option[BucketPosition] = { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index 3398bd6d5f5..1597744014f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -171,9 +171,9 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient) extends FoxIm Full(result) } else { val errorMsg = s"Unsuccessful WS request to $url (ID: $id)." + - s"Status: ${result.status}. Response: ${result.bodyAsBytes.map(_.toChar).mkString.take(100)}" + s"Status: ${result.status}. 
Response: ${result.bodyAsBytes.map(_.toChar).mkString.take(2000)}" logger.error(errorMsg) - Failure(errorMsg) + Failure(errorMsg.take(400)) } } .recover { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreWkRpcClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreWkRpcClient.scala index 0bd93f2bb65..4155c1c810b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreWkRpcClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreWkRpcClient.scala @@ -41,8 +41,8 @@ class TracingStoreWkRpcClient @Inject()( "userToken" -> userToken)) def getDataSource(organizationNameOpt: Option[String], dataSetName: String): Fox[DataSourceLike] = - rpc( - s"$webKnossosUrl/api/tracingstores/$tracingStoreName/dataSource/${organizationNameOpt.getOrElse("")}/${dataSetName}") + rpc(s"$webKnossosUrl/api/tracingstores/$tracingStoreName/dataSource/${dataSetName}") + .addQueryStringOptional("organizationName", organizationNameOpt) .addQueryString("key" -> tracingStoreKey) .getWithJsonResponse[DataSourceLike] diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index 57f0af267a4..7cf14d96abb 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -225,11 +225,11 @@ trait TracingController[T <: GeneratedMessage with Message[T], Ts <: GeneratedMe newId = tracingService.generateTracingId mergedTracing = tracingService.merge(tracings.flatten) _ <- tracingService.save(mergedTracing, Some(newId), version = 0, toCache = !persist) - _ <- tracingService.mergeVolumeDataWithDownsampling(request.body.flatten, - tracings.flatten, - newId, - mergedTracing, - toCache = !persist) + _ <- tracingService.mergeVolumeData(request.body.flatten, + tracings.flatten, + newId, + mergedTracing, + toCache = !persist) } yield { Ok(Json.toJson(newId)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index ba59fa4adf2..cec7f0acc6b 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -7,12 +7,13 @@ import akka.stream.scaladsl.Source import com.google.inject.Inject import com.scalableminds.util.geometry.{BoundingBox, Point3D} import com.scalableminds.util.tools.ExtendedTypes.ExtendedString +import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.datasource.DataSourceLike import com.scalableminds.webknossos.datastore.models.{WebKnossosDataRequest, WebKnossosIsosurfaceRequest} import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.tracingstore.slacknotification.SlackNotificationService -import 
com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ResolutionRestrictions, VolumeTracingService} import com.scalableminds.webknossos.tracingstore.{TracingStoreAccessTokenService, TracingStoreWkRpcClient} import play.api.i18n.Messages import play.api.libs.Files.TemporaryFile @@ -42,24 +43,25 @@ class VolumeTracingController @Inject()(val tracingService: VolumeTracingService implicit def unpackMultiple(tracings: VolumeTracings): List[Option[VolumeTracing]] = tracings.tracings.toList.map(_.tracing) - def initialData(tracingId: String) = Action.async { implicit request => - log { - logTime(slackNotificationService.reportUnusalRequest) { - accessTokenService.validateAccess(UserAccessRequest.webknossos) { - AllowRemoteOrigin { - for { - initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") - tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - originalResolutions <- tracingService.initializeWithData(tracingId, tracing, initialData).toFox - filledResolutions <- tracingService.downsample(tracingId: String, - tracing: VolumeTracing, - originalResolutions) - _ <- tracingService.updateResolutionList(tracingId, tracing, filledResolutions) - } yield Ok(Json.toJson(tracingId)) + def initialData(tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) = Action.async { + implicit request => + log { + logTime(slackNotificationService.reportUnusalRequest) { + accessTokenService.validateAccess(UserAccessRequest.webknossos) { + AllowRemoteOrigin { + for { + initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") + tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") + resolutionRestrictions = ResolutionRestrictions(minResolution, maxResolution) + resolutions <- tracingService + .initializeWithData(tracingId, tracing, initialData, resolutionRestrictions) + .toFox + _ <- tracingService.updateResolutionList(tracingId, tracing, resolutions) + } yield Ok(Json.toJson(tracingId)) + } } } } - } } def mergedFromContents(persist: Boolean) = Action.async(validateProto[VolumeTracings]) { implicit request => @@ -84,11 +86,8 @@ class VolumeTracingController @Inject()(val tracingService: VolumeTracingService for { initialData <- request.body.asRaw.map(_.asFile) ?~> Messages("zipFile.notFound") tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - originalResolutions <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData).toFox - filledResolutions <- tracingService.downsample(tracingId: String, - tracing: VolumeTracing, - originalResolutions) - _ <- tracingService.updateResolutionList(tracingId, tracing, filledResolutions) + resolutions <- tracingService.initializeWithDataMultiple(tracingId, tracing, initialData).toFox + _ <- tracingService.updateResolutionList(tracingId, tracing, resolutions) } yield Ok(Json.toJson(tracingId)) } } @@ -146,7 +145,11 @@ class VolumeTracingController @Inject()(val tracingService: VolumeTracingService private def formatMissingBucketList(indices: List[Int]): String = "[" + indices.mkString(", ") + "]" - def duplicate(tracingId: String, fromTask: Option[Boolean]) = Action.async { implicit request => + def duplicate(tracingId: String, + fromTask: Option[Boolean], + minResolution: Option[Int], + maxResolution: Option[Int], + downsample: Option[Boolean]) = Action.async { implicit request => log { 
logTime(slackNotificationService.reportUnusalRequest) { accessTokenService.validateAccess(UserAccessRequest.webknossos) { @@ -154,7 +157,13 @@ class VolumeTracingController @Inject()(val tracingService: VolumeTracingService for { tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") dataSetBoundingBox = request.body.asJson.flatMap(_.validateOpt[BoundingBox].asOpt.flatten) - newId <- tracingService.duplicate(tracingId, tracing, fromTask.getOrElse(false), dataSetBoundingBox) + resolutionRestrictions = ResolutionRestrictions(minResolution, maxResolution) + (newId, newTracing) <- tracingService.duplicate(tracingId, + tracing, + fromTask.getOrElse(false), + dataSetBoundingBox, + resolutionRestrictions) + _ <- Fox.runIfOptionTrue(downsample)(tracingService.downsample(newId, newTracing)) } yield { Ok(Json.toJson(newId)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala index 19f5f64aaf6..030ef6be535 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/TracingService.scala @@ -172,10 +172,10 @@ trait TracingService[T <: GeneratedMessage with Message[T]] def merge(tracings: Seq[T]): T - def mergeVolumeDataWithDownsampling(tracingSelectors: Seq[TracingSelector], - tracings: Seq[T], - newId: String, - newTracing: T, - toCache: Boolean): Fox[Unit] + def mergeVolumeData(tracingSelectors: Seq[TracingSelector], + tracings: Seq[T], + newId: String, + newTracing: T, + toCache: Boolean): Fox[Unit] } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala index ebb3620d8e7..3bc7786af95 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/skeleton/SkeletonTracingService.scala @@ -176,11 +176,11 @@ class SkeletonTracingService @Inject()(tracingDataStore: TracingDataStore, ) } - def mergeVolumeDataWithDownsampling(tracingSelectors: Seq[TracingSelector], - tracings: Seq[SkeletonTracing], - newId: String, - newTracing: SkeletonTracing, - toCache: Boolean): Fox[Unit] = Fox.successful(()) + def mergeVolumeData(tracingSelectors: Seq[TracingSelector], + tracings: Seq[SkeletonTracing], + newId: String, + newTracing: SkeletonTracing, + toCache: Boolean): Fox[Unit] = Fox.successful(()) def updateActionLog(tracingId: String) = { def versionedTupleToJson(tuple: (Long, List[SkeletonUpdateAction])): JsObject = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index 83f42034368..31ea6cca0af 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -1,7 +1,7 @@ package com.scalableminds.webknossos.tracingstore.tracings.volume import 
com.scalableminds.util.geometry.Point3D -import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedIntegerArray} import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceLike, ElementClass} import com.scalableminds.webknossos.tracingstore.TracingStoreWkRpcClient @@ -28,7 +28,7 @@ object VolumeTracingDownsampling { def resolutionsForVolumeTracing(dataSource: DataSourceLike, fallbackLayer: Option[DataLayerLike]): List[Point3D] = { val fallBackLayerMags = fallbackLayer.map(_.resolutions) - fallBackLayerMags.getOrElse(dataSource.dataLayers.flatMap(_.resolutions).distinct) + fallBackLayerMags.getOrElse(dataSource.dataLayers.flatMap(_.resolutions).distinct).sortBy(_.maxDim) } } @@ -36,7 +36,8 @@ trait VolumeTracingDownsampling extends BucketKeys with ProtoGeometryImplicits with VolumeBucketCompression - with KeyValueStoreImplicits { + with KeyValueStoreImplicits + with FoxImplicits { val tracingDataStore: TracingDataStore val tracingStoreWkRpcClient: TracingStoreWkRpcClient @@ -46,56 +47,62 @@ trait VolumeTracingDownsampling version: Long, toCache: Boolean = false): Fox[Unit] - private def fillMapWithInitialBucketsInplace(bucketDataMap: mutable.HashMap[BucketPosition, Array[Byte]], - tracingId: String, - dataLayer: VolumeTracingLayer): Unit = { - val data: List[VersionedKeyValuePair[Array[Byte]]] = - tracingDataStore.volumeData.getMultipleKeys(tracingId, Some(tracingId)) - data.foreach { keyValuePair: VersionedKeyValuePair[Array[Byte]] => - val bucketPosition = parseBucketKey(keyValuePair.key).map(_._2) - bucketPosition.foreach { - bucketDataMap(_) = decompressIfNeeded(keyValuePair.value, - expectedUncompressedBucketSizeFor(dataLayer), - s"bucket $bucketPosition during downsampling") - } - } - } - def downsampleWithLayer(tracingId: String, tracing: VolumeTracing, dataLayer: VolumeTracingLayer)( - implicit ec: ExecutionContext): Fox[Set[Point3D]] = { + implicit ec: ExecutionContext): Fox[List[Point3D]] = { val bucketVolume = 32 * 32 * 32 - val originalMag = Point3D(1, 1, 1) for { - requiredMags <- getRequiredMags(tracing) + _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." 
+ _ <- bool2Fox(tracing.resolutions.nonEmpty) ?~> "Cannot downsample tracing with no resolution list" + sourceMag = getSourceMag(tracing) + magsToCreate <- getMagsToCreate(tracing) elementClass = elementClassFromProto(tracing.elementClass) - bucketDataMap = new mutable.HashMap[BucketPosition, Array[Byte]]() { + bucketDataMapMutable = new mutable.HashMap[BucketPosition, Array[Byte]]() { override def default(key: BucketPosition): Array[Byte] = Array[Byte](0) } - _ = fillMapWithInitialBucketsInplace(bucketDataMap, tracingId, dataLayer) - originalBucketPositions: List[BucketPosition] = bucketDataMap.keys.toList - updatedBuckets = new mutable.HashSet[BucketPosition]() - _ = requiredMags.foldLeft(originalMag) { (previousMag, requiredMag) => + _ = fillMapWithSourceBucketsInplace(bucketDataMapMutable, tracingId, dataLayer, sourceMag) + originalBucketPositions = bucketDataMapMutable.keys.toList + updatedBucketsMutable = new mutable.ListBuffer[BucketPosition]() + _ = magsToCreate.foldLeft(sourceMag) { (previousMag, requiredMag) => downsampleMagFromMag(previousMag, requiredMag, originalBucketPositions, - bucketDataMap, - updatedBuckets, + bucketDataMapMutable, + updatedBucketsMutable, bucketVolume, elementClass, dataLayer) requiredMag } - _ <- Fox.serialCombined(updatedBuckets.toList) { bucketPosition: BucketPosition => - saveBucket(dataLayer, bucketPosition, bucketDataMap(bucketPosition), tracing.version) + _ <- Fox.serialCombined(updatedBucketsMutable.toList) { bucketPosition: BucketPosition => + saveBucket(dataLayer, bucketPosition, bucketDataMapMutable(bucketPosition), tracing.version) } - } yield requiredMags.toSet + originalMag + _ = logger.debug(s"Downsampled mags $magsToCreate from $sourceMag for volume tracing $tracingId.") + } yield sourceMag :: magsToCreate + } + + private def fillMapWithSourceBucketsInplace(bucketDataMap: mutable.HashMap[BucketPosition, Array[Byte]], + tracingId: String, + dataLayer: VolumeTracingLayer, + sourceMag: Point3D): Unit = { + val data: List[VersionedKeyValuePair[Array[Byte]]] = + tracingDataStore.volumeData.getMultipleKeys(tracingId, Some(tracingId)) + data.foreach { keyValuePair: VersionedKeyValuePair[Array[Byte]] => + val bucketPositionOpt = parseBucketKey(keyValuePair.key).map(_._2) + bucketPositionOpt.foreach { bucketPosition => + if (bucketPosition.resolution == sourceMag) { + bucketDataMap(bucketPosition) = decompressIfNeeded(keyValuePair.value, + expectedUncompressedBucketSizeFor(dataLayer), + s"bucket $bucketPosition during downsampling") + } + } + } } private def downsampleMagFromMag(previousMag: Point3D, requiredMag: Point3D, originalBucketPositions: List[BucketPosition], - bucketDataMap: mutable.HashMap[BucketPosition, Array[Byte]], - updatedBuckets: mutable.HashSet[BucketPosition], + bucketDataMapMutable: mutable.HashMap[BucketPosition, Array[Byte]], + updatedBucketsMutable: mutable.ListBuffer[BucketPosition], bucketVolume: Int, elementClass: ElementClass.Value, dataLayer: VolumeTracingLayer): Unit = { @@ -104,7 +111,7 @@ trait VolumeTracingDownsampling downsampledBucketPositions(originalBucketPositions, requiredMag).foreach { downsampledBucketPosition => val sourceBuckets: Seq[BucketPosition] = sourceBucketPositionsFor(downsampledBucketPosition, downScaleFactor, previousMag) - val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMap(_)) + val sourceData: Seq[Array[Byte]] = sourceBuckets.map(bucketDataMapMutable(_)) val downsampledData: Array[Byte] = if (sourceData.forall(_.sameElements(Array[Byte](0)))) Array[Byte](0) @@ -115,8 +122,8 @@ 
trait VolumeTracingDownsampling downsampleData(sourceDataTyped.grouped(bucketVolume).toArray, downScaleFactor, bucketVolume) UnsignedIntegerArray.toByteArray(dataDownscaledTyped, elementClass) } - bucketDataMap(downsampledBucketPosition) = downsampledData - updatedBuckets.add(downsampledBucketPosition) + bucketDataMapMutable(downsampledBucketPosition) = downsampledData + updatedBucketsMutable += downsampledBucketPosition } } @@ -190,20 +197,51 @@ trait VolumeTracingDownsampling private def mode[T](items: Seq[T]): T = items.groupBy(i => i).mapValues(_.size).maxBy(_._2)._1 - private def getRequiredMags(tracing: VolumeTracing): Fox[Seq[Point3D]] = + private def getSourceMag(tracing: VolumeTracing): Point3D = + tracing.resolutions.minBy(_.maxDim) + + private def getMagsToCreate(tracing: VolumeTracing): Fox[List[Point3D]] = + for { + requiredMags <- getRequiredMags(tracing) + sourceMag = getSourceMag(tracing) + magsToCreate = requiredMags.filter(_.maxDim > sourceMag.maxDim) + } yield magsToCreate + + protected def getRequiredMags(tracing: VolumeTracing): Fox[List[Point3D]] = for { dataSource: DataSourceLike <- tracingStoreWkRpcClient.getDataSource(tracing.organizationName, tracing.dataSetName) magsForTracing = VolumeTracingDownsampling.resolutionsForVolumeTracingByLayerName(dataSource, tracing.fallbackLayer) - magsToCreate = magsForTracing.filterNot(_.maxDim == 1).sortBy(_.maxDim) - } yield magsToCreate - - def resolutionsMatch(tracings: Seq[VolumeTracing]): Boolean = - tracings.headOption.forall { firstTracing => - tracings.forall(t => - resolveLegacyResolutionList(t.resolutions).toSet == resolveLegacyResolutionList(firstTracing.resolutions).toSet) - } + } yield magsForTracing.sortBy(_.maxDim) + + protected def restrictMagList(tracing: VolumeTracing, + resolutionRestrictions: ResolutionRestrictions): VolumeTracing = { + val tracingResolutions = + resolveLegacyResolutionList(tracing.resolutions) + val allowedResolutions = resolutionRestrictions.filterAllowed(tracingResolutions.map(point3DFromProto)) + tracing.withResolutions(allowedResolutions.map(point3DToProto)) + } - private def resolveLegacyResolutionList(resolutions: Seq[ProtoPoint3D]) = + protected def resolveLegacyResolutionList(resolutions: Seq[ProtoPoint3D]): Seq[ProtoPoint3D] = if (resolutions.isEmpty) Seq(ProtoPoint3D(1, 1, 1)) else resolutions } + +object ResolutionRestrictions { + def empty: ResolutionRestrictions = ResolutionRestrictions(None, None) +} + +case class ResolutionRestrictions( + min: Option[Int], + max: Option[Int] +) { + def filterAllowed(resolutions: Seq[Point3D]): Seq[Point3D] = + resolutions.filter(isAllowed) + + def isAllowed(resolution: Point3D): Boolean = + min.getOrElse(0) <= resolution.maxDim && max.getOrElse(Int.MaxValue) >= resolution.maxDim + + def isForbidden(resolution: Point3D): Boolean = !isAllowed(resolution) + + def minStr: Option[String] = min.map(_.toString) + def maxStr: Option[String] = max.map(_.toString) +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 3c253640a2e..55a93643602 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -182,19 +182,27 @@ class VolumeTracingService @Inject()( val resolutionSets = 
new mutable.HashSet[Set[Point3D]]() ZipIO.withUnziped(initialData) { case (_, is) => - val resulutionSet = new mutable.HashSet[Point3D]() + val resolutionSet = new mutable.HashSet[Point3D]() ZipIO.withUnziped(is) { case (fileName, _) => parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => - resulutionSet.add(bucketPosition.resolution) + resolutionSet.add(bucketPosition.resolution) } } - resolutionSets.add(resulutionSet.toSet) + if (resolutionSet.nonEmpty) { + resolutionSets.add(resolutionSet.toSet) + } } - val resolutionsMatch = resolutionSets.headOption.forall { head => + + // if none of the tracings contained any volume data. do not save buckets, use full resolution list + if (resolutionSets.isEmpty) return getRequiredMags(tracing).map(_.toSet) + + val resolutionsDoMatch = resolutionSets.headOption.forall { head => resolutionSets.forall(_ == head) } + if (!resolutionsDoMatch) return Fox.failure("annotation.volume.resolutionsDoNotMatch") + val mergedVolume = new MergedVolume(tracing.elementClass) ZipIO.withUnziped(initialData) { @@ -205,8 +213,8 @@ class VolumeTracingService @Inject()( WKWFile.read(is) { case (header, buckets) => if (header.numBlocksPerCube == 1) { - parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => - if (resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1)) { + parseWKWFilePath(fileName.toString).map { _ => + if (buckets.hasNext) { val dataTyped = UnsignedIntegerArray.fromByteArray(buckets.next(), elementClassFromProto(tracing.elementClass)) val nonZeroData = UnsignedIntegerArray.filterNonZero(dataTyped) @@ -228,9 +236,11 @@ class VolumeTracingService @Inject()( case (header, buckets) => if (header.numBlocksPerCube == 1) { parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => - val data = buckets.next() - if (!isAllZero(data) && (resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1))) { - mergedVolume.add(sourceVolumeIndex, bucketPosition, data) + if (buckets.hasNext) { + val data = buckets.next() + if (!isAllZero(data)) { + mergedVolume.add(sourceVolumeIndex, bucketPosition, data) + } } } } @@ -253,7 +263,7 @@ class VolumeTracingService @Inject()( def addLabelSet(labelSet: mutable.Set[UnsignedInteger]): Unit = labelSets += labelSet private def prepareLabelMaps(): Unit = - if (labelSets.isEmpty || labelMaps.nonEmpty) { + if (labelSets.isEmpty || (labelSets.length == 1 && initialLargestSegmentId == 0) || labelMaps.nonEmpty) { () } else { var i: UnsignedInteger = UnsignedInteger.zeroFromElementClass(elementClass) @@ -282,7 +292,7 @@ class VolumeTracingService @Inject()( case (valueTyped, index) => if (!valueTyped.isZero) { val byteValueMapped = - if (labelMaps.isEmpty || initialLargestSegmentId > 0 && sourceVolumeIndex == 0) valueTyped + if (labelMaps.isEmpty || (initialLargestSegmentId > 0 && sourceVolumeIndex == 0)) valueTyped else labelMaps(sourceVolumeIndex)(valueTyped) mutableBucketData(index) = byteValueMapped } @@ -322,7 +332,10 @@ class VolumeTracingService @Inject()( } - def initializeWithData(tracingId: String, tracing: VolumeTracing, initialData: File): Box[Set[Point3D]] = { + def initializeWithData(tracingId: String, + tracing: VolumeTracing, + initialData: File, + resolutionRestrictions: ResolutionRestrictions): Box[Set[Point3D]] = { if (tracing.version != 0L) { return Failure("Tracing has already been edited.") } @@ -336,12 +349,14 @@ class VolumeTracingService @Inject()( case (header, buckets) => if (header.numBlocksPerCube == 1) { 
parseWKWFilePath(fileName.toString).map { bucket => - val data = buckets.next() - if (isAllZero(data)) { - Fox.successful(()) - } else { - savedResolutions.add(bucket.resolution) - saveBucket(dataLayer, bucket, data, tracing.version) + if (buckets.hasNext) { + val data = buckets.next() + if (isAllZero(data) || resolutionRestrictions.isForbidden(bucket.resolution)) { + Fox.successful(()) + } else { + savedResolutions.add(bucket.resolution) + saveBucket(dataLayer, bucket, data, tracing.version) + } } } } @@ -407,12 +422,26 @@ class VolumeTracingService @Inject()( resolutions = VolumeTracingDownsampling.resolutionsForVolumeTracing(dataSource, None).map(point3DToProto) ) - @SuppressWarnings(Array("OptionGet")) //We suppress this warning because we check the option beforehand def duplicate(tracingId: String, - tracing: VolumeTracing, + sourceTracing: VolumeTracing, fromTask: Boolean, - dataSetBoundingBox: Option[BoundingBox]): Fox[String] = { - val newTaskTracing = if (fromTask && dataSetBoundingBox.isDefined) { + dataSetBoundingBox: Option[BoundingBox], + resolutionRestrictions: ResolutionRestrictions): Fox[(String, VolumeTracing)] = { + val tracingWithBB = addBoundingBoxFromTaskIfRequired(sourceTracing, fromTask, dataSetBoundingBox) + val tracingWithResolutionRestrictions = restrictMagList(tracingWithBB, resolutionRestrictions) + val newTracing = tracingWithResolutionRestrictions.withCreatedTimestamp(System.currentTimeMillis()).withVersion(0) + for { + _ <- bool2Fox(newTracing.resolutions.nonEmpty) ?~> "resolutionRestrictions.tooTight" + newId <- save(newTracing, None, newTracing.version) + _ <- duplicateData(tracingId, sourceTracing, newId, newTracing) + } yield (newId, newTracing) + } + + @SuppressWarnings(Array("OptionGet")) //We suppress this warning because we check the option beforehand + private def addBoundingBoxFromTaskIfRequired(tracing: VolumeTracing, + fromTask: Boolean, + dataSetBoundingBox: Option[BoundingBox]): VolumeTracing = + if (fromTask && dataSetBoundingBox.isDefined) { val newId = if (tracing.userBoundingBoxes.isEmpty) 1 else tracing.userBoundingBoxes.map(_.id).max + 1 tracing .addUserBoundingBoxes( @@ -420,13 +449,6 @@ class VolumeTracingService @Inject()( .withBoundingBox(dataSetBoundingBox.get) } else tracing - val newTracing = newTaskTracing.withCreatedTimestamp(System.currentTimeMillis()).withVersion(0) - for { - newId <- save(newTracing, None, newTracing.version) - _ <- duplicateData(tracingId, tracing, newId, newTracing) - } yield newId - } - def duplicateData(sourceId: String, sourceTracing: VolumeTracing, destinationId: String, @@ -438,7 +460,9 @@ class VolumeTracingService @Inject()( destinationDataLayer = volumeTracingLayer(destinationId, destinationTracing) _ <- Fox.combined(buckets.map { case (bucketPosition, bucketData) => - saveBucket(destinationDataLayer, bucketPosition, bucketData, destinationTracing.version) + if (destinationTracing.resolutions.contains(point3DToProto(bucketPosition.resolution))) { + saveBucket(destinationDataLayer, bucketPosition, bucketData, destinationTracing.version) + } else Fox.successful(()) }.toList) } yield () @@ -470,27 +494,22 @@ class VolumeTracingService @Inject()( def updateResolutionList(tracingId: String, tracing: VolumeTracing, - filledResolutions: Set[Point3D], - toCache: Boolean = false): Fox[String] = { - if (tracing.version != 0L) { - return Fox.failure("Tracing has already been edited.") - } - save(tracing.copy(resolutions = filledResolutions.map(point3DToProto).toSeq), - Some(tracingId), - tracing.version, - 
toCache) - } - - def downsample(tracingId: String, tracing: VolumeTracing, originalResolutions: Set[Point3D]): Fox[Set[Point3D]] = { - if (tracing.version != 0L) { - return Failure("Tracing has already been edited.") - } - if (originalResolutions == Set(Point3D(1, 1, 1))) { - // is legacy tracing, needs downsampling - val volumeLayer = volumeTracingLayer(tracingId, tracing) - downsampleWithLayer(tracingId, tracing, volumeLayer) - } else Fox.successful(originalResolutions) - } + resolutions: Set[Point3D], + toCache: Boolean = false): Fox[String] = + for { + _ <- bool2Fox(tracing.version == 0L) ?~> "Tracing has already been edited." + _ <- bool2Fox(resolutions.nonEmpty) ?~> "Resolution restrictions result in zero resolutions" + id <- save(tracing.copy(resolutions = resolutions.toList.sortBy(_.maxDim).map(point3DToProto)), + Some(tracingId), + tracing.version, + toCache) + } yield id + + def downsample(tracingId: String, tracing: VolumeTracing): Fox[Unit] = + for { + resultingResolutions <- downsampleWithLayer(tracingId, tracing, volumeTracingLayer(tracingId, tracing)) + _ <- updateResolutionList(tracingId, tracing, resultingResolutions.toSet) + } yield () def createIsosurface(tracingId: String, request: WebKnossosIsosurfaceRequest): Fox[(Array[Float], List[Int])] = for { @@ -547,71 +566,96 @@ class VolumeTracingService @Inject()( ) } - def mergeVolumeDataWithDownsampling(tracingSelectors: Seq[TracingSelector], - tracings: Seq[VolumeTracing], - newId: String, - newTracing: VolumeTracing, - toCache: Boolean): Fox[Unit] = { + def mergeVolumeData(tracingSelectors: Seq[TracingSelector], + tracings: Seq[VolumeTracing], + newId: String, + newTracing: VolumeTracing, + toCache: Boolean): Fox[Unit] = { val elementClass = tracings.headOption.map(_.elementClass).getOrElse(ElementClass.uint8) val resolutionSets = new mutable.HashSet[Set[Point3D]]() tracingSelectors.zip(tracings).foreach { case (selector, tracing) => - val resulutionSet = new mutable.HashSet[Point3D]() + val resolutionSet = new mutable.HashSet[Point3D]() val dataLayer = volumeTracingLayer(selector.tracingId, tracing) val bucketStream: Iterator[(BucketPosition, Array[Byte])] = dataLayer.bucketProvider.bucketStream(Some(tracing.version)) bucketStream.foreach { case (bucketPosition, _) => - resulutionSet.add(bucketPosition.resolution) + resolutionSet.add(bucketPosition.resolution) + } + if (resolutionSet.nonEmpty) { // empty tracings should have no impact in this check + resolutionSets.add(resolutionSet.toSet) } - } - val resolutionsMatch = resolutionSets.headOption.forall { head => - resolutionSets.forall(_ == head) } - val mergedVolume = new MergedVolume(elementClass) + if (resolutionSets.isEmpty) { + // None of the tracings contained any volume data. 
Do not save buckets, do not touch resolution list + Fox.successful(()) + } else { - tracingSelectors.zip(tracings).foreach { - case (selector, tracing) => - val dataLayer = volumeTracingLayer(selector.tracingId, tracing) - val labelSet: mutable.Set[UnsignedInteger] = scala.collection.mutable.Set() - val bucketStream: Iterator[(BucketPosition, Array[Byte])] = - dataLayer.bucketProvider.bucketStream(Some(tracing.version)) - bucketStream.foreach { - case (bucketPosition, data) => - if (resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1)) { - val dataTyped = UnsignedIntegerArray.fromByteArray(data, elementClass) - val nonZeroData: Array[UnsignedInteger] = UnsignedIntegerArray.filterNonZero(dataTyped) - labelSet ++= nonZeroData - } + val resolutionsIntersection: Set[Point3D] = resolutionSets.headOption.map { head => + resolutionSets.foldLeft(head) { (acc, element) => + acc.intersect(element) } - mergedVolume.addLabelSet(labelSet) - } + }.getOrElse(Set.empty) + + val mergedVolume = new MergedVolume(elementClass) + + tracingSelectors.zip(tracings).foreach { + case (selector, tracing) => + val dataLayer = volumeTracingLayer(selector.tracingId, tracing) + val labelSet: mutable.Set[UnsignedInteger] = scala.collection.mutable.Set() + val bucketStream: Iterator[(BucketPosition, Array[Byte])] = + dataLayer.bucketProvider.bucketStream(Some(tracing.version)) + bucketStream.foreach { + case (bucketPosition, data) => + if (resolutionsIntersection.contains(bucketPosition.resolution)) { + val dataTyped = UnsignedIntegerArray.fromByteArray(data, elementClass) + val nonZeroData: Array[UnsignedInteger] = UnsignedIntegerArray.filterNonZero(dataTyped) + labelSet ++= nonZeroData + } + } + mergedVolume.addLabelSet(labelSet) + } - tracingSelectors.zip(tracings).zipWithIndex.foreach { - case ((selector, tracing), sourceVolumeIndex) => - val dataLayer = volumeTracingLayer(selector.tracingId, tracing) - val bucketStream: Iterator[(BucketPosition, Array[Byte])] = - dataLayer.bucketProvider.bucketStream(Some(tracing.version)) - bucketStream.foreach { - case (bucketPosition, data) => - if (data.length > 1 && (resolutionsMatch || bucketPosition.resolution == Point3D(1, 1, 1))) { - mergedVolume.add(sourceVolumeIndex, bucketPosition, data) - } - } + tracingSelectors.zip(tracings).zipWithIndex.foreach { + case ((selector, tracing), sourceVolumeIndex) => + val dataLayer = volumeTracingLayer(selector.tracingId, tracing) + val bucketStream: Iterator[(BucketPosition, Array[Byte])] = + dataLayer.bucketProvider.bucketStream(Some(tracing.version)) + bucketStream.foreach { + case (bucketPosition, data) => + if (data.length > 1 && resolutionsIntersection.contains(bucketPosition.resolution)) { + mergedVolume.add(sourceVolumeIndex, bucketPosition, data) + } + } + } + val destinationDataLayer = volumeTracingLayer(newId, newTracing) + for { + _ <- mergedVolume.saveTo(destinationDataLayer, newTracing.version, toCache) + _ <- updateResolutionList(newId, newTracing, mergedVolume.presentResolutions) + } yield () } - val destinationDataLayer = volumeTracingLayer(newId, newTracing) - for { - _ <- mergedVolume.saveTo(destinationDataLayer, newTracing.version, toCache) - filledResolutions <- downsample(newId, newTracing, mergedVolume.presentResolutions) - _ <- updateResolutionList(newId, newTracing, filledResolutions) - } yield () } def importVolumeData(tracingId: String, tracing: VolumeTracing, zipFile: File, currentVersion: Int): Fox[Long] = { if (currentVersion != tracing.version) return Fox.failure("version.mismatch") + val 
resolutionSet = new mutable.HashSet[Point3D]() + ZipIO.withUnziped(zipFile) { + case (fileName, _) => + parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => + resolutionSet.add(bucketPosition.resolution) + } + } + val resolutionsDoMatch = + resolutionSet.isEmpty || resolutionSet == resolveLegacyResolutionList(tracing.resolutions) + .map(point3DFromProto) + .toSet + + if (!resolutionsDoMatch) return Fox.failure("annotation.volume.resolutionsDoNotMatch") + val volumeLayer = volumeTracingLayer(tracingId, tracing) val mergedVolume = new MergedVolume(tracing.elementClass, tracing.largestSegmentId) @@ -620,7 +664,7 @@ class VolumeTracingService @Inject()( case (_, is) => WKWFile.read(is) { case (header, buckets) => - if (header.numBlocksPerCube == 1) { + if (header.numBlocksPerCube == 1 && buckets.hasNext) { val dataTyped = UnsignedIntegerArray.fromByteArray(buckets.next(), elementClassFromProto(tracing.elementClass)) val nonZeroData = UnsignedIntegerArray.filterNonZero(dataTyped) @@ -643,9 +687,11 @@ class VolumeTracingService @Inject()( case (header, buckets) => if (header.numBlocksPerCube == 1) { parseWKWFilePath(fileName.toString).map { bucketPosition: BucketPosition => - val data = buckets.next() - if (!isAllZero(data)) { - mergedVolume.add(1, bucketPosition, data) + if (buckets.hasNext) { + val data = buckets.next() + if (!isAllZero(data)) { + mergedVolume.add(1, bucketPosition, data) + } } } } diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index b7691c915a0..d93fb2075c9 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -8,14 +8,14 @@ GET /health @com.scalablemin # Volume tracings POST /volume/save @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.save POST /volume/saveMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.saveMultiple -POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String) +POST /volume/:tracingId/initialData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialData(tracingId: String, minResolution: Option[Int], maxResolution: Option[Int]) POST /volume/:tracingId/initialDataMultiple @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.initialDataMultiple(tracingId: String) GET /volume/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.get(tracingId: String, version: Option[Long]) POST /volume/:tracingId/update @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.update(tracingId: String) GET /volume/:tracingId/allData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allData(tracingId: String, version: Option[Long]) GET /volume/:tracingId/allDataBlocking @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.allDataBlocking(tracingId: String, version: Option[Long]) POST /volume/:tracingId/data @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.data(tracingId: String) -POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(tracingId: String, fromTask: Option[Boolean]) +POST 
/volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: Option[Boolean]) POST /volume/:tracingId/unlinkFallback @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.unlinkFallback(tracingId: String) GET /volume/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateActionLog(tracingId: String) POST /volume/:tracingId/isosurface @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestIsosurface(tracingId: String) diff --git a/webknossos-tracingstore/proto/SkeletonTracing.proto b/webknossos-tracingstore/proto/SkeletonTracing.proto index 5a59ec20987..36b4687f214 100644 --- a/webknossos-tracingstore/proto/SkeletonTracing.proto +++ b/webknossos-tracingstore/proto/SkeletonTracing.proto @@ -63,7 +63,7 @@ message SkeletonTracing { optional BoundingBox userBoundingBox = 10; repeated TreeGroup treeGroups = 11; repeated NamedBoundingBox userBoundingBoxes = 12; - optional string organizationName = 13; + optional string organizationName = 13; // to identify the dataset (may differ from annotation orga) } message SkeletonTracingOpt { diff --git a/webknossos-tracingstore/proto/VolumeTracing.proto b/webknossos-tracingstore/proto/VolumeTracing.proto index a640b8a9296..632c5a75786 100644 --- a/webknossos-tracingstore/proto/VolumeTracing.proto +++ b/webknossos-tracingstore/proto/VolumeTracing.proto @@ -26,7 +26,7 @@ message VolumeTracing { required double zoomLevel = 11; optional BoundingBox userBoundingBox = 12; repeated NamedBoundingBox userBoundingBoxes = 13; - optional string organizationName = 14; + optional string organizationName = 14; // to identify the dataset (may differ from annotation orga) repeated Point3D resolutions = 15; }
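
Illustrative usage sketch (not part of the patch series): the ResolutionRestrictions helper introduced in VolumeTracingDownsampling.scala above restricts a mag list by the maximum dimension of each mag, mirroring the new minResolution/maxResolution query parameters of the initialData and duplicate routes. This is a minimal sketch that assumes the Point3D and ResolutionRestrictions definitions from the diffs above are on the classpath; the object name and example mags are made up for illustration.

    import com.scalableminds.util.geometry.Point3D
    import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions

    object ResolutionRestrictionsSketch extends App {
      // Mags of a hypothetical volume tracing, finest to coarsest.
      val mags = Seq(Point3D(1, 1, 1), Point3D(2, 2, 1), Point3D(4, 4, 2), Point3D(16, 16, 8))
      // Keep only mags whose maxDim lies in [2, 8], as a controller would build them
      // from minResolution = Some(2) and maxResolution = Some(8).
      val restrictions = ResolutionRestrictions(min = Some(2), max = Some(8))
      // Keeps (2, 2, 1) and (4, 4, 2); (1, 1, 1) and (16, 16, 8) fall outside the allowed range.
      println(restrictions.filterAllowed(mags))
    }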