diff --git a/.circleci/config.yml b/.circleci/config.yml
index c37789c1b4f..4a55f667750 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -2,7 +2,9 @@ version: 2
jobs:
build_test_deploy:
machine:
- image: ubuntu-1604:201903-01
+ image: ubuntu-2004:202111-02
+ docker_layer_caching: true
+ resource_class: large
environment:
USER_NAME: circleci
USER_UID: 1001
@@ -253,7 +255,7 @@ jobs:
curl
-X POST
-H "X-Auth-Token: $RELEASE_API_TOKEN"
- https://kube.scm.io/hooks/remove/webknossos/dev/master?user=CI+%28nightly%29
+ https://kubernetix.scm.io/hooks/remove/webknossos/dev/master?user=CI+%28nightly%29
- run:
name: Wait 3min
command: sleep 180
@@ -263,7 +265,7 @@ jobs:
curl
-X POST
-H "X-Auth-Token: $RELEASE_API_TOKEN"
- https://kube.scm.io/hooks/install/webknossos/dev/master?user=CI+%28nightly%29
+ https://kubernetix.scm.io/hooks/install/webknossos/dev/master?user=CI+%28nightly%29
- run:
name: Install dependencies and sleep at least 3min
command: |
@@ -272,7 +274,7 @@ jobs:
wait
- run:
name: Refresh datasets
- command: curl https://master.webknossos.xyz/data/triggers/checkInboxBlocking?token=secretSampleUserToken
+ command: curl -X POST --fail https://master.webknossos.xyz/data/triggers/checkInboxBlocking?token=$WK_AUTH_TOKEN
- run:
name: Run screenshot-tests
command: |
diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md
index 3f24eaa51e4..5022d3e1276 100644
--- a/CHANGELOG.unreleased.md
+++ b/CHANGELOG.unreleased.md
@@ -11,26 +11,33 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
[Commits](https://github.com/scalableminds/webknossos/compare/22.02.0...HEAD)
### Added
+- Viewport scale bars are now dynamically adjusted to display sensible values. [#6034](https://github.com/scalableminds/webknossos/pull/6034)
- Added the option to make a segment's ID active via the right-click context menu in the segments list. [#5935](https://github.com/scalableminds/webknossos/pull/6006)
- Added a button next to the histogram which adapts the contrast and brightness to the currently visible data. [#5961](https://github.com/scalableminds/webknossos/pull/5961)
- Running uploads can now be cancelled. [#5958](https://github.com/scalableminds/webknossos/pull/5958)
+- Added experimental min-cut feature to split a segment in a volume tracing with two seeds. [#5885](https://github.com/scalableminds/webknossos/pull/5885)
+- Annotations with multiple volume layers can now be uploaded. (Note that merging multiple annotations with multiple volume layers each is not supported.) [#6028](https://github.com/scalableminds/webknossos/pull/6028)
+- Decreased volume annotation download latency by using a lower compression level. [#6036](https://github.com/scalableminds/webknossos/pull/6036)
### Changed
- Upgraded webpack build tool to v5 and all other webpack related dependencies to their latest version. Enabled persistent caching which speeds up server restarts during development as well as production builds. [#5969](https://github.com/scalableminds/webknossos/pull/5969)
- Improved stability when quickly volume-annotating large structures. [#6000](https://github.com/scalableminds/webknossos/pull/6000)
- The front-end API `labelVoxels` returns a promise now which fulfills as soon as the label operation was carried out. [#5955](https://github.com/scalableminds/webknossos/pull/5955)
+- webKnossos no longer tries to reach a fully saved state (with all pending updates sent to the backend) when a save is triggered by a timeout. [#5999](https://github.com/scalableminds/webknossos/pull/5999)
- When changing which layers are visible in an annotation, this setting is persisted in the annotation, so when you share it, viewers will see the same visibility configuration. [#5967](https://github.com/scalableminds/webknossos/pull/5967)
- Downloading public annotations is now also allowed without being authenticated. [#6001](https://github.com/scalableminds/webknossos/pull/6001)
- Downloaded volume annotation layers no longer produce zero-byte zipfiles but rather a valid header-only zip file with no contents. [#6022](https://github.com/scalableminds/webknossos/pull/6022)
- Changed a number of API routes from GET to POST to avoid unwanted side effects. [#6023](https://github.com/scalableminds/webknossos/pull/6023)
- Removed unused datastore route `checkInbox` (use `checkInboxBlocking` instead). [#6023](https://github.com/scalableminds/webknossos/pull/6023)
+- Migrated to Google Analytics 4. [#6031](https://github.com/scalableminds/webknossos/pull/6031)
### Fixed
- Fixed volume-related bugs which could corrupt the volume data in certain scenarios. [#5955](https://github.com/scalableminds/webknossos/pull/5955)
- Fixed the placeholder resolution computation for anisotropic layers with missing base resolutions. [#5983](https://github.com/scalableminds/webknossos/pull/5983)
- Fixed a bug where ad-hoc meshes were computed for a mapping, although it was disabled. [#5982](https://github.com/scalableminds/webknossos/pull/5982)
- Fixed a bug where volume annotation downloads would sometimes contain truncated zips. [#6009](https://github.com/scalableminds/webknossos/pull/6009)
-
+- Fixed a bug where downloaded multi-layer volume annotations would have the wrong data.zip filenames. [#6028](https://github.com/scalableminds/webknossos/pull/6028)
+- Fixed a bug which could cause an error message to appear when saving. [#6052](https://github.com/scalableminds/webknossos/pull/6052)
### Removed
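
The download-latency entry above (#6036) corresponds to the Deflater.BEST_SPEED change in AnnotationIOController further down. A minimal, self-contained sketch of the underlying java.util.zip mechanism — the PR sets the level on its own ZipIO wrapper, while this sketch uses a raw ZipOutputStream; file name and payload are illustrative:

import java.io.FileOutputStream
import java.util.zip.{Deflater, ZipEntry, ZipOutputStream}

object ZipLevelSketch extends App {
  val out = new ZipOutputStream(new FileOutputStream("/tmp/example-data.zip"))
  out.setLevel(Deflater.BEST_SPEED) // level 1: fastest deflate, slightly larger zips
  out.putNextEntry(new ZipEntry("data.bin"))
  out.write(Array.fill[Byte](1024)(0)) // dummy payload
  out.closeEntry()
  out.close()
}
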
diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md
index 787aa1daaaa..ff33b046042 100644
--- a/MIGRATIONS.unreleased.md
+++ b/MIGRATIONS.unreleased.md
@@ -7,6 +7,7 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md).
## Unreleased
[Commits](https://github.com/scalableminds/webknossos/compare/22.02.0...HEAD)
+- If used, the config field `googleAnalytics.trackingId` needs to be changed to a [GA4 measurement ID](https://support.google.com/analytics/answer/10089681).
### Postgres Evolutions:
- [081-annotation-viewconfiguration.sql](conf/evolutions/081-annotation-viewconfiguration.sql)
diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala
index fa59758cdb6..4a8e07a3b6f 100755
--- a/app/controllers/AnnotationIOController.scala
+++ b/app/controllers/AnnotationIOController.scala
@@ -1,6 +1,7 @@
package controllers
import java.io.{BufferedOutputStream, File, FileOutputStream}
+import java.util.zip.Deflater
import akka.actor.ActorSystem
import akka.stream.Materializer
@@ -11,7 +12,12 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils}
import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings}
import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings}
import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits
-import com.scalableminds.webknossos.datastore.models.datasource.{AbstractSegmentationLayer, SegmentationLayer}
+import com.scalableminds.webknossos.datastore.models.datasource.{
+ AbstractSegmentationLayer,
+ DataLayerLike,
+ GenericDataSource,
+ SegmentationLayer
+}
import com.scalableminds.webknossos.tracingstore.tracings.TracingType
import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingDefaults
import com.typesafe.scalalogging.LazyLogging
@@ -20,8 +26,8 @@ import javax.inject.Inject
import models.analytics.{AnalyticsService, DownloadAnnotationEvent, UploadAnnotationEvent}
import models.annotation.AnnotationState._
import models.annotation._
-import models.annotation.nml.NmlResults.NmlParseResult
-import models.annotation.nml.{NmlResults, NmlService, NmlWriter}
+import models.annotation.nml.NmlResults.{NmlParseResult, NmlParseSuccess}
+import models.annotation.nml.{NmlResults, NmlWriter}
import models.binary.{DataSet, DataSetDAO, DataSetService}
import models.organization.OrganizationDAO
import models.project.ProjectDAO
@@ -53,7 +59,7 @@ class AnnotationIOController @Inject()(
analyticsService: AnalyticsService,
sil: Silhouette[WkEnv],
provider: AnnotationInformationProvider,
- nmlService: NmlService)(implicit ec: ExecutionContext, val materializer: Materializer)
+ annotationUploadService: AnnotationUploadService)(implicit ec: ExecutionContext, val materializer: Materializer)
extends Controller
with FoxImplicits
with ProtoGeometryImplicits
@@ -64,7 +70,10 @@ class AnnotationIOController @Inject()(
value =
"""Upload NML(s) or ZIP(s) of NML(s) to create a new explorative annotation.
Expects:
- - As file attachment: any number of NML files or ZIP files containing NMLs, optionally with at most one volume data ZIP referenced from an NML in a ZIP
+ - As file attachment:
+ - Any number of NML files or ZIP files containing NMLs, optionally with volume data ZIPs referenced from an NML in a ZIP
+ - If multiple annotations are uploaded, they are merged into one.
+ - This is not supported if any of the annotations has multiple volume layers.
- As form parameter: createGroupForEachFile [String] should be one of "true" or "false"
- If "true": in merged annotation, create tree group wrapping the trees of each file
- If "false": in merged annotation, rename trees with the respective file name as prefix""",
@@ -86,42 +95,35 @@ Expects:
val overwritingDataSetName: Option[String] =
request.body.dataParts.get("datasetName").flatMap(_.headOption)
val attachedFiles = request.body.files.map(f => (f.ref.path.toFile, f.filename))
- val parsedFiles = nmlService.extractFromFiles(attachedFiles, useZipName = true, overwritingDataSetName)
- val tracingsProcessed = nmlService.wrapOrPrefixTrees(parsedFiles.parseResults, shouldCreateGroupForEachFile)
-
- val parseSuccesses: List[NmlParseResult] = tracingsProcessed.filter(_.succeeded)
+ val parsedFiles =
+ annotationUploadService.extractFromFiles(attachedFiles, useZipName = true, overwritingDataSetName)
+      val parsedFilesWrapped =
+        annotationUploadService.wrapOrPrefixTrees(parsedFiles.parseResults, shouldCreateGroupForEachFile)
+      val parseResultsFiltered: List[NmlParseResult] = parsedFilesWrapped.filter(_.succeeded)
- if (parseSuccesses.isEmpty) {
+ if (parseResultsFiltered.isEmpty) {
returnError(parsedFiles)
} else {
- val (skeletonTracings, volumeTracingsWithDataLocations) = extractTracings(parseSuccesses)
- val name = nameForUploaded(parseSuccesses.map(_.fileName))
- val description = descriptionForNMLs(parseSuccesses.map(_.description))
-
for {
- _ <- bool2Fox(skeletonTracings.nonEmpty || volumeTracingsWithDataLocations.nonEmpty) ?~> "nml.file.noFile"
- dataSet <- findDataSetForUploadedAnnotations(skeletonTracings, volumeTracingsWithDataLocations.map(_._1))
+ parseSuccesses <- Fox.serialCombined(parseResultsFiltered)(r => r.toSuccessBox)
+ name = nameForUploaded(parseResultsFiltered.map(_.fileName))
+ description = descriptionForNMLs(parseResultsFiltered.map(_.description))
+ _ <- assertNonEmpty(parseSuccesses)
+ skeletonTracings = parseSuccesses.flatMap(_.skeletonTracing)
+ // Create a list of volume layers for each uploaded (non-skeleton-only) annotation.
+ // This is what determines the merging strategy for volume layers
+ volumeLayersGroupedRaw = parseSuccesses.map(_.volumeLayers).filter(_.nonEmpty)
+ dataSet <- findDataSetForUploadedAnnotations(skeletonTracings,
+ volumeLayersGroupedRaw.flatten.map(_.tracing))
+ volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataSet)
tracingStoreClient <- tracingStoreService.clientFor(dataSet)
- mergedVolumeTracingIdOpt <- Fox.runOptional(volumeTracingsWithDataLocations.headOption) { _ =>
- for {
- volumeTracingsAdapted <- Fox.serialCombined(volumeTracingsWithDataLocations)(v =>
- adaptPropertiesToFallbackLayer(v._1, dataSet))
- mergedIdOpt <- tracingStoreClient.mergeVolumeTracingsByContents(
- VolumeTracings(volumeTracingsAdapted.map(v => VolumeTracingOpt(Some(v)))),
- volumeTracingsWithDataLocations.map(t => parsedFiles.otherFiles.get(t._2).map(_.path.toFile)),
- persistTracing = true
- )
- } yield mergedIdOpt
- }
- mergedSkeletonTracingIdOpt <- Fox.runOptional(skeletonTracings.headOption) { _ =>
- tracingStoreClient.mergeSkeletonTracingsByContents(
- SkeletonTracings(skeletonTracings.map(t => SkeletonTracingOpt(Some(t)))),
- persistTracing = true)
- }
- annotationLayers <- AnnotationLayer.layersFromIds(mergedSkeletonTracingIdOpt, mergedVolumeTracingIdOpt)
+ mergedVolumeLayers <- mergeAndSaveVolumeLayers(volumeLayersGrouped,
+ tracingStoreClient,
+ parsedFiles.otherFiles)
+ mergedSkeletonLayers <- mergeAndSaveSkeletonLayers(skeletonTracings, tracingStoreClient)
annotation <- annotationService.createFrom(request.identity,
dataSet,
- annotationLayers,
+ mergedSkeletonLayers ::: mergedVolumeLayers,
AnnotationType.Explorational,
name,
description)
@@ -135,6 +137,55 @@ Expects:
}
}
+ private def mergeAndSaveVolumeLayers(volumeLayersGrouped: Seq[List[UploadedVolumeLayer]],
+ client: WKRemoteTracingStoreClient,
+ otherFiles: Map[String, TemporaryFile]): Fox[List[AnnotationLayer]] = {
+ if (volumeLayersGrouped.isEmpty) return Fox.successful(List())
+ if (volumeLayersGrouped.length > 1 && volumeLayersGrouped.exists(_.length > 1))
+ return Fox.failure("Cannot merge multiple annotations that each have multiple volume layers.")
+ if (volumeLayersGrouped.length == 1) { // Just one annotation was uploaded, keep its layers separate
+ Fox.serialCombined(volumeLayersGrouped.toList.flatten) { uploadedVolumeLayer =>
+ for {
+ savedTracingId <- client.saveVolumeTracing(uploadedVolumeLayer.tracing,
+ uploadedVolumeLayer.getDataZipFrom(otherFiles))
+ } yield
+ AnnotationLayer(
+ savedTracingId,
+ AnnotationLayerType.Volume,
+ uploadedVolumeLayer.name
+ )
+ }
+    } else { // Multiple annotations with at most one volume layer each were uploaded; merge those volume layers into one
+ val uploadedVolumeLayersFlat = volumeLayersGrouped.toList.flatten
+ for {
+ mergedTracingId <- client.mergeVolumeTracingsByContents(
+ VolumeTracings(uploadedVolumeLayersFlat.map(v => VolumeTracingOpt(Some(v.tracing)))),
+ uploadedVolumeLayersFlat.map(v => v.getDataZipFrom(otherFiles)),
+ persistTracing = true
+ )
+ } yield
+ List(
+ AnnotationLayer(
+ mergedTracingId,
+ AnnotationLayerType.Volume,
+ None
+ ))
+ }
+ }
+
+ private def mergeAndSaveSkeletonLayers(skeletonTracings: List[SkeletonTracing],
+ tracingStoreClient: WKRemoteTracingStoreClient): Fox[List[AnnotationLayer]] = {
+ if (skeletonTracings.isEmpty) return Fox.successful(List())
+ for {
+ mergedTracingId <- tracingStoreClient.mergeSkeletonTracingsByContents(
+ SkeletonTracings(skeletonTracings.map(t => SkeletonTracingOpt(Some(t)))),
+ persistTracing = true)
+ } yield List(AnnotationLayer(mergedTracingId, AnnotationLayerType.Skeleton, None))
+ }
+
+ private def assertNonEmpty(parseSuccesses: List[NmlParseSuccess]) =
+ bool2Fox(parseSuccesses.exists(p => p.skeletonTracing.nonEmpty || p.volumeLayers.nonEmpty)) ?~> "nml.file.noFile"
+
private def findDataSetForUploadedAnnotations(
skeletonTracings: List[SkeletonTracing],
volumeTracings: List[VolumeTracing])(implicit mp: MessagesProvider, ctx: DBAccessContext): Fox[DataSet] =
@@ -173,14 +224,6 @@ Expects:
Future.successful(JsonBadRequest(Messages("nml.file.noFile")))
}
- private def extractTracings(
- parseSuccesses: List[NmlParseResult]): (List[SkeletonTracing], List[(VolumeTracing, String)]) = {
- val tracings = parseSuccesses.flatMap(_.bothTracingOpts)
- val skeletons = tracings.flatMap(_._1)
- val volumes = tracings.flatMap(_._2)
- (skeletons, volumes)
- }
-
private def assertAllOnSameDataSet(skeletons: List[SkeletonTracing], volumes: List[VolumeTracing]): Fox[String] =
for {
dataSetName <- volumes.headOption.map(_.dataSetName).orElse(skeletons.headOption.map(_.dataSetName)).toFox
@@ -197,9 +240,23 @@ Expects:
} yield organizationNames.headOption
}
- private def adaptPropertiesToFallbackLayer(volumeTracing: VolumeTracing, dataSet: DataSet): Fox[VolumeTracing] =
+ private def adaptVolumeTracingsToFallbackLayer(volumeLayersGrouped: List[List[UploadedVolumeLayer]],
+ dataSet: DataSet): Fox[List[List[UploadedVolumeLayer]]] =
for {
dataSource <- dataSetService.dataSourceFor(dataSet).flatMap(_.toUsable)
+ allAdapted <- Fox.serialCombined(volumeLayersGrouped) { volumeLayers =>
+ Fox.serialCombined(volumeLayers) { volumeLayer =>
+ for {
+ tracingAdapted <- adaptPropertiesToFallbackLayer(volumeLayer.tracing, dataSource)
+ } yield volumeLayer.copy(tracing = tracingAdapted)
+ }
+ }
+ } yield allAdapted
+
+ private def adaptPropertiesToFallbackLayer[T <: DataLayerLike](volumeTracing: VolumeTracing,
+ dataSource: GenericDataSource[T]): Fox[VolumeTracing] =
+ for {
+ _ <- Fox.successful(())
fallbackLayer = dataSource.dataLayers.flatMap {
case layer: SegmentationLayer if volumeTracing.fallbackLayer contains layer.name => Some(layer)
case layer: AbstractSegmentationLayer if volumeTracing.fallbackLayer contains layer.name => Some(layer)
@@ -320,7 +377,8 @@ Expects:
_ = fetchedVolumeLayers.zipWithIndex.map {
case (volumeLayer, index) =>
volumeLayer.volumeDataOpt.foreach { volumeData =>
- val dataZipName = volumeLayer.volumeDataZipName(index, fetchedSkeletonLayers.length == 1)
+ val dataZipName = volumeLayer.volumeDataZipName(index, fetchedVolumeLayers.length == 1)
+ zipper.stream.setLevel(Deflater.BEST_SPEED)
zipper.addFileFromBytes(dataZipName, volumeData)
}
}
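
For illustration, a minimal sketch of the merging rule implemented by mergeAndSaveVolumeLayers above, using stand-in types rather than the actual webKnossos classes; each inner list holds the volume layers of one uploaded annotation:

object MergeRuleSketch extends App {
  case class VolumeLayer(name: Option[String])

  sealed trait MergeResult
  final case class KeepSeparate(layers: List[VolumeLayer]) extends MergeResult
  final case class MergeIntoOne(layers: List[VolumeLayer]) extends MergeResult
  case object CannotMerge extends MergeResult

  def mergeStrategy(grouped: List[List[VolumeLayer]]): MergeResult =
    if (grouped.isEmpty) KeepSeparate(Nil) // nothing to do
    else if (grouped.length > 1 && grouped.exists(_.length > 1))
      CannotMerge // several uploads, at least one with multiple volume layers
    else if (grouped.length == 1)
      KeepSeparate(grouped.head) // single upload: keep its layers separate
    else
      MergeIntoOne(grouped.flatten) // several single-layer uploads: merge into one layer

  // Two single-layer uploads get merged; a single two-layer upload keeps both layers.
  println(mergeStrategy(List(List(VolumeLayer(None)), List(VolumeLayer(None)))))
  println(mergeStrategy(List(List(VolumeLayer(Some("a")), VolumeLayer(Some("b"))))))
}
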
diff --git a/app/controllers/DataSetController.scala b/app/controllers/DataSetController.scala
index f859a53b3d3..b94f48a4b04 100755
--- a/app/controllers/DataSetController.scala
+++ b/app/controllers/DataSetController.scala
@@ -2,7 +2,7 @@ package controllers
import com.mohiva.play.silhouette.api.Silhouette
import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext}
-import com.scalableminds.util.geometry.Point3D
+import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.mvc.Filter
import com.scalableminds.util.tools.DefaultConverters._
import com.scalableminds.util.tools.{Fox, JsonHelper, Math}
@@ -87,7 +87,7 @@ class DataSetController @Inject()(userService: UserService,
Fox.successful(a)
case _ =>
val defaultCenterOpt = dataSet.adminViewConfiguration.flatMap(c =>
- c.get("position").flatMap(jsValue => JsonHelper.jsResultToOpt(jsValue.validate[Point3D])))
+ c.get("position").flatMap(jsValue => JsonHelper.jsResultToOpt(jsValue.validate[Vec3Int])))
val defaultZoomOpt = dataSet.adminViewConfiguration.flatMap(c =>
c.get("zoom").flatMap(jsValue => JsonHelper.jsResultToOpt(jsValue.validate[Double])))
dataSetService
diff --git a/app/controllers/JobsController.scala b/app/controllers/JobsController.scala
index fc3c5f1ede7..457d6485e2f 100644
--- a/app/controllers/JobsController.scala
+++ b/app/controllers/JobsController.scala
@@ -150,6 +150,33 @@ class JobsController @Inject()(jobDAO: JobDAO,
}
}
+ def runInferNeuronsJob(organizationName: String,
+ dataSetName: String,
+ layerName: String,
+ bbox: String): Action[AnyContent] =
+ sil.SecuredAction.async { implicit request =>
+ log(Some(slackNotificationService.noticeFailedJobRequest)) {
+ for {
+ organization <- organizationDAO.findOneByName(organizationName) ?~> Messages("organization.notFound",
+ organizationName)
+ _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferNeurons.notAllowed.organization" ~> FORBIDDEN
+ dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organization._id) ?~> Messages(
+ "dataSet.notFound",
+ dataSetName) ~> NOT_FOUND
+ command = "infer_neurons"
+ commandArgs = Json.obj(
+ "organization_name" -> organizationName,
+ "dataset_name" -> dataSetName,
+ "layer_name" -> layerName,
+ "webknossos_token" -> RpcTokenHolder.webKnossosToken,
+ "bbox" -> bbox,
+ )
+ job <- jobService.submitJob(command, commandArgs, request.identity, dataSet._dataStore) ?~> "job.couldNotRunNeuronInferral"
+ js <- jobService.publicWrites(job)
+ } yield Ok(js)
+ }
+ }
+
def runGlobalizeFloodfills(
organizationName: String,
dataSetName: String,
diff --git a/app/controllers/TaskController.scala b/app/controllers/TaskController.scala
index 0f6b72b6732..52e57b9aded 100755
--- a/app/controllers/TaskController.scala
+++ b/app/controllers/TaskController.scala
@@ -19,9 +19,8 @@ import io.swagger.annotations.{
ApiResponses
}
import javax.inject.Inject
-import models.annotation._
+import models.annotation.{AnnotationUploadService, _}
import models.annotation.nml.NmlResults.TracingBoxContainer
-import models.annotation.nml.NmlService
import models.project.ProjectDAO
import models.task._
import models.user._
@@ -42,7 +41,7 @@ class TaskController @Inject()(taskCreationService: TaskCreationService,
userService: UserService,
taskDAO: TaskDAO,
taskService: TaskService,
- nmlService: NmlService,
+ nmlService: AnnotationUploadService,
sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers)
extends Controller
with ResultBox
diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala
index dc0041a434b..918a01c56fd 100755
--- a/app/models/annotation/AnnotationService.scala
+++ b/app/models/annotation/AnnotationService.scala
@@ -5,13 +5,18 @@ import java.io.{BufferedOutputStream, File, FileOutputStream}
import akka.actor.ActorSystem
import akka.stream.Materializer
import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext}
-import com.scalableminds.util.geometry.{BoundingBox, Point3D, Scale, Vector3D}
+import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.io.ZipIO
import com.scalableminds.util.mvc.Formatter
import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits, TextUtils}
import com.scalableminds.webknossos.datastore.SkeletonTracing._
import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings}
-import com.scalableminds.webknossos.datastore.geometry.Color
+import com.scalableminds.webknossos.datastore.geometry.{
+ ColorProto,
+ NamedBoundingBoxProto,
+ Vec3DoubleProto,
+ Vec3IntProto
+}
import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits, SkeletonTracingDefaults}
import com.scalableminds.webknossos.datastore.models.datasource.{
ElementClass,
@@ -53,7 +58,7 @@ case class DownloadAnnotation(skeletonTracingIdOpt: Option[String],
volumeTracingOpt: Option[VolumeTracing],
volumeDataOpt: Option[Array[Byte]],
name: String,
- scaleOpt: Option[Scale],
+ scaleOpt: Option[Vec3Double],
annotation: Annotation,
user: User,
taskOpt: Option[Task],
@@ -62,10 +67,10 @@ case class DownloadAnnotation(skeletonTracingIdOpt: Option[String],
// Used to pass duplicate properties when creating a new tracing to avoid masking them.
// Uses the proto-generated geometry classes, hence the full qualifiers.
case class RedundantTracingProperties(
- editPosition: com.scalableminds.webknossos.datastore.geometry.Point3D,
- editRotation: com.scalableminds.webknossos.datastore.geometry.Vector3D,
+ editPosition: Vec3IntProto,
+ editRotation: Vec3DoubleProto,
zoomLevel: Double,
- userBoundingBoxes: Seq[com.scalableminds.webknossos.datastore.geometry.NamedBoundingBox]
+ userBoundingBoxes: Seq[NamedBoundingBoxProto]
)
class AnnotationService @Inject()(
@@ -121,8 +126,8 @@ class AnnotationService @Inject()(
organizationName: String,
fallbackLayer: Option[SegmentationLayer],
boundingBox: Option[BoundingBox] = None,
- startPosition: Option[Point3D] = None,
- startRotation: Option[Vector3D] = None,
+ startPosition: Option[Vec3Int] = None,
+ startRotation: Option[Vec3Double] = None,
resolutionRestrictions: ResolutionRestrictions
): Fox[VolumeTracing] = {
val resolutions = VolumeTracingDownsampling.resolutionsForVolumeTracing(dataSource, fallbackLayer)
@@ -135,8 +140,8 @@ class AnnotationService @Inject()(
boundingBoxToProto(boundingBox.getOrElse(dataSource.boundingBox)),
System.currentTimeMillis(),
dataSource.id.name,
- point3DToProto(startPosition.getOrElse(dataSource.center)),
- vector3DToProto(startRotation.getOrElse(vector3DFromProto(VolumeTracingDefaults.editRotation))),
+ vec3IntToProto(startPosition.getOrElse(dataSource.center)),
+ vec3DoubleToProto(startRotation.getOrElse(vec3DoubleFromProto(VolumeTracingDefaults.editRotation))),
elementClassToProto(
fallbackLayer.map(layer => layer.elementClass).getOrElse(VolumeTracingDefaults.elementClass)),
fallbackLayer.map(_.name),
@@ -144,7 +149,7 @@ class AnnotationService @Inject()(
0,
VolumeTracingDefaults.zoomLevel,
organizationName = Some(organizationName),
- resolutions = resolutionsRestricted.map(point3DToProto)
+ resolutions = resolutionsRestricted.map(vec3IntToProto)
)
}
@@ -248,7 +253,7 @@ class AnnotationService @Inject()(
s.editRotation,
s.zoomLevel,
s.userBoundingBoxes ++ s.userBoundingBox.map(
- com.scalableminds.webknossos.datastore.geometry.NamedBoundingBox(0, None, None, None, _))
+ com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _))
)
case Right(v) =>
RedundantTracingProperties(
@@ -256,7 +261,7 @@ class AnnotationService @Inject()(
v.editRotation,
v.zoomLevel,
v.userBoundingBoxes ++ v.userBoundingBox.map(
- com.scalableminds.webknossos.datastore.geometry.NamedBoundingBox(0, None, None, None, _))
+ com.scalableminds.webknossos.datastore.geometry.NamedBoundingBoxProto(0, None, None, None, _))
)
}
@@ -430,14 +435,14 @@ class AnnotationService @Inject()(
def createSkeletonTracingBase(dataSetName: String,
boundingBox: Option[BoundingBox],
- startPosition: Point3D,
- startRotation: Vector3D): SkeletonTracing = {
+ startPosition: Vec3Int,
+ startRotation: Vec3Double): SkeletonTracing = {
val initialNode = NodeDefaults.createInstance.withId(1).withPosition(startPosition).withRotation(startRotation)
val initialTree = Tree(
1,
Seq(initialNode),
Seq.empty,
- Some(Color(1, 0, 0, 1)),
+ Some(ColorProto(1, 0, 0, 1)),
Seq(BranchPoint(initialNode.id, System.currentTimeMillis())),
Seq.empty,
"",
@@ -458,8 +463,8 @@ class AnnotationService @Inject()(
def createVolumeTracingBase(dataSetName: String,
organizationId: ObjectId,
boundingBox: Option[BoundingBox],
- startPosition: Point3D,
- startRotation: Vector3D,
+ startPosition: Vec3Int,
+ startRotation: Vec3Double,
volumeShowFallbackLayer: Boolean,
resolutionRestrictions: ResolutionRestrictions)(implicit ctx: DBAccessContext,
m: MessagesProvider): Fox[VolumeTracing] =
@@ -595,7 +600,7 @@ class AnnotationService @Inject()(
private def getTracingsScalesAndNamesFor(annotations: List[Annotation], skipVolumeData: Boolean)(
implicit ctx: DBAccessContext): Fox[List[List[DownloadAnnotation]]] = {
- def getSingleDownloadAnnotation(annotation: Annotation, scaleOpt: Option[Scale]) =
+ def getSingleDownloadAnnotation(annotation: Annotation, scaleOpt: Option[Vec3Double]) =
for {
user <- userService.findOneById(annotation._user, useCache = true) ?~> "user.notFound"
taskOpt <- Fox.runOptional(annotation._task)(taskDAO.findOne) ?~> "task.notFound"
diff --git a/app/models/annotation/nml/NmlService.scala b/app/models/annotation/AnnotationUploadService.scala
similarity index 85%
rename from app/models/annotation/nml/NmlService.scala
rename to app/models/annotation/AnnotationUploadService.scala
index 06d5923cf46..c709b9b4217 100644
--- a/app/models/annotation/nml/NmlService.scala
+++ b/app/models/annotation/AnnotationUploadService.scala
@@ -1,21 +1,28 @@
-package models.annotation.nml
+package models.annotation
import java.io.{File, FileInputStream, InputStream}
import java.nio.file.{Files, StandardCopyOption}
import com.scalableminds.util.io.ZipIO
import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, TreeGroup}
+import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing
import com.typesafe.scalalogging.LazyLogging
import javax.inject.Inject
import models.annotation.nml.NmlResults._
+import models.annotation.nml.{NmlParser, NmlResults}
import net.liftweb.common.{Box, Empty, Failure, Full}
import net.liftweb.util.Helpers.tryo
import play.api.i18n.MessagesProvider
import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator}
-class NmlService @Inject()(temporaryFileCreator: TemporaryFileCreator) extends LazyLogging {
+case class UploadedVolumeLayer(tracing: VolumeTracing, dataZipLocation: String, name: Option[String]) {
+ def getDataZipFrom(otherFiles: Map[String, TemporaryFile]): Option[File] =
+ otherFiles.get(dataZipLocation).map(_.path.toFile)
+}
+
+class AnnotationUploadService @Inject()(temporaryFileCreator: TemporaryFileCreator) extends LazyLogging {
- def extractFromNml(file: File, name: String, overwritingDataSetName: Option[String], isTaskUpload: Boolean)(
+ private def extractFromNml(file: File, name: String, overwritingDataSetName: Option[String], isTaskUpload: Boolean)(
implicit m: MessagesProvider): NmlParseResult =
extractFromNml(new FileInputStream(file), name, overwritingDataSetName, isTaskUpload)
@@ -31,8 +38,8 @@ class NmlService @Inject()(temporaryFileCreator: TemporaryFileCreator) extends L
isTaskUpload: Boolean,
basePath: Option[String] = None)(implicit m: MessagesProvider): NmlParseResult =
NmlParser.parse(name, inputStream, overwritingDataSetName, isTaskUpload, basePath) match {
- case Full((skeletonTracing, volumeTracingWithDataLocation, description)) =>
- NmlParseSuccess(name, skeletonTracing, volumeTracingWithDataLocation, description)
+ case Full((skeletonTracing, uploadedVolumeLayers, description)) =>
+ NmlParseSuccess(name, skeletonTracing, uploadedVolumeLayers, description)
case Failure(msg, _, chain) => NmlParseFailure(name, msg + chain.map(_ => formatChain(chain)).getOrElse(""))
case Empty => NmlParseEmpty(name)
}
@@ -75,8 +82,8 @@ class NmlService @Inject()(temporaryFileCreator: TemporaryFileCreator) extends L
if (parseResults.length > 1) {
parseResults.map {
- case NmlParseSuccess(name, Some(skeletonTracing), volumeTracingOpt, description) =>
- NmlParseSuccess(name, Some(renameTrees(name, skeletonTracing)), volumeTracingOpt, description)
+ case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description) =>
+ NmlParseSuccess(name, Some(renameTrees(name, skeletonTracing)), uploadedVolumeLayers, description)
case r => r
}
} else {
@@ -97,8 +104,8 @@ class NmlService @Inject()(temporaryFileCreator: TemporaryFileCreator) extends L
}
parseResults.map {
- case NmlParseSuccess(name, Some(skeletonTracing), volumeTracingOpt, description) =>
- NmlParseSuccess(name, Some(wrapTreesInGroup(name, skeletonTracing)), volumeTracingOpt, description)
+ case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description) =>
+ NmlParseSuccess(name, Some(wrapTreesInGroup(name, skeletonTracing)), uploadedVolumeLayers, description)
case r => r
}
}
diff --git a/app/models/annotation/nml/NmlParser.scala b/app/models/annotation/nml/NmlParser.scala
index c9fd209ca86..e4d24a2a721 100755
--- a/app/models/annotation/nml/NmlParser.scala
+++ b/app/models/annotation/nml/NmlParser.scala
@@ -5,14 +5,14 @@ import java.io.InputStream
import com.scalableminds.webknossos.datastore.models.datasource.ElementClass
import com.scalableminds.webknossos.datastore.SkeletonTracing._
import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing
-import com.scalableminds.webknossos.datastore.geometry.{Color, NamedBoundingBox}
+import com.scalableminds.webknossos.datastore.geometry.{ColorProto, NamedBoundingBoxProto}
import com.scalableminds.webknossos.tracingstore.tracings.ColorGenerator
import com.scalableminds.webknossos.tracingstore.tracings.skeleton.{MultiComponentTreeSplitter, TreeValidator}
-import com.scalableminds.webknossos.tracingstore.tracings.volume.Volume
-import com.scalableminds.util.geometry.{BoundingBox, Point3D, Vector3D}
+import com.scalableminds.util.geometry.{BoundingBox, Vec3Int, Vec3Double}
import com.scalableminds.util.tools.ExtendedTypes.{ExtendedDouble, ExtendedString}
import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits, SkeletonTracingDefaults}
import com.typesafe.scalalogging.LazyLogging
+import models.annotation.UploadedVolumeLayer
import net.liftweb.common.Box._
import net.liftweb.common.{Box, Empty, Failure}
import play.api.i18n.{Messages, MessagesProvider}
@@ -30,13 +30,12 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
private val DEFAULT_INTERPOLATION = false
private val DEFAULT_TIMESTAMP = 0L
- @SuppressWarnings(Array("TraversableHead")) //We check if volumes are empty before accessing the head
def parse(name: String,
nmlInputStream: InputStream,
overwritingDataSetName: Option[String],
isTaskUpload: Boolean,
basePath: Option[String] = None)(
- implicit m: MessagesProvider): Box[(Option[SkeletonTracing], Option[(VolumeTracing, String)], String)] =
+ implicit m: MessagesProvider): Box[(Option[SkeletonTracing], List[UploadedVolumeLayer], String)] =
try {
val data = XML.load(nmlInputStream)
for {
@@ -71,30 +70,31 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
logger.debug(s"Parsed NML file. Trees: ${treesSplit.size}, Volumes: ${volumes.size}")
- val volumeTracingWithDataLocation =
- if (volumes.isEmpty) None
- else
- Some(
- (VolumeTracing(
- None,
- boundingBoxToProto(taskBoundingBox.getOrElse(BoundingBox.empty)),
- timestamp,
- dataSetName,
- editPosition,
- editRotation,
- ElementClass.uint32,
- volumes.head.fallbackLayer,
- 0,
- 0,
- zoomLevel,
- None,
- userBoundingBoxes,
- organizationName
- ),
- basePath.getOrElse("") + volumes.head.location)
+ val volumeLayers: List[UploadedVolumeLayer] =
+ volumes.toList.map { v =>
+ UploadedVolumeLayer(
+ VolumeTracing(
+ None,
+ boundingBoxToProto(taskBoundingBox.getOrElse(BoundingBox.empty)),
+ timestamp,
+ dataSetName,
+ editPosition,
+ editRotation,
+ ElementClass.uint32,
+ v.fallbackLayerName,
+ 0,
+ 0,
+ zoomLevel,
+ None,
+ userBoundingBoxes,
+ organizationName
+ ),
+ basePath.getOrElse("") + v.dataZipPath,
+ v.name
)
+ }
- val skeletonTracing =
+ val skeletonTracingOpt: Option[SkeletonTracing] =
if (treesSplit.isEmpty) None
else
Some(
@@ -115,7 +115,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
)
)
- (skeletonTracing, volumeTracingWithDataLocation, description)
+ (skeletonTracingOpt, volumeLayers, description)
}
} catch {
case e: org.xml.sax.SAXParseException if e.getMessage.startsWith("Premature end of file") =>
@@ -146,8 +146,14 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
} yield TreeGroup(name, id, children)
}
- private def extractVolumes(volumeNodes: NodeSeq): immutable.Seq[Volume] =
- volumeNodes.map(node => Volume(getSingleAttribute(node, "location"), getSingleAttributeOpt(node, "fallbackLayer")))
+ private def extractVolumes(volumeNodes: NodeSeq): immutable.Seq[NmlVolumeTag] =
+ volumeNodes.map(
+ node =>
+ NmlVolumeTag(
+ getSingleAttribute(node, "location"),
+ getSingleAttributeOpt(node, "fallbackLayer"),
+ getSingleAttributeOpt(node, "name")
+ ))
private def parseTrees(treeNodes: NodeSeq,
branchPoints: Map[Int, List[BranchPoint]],
@@ -158,9 +164,9 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
.toSingleBox(Messages("nml.element.invalid", "trees"))
@SuppressWarnings(Array("TraversableHead")) // We check that size == 1 before accessing head
- private def parseBoundingBoxes(boundingBoxNodes: NodeSeq)(implicit m: MessagesProvider): Seq[NamedBoundingBox] =
+ private def parseBoundingBoxes(boundingBoxNodes: NodeSeq)(implicit m: MessagesProvider): Seq[NamedBoundingBoxProto] =
if (boundingBoxNodes.size == 1 && getSingleAttribute(boundingBoxNodes.head, "id").isEmpty) {
- Seq.empty ++ parseBoundingBox(boundingBoxNodes.head).map(NamedBoundingBox(0, None, None, None, _))
+ Seq.empty ++ parseBoundingBox(boundingBoxNodes.head).map(NamedBoundingBoxProto(0, None, None, None, _))
} else {
boundingBoxNodes.flatMap(node => {
val idText = getSingleAttribute(node, "id")
@@ -171,20 +177,20 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
color = parseColor(node)
boundingBox <- parseBoundingBox(node)
nameOpt = if (name.isEmpty) None else Some(name)
- } yield NamedBoundingBox(id, nameOpt, isVisible, color, boundingBox)
+ } yield NamedBoundingBoxProto(id, nameOpt, isVisible, color, boundingBox)
})
}
private def parseTaskBoundingBox(
nodes: NodeSeq,
isTask: Boolean,
- userBoundingBoxes: Seq[NamedBoundingBox]): Option[Either[BoundingBox, NamedBoundingBox]] =
+ userBoundingBoxes: Seq[NamedBoundingBoxProto]): Option[Either[BoundingBox, NamedBoundingBoxProto]] =
nodes.headOption.flatMap(node => parseBoundingBox(node)).map { bb =>
if (isTask) {
Left(bb)
} else {
val newId = if (userBoundingBoxes.isEmpty) 0 else userBoundingBoxes.map(_.id).max + 1
- Right(NamedBoundingBox(newId, Some("task bounding box"), None, Some(getRandomColor), bb))
+ Right(NamedBoundingBoxProto(newId, Some("task bounding box"), None, Some(getRandomColor), bb))
}
}
@@ -196,7 +202,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
width <- getSingleAttribute(node, "width").toIntOpt
height <- getSingleAttribute(node, "height").toIntOpt
depth <- getSingleAttribute(node, "depth").toIntOpt
- } yield BoundingBox(Point3D(topLeftX, topLeftY, topLeftZ), width, height, depth)
+ } yield BoundingBox(Vec3Int(topLeftX, topLeftY, topLeftZ), width, height, depth)
private def parseDataSetName(nodes: NodeSeq): String =
nodes.headOption.map(node => getSingleAttribute(node, "name")).getOrElse("")
@@ -213,10 +219,10 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
private def parseTime(nodes: NodeSeq): Long =
nodes.headOption.flatMap(node => getSingleAttribute(node, "ms").toLongOpt).getOrElse(DEFAULT_TIME)
- private def parseEditPosition(nodes: NodeSeq): Option[Point3D] =
- nodes.headOption.flatMap(parsePoint3D)
+ private def parseEditPosition(nodes: NodeSeq): Option[Vec3Int] =
+ nodes.headOption.flatMap(parseVec3Int)
- private def parseEditRotation(nodes: NodeSeq): Option[Vector3D] =
+ private def parseEditRotation(nodes: NodeSeq): Option[Vec3Double] =
nodes.headOption.flatMap(parseRotationForParams)
private def parseZoomLevel(nodes: NodeSeq) =
@@ -233,7 +239,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
} ?~ Messages("nml.node.id.invalid", "branchpoint", getSingleAttribute(branchPoint, "id"))
}.toList.toSingleBox(Messages("nml.element.invalid", "branchpoints"))
- private def parsePoint3D(node: XMLNode) = {
+ private def parseVec3Int(node: XMLNode) = {
val xText = getSingleAttribute(node, "x")
val yText = getSingleAttribute(node, "y")
val zText = getSingleAttribute(node, "z")
@@ -241,7 +247,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
x <- xText.toIntOpt.orElse(xText.toFloatOpt.map(math.round))
y <- yText.toIntOpt.orElse(yText.toFloatOpt.map(math.round))
z <- zText.toIntOpt.orElse(zText.toFloatOpt.map(math.round))
- } yield Point3D(x, y, z)
+ } yield Vec3Int(x, y, z)
}
private def parseRotationForParams(node: XMLNode) =
@@ -249,14 +255,14 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
rotX <- getSingleAttribute(node, "xRot").toDoubleOpt
rotY <- getSingleAttribute(node, "yRot").toDoubleOpt
rotZ <- getSingleAttribute(node, "zRot").toDoubleOpt
- } yield Vector3D(rotX, rotY, rotZ)
+ } yield Vec3Double(rotX, rotY, rotZ)
private def parseRotationForNode(node: XMLNode) =
for {
rotX <- getSingleAttribute(node, "rotX").toDoubleOpt
rotY <- getSingleAttribute(node, "rotY").toDoubleOpt
rotZ <- getSingleAttribute(node, "rotZ").toDoubleOpt
- } yield Vector3D(rotX, rotY, rotZ)
+ } yield Vec3Double(rotX, rotY, rotZ)
private def parseColorOpt(node: XMLNode) =
for {
@@ -265,7 +271,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
colorGreen <- getSingleAttribute(node, "color.b").toFloatOpt
colorAlpha <- getSingleAttribute(node, "color.a").toFloatOpt
} yield {
- Color(colorRed, colorBlue, colorGreen, colorAlpha)
+ ColorProto(colorRed, colorBlue, colorGreen, colorAlpha)
}
private def parseColor(node: XMLNode) =
@@ -277,7 +283,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
private def parseGroupId(node: XMLNode) =
getSingleAttribute(node, "groupId").toIntOpt
- private def parseVisibility(node: XMLNode, color: Option[Color]): Option[Boolean] =
+ private def parseVisibility(node: XMLNode, color: Option[ColorProto]): Option[Boolean] =
getSingleAttribute(node, "isVisible").toBooleanOpt match {
case Some(isVisible) => Some(isVisible)
case None => color.map(c => !c.a.isNearZero)
@@ -382,7 +388,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener
for {
id <- nodeIdText.toIntOpt ?~ Messages("nml.node.id.invalid", "", nodeIdText)
radius = getSingleAttribute(node, "radius").toFloatOpt.getOrElse(NodeDefaults.radius)
- position <- parsePoint3D(node) ?~ Messages("nml.node.attribute.invalid", "position", id)
+ position <- parseVec3Int(node) ?~ Messages("nml.node.attribute.invalid", "position", id)
} yield {
val viewport = parseViewport(node)
val resolution = parseResolution(node)
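
A small sketch of the rounding fallback used by parseVec3Int above: NML coordinate attributes may be written as floats, so integer parsing falls back to rounding. This assumes the Scala 2.13 string extensions rather than the ExtendedString helpers used in the file:

object RoundedIntSketch extends App {
  // Integer parse with float-rounding fallback, mirroring parseVec3Int.
  def toIntRounded(s: String): Option[Int] =
    s.toIntOption.orElse(s.toFloatOption.map(f => math.round(f)))

  assert(toIntRounded("12") == Some(12))       // plain integer
  assert(toIntRounded("12.7") == Some(13))     // float coordinate, rounded
  assert(toIntRounded("not-a-number").isEmpty) // unparseable -> None
}
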
diff --git a/app/models/annotation/nml/NmlResults.scala b/app/models/annotation/nml/NmlResults.scala
index 54d8ac59b29..a9f0d454770 100644
--- a/app/models/annotation/nml/NmlResults.scala
+++ b/app/models/annotation/nml/NmlResults.scala
@@ -5,6 +5,7 @@ import java.io.File
import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing
import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing
import com.typesafe.scalalogging.LazyLogging
+import models.annotation.UploadedVolumeLayer
import net.liftweb.common.{Box, Empty, Failure, Full}
import play.api.libs.Files.TemporaryFile
@@ -13,8 +14,6 @@ object NmlResults extends LazyLogging {
sealed trait NmlParseResult {
def fileName: String
- def bothTracingOpts: Option[(Option[SkeletonTracing], Option[(VolumeTracing, String)])] = None
-
def description: Option[String] = None
def succeeded: Boolean
@@ -33,14 +32,11 @@ object NmlResults extends LazyLogging {
case class NmlParseSuccess(fileName: String,
skeletonTracing: Option[SkeletonTracing],
- volumeTracingWithDataLocation: Option[(VolumeTracing, String)],
+ volumeLayers: List[UploadedVolumeLayer],
_description: String)
extends NmlParseResult {
def succeeded = true
- override def bothTracingOpts: Option[(Option[SkeletonTracing], Option[(VolumeTracing, String)])] =
- Some((skeletonTracing, volumeTracingWithDataLocation))
-
override def description: Option[String] = Some(_description)
override def withName(name: String): NmlParseResult = this.copy(fileName = name)
@@ -69,6 +65,7 @@ object NmlResults extends LazyLogging {
case _ => false
}
+ // Used in task creation. Can only be used with single-layer volumes
def toBoxes: List[TracingBoxContainer] =
parseResults.map { parseResult =>
val successBox = parseResult.toSuccessBox
@@ -82,11 +79,14 @@ object NmlResults extends LazyLogging {
case _ => Failure("")
}
val volumeBox = successBox match {
- case Full(success) =>
- success.volumeTracingWithDataLocation match {
- case Some((tracing, name)) => Full((tracing, otherFiles.get(name).map(_.path.toFile)))
- case None => Empty
+ case Full(success) if success.volumeLayers.length <= 1 =>
+ success.volumeLayers.headOption match {
+ case Some(UploadedVolumeLayer(tracing, dataZipLocation, _)) =>
+ Full((tracing, otherFiles.get(dataZipLocation).map(_.path.toFile)))
+ case None => Empty
}
+ case Full(success) if success.volumeLayers.length > 1 =>
+ Failure("Cannot create tasks from multi-layer volume annotations.")
case f: Failure => f
case _ => Failure("")
}
diff --git a/app/models/annotation/nml/NmlVolumeTag.scala b/app/models/annotation/nml/NmlVolumeTag.scala
new file mode 100644
index 00000000000..cda53db23ef
--- /dev/null
+++ b/app/models/annotation/nml/NmlVolumeTag.scala
@@ -0,0 +1,3 @@
+package models.annotation.nml
+
+case class NmlVolumeTag(dataZipPath: String, fallbackLayerName: Option[String], name: Option[String])
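
A hedged sketch of how extractVolumes (see the NmlParser diff above) maps a <volume> NML tag onto this case class; the attribute helpers are simplified stand-ins and the sample XML is illustrative (requires the scala-xml module):

import scala.xml.{Node, XML}

object VolumeTagSketch extends App {
  case class NmlVolumeTag(dataZipPath: String, fallbackLayerName: Option[String], name: Option[String])

  // Simplified stand-in for getSingleAttribute/getSingleAttributeOpt.
  private def attrOpt(node: Node, key: String): Option[String] =
    Option((node \ s"@$key").text).filter(_.nonEmpty)

  def parseVolumeTag(node: Node): NmlVolumeTag =
    NmlVolumeTag(
      attrOpt(node, "location").getOrElse(""),
      attrOpt(node, "fallbackLayer"),
      attrOpt(node, "name")
    )

  val node = XML.loadString("""<volume id="1" location="data_1.zip" name="my layer"/>""")
  println(parseVolumeTag(node)) // NmlVolumeTag(data_1.zip,None,Some(my layer))
}
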
diff --git a/app/models/annotation/nml/NmlWriter.scala b/app/models/annotation/nml/NmlWriter.scala
index e5ed036911c..42343641b04 100644
--- a/app/models/annotation/nml/NmlWriter.scala
+++ b/app/models/annotation/nml/NmlWriter.scala
@@ -1,6 +1,6 @@
package models.annotation.nml
-import com.scalableminds.util.geometry.Scale
+import com.scalableminds.util.geometry.Vec3Double
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.util.xml.Xml
import com.scalableminds.webknossos.datastore.SkeletonTracing._
@@ -20,14 +20,14 @@ case class NmlParameters(
dataSetName: String,
organizationName: String,
description: Option[String],
- scale: Option[Scale],
+ scale: Option[Vec3Double],
createdTimestamp: Long,
- editPosition: Point3D,
- editRotation: Vector3D,
+ editPosition: Vec3IntProto,
+ editRotation: Vec3DoubleProto,
zoomLevel: Double,
activeNodeId: Option[Int],
- userBoundingBoxes: Seq[NamedBoundingBox],
- taskBoundingBox: Option[BoundingBox]
+ userBoundingBoxes: Seq[NamedBoundingBoxProto],
+ taskBoundingBox: Option[BoundingBoxProto]
)
class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
@@ -35,7 +35,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
def toNmlStream(annotationLayers: List[FetchedAnnotationLayer],
annotation: Option[Annotation],
- scale: Option[Scale],
+ scale: Option[Vec3Double],
volumeFilename: Option[String],
organizationName: String,
annotationOwner: Option[User],
@@ -57,7 +57,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
def toNml(annotationLayers: List[FetchedAnnotationLayer],
annotation: Option[Annotation],
- scale: Option[Scale],
+ scale: Option[Vec3Double],
volumeFilename: Option[String],
organizationName: String,
annotationOwner: Option[User],
@@ -93,7 +93,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
volumeLayers: List[FetchedAnnotationLayer],
annotation: Option[Annotation],
organizationName: String,
- scale: Option[Scale]): Fox[NmlParameters] =
+ scale: Option[Vec3Double]): Fox[NmlParameters] =
for {
parameterSourceAnnotationLayer <- selectLayerWithPrecedence(skeletonLayers, volumeLayers)
nmlParameters = parameterSourceAnnotationLayer.tracing match {
@@ -108,7 +108,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
s.editRotation,
s.zoomLevel,
s.activeNodeId,
- s.userBoundingBoxes ++ s.userBoundingBox.map(NamedBoundingBox(0, None, None, None, _)),
+ s.userBoundingBoxes ++ s.userBoundingBox.map(NamedBoundingBoxProto(0, None, None, None, _)),
s.boundingBox
)
case Right(v) =>
@@ -122,7 +122,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
v.editRotation,
v.zoomLevel,
None,
- v.userBoundingBoxes ++ v.userBoundingBox.map(NamedBoundingBox(0, None, None, None, _)),
+ v.userBoundingBoxes ++ v.userBoundingBox.map(NamedBoundingBoxProto(0, None, None, None, _)),
if (annotation.exists(_._task.isDefined)) Some(v.boundingBox) else None
)
}
@@ -310,7 +310,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
}
}
- def writeBoundingBox(b: BoundingBox)(implicit writer: XMLStreamWriter): Unit = {
+ def writeBoundingBox(b: BoundingBoxProto)(implicit writer: XMLStreamWriter): Unit = {
writer.writeAttribute("topLeftX", b.topLeft.x.toString)
writer.writeAttribute("topLeftY", b.topLeft.y.toString)
writer.writeAttribute("topLeftZ", b.topLeft.z.toString)
@@ -319,7 +319,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
writer.writeAttribute("depth", b.depth.toString)
}
- def writeColor(color: Option[Color])(implicit writer: XMLStreamWriter): Unit = {
+ def writeColor(color: Option[ColorProto])(implicit writer: XMLStreamWriter): Unit = {
writer.writeAttribute("color.r", color.map(_.r.toString).getOrElse(""))
writer.writeAttribute("color.g", color.map(_.g.toString).getOrElse(""))
writer.writeAttribute("color.b", color.map(_.b.toString).getOrElse(""))
diff --git a/app/models/binary/DataSet.scala b/app/models/binary/DataSet.scala
index 78b5802e6e9..3bc284a5139 100755
--- a/app/models/binary/DataSet.scala
+++ b/app/models/binary/DataSet.scala
@@ -1,7 +1,7 @@
package models.binary
import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext}
-import com.scalableminds.util.geometry.{BoundingBox, Point3D, Scale}
+import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper}
import com.scalableminds.webknossos.datastore.models.datasource.DataSetViewConfiguration.DataSetViewConfiguration
import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration
@@ -40,7 +40,7 @@ case class DataSet(
isPublic: Boolean,
isUsable: Boolean,
name: String,
- scale: Option[Scale],
+ scale: Option[Vec3Double],
sharingToken: Option[String],
status: String,
logoUrl: Option[String],
@@ -65,15 +65,15 @@ class DataSetDAO @Inject()(sqlClient: SQLClient,
def isDeletedColumn(x: Datasets): Rep[Boolean] = x.isdeleted
- private def parseScaleOpt(literalOpt: Option[String]): Fox[Option[Scale]] = literalOpt match {
+ private def parseScaleOpt(literalOpt: Option[String]): Fox[Option[Vec3Double]] = literalOpt match {
case Some(literal) =>
for {
- scale <- Scale.fromList(parseArrayTuple(literal).map(_.toFloat)) ?~> "could not parse edit position"
+ scale <- Vec3Double.fromList(parseArrayTuple(literal).map(_.toDouble)) ?~> "could not parse dataset scale"
} yield Some(scale)
case None => Fox.successful(None)
}
- private def writeScaleLiteral(scale: Scale): String =
+ private def writeScaleLiteral(scale: Vec3Double): String =
writeStructTuple(List(scale.x, scale.y, scale.z).map(_.toString))
def parse(r: DatasetsRow): Fox[DataSet] =
@@ -339,12 +339,12 @@ class DataSetDAO @Inject()(sqlClient: SQLClient,
class DataSetResolutionsDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext)
extends SimpleSQLDAO(sqlClient) {
- def parseRow(row: DatasetResolutionsRow): Fox[Point3D] =
+ def parseRow(row: DatasetResolutionsRow): Fox[Vec3Int] =
for {
- resolution <- Point3D.fromList(parseArrayTuple(row.resolution).map(_.toInt)) ?~> "could not parse resolution"
+ resolution <- Vec3Int.fromList(parseArrayTuple(row.resolution).map(_.toInt)) ?~> "could not parse resolution"
} yield resolution
- def findDataResolutionForLayer(dataSetId: ObjectId, dataLayerName: String): Fox[List[Point3D]] =
+ def findDataResolutionForLayer(dataSetId: ObjectId, dataLayerName: String): Fox[List[Vec3Int]] =
for {
rows <- run(
DatasetResolutions.filter(r => r._Dataset === dataSetId.id && r.datalayername === dataLayerName).result)
@@ -388,7 +388,7 @@ class DataSetDataLayerDAO @Inject()(sqlClient: SQLClient, dataSetResolutionsDAO:
.fromSQL(parseArrayTuple(row.boundingbox).map(_.toInt))
.toFox ?~> "Could not parse boundingbox"
elementClass <- ElementClass.fromString(row.elementclass).toFox ?~> "Could not parse Layer ElementClass"
- standinResolutions: Option[List[Point3D]] = if (skipResolutions) Some(List.empty[Point3D]) else None
+ standinResolutions: Option[List[Vec3Int]] = if (skipResolutions) Some(List.empty[Vec3Int]) else None
resolutions <- Fox.fillOption(standinResolutions)(
dataSetResolutionsDAO.findDataResolutionForLayer(dataSetId, row.name) ?~> "Could not find resolution for layer")
defaultViewConfigurationOpt <- Fox.runOptional(row.defaultviewconfiguration)(
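
A sketch of the parseScaleOpt change above, with stand-in implementations: the tuple-literal shape (e.g. "{11.24,11.24,28.0}") is an assumption about what parseArrayTuple/writeStructTuple produce, not taken from the PR:

object ScaleParseSketch extends App {
  case class Vec3Double(x: Double, y: Double, z: Double)

  def fromList(l: List[Double]): Option[Vec3Double] = l match {
    case List(x, y, z) => Some(Vec3Double(x, y, z))
    case _             => None
  }

  // Assumed tuple-literal shape, e.g. "{11.24,11.24,28.0}".
  def parseArrayTuple(literal: String): List[String] =
    literal.stripPrefix("{").stripSuffix("}").split(',').map(_.trim).toList

  println(fromList(parseArrayTuple("{11.24,11.24,28.0}").map(_.toDouble))) // Some(Vec3Double(11.24,11.24,28.0))
}
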
diff --git a/app/models/binary/WKRemoteDataStoreClient.scala b/app/models/binary/WKRemoteDataStoreClient.scala
index 31330838eb4..f77428ee3ec 100644
--- a/app/models/binary/WKRemoteDataStoreClient.scala
+++ b/app/models/binary/WKRemoteDataStoreClient.scala
@@ -1,6 +1,6 @@
package models.binary
-import com.scalableminds.util.geometry.Point3D
+import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.models.ImageThumbnail
import com.scalableminds.webknossos.datastore.rpc.RPC
@@ -19,7 +19,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, dataSet: DataSet, rpc: RPC)
width: Int,
height: Int,
zoom: Option[Double],
- center: Option[Point3D]): Fox[Array[Byte]] = {
+ center: Option[Vec3Int]): Fox[Array[Byte]] = {
logger.debug(s"Thumbnail called for: $organizationName-${dataSet.name} Layer: $dataLayerName")
rpc(s"${dataStore.url}/data/datasets/${urlEncode(organizationName)}/${dataSet.urlEncodedName}/layers/$dataLayerName/thumbnail.json")
.addQueryString("token" -> RpcTokenHolder.webKnossosToken)
diff --git a/app/models/job/Job.scala b/app/models/job/Job.scala
index 37e79b7f07b..9666a8c9fae 100644
--- a/app/models/job/Job.scala
+++ b/app/models/job/Job.scala
@@ -72,7 +72,7 @@ case class Job(
}
case "export_tiff" =>
Some(s"$dataStorePublicUrl/data/exports/${_id.id}/download")
- case "infer_nuclei" =>
+ case "infer_nuclei" | "infer_neurons" =>
returnValue.map { resultDatasetName =>
s"/datasets/$organizationName/$resultDatasetName/view"
}
diff --git a/app/models/mesh/Mesh.scala b/app/models/mesh/Mesh.scala
index be30dd48504..610443afc93 100644
--- a/app/models/mesh/Mesh.scala
+++ b/app/models/mesh/Mesh.scala
@@ -2,7 +2,7 @@ package models.mesh
import com.google.common.io.BaseEncoding
import com.scalableminds.util.accesscontext.DBAccessContext
-import com.scalableminds.util.geometry.Point3D
+import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.schema.Tables._
import javax.inject.Inject
@@ -19,7 +19,7 @@ case class MeshInfo(
_id: ObjectId,
_annotation: ObjectId,
description: String,
- position: Point3D,
+ position: Vec3Int,
created: Long = System.currentTimeMillis,
isDeleted: Boolean = false
)
@@ -27,13 +27,13 @@ case class MeshInfo(
case class MeshInfoParameters(
annotationId: ObjectId,
description: String,
- position: Point3D,
+ position: Vec3Int,
)
object MeshInfoParameters {
implicit val meshInfoParametersReads: Reads[MeshInfoParameters] =
((__ \ "annotationId").read[String](ObjectId.stringObjectIdReads("teamId")) and
(__ \ "description").read[String] and
- (__ \ "position").read[Point3D])((annotationId, description, position) =>
+ (__ \ "position").read[Vec3Int])((annotationId, description, position) =>
MeshInfoParameters(ObjectId(annotationId), description, position))
}
@@ -72,7 +72,7 @@ class MeshDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext)
def parseInfo(r: InfoTuple): Fox[MeshInfo] =
for {
- position <- Point3D.fromList(parseArrayTuple(r._4).map(_.toInt)) ?~> "could not parse mesh position"
+ position <- Vec3Int.fromList(parseArrayTuple(r._4).map(_.toInt)) ?~> "could not parse mesh position"
} yield {
MeshInfo(
ObjectId(r._1), //_id
@@ -111,7 +111,7 @@ class MeshDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext)
""")
} yield ()
- def updateOne(id: ObjectId, _annotation: ObjectId, description: String, position: Point3D)(
+ def updateOne(id: ObjectId, _annotation: ObjectId, description: String, position: Vec3Int)(
implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- assertUpdateAccess(id)
diff --git a/app/models/task/Task.scala b/app/models/task/Task.scala
index 1f5fb03742d..77480c16ce9 100755
--- a/app/models/task/Task.scala
+++ b/app/models/task/Task.scala
@@ -1,7 +1,7 @@
package models.task
import com.scalableminds.util.accesscontext.DBAccessContext
-import com.scalableminds.util.geometry.{BoundingBox, Point3D, Vector3D}
+import com.scalableminds.util.geometry.{BoundingBox, Vec3Int, Vec3Double}
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.schema.Tables.{profile, _}
import javax.inject.Inject
@@ -24,8 +24,8 @@ case class Task(
openInstances: Long,
tracingTime: Option[Long],
boundingBox: Option[BoundingBox],
- editPosition: Point3D,
- editRotation: Vector3D,
+ editPosition: Vec3Int,
+ editRotation: Vec3Double,
creationInfo: Option[String],
created: Long = System.currentTimeMillis(),
isDeleted: Boolean = false
@@ -40,8 +40,8 @@ class TaskDAO @Inject()(sqlClient: SQLClient, projectDAO: ProjectDAO)(implicit e
def parse(r: TasksRow): Fox[Task] =
for {
- editPosition <- Point3D.fromList(parseArrayTuple(r.editposition).map(_.toInt)) ?~> "could not parse edit position"
- editRotation <- Vector3D.fromList(parseArrayTuple(r.editrotation).map(_.toDouble)) ?~> "could not parse edit rotation"
+ editPosition <- Vec3Int.fromList(parseArrayTuple(r.editposition).map(_.toInt)) ?~> "could not parse edit position"
+ editRotation <- Vec3Double.fromList(parseArrayTuple(r.editrotation).map(_.toDouble)) ?~> "could not parse edit rotation"
} yield {
Task(
ObjectId(r._Id),
diff --git a/app/models/task/TaskCreationParameters.scala b/app/models/task/TaskCreationParameters.scala
index 653b9020598..ae073f0f1e0 100644
--- a/app/models/task/TaskCreationParameters.scala
+++ b/app/models/task/TaskCreationParameters.scala
@@ -1,6 +1,6 @@
package models.task
-import com.scalableminds.util.geometry.{BoundingBox, Point3D, Vector3D}
+import com.scalableminds.util.geometry.{BoundingBox, Vec3Int, Vec3Double}
import models.user.Experience
import play.api.libs.json.{Format, Json}
@@ -12,8 +12,8 @@ case class TaskParameters(
scriptId: Option[String],
boundingBox: Option[BoundingBox],
dataSet: String,
- editPosition: Point3D,
- editRotation: Vector3D,
+ editPosition: Vec3Int,
+ editRotation: Vec3Double,
creationInfo: Option[String],
description: Option[String],
baseAnnotation: Option[BaseAnnotation]
diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala
index 60a3359ac38..04db9d1d8cc 100644
--- a/app/models/task/TaskCreationService.scala
+++ b/app/models/task/TaskCreationService.scala
@@ -3,7 +3,7 @@ package models.task
import java.io.File
import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext}
-import com.scalableminds.util.geometry.{BoundingBox, Point3D, Vector3D}
+import com.scalableminds.util.geometry.{BoundingBox, Vec3Int, Vec3Double}
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings}
import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing
@@ -247,7 +247,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService,
volumeTracing: Box[VolumeTracing],
fileName: Box[String],
description: Box[Option[String]])(implicit m: MessagesProvider): Box[TaskParameters] = {
- val paramBox: Box[(Option[BoundingBox], String, Point3D, Vector3D)] = skeletonTracing match {
+ val paramBox: Box[(Option[BoundingBox], String, Vec3Int, Vec3Double)] = skeletonTracing match {
case Full(tracing) => Full((tracing.boundingBox, tracing.dataSetName, tracing.editPosition, tracing.editRotation))
case f: Failure => f
case Empty =>
diff --git a/app/models/user/UserService.scala b/app/models/user/UserService.scala
index 4f2154da3b6..244261bf69c 100755
--- a/app/models/user/UserService.scala
+++ b/app/models/user/UserService.scala
@@ -327,7 +327,8 @@ class UserService @Inject()(conf: WkConf,
"novelUserExperienceInfos" -> novelUserExperienceInfos,
"selectedTheme" -> multiUser.selectedTheme,
"created" -> user.created,
- "lastTaskTypeId" -> user.lastTaskTypeId.map(_.toString)
+ "lastTaskTypeId" -> user.lastTaskTypeId.map(_.toString),
+ "isSuperUser" -> multiUser.isSuperUser
)
}
}
diff --git a/app/models/user/time/TimeSpanService.scala b/app/models/user/time/TimeSpanService.scala
index 37a0bc03eea..b5203eea23a 100644
--- a/app/models/user/time/TimeSpanService.scala
+++ b/app/models/user/time/TimeSpanService.scala
@@ -90,7 +90,11 @@ class TimeSpanService @Inject()(annotationDAO: AnnotationDAO,
@SuppressWarnings(Array("TraversableHead", "TraversableLast")) // Only functions call this which put at least one timestamp in the seq
private def trackTime(timestamps: Seq[Long], _user: ObjectId, _annotation: Annotation)(
- implicit ctx: DBAccessContext) = {
+ implicit ctx: DBAccessContext): Fox[Unit] = {
+ if (timestamps.isEmpty) {
+ logger.warn("Timetracking called with empty timestamps list.")
+ return Fox.successful(())
+ }
// Only if the annotation belongs to the user, we are going to log the time on the annotation
val annotation = if (_annotation._user == _user) Some(_annotation) else None
val start = timestamps.head
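
A simplified sketch of the new guard in trackTime: timestamps.head and timestamps.last would throw on an empty Seq, so the function now warns and returns early. Types are reduced to plain Scala here, with println standing in for logger.warn:

object TrackTimeGuardSketch extends App {
  def trackTime(timestamps: Seq[Long]): Unit = {
    if (timestamps.isEmpty) {
      println("Timetracking called with empty timestamps list.")
      return
    }
    val start = timestamps.head
    val end   = timestamps.last
    println(s"tracked span: ${end - start} ms")
  }

  trackTime(Seq.empty)                // warns instead of throwing on .head
  trackTime(Seq(1000L, 1500L, 4000L)) // tracked span: 3000 ms
}
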
diff --git a/app/views/main.scala.html b/app/views/main.scala.html
index ce689052473..5d9fdbff282 100755
--- a/app/views/main.scala.html
+++ b/app/views/main.scala.html
@@ -73,24 +73,12 @@
@if(conf.GoogleAnalytics.trackingId.nonEmpty) {
+        @* GA4 gtag.js snippet (script content stripped from this excerpt) *@
}