From 5ffa251e09808ac3110407030942e0ba0aec3e7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 12 Sep 2024 18:00:03 +0200 Subject: [PATCH 001/129] WIP: Adjust schema to allow duplicate dataset names & implement new URI dataset addressing scheme --- app/controllers/AnnotationController.scala | 12 +- app/controllers/ConfigurationController.scala | 21 ++-- app/controllers/DatasetController.scala | 90 +++++++-------- app/controllers/JobController.scala | 105 ++++++++++-------- .../WKRemoteTracingStoreController.scala | 10 +- .../DatasetConfigurationService.scala | 12 +- app/models/dataset/Dataset.scala | 67 ++++++++--- app/models/dataset/DatasetService.scala | 22 ++-- app/models/dataset/ThumbnailService.scala | 18 +-- app/models/user/UserService.scala | 6 +- app/opengraph/OpenGraphService.scala | 6 +- app/utils/ObjectId.scala | 2 +- ...1-decouple-dataset-directory-from-name.sql | 13 +++ conf/webknossos.latest.routes | 62 +++++------ .../dataset/dataset_settings_view.tsx | 2 +- .../dashboard/folders/details_sidebar.tsx | 2 +- .../dashboard/publication_card.tsx | 4 +- frontend/javascripts/oxalis/default_state.ts | 2 +- .../dataset_info_tab_view.tsx | 3 +- frontend/javascripts/types/api_flow_types.ts | 6 +- tools/postgres/schema.sql | 5 +- 21 files changed, 269 insertions(+), 201 deletions(-) create mode 100644 conf/evolutions/121-decouple-dataset-directory-from-name.sql diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 34c5efa5cd4..9074e9f6cfc 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -242,15 +242,15 @@ class AnnotationController @Inject()( } yield result } - def createExplorational(organizationId: String, datasetName: String): Action[List[AnnotationLayerParameters]] = + def createExplorational(organizationId: String, datasetNameAndId: String): Action[List[AnnotationLayerParameters]] = sil.SecuredAction.async(validateJson[List[AnnotationLayerParameters]]) { implicit request => for { organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( "organization.notFound", organizationId) ~> NOT_FOUND - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND annotation <- annotationService.createExplorationalFor( request.identity, dataset._id, @@ -263,7 +263,7 @@ class AnnotationController @Inject()( } def getSandbox(organization: String, - datasetName: String, + datasetNameAndId: String, typ: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => @@ -272,9 +272,9 @@ class AnnotationController @Inject()( organization <- organizationDAO.findOne(organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", organization) ~> NOT_FOUND - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id)(ctx) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id)(ctx) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND tracingType <- TracingType.fromString(typ).toFox _ <- bool2Fox(tracingType == TracingType.skeleton) ?~> "annotation.sandbox.skeletonOnly" annotation = Annotation( diff --git 
a/app/controllers/ConfigurationController.scala b/app/controllers/ConfigurationController.scala index 6b57bb28cb7..98250dd0929 100755 --- a/app/controllers/ConfigurationController.scala +++ b/app/controllers/ConfigurationController.scala @@ -34,7 +34,7 @@ class ConfigurationController @Inject()( } def readDatasetViewConfiguration(organizationId: String, - datasetName: String, + datasetNameAndId: String, sharingToken: Option[String]): Action[List[String]] = sil.UserAwareAction.async(validateJson[List[String]]) { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) @@ -42,17 +42,18 @@ class ConfigurationController @Inject()( .flatMap(user => datasetConfigurationService.getDatasetViewConfigurationForUserAndDataset(request.body, user, - datasetName, + datasetNameAndId, organizationId)(GlobalAccessContext)) .orElse( - datasetConfigurationService.getDatasetViewConfigurationForDataset(request.body, datasetName, organizationId)( - ctx) + datasetConfigurationService.getDatasetViewConfigurationForDataset(request.body, + datasetNameAndId, + organizationId)(ctx) ) .getOrElse(Map.empty) .map(configuration => Ok(Json.toJson(configuration))) } - def updateDatasetViewConfiguration(organizationId: String, datasetName: String): Action[JsValue] = + def updateDatasetViewConfiguration(organizationId: String, datasetNameAndId: String): Action[JsValue] = sil.SecuredAction.async(parse.json(maxLength = 20480)) { implicit request => for { jsConfiguration <- request.body.asOpt[JsObject] ?~> "user.configuration.dataset.invalid" @@ -60,24 +61,24 @@ class ConfigurationController @Inject()( datasetConf = conf - "layers" layerConf = conf.get("layers") _ <- userService.updateDatasetViewConfiguration(request.identity, - datasetName, + datasetNameAndId, organizationId, datasetConf, layerConf) } yield JsonOk(Messages("user.configuration.dataset.updated")) } - def readDatasetAdminViewConfiguration(organizationId: String, datasetName: String): Action[AnyContent] = + def readDatasetAdminViewConfiguration(organizationId: String, datasetNameAndId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => datasetConfigurationService - .getCompleteAdminViewConfiguration(datasetName, organizationId) + .getCompleteAdminViewConfiguration(datasetNameAndId, organizationId) .map(configuration => Ok(Json.toJson(configuration))) } - def updateDatasetAdminViewConfiguration(organizationId: String, datasetName: String): Action[JsValue] = + def updateDatasetAdminViewConfiguration(organizationId: String, datasetNameAndId: String): Action[JsValue] = sil.SecuredAction.async(parse.json(maxLength = 20480)) { implicit request => for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> "dataset.notFound" ~> NOT_FOUND + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(GlobalAccessContext) _ <- datasetService.isEditableBy(dataset, Some(request.identity)) ?~> "notAllowed" ~> FORBIDDEN jsObject <- request.body.asOpt[JsObject].toFox ?~> "user.configuration.dataset.invalid" _ <- datasetConfigurationService.updateAdminViewConfigurationFor(dataset, jsObject.fields.toMap) diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index dfa608641a6..6c6f2dd9554 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -99,14 +99,14 @@ class DatasetController @Inject()(userService: UserService, (__ \ "metadata").readNullable[JsArray] and (__ \ 
"folderId").readNullable[ObjectId]).tupled - def removeFromThumbnailCache(organizationId: String, datasetName: String): Action[AnyContent] = + def removeFromThumbnailCache(organizationId: String, datasetNameAndId: String): Action[AnyContent] = sil.SecuredAction { - thumbnailCachingService.removeFromCache(organizationId, datasetName) + thumbnailCachingService.removeFromCache(organizationId, datasetNameAndId) Ok } def thumbnail(organizationId: String, - datasetName: String, + datasetNameAndId: String, dataLayerName: String, w: Option[Int], h: Option[Int], @@ -115,8 +115,14 @@ class DatasetController @Inject()(userService: UserService, sil.UserAwareAction.async { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) for { - _ <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(ctx) ?~> notFoundMessage(datasetName) ~> NOT_FOUND // To check Access Rights - image <- thumbnailService.getThumbnailWithCache(organizationId, datasetName, dataLayerName, w, h, mappingName) + _ <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(ctx) ?~> notFoundMessage( + datasetNameAndId) ~> NOT_FOUND // To check Access Rights + image <- thumbnailService.getThumbnailWithCache(organizationId, + datasetNameAndId, + dataLayerName, + w, + h, + mappingName) } yield { addRemoteOriginHeaders(Ok(image)).as(jpegMimeType).withHeaders(CACHE_CONTROL -> "public, max-age=86400") } @@ -237,12 +243,12 @@ class DatasetController @Inject()(userService: UserService, } } yield js.flatten - def accessList(organizationId: String, datasetName: String): Action[AnyContent] = sil.SecuredAction.async { + def accessList(organizationId: String, datasetNameAndId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { organization <- organizationDAO.findOne(organizationId) - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> notFoundMessage( + datasetNameAndId) ~> NOT_FOUND allowedTeams <- teamService.allowedTeamIdsForDataset(dataset, cumulative = true) ?~> "allowedTeams.notFound" usersByTeams <- userDAO.findAllByTeams(allowedTeams) adminsAndDatasetManagers <- userDAO.findAdminsAndDatasetManagersByOrg(organization._id) @@ -252,7 +258,7 @@ class DatasetController @Inject()(userService: UserService, } def read(organizationId: String, - datasetName: String, + datasetNameAndId: String, // Optional sharing token allowing access to datasets your team does not normally have access to.") sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => @@ -262,8 +268,8 @@ class DatasetController @Inject()(userService: UserService, organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( "organization.notFound", organizationId) - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id)(ctx) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id)(ctx) ?~> notFoundMessage( + datasetNameAndId) ~> NOT_FOUND _ <- Fox.runOptional(request.identity)(user => datasetLastUsedTimesDAO.updateForDatasetAndUser(dataset._id, user._id)) // Access checked above via dataset. 
In case of shared dataset/annotation, show datastore even if not otherwise accessible @@ -282,12 +288,12 @@ class DatasetController @Inject()(userService: UserService, } } - def health(organizationId: String, datasetName: String, sharingToken: Option[String]): Action[AnyContent] = + def health(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(ctx) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(ctx) ?~> notFoundMessage( + datasetNameAndId) ~> NOT_FOUND dataSource <- datasetService.dataSourceFor(dataset) ?~> "dataSource.notFound" ~> NOT_FOUND usableDataSource <- dataSource.toUsable.toFox ?~> "dataset.notImported" datalayer <- usableDataSource.dataLayers.headOption.toFox ?~> "dataset.noLayers" @@ -298,28 +304,28 @@ class DatasetController @Inject()(userService: UserService, } yield Ok("Ok") } - def updatePartial(organizationId: String, datasetName: String): Action[DatasetUpdateParameters] = + def updatePartial(organizationId: String, datasetNameAndId: String): Action[DatasetUpdateParameters] = sil.SecuredAction.async(validateJson[DatasetUpdateParameters]) { implicit request => for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, request.identity._organization) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, request.identity._organization) ?~> notFoundMessage( + datasetNameAndId) ~> NOT_FOUND _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN _ <- Fox.runOptional(request.body.metadata)(assertNoDuplicateMetadataKeys) _ <- datasetDAO.updatePartial(dataset._id, request.body) - updated <- datasetDAO.findOneByNameAndOrganization(datasetName, request.identity._organization) + updated <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, request.identity._organization) _ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated)) js <- datasetService.publicWrites(updated, Some(request.identity)) } yield Ok(js) } // Note that there exists also updatePartial (which will only expect the changed fields) - def update(organizationId: String, datasetName: String): Action[JsValue] = + def update(organizationId: String, datasetNameAndId: String): Action[JsValue] = sil.SecuredAction.async(parse.json) { implicit request => withJsonBodyUsing(datasetPublicReads) { case (description, displayName, sortingKey, isPublic, tags, metadata, folderId) => for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, request.identity._organization) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, request.identity._organization) ?~> notFoundMessage( + datasetNameAndId) ~> NOT_FOUND maybeUpdatedMetadata = metadata.getOrElse(dataset.metadata) _ <- assertNoDuplicateMetadataKeys(maybeUpdatedMetadata) _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN @@ -333,17 +339,18 @@ class DatasetController @Inject()(userService: UserService, maybeUpdatedMetadata, folderId.getOrElse(dataset._folder) ) - updated <- datasetDAO.findOneByNameAndOrganization(datasetName, 
request.identity._organization) + updated <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, request.identity._organization) _ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated)) js <- datasetService.publicWrites(updated, Some(request.identity)) } yield Ok(Json.toJson(js)) } } - def updateTeams(organizationId: String, datasetName: String): Action[List[ObjectId]] = + def updateTeams(organizationId: String, datasetNameAndId: String): Action[List[ObjectId]] = sil.SecuredAction.async(validateJson[List[ObjectId]]) { implicit request => for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) ?~> notFoundMessage( + datasetNameAndId) ~> NOT_FOUND _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN includeMemberOnlyTeams = request.identity.isDatasetManager userTeams <- if (includeMemberOnlyTeams) teamDAO.findAll else teamDAO.findAllEditable @@ -355,23 +362,23 @@ class DatasetController @Inject()(userService: UserService, } yield Ok(Json.toJson(newTeams)) } - def getSharingToken(organizationId: String, datasetName: String): Action[AnyContent] = + def getSharingToken(organizationId: String, datasetNameAndId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { organization <- organizationDAO.findOne(organizationId) _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN - token <- datasetService.getSharingToken(datasetName, organization._id) + token <- datasetService.getSharingToken(datasetNameAndId, organization._id) } yield Ok(Json.obj("sharingToken" -> token.trim)) } - def deleteSharingToken(organizationId: String, datasetName: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => + def deleteSharingToken(organizationId: String, datasetNameAndId: String): Action[AnyContent] = + sil.SecuredAction.async { implicit request => for { organization <- organizationDAO.findOne(organizationId) _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN - _ <- datasetDAO.updateSharingTokenByName(datasetName, organization._id, None) + _ <- datasetDAO.updateSharingTokenByIdOrName(datasetNameAndId, organization._id, None) } yield Ok - } + } def create(typ: String): Action[JsValue] = sil.SecuredAction.async(parse.json) { implicit request => Future.successful(JsonBadRequest(Messages("dataset.type.invalid", typ))) @@ -379,24 +386,13 @@ class DatasetController @Inject()(userService: UserService, def isValidNewName(organizationId: String, datasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => - for { - organization <- organizationDAO.findOne(organizationId) - _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN - validName <- datasetService.assertValidDatasetName(datasetName).futureBox - nameAlreadyExists <- (datasetService.assertNewDatasetName(datasetName, organization._id) ?~> "dataset.name.alreadyTaken").futureBox - errors = combineErrors(List(validName, nameAlreadyExists)) - valid = validName.isDefined && nameAlreadyExists.isDefined - } yield - errors match { - case Some(e) => Ok(Json.obj("isValid" -> valid, "errors" -> e.map(Messages(_)))) - case None => Ok(Json.obj("isValid" -> valid)) - } + Fox.successful(Ok(Json.obj("isValid" -> true))) } - def getOrganizationForDataset(datasetName: 
String): Action[AnyContent] = sil.UserAwareAction.async { + def getOrganizationForDataset(datasetNameAndId: String): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - organizationId <- datasetDAO.getOrganizationIdForDataset(datasetName) + organizationId <- datasetDAO.getOrganizationIdForDataset(datasetNameAndId) organization <- organizationDAO.findOne(organizationId) } yield Ok(Json.obj("organization" -> organization._id)) } @@ -408,7 +404,7 @@ class DatasetController @Inject()(userService: UserService, } def segmentAnythingMask(organizationId: String, - datasetName: String, + datasetNameAndId: String, dataLayerName: String, intensityMin: Option[Float], intensityMax: Option[Float]): Action[SegmentAnythingMaskParameters] = @@ -417,8 +413,8 @@ class DatasetController @Inject()(userService: UserService, for { _ <- bool2Fox(conf.Features.segmentAnythingEnabled) ?~> "segmentAnything.notEnabled" _ <- bool2Fox(conf.SegmentAnything.uri.nonEmpty) ?~> "segmentAnything.noUri" - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage( - datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) ?~> notFoundMessage( + datasetNameAndId) ~> NOT_FOUND dataSource <- datasetService.dataSourceFor(dataset) ?~> "dataSource.notFound" ~> NOT_FOUND usableDataSource <- dataSource.toUsable ?~> "dataset.notImported" dataLayer <- usableDataSource.dataLayers.find(_.name == dataLayerName) ?~> "dataset.noLayers" diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala index 75a90765189..692ce0e8fd2 100644 --- a/app/controllers/JobController.scala +++ b/app/controllers/JobController.scala @@ -110,7 +110,7 @@ class JobController @Inject()( // Note that the dataset has to be registered by reserveUpload via the datastore first. 
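// Context for the job args below: with duplicate dataset names allowed, the on-disk directory
// ("path", see evolution 121-decouple-dataset-directory-from-name.sql) can differ from the
// user-facing dataset name, so each worker command now carries both "dataset_name" and
// "dataset_path"; per the TODOM notes, the workers presumably still need to be adjusted to
// address datasets by path.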
def runConvertToWkwJob(organizationId: String, - datasetName: String, + datasetNameAndId: String, scale: String, unit: Option[String]): Action[AnyContent] = sil.SecuredAction.async { implicit request => @@ -121,14 +121,15 @@ class JobController @Inject()( voxelSizeUnit <- Fox.runOptional(unit)(u => LengthUnit.fromString(u).toFox) voxelSize = VoxelSize.fromFactorAndUnitWithDefault(voxelSizeFactor, voxelSizeUnit) _ <- bool2Fox(request.identity._organization == organization._id) ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND command = JobCommand.convert_to_wkw commandArgs = Json.obj( "organization_name" -> organizationId, "organization_display_name" -> organization.name, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "voxel_size_factor" -> voxelSize.factor.toUriLiteral, "voxel_size_unit" -> voxelSize.unit ) @@ -139,7 +140,7 @@ class JobController @Inject()( } def runComputeMeshFileJob(organizationId: String, - datasetName: String, + datasetNameAndId: String, layerName: String, mag: String, agglomerateView: Option[String]): Action[AnyContent] = @@ -149,14 +150,15 @@ class JobController @Inject()( "organization.notFound", organizationId) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.meshFile.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.compute_mesh_file commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "layer_name" -> layerName, "mag" -> mag, "agglomerate_view" -> agglomerateView @@ -166,7 +168,7 @@ class JobController @Inject()( } yield Ok(js) } - def runComputeSegmentIndexFileJob(organizationId: String, datasetName: String, layerName: String, + def runComputeSegmentIndexFileJob(organizationId: String, datasetNameAndId: String, layerName: String, ): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { @@ -174,14 +176,15 @@ class JobController @Inject()( "organization.notFound", organizationId) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.segmentIndexFile.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.compute_segment_index_file commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // 
TODOM: Adjust worker to use correct dataset "segmentation_layer_name" -> layerName, ) job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunSegmentIndexFile" @@ -190,7 +193,7 @@ class JobController @Inject()( } def runInferNucleiJob(organizationId: String, - datasetName: String, + datasetNameAndId: String, layerName: String, newDatasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => @@ -200,15 +203,16 @@ class JobController @Inject()( "organization.notFound", organizationId) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferNuclei.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.infer_nuclei commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "layer_name" -> layerName, "new_dataset_name" -> newDatasetName ) @@ -219,7 +223,7 @@ class JobController @Inject()( } def runInferNeuronsJob(organizationId: String, - datasetName: String, + datasetNameAndId: String, layerName: String, bbox: String, newDatasetName: String): Action[AnyContent] = @@ -228,9 +232,9 @@ class JobController @Inject()( for { organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferNeurons.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) multiUser <- multiUserDAO.findOne(request.identity._multiUser) @@ -238,7 +242,8 @@ class JobController @Inject()( command = JobCommand.infer_neurons commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "new_dataset_name" -> newDatasetName, "layer_name" -> layerName, "bbox" -> bbox, @@ -250,7 +255,7 @@ class JobController @Inject()( } def runInferMitochondriaJob(organizationId: String, - datasetName: String, + datasetNameAndId: String, layerName: String, bbox: String, newDatasetName: String): Action[AnyContent] = @@ -259,9 +264,9 @@ class JobController @Inject()( for { organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferMitochondria.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- 
datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) multiUser <- multiUserDAO.findOne(request.identity._multiUser) @@ -270,7 +275,8 @@ class JobController @Inject()( command = JobCommand.infer_mitochondria commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "new_dataset_name" -> newDatasetName, "layer_name" -> layerName, "bbox" -> bbox, @@ -282,7 +288,7 @@ class JobController @Inject()( } def runAlignSectionsJob(organizationId: String, - datasetName: String, + datasetNameAndId: String, layerName: String, newDatasetName: String, annotationId: Option[String] = None): Action[AnyContent] = @@ -291,9 +297,9 @@ class JobController @Inject()( for { organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.alignSections.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) _ <- Fox.runOptional(annotationId)(ObjectId.fromString) @@ -302,7 +308,8 @@ class JobController @Inject()( command = JobCommand.align_sections commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "new_dataset_name" -> newDatasetName, "layer_name" -> layerName, "annotation_id" -> annotationId @@ -314,7 +321,7 @@ class JobController @Inject()( } def runExportTiffJob(organizationId: String, - datasetName: String, + datasetNameAndId: String, bbox: String, layerName: Option[String], mag: Option[String], @@ -324,20 +331,21 @@ class JobController @Inject()( sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND _ <- Fox.runOptional(layerName)(datasetService.assertValidLayerNameLax) _ <- Fox.runOptional(annotationLayerName)(datasetService.assertValidLayerNameLax) _ <- jobService.assertBoundingBoxLimits(bbox, mag) command = JobCommand.export_tiff exportFileName = if (asOmeTiff) - s"${formatDateForFilename(new Date())}__${datasetName}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.ome.tif" + s"${formatDateForFilename(new Date())}__${dataset.name}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.ome.tif" else - s"${formatDateForFilename(new Date())}__${datasetName}__${annotationLayerName.map(_ => 
"volume").getOrElse(layerName.getOrElse(""))}.zip" + s"${formatDateForFilename(new Date())}__${dataset.name}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.zip" commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "bbox" -> bbox, "export_file_name" -> exportFileName, "layer_name" -> layerName, @@ -352,7 +360,7 @@ class JobController @Inject()( } def runMaterializeVolumeAnnotationJob(organizationId: String, - datasetName: String, + datasetNameAndId: String, fallbackLayerName: String, annotationId: String, annotationType: String, @@ -367,16 +375,17 @@ class JobController @Inject()( "organization.notFound", organizationId) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.materializeVolumeAnnotation.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(fallbackLayerName) command = JobCommand.materialize_volume_annotation _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName) commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "fallback_layer_name" -> fallbackLayerName, "annotation_id" -> annotationId, "output_segmentation_layer_name" -> outputSegmentationLayerName, @@ -391,20 +400,21 @@ class JobController @Inject()( } } - def runFindLargestSegmentIdJob(organizationId: String, datasetName: String, layerName: String): Action[AnyContent] = + def runFindLargestSegmentIdJob(organizationId: String, datasetNameAndId: String, layerName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.findLargestSegmentId.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.find_largest_segment_id commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "layer_name" -> layerName ) job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunFindLargestSegmentId" @@ -413,16 +423,16 @@ class JobController @Inject()( } } - def runRenderAnimationJob(organizationId: String, datasetName: String): Action[AnimationJobOptions] 
= + def runRenderAnimationJob(organizationId: String, datasetNameAndId: String): Action[AnimationJobOptions] = sil.SecuredAction.async(validateJson[AnimationJobOptions]) { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) userOrganization <- organizationDAO.findOne(request.identity._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.renderAnimation.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetNameAndId) ~> NOT_FOUND animationJobOptions = request.body _ <- Fox.runIf(userOrganization.pricingPlan == PricingPlan.Basic) { bool2Fox(animationJobOptions.includeWatermark) ?~> "job.renderAnimation.mustIncludeWatermark" @@ -432,11 +442,12 @@ class JobController @Inject()( } layerName = animationJobOptions.layerName _ <- datasetService.assertValidLayerNameLax(layerName) - exportFileName = s"webknossos_animation_${formatDateForFilename(new Date())}__${datasetName}__$layerName.mp4" + exportFileName = s"webknossos_animation_${formatDateForFilename(new Date())}__${dataset.name}__$layerName.mp4" command = JobCommand.render_animation commandArgs = Json.obj( "organization_name" -> organizationId, - "dataset_name" -> datasetName, + "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "export_file_name" -> exportFileName, "layer_name" -> animationJobOptions.layerName, "bounding_box" -> animationJobOptions.boundingBox.toLiteral, diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 99b14f82c45..0be68abb7aa 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -116,17 +116,17 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore def dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], - datasetName: String): Action[AnyContent] = + datasetNameAndId: String): Action[AnyContent] = Action.async { implicit request => tracingStoreService.validateAccess(name, key) { _ => implicit val ctx: DBAccessContext = GlobalAccessContext for { organizationIdWithFallback <- Fox.fillOption(organizationId) { - datasetDAO.getOrganizationIdForDataset(datasetName)(GlobalAccessContext) - } ?~> Messages("dataset.noAccess", datasetName) ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationIdWithFallback) ?~> Messages( + datasetDAO.getOrganizationIdForDataset(datasetNameAndId)(GlobalAccessContext) + } ?~> Messages("dataset.noAccess", datasetNameAndId) ~> FORBIDDEN + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationIdWithFallback) ?~> Messages( "dataset.noAccess", - datasetName) ~> FORBIDDEN + datasetNameAndId) ~> FORBIDDEN dataStore <- datasetService.dataStoreFor(dataset) } yield Ok(Json.toJson(dataStore.url)) } diff --git a/app/models/configuration/DatasetConfigurationService.scala b/app/models/configuration/DatasetConfigurationService.scala index 304ab039b6b..f543e35ab8a 100644 --- 
a/app/models/configuration/DatasetConfigurationService.scala +++ b/app/models/configuration/DatasetConfigurationService.scala @@ -22,10 +22,10 @@ class DatasetConfigurationService @Inject()(datasetService: DatasetService, def getDatasetViewConfigurationForUserAndDataset( requestedVolumeIds: List[String], user: User, - datasetName: String, + datasetNameAndId: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) datasetViewConfiguration <- userDatasetConfigurationDAO.findOneForUserAndDataset(user._id, dataset._id) @@ -35,10 +35,10 @@ class DatasetConfigurationService @Inject()(datasetService: DatasetService, def getDatasetViewConfigurationForDataset( requestedVolumeIds: List[String], - datasetName: String, + datasetNameAndId: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) datasetViewConfiguration = getDatasetViewConfigurationFromDefaultAndAdmin(dataset) @@ -52,10 +52,10 @@ class DatasetConfigurationService @Inject()(datasetService: DatasetService, defaultVC ++ adminVC } - def getCompleteAdminViewConfiguration(datasetName: String, organizationId: String)( + def getCompleteAdminViewConfiguration(datasetNameAndId: String, organizationId: String)( implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) datasetViewConfiguration = getDatasetViewConfigurationFromDefaultAndAdmin(dataset) datasetLayers <- datasetService.allLayersFor(dataset) layerConfigurations = getAllLayerAdminViewConfigForDataset(datasetLayers).view.mapValues(Json.toJson(_)).toMap diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index 7eb6d4f704d..230d44fbd3a 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -46,7 +46,7 @@ case class Dataset(_id: ObjectId, defaultViewConfiguration: Option[DatasetViewConfiguration] = None, adminViewConfiguration: Option[DatasetViewConfiguration] = None, description: Option[String] = None, - displayName: Option[String] = None, + path: String, isPublic: Boolean, isUsable: Boolean, name: String, @@ -71,7 +71,7 @@ case class DatasetCompactInfo( owningOrganization: String, folderId: ObjectId, isActive: Boolean, - displayName: String, + path: String, created: Instant, isEditable: Boolean, lastUsedByUser: Instant, @@ -132,7 +132,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA defaultViewConfigurationOpt, adminViewConfigurationOpt, r.description, - r.displayname, + r.path, r.ispublic, r.isusable, r.name, @@ -252,7 +252,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA o._id, d._folder, d.isUsable, - d.displayName, + d.path, d.created, COALESCE( ( @@ -318,7 +318,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA owningOrganization = row._3, folderId = row._4, isActive = row._5, - displayName = row._6, + path = row._6, created = row._7, isEditable = row._8, lastUsedByUser = row._9, @@ -400,6 +400,32 @@ class 
DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA r <- rList.headOption } yield r + def findOneByIdOrNameAndOrganization(idAndName: String, organizationId: String)( + implicit ctx: DBAccessContext): Fox[Dataset] = { + getDatasetIdOrNameFromURIPath(idAndName) match { + case (Some(validId), None) => findOneByIdAndOrganization(validId, organizationId) + case (None, Some(datasetName)) => findOneByNameAndOrganization(datasetName, organizationId) + } + } + + private def getDatasetIdOrNameFromURIPath(datasetNameAndId: String): (Option[ObjectId], Option[String]) = { + val maybeIdStr = datasetNameAndId.split("-").lastOption + val maybeId = maybeIdStr.flatMap(ObjectId.fromStringSync) + maybeId match { + case Some(validId) => (Some(validId), None) + case None => (None, Some(datasetNameAndId)) + } + } + + private def getWhereClauseForDatasetIdOrName(datasetIdOrName: String): SqlToken = { + val (maybeId, maybeDatasetName) = getDatasetIdOrNameFromURIPath(datasetIdOrName) + maybeId match { + case Some(id) => q"_id = $id" + case None => q"name = $maybeDatasetName" + } + } + + // TODOM: Make private def findOneByNameAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = for { accessQuery <- readAccessQuery @@ -411,6 +437,18 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA parsed <- parseFirst(r, s"$organizationId/$name") } yield parsed + private def findOneByIdAndOrganization(id: ObjectId, organizationId: String)( + implicit ctx: DBAccessContext): Fox[Dataset] = + for { + accessQuery <- readAccessQuery + r <- run(q"""SELECT $columns + FROM $existingCollectionName + WHERE _id = $id + AND _organization = $organizationId + AND $accessQuery""".as[DatasetsRow]) + parsed <- parseFirst(r, s"$organizationId/$id") + } yield parsed + def findAllByNamesAndOrganization(names: List[String], organizationId: String)( implicit ctx: DBAccessContext): Fox[List[Dataset]] = for { @@ -435,12 +473,13 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA /* Disambiguation method for legacy URLs and NMLs: if the user has access to multiple datasets of the same name, use the oldest. 
* This is reasonable, because the legacy URL/NML was likely created before this ambiguity became possible */ - def getOrganizationIdForDataset(datasetName: String)(implicit ctx: DBAccessContext): Fox[String] = + def getOrganizationIdForDataset(datasetNameAndId: String)(implicit ctx: DBAccessContext): Fox[String] = for { accessQuery <- readAccessQuery + whereClause = getWhereClauseForDatasetIdOrName(datasetNameAndId) rList <- run(q"""SELECT _organization FROM $existingCollectionName - WHERE name = $datasetName + WHERE $whereClause AND $accessQuery ORDER BY created ASC""".as[String]) r <- rList.headOption.toFox @@ -453,24 +492,26 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA r <- rList.headOption.toFox } yield r - def getSharingTokenByName(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Option[String]] = + def getSharingTokenByIdOrName(datasetNameAndId: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Option[String]] = for { accessQuery <- readAccessQuery + whereClause = getWhereClauseForDatasetIdOrName(datasetNameAndId) rList <- run(q"""SELECT sharingToken FROM webknossos.datasets_ - WHERE name = $name + WHERE $whereClause AND _organization = $organizationId AND $accessQuery""".as[Option[String]]) r <- rList.headOption.toFox } yield r - def updateSharingTokenByName(name: String, organizationId: String, sharingToken: Option[String])( + def updateSharingTokenByIdOrName(datasetNameAndId: String, organizationId: String, sharingToken: Option[String])( implicit ctx: DBAccessContext): Fox[Unit] = for { accessQuery <- readAccessQuery + whereClause = getWhereClauseForDatasetIdOrName(datasetNameAndId) _ <- run(q"""UPDATE webknossos.datasets SET sharingToken = $sharingToken - WHERE name = $name + WHERE $whereClause AND _organization = $organizationId AND $accessQuery""".asUpdate) } yield () @@ -559,7 +600,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA _id, _dataStore, _organization, _publication, _uploader, _folder, inboxSourceHash, defaultViewConfiguration, adminViewConfiguration, - description, displayName, isPublic, isUsable, + description, path, isPublic, isUsable, name, voxelSizeFactor, voxelSizeUnit, status, sharingToken, sortingKey, metadata, tags, created, isDeleted @@ -568,7 +609,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA ${d._id}, ${d._dataStore}, ${d._organization}, ${d._publication}, ${d._uploader}, ${d._folder}, ${d.inboxSourceHash}, $defaultViewConfiguration, $adminViewConfiguration, - ${d.description}, ${d.displayName}, ${d.isPublic}, ${d.isUsable}, + ${d.description}, ${d.path}, ${d.isPublic}, ${d.isUsable}, ${d.name}, ${d.voxelSize.map(_.factor)}, ${d.voxelSize.map(_.unit)}, ${d.status.take(1024)}, ${d.sharingToken}, ${d.sortingKey}, ${d.metadata}, ${d.tags}, ${d.created}, ${d.isDeleted} diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index a2a0c704e5f..2e5e2cece93 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -18,7 +18,7 @@ import models.folder.FolderDAO import models.organization.{Organization, OrganizationDAO} import models.team._ import models.user.{User, UserService} -import net.liftweb.common.{Box, Full} +import net.liftweb.common.{Box, Full, Empty} import play.api.libs.json.{JsObject, Json} import security.RandomIDGenerator import utils.{ObjectId, WkConf} @@ -103,6 +103,11 @@ class DatasetService 
@Inject()(organizationDAO: OrganizationDAO, for { organization <- organizationDAO.findOne(owningOrganization) organizationRootFolder <- folderDAO.findOne(organization._rootFolder) + isNewDatasetName <- assertNewDatasetName(dataSource.id.name, organization._id).futureBox.map { + case Empty => true + case _ => false + } + datasetPath = if (isNewDatasetName) dataSource.id.name else newId.toString dataset = Dataset( newId, dataStore.name, @@ -114,7 +119,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, dataSource.defaultViewConfiguration, adminViewConfiguration = None, description = None, - displayName = None, + path = datasetPath, isPublic = false, isUsable = dataSource.isUsable, name = dataSource.id.name, @@ -236,17 +241,17 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, def deactivateUnreportedDataSources(existingDatasetIds: List[ObjectId], dataStore: DataStore): Fox[Unit] = datasetDAO.deactivateUnreported(existingDatasetIds, dataStore.name, unreportedStatus, inactiveStatusList) - def getSharingToken(datasetName: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[String] = { + def getSharingToken(datasetNameAndId: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[String] = { - def createAndSaveSharingToken(datasetName: String)(implicit ctx: DBAccessContext): Fox[String] = + def createAndSaveSharingToken(datasetNameAndId: String)(implicit ctx: DBAccessContext): Fox[String] = for { tokenValue <- new RandomIDGenerator().generate - _ <- datasetDAO.updateSharingTokenByName(datasetName, organizationId, Some(tokenValue)) + _ <- datasetDAO.updateSharingTokenByIdOrName(datasetNameAndId, organizationId, Some(tokenValue)) } yield tokenValue - datasetDAO.getSharingTokenByName(datasetName, organizationId).flatMap { + datasetDAO.getSharingTokenByIdOrName(datasetNameAndId, organizationId).flatMap { case Some(oldToken) => Fox.successful(oldToken) - case None => createAndSaveSharingToken(datasetName) + case None => createAndSaveSharingToken(datasetNameAndId) } } @@ -358,6 +363,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, organizationDAO.getUsedStorageForDataset(dataset._id)) } yield { Json.obj( + "id" -> dataset._id, "name" -> dataset.name, "dataSource" -> dataSource, "dataStore" -> dataStoreJs, @@ -367,7 +373,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, "isActive" -> dataset.isUsable, "isPublic" -> dataset.isPublic, "description" -> dataset.description, - "displayName" -> dataset.displayName, + "path" -> dataset.path, "created" -> dataset.created, "isEditable" -> isEditable, "lastUsedByUser" -> lastUsedByUser, diff --git a/app/models/dataset/ThumbnailService.scala b/app/models/dataset/ThumbnailService.scala index 88e8385c0da..35e156d3b72 100644 --- a/app/models/dataset/ThumbnailService.scala +++ b/app/models/dataset/ThumbnailService.scala @@ -37,7 +37,7 @@ class ThumbnailService @Inject()(datasetService: DatasetService, def getThumbnailWithCache( organizationId: String, - datasetName: String, + datasetNameAndId: String, layerName: String, w: Option[Int], h: Option[Int], @@ -45,7 +45,7 @@ class ThumbnailService @Inject()(datasetService: DatasetService, val width = com.scalableminds.util.tools.Math.clamp(w.getOrElse(DefaultThumbnailWidth), 1, MaxThumbnailWidth) val height = com.scalableminds.util.tools.Math.clamp(h.getOrElse(DefaultThumbnailHeight), 1, MaxThumbnailHeight) for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) + 
dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(GlobalAccessContext) image <- thumbnailCachingService.getOrLoad( dataset._id, layerName, @@ -53,13 +53,15 @@ class ThumbnailService @Inject()(datasetService: DatasetService, height, mappingName, _ => - getThumbnail(organizationId, datasetName, layerName, width, height, mappingName)(ec, GlobalAccessContext, mp) + getThumbnail(organizationId, datasetNameAndId, layerName, width, height, mappingName)(ec, + GlobalAccessContext, + mp) ) } yield image } private def getThumbnail(organizationId: String, - datasetName: String, + datasetNameAndId: String, layerName: String, width: Int, height: Int, @@ -67,12 +69,12 @@ class ThumbnailService @Inject()(datasetService: DatasetService, ctx: DBAccessContext, mp: MessagesProvider): Fox[Array[Byte]] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) dataSource <- datasetService.dataSourceFor(dataset) ?~> "dataSource.notFound" ~> NOT_FOUND usableDataSource <- dataSource.toUsable.toFox ?~> "dataset.notImported" layer <- usableDataSource.dataLayers.find(_.name == layerName) ?~> Messages("dataLayer.notFound", layerName) ~> NOT_FOUND viewConfiguration <- datasetConfigurationService.getDatasetViewConfigurationForDataset(List.empty, - datasetName, + datasetNameAndId, organizationId)(ctx) (mag1BoundingBox, mag, intensityRangeOpt, colorSettingsOpt) = selectParameters(viewConfiguration, usableDataSource, @@ -182,9 +184,9 @@ class ThumbnailCachingService @Inject()(datasetDAO: DatasetDAO, thumbnailDAO: Th } yield fromDbOrNew ) - def removeFromCache(organizationId: String, datasetName: String): Fox[Unit] = + def removeFromCache(organizationId: String, datasetNameAndId: String): Fox[Unit] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(GlobalAccessContext) _ <- removeFromCache(dataset._id) } yield () diff --git a/app/models/user/UserService.scala b/app/models/user/UserService.scala index a864bfaaa47..a2111334604 100755 --- a/app/models/user/UserService.scala +++ b/app/models/user/UserService.scala @@ -237,14 +237,14 @@ class UserService @Inject()(conf: WkConf, def updateDatasetViewConfiguration( user: User, - datasetName: String, + datasetNameAndId: String, organizationId: String, datasetConfiguration: DatasetViewConfiguration, layerConfiguration: Option[JsValue])(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[Unit] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) ?~> Messages( + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(GlobalAccessContext) ?~> Messages( "dataset.notFound", - datasetName) + datasetNameAndId) layerMap = layerConfiguration.flatMap(_.asOpt[Map[String, JsValue]]).getOrElse(Map.empty) _ <- Fox.serialCombined(layerMap.toList) { case (name, config) => diff --git a/app/opengraph/OpenGraphService.scala b/app/opengraph/OpenGraphService.scala index 09429e98321..0884fc0863a 100644 --- a/app/opengraph/OpenGraphService.scala +++ b/app/opengraph/OpenGraphService.scala @@ -117,7 +117,7 @@ class OpenGraphService @Inject()(datasetDAO: DatasetDAO, organization <- organizationDAO.findOne(dataset._organization) } yield OpenGraphTags( - Some(s"${dataset.displayName.getOrElse(datasetName)} | 
WEBKNOSSOS"), + Some(s"${dataset.name} | WEBKNOSSOS"), Some("View this dataset in WEBKNOSSOS"), thumbnailUri(dataset, layerOpt, organization, token) ) @@ -136,8 +136,8 @@ class OpenGraphService @Inject()(datasetDAO: DatasetDAO, layerOpt = layers.find(_.category == Category.color) } yield OpenGraphTags( - Some(s"${annotation.nameOpt.orElse(dataset.displayName).getOrElse(dataset.name)} | WEBKNOSSOS"), - Some(s"View this annotation on dataset ${dataset.displayName.getOrElse(dataset.name)} in WEBKNOSSOS"), + Some(s"${annotation.nameOpt.getOrElse(dataset.name)} | WEBKNOSSOS"), + Some(s"View this annotation on dataset ${dataset.name} in WEBKNOSSOS"), thumbnailUri(dataset, layerOpt, organization, token) ) case _ => Fox.failure("not a matching uri") diff --git a/app/utils/ObjectId.scala b/app/utils/ObjectId.scala index e787182aaf2..4b3c0c7ad6e 100644 --- a/app/utils/ObjectId.scala +++ b/app/utils/ObjectId.scala @@ -18,7 +18,7 @@ object ObjectId extends FoxImplicits { def fromCommaSeparated(idsStrOpt: Option[String])(implicit ec: ExecutionContext): Fox[List[ObjectId]] = parseCommaSeparated(idsStrOpt)(fromString) private def fromBsonId(bson: BSONObjectID) = ObjectId(bson.stringify) - private def fromStringSync(input: String) = BSONObjectID.parse(input).map(fromBsonId).toOption + def fromStringSync(input: String): Option[ObjectId] = BSONObjectID.parse(input).map(fromBsonId).toOption def dummyId: ObjectId = ObjectId("dummyObjectId") implicit object ObjectIdFormat extends Format[ObjectId] { diff --git a/conf/evolutions/121-decouple-dataset-directory-from-name.sql b/conf/evolutions/121-decouple-dataset-directory-from-name.sql new file mode 100644 index 00000000000..3cebe517504 --- /dev/null +++ b/conf/evolutions/121-decouple-dataset-directory-from-name.sql @@ -0,0 +1,13 @@ +START TRANSACTION; + +do $$ begin ASSERT (select schemaVersion from webknossos.releaseInformation) = 120, 'Previous schema version mismatch'; end; $$ LANGUAGE plpgsql; + +UPDATE webknossos.datasets SET displayName = name WHERE displayName IS NULL; +ALTER TABLE webknossos.datasets RENAME COLUMN name TO path; +ALTER TABLE webknossos.datasets RENAME COLUMN displayName TO name; +ALTER TABLE webknossos.datasets ALTER COLUMN name SET NOT NULL; + + +UPDATE webknossos.releaseInformation SET schemaVersion = 121; + +COMMIT TRANSACTION; diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index a55bb92097d..7d66e94c57e 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -43,10 +43,10 @@ POST /auth/createUserInOrganization/:organizationId # Configurations GET /user/userConfiguration controllers.ConfigurationController.read() PUT /user/userConfiguration controllers.ConfigurationController.update() -POST /datasetConfigurations/:organizationId/:datasetName controllers.ConfigurationController.readDatasetViewConfiguration(organizationId: String, datasetName: String, sharingToken: Option[String]) -PUT /datasetConfigurations/:organizationId/:datasetName controllers.ConfigurationController.updateDatasetViewConfiguration(organizationId: String, datasetName: String) -GET /datasetConfigurations/default/:organizationId/:datasetName controllers.ConfigurationController.readDatasetAdminViewConfiguration(organizationId: String, datasetName: String) -PUT /datasetConfigurations/default/:organizationId/:datasetName controllers.ConfigurationController.updateDatasetAdminViewConfiguration(organizationId: String, datasetName: String) +POST /datasetConfigurations/:organizationId/:datasetNameAndId 
controllers.ConfigurationController.readDatasetViewConfiguration(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]) +PUT /datasetConfigurations/:organizationId/:datasetNameAndId controllers.ConfigurationController.updateDatasetViewConfiguration(organizationId: String, datasetNameAndId: String) +GET /datasetConfigurations/default/:organizationId/:datasetNameAndId controllers.ConfigurationController.readDatasetAdminViewConfiguration(organizationId: String, datasetNameAndId: String) +PUT /datasetConfigurations/default/:organizationId/:datasetNameAndId controllers.ConfigurationController.updateDatasetAdminViewConfiguration(organizationId: String, datasetNameAndId: String) # Users POST /user/tasks/request controllers.TaskController.request() @@ -73,25 +73,25 @@ GET /teams/:id/availableTasksReport GET /teams/:id/projectProgressReport controllers.ReportController.projectProgressReport(id: String) # Datasets -POST /datasets/:organizationId/:datasetName/createExplorational controllers.AnnotationController.createExplorational(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName/sandbox/:typ controllers.AnnotationController.getSandbox(organizationId: String, datasetName: String, typ: String, sharingToken: Option[String]) +POST /datasets/:organizationId/:datasetNameAndId/createExplorational controllers.AnnotationController.createExplorational(organizationId: String, datasetNameAndId: String) +GET /datasets/:organizationId/:datasetNameAndId/sandbox/:typ controllers.AnnotationController.getSandbox(organizationId: String, datasetNameAndId: String, typ: String, sharingToken: Option[String]) GET /datasets controllers.DatasetController.list(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationId: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) POST /datasets controllers.DatasetController.create(typ: String) POST /datasets/exploreRemote controllers.DatasetController.exploreRemoteDataset() POST /datasets/exploreAndAddRemote controllers.DatasetController.exploreAndAddRemoteDataset() -GET /datasets/disambiguate/:datasetName/toNew controllers.DatasetController.getOrganizationForDataset(datasetName: String) -GET /datasets/:organizationId/:datasetName/health controllers.DatasetController.health(organizationId: String, datasetName: String, sharingToken: Option[String]) -PATCH /datasets/:organizationId/:datasetName controllers.DatasetController.update(organizationId: String, datasetName: String) -PATCH /datasets/:organizationId/:datasetName/updatePartial controllers.DatasetController.updatePartial(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName/accessList controllers.DatasetController.accessList(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName/sharingToken controllers.DatasetController.getSharingToken(organizationId: String, datasetName: String) -DELETE /datasets/:organizationId/:datasetName/sharingToken controllers.DatasetController.deleteSharingToken(organizationId: String, datasetName: String) -PATCH /datasets/:organizationId/:datasetName/teams controllers.DatasetController.updateTeams(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName/layers/:layer/thumbnail controllers.DatasetController.thumbnail(organizationId: String, datasetName: String, layer: 
String, w: Option[Int], h: Option[Int], mappingName: Option[String], sharingToken: Option[String]) -POST /datasets/:organizationId/:datasetName/layers/:layer/segmentAnythingMask controllers.DatasetController.segmentAnythingMask(organizationId: String, datasetName: String, layer: String, intensityMin: Option[Float], intensityMax: Option[Float]) -PUT /datasets/:organizationId/:datasetName/clearThumbnailCache controllers.DatasetController.removeFromThumbnailCache(organizationId: String, datasetName: String) +GET /datasets/disambiguate/:datasetNameAndId/toNew controllers.DatasetController.getOrganizationForDataset(datasetNameAndId: String) +GET /datasets/:organizationId/:datasetNameAndId/health controllers.DatasetController.health(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]) +PATCH /datasets/:organizationId/:datasetNameAndId controllers.DatasetController.update(organizationId: String, datasetNameAndId: String) +PATCH /datasets/:organizationId/:datasetNameAndId/updatePartial controllers.DatasetController.updatePartial(organizationId: String, datasetNameAndId: String) +GET /datasets/:organizationId/:datasetNameAndId/accessList controllers.DatasetController.accessList(organizationId: String, datasetNameAndId: String) +GET /datasets/:organizationId/:datasetNameAndId/sharingToken controllers.DatasetController.getSharingToken(organizationId: String, datasetNameAndId: String) +DELETE /datasets/:organizationId/:datasetNameAndId/sharingToken controllers.DatasetController.deleteSharingToken(organizationId: String, datasetNameAndId: String) +PATCH /datasets/:organizationId/:datasetNameAndId/teams controllers.DatasetController.updateTeams(organizationId: String, datasetNameAndId: String) +GET /datasets/:organizationId/:datasetNameAndId/layers/:layer/thumbnail controllers.DatasetController.thumbnail(organizationId: String, datasetNameAndId: String, layer: String, w: Option[Int], h: Option[Int], mappingName: Option[String], sharingToken: Option[String]) +POST /datasets/:organizationId/:datasetNameAndId/layers/:layer/segmentAnythingMask controllers.DatasetController.segmentAnythingMask(organizationId: String, datasetNameAndId: String, layer: String, intensityMin: Option[Float], intensityMax: Option[Float]) +PUT /datasets/:organizationId/:datasetNameAndId/clearThumbnailCache controllers.DatasetController.removeFromThumbnailCache(organizationId: String, datasetNameAndId: String) GET /datasets/:organizationId/:datasetName/isValidNewName controllers.DatasetController.isValidNewName(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetName controllers.DatasetController.read(organizationId: String, datasetName: String, sharingToken: Option[String]) +GET /datasets/:organizationId/:datasetNameAndId controllers.DatasetController.read(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]) # Folders GET /folders/root controllers.FolderController.getRoot() @@ -109,7 +109,7 @@ PUT /datastores/:name/datasources PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String) POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String) GET /datastores/:name/getUnfinishedUploadsForUser controllers.WKRemoteDataStoreController.getUnfinishedUploadsForUser(name: String, key: String, token: String, organizationName: String) -POST /datastores/:name/reportDatasetUpload 
controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean) +POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean, path: Option[String]) POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String) GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: String) GET /datastores/:name/findCredential controllers.WKRemoteDataStoreController.findCredential(name: String, key: String, credentialId: String) @@ -125,7 +125,7 @@ POST /tracingstores/:name/validateUserAccess PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) GET /tracingstores/:name/dataSource controllers.WKRemoteTracingStoreController.dataSourceForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataSourceId controllers.WKRemoteTracingStoreController.dataSourceIdForTracing(name: String, key: String, tracingId: String) -GET /tracingstores/:name/dataStoreUri/:datasetName controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetName: String) +GET /tracingstores/:name/dataStoreUri/:datasetNameAndId controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetNameAndId: String) # User access tokens for datastore authentication POST /userToken/generate controllers.UserTokenController.generateTokenForDataStore() @@ -259,17 +259,17 @@ GET /time/overview GET /jobs/request controllers.WKRemoteWorkerController.requestJobs(key: String) GET /jobs controllers.JobController.list() GET /jobs/status controllers.JobController.status() -POST /jobs/run/convertToWkw/:organizationId/:datasetName controllers.JobController.runConvertToWkwJob(organizationId: String, datasetName: String, scale: String, unit: Option[String]) -POST /jobs/run/computeMeshFile/:organizationId/:datasetName controllers.JobController.runComputeMeshFileJob(organizationId: String, datasetName: String, layerName: String, mag: String, agglomerateView: Option[String]) -POST /jobs/run/computeSegmentIndexFile/:organizationId/:datasetName controllers.JobController.runComputeSegmentIndexFileJob(organizationId: String, datasetName: String, layerName: String) -POST /jobs/run/exportTiff/:organizationId/:datasetName controllers.JobController.runExportTiffJob(organizationId: String, datasetName: String, bbox: String, layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean) -POST /jobs/run/inferNuclei/:organizationId/:datasetName controllers.JobController.runInferNucleiJob(organizationId: String, datasetName: String, layerName: String, newDatasetName: String) -POST /jobs/run/inferNeurons/:organizationId/:datasetName controllers.JobController.runInferNeuronsJob(organizationId: String, datasetName: String, layerName: String, bbox: String, newDatasetName: String) -POST /jobs/run/inferMitochondria/:organizationId/:datasetName controllers.JobController.runInferMitochondriaJob(organizationId: String, datasetName: String, layerName: String, bbox: String, newDatasetName: String) 
-POST /jobs/run/alignSections/:organizationId/:datasetName controllers.JobController.runAlignSectionsJob(organizationId: String, datasetName: String, layerName: String, newDatasetName: String, annotationId: Option[String]) -POST /jobs/run/materializeVolumeAnnotation/:organizationId/:datasetName controllers.JobController.runMaterializeVolumeAnnotationJob(organizationId: String, datasetName: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String]) -POST /jobs/run/findLargestSegmentId/:organizationId/:datasetName controllers.JobController.runFindLargestSegmentIdJob(organizationId: String, datasetName: String, layerName: String) -POST /jobs/run/renderAnimation/:organizationId/:datasetName controllers.JobController.runRenderAnimationJob(organizationId: String, datasetName: String) +POST /jobs/run/convertToWkw/:organizationId/:datasetNameAndId controllers.JobController.runConvertToWkwJob(organizationId: String, datasetNameAndId: String, scale: String, unit: Option[String]) +POST /jobs/run/computeMeshFile/:organizationId/:datasetNameAndId controllers.JobController.runComputeMeshFileJob(organizationId: String, datasetNameAndId: String, layerName: String, mag: String, agglomerateView: Option[String]) +POST /jobs/run/computeSegmentIndexFile/:organizationId/:datasetNameAndId controllers.JobController.runComputeSegmentIndexFileJob(organizationId: String, datasetNameAndId: String, layerName: String) +POST /jobs/run/exportTiff/:organizationId/:datasetNameAndId controllers.JobController.runExportTiffJob(organizationId: String, datasetNameAndId: String, bbox: String, layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean) +POST /jobs/run/inferNuclei/:organizationId/:datasetNameAndId controllers.JobController.runInferNucleiJob(organizationId: String, datasetNameAndId: String, layerName: String, newDatasetName: String) +POST /jobs/run/inferNeurons/:organizationId/:datasetNameAndId controllers.JobController.runInferNeuronsJob(organizationId: String, datasetNameAndId: String, layerName: String, bbox: String, newDatasetName: String) +POST /jobs/run/inferMitochondria/:organizationId/:datasetNameAndId controllers.JobController.runInferMitochondriaJob(organizationId: String, datasetNameAndId: String, layerName: String, bbox: String, newDatasetName: String) +POST /jobs/run/alignSections/:organizationId/:datasetNameAndId controllers.JobController.runAlignSectionsJob(organizationId: String, datasetNameAndId: String, layerName: String, newDatasetName: String, annotationId: Option[String]) +POST /jobs/run/materializeVolumeAnnotation/:organizationId/:datasetNameAndId controllers.JobController.runMaterializeVolumeAnnotationJob(organizationId: String, datasetNameAndId: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String]) +POST /jobs/run/findLargestSegmentId/:organizationId/:datasetNameAndId controllers.JobController.runFindLargestSegmentIdJob(organizationId: String, datasetNameAndId: String, layerName: String) +POST /jobs/run/renderAnimation/:organizationId/:datasetNameAndId controllers.JobController.runRenderAnimationJob(organizationId: String, datasetNameAndId: String) GET /jobs/:id controllers.JobController.get(id: String) PATCH /jobs/:id/cancel 
controllers.JobController.cancel(id: String) POST /jobs/:id/status controllers.WKRemoteWorkerController.updateJobStatus(key: String, id: String) diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx index 7567b7fe756..35ebe7f483e 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx @@ -183,7 +183,7 @@ class DatasetSettingsView extends React.PureComponent )}{" "} - {selectedDataset.displayName || selectedDataset.name} + {selectedDataset.name} {renderOrganization()} diff --git a/frontend/javascripts/dashboard/publication_card.tsx b/frontend/javascripts/dashboard/publication_card.tsx index 2ff15ff2b9e..9d8b15ec44f 100644 --- a/frontend/javascripts/dashboard/publication_card.tsx +++ b/frontend/javascripts/dashboard/publication_card.tsx @@ -47,9 +47,7 @@ function getDisplayName(item: PublicationItem): string { ? "Unnamed annotation" : item.annotation.name; } - return item.dataset.displayName == null || item.dataset.displayName === "" - ? item.dataset.name - : item.dataset.displayName; + return item.dataset.name; } function getExtendedDetails(item: PublicationItem): ExtendedDatasetDetails { diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 298aa359a17..515b20f0f31 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -148,7 +148,7 @@ const defaultState: OxalisState = { }, owningOrganization: "", description: null, - displayName: "Loading", + path: "Loading", allowedTeams: [], allowedTeamsCumulative: [], logoUrl: null, diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index 8baf46f4507..269ed36697b 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -313,7 +313,6 @@ export class DatasetInfoTabView extends React.PureComponent { getDatasetName() { const { name: datasetName, - displayName, description: datasetDescription, owningOrganization, } = this.props.dataset; @@ -342,7 +341,7 @@ export class DatasetInfoTabView extends React.PureComponent { }} > - {displayName || datasetName} + {datasetName} {getEditSettingsIcon()} diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index ccdc0e76b32..f5100c1bc59 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -196,7 +196,7 @@ type MutableAPIDatasetBase = MutableAPIDatasetId & { metadata: APIMetadataEntries | null | undefined; isEditable: boolean; isPublic: boolean; - displayName: string | null | undefined; + path: string; logoUrl: string | null | undefined; lastUsedByUser: number; sortingKey: number; @@ -230,7 +230,7 @@ export type APIDatasetCompactWithoutStatusAndLayerNames = Pick< | "name" | "folderId" | "isActive" - | "displayName" + | "path" | "created" | "isEditable" | "lastUsedByUser" @@ -255,7 +255,7 @@ export function convertDatasetToCompact(dataset: APIDataset): APIDatasetCompact name: dataset.name, folderId: dataset.folderId, isActive: dataset.isActive, - displayName: dataset.displayName, + path: dataset.path, created: dataset.created, isEditable: dataset.isEditable, lastUsedByUser: 
dataset.lastUsedByUser, diff --git a/tools/postgres/schema.sql b/tools/postgres/schema.sql index e0bb4f36260..0c467df5709 100644 --- a/tools/postgres/schema.sql +++ b/tools/postgres/schema.sql @@ -113,10 +113,11 @@ CREATE TABLE webknossos.datasets( defaultViewConfiguration JSONB, adminViewConfiguration JSONB, description TEXT, - displayName VARCHAR(256), + name VARCHAR(256) NOT NULL, + -- oldName VARCHAR(256), isPublic BOOLEAN NOT NULL DEFAULT false, isUsable BOOLEAN NOT NULL DEFAULT false, - name VARCHAR(256) NOT NULL, + path VARCHAR(256) NOT NULL, voxelSizeFactor webknossos.VECTOR3, voxelSizeUnit webknossos.LENGTH_UNIT, status VARCHAR(1024) NOT NULL DEFAULT '', From d34bdeb20b0152032b27804a90b50639e30f7ca5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 16 Sep 2024 17:13:19 +0200 Subject: [PATCH 002/129] reimplement proper dataset name checking route (still leaving out the check for already existing names) --- app/controllers/DatasetController.scala | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index 6c6f2dd9554..63b9937a20a 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -19,6 +19,7 @@ import models.folder.FolderService import models.organization.OrganizationDAO import models.team.{TeamDAO, TeamService} import models.user.{User, UserDAO, UserService} +import net.liftweb.common.{Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.functional.syntax._ import play.api.libs.json._ @@ -386,7 +387,16 @@ class DatasetController @Inject()(userService: UserService, def isValidNewName(organizationId: String, datasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => - Fox.successful(Ok(Json.obj("isValid" -> true))) + for { + organization <- organizationDAO.findOne(organizationId) + _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN + validName <- datasetService.assertValidDatasetName(datasetName).futureBox + } yield + validName match { + case Full(e) => Ok(Json.obj("isValid" -> true)) + case Failure(msg, _, _) => Ok(Json.obj("isValid" -> false, "errors" -> Messages(msg))) + case _ => Ok(Json.obj("isValid" -> false, "errors" -> List("Unknown error"))) + } } def getOrganizationForDataset(datasetNameAndId: String): Action[AnyContent] = sil.UserAwareAction.async { From 49432a9f0d561ac2fe43ff3dea25316d94d5e926 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 17 Sep 2024 17:58:30 +0200 Subject: [PATCH 003/129] WIP: implement wk core backend routes to only use datasetId and no orgaId - Includes moving ObjectId to utils package --- app/controllers/AiModelController.scala | 2 +- app/controllers/AnnotationController.scala | 28 ++-- .../AnnotationPrivateLinkController.scala | 2 +- app/controllers/ConfigurationController.scala | 54 ++++--- app/controllers/CredentialController.scala | 2 +- app/controllers/DatasetController.scala | 146 ++++++++++-------- app/controllers/JobController.scala | 141 ++++++++--------- app/controllers/LegacyApiController.scala | 2 +- app/controllers/MaintenanceController.scala | 2 +- app/controllers/MeshController.scala | 2 +- app/controllers/ProjectController.scala | 2 +- app/controllers/PublicationController.scala | 2 +- app/controllers/ReportController.scala | 2 +- app/controllers/ScriptController.scala | 2 +- app/controllers/TaskController.scala | 2
+- app/controllers/TaskTypeController.scala | 2 +- app/controllers/TeamController.scala | 2 +- app/controllers/TimeController.scala | 2 +- app/controllers/UserController.scala | 2 +- app/controllers/UserTokenController.scala | 4 +- .../WKRemoteDataStoreController.scala | 16 +- .../WKRemoteTracingStoreController.scala | 14 +- app/models/aimodels/AiInference.scala | 2 +- app/models/aimodels/AiModel.scala | 2 +- app/models/analytics/AnalyticsDAO.scala | 2 +- app/models/analytics/AnalyticsEvent.scala | 2 +- app/models/annotation/Annotation.scala | 2 +- .../annotation/AnnotationIdentifier.scala | 2 +- .../AnnotationInformationProvider.scala | 2 +- app/models/annotation/AnnotationMerger.scala | 2 +- .../annotation/AnnotationPrivateLink.scala | 2 +- app/models/annotation/AnnotationService.scala | 31 +--- .../AnnotationInformationHandler.scala | 2 +- .../handler/ProjectInformationHandler.scala | 2 +- .../SavedTracingInformationHandler.scala | 2 +- .../handler/TaskInformationHandler.scala | 2 +- .../handler/TaskTypeInformationHandler.scala | 2 +- .../DatasetConfigurationService.scala | 15 +- app/models/dataset/Dataset.scala | 53 +++---- app/models/dataset/DatasetService.scala | 30 ++-- app/models/dataset/Publication.scala | 2 +- app/models/dataset/ThumbnailService.scala | 33 ++-- .../dataset/WKRemoteDataStoreClient.scala | 18 +-- .../dataset/credential/CredentialDAO.scala | 2 +- .../credential/CredentialService.scala | 10 +- .../explore/WKExploreRemoteLayerService.scala | 2 +- app/models/folder/Folder.scala | 2 +- app/models/job/Job.scala | 2 +- app/models/mesh/Mesh.scala | 2 +- app/models/organization/Organization.scala | 2 +- app/models/project/Project.scala | 2 +- app/models/shortlinks/ShortLink.scala | 2 +- app/models/task/Script.scala | 2 +- app/models/task/Task.scala | 2 +- app/models/task/TaskCreationService.scala | 2 +- app/models/task/TaskType.scala | 2 +- app/models/team/Team.scala | 2 +- app/models/team/TeamMembership.scala | 3 +- app/models/user/EmailVerificationKey.scala | 2 +- app/models/user/MultiUser.scala | 2 +- app/models/user/User.scala | 2 +- app/models/user/UserService.scala | 8 +- app/models/user/time/TimeSpan.scala | 2 +- app/models/voxelytics/VoxelyticsDAO.scala | 2 +- app/models/voxelytics/VoxelyticsService.scala | 2 +- app/security/Token.scala | 2 +- app/utils/sql/SQLDAO.scala | 3 +- app/utils/sql/SecuredSQLDAO.scala | 2 +- app/utils/sql/SqlInterpolation.scala | 2 +- app/utils/sql/SqlTypeImplicits.scala | 2 +- conf/messages | 1 + conf/webknossos.latest.routes | 64 ++++---- conf/webknossos.versioned.routes | 11 ++ test/backend/SqlInterpolationTestSuite.scala | 2 +- .../requestparsing/DatasetURIParser.scala | 14 ++ .../util/requestparsing}/ObjectId.scala | 2 +- .../controllers/BinaryDataController.scala | 110 +++++++------ .../controllers/DSMeshController.scala | 10 +- .../controllers/DataSourceController.scala | 58 +++---- .../controllers/ZarrStreamingController.scala | 28 ++-- .../DatasetArrayBucketProvider.scala | 4 +- .../dataformats/layers/N5DataLayers.scala | 2 +- .../layers/PrecomputedDataLayers.scala | 2 +- .../dataformats/layers/WKWDataLayers.scala | 2 +- .../dataformats/layers/Zarr3DataLayers.scala | 2 +- .../dataformats/layers/ZarrDataLayers.scala | 2 +- .../datastore/datareaders/DatasetArray.scala | 4 +- .../datastore/datareaders/n5/N5Array.scala | 6 +- .../precomputed/PrecomputedArray.scala | 18 +-- .../datastore/datareaders/wkw/WKWArray.scala | 6 +- .../datareaders/zarr/ZarrArray.scala | 6 +- .../datareaders/zarr3/Zarr3Array.scala | 6 +- 
.../explore/ExploreLocalLayerService.scala | 16 +- .../explore/ExploreRemoteLayerService.scala | 4 +- .../datastore/models/UnfinishedUpload.scala | 4 +- .../models/datasource/DataLayer.scala | 2 +- .../models/datasource/DataSource.scala | 32 +++- .../models/datasource/InboxDataSource.scala | 4 +- .../services/AccessTokenService.scala | 22 +-- .../services/BinaryDataService.scala | 10 +- .../services/DSRemoteWebknossosClient.scala | 17 +- .../services/DataSourceRepository.scala | 18 +-- .../services/DataSourceService.scala | 2 +- .../services/DatasetIdRepository.scala | 40 +++++ .../services/uploading/ComposeService.scala | 6 +- .../services/uploading/UploadService.scala | 22 +-- .../storage/BucketProviderCache.scala | 4 +- .../RemoteSourceDescriptorService.scala | 16 +- ....scalableminds.webknossos.datastore.routes | 6 +- .../TSRemoteWebknossosClient.scala | 8 +- .../tracings/RemoteFallbackLayer.scala | 4 +- .../EditableMappingLayer.scala | 4 +- .../tracings/volume/VolumeTracingLayer.scala | 2 +- .../volume/Zarr3BucketStreamSink.scala | 4 +- 114 files changed, 692 insertions(+), 628 deletions(-) create mode 100644 util/src/main/scala/com/scalableminds/util/requestparsing/DatasetURIParser.scala rename {app/utils => util/src/main/scala/com/scalableminds/util/requestparsing}/ObjectId.scala (96%) create mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala diff --git a/app/controllers/AiModelController.scala b/app/controllers/AiModelController.scala index 3a332504239..6500cc380bf 100644 --- a/app/controllers/AiModelController.scala +++ b/app/controllers/AiModelController.scala @@ -11,7 +11,7 @@ import play.api.libs.json.{Json, OFormat} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import play.silhouette.api.Silhouette import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 9074e9f6cfc..c97b84a08c7 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -4,14 +4,11 @@ import org.apache.pekko.util.Timeout import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.geometry.BoundingBox +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType -import com.scalableminds.webknossos.datastore.models.annotation.{ - AnnotationLayer, - AnnotationLayerStatistics, - AnnotationLayerType -} +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions @@ -242,15 +239,13 @@ class AnnotationController @Inject()( } yield result } - def createExplorational(organizationId: String, datasetNameAndId: String): Action[List[AnnotationLayerParameters]] = + def createExplorational(datasetId: String): Action[List[AnnotationLayerParameters]] = sil.SecuredAction.async(validateJson[List[AnnotationLayerParameters]]) { 
implicit request => for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( - "organization.notFound", - organizationId) ~> NOT_FOUND - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages( "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND + parsedDatasetId) ~> NOT_FOUND annotation <- annotationService.createExplorationalFor( request.identity, dataset._id, @@ -262,19 +257,16 @@ class AnnotationController @Inject()( } yield JsonOk(json) } - def getSandbox(organization: String, - datasetNameAndId: String, + def getSandbox(datasetId: String, typ: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) // users with dataset sharing token may also get a sandbox annotation for { - organization <- organizationDAO.findOne(organization)(GlobalAccessContext) ?~> Messages( - "organization.notFound", - organization) ~> NOT_FOUND - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id)(ctx) ?~> Messages( + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId)(ctx) ?~> Messages( "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND + parsedDatasetId) ~> NOT_FOUND tracingType <- TracingType.fromString(typ).toFox _ <- bool2Fox(tracingType == TracingType.skeleton) ?~> "annotation.sandbox.skeletonOnly" annotation = Annotation( diff --git a/app/controllers/AnnotationPrivateLinkController.scala b/app/controllers/AnnotationPrivateLinkController.scala index ea3765154fe..e0798490542 100644 --- a/app/controllers/AnnotationPrivateLinkController.scala +++ b/app/controllers/AnnotationPrivateLinkController.scala @@ -13,7 +13,7 @@ import models.annotation._ import net.liftweb.common.Full import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{WkEnv, WkSilhouetteEnvironment} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext class AnnotationPrivateLinkController @Inject()( diff --git a/app/controllers/ConfigurationController.scala b/app/controllers/ConfigurationController.scala index 98250dd0929..74dae0b3a0a 100755 --- a/app/controllers/ConfigurationController.scala +++ b/app/controllers/ConfigurationController.scala @@ -2,6 +2,9 @@ package controllers import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.GlobalAccessContext +import com.scalableminds.util.requestparsing.ObjectId +import com.scalableminds.util.tools.Fox + import javax.inject.Inject import models.dataset.{DatasetDAO, DatasetService} import models.configuration.DatasetConfigurationService @@ -33,52 +36,47 @@ class ConfigurationController @Inject()( } yield JsonOk(Messages("user.configuration.updated")) } - def readDatasetViewConfiguration(organizationId: String, - datasetNameAndId: String, - sharingToken: Option[String]): Action[List[String]] = + def readDatasetViewConfiguration(datasetId: String, sharingToken: Option[String]): Action[List[String]] = sil.UserAwareAction.async(validateJson[List[String]]) { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) - request.identity.toFox - .flatMap(user => - 
datasetConfigurationService.getDatasetViewConfigurationForUserAndDataset(request.body, - user, - datasetNameAndId, - organizationId)(GlobalAccessContext)) - .orElse( - datasetConfigurationService.getDatasetViewConfigurationForDataset(request.body, - datasetNameAndId, - organizationId)(ctx) - ) - .getOrElse(Map.empty) - .map(configuration => Ok(Json.toJson(configuration))) + for { + parsedId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" + configuration <- request.identity.toFox + .flatMap(user => + datasetConfigurationService.getDatasetViewConfigurationForUserAndDataset(request.body, user, parsedId)( + GlobalAccessContext)) + .orElse( + datasetConfigurationService.getDatasetViewConfigurationForDataset(request.body, parsedId)(ctx) + ) + .getOrElse(Map.empty) + } yield Ok(Json.toJson(configuration)) } - def updateDatasetViewConfiguration(organizationId: String, datasetNameAndId: String): Action[JsValue] = + def updateDatasetViewConfiguration(datasetId: String): Action[JsValue] = sil.SecuredAction.async(parse.json(maxLength = 20480)) { implicit request => for { jsConfiguration <- request.body.asOpt[JsObject] ?~> "user.configuration.dataset.invalid" + parsedId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" conf = jsConfiguration.fields.toMap datasetConf = conf - "layers" layerConf = conf.get("layers") - _ <- userService.updateDatasetViewConfiguration(request.identity, - datasetNameAndId, - organizationId, - datasetConf, - layerConf) + _ <- userService.updateDatasetViewConfiguration(request.identity, parsedId, datasetConf, layerConf) } yield JsonOk(Messages("user.configuration.dataset.updated")) } - def readDatasetAdminViewConfiguration(organizationId: String, datasetNameAndId: String): Action[AnyContent] = + def readDatasetAdminViewConfiguration(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => - datasetConfigurationService - .getCompleteAdminViewConfiguration(datasetNameAndId, organizationId) - .map(configuration => Ok(Json.toJson(configuration))) + for { + parsedId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" + configuration <- datasetConfigurationService.getCompleteAdminViewConfiguration(parsedId) + } yield Ok(Json.toJson(configuration)) } - def updateDatasetAdminViewConfiguration(organizationId: String, datasetNameAndId: String): Action[JsValue] = + def updateDatasetAdminViewConfiguration(datasetNameAndId: String): Action[JsValue] = sil.SecuredAction.async(parse.json(maxLength = 20480)) { implicit request => for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(GlobalAccessContext) + parsedId <- ObjectId.fromString(datasetNameAndId) ?~> "Invalid dataset id" + dataset <- datasetDAO.findOne(parsedId)(GlobalAccessContext) _ <- datasetService.isEditableBy(dataset, Some(request.identity)) ?~> "notAllowed" ~> FORBIDDEN jsObject <- request.body.asOpt[JsObject].toFox ?~> "user.configuration.dataset.invalid" _ <- datasetConfigurationService.updateAdminViewConfigurationFor(dataset, jsObject.fields.toMap) diff --git a/app/controllers/CredentialController.scala b/app/controllers/CredentialController.scala index 786d3b4ddb4..c41798cf38f 100644 --- a/app/controllers/CredentialController.scala +++ b/app/controllers/CredentialController.scala @@ -11,7 +11,7 @@ import models.dataset.credential.CredentialDAO import play.api.libs.json.{JsValue, Json, OFormat} import play.api.mvc.{Action, PlayBodyParsers} import security.WkEnv -import utils.ObjectId +import 
com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index 63b9937a20a..0625a49e1c8 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -3,6 +3,7 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.enumeration.ExtendedEnumeration import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, TristateOptionJsonHelper} import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate @@ -26,7 +27,7 @@ import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import play.silhouette.api.Silhouette import security.{URLSharing, WkEnv} -import utils.{MetadataAssertions, ObjectId, WkConf} +import utils.{MetadataAssertions, WkConf} import javax.inject.Inject import scala.concurrent.{ExecutionContext, Future} @@ -100,14 +101,15 @@ class DatasetController @Inject()(userService: UserService, (__ \ "metadata").readNullable[JsArray] and (__ \ "folderId").readNullable[ObjectId]).tupled - def removeFromThumbnailCache(organizationId: String, datasetNameAndId: String): Action[AnyContent] = - sil.SecuredAction { - thumbnailCachingService.removeFromCache(organizationId, datasetNameAndId) - Ok + def removeFromThumbnailCache(datasetId: String): Action[AnyContent] = + sil.SecuredAction.async { implicit request => + for { + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + _ <- thumbnailCachingService.removeFromCache(parsedDatasetId) + } yield Ok } - def thumbnail(organizationId: String, - datasetNameAndId: String, + def thumbnail(datasetId: String, dataLayerName: String, w: Option[Int], h: Option[Int], @@ -116,10 +118,9 @@ class DatasetController @Inject()(userService: UserService, sil.UserAwareAction.async { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) for { - _ <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(ctx) ?~> notFoundMessage( - datasetNameAndId) ~> NOT_FOUND // To check Access Rights - image <- thumbnailService.getThumbnailWithCache(organizationId, - datasetNameAndId, + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + _ <- datasetDAO.findOne(parsedDatasetId)(ctx) ?~> notFoundMessage(datasetId) ~> NOT_FOUND // To check Access Rights + image <- thumbnailService.getThumbnailWithCache(parsedDatasetId, dataLayerName, w, h, @@ -244,33 +245,29 @@ class DatasetController @Inject()(userService: UserService, } } yield js.flatten - def accessList(organizationId: String, datasetNameAndId: String): Action[AnyContent] = sil.SecuredAction.async { - implicit request => - for { - organization <- organizationDAO.findOne(organizationId) - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> notFoundMessage( - datasetNameAndId) ~> NOT_FOUND - allowedTeams <- teamService.allowedTeamIdsForDataset(dataset, cumulative = true) ?~> "allowedTeams.notFound" - usersByTeams <- userDAO.findAllByTeams(allowedTeams) - adminsAndDatasetManagers <- userDAO.findAdminsAndDatasetManagersByOrg(organization._id) - usersFiltered = (usersByTeams ++ 
adminsAndDatasetManagers).distinct.filter(!_.isUnlisted) - usersJs <- Fox.serialCombined(usersFiltered)(u => userService.compactWrites(u)) - } yield Ok(Json.toJson(usersJs)) + def accessList(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => + for { + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) + allowedTeams <- teamService.allowedTeamIdsForDataset(dataset, cumulative = true) ?~> "allowedTeams.notFound" + usersByTeams <- userDAO.findAllByTeams(allowedTeams) + adminsAndDatasetManagers <- userDAO.findAdminsAndDatasetManagersByOrg(organization._id) + usersFiltered = (usersByTeams ++ adminsAndDatasetManagers).distinct.filter(!_.isUnlisted) + usersJs <- Fox.serialCombined(usersFiltered)(u => userService.compactWrites(u)) + } yield Ok(Json.toJson(usersJs)) } - def read(organizationId: String, - datasetNameAndId: String, + def read(datasetId: String, // Optional sharing token allowing access to datasets your team does not normally have access to.") sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => log() { val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( - "organization.notFound", - organizationId) - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id)(ctx) ?~> notFoundMessage( - datasetNameAndId) ~> NOT_FOUND + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId)(ctx) ?~> notFoundMessage(datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ~> NOT_FOUND _ <- Fox.runOptional(request.identity)(user => datasetLastUsedTimesDAO.updateForDatasetAndUser(dataset._id, user._id)) // Access checked above via dataset. 
In case of shared dataset/annotation, show datastore even if not otherwise accessible @@ -289,44 +286,52 @@ class DatasetController @Inject()(userService: UserService, } } - def health(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]): Action[AnyContent] = + def health(datasetId: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(ctx) ?~> notFoundMessage( - datasetNameAndId) ~> NOT_FOUND + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId)(ctx) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND dataSource <- datasetService.dataSourceFor(dataset) ?~> "dataSource.notFound" ~> NOT_FOUND usableDataSource <- dataSource.toUsable.toFox ?~> "dataset.notImported" datalayer <- usableDataSource.dataLayers.headOption.toFox ?~> "dataset.noLayers" _ <- datasetService .clientFor(dataset)(GlobalAccessContext) - .flatMap(_.findPositionWithData(organizationId, dataset, datalayer.name).flatMap(posWithData => + .flatMap(_.findPositionWithData(dataset, datalayer.name).flatMap(posWithData => bool2Fox(posWithData.value("position") != JsNull))) ?~> "dataset.loadingDataFailed" } yield Ok("Ok") } - def updatePartial(organizationId: String, datasetNameAndId: String): Action[DatasetUpdateParameters] = + // TODO: Maybe no longer needed. Remove? + def resolveDatasetNameToId(organizationId: String, datasetName: String): Action[AnyContent] = + sil.UserAwareAction.async { implicit request => + for { + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND + } yield Ok(Json.obj("datasetId" -> dataset._id)) + } + + def updatePartial(datasetId: String): Action[DatasetUpdateParameters] = sil.SecuredAction.async(validateJson[DatasetUpdateParameters]) { implicit request => for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, request.identity._organization) ?~> notFoundMessage( - datasetNameAndId) ~> NOT_FOUND + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN _ <- Fox.runOptional(request.body.metadata)(assertNoDuplicateMetadataKeys) _ <- datasetDAO.updatePartial(dataset._id, request.body) - updated <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, request.identity._organization) + updated <- datasetDAO.findOne(parsedDatasetId) _ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated)) js <- datasetService.publicWrites(updated, Some(request.identity)) } yield Ok(js) } // Note that there exists also updatePartial (which will only expect the changed fields) - def update(organizationId: String, datasetNameAndId: String): Action[JsValue] = + def update(datasetNameAndId: String): Action[JsValue] = sil.SecuredAction.async(parse.json) { implicit request => withJsonBodyUsing(datasetPublicReads) { case (description, displayName, sortingKey, isPublic, tags, metadata, folderId) => for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, request.identity._organization) ?~> notFoundMessage( - 
datasetNameAndId) ~> NOT_FOUND + parsedDatasetId <- ObjectId.fromString(datasetNameAndId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND maybeUpdatedMetadata = metadata.getOrElse(dataset.metadata) _ <- assertNoDuplicateMetadataKeys(maybeUpdatedMetadata) _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN @@ -340,18 +345,18 @@ class DatasetController @Inject()(userService: UserService, maybeUpdatedMetadata, folderId.getOrElse(dataset._folder) ) - updated <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, request.identity._organization) + updated <- datasetDAO.findOne(parsedDatasetId) _ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated)) js <- datasetService.publicWrites(updated, Some(request.identity)) } yield Ok(Json.toJson(js)) } } - def updateTeams(organizationId: String, datasetNameAndId: String): Action[List[ObjectId]] = + def updateTeams(datasetId: String): Action[List[ObjectId]] = sil.SecuredAction.async(validateJson[List[ObjectId]]) { implicit request => for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) ?~> notFoundMessage( - datasetNameAndId) ~> NOT_FOUND + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN includeMemberOnlyTeams = request.identity.isDatasetManager userTeams <- if (includeMemberOnlyTeams) teamDAO.findAll else teamDAO.findAllEditable @@ -363,21 +368,23 @@ class DatasetController @Inject()(userService: UserService, } yield Ok(Json.toJson(newTeams)) } - def getSharingToken(organizationId: String, datasetNameAndId: String): Action[AnyContent] = + def getSharingToken(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId) - _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN - token <- datasetService.getSharingToken(datasetNameAndId, organization._id) + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND + _ <- bool2Fox(dataset._organization == request.identity._organization) ~> FORBIDDEN + token <- datasetService.getSharingToken(dataset._id) } yield Ok(Json.obj("sharingToken" -> token.trim)) } - def deleteSharingToken(organizationId: String, datasetNameAndId: String): Action[AnyContent] = + def deleteSharingToken(datasetId: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId) - _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN - _ <- datasetDAO.updateSharingTokenByIdOrName(datasetNameAndId, organization._id, None) + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND + _ <- bool2Fox(dataset._organization == request.identity._organization) ~> FORBIDDEN + _ <- datasetDAO.updateSharingTokenById(dataset._id, None) } yield Ok } @@ -385,46 +392,56 @@ class 
DatasetController @Inject()(userService: UserService, Future.successful(JsonBadRequest(Messages("dataset.type.invalid", typ))) } - def isValidNewName(organizationId: String, datasetName: String): Action[AnyContent] = + def isValidNewName(datasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId) - _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN validName <- datasetService.assertValidDatasetName(datasetName).futureBox } yield validName match { - case Full(e) => Ok(Json.obj("isValid" -> true)) + case Full(_) => Ok(Json.obj("isValid" -> true)) case Failure(msg, _, _) => Ok(Json.obj("isValid" -> false, "errors" -> Messages(msg))) case _ => Ok(Json.obj("isValid" -> false, "errors" -> List("Unknown error"))) } } - def getOrganizationForDataset(datasetNameAndId: String): Action[AnyContent] = sil.UserAwareAction.async { + def getOrganizationForDataset(datasetName: String): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - organizationId <- datasetDAO.getOrganizationIdForDataset(datasetNameAndId) - organization <- organizationDAO.findOne(organizationId) + organizationId <- datasetDAO.getOrganizationIdForDataset(datasetName) + organization <- organizationDAO + .findOne(organizationId) // TODO: Check if this is necessary, is this needed to ensure that the orga still exists? } yield Ok(Json.obj("organization" -> organization._id)) } + def getDatasetIdFromNameAndOrganization(datasetName: String, organizationId: String): Action[AnyContent] = + sil.UserAwareAction.async { implicit request => + for { + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND + } yield + Ok( + Json.obj("id" -> dataset._id, + "name" -> dataset.name, + "organization" -> dataset._organization, + "path" -> dataset.path)) + } + private def notFoundMessage(datasetName: String)(implicit ctx: DBAccessContext, m: MessagesProvider): String = ctx.data match { case Some(_: User) => Messages("dataset.notFound", datasetName) case _ => Messages("dataset.notFoundConsiderLogin", datasetName) } - def segmentAnythingMask(organizationId: String, - datasetNameAndId: String, + def segmentAnythingMask(datasetId: String, dataLayerName: String, intensityMin: Option[Float], intensityMax: Option[Float]): Action[SegmentAnythingMaskParameters] = sil.SecuredAction.async(validateJson[SegmentAnythingMaskParameters]) { implicit request => log() { for { + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND _ <- bool2Fox(conf.Features.segmentAnythingEnabled) ?~> "segmentAnything.notEnabled" _ <- bool2Fox(conf.SegmentAnything.uri.nonEmpty) ?~> "segmentAnything.noUri" - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) ?~> notFoundMessage( - datasetNameAndId) ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(datasetId) ~> NOT_FOUND dataSource <- datasetService.dataSourceFor(dataset) ?~> "dataSource.notFound" ~> NOT_FOUND usableDataSource <- dataSource.toUsable ?~> "dataset.notImported" dataLayer <- usableDataSource.dataLayers.find(_.name == dataLayerName) ?~> "dataset.noLayers" @@ -442,7 +459,6 @@ class DatasetController @Inject()(userService: UserService, request.body.pointX.isDefined && request.body.pointY.isDefined)) ?~> "Missing pointX and pointY parameters for point interaction."
beforeDataLoading = Instant.now data <- datastoreClient.getLayerData( - organizationId, dataset, dataLayer.name, request.body.surroundingBoundingBox, diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala index 692ce0e8fd2..84cfa5bf4f2 100644 --- a/app/controllers/JobController.scala +++ b/app/controllers/JobController.scala @@ -13,12 +13,13 @@ import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{WkEnv, WkSilhouetteEnvironment} import telemetry.SlackNotificationService -import utils.{ObjectId, WkConf} +import utils.WkConf import java.util.Date import javax.inject.Inject import scala.concurrent.ExecutionContext import com.scalableminds.util.enumeration.ExtendedEnumeration +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize} import models.team.PricingPlan @@ -109,24 +110,21 @@ class JobController @Inject()( } // Note that the dataset has to be registered by reserveUpload via the datastore first. - def runConvertToWkwJob(organizationId: String, - datasetNameAndId: String, - scale: String, - unit: Option[String]): Action[AnyContent] = + def runConvertToWkwJob(datasetId: String, scale: String, unit: Option[String]): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND voxelSizeFactor <- Vec3Double.fromUriLiteral(scale).toFox voxelSizeUnit <- Fox.runOptional(unit)(u => LengthUnit.fromString(u).toFox) voxelSize = VoxelSize.fromFactorAndUnitWithDefault(voxelSizeFactor, voxelSizeUnit) + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND command = JobCommand.convert_to_wkw commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> organization._id, "organization_display_name" -> organization.name, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset @@ -139,24 +137,22 @@ class JobController @Inject()( } } - def runComputeMeshFileJob(organizationId: String, - datasetNameAndId: String, + def runComputeMeshFileJob(datasetId: String, layerName: String, mag: String, agglomerateView: Option[String]): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", - organizationId) + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.meshFile.notAllowed.organization" 
~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.compute_mesh_file commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> organization._id, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "layer_name" -> layerName, @@ -168,21 +164,19 @@ class JobController @Inject()( } yield Ok(js) } - def runComputeSegmentIndexFileJob(organizationId: String, datasetNameAndId: String, layerName: String, - ): Action[AnyContent] = + def runComputeSegmentIndexFileJob(datasetId: String, layerName: String, ): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", - organizationId) + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.segmentIndexFile.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.compute_segment_index_file commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> dataset._organization, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "segmentation_layer_name" -> layerName, @@ -192,25 +186,23 @@ class JobController @Inject()( } yield Ok(js) } - def runInferNucleiJob(organizationId: String, - datasetNameAndId: String, + def runInferNucleiJob(datasetId: String, layerName: String, newDatasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", - organizationId) + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferNuclei.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.infer_nuclei commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> dataset._organization, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // 
TODOM: Adjust worker to use correct dataset "layer_name" -> layerName, @@ -222,26 +214,25 @@ class JobController @Inject()( } } - def runInferNeuronsJob(organizationId: String, - datasetNameAndId: String, + def runInferNeuronsJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferNeurons.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) multiUser <- multiUserDAO.findOne(request.identity._multiUser) _ <- Fox.runIf(!multiUser.isSuperUser)(jobService.assertBoundingBoxLimits(bbox, None)) command = JobCommand.infer_neurons commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> organization._id, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "new_dataset_name" -> newDatasetName, @@ -287,19 +278,18 @@ class JobController @Inject()( } } - def runAlignSectionsJob(organizationId: String, - datasetNameAndId: String, + def runAlignSectionsJob(datasetId: String, layerName: String, newDatasetName: String, annotationId: Option[String] = None): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.alignSections.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) _ <- Fox.runOptional(annotationId)(ObjectId.fromString) @@ -307,7 +297,7 @@ class JobController @Inject()( _ <- bool2Fox(multiUser.isSuperUser) ?~> "job.alignSections.notAllowed.onlySuperUsers" command = JobCommand.align_sections commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> organization._id, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "new_dataset_name" -> newDatasetName, @@ -320,8 +310,7 
@@ class JobController @Inject()( } } - def runExportTiffJob(organizationId: String, - datasetNameAndId: String, + def runExportTiffJob(datasetId: String, bbox: String, layerName: Option[String], mag: Option[String], @@ -331,9 +320,12 @@ class JobController @Inject()( sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( + "organization.notFound", + dataset._organization) + _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.meshFile.notAllowed.organization" ~> FORBIDDEN _ <- Fox.runOptional(layerName)(datasetService.assertValidLayerNameLax) _ <- Fox.runOptional(annotationLayerName)(datasetService.assertValidLayerNameLax) _ <- jobService.assertBoundingBoxLimits(bbox, mag) @@ -343,7 +335,7 @@ class JobController @Inject()( else s"${formatDateForFilename(new Date())}__${dataset.name}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.zip" commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> organization._id, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "bbox" -> bbox, @@ -359,8 +351,7 @@ class JobController @Inject()( } } - def runMaterializeVolumeAnnotationJob(organizationId: String, - datasetNameAndId: String, + def runMaterializeVolumeAnnotationJob(datasetId: String, fallbackLayerName: String, annotationId: String, annotationType: String, @@ -371,19 +362,18 @@ class JobController @Inject()( sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> Messages( "organization.notFound", - organizationId) + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.materializeVolumeAnnotation.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(fallbackLayerName) command = JobCommand.materialize_volume_annotation _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName) commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> organization._id, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "fallback_layer_name" -> fallbackLayerName, @@ -400,19 +390,20 @@ class JobController @Inject()( } } - def 
runFindLargestSegmentIdJob(organizationId: String, datasetNameAndId: String, layerName: String): Action[AnyContent] = + def runFindLargestSegmentIdJob(datasetId: String, + layerName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.findLargestSegmentId.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.find_largest_segment_id commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> organization._id, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "layer_name" -> layerName @@ -423,16 +414,16 @@ class JobController @Inject()( } } - def runRenderAnimationJob(organizationId: String, datasetNameAndId: String): Action[AnimationJobOptions] = + def runRenderAnimationJob(datasetId: String): Action[AnimationJobOptions] = sil.SecuredAction.async(validateJson[AnimationJobOptions]) { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) - userOrganization <- organizationDAO.findOne(request.identity._organization) + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.renderAnimation.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( - "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND + userOrganization <- organizationDAO.findOne(request.identity._organization) animationJobOptions = request.body _ <- Fox.runIf(userOrganization.pricingPlan == PricingPlan.Basic) { bool2Fox(animationJobOptions.includeWatermark) ?~> "job.renderAnimation.mustIncludeWatermark" @@ -445,7 +436,7 @@ class JobController @Inject()( exportFileName = s"webknossos_animation_${formatDateForFilename(new Date())}__${dataset.name}__$layerName.mp4" command = JobCommand.render_animation commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> organization._id, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "export_file_name" -> exportFileName, diff --git a/app/controllers/LegacyApiController.scala b/app/controllers/LegacyApiController.scala index 
0bcf377cce6..f38876a4741 100644 --- a/app/controllers/LegacyApiController.scala +++ b/app/controllers/LegacyApiController.scala @@ -18,7 +18,7 @@ import play.api.http.HttpEntity import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers, Result} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/controllers/MaintenanceController.scala b/app/controllers/MaintenanceController.scala index b1d6fb78150..0e3ed916b11 100644 --- a/app/controllers/MaintenanceController.scala +++ b/app/controllers/MaintenanceController.scala @@ -7,7 +7,7 @@ import models.user.UserService import play.api.libs.json.{JsObject, Json, OFormat} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/controllers/MeshController.scala b/app/controllers/MeshController.scala index 66c81715698..9c9474b5c31 100644 --- a/app/controllers/MeshController.scala +++ b/app/controllers/MeshController.scala @@ -6,7 +6,7 @@ import models.annotation.AnnotationDAO import models.mesh.{MeshDAO, MeshInfo, MeshInfoParameters, MeshService} import play.api.mvc.{Action, AnyContent, PlayBodyParsers, RawBuffer} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/ProjectController.scala b/app/controllers/ProjectController.scala index 74897a7f327..ce50ef67a94 100644 --- a/app/controllers/ProjectController.scala +++ b/app/controllers/ProjectController.scala @@ -13,7 +13,7 @@ import play.api.i18n.Messages import play.api.libs.json.{JsValue, Json} import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/controllers/PublicationController.scala b/app/controllers/PublicationController.scala index 2bd629a4855..254848a2a0c 100755 --- a/app/controllers/PublicationController.scala +++ b/app/controllers/PublicationController.scala @@ -8,7 +8,7 @@ import models.dataset.{PublicationDAO, PublicationService} import play.api.libs.json._ import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/ReportController.scala b/app/controllers/ReportController.scala index a75f886464e..a8bc7e66deb 100644 --- a/app/controllers/ReportController.scala +++ b/app/controllers/ReportController.scala @@ -8,7 +8,7 @@ import models.user.{User, UserDAO, UserService} import play.api.libs.json.{Json, OFormat} import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/controllers/ScriptController.scala b/app/controllers/ScriptController.scala index c0175ac8420..c96990e7aaf 100644 --- a/app/controllers/ScriptController.scala +++ b/app/controllers/ScriptController.scala @@ -10,7 +10,7 @@ import play.api.libs.json.Reads._ import play.api.libs.json._ import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import 
com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/controllers/TaskController.scala b/app/controllers/TaskController.scala index d5e48d8919a..bcfe1323e4a 100755 --- a/app/controllers/TaskController.scala +++ b/app/controllers/TaskController.scala @@ -19,7 +19,7 @@ import play.api.i18n.Messages import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/controllers/TaskTypeController.scala b/app/controllers/TaskTypeController.scala index 86d29c15253..a1fddd85e7a 100755 --- a/app/controllers/TaskTypeController.scala +++ b/app/controllers/TaskTypeController.scala @@ -11,7 +11,7 @@ import play.api.i18n.Messages import play.api.libs.functional.syntax._ import play.api.libs.json.Reads._ import play.api.libs.json._ -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import play.api.mvc.{Action, AnyContent} import security.WkEnv diff --git a/app/controllers/TeamController.scala b/app/controllers/TeamController.scala index 38d5ad24a5d..8cd443c04a4 100755 --- a/app/controllers/TeamController.scala +++ b/app/controllers/TeamController.scala @@ -9,7 +9,7 @@ import play.api.i18n.Messages import play.api.libs.json._ import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/TimeController.scala b/app/controllers/TimeController.scala index 2499cf0df98..1312371fb53 100644 --- a/app/controllers/TimeController.scala +++ b/app/controllers/TimeController.scala @@ -13,7 +13,7 @@ import net.liftweb.common.Box import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent} import security.WkEnv -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext import scala.concurrent.duration.Duration diff --git a/app/controllers/UserController.scala b/app/controllers/UserController.scala index 6fbd048f64c..bd1f48fd5c9 100755 --- a/app/controllers/UserController.scala +++ b/app/controllers/UserController.scala @@ -13,7 +13,7 @@ import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.functional.syntax._ import play.api.libs.json._ import play.api.mvc._ -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import models.user.Theme.Theme diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 83dd2c2175b..b7548d1c0bd 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -3,7 +3,7 @@ package controllers import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import com.scalableminds.webknossos.datastore.services.AccessMode.AccessMode import com.scalableminds.webknossos.datastore.services.{ AccessMode, @@ -106,7 +106,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, } yield Ok(Json.toJson(answer)) } - private def handleDataSourceAccess(dataSourceId: 
DataSourceId, mode: AccessMode, userBox: Box[User])( + private def handleDataSourceAccess(dataSourceId: LegacyDataSourceId, mode: AccessMode, userBox: Box[User])( implicit ctx: DBAccessContext): Fox[UserAccessAnswer] = { // Write access is explicitly handled here depending on userBox, // Read access is ensured in findOneBySourceName, depending on the implicit DBAccessContext (to allow sharingTokens) diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 085465c3207..caa6b20de06 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -1,10 +1,11 @@ package controllers import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.models.UnfinishedUpload -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.{DatasetIdWithPath, LegacyDataSourceId} import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource} import com.scalableminds.webknossos.datastore.services.DataStoreStatus import com.scalableminds.webknossos.datastore.services.uploading.{LinkedLayerIdentifier, ReserveUploadInformation} @@ -28,7 +29,7 @@ import play.api.libs.json.{JsError, JsSuccess, JsValue, Json} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{WebknossosBearerTokenAuthenticatorService, WkSilhouetteEnvironment} import telemetry.SlackNotificationService -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.{ExecutionContext, Future} @@ -58,6 +59,17 @@ class WKRemoteDataStoreController @Inject()( val bearerTokenService: WebknossosBearerTokenAuthenticatorService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService + def getDatasetIdWithPath(name: String, key: String, organizationId: String, datasetName: String): Action[AnyContent] = + Action.async { implicit request => + dataStoreService.validateAccess(name, key) { dataStore => + for { + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) ?~> Messages( + "dataset.notFound", + datasetName) ~> NOT_FOUND + } yield Ok(Json.toJson(DatasetIdWithPath(dataset._id, dataset.path))) + } + } + def reserveDatasetUpload(name: String, key: String, token: String): Action[ReserveUploadInformation] = Action.async(validateJson[ReserveUploadInformation]) { implicit request => dataStoreService.validateAccess(name, key) { dataStore => @@ -215,7 +225,7 @@ class WKRemoteDataStoreController @Inject()( def deleteDataset(name: String, key: String): Action[JsValue] = Action.async(parse.json) { implicit request => dataStoreService.validateAccess(name, key) { _ => for { - datasourceId <- request.body.validate[DataSourceId].asOpt.toFox ?~> "dataStore.upload.invalid" + datasourceId <- request.body.validate[LegacyDataSourceId].asOpt.toFox ?~> "dataStore.upload.invalid" existingDataset = datasetDAO .findOneByNameAndOrganization(datasourceId.name, datasourceId.team)(GlobalAccessContext) .futureBox diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 0be68abb7aa..7d2b84d9718 100644 ---
a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -3,7 +3,7 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import com.scalableminds.webknossos.tracingstore.TracingUpdatesReport import javax.inject.Inject @@ -109,24 +109,24 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" dataset <- datasetDAO.findOne(annotation._dataset) organization <- organizationDAO.findOne(dataset._organization) - } yield Ok(Json.toJson(DataSourceId(dataset.name, organization._id))) + } yield Ok(Json.toJson(LegacyDataSourceId(dataset.name, organization._id))) } } def dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], - datasetNameAndId: String): Action[AnyContent] = + datasetPath: String): Action[AnyContent] = Action.async { implicit request => tracingStoreService.validateAccess(name, key) { _ => implicit val ctx: DBAccessContext = GlobalAccessContext for { organizationIdWithFallback <- Fox.fillOption(organizationId) { - datasetDAO.getOrganizationIdForDataset(datasetNameAndId)(GlobalAccessContext) - } ?~> Messages("dataset.noAccess", datasetNameAndId) ~> FORBIDDEN - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationIdWithFallback) ?~> Messages( + datasetDAO.getOrganizationIdForDataset(datasetPath)(GlobalAccessContext) + } ?~> Messages("dataset.noAccess", datasetPath) ~> FORBIDDEN + dataset <- datasetDAO.findOneByNameAndOrganization(datasetPath, organizationIdWithFallback) ?~> Messages( "dataset.noAccess", - datasetNameAndId) ~> FORBIDDEN + datasetPath) ~> FORBIDDEN dataStore <- datasetService.dataStoreFor(dataset) } yield Ok(Json.toJson(dataStore.url)) } diff --git a/app/models/aimodels/AiInference.scala b/app/models/aimodels/AiInference.scala index d7595b68e54..0fe84aafa6c 100644 --- a/app/models/aimodels/AiInference.scala +++ b/app/models/aimodels/AiInference.scala @@ -10,7 +10,7 @@ import models.job.{JobDAO, JobService} import models.user.{User, UserDAO, UserService} import play.api.libs.json.{JsObject, Json} import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/aimodels/AiModel.scala b/app/models/aimodels/AiModel.scala index 053913b90e3..71c5e9406de 100644 --- a/app/models/aimodels/AiModel.scala +++ b/app/models/aimodels/AiModel.scala @@ -13,7 +13,7 @@ import slick.dbio.{DBIO, Effect, NoStream} import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep import slick.sql.SqlAction -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/analytics/AnalyticsDAO.scala b/app/models/analytics/AnalyticsDAO.scala index 4fa65f8a3d8..bc5153c96ce 100644 --- a/app/models/analytics/AnalyticsDAO.scala +++ b/app/models/analytics/AnalyticsDAO.scala @@ -1,7 +1,7 @@ package models.analytics import com.scalableminds.util.tools.Fox -import utils.ObjectId +import 
com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/analytics/AnalyticsEvent.scala b/app/models/analytics/AnalyticsEvent.scala index cab884c9e6a..346a0fe3e15 100644 --- a/app/models/analytics/AnalyticsEvent.scala +++ b/app/models/analytics/AnalyticsEvent.scala @@ -8,7 +8,7 @@ import models.job.JobCommand.JobCommand import models.organization.Organization import models.user.User import play.api.libs.json._ -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala index 887bf7439c9..1929ae8f81f 100755 --- a/app/models/annotation/Annotation.scala +++ b/app/models/annotation/Annotation.scala @@ -15,7 +15,7 @@ import slick.jdbc.GetResult import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import slick.sql.SqlAction -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SimpleSQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/annotation/AnnotationIdentifier.scala b/app/models/annotation/AnnotationIdentifier.scala index 3b555e4fa9b..86c52df7f13 100644 --- a/app/models/annotation/AnnotationIdentifier.scala +++ b/app/models/annotation/AnnotationIdentifier.scala @@ -2,7 +2,7 @@ package models.annotation import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.annotation.AnnotationType.AnnotationType -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/AnnotationInformationProvider.scala b/app/models/annotation/AnnotationInformationProvider.scala index 61a6561ffde..27d84229552 100755 --- a/app/models/annotation/AnnotationInformationProvider.scala +++ b/app/models/annotation/AnnotationInformationProvider.scala @@ -8,7 +8,7 @@ import models.annotation.AnnotationType.AnnotationType import models.annotation.handler.AnnotationInformationHandlerSelector import models.user.User import net.liftweb.common.Full -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/AnnotationMerger.scala b/app/models/annotation/AnnotationMerger.scala index 2c251f01462..c551fafbdc0 100644 --- a/app/models/annotation/AnnotationMerger.scala +++ b/app/models/annotation/AnnotationMerger.scala @@ -13,7 +13,7 @@ import javax.inject.Inject import models.annotation.AnnotationType.AnnotationType import models.dataset.DatasetDAO import models.user.User -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/AnnotationPrivateLink.scala b/app/models/annotation/AnnotationPrivateLink.scala index 5370f7add64..9fbd014991e 100644 --- a/app/models/annotation/AnnotationPrivateLink.scala +++ b/app/models/annotation/AnnotationPrivateLink.scala @@ -7,7 +7,7 @@ import com.scalableminds.webknossos.schema.Tables._ import play.api.libs.json.{JsValue, Json, OFormat} import security.RandomIDGenerator import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 
51e16b3696b..655ed92dbec 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -5,41 +5,20 @@ import org.apache.pekko.stream.Materializer import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.io.{NamedStream, ZipIO} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} -import com.scalableminds.webknossos.datastore.geometry.{ - AdditionalCoordinateProto, - ColorProto, - NamedBoundingBoxProto, - Vec3DoubleProto, - Vec3IntProto -} +import com.scalableminds.webknossos.datastore.geometry.{AdditionalCoordinateProto, ColorProto, NamedBoundingBoxProto, Vec3DoubleProto, Vec3IntProto} import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.VoxelSize -import com.scalableminds.webknossos.datastore.models.annotation.{ - AnnotationLayer, - AnnotationLayerStatistics, - AnnotationLayerType, - AnnotationSource, - FetchedAnnotationLayer -} -import com.scalableminds.webknossos.datastore.models.datasource.{ - AdditionalAxis, - ElementClass, - DataSourceLike => DataSource, - SegmentationLayerLike => SegmentationLayer -} +import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType, AnnotationSource, FetchedAnnotationLayer} +import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, ElementClass, DataSourceLike => DataSource, SegmentationLayerLike => SegmentationLayer} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ - ResolutionRestrictions, - VolumeDataZipFormat, - VolumeTracingDefaults, - VolumeTracingDownsampling -} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ResolutionRestrictions, VolumeDataZipFormat, VolumeTracingDefaults, VolumeTracingDownsampling} import com.typesafe.scalalogging.LazyLogging import controllers.AnnotationLayerParameters import models.annotation.AnnotationState._ diff --git a/app/models/annotation/handler/AnnotationInformationHandler.scala b/app/models/annotation/handler/AnnotationInformationHandler.scala index 405e8e8173c..e9a9ac59b96 100755 --- a/app/models/annotation/handler/AnnotationInformationHandler.scala +++ b/app/models/annotation/handler/AnnotationInformationHandler.scala @@ -6,7 +6,7 @@ import javax.inject.Inject import models.annotation.AnnotationType.AnnotationType import models.annotation._ import models.user.User -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.annotation.{nowarn, tailrec} import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/handler/ProjectInformationHandler.scala b/app/models/annotation/handler/ProjectInformationHandler.scala index 5a54e31d04e..c771e2b52cf 100755 --- 
a/app/models/annotation/handler/ProjectInformationHandler.scala +++ b/app/models/annotation/handler/ProjectInformationHandler.scala @@ -7,7 +7,7 @@ import models.annotation._ import models.project.ProjectDAO import models.user.{User, UserService} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/handler/SavedTracingInformationHandler.scala b/app/models/annotation/handler/SavedTracingInformationHandler.scala index 1629290e9f7..7ef4c32856e 100755 --- a/app/models/annotation/handler/SavedTracingInformationHandler.scala +++ b/app/models/annotation/handler/SavedTracingInformationHandler.scala @@ -8,7 +8,7 @@ import javax.inject.Inject import models.annotation._ import models.dataset.DatasetDAO import models.user.{User, UserService} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/handler/TaskInformationHandler.scala b/app/models/annotation/handler/TaskInformationHandler.scala index e57b5955c29..18c9ed7100f 100755 --- a/app/models/annotation/handler/TaskInformationHandler.scala +++ b/app/models/annotation/handler/TaskInformationHandler.scala @@ -8,7 +8,7 @@ import models.task.TaskDAO import models.user.{User, UserService} import models.annotation.AnnotationState._ import models.project.ProjectDAO -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/handler/TaskTypeInformationHandler.scala b/app/models/annotation/handler/TaskTypeInformationHandler.scala index d70427921aa..233fb5ebdf8 100755 --- a/app/models/annotation/handler/TaskTypeInformationHandler.scala +++ b/app/models/annotation/handler/TaskTypeInformationHandler.scala @@ -7,7 +7,7 @@ import models.annotation._ import models.task.{TaskDAO, TaskTypeDAO} import models.user.{User, UserService} import models.annotation.AnnotationState._ -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/configuration/DatasetConfigurationService.scala b/app/models/configuration/DatasetConfigurationService.scala index f543e35ab8a..d798cd27cba 100644 --- a/app/models/configuration/DatasetConfigurationService.scala +++ b/app/models/configuration/DatasetConfigurationService.scala @@ -1,6 +1,7 @@ package models.configuration import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.datasource.DataLayerLike import com.scalableminds.webknossos.datastore.models.datasource.DatasetViewConfiguration.DatasetViewConfiguration @@ -22,11 +23,10 @@ class DatasetConfigurationService @Inject()(datasetService: DatasetService, def getDatasetViewConfigurationForUserAndDataset( requestedVolumeIds: List[String], user: User, - datasetNameAndId: String, - organizationId: String)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = + datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) + dataset <- datasetDAO.findOne(datasetId) datasetViewConfiguration <- userDatasetConfigurationDAO.findOneForUserAndDataset(user._id, dataset._id) datasetLayers <- datasetService.allLayersFor(dataset) @@ -35,10 +35,9 @@ class 
DatasetConfigurationService @Inject()(datasetService: DatasetService, def getDatasetViewConfigurationForDataset( requestedVolumeIds: List[String], - datasetNameAndId: String, - organizationId: String)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = + datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) + dataset <- datasetDAO.findOne(datasetId) datasetViewConfiguration = getDatasetViewConfigurationFromDefaultAndAdmin(dataset) @@ -52,10 +51,10 @@ class DatasetConfigurationService @Inject()(datasetService: DatasetService, defaultVC ++ adminVC } - def getCompleteAdminViewConfiguration(datasetNameAndId: String, organizationId: String)( + def getCompleteAdminViewConfiguration(datasetId: ObjectId)( implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) + dataset <- datasetDAO.findOne(datasetId) datasetViewConfiguration = getDatasetViewConfigurationFromDefaultAndAdmin(dataset) datasetLayers <- datasetService.allLayersFor(dataset) layerConfigurations = getAllLayerAdminViewConfigForDataset(datasetLayers).view.mapValues(Json.toJson(_)).toMap diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index 230d44fbd3a..442380c27bc 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -2,6 +2,7 @@ package models.dataset import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.requestparsing.{DatasetURIParser, ObjectId} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.models.{LengthUnit, VoxelSize} @@ -15,7 +16,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ Category, CoordinateTransformation, CoordinateTransformationType, - DataSourceId, + LegacyDataSourceId, ElementClass, ThinPlateSplineCorrespondences, DataLayerLike => DataLayer @@ -32,7 +33,6 @@ import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import slick.sql.SqlAction import utils.sql.{SQLDAO, SimpleSQLDAO, SqlClient, SqlToken} -import utils.ObjectId import scala.concurrent.ExecutionContext @@ -81,7 +81,7 @@ case class DatasetCompactInfo( colorLayerNames: List[String], segmentationLayerNames: List[String], ) { - def dataSourceId = new DataSourceId(name, owningOrganization) + def dataSourceId = new LegacyDataSourceId(name, owningOrganization) } object DatasetCompactInfo { @@ -90,7 +90,8 @@ object DatasetCompactInfo { class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDAO, organizationDAO: OrganizationDAO)( implicit ec: ExecutionContext) - extends SQLDAO[Dataset, DatasetsRow, Datasets](sqlClient) { + extends SQLDAO[Dataset, DatasetsRow, Datasets](sqlClient) + with DatasetURIParser { protected val collection = Datasets protected def idColumn(x: Datasets): Rep[String] = x._Id @@ -401,39 +402,29 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA } yield r def findOneByIdOrNameAndOrganization(idAndName: String, organizationId: String)( - implicit ctx: DBAccessContext): Fox[Dataset] = { + implicit ctx: DBAccessContext): Fox[Dataset] = getDatasetIdOrNameFromURIPath(idAndName) match { - case (Some(validId), None) => 
findOneByIdAndOrganization(validId, organizationId) + case (Some(validId), None) => findOneByIdAndOrganization(validId, organizationId) case (None, Some(datasetName)) => findOneByNameAndOrganization(datasetName, organizationId) } - } - - private def getDatasetIdOrNameFromURIPath(datasetNameAndId: String): (Option[ObjectId], Option[String]) = { - val maybeIdStr = datasetNameAndId.split("-").lastOption - val maybeId = maybeIdStr.flatMap(ObjectId.fromStringSync) - maybeId match { - case Some(validId) => (Some(validId), None) - case None => (None, Some(datasetNameAndId)) - } - } private def getWhereClauseForDatasetIdOrName(datasetIdOrName: String): SqlToken = { val (maybeId, maybeDatasetName) = getDatasetIdOrNameFromURIPath(datasetIdOrName) maybeId match { case Some(id) => q"_id = $id" - case None => q"name = $maybeDatasetName" + case None => q"name = $maybeDatasetName" } } - // TODOM: Make private def findOneByNameAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = for { accessQuery <- readAccessQuery r <- run(q"""SELECT $columns FROM $existingCollectionName - WHERE name = $name + WHERE path = $name AND _organization = $organizationId - AND $accessQuery""".as[DatasetsRow]) + AND $accessQuery + LIMIT 1""".as[DatasetsRow]) parsed <- parseFirst(r, s"$organizationId/$name") } yield parsed @@ -473,15 +464,15 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA /* Disambiguation method for legacy URLs and NMLs: if the user has access to multiple datasets of the same name, use the oldest. * This is reasonable, because the legacy URL/NML was likely created before this ambiguity became possible */ - def getOrganizationIdForDataset(datasetNameAndId: String)(implicit ctx: DBAccessContext): Fox[String] = + def getOrganizationIdForDataset(datasetName: String)(implicit ctx: DBAccessContext): Fox[String] = for { accessQuery <- readAccessQuery - whereClause = getWhereClauseForDatasetIdOrName(datasetNameAndId) rList <- run(q"""SELECT _organization FROM $existingCollectionName - WHERE $whereClause + WHERE name = $datasetName AND $accessQuery - ORDER BY created ASC""".as[String]) + ORDER BY created ASC + LIMIT 1""".as[String]) r <- rList.headOption.toFox } yield r @@ -492,27 +483,23 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA r <- rList.headOption.toFox } yield r - def getSharingTokenByIdOrName(datasetNameAndId: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Option[String]] = + def getSharingTokenById(datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[Option[String]] = for { accessQuery <- readAccessQuery - whereClause = getWhereClauseForDatasetIdOrName(datasetNameAndId) rList <- run(q"""SELECT sharingToken FROM webknossos.datasets_ - WHERE $whereClause - AND _organization = $organizationId + WHERE _id = $datasetId AND $accessQuery""".as[Option[String]]) r <- rList.headOption.toFox } yield r - def updateSharingTokenByIdOrName(datasetNameAndId: String, organizationId: String, sharingToken: Option[String])( + def updateSharingTokenById(datasetId: ObjectId, sharingToken: Option[String])( implicit ctx: DBAccessContext): Fox[Unit] = for { - accessQuery <- readAccessQuery - whereClause = getWhereClauseForDatasetIdOrName(datasetNameAndId) + accessQuery <- readAccessQuery // TODO: Why is this readAccessQuery and not writeAccessQuery? 
_ <- run(q"""UPDATE webknossos.datasets SET sharingToken = $sharingToken - WHERE name = $whereClause - AND _organization = $organizationId + WHERE _id = $datasetId AND $accessQuery""".asUpdate) } yield () diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 2e5e2cece93..9e34c9a0004 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -1,27 +1,21 @@ package models.dataset import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.datasource.inbox.{ - UnusableDataSource, - InboxDataSourceLike => InboxDataSource -} -import com.scalableminds.webknossos.datastore.models.datasource.{ - DataSourceId, - GenericDataSource, - DataLayerLike => DataLayer -} +import com.scalableminds.webknossos.datastore.models.datasource.inbox.{UnusableDataSource, InboxDataSourceLike => InboxDataSource} +import com.scalableminds.webknossos.datastore.models.datasource.{GenericDataSource, LegacyDataSourceId, DataLayerLike => DataLayer} import com.scalableminds.webknossos.datastore.rpc.RPC import com.typesafe.scalalogging.LazyLogging import models.folder.FolderDAO import models.organization.{Organization, OrganizationDAO} import models.team._ import models.user.{User, UserService} -import net.liftweb.common.{Box, Full, Empty} +import net.liftweb.common.{Box, Empty, Full} import play.api.libs.json.{JsObject, Json} import security.RandomIDGenerator -import utils.{ObjectId, WkConf} +import utils.WkConf import javax.inject.Inject import scala.concurrent.duration._ @@ -66,7 +60,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, datasetDAO.findOneByNameAndOrganization(name, organizationId)(GlobalAccessContext).reverse def createPreliminaryDataset(datasetName: String, organizationId: String, dataStore: DataStore): Fox[Dataset] = { - val unreportedDatasource = UnusableDataSource(DataSourceId(datasetName, organizationId), notYetUploadedStatus) + val unreportedDatasource = UnusableDataSource(LegacyDataSourceId(datasetName, organizationId), notYetUploadedStatus) createDataset(dataStore, organizationId, unreportedDatasource) } @@ -241,17 +235,17 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, def deactivateUnreportedDataSources(existingDatasetIds: List[ObjectId], dataStore: DataStore): Fox[Unit] = datasetDAO.deactivateUnreported(existingDatasetIds, dataStore.name, unreportedStatus, inactiveStatusList) - def getSharingToken(datasetNameAndId: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[String] = { + def getSharingToken(datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[String] = { - def createAndSaveSharingToken(datasetNameAndId: String)(implicit ctx: DBAccessContext): Fox[String] = + def createAndSaveSharingToken(datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[String] = for { tokenValue <- new RandomIDGenerator().generate - _ <- datasetDAO.updateSharingTokenByIdOrName(datasetNameAndId, organizationId, Some(tokenValue)) + _ <- datasetDAO.updateSharingTokenById(datasetId, Some(tokenValue)) } yield tokenValue - datasetDAO.getSharingTokenByIdOrName(datasetNameAndId, organizationId).flatMap { + datasetDAO.getSharingTokenById(datasetId).flatMap { case Some(oldToken) => Fox.successful(oldToken) - case None => 
createAndSaveSharingToken(datasetNameAndId) + case None => createAndSaveSharingToken(datasetId) } } @@ -261,7 +255,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> "organization.notFound" } dataLayers <- datasetDataLayerDAO.findAllForDataset(dataset._id) - dataSourceId = DataSourceId(dataset.name, organization._id) + dataSourceId = LegacyDataSourceId(dataset.name, organization._id) } yield { if (dataset.isUsable) for { diff --git a/app/models/dataset/Publication.scala b/app/models/dataset/Publication.scala index ee42cc4a9cf..ab30d3caab6 100644 --- a/app/models/dataset/Publication.scala +++ b/app/models/dataset/Publication.scala @@ -9,7 +9,7 @@ import play.api.http.Status.NOT_FOUND import play.api.libs.json.Format.GenericFormat import play.api.libs.json.{JsObject, Json} import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/dataset/ThumbnailService.scala b/app/models/dataset/ThumbnailService.scala index 35e156d3b72..56884d725d0 100644 --- a/app/models/dataset/ThumbnailService.scala +++ b/app/models/dataset/ThumbnailService.scala @@ -15,7 +15,7 @@ import net.liftweb.common.Full import play.api.http.Status.NOT_FOUND import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json.JsArray -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient} import javax.inject.Inject @@ -36,8 +36,7 @@ class ThumbnailService @Inject()(datasetService: DatasetService, private val MaxThumbnailHeight = 4000 def getThumbnailWithCache( - organizationId: String, - datasetNameAndId: String, + parsedDatasetId: ObjectId, layerName: String, w: Option[Int], h: Option[Int], @@ -45,37 +44,28 @@ class ThumbnailService @Inject()(datasetService: DatasetService, val width = com.scalableminds.util.tools.Math.clamp(w.getOrElse(DefaultThumbnailWidth), 1, MaxThumbnailWidth) val height = com.scalableminds.util.tools.Math.clamp(h.getOrElse(DefaultThumbnailHeight), 1, MaxThumbnailHeight) for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(GlobalAccessContext) + dataset <- datasetDAO.findOne(parsedDatasetId)(GlobalAccessContext) image <- thumbnailCachingService.getOrLoad( dataset._id, layerName, width, height, mappingName, - _ => - getThumbnail(organizationId, datasetNameAndId, layerName, width, height, mappingName)(ec, - GlobalAccessContext, - mp) + _ => getThumbnail(dataset, layerName, width, height, mappingName)(ec, GlobalAccessContext, mp) ) } yield image } - private def getThumbnail(organizationId: String, - datasetNameAndId: String, - layerName: String, - width: Int, - height: Int, - mappingName: Option[String])(implicit ec: ExecutionContext, - ctx: DBAccessContext, - mp: MessagesProvider): Fox[Array[Byte]] = + private def getThumbnail(dataset: Dataset, layerName: String, width: Int, height: Int, mappingName: Option[String])( + implicit ec: ExecutionContext, + ctx: DBAccessContext, + mp: MessagesProvider): Fox[Array[Byte]] = for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId) dataSource <- datasetService.dataSourceFor(dataset) ?~> "dataSource.notFound" ~> NOT_FOUND usableDataSource <- dataSource.toUsable.toFox ?~> "dataset.notImported" layer <- usableDataSource.dataLayers.find(_.name == layerName) ?~> Messages("dataLayer.notFound", 
layerName) ~> NOT_FOUND - viewConfiguration <- datasetConfigurationService.getDatasetViewConfigurationForDataset(List.empty, - datasetNameAndId, - organizationId)(ctx) + viewConfiguration <- datasetConfigurationService.getDatasetViewConfigurationForDataset(List.empty, dataset._id)( + ctx) (mag1BoundingBox, mag, intensityRangeOpt, colorSettingsOpt) = selectParameters(viewConfiguration, usableDataSource, layerName, @@ -83,8 +73,7 @@ class ThumbnailService @Inject()(datasetService: DatasetService, width, height) client <- datasetService.clientFor(dataset) - image <- client.getDataLayerThumbnail(organizationId, - dataset, + image <- client.getDataLayerThumbnail(dataset, layerName, mag1BoundingBox, mag, diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index 4f21d737eec..c056d38c70a 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -16,7 +16,7 @@ import com.typesafe.scalalogging.LazyLogging import controllers.RpcTokenHolder import play.api.libs.json.JsObject import play.utils.UriEncoding -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt @@ -26,8 +26,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin private lazy val hasSegmentIndexFileCache: AlfuCache[(String, String, String), Boolean] = AlfuCache(timeToLive = 1 minute) - def getDataLayerThumbnail(organizationId: String, - dataset: Dataset, + def getDataLayerThumbnail(dataset: Dataset, dataLayerName: String, mag1BoundingBox: BoundingBox, mag: Vec3Int, @@ -35,8 +34,8 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin intensityRangeOpt: Option[(Double, Double)], colorSettingsOpt: Option[ThumbnailColorSettings]): Fox[Array[Byte]] = { val targetMagBoundingBox = mag1BoundingBox / mag - logger.debug(s"Thumbnail called for: $organizationId/${dataset.name}, Layer: $dataLayerName") - rpc(s"${dataStore.url}/data/datasets/${urlEncode(organizationId)}/${dataset.urlEncodedName}/layers/$dataLayerName/thumbnail.jpg") + logger.debug(s"Thumbnail called for: ${dataset._id}, Layer: $dataLayerName") + rpc(s"${dataStore.url}/data/datasets/${urlEncode(dataset._organization)}/${urlEncode(dataset.path)}/layers/$dataLayerName/thumbnail.jpg") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("mag" -> mag.toMagLiteral()) .addQueryString("x" -> mag1BoundingBox.topLeft.x.toString) @@ -52,7 +51,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin .getWithBytesResponse } - def getLayerData(organizationId: String, + def getLayerData( dataset: Dataset, layerName: String, mag1BoundingBox: BoundingBox, @@ -61,15 +60,14 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin val targetMagBoundingBox = mag1BoundingBox / mag logger.debug(s"Fetching raw data. 
Mag $mag, mag1 bbox: $mag1BoundingBox, target-mag bbox: $targetMagBoundingBox") rpc( - s"${dataStore.url}/data/datasets/${urlEncode(organizationId)}/${dataset.urlEncodedName}/layers/$layerName/readData") + s"${dataStore.url}/data/datasets/${urlEncode(dataset._organization)}/${urlEncode(dataset.path)}/layers/$layerName/readData") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .postJsonWithBytesResponse( RawCuboidRequest(mag1BoundingBox.topLeft, targetMagBoundingBox.size, mag, additionalCoordinates)) } - def findPositionWithData(organizationId: String, dataset: Dataset, dataLayerName: String): Fox[JsObject] = - rpc( - s"${dataStore.url}/data/datasets/${urlEncode(organizationId)}/${dataset.urlEncodedName}/layers/$dataLayerName/findData") + def findPositionWithData(dataset: Dataset, dataLayerName: String): Fox[JsObject] = + rpc(s"${dataStore.url}/data/datasets/${dataset._id}/layers/$dataLayerName/findData") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .getWithJsonResponse[JsObject] diff --git a/app/models/dataset/credential/CredentialDAO.scala b/app/models/dataset/credential/CredentialDAO.scala index bb74ca7b842..6a90fb91a4b 100644 --- a/app/models/dataset/credential/CredentialDAO.scala +++ b/app/models/dataset/credential/CredentialDAO.scala @@ -11,7 +11,7 @@ import com.scalableminds.webknossos.schema.Tables.{Credentials, CredentialsRow} import net.liftweb.common.Box.tryo import play.api.libs.json.Json import utils.sql.{SecuredSQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/dataset/credential/CredentialService.scala b/app/models/dataset/credential/CredentialService.scala index 9fdacdadbdd..8af19c40608 100644 --- a/app/models/dataset/credential/CredentialService.scala +++ b/app/models/dataset/credential/CredentialService.scala @@ -1,16 +1,10 @@ package models.dataset.credential +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.storage.{ - DataVaultCredential, - DataVaultService, - GoogleServiceAccountCredential, - HttpBasicAuthCredential, - S3AccessKeyCredential -} +import com.scalableminds.webknossos.datastore.storage.{DataVaultCredential, DataVaultService, GoogleServiceAccountCredential, HttpBasicAuthCredential, S3AccessKeyCredential} import net.liftweb.common.Box.tryo import play.api.libs.json.Json -import utils.ObjectId import java.net.URI import javax.inject.Inject diff --git a/app/models/dataset/explore/WKExploreRemoteLayerService.scala b/app/models/dataset/explore/WKExploreRemoteLayerService.scala index 1b809e183d7..0c097da44ff 100644 --- a/app/models/dataset/explore/WKExploreRemoteLayerService.scala +++ b/app/models/dataset/explore/WKExploreRemoteLayerService.scala @@ -19,7 +19,7 @@ import models.user.User import net.liftweb.common.Box.tryo import play.api.libs.json.{Json, OFormat} import security.WkSilhouetteEnvironment -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import java.net.URI import javax.inject.Inject diff --git a/app/models/folder/Folder.scala b/app/models/folder/Folder.scala index 8ac15596ef3..87a5689402c 100644 --- a/app/models/folder/Folder.scala +++ b/app/models/folder/Folder.scala @@ -13,7 +13,7 @@ import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep import slick.sql.SqlAction import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import 
com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.annotation.tailrec diff --git a/app/models/job/Job.scala b/app/models/job/Job.scala index 9e39f96ec8d..c07221ca6c5 100644 --- a/app/models/job/Job.scala +++ b/app/models/job/Job.scala @@ -11,7 +11,7 @@ import slick.jdbc.PostgresProfile.api._ import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/mesh/Mesh.scala b/app/models/mesh/Mesh.scala index 37c123b5726..99c29e2bb46 100644 --- a/app/models/mesh/Mesh.scala +++ b/app/models/mesh/Mesh.scala @@ -12,7 +12,7 @@ import play.api.libs.json.Json._ import play.api.libs.json._ import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/organization/Organization.scala b/app/models/organization/Organization.scala index 185e18f0135..3faaa9ffd4d 100644 --- a/app/models/organization/Organization.scala +++ b/app/models/organization/Organization.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.schema.Tables._ import models.team.PricingPlan import models.team.PricingPlan.PricingPlan import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/project/Project.scala b/app/models/project/Project.scala index ce5b1890e23..b5dff79911c 100755 --- a/app/models/project/Project.scala +++ b/app/models/project/Project.scala @@ -13,7 +13,7 @@ import net.liftweb.common.Full import play.api.libs.functional.syntax._ import play.api.libs.json._ import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/shortlinks/ShortLink.scala b/app/models/shortlinks/ShortLink.scala index eb4f0d94f38..b7e0e72c8c5 100644 --- a/app/models/shortlinks/ShortLink.scala +++ b/app/models/shortlinks/ShortLink.scala @@ -7,7 +7,7 @@ import play.api.libs.json.{Json, OFormat} import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep import utils.sql.{SqlClient, SQLDAO} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/task/Script.scala b/app/models/task/Script.scala index 0b689eb2f17..6f2d33cb5b3 100644 --- a/app/models/task/Script.scala +++ b/app/models/task/Script.scala @@ -7,7 +7,7 @@ import com.scalableminds.webknossos.schema.Tables._ import models.user.{UserDAO, UserService} import play.api.libs.json._ import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/task/Task.scala b/app/models/task/Task.scala index dabd1ca434f..8de67f8817d 100755 --- a/app/models/task/Task.scala +++ b/app/models/task/Task.scala @@ -2,6 +2,7 @@ package models.task import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import 
com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.schema.Tables._ @@ -12,7 +13,6 @@ import models.user.Experience import slick.jdbc.PostgresProfile.api._ import slick.jdbc.TransactionIsolation.Serializable import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId import scala.concurrent.ExecutionContext import scala.concurrent.duration.FiniteDuration diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index 1cf52a573d3..49f98d11cf2 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -21,7 +21,7 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json.{JsObject, Json} import telemetry.SlackNotificationService -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/task/TaskType.scala b/app/models/task/TaskType.scala index 06903e8566f..739f62bddd1 100755 --- a/app/models/task/TaskType.scala +++ b/app/models/task/TaskType.scala @@ -1,6 +1,7 @@ package models.task import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.schema.Tables._ @@ -11,7 +12,6 @@ import models.annotation.{AnnotationSettings, TracingMode} import models.team.TeamDAO import play.api.libs.json._ import slick.lifted.Rep -import utils.ObjectId import utils.sql.{EnumerationArrayValue, SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/team/Team.scala b/app/models/team/Team.scala index 377515d3b64..77bb8370410 100755 --- a/app/models/team/Team.scala +++ b/app/models/team/Team.scala @@ -16,7 +16,7 @@ import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json._ import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/team/TeamMembership.scala b/app/models/team/TeamMembership.scala index 0d61ee5b1e5..b934e017bb0 100755 --- a/app/models/team/TeamMembership.scala +++ b/app/models/team/TeamMembership.scala @@ -1,11 +1,12 @@ package models.team import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox + import javax.inject.Inject import play.api.libs.functional.syntax._ import play.api.libs.json._ -import utils.ObjectId case class TeamMembership(teamId: ObjectId, isTeamManager: Boolean) diff --git a/app/models/user/EmailVerificationKey.scala b/app/models/user/EmailVerificationKey.scala index 03a087f51d5..7c930ec40f6 100644 --- a/app/models/user/EmailVerificationKey.scala +++ b/app/models/user/EmailVerificationKey.scala @@ -1,11 +1,11 @@ package models.user +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.schema.Tables import com.scalableminds.webknossos.schema.Tables.{Emailverificationkeys, EmailverificationkeysRow} import slick.lifted.{Rep, TableQuery} -import utils.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/user/MultiUser.scala b/app/models/user/MultiUser.scala index 
a1da71d02f7..a6ecea902aa 100644 --- a/app/models/user/MultiUser.scala +++ b/app/models/user/MultiUser.scala @@ -10,7 +10,7 @@ import models.user.Theme.Theme import play.api.libs.json.Format.GenericFormat import play.api.libs.json.{JsObject, Json} import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/user/User.scala b/app/models/user/User.scala index cc2c951716e..482bbf09623 100644 --- a/app/models/user/User.scala +++ b/app/models/user/User.scala @@ -17,7 +17,7 @@ import slick.jdbc.PostgresProfile.api._ import slick.jdbc.TransactionIsolation.Serializable import slick.lifted.Rep import utils.sql.{SQLDAO, SimpleSQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.ExecutionContext diff --git a/app/models/user/UserService.scala b/app/models/user/UserService.scala index a2111334604..8e959ee6ce5 100755 --- a/app/models/user/UserService.scala +++ b/app/models/user/UserService.scala @@ -2,6 +2,7 @@ package models.user import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.cache.AlfuCache +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.security.SCrypt import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -237,14 +238,11 @@ class UserService @Inject()(conf: WkConf, def updateDatasetViewConfiguration( user: User, - datasetNameAndId: String, - organizationId: String, + datasetId: ObjectId, datasetConfiguration: DatasetViewConfiguration, layerConfiguration: Option[JsValue])(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[Unit] = for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(GlobalAccessContext) ?~> Messages( - "dataset.notFound", - datasetNameAndId) + dataset <- datasetDAO.findOne(datasetId)(GlobalAccessContext) ?~> Messages("dataset.notFound", datasetId) layerMap = layerConfiguration.flatMap(_.asOpt[Map[String, JsValue]]).getOrElse(Map.empty) _ <- Fox.serialCombined(layerMap.toList) { case (name, config) => diff --git a/app/models/user/time/TimeSpan.scala b/app/models/user/time/TimeSpan.scala index cad7cb003e4..9a54413fccf 100644 --- a/app/models/user/time/TimeSpan.scala +++ b/app/models/user/time/TimeSpan.scala @@ -8,7 +8,7 @@ import models.annotation.AnnotationType.AnnotationType import play.api.libs.json.{JsArray, JsObject, JsValue, Json} import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/voxelytics/VoxelyticsDAO.scala b/app/models/voxelytics/VoxelyticsDAO.scala index 799d72391fd..fe3e8a42426 100644 --- a/app/models/voxelytics/VoxelyticsDAO.scala +++ b/app/models/voxelytics/VoxelyticsDAO.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import models.user.User import play.api.libs.json._ -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SimpleSQLDAO, SqlClient, SqlToken} import javax.inject.Inject diff --git a/app/models/voxelytics/VoxelyticsService.scala b/app/models/voxelytics/VoxelyticsService.scala index 1c4480836e0..fd6a06aa589 100644 --- a/app/models/voxelytics/VoxelyticsService.scala +++ 
b/app/models/voxelytics/VoxelyticsService.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.user.User import models.voxelytics.VoxelyticsRunState.VoxelyticsRunState import play.api.libs.json.{JsArray, JsObject, Json, OFormat} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/security/Token.scala b/app/security/Token.scala index e4153df5bd4..57d6db2777e 100644 --- a/app/security/Token.scala +++ b/app/security/Token.scala @@ -10,7 +10,7 @@ import com.scalableminds.webknossos.schema.Tables._ import TokenType.TokenType import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.{SQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/utils/sql/SQLDAO.scala b/app/utils/sql/SQLDAO.scala index 8ef7548d1ef..d6f07033404 100644 --- a/app/utils/sql/SQLDAO.scala +++ b/app/utils/sql/SQLDAO.scala @@ -1,15 +1,14 @@ package utils.sql import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import slick.lifted.{AbstractTable, Rep, TableQuery} -import utils.ObjectId import javax.inject.Inject import scala.annotation.nowarn import scala.concurrent.ExecutionContext - import slick.jdbc.PostgresProfile.api._ abstract class SQLDAO[C, R, X <: AbstractTable[R]] @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionContext) diff --git a/app/utils/sql/SecuredSQLDAO.scala b/app/utils/sql/SecuredSQLDAO.scala index 3935434de65..47bc7bef32d 100644 --- a/app/utils/sql/SecuredSQLDAO.scala +++ b/app/utils/sql/SecuredSQLDAO.scala @@ -1,11 +1,11 @@ package utils.sql import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox import models.user.User import net.liftweb.common.Full import security.{SharingTokenContainer, UserSharingTokenContainer} -import utils.ObjectId import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/utils/sql/SqlInterpolation.scala b/app/utils/sql/SqlInterpolation.scala index 61f72de4490..9c1cd68e268 100644 --- a/app/utils/sql/SqlInterpolation.scala +++ b/app/utils/sql/SqlInterpolation.scala @@ -7,7 +7,7 @@ import slick.dbio.{Effect, NoStream} import slick.jdbc._ import slick.sql.{SqlAction, SqlStreamingAction} import slick.util.DumpInfo -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import java.sql.{PreparedStatement, Types} import scala.collection.mutable diff --git a/app/utils/sql/SqlTypeImplicits.scala b/app/utils/sql/SqlTypeImplicits.scala index 569e41b334e..86548fbc732 100644 --- a/app/utils/sql/SqlTypeImplicits.scala +++ b/app/utils/sql/SqlTypeImplicits.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import play.api.libs.json.JsValue import slick.jdbc.{GetResult, PositionedResult} -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import scala.concurrent.duration.FiniteDuration diff --git a/conf/messages b/conf/messages index ae9f4836e2e..ff6a23a626e 100644 --- a/conf/messages +++ b/conf/messages @@ -79,6 +79,7 @@ braintracing.exists=Great, you already have an account on braintracing.org. 
Plea dataset=Dataset dataset.notFound=Dataset {0} does not exist or could not be accessed +dataset.notFoundByIdOrName= Could not find dataset {0} based on id or name. dataset.notFoundConsiderLogin=Dataset {0} does not exist or could not be accessed. You may need to log in. dataset.notFoundForAnnotation=The Dataset for this annotation does not exist or could not be accessed. dataset.noAccess=Could not access dataset {0}. Does your team have access? diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 7d66e94c57e..2a1081e289e 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -43,10 +43,10 @@ POST /auth/createUserInOrganization/:organizationId # Configurations GET /user/userConfiguration controllers.ConfigurationController.read() PUT /user/userConfiguration controllers.ConfigurationController.update() -POST /datasetConfigurations/:organizationId/:datasetNameAndId controllers.ConfigurationController.readDatasetViewConfiguration(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]) -PUT /datasetConfigurations/:organizationId/:datasetNameAndId controllers.ConfigurationController.updateDatasetViewConfiguration(organizationId: String, datasetNameAndId: String) -GET /datasetConfigurations/default/:organizationId/:datasetNameAndId controllers.ConfigurationController.readDatasetAdminViewConfiguration(organizationId: String, datasetNameAndId: String) -PUT /datasetConfigurations/default/:organizationId/:datasetNameAndId controllers.ConfigurationController.updateDatasetAdminViewConfiguration(organizationId: String, datasetNameAndId: String) +POST /datasetConfigurations/:datasetId controllers.ConfigurationController.readDatasetViewConfiguration(datasetId: String, sharingToken: Option[String]) +PUT /datasetConfigurations/:datasetId controllers.ConfigurationController.updateDatasetViewConfiguration(datasetId: String) +GET /datasetConfigurations/default/:datasetId controllers.ConfigurationController.readDatasetAdminViewConfiguration(datasetId: String) +PUT /datasetConfigurations/default/:datasetId controllers.ConfigurationController.updateDatasetAdminViewConfiguration(datasetId: String) # Users POST /user/tasks/request controllers.TaskController.request() @@ -73,25 +73,26 @@ GET /teams/:id/availableTasksReport GET /teams/:id/projectProgressReport controllers.ReportController.projectProgressReport(id: String) # Datasets -POST /datasets/:organizationId/:datasetNameAndId/createExplorational controllers.AnnotationController.createExplorational(organizationId: String, datasetNameAndId: String) -GET /datasets/:organizationId/:datasetNameAndId/sandbox/:typ controllers.AnnotationController.getSandbox(organizationId: String, datasetNameAndId: String, typ: String, sharingToken: Option[String]) +POST /datasets/:datasetId/createExplorational controllers.AnnotationController.createExplorational(datasetId: String) +GET /datasets/:datasetId/sandbox/:typ controllers.AnnotationController.getSandbox(datasetId: String, typ: String, sharingToken: Option[String]) GET /datasets controllers.DatasetController.list(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationId: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) POST /datasets controllers.DatasetController.create(typ: String) POST /datasets/exploreRemote controllers.DatasetController.exploreRemoteDataset() 
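
To make the addressing change concrete for reviewers: dataset routes now take a single `:datasetId` path segment instead of the `:organizationId/:datasetNameAndId` pair, and clients that only know an organization and a dataset name can resolve the id once via the new `disambiguate/:organizationId/:datasetName/toId` route and use id-based routes from then on. A minimal sketch of the resulting client-side paths follows; the `/api` prefix, the helper object, and the example id are assumptions for illustration only, not part of this patch.

// Illustration only: building the new id-based dataset URIs on the client side.
// The "/api" prefix and the example ObjectId are assumptions, not taken from this patch.
object DatasetRoutesExample {
  private val apiPrefix = "/api"

  // Old style (pre-v9): /api/datasets/:organizationId/:datasetNameAndId
  def legacyReadUri(organizationId: String, datasetNameAndId: String): String =
    s"$apiPrefix/datasets/$organizationId/$datasetNameAndId"

  // New style (v9): /api/datasets/:datasetId
  def readUri(datasetId: String): String =
    s"$apiPrefix/datasets/$datasetId"

  // One-time id resolution for clients that still only know organization + name.
  def resolveIdUri(organizationId: String, datasetName: String): String =
    s"$apiPrefix/datasets/disambiguate/$organizationId/$datasetName/toId"

  def main(args: Array[String]): Unit = {
    val datasetId = "66f2c5e8a3b4c5d6e7f8a9b0" // made-up ObjectId
    println(readUri(datasetId))                              // /api/datasets/66f2c5e8a3b4c5d6e7f8a9b0
    println(resolveIdUri("sample_organization", "my_dataset")) // /api/datasets/disambiguate/sample_organization/my_dataset/toId
  }
}
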
POST /datasets/exploreAndAddRemote controllers.DatasetController.exploreAndAddRemoteDataset() -GET /datasets/disambiguate/:datasetNameAndId/toNew controllers.DatasetController.getOrganizationForDataset(datasetNameAndId: String) -GET /datasets/:organizationId/:datasetNameAndId/health controllers.DatasetController.health(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]) -PATCH /datasets/:organizationId/:datasetNameAndId controllers.DatasetController.update(organizationId: String, datasetNameAndId: String) -PATCH /datasets/:organizationId/:datasetNameAndId/updatePartial controllers.DatasetController.updatePartial(organizationId: String, datasetNameAndId: String) -GET /datasets/:organizationId/:datasetNameAndId/accessList controllers.DatasetController.accessList(organizationId: String, datasetNameAndId: String) -GET /datasets/:organizationId/:datasetNameAndId/sharingToken controllers.DatasetController.getSharingToken(organizationId: String, datasetNameAndId: String) -DELETE /datasets/:organizationId/:datasetNameAndId/sharingToken controllers.DatasetController.deleteSharingToken(organizationId: String, datasetNameAndId: String) -PATCH /datasets/:organizationId/:datasetNameAndId/teams controllers.DatasetController.updateTeams(organizationId: String, datasetNameAndId: String) -GET /datasets/:organizationId/:datasetNameAndId/layers/:layer/thumbnail controllers.DatasetController.thumbnail(organizationId: String, datasetNameAndId: String, layer: String, w: Option[Int], h: Option[Int], mappingName: Option[String], sharingToken: Option[String]) -POST /datasets/:organizationId/:datasetNameAndId/layers/:layer/segmentAnythingMask controllers.DatasetController.segmentAnythingMask(organizationId: String, datasetNameAndId: String, layer: String, intensityMin: Option[Float], intensityMax: Option[Float]) -PUT /datasets/:organizationId/:datasetNameAndId/clearThumbnailCache controllers.DatasetController.removeFromThumbnailCache(organizationId: String, datasetNameAndId: String) -GET /datasets/:organizationId/:datasetName/isValidNewName controllers.DatasetController.isValidNewName(organizationId: String, datasetName: String) -GET /datasets/:organizationId/:datasetNameAndId controllers.DatasetController.read(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]) +GET /datasets/disambiguate/:datasetName/toNew controllers.DatasetController.getOrganizationForDataset(datasetName: String) +GET /datasets/disambiguate/:organizationId/:datasetName/toId controllers.DatasetController.getDatasetIdFromNameAndOrganization(datasetName: String, organizationId: String) +GET /datasets/:datasetId/health controllers.DatasetController.health(datasetId: String, sharingToken: Option[String]) +PATCH /datasets/:datasetId controllers.DatasetController.update(datasetId: String) +PATCH /datasets/:datasetId/updatePartial controllers.DatasetController.updatePartial(datasetId: String) +GET /datasets/:datasetId/accessList controllers.DatasetController.accessList(datasetId: String) +GET /datasets/:datasetId/sharingToken controllers.DatasetController.getSharingToken(datasetId: String) +DELETE /datasets/:datasetId/sharingToken controllers.DatasetController.deleteSharingToken(datasetId: String) +PATCH /datasets/:datasetId/teams controllers.DatasetController.updateTeams(datasetId: String) +GET /datasets/:datasetId/layers/:layer/thumbnail controllers.DatasetController.thumbnail(datasetId: String, layer: String, w: Option[Int], h: Option[Int], mappingName: Option[String], sharingToken: 
Option[String]) +POST /datasets/:datasetId/layers/:layer/segmentAnythingMask controllers.DatasetController.segmentAnythingMask(datasetId: String, layer: String, intensityMin: Option[Float], intensityMax: Option[Float]) +PUT /datasets/:datasetId/clearThumbnailCache controllers.DatasetController.removeFromThumbnailCache(datasetId: String) +GET /datasets/:datasetName/isValidNewName controllers.DatasetController.isValidNewName(datasetName: String) +GET /datasets/:datasetId controllers.DatasetController.read(datasetId: String, sharingToken: Option[String]) # Folders GET /folders/root controllers.FolderController.getRoot() @@ -117,6 +118,7 @@ POST /datastores/:name/validateUserAccess POST /datastores controllers.DataStoreController.create() DELETE /datastores/:name controllers.DataStoreController.delete(name: String) PUT /datastores/:name controllers.DataStoreController.update(name: String) +GET /datastores/:name/:organizationId/:datasetName/getDatasetId // TODO? controllers.WKRemoteDataStoreController.getDatasetIdWithPath(name: String, key: String, organizationId: String, datasetName: String) # Tracingstores GET /tracingstore controllers.TracingStoreController.listOne() @@ -125,7 +127,7 @@ POST /tracingstores/:name/validateUserAccess PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) GET /tracingstores/:name/dataSource controllers.WKRemoteTracingStoreController.dataSourceForTracing(name: String, key: String, tracingId: String) GET /tracingstores/:name/dataSourceId controllers.WKRemoteTracingStoreController.dataSourceIdForTracing(name: String, key: String, tracingId: String) -GET /tracingstores/:name/dataStoreUri/:datasetNameAndId controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetNameAndId: String) +GET /tracingstores/:name/dataStoreUri/:datasetPath controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetPath: String) # User access tokens for datastore authentication POST /userToken/generate controllers.UserTokenController.generateTokenForDataStore() @@ -259,17 +261,17 @@ GET /time/overview GET /jobs/request controllers.WKRemoteWorkerController.requestJobs(key: String) GET /jobs controllers.JobController.list() GET /jobs/status controllers.JobController.status() -POST /jobs/run/convertToWkw/:organizationId/:datasetNameAndId controllers.JobController.runConvertToWkwJob(organizationId: String, datasetNameAndId: String, scale: String, unit: Option[String]) -POST /jobs/run/computeMeshFile/:organizationId/:datasetNameAndId controllers.JobController.runComputeMeshFileJob(organizationId: String, datasetNameAndId: String, layerName: String, mag: String, agglomerateView: Option[String]) -POST /jobs/run/computeSegmentIndexFile/:organizationId/:datasetNameAndId controllers.JobController.runComputeSegmentIndexFileJob(organizationId: String, datasetNameAndId: String, layerName: String) -POST /jobs/run/exportTiff/:organizationId/:datasetNameAndId controllers.JobController.runExportTiffJob(organizationId: String, datasetNameAndId: String, bbox: String, layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean) -POST /jobs/run/inferNuclei/:organizationId/:datasetNameAndId controllers.JobController.runInferNucleiJob(organizationId: String, datasetNameAndId: String, layerName: String, newDatasetName: String) -POST 
/jobs/run/inferNeurons/:organizationId/:datasetNameAndId controllers.JobController.runInferNeuronsJob(organizationId: String, datasetNameAndId: String, layerName: String, bbox: String, newDatasetName: String) -POST /jobs/run/inferMitochondria/:organizationId/:datasetNameAndId controllers.JobController.runInferMitochondriaJob(organizationId: String, datasetNameAndId: String, layerName: String, bbox: String, newDatasetName: String) -POST /jobs/run/alignSections/:organizationId/:datasetNameAndId controllers.JobController.runAlignSectionsJob(organizationId: String, datasetNameAndId: String, layerName: String, newDatasetName: String, annotationId: Option[String]) -POST /jobs/run/materializeVolumeAnnotation/:organizationId/:datasetNameAndId controllers.JobController.runMaterializeVolumeAnnotationJob(organizationId: String, datasetNameAndId: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String]) -POST /jobs/run/findLargestSegmentId/:organizationId/:datasetNameAndId controllers.JobController.runFindLargestSegmentIdJob(organizationId: String, datasetNameAndId: String, layerName: String) -POST /jobs/run/renderAnimation/:organizationId/:datasetNameAndId controllers.JobController.runRenderAnimationJob(organizationId: String, datasetNameAndId: String) +POST /jobs/run/convertToWkw/:datasetId controllers.JobController.runConvertToWkwJob(datasetId: String, scale: String, unit: Option[String]) +POST /jobs/run/computeMeshFile/:datasetId controllers.JobController.runComputeMeshFileJob(datasetId: String, layerName: String, mag: String, agglomerateView: Option[String]) +POST /jobs/run/computeSegmentIndexFile/:datasetId controllers.JobController.runComputeSegmentIndexFileJob(datasetId: String, layerName: String) +POST /jobs/run/exportTiff/:datasetId controllers.JobController.runExportTiffJob(datasetId: String, bbox: String, layerName: Option[String], mag: Option[String], annotationLayerName: Option[String], annotationId: Option[String], asOmeTiff: Boolean) +POST /jobs/run/inferNuclei/:datasetId controllers.JobController.runInferNucleiJob(datasetId: String, layerName: String, newDatasetName: String) +POST /jobs/run/inferNeurons/:datasetId controllers.JobController.runInferNeuronsJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String) +POST /jobs/run/inferMitochondria/:datasetId controllers.JobController.runInferMitochondriaJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String) +POST /jobs/run/alignSections/:datasetId controllers.JobController.runAlignSectionsJob(datasetId: String, layerName: String, newDatasetName: String, annotationId: Option[String]) +POST /jobs/run/materializeVolumeAnnotation/:datasetId controllers.JobController.runMaterializeVolumeAnnotationJob(datasetId: String, fallbackLayerName: String, annotationId: String, annotationType: String, newDatasetName: String, outputSegmentationLayerName: String, mergeSegments: Boolean, volumeLayerName: Option[String]) +POST /jobs/run/findLargestSegmentId/:datasetId controllers.JobController.runFindLargestSegmentIdJob(datasetId: String, layerName: String) +POST /jobs/run/renderAnimation/:datasetId controllers.JobController.runRenderAnimationJob(datasetId: String) GET /jobs/:id controllers.JobController.get(id: String) PATCH /jobs/:id/cancel controllers.JobController.cancel(id: String) POST /jobs/:id/status controllers.WKRemoteWorkerController.updateJobStatus(key: String, 
id: String) diff --git a/conf/webknossos.versioned.routes b/conf/webknossos.versioned.routes index cfbc59297f1..48bb3dfe9fe 100644 --- a/conf/webknossos.versioned.routes +++ b/conf/webknossos.versioned.routes @@ -4,6 +4,7 @@ # Note: keep this in sync with the reported version numbers in the utils.ApiVersioning trait # version log: + # changed in v9: Datasets are now identified by their id, not their name. # changed in v8: Datasets' name was renamed to id and the displayName is now named name. # changed in v7: datasources now contain scale as object with keys factor and unit, e.g. {"factor": [11,11,5.5], "unit": "nm"} # changed in v6: isValidName always returns Ok, with a JSON object containing possible errors and key "isValid" @@ -12,6 +13,16 @@ # new in v3: annotation info and finish request now take timestamp # new in v2: annotation json contains visibility enum instead of booleans +-> /v8/ webknossos.latest.Routes + +# v8: support changes to v9 +PATCH || /datasets/:organizationId/:datasetNameAndId controllers.DatasetController.update(organizationId: String, datasetNameAndId: String) +GET || /datasets/:organizationId/:datasetNameAndId/sharingToken controllers.DatasetController.getSharingToken(organizationId: String, datasetNameAndId: String) +PATCH || /datasets/:organizationId/:datasetNameAndId/teams controllers.DatasetController.updateTeams(organizationId: String, datasetNameAndId: String) +GET || /datasets/:organizationId/:datasetNameAndId controllers.DatasetController.read(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]) +GET /datasets/:datasetName/isValidNewName controllers.DatasetController.isValidNewName(datasetName: String) + + -> /v8/ webknossos.latest.Routes # v7: support changes to v8 diff --git a/test/backend/SqlInterpolationTestSuite.scala b/test/backend/SqlInterpolationTestSuite.scala index 4d0164f1dfc..557e39ec5bc 100644 --- a/test/backend/SqlInterpolationTestSuite.scala +++ b/test/backend/SqlInterpolationTestSuite.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.time.Instant import models.job.JobState import org.scalatestplus.play.PlaySpec import play.api.libs.json.Json -import utils.ObjectId +import com.scalableminds.util.requestparsing.ObjectId import utils.sql.SqlInterpolation.sqlInterpolation import utils.sql._ diff --git a/util/src/main/scala/com/scalableminds/util/requestparsing/DatasetURIParser.scala b/util/src/main/scala/com/scalableminds/util/requestparsing/DatasetURIParser.scala new file mode 100644 index 00000000000..957ebfe2457 --- /dev/null +++ b/util/src/main/scala/com/scalableminds/util/requestparsing/DatasetURIParser.scala @@ -0,0 +1,14 @@ +package com.scalableminds.util.requestparsing + +trait DatasetURIParser { + + def getDatasetIdOrNameFromURIPath(datasetNameAndId: String): (Option[ObjectId], Option[String]) = { + val maybeIdStr = datasetNameAndId.split("-").lastOption + val maybeId = maybeIdStr.flatMap(ObjectId.fromStringSync) + maybeId match { + case Some(validId) => (Some(validId), None) + case None => (None, Some(datasetNameAndId)) + } + } + +} diff --git a/app/utils/ObjectId.scala b/util/src/main/scala/com/scalableminds/util/requestparsing/ObjectId.scala similarity index 96% rename from app/utils/ObjectId.scala rename to util/src/main/scala/com/scalableminds/util/requestparsing/ObjectId.scala index 4b3c0c7ad6e..53034b2cd75 100644 --- a/app/utils/ObjectId.scala +++ b/util/src/main/scala/com/scalableminds/util/requestparsing/ObjectId.scala @@ -1,4 +1,4 @@ -package utils +package com.scalableminds.util.requestparsing 
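
A quick sketch of how the new `DatasetURIParser.getDatasetIdOrNameFromURIPath` introduced above is expected to behave: the last `-`-separated segment of the URI path is tried as an ObjectId; if it parses, the dataset is addressed by id, otherwise the whole segment is treated as a dataset name. The object below, the example id, and the assumption that `ObjectId.fromStringSync` accepts a standard 24-character hex string are mine for illustration and not part of the patch.

// Illustrative usage of the trait defined in DatasetURIParser.scala above.
import com.scalableminds.util.requestparsing.DatasetURIParser

object DatasetURIParserExample extends DatasetURIParser {
  def main(args: Array[String]): Unit = {
    val id = "66f2c5e8a3b4c5d6e7f8a9b0" // hypothetical ObjectId
    // Plain id: resolved by id, roughly (Some(<id>), None).
    println(getDatasetIdOrNameFromURIPath(id))
    // Readable slug whose last "-"-separated segment is the id: still resolved by id.
    println(getDatasetIdOrNameFromURIPath(s"my-dataset-$id"))
    // No valid id suffix: fall back to treating the whole segment as a dataset name,
    // i.e. (None, Some("my_dataset")).
    println(getDatasetIdOrNameFromURIPath("my_dataset"))
  }
}

One consequence of this scheme, if I read the trait correctly, is that a dataset name whose final `-`-separated segment happens to parse as a valid ObjectId would be interpreted as an id rather than a name; whether that case needs extra validation is worth confirming in review.
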
import com.scalableminds.util.tools.TextUtils.parseCommaSeparated import com.scalableminds.util.tools.{Fox, FoxImplicits} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index bac34609baf..3903b1eb673 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -3,10 +3,11 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.image.{Color, JPEGWriter} +import com.scalableminds.util.requestparsing.DatasetURIParser import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.DataStoreConfig -import com.scalableminds.webknossos.datastore.helpers.MissingBucketHeaders +import com.scalableminds.webknossos.datastore.helpers.{LegacyDatasetResolver, MissingBucketHeaders} import com.scalableminds.webknossos.datastore.image.{ImageCreator, ImageCreatorParameters} import com.scalableminds.webknossos.datastore.models.DataRequestCollection._ import com.scalableminds.webknossos.datastore.models.datasource._ @@ -22,6 +23,7 @@ import net.liftweb.common.Box.tryo import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{AnyContent, _} + import scala.concurrent.duration.DurationInt import java.io.ByteArrayOutputStream import java.nio.{ByteBuffer, ByteOrder} @@ -36,12 +38,16 @@ class BinaryDataController @Inject()( slackNotificationService: DSSlackNotificationService, adHocMeshServiceHolder: AdHocMeshServiceHolder, findDataService: FindDataService, + remoteWebknossosClient: DSRemoteWebknossosClient, + datasetIdRepository: DatasetIdRepository )(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with MissingBucketHeaders { override def allowRemoteOrigin: Boolean = true + implicit val remoteWebknossosClientImplicit: DSRemoteWebknossosClient = remoteWebknossosClient + val binaryDataService: BinaryDataService = binaryDataServiceHolder.binaryDataService adHocMeshServiceHolder.dataStoreAdHocMeshConfig = (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) @@ -50,28 +56,36 @@ class BinaryDataController @Inject()( def requestViaWebknossos( token: Option[String], organizationId: String, - datasetName: String, + datasetIdOrName: String, dataLayerName: String ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { - logTime(slackNotificationService.noticeSlowRequest) { - val t = Instant.now - for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, - dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource, dataLayer, request.body) - duration = Instant.since(t) - _ = if (duration > (10 seconds)) - logger.info( - s"Complete data request for $organizationId/$datasetName/$dataLayerName took $duration." 
- + request.body.headOption - .map(firstReq => s" First of ${request.body.size} requests was $firstReq") - .getOrElse("")) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + for { + datasetIdWithPath <- datasetIdRepository + .getDatasetIdFromIdOrName(organizationId, datasetIdOrName) ~> NOT_FOUND ?~> Messages( + "dataset.notFoundByIdOrName", + datasetIdOrName) + datasourceId = LegacyDataSourceId(datasetIdWithPath.id.toString, "") + response <- accessTokenService.validateAccess( + UserAccessRequest.readDataSources(datasourceId), + urlOrHeaderToken(token, request) + ) { + logTime(slackNotificationService.noticeSlowRequest) { + val t = Instant.now + for { + (dataSource, dataLayer) <- dataSourceRepository + .getDataSourceAndDataLayer(dataSourceId, dataLayerName) ~> NOT_FOUND + (data, indices) <- requestData(dataSource, dataLayer, request.body) + duration = Instant.since(t) + _ = if (duration > (10 seconds)) + logger.info( + s"Complete data request for $organizationId/$datasetIdOrName/$dataLayerName took $duration." + + request.body.headOption + .map(firstReq => s" First of ${request.body.size} requests was $firstReq") + .getOrElse("")) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } } - } + } yield response } /** @@ -80,7 +94,7 @@ class BinaryDataController @Inject()( def requestRawCuboid( token: Option[String], organizationId: String, - datasetName: String, + datasetIdOrName: String, dataLayerName: String, // Mag1 coordinates of the top-left corner of the bounding box x: Int, @@ -96,32 +110,35 @@ class BinaryDataController @Inject()( halfByte: Boolean, mappingName: Option[String] ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), - urlOrHeaderToken(token, request)) { - for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, - dataLayerName) ~> NOT_FOUND - magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" - request = DataRequest( - VoxelPosition(x, y, z, magParsed), - width, - height, - depth, - DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName) - ) - (data, indices) <- requestData(dataSource, dataLayer, request) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + { + val dataSourceId = LegacyDataSourceId.fromDatasetIdOrNameAndOrganizationId(datasetIdOrName, organizationId) + accessTokenService.validateAccess(UserAccessRequest.readDataSources(dataSourceId), + urlOrHeaderToken(token, request)) { + for { + (dataSource, dataLayer) <- dataSourceRepository + .getDataSourceAndDataLayer(dataSourceId, dataLayerName) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" + request = DataRequest( + VoxelPosition(x, y, z, magParsed), + width, + height, + depth, + DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName) + ) + (data, indices) <- requestData(dataSource, dataLayer, request) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } } } def requestRawCuboidPost( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + // TODO :D + 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, @@ -144,7 +161,7 @@ class BinaryDataController @Inject()( y: Int, z: Int, cubeSize: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, @@ -166,7 +183,7 @@ class BinaryDataController @Inject()( def thumbnailJpeg(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, x: Int, y: Int, @@ -179,7 +196,8 @@ class BinaryDataController @Inject()( intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + // TODO :D + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, @@ -227,7 +245,7 @@ class BinaryDataController @Inject()( dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, @@ -248,7 +266,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, @@ -290,7 +308,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, @@ -309,7 +327,7 @@ class BinaryDataController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index f2f4d5921c0..7a2f768b6ff 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -2,7 +2,7 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import com.scalableminds.webknossos.datastore.services._ import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -28,7 +28,7 @@ class DSMeshController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { meshFiles <- meshFileService.exploreMeshFiles(organizationId, datasetName, dataLayerName) @@ -49,7 +49,7 @@ class DSMeshController @Inject()( targetMappingName: Option[String], editableMappingTracingId: Option[String]): Action[ListMeshChunksRequest] = Action.async(validateJson[ListMeshChunksRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { _ <- Fox.successful(()) @@ -82,7 +82,7 @@ class DSMeshController @Inject()( datasetName: String, dataLayerName: String): Action[MeshChunkDataRequestList] = Action.async(validateJson[MeshChunkDataRequestList]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (data, encoding) <- meshFileService.readMeshChunk(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" @@ -99,7 +99,7 @@ class DSMeshController @Inject()( datasetName: String, dataLayerName: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 2e05d04252d..f5a506b198e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -16,7 +16,7 @@ import com.scalableminds.webknossos.datastore.helpers.{ SegmentStatisticsParameters } import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId, GenericDataSource} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, LegacyDataSourceId, GenericDataSource} import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.datastore.services.uploading.{ CancelUploadInformation, @@ -46,7 +46,7 @@ import scala.concurrent.duration._ class DataSourceController @Inject()( dataSourceRepository: DataSourceRepository, dataSourceService: DataSourceService, - remoteWebknossosClient: DSRemoteWebknossosClient, + remoteWebknossosClient: DSRemoteWebknossosClient, // duplicate? accessTokenService: DataStoreAccessTokenService, val binaryDataServiceHolder: BinaryDataServiceHolder, connectomeFileService: ConnectomeFileService, @@ -56,7 +56,7 @@ class DataSourceController @Inject()( exploreRemoteLayerService: ExploreRemoteLayerService, uploadService: UploadService, composeService: ComposeService, - val dsRemoteWebknossosClient: DSRemoteWebknossosClient, + val dsRemoteWebknossosClient: DSRemoteWebknossosClient, // duplicate? val dsRemoteTracingstoreClient: DSRemoteTracingstoreClient, )(implicit bodyParsers: PlayBodyParsers, ec: ExecutionContext) extends Controller @@ -69,7 +69,7 @@ class DataSourceController @Inject()( Action.async { implicit request => { accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { // Read directly from file, not from repository to ensure recent changes are seen val dataSource: InboxDataSource = @@ -252,7 +252,7 @@ class DataSourceController @Inject()( dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { addNoCacheHeaderFallback( Ok(Json.toJson(dataSourceService.exploreMappings(organizationId, datasetName, dataLayerName)))) @@ -265,7 +265,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -282,7 +282,7 @@ class DataSourceController @Inject()( mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -303,7 +303,7 @@ class DataSourceController @Inject()( mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -322,7 +322,7 @@ class DataSourceController @Inject()( mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -340,7 +340,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -365,7 +365,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -391,7 +391,7 @@ class DataSourceController @Inject()( dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -411,11 +411,11 @@ class DataSourceController @Inject()( def update(token: Option[String], organizationId: String, datasetName: String): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.writeDataSource(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { _ <- 
Fox.successful(()) - dataSource <- dataSourceRepository.find(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( + dataSource <- dataSourceRepository.find(LegacyDataSourceId(datasetName, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND _ <- dataSourceService.updateDataSource(request.body.copy(id = dataSource.id), expectExisting = true) } yield Ok @@ -430,7 +430,7 @@ class DataSourceController @Inject()( Action.async(validateJson[DataSource]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.administrateDataSources, urlOrHeaderToken(token, request)) { for { - _ <- bool2Fox(dataSourceRepository.find(DataSourceId(datasetName, organizationId)).isEmpty) ?~> Messages( + _ <- bool2Fox(dataSourceRepository.find(LegacyDataSourceId(datasetName, organizationId)).isEmpty) ?~> Messages( "dataSource.alreadyPresent") _ <- remoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( @@ -445,10 +445,10 @@ class DataSourceController @Inject()( ), urlOrHeaderToken(token, request) ) ?~> "dataset.upload.validation.failed" - _ <- dataSourceService.updateDataSource(request.body.copy(id = DataSourceId(datasetName, organizationId)), + _ <- dataSourceService.updateDataSource(request.body.copy(id = LegacyDataSourceId(datasetName, organizationId)), expectExisting = false) _ <- remoteWebknossosClient.reportUpload( - DataSourceId(datasetName, organizationId), + LegacyDataSourceId(datasetName, organizationId), 0L, needsConversion = false, viaAddRoute = true, @@ -518,7 +518,7 @@ class DataSourceController @Inject()( def deleteOnDisk(token: Option[String], organizationId: String, datasetName: String): Action[AnyContent] = Action.async { implicit request => - val dataSourceId = DataSourceId(datasetName, organizationId) + val dataSourceId = LegacyDataSourceId(datasetName, organizationId) accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId), urlOrHeaderToken(token, request)) { for { @@ -539,7 +539,7 @@ class DataSourceController @Inject()( _ <- Fox.serialCombined(request.body.layers.map(_.datasetId).toList)( id => accessTokenService.assertUserAccess( - UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)), + UserAccessRequest.readDataSources(LegacyDataSourceId(id.name, id.owningOrganization)), userToken)) dataSource <- composeService.composeDataset(request.body, userToken) _ <- dataSourceRepository.updateDataSource(dataSource) @@ -552,7 +552,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { val connectomeFileNames = connectomeFileService.exploreConnectomeFiles(organizationId, datasetName, dataLayerName) @@ -574,7 +574,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[ByAgglomerateIdsRequest] = Action.async(validateJson[ByAgglomerateIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { meshFilePath <- Fox.successful( @@ -591,7 +591,7 @@ class 
DataSourceController @Inject()( dataLayerName: String, direction: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { meshFilePath <- Fox.successful( @@ -609,7 +609,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { meshFilePath <- Fox.successful( @@ -625,7 +625,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { meshFilePath <- Fox.successful( @@ -641,7 +641,7 @@ class DataSourceController @Inject()( dataSetName: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(dataSetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(dataSetName, organizationId)), urlOrHeaderToken(token, request)) { val segmentIndexFileOpt = segmentIndexFileService.getSegmentIndexFile(organizationId, dataSetName, dataLayerName).toOption @@ -659,7 +659,7 @@ class DataSourceController @Inject()( dataLayerName: String, segmentId: String): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { segmentIds <- segmentIdsForAgglomerateIdIfNeeded( @@ -698,7 +698,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => @@ -732,7 +732,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) @@ -755,7 +755,7 @@ class DataSourceController @Inject()( datasetName: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index 2c94c3386e6..06ab27a165a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -55,7 +55,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, @@ -73,7 +73,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, @@ -153,10 +153,10 @@ class ZarrStreamingController @Inject()( datasetName: String, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { - dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ~> NOT_FOUND + dataSource <- dataSourceRepository.findUsable(LegacyDataSourceId(datasetName, organizationId)).toFox ~> NOT_FOUND dataLayers = dataSource.dataLayers zarrLayers = dataLayers.map(convertLayerToZarrLayer(_, zarrVersion)) zarrSource = GenericDataSource[DataLayer](dataSource.id, zarrLayers, dataSource.scale) @@ -211,7 +211,7 @@ class ZarrStreamingController @Inject()( else urlOrHeaderToken(token, request) volumeAnnotationLayers = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume) dataSource <- dataSourceRepository - 
.findUsable(DataSourceId(annotationSource.datasetName, annotationSource.organizationId)) + .findUsable(LegacyDataSourceId(annotationSource.datasetName, annotationSource.organizationId)) .toFox ~> NOT_FOUND dataSourceLayers = dataSource.dataLayers .filter(dL => !volumeAnnotationLayers.exists(_.name == dL.name)) @@ -236,7 +236,7 @@ class ZarrStreamingController @Inject()( mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { rawZarrCube(organizationId, datasetName, dataLayerName, mag, coordinates) } @@ -307,7 +307,7 @@ class ZarrStreamingController @Inject()( dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { zArray(organizationId, datasetName, dataLayerName, mag) } @@ -329,7 +329,7 @@ class ZarrStreamingController @Inject()( dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { zarrJsonForMag(organizationId, datasetName, dataLayerName, mag) } @@ -403,7 +403,7 @@ class ZarrStreamingController @Inject()( mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { dataLayerMagFolderContents(organizationId, datasetName, dataLayerName, mag, zarrVersion) } @@ -467,7 +467,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { dataLayerFolderContents(organizationId, datasetName, dataLayerName, zarrVersion) } @@ -528,10 +528,10 @@ class ZarrStreamingController @Inject()( datasetName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { for { - dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetName, organizationId)).toFox ?~> Messages( + dataSource <- dataSourceRepository.findUsable(LegacyDataSourceId(datasetName, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND layerNames = 
dataSource.dataLayers.map((dataLayer: DataLayer) => dataLayer.name) additionalVersionDependantFiles = if (zarrVersion == 2) List(NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -552,7 +552,7 @@ class ZarrStreamingController @Inject()( for { annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) dataSource <- dataSourceRepository - .findUsable(DataSourceId(annotationSource.datasetName, annotationSource.organizationId)) + .findUsable(LegacyDataSourceId(annotationSource.datasetName, annotationSource.organizationId)) .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND annotationLayerNames = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume).map(_.name) dataSourceLayerNames = dataSource.dataLayers @@ -577,7 +577,7 @@ class ZarrStreamingController @Inject()( datasetName: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationId)), + UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), urlOrHeaderToken(token, request)) { Ok(zGroupJson) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala index 4638466d316..c0b907885f2 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala @@ -11,7 +11,7 @@ import com.scalableminds.webknossos.datastore.datareaders.wkw.WKWArray import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrArray import com.scalableminds.webknossos.datastore.datareaders.zarr3.Zarr3Array import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, DataLayer, DataSourceId, ElementClass} +import com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, DataLayer, LegacyDataSourceId, ElementClass} import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import com.typesafe.scalalogging.LazyLogging @@ -22,7 +22,7 @@ import ucar.ma2.{Array => MultiArray} import scala.concurrent.ExecutionContext class DatasetArrayBucketProvider(dataLayer: DataLayer, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], sharedChunkContentsCacheOpt: Option[AlfuCache[String, MultiArray]]) extends BucketProvider diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala index ba9d7db10e7..5474e633844 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala @@ -14,7 +14,7 @@ trait N5Layer extends DataLayerWithMagLocators { val dataFormat: DataFormat.Value = DataFormat.n5 def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: 
DataSourceId, + dataSourceId: LegacyDataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]) = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala index bb8e7ccda48..535b699f883 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala @@ -14,7 +14,7 @@ trait PrecomputedLayer extends DataLayerWithMagLocators { val dataFormat: DataFormat.Value = DataFormat.neuroglancerPrecomputed def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]) = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala index ee117e5f234..27b3ac37c2e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala @@ -20,7 +20,7 @@ trait WKWLayer extends DataLayer { val dataFormat: DataFormat.Value = DataFormat.wkw override def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala index 54a343879d4..04c2af1baa9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala @@ -14,7 +14,7 @@ trait Zarr3Layer extends DataLayerWithMagLocators { val dataFormat: DataFormat.Value = DataFormat.zarr3 def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]) = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala index 17810f5efb3..0242b50d0eb 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala +++ 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala @@ -12,7 +12,7 @@ import ucar.ma2.{Array => MultiArray} trait ZarrLayer extends DataLayerWithMagLocators { def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]) = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index b04fa57ee80..06234ca8e3a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import net.liftweb.common.Box.tryo @@ -17,7 +17,7 @@ import scala.collection.immutable.NumericRange import scala.concurrent.ExecutionContext class DatasetArray(vaultPath: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, header: DatasetHeader, // axisOrder and additionalAxes match those from “outer” metadata, and can directly be used to compute chunk indices. 
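
The hunks above and below are a mechanical rename: every datastore-side constructor argument, cache key, and call site that used DataSourceId(datasetName, organizationId) now uses LegacyDataSourceId with the same two fields. A minimal, self-contained sketch of that call-site shape follows; it assumes only the (name, team) case class signature and the unchanged toString shown in this patch, everything else (object name, example values) is illustrative.

// Standalone sketch; only the (name, team) shape and toString are taken from this patch.
case class LegacyDataSourceId(name: String, team: String) {
  override def toString: String = s"DataSourceId($team/$name)"
}

object LegacyDataSourceIdSketch {
  def main(args: Array[String]): Unit = {
    // Call sites that previously wrote DataSourceId(datasetName, organizationId)
    // now pass the same arguments to the renamed case class:
    val id = LegacyDataSourceId("my-dataset", "sample_organization")
    println(id) // prints: DataSourceId(sample_organization/my-dataset)
  }
}

Because only the type name changes, serialized JSON and cache keys built from the two fields keep their existing layout, which is why the rename can be applied across the bucket providers and array readers below without touching stored data.
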
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala index 27541d75dd1..c0401118cae 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.util.cache.AlfuCache import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkReader, DatasetArray, DatasetHeader} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.typesafe.scalalogging.LazyLogging import com.scalableminds.util.tools.Fox.box2Fox @@ -16,7 +16,7 @@ import scala.concurrent.ExecutionContext object N5Array extends LazyLogging { def open(path: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], @@ -40,7 +40,7 @@ object N5Array extends LazyLogging { } class N5Array(vaultPath: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, header: DatasetHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala index f7cc98ef8d1..29cbe7b00f0 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.io.ZipIO import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, DatasetArray} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box.tryo @@ -20,13 +20,13 @@ import ucar.ma2.{Array => MultiArray} object PrecomputedArray extends LazyLogging { def open( - magPath: VaultPath, - dataSourceId: DataSourceId, - layerName: String, - axisOrderOpt: Option[AxisOrder], - channelIndex: Option[Int], - additionalAxes: Option[Seq[AdditionalAxis]], - sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[PrecomputedArray] = + magPath: VaultPath, + dataSourceId: LegacyDataSourceId, + layerName: String, + axisOrderOpt: Option[AxisOrder], + channelIndex: Option[Int], + additionalAxes: Option[Seq[AdditionalAxis]], + sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[PrecomputedArray] = for { headerBytes <- (magPath.parent / PrecomputedHeader.FILENAME_INFO) .readBytes() ?~> s"Could not read header at ${PrecomputedHeader.FILENAME_INFO}" @@ -49,7 +49,7 @@ object 
PrecomputedArray extends LazyLogging { } class PrecomputedArray(vaultPath: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, header: PrecomputedScaleHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala index c29e42d399f..3f874e31665 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala @@ -8,7 +8,7 @@ import com.scalableminds.util.tools.JsonHelper.bool2Box import com.scalableminds.webknossos.datastore.dataformats.wkw.{MortonEncoding, WKWDataFormatHelper, WKWHeader} import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkUtils, DatasetArray} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, LegacyDataSourceId} import net.liftweb.common.Box import net.liftweb.common.Box.tryo import ucar.ma2.{Array => MultiArray} @@ -19,7 +19,7 @@ import scala.concurrent.ExecutionContext object WKWArray extends WKWDataFormatHelper { def open(path: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[WKWArray] = for { @@ -38,7 +38,7 @@ object WKWArray extends WKWDataFormatHelper { } class WKWArray(vaultPath: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, header: WKWHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala index dbc1b5af04d..c4b31765252 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.cache.AlfuCache import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, DatasetArray, DatasetHeader} import ucar.ma2.{Array => MultiArray} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box.tryo @@ -15,7 +15,7 @@ import scala.concurrent.ExecutionContext object ZarrArray extends LazyLogging { def open(path: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], @@ -42,7 +42,7 @@ object ZarrArray extends LazyLogging { } class ZarrArray(vaultPath: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, header: DatasetHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala index 874dd1d38fd..6b95816192a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.tools.Fox.box2Fox import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkReader, ChunkUtils, DatasetArray} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, LegacyDataSourceId} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box.tryo import ucar.ma2.{Array => MultiArray} @@ -16,7 +16,7 @@ import scala.concurrent.ExecutionContext object Zarr3Array extends LazyLogging { def open(path: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], @@ -39,7 +39,7 @@ object Zarr3Array extends LazyLogging { } class Zarr3Array(vaultPath: VaultPath, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, header: Zarr3ArrayHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala index 461360fe92d..55b584d5bb1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala @@ -7,7 +7,7 @@ import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header import com.scalableminds.webknossos.datastore.models.datasource.{ DataLayerWithMagLocators, DataSource, - DataSourceId, + LegacyDataSourceId, DataSourceWithMagLocators, GenericDataSource } @@ -26,7 +26,7 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) extends ExploreLayerUtils with FoxImplicits { - def exploreLocal(path: Path, dataSourceId: DataSourceId, layerDirectory: String = "")( + def exploreLocal(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String = "")( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = for { _ <- Fox.successful(()) @@ -40,7 +40,7 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) dataSource <- Fox.firstSuccess(explored) ?~> "Could not explore local data source" } yield dataSource - private def exploreLocalZarrArray(path: Path, dataSourceId: DataSourceId, layerDirectory: String)( + private def exploreLocalZarrArray(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = for { magDirectories <- tryo(Files.list(path.resolve(layerDirectory)).iterator().asScala.toList).toFox ?~> s"Could not resolve color directory as child of $path" @@ -57,28 +57,28 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) dataSource = new DataSourceWithMagLocators(dataSourceId, relativeLayers, voxelSize) } yield dataSource - private def exploreLocalNgffArray(path: Path, dataSourceId: DataSourceId)( + private def 
exploreLocalNgffArray(path: Path, dataSourceId: LegacyDataSourceId)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = exploreLocalLayer( layers => layers.map(selectLastTwoDirectories), new NgffExplorer )(path, dataSourceId, "") - private def exploreLocalNeuroglancerPrecomputed(path: Path, dataSourceId: DataSourceId, layerDirectory: String)( + private def exploreLocalNeuroglancerPrecomputed(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = exploreLocalLayer( layers => layers.map(selectLastDirectory), new PrecomputedExplorer )(path, dataSourceId, layerDirectory) - private def exploreLocalN5Multiscales(path: Path, dataSourceId: DataSourceId, layerDirectory: String)( + private def exploreLocalN5Multiscales(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = exploreLocalLayer( layers => layers.map(selectLastDirectory), new N5MultiscalesExplorer )(path, dataSourceId, layerDirectory) - private def exploreLocalN5Array(path: Path, dataSourceId: DataSourceId)( + private def exploreLocalN5Array(path: Path, dataSourceId: LegacyDataSourceId)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = for { _ <- Fox.successful(()) @@ -111,7 +111,7 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) private def exploreLocalLayer( makeLayersRelative: List[DataLayerWithMagLocators] => List[DataLayerWithMagLocators], - explorer: RemoteLayerExplorer)(path: Path, dataSourceId: DataSourceId, layerDirectory: String)( + explorer: RemoteLayerExplorer)(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = for { fullPath <- Fox.successful(path.resolve(layerDirectory)) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala index 964a889a5ce..1877b4f8bff 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.datasource.{ DataLayer, DataLayerWithMagLocators, - DataSourceId, + LegacyDataSourceId, GenericDataSource } import com.scalableminds.webknossos.datastore.services.DSRemoteWebknossosClient @@ -66,7 +66,7 @@ class ExploreRemoteLayerService @Inject()(dataVaultService: DataVaultService, _ <- bool2Fox(layersWithVoxelSizes.nonEmpty) ?~> "Detected zero layers" (layers, voxelSize) <- adaptLayersAndVoxelSize(layersWithVoxelSizes, preferredVoxelSize) dataSource = GenericDataSource[DataLayer]( - DataSourceId("", ""), // Frontend will prompt user for a good name + LegacyDataSourceId("", ""), // Frontend will prompt user for a good name layers, voxelSize ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/UnfinishedUpload.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/UnfinishedUpload.scala index 8f8947b6a9c..c315bd6585d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/UnfinishedUpload.scala +++ 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/UnfinishedUpload.scala @@ -1,11 +1,11 @@ package com.scalableminds.webknossos.datastore.models import com.scalableminds.util.time.Instant -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import play.api.libs.json.{Format, Json} case class UnfinishedUpload(uploadId: String, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, folderId: String, created: Instant, filePaths: Option[List[String]], diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala index 030c33ee148..be4ad0b1a83 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala @@ -198,7 +198,7 @@ trait DataLayer extends DataLayerLike { def lengthOfUnderlyingCubes(resolution: Vec3Int): Int def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider def bucketProviderCacheKey: String = this.name diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala index d43245b55ab..b42e449b76e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala @@ -1,6 +1,7 @@ package com.scalableminds.webknossos.datastore.models import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} +import com.scalableminds.util.requestparsing.{DatasetURIParser, ObjectId} import com.scalableminds.webknossos.datastore.models.datasource.DatasetViewConfiguration.DatasetViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource.inbox.GenericInboxDataSource import play.api.libs.json._ @@ -8,12 +9,35 @@ import play.api.libs.json._ package object datasource { // here team is not (yet) renamed to organization to avoid migrating all jsons - case class DataSourceId(name: String, team: String) { + case class LegacyDataSourceId(name: String, team: String) extends DatasetURIParser { override def toString: String = s"DataSourceId($team/$name)" } - object DataSourceId { - implicit val dataSourceIdFormat: Format[DataSourceId] = Json.format[DataSourceId] + object LegacyDataSourceId extends DatasetURIParser { + implicit val dataSourceIdFormat: Format[LegacyDataSourceId] = Json.format[LegacyDataSourceId] + + def fromDatasetNameAndIdAndOrganizationId(datasetNameAndId: String, organizationId: String): LegacyDataSourceId = { + val (maybeId, maybeDatasetName) = getDatasetIdOrNameFromURIPath(datasetNameAndId) + maybeId match { + case Some(validId) => LegacyDataSourceId(validId.toString, organizationId) + case None => LegacyDataSourceId(maybeDatasetName.getOrElse(datasetNameAndId), organizationId) + } + } + def fromDatasetIdOrNameAndOrganizationId(datasetIdOrName: String, organizationId: String): LegacyDataSourceId = { + val parsedId = ObjectId.fromStringSync(datasetIdOrName) + 
parsedId match { + case Some(validId) => LegacyDataSourceId(validId.toString, organizationId) + case None => LegacyDataSourceId(datasetIdOrName, organizationId) + } + } + } + + case class DatasetIdWithPath(id: ObjectId, path: String) { + override def toString: String = s"DatasetIdWithPath($id, $path)" + } + + object DatasetIdWithPath { + implicit val datasetIdWithPathFormat: Format[DatasetIdWithPath] = Json.format[DatasetIdWithPath] } object DatasetViewConfiguration { @@ -21,7 +45,7 @@ package object datasource { implicit val jsonFormat: Format[DatasetViewConfiguration] = Format.of[DatasetViewConfiguration] } - case class GenericDataSource[+T <: DataLayerLike](id: DataSourceId, + case class GenericDataSource[+T <: DataLayerLike](id: LegacyDataSourceId, dataLayers: List[T], scale: VoxelSize, defaultViewConfiguration: Option[DatasetViewConfiguration] = None) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala index 43d73851bb3..0b0425e6aa2 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala @@ -8,7 +8,7 @@ package object inbox { trait GenericInboxDataSource[+T <: DataLayerLike] { - def id: DataSourceId + def id: LegacyDataSourceId def toUsable: Option[GenericDataSource[T]] @@ -35,7 +35,7 @@ package object inbox { } } - case class UnusableDataSource[+T <: DataLayerLike](id: DataSourceId, + case class UnusableDataSource[+T <: DataLayerLike](id: LegacyDataSourceId, status: String, scale: Option[VoxelSize] = None, existingDataSourceProperties: Option[JsValue] = None) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 2e89f193607..57ca8c9e1de 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -4,7 +4,7 @@ import com.google.inject.Inject import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.enumeration.ExtendedEnumeration import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import play.api.libs.json.{Json, OFormat} import play.api.mvc.Result import play.api.mvc.Results.Forbidden @@ -25,31 +25,31 @@ object AccessResourceType extends ExtendedEnumeration { case class UserAccessAnswer(granted: Boolean, msg: Option[String] = None) object UserAccessAnswer { implicit val jsonFormat: OFormat[UserAccessAnswer] = Json.format[UserAccessAnswer] } -case class UserAccessRequest(resourceId: DataSourceId, resourceType: AccessResourceType.Value, mode: AccessMode.Value) +case class UserAccessRequest(resourceId: LegacyDataSourceId, resourceType: AccessResourceType.Value, mode: AccessMode.Value) object UserAccessRequest { implicit val jsonFormat: OFormat[UserAccessRequest] = Json.format[UserAccessRequest] - def deleteDataSource(dataSourceId: DataSourceId): UserAccessRequest = + def deleteDataSource(dataSourceId: LegacyDataSourceId): UserAccessRequest = UserAccessRequest(dataSourceId, 
AccessResourceType.datasource, AccessMode.delete) def administrateDataSources: UserAccessRequest = - UserAccessRequest(DataSourceId("", ""), AccessResourceType.datasource, AccessMode.administrate) + UserAccessRequest(LegacyDataSourceId("", ""), AccessResourceType.datasource, AccessMode.administrate) def administrateDataSources(organizationId: String): UserAccessRequest = - UserAccessRequest(DataSourceId("", organizationId), AccessResourceType.datasource, AccessMode.administrate) - def readDataSources(dataSourceId: DataSourceId): UserAccessRequest = + UserAccessRequest(LegacyDataSourceId("", organizationId), AccessResourceType.datasource, AccessMode.administrate) + def readDataSources(dataSourceId: LegacyDataSourceId): UserAccessRequest = UserAccessRequest(dataSourceId, AccessResourceType.datasource, AccessMode.read) - def writeDataSource(dataSourceId: DataSourceId): UserAccessRequest = + def writeDataSource(dataSourceId: LegacyDataSourceId): UserAccessRequest = UserAccessRequest(dataSourceId, AccessResourceType.datasource, AccessMode.write) def readTracing(tracingId: String): UserAccessRequest = - UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.read) + UserAccessRequest(LegacyDataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.read) def writeTracing(tracingId: String): UserAccessRequest = - UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.write) + UserAccessRequest(LegacyDataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.write) def downloadJobExport(jobId: String): UserAccessRequest = - UserAccessRequest(DataSourceId(jobId, ""), AccessResourceType.jobExport, AccessMode.read) + UserAccessRequest(LegacyDataSourceId(jobId, ""), AccessResourceType.jobExport, AccessMode.read) def webknossos: UserAccessRequest = - UserAccessRequest(DataSourceId("webknossos", ""), AccessResourceType.webknossos, AccessMode.administrate) + UserAccessRequest(LegacyDataSourceId("webknossos", ""), AccessResourceType.webknossos, AccessMode.administrate) } trait AccessTokenService { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index f82f1a9233b..7abf60a1974 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.tools.ExtendedTypes.ExtendedArraySeq import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.helpers.DatasetDeleter import com.scalableminds.webknossos.datastore.models.BucketPosition -import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayer, DataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayer, LegacyDataSourceId} import com.scalableminds.webknossos.datastore.models.requests.{DataReadInstruction, DataServiceDataRequest} import com.scalableminds.webknossos.datastore.storage._ import com.typesafe.scalalogging.LazyLogging @@ -87,7 +87,7 @@ class BinaryDataService(val dataBaseDir: Path, val readInstruction = DataReadInstruction(dataBaseDir, request.dataSource, request.dataLayer, bucket, request.settings.version) // dataSource is null and unused for volume tracings. 
Insert dummy DataSourceId (also unused in that case) - val dataSourceId = if (request.dataSource != null) request.dataSource.id else DataSourceId("", "") + val dataSourceId = if (request.dataSource != null) request.dataSource.id else LegacyDataSourceId("", "") val bucketProvider = bucketProviderCache.getOrLoadAndPut((dataSourceId, request.dataLayer.bucketProviderCacheKey))(_ => request.dataLayer.bucketProvider(remoteSourceDescriptorServiceOpt, dataSourceId, sharedChunkContentsCache)) @@ -175,14 +175,14 @@ class BinaryDataService(val dataBaseDir: Path, } def clearCache(organizationId: String, datasetName: String, layerName: Option[String]): (Int, Int, Int) = { - val dataSourceId = DataSourceId(datasetName, organizationId) + val dataSourceId = LegacyDataSourceId(datasetName, organizationId) def agglomerateFileMatchPredicate(agglomerateKey: AgglomerateFileKey) = agglomerateKey.datasetName == datasetName && agglomerateKey.organizationId == organizationId && layerName.forall( _ == agglomerateKey.layerName) - def bucketProviderPredicate(key: (DataSourceId, String)): Boolean = - key._1 == DataSourceId(datasetName, organizationId) && layerName.forall(_ == key._2) + def bucketProviderPredicate(key: (LegacyDataSourceId, String)): Boolean = + key._1 == LegacyDataSourceId(datasetName, organizationId) && layerName.forall(_ == key._2) val closedAgglomerateFileHandleCount = agglomerateServiceOpt.map(_.agglomerateFileCache.clear(agglomerateFileMatchPredicate)).getOrElse(0) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index 0703f638076..b599bbe15c6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -4,13 +4,14 @@ import org.apache.pekko.actor.ActorSystem import com.google.inject.Inject import com.google.inject.name.Named import com.scalableminds.util.cache.AlfuCache +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler import com.scalableminds.webknossos.datastore.models.UnfinishedUpload import com.scalableminds.webknossos.datastore.models.annotation.AnnotationSource -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.{DatasetIdWithPath, LegacyDataSourceId} import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSourceLike import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.uploading.ReserveUploadInformation @@ -66,7 +67,7 @@ class DSRemoteWebknossosClient @Inject()( def reportDataSource(dataSource: InboxDataSourceLike): Fox[_] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/datasource") .addQueryString("key" -> dataStoreKey) - .put(dataSource) + .put(dataSource) // TODO: def getUnfinishedUploadsForUser(userTokenOpt: Option[String], organizationName: String): Fox[List[UnfinishedUpload]] = for { @@ -78,7 +79,7 @@ class DSRemoteWebknossosClient @Inject()( .getWithJsonResponse[List[UnfinishedUpload]] } yield 
unfinishedUploads - def reportUpload(dataSourceId: DataSourceId, + def reportUpload(dataSourceId: LegacyDataSourceId, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean, @@ -109,7 +110,7 @@ class DSRemoteWebknossosClient @Inject()( .post(info) } yield () - def deleteDataSource(id: DataSourceId): Fox[_] = + def deleteDataSource(id: LegacyDataSourceId): Fox[_] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset").addQueryString("key" -> dataStoreKey).post(id) def getJobExportProperties(jobId: String): Fox[JobExportProperties] = @@ -164,4 +165,12 @@ class DSRemoteWebknossosClient @Inject()( .silent .getWithJsonResponse[DataVaultCredential] ) + + def resolveDatasetNameToId(organizationId: String, datasetName: String): Fox[DatasetIdWithPath] = + for { + datasetIdWithPath <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/:$organizationId/:$datasetName/getDatasetId") + .addQueryString("key" -> dataStoreKey) + .silent + .getWithJsonResponse[DatasetIdWithPath] + } yield datasetIdWithPath } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala index 645af371134..412cb67fa1d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala @@ -4,7 +4,7 @@ import org.apache.pekko.actor.ActorSystem import com.google.inject.Inject import com.google.inject.name.Named import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DatasetIdWithPath, LegacyDataSourceId} import com.scalableminds.webknossos.datastore.storage.TemporaryStore import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging @@ -16,24 +16,24 @@ class DataSourceRepository @Inject()( remoteWebknossosClient: DSRemoteWebknossosClient, @Named("webknossos-datastore") val system: ActorSystem )(implicit ec: ExecutionContext) - extends TemporaryStore[DataSourceId, InboxDataSource](system) + extends TemporaryStore[DatasetIdWithPath, InboxDataSource](system) with LazyLogging with FoxImplicits { - def getDataSourceAndDataLayer(organizationId: String, datasetName: String, dataLayerName: String)( + def getDataSourceAndDataLayer(datasetId: DatasetIdWithPath, dataLayerName: String)( implicit m: MessagesProvider): Fox[(DataSource, DataLayer)] = for { - dataSource <- findUsable(DataSourceId(datasetName, organizationId)).toFox ?~> Messages("dataSource.notFound") + dataSource <- findUsable(datasetId).toFox ?~> Messages("dataSource.notFound") dataLayer <- dataSource.getDataLayer(dataLayerName) ?~> Messages("dataLayer.notFound", dataLayerName) } yield (dataSource, dataLayer) - def findUsable(id: DataSourceId): Option[DataSource] = - find(id).flatMap(_.toUsable) + def findUsable(datasetId: DatasetIdWithPath): Option[DataSource] = + find(datasetId).flatMap(_.toUsable) - def updateDataSource(dataSource: InboxDataSource): Fox[Unit] = + def updateDataSource(datasetId: DatasetIdWithPath, dataSource: InboxDataSource): Fox[Unit] = for { _ <- Fox.successful(()) - _ = insert(dataSource.id, dataSource) + _ = insert(datasetId, dataSource) _ <- 
remoteWebknossosClient.reportDataSource(dataSource) } yield () @@ -45,7 +45,7 @@ class DataSourceRepository @Inject()( _ <- remoteWebknossosClient.reportDataSources(dataSources) } yield () - def cleanUpDataSource(dataSourceId: DataSourceId): Fox[Unit] = + def cleanUpDataSource(dataSourceId: LegacyDataSourceId): Fox[Unit] = for { _ <- Fox.successful(remove(dataSourceId)) _ <- remoteWebknossosClient.deleteDataSource(dataSourceId) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala index defc8350811..168f076b229 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala @@ -209,7 +209,7 @@ class DataSourceService @Inject()( } def dataSourceFromDir(path: Path, organizationId: String): InboxDataSource = { - val id = DataSourceId(path.getFileName.toString, organizationId) + val id = LegacyDataSourceId(path.getFileName.toString, organizationId) val propertiesFile = path.resolve(propertiesFileName) if (new File(propertiesFile.toString).exists()) { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala new file mode 100644 index 00000000000..2133acb94a4 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala @@ -0,0 +1,40 @@ +package com.scalableminds.webknossos.datastore.services + +import com.google.inject.Inject +import com.google.inject.name.Named +import com.scalableminds.util.requestparsing.{DatasetURIParser, ObjectId} +import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, LegacyDataSourceId} +import com.scalableminds.webknossos.datastore.storage.TemporaryStore +import com.typesafe.scalalogging.LazyLogging +import org.apache.pekko.actor.ActorSystem +import play.api.i18n.{Messages, MessagesProvider} + +import scala.concurrent.ExecutionContext + +/* This class is used to resolve legacy dataset addressing by caching a mapping from the datasource id + based on what is given by the URI path to a DatasetIdWithPath that contains the actual id of the dataset . 
*/ +class DatasetIdRepository @Inject()( + remoteWebknossosClient: DSRemoteWebknossosClient, + @Named("webknossos-datastore") val system: ActorSystem +)(implicit ec: ExecutionContext) + extends TemporaryStore[LegacyDataSourceId, ObjectId](system) + with LazyLogging + with FoxImplicits + with DatasetURIParser { + + def getDatasetIdFromIdOrName(datasetIdOrName: String, organizationId: String): Fox[ObjectId] = { + val dataSourceId = LegacyDataSourceId(datasetIdOrName, organizationId) + find(dataSourceId) match { + case Some(datasetId) => Fox.successful(datasetId) + case None => + val (maybeId, _) = getDatasetIdOrNameFromURIPath(datasetIdOrName) + val resolvedId = maybeId match { + case Some(id) => Fox.successful(id) + case None => remoteWebknossosClient.resolveDatasetNameToId(organizationId, datasetIdOrName) + } + resolvedId.map(insert(dataSourceId, _)).flatMap(_ => resolvedId) + } + } +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala index 26d16943db4..87498a3ff87 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala @@ -89,7 +89,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, private def getLayerFromComposeLayer(composeLayer: ComposeRequestLayer, uploadDir: Path): Fox[DataLayer] = for { dataSourceId <- Fox.successful( - DataSourceId(composeLayer.datasetId.name, composeLayer.datasetId.owningOrganization)) + LegacyDataSourceId(composeLayer.datasetId.name, composeLayer.datasetId.owningOrganization)) dataSource <- Fox.option2Fox(dataSourceRepository.find(dataSourceId)) ds <- Fox.option2Fox(dataSource.toUsable) layer <- Fox.option2Fox(ds.dataLayers.find(_.name == composeLayer.sourceName)) @@ -144,7 +144,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, for { layers <- Fox.serialCombined(composeRequest.layers.toList)(getLayerFromComposeLayer(_, uploadDir)) dataSource = GenericDataSource( - DataSourceId(composeRequest.newDatasetName, organizationId), + LegacyDataSourceId(composeRequest.newDatasetName, organizationId), layers, composeRequest.voxelSize, None @@ -153,7 +153,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, } yield dataSource } - private def isLayerRemote(dataSourceId: DataSourceId, layerName: String) = { + private def isLayerRemote(dataSourceId: LegacyDataSourceId, layerName: String) = { val layerPath = dataBaseDir.resolve(dataSourceId.team).resolve(dataSourceId.name).resolve(layerName) !Files.exists(layerPath) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index 8949a462d1f..196b8de6a46 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -120,7 +120,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, s"upload___${uploadId}___file___${fileName}___chunkCount" private def redisKeyForFileChunkSet(uploadId: String, fileName: String): String = 
s"upload___${uploadId}___file___${fileName}___chunkSet" - private def redisKeyForUploadId(datasourceId: DataSourceId): String = + private def redisKeyForUploadId(datasourceId: LegacyDataSourceId): String = s"upload___${Json.stringify(Json.toJson(datasourceId))}___datasourceId" private def redisKeyForFilePaths(uploadId: String): String = s"upload___${uploadId}___filePaths" @@ -139,8 +139,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, private def uploadDirectory(organizationId: String, uploadId: String): Path = dataBaseDir.resolve(organizationId).resolve(uploadingDir).resolve(uploadId) - def getDataSourceIdByUploadId(uploadId: String): Fox[DataSourceId] = - getObjectFromRedis[DataSourceId](redisKeyForDataSourceId(uploadId)) + def getDataSourceIdByUploadId(uploadId: String): Fox[LegacyDataSourceId] = + getObjectFromRedis[LegacyDataSourceId](redisKeyForDataSourceId(uploadId)) def reserveUpload(reserveUploadInformation: ReserveUploadInformation): Fox[Unit] = for { @@ -149,10 +149,10 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, String.valueOf(reserveUploadInformation.totalFileCount)) _ <- runningUploadMetadataStore.insert( redisKeyForDataSourceId(reserveUploadInformation.uploadId), - Json.stringify(Json.toJson(DataSourceId(reserveUploadInformation.name, reserveUploadInformation.organization))) + Json.stringify(Json.toJson(LegacyDataSourceId(reserveUploadInformation.name, reserveUploadInformation.organization))) ) _ <- runningUploadMetadataStore.insert( - redisKeyForUploadId(DataSourceId(reserveUploadInformation.name, reserveUploadInformation.organization)), + redisKeyForUploadId(LegacyDataSourceId(reserveUploadInformation.name, reserveUploadInformation.organization)), reserveUploadInformation.uploadId ) filePaths = Json.stringify(Json.toJson(reserveUploadInformation.filePaths.getOrElse(List.empty))) @@ -269,7 +269,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, } } - def finishUpload(uploadInformation: UploadInformation, checkCompletion: Boolean = true): Fox[(DataSourceId, Long)] = { + def finishUpload(uploadInformation: UploadInformation, checkCompletion: Boolean = true): Fox[(LegacyDataSourceId, Long)] = { val uploadId = uploadInformation.uploadId for { @@ -304,7 +304,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, private def postProcessUploadedDataSource(datasetNeedsConversion: Boolean, unpackToDir: Path, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layersToLink: Option[List[LinkedLayerIdentifier]]): Fox[Unit] = if (datasetNeedsConversion) Fox.successful(()) @@ -328,7 +328,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, } private def exploreLocalDatasource(path: Path, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, typ: UploadedDataSourceType.Value): Fox[Unit] = for { _ <- Fox.runIf(typ == UploadedDataSourceType.ZARR)( @@ -338,7 +338,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, } yield () private def tryExploringMultipleLayers(path: Path, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, typ: UploadedDataSourceType.Value): Fox[Option[Path]] = for { layerDirs <- typ match { @@ -363,7 +363,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, } yield path private def cleanUpOnFailure[T](result: Box[T], - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, datasetNeedsConversion: Boolean, label: String): Fox[Unit] = 
result match { @@ -399,7 +399,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, _ <- bool2Fox(list.forall(identity)) } yield () - private def dataSourceDirFor(dataSourceId: DataSourceId, datasetNeedsConversion: Boolean): Path = { + private def dataSourceDirFor(dataSourceId: LegacyDataSourceId, datasetNeedsConversion: Boolean): Path = { val dataSourceDir = if (datasetNeedsConversion) dataBaseDir.resolve(dataSourceId.team).resolve(forConversionDir).resolve(dataSourceId.name) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala index 82b5f053eaa..c12d4f8c641 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala @@ -2,6 +2,6 @@ package com.scalableminds.webknossos.datastore.storage import com.scalableminds.util.cache.LRUConcurrentCache import com.scalableminds.webknossos.datastore.dataformats.BucketProvider -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId -class BucketProviderCache(val maxEntries: Int) extends LRUConcurrentCache[(DataSourceId, String), BucketProvider] +class BucketProviderCache(val maxEntries: Int) extends LRUConcurrentCache[(LegacyDataSourceId, String), BucketProvider] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala index dc9a98f4b6f..4f2630f8965 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.tools.Fox.box2Fox import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId import com.scalableminds.webknossos.datastore.services.DSRemoteWebknossosClient import net.liftweb.common.Box import net.liftweb.common.Box.tryo @@ -21,14 +21,14 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote dataStoreConfig: DataStoreConfig, dataVaultService: DataVaultService) { - def vaultPathFor(baseDir: Path, datasetId: DataSourceId, layerName: String, magLocator: MagLocator)( + def vaultPathFor(baseDir: Path, datasetId: LegacyDataSourceId, layerName: String, magLocator: MagLocator)( implicit ec: ExecutionContext): Fox[VaultPath] = for { remoteSourceDescriptor <- remoteSourceDescriptorFor(baseDir, datasetId, layerName, magLocator) vaultPath <- dataVaultService.getVaultPath(remoteSourceDescriptor) } yield vaultPath - def removeVaultFromCache(baseDir: Path, datasetId: DataSourceId, layerName: String, magLocator: MagLocator)( + def removeVaultFromCache(baseDir: Path, datasetId: LegacyDataSourceId, layerName: String, magLocator: MagLocator)( implicit ec: ExecutionContext): Fox[Unit] = for { remoteSource <- 
remoteSourceDescriptorFor(baseDir, datasetId, layerName, magLocator) @@ -36,10 +36,10 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote } yield () private def remoteSourceDescriptorFor( - baseDir: Path, - datasetId: DataSourceId, - layerName: String, - magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[RemoteSourceDescriptor] = + baseDir: Path, + datasetId: LegacyDataSourceId, + layerName: String, + magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[RemoteSourceDescriptor] = for { credentialBox <- credentialFor(magLocator: MagLocator).futureBox uri <- uriForMagLocator(baseDir, datasetId, layerName, magLocator).toFox @@ -47,7 +47,7 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote } yield remoteSource private def uriForMagLocator(baseDir: Path, - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, layerName: String, magLocator: MagLocator): Box[URI] = tryo { val localDatasetDir = baseDir.resolve(dataSourceId.team).resolve(dataSourceId.name) diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index a4dca523cde..996d563d6f3 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -5,10 +5,10 @@ GET /health @com.scalableminds.webknossos.datastore.controllers.Application.health # Read image data -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetIdOrName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(token: Option[String], organizationId: String, datasetIdOrName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/thumbnail.jpg 
@com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index 5d07e4363ad..d7cb1de801c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -4,7 +4,7 @@ import com.google.inject.Inject import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} +import com.scalableminds.webknossos.datastore.models.datasource.{LegacyDataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{ AccessTokenService, @@ -41,7 +41,7 @@ class TSRemoteWebknossosClient @Inject()( private val webknossosUri: String = config.Tracingstore.WebKnossos.uri - private lazy val dataSourceIdByTracingIdCache: AlfuCache[String, DataSourceId] = AlfuCache() + private lazy val dataSourceIdByTracingIdCache: AlfuCache[String, LegacyDataSourceId] = AlfuCache() def reportTracingUpdates(tracingUpdatesReport: TracingUpdatesReport): Fox[WSResponse] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") @@ -62,14 +62,14 @@ class TSRemoteWebknossosClient @Inject()( .silent .getWithJsonResponse[String] - def getDataSourceIdForTracing(tracingId: String)(implicit ec: ExecutionContext): Fox[DataSourceId] = + def getDataSourceIdForTracing(tracingId: String)(implicit ec: ExecutionContext): Fox[LegacyDataSourceId] = dataSourceIdByTracingIdCache.getOrLoad( tracingId, tracingId => rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSourceId") .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) - .getWithJsonResponse[DataSourceId] + .getWithJsonResponse[LegacyDataSourceId] ) override def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala index e95880ae974..e6f9464ee81 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala +++ 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala @@ -7,7 +7,7 @@ import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.WebknossosDataRequest -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, LegacyDataSourceId} import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.FallbackDataKey import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} @@ -19,7 +19,7 @@ case class RemoteFallbackLayer(organizationId: String, elementClass: ElementClassProto) object RemoteFallbackLayer extends ProtoGeometryImplicits { - def fromDataLayerAndDataSource(dataLayer: DataLayerLike, dataSource: DataSourceId): RemoteFallbackLayer = + def fromDataLayerAndDataSource(dataLayer: DataLayerLike, dataSource: LegacyDataSourceId): RemoteFallbackLayer = RemoteFallbackLayer(dataSource.team, dataSource.name, dataLayer.name, dataLayer.elementClass) } trait FallbackDataHelper { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index 47965cee726..fa4cfbb83a5 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -14,7 +14,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ CoordinateTransformation, DataFormat, DataLayer, - DataSourceId, + LegacyDataSourceId, ElementClass, SegmentationLayer } @@ -86,7 +86,7 @@ case class EditableMappingLayer(name: String, override def lengthOfUnderlyingCubes(resolution: Vec3Int): Int = DataLayer.bucketLength override def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider = new EditableMappingBucketProvider(layer = this) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index f8d620e1405..cf15645b63f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -108,7 +108,7 @@ case class VolumeTracingLayer( new VolumeTracingBucketProvider(this) override def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: DataSourceId, + dataSourceId: LegacyDataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider = volumeBucketProvider diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala 
b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala index f2baa1f9c80..926b3f3afa1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala @@ -17,7 +17,7 @@ import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.datasource.{ AdditionalAxis, DataLayer, - DataSourceId, + LegacyDataSourceId, GenericDataSource } import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition, VoxelSize} @@ -72,7 +72,7 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay MagLocator(mag = vec3IntToProto(mag), axisOrder = Some(AxisOrder.cAdditionalxyz(rank))) } GenericDataSource( - id = DataSourceId("", ""), + id = LegacyDataSourceId("", ""), dataLayers = List( Zarr3SegmentationLayer( defaultLayerName, From 5d3f8bda3d258e675636ae7286b3958be33f51b0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 18 Sep 2024 17:07:20 +0200 Subject: [PATCH 004/129] WIP: finish using dataset id in wk core backend and dataspath in datastore & add legacy routes - Undo renaming DataSourceId to LegacyDataSourceId --- app/controllers/AiModelController.scala | 3 +- app/controllers/AnnotationIOController.scala | 2 +- .../AuthenticationController.scala | 17 +- app/controllers/DatasetController.scala | 8 +- app/controllers/LegacyApiController.scala | 42 +++- app/controllers/UserTokenController.scala | 18 +- .../WKRemoteDataStoreController.scala | 33 ++-- .../WKRemoteTracingStoreController.scala | 6 +- .../WKRemoteWorkerController.scala | 2 +- app/models/annotation/AnnotationService.scala | 10 +- app/models/dataset/Dataset.scala | 14 +- app/models/dataset/DatasetService.scala | 60 +++--- app/models/job/Job.scala | 10 +- app/models/job/JobService.scala | 2 +- app/models/task/TaskCreationService.scala | 6 +- app/opengraph/OpenGraphService.scala | 2 +- conf/webknossos.latest.routes | 3 +- conf/webknossos.versioned.routes | 36 +++- .../admin/dataset/dataset_upload_view.tsx | 1 + tools/postgres/schema.sql | 2 +- .../controllers/BinaryDataController.scala | 130 ++++++------- .../controllers/DSMeshController.scala | 30 +-- .../controllers/DataSourceController.scala | 182 +++++++++--------- .../controllers/ZarrStreamingController.scala | 102 +++++----- .../DatasetArrayBucketProvider.scala | 4 +- .../dataformats/MappingProvider.scala | 4 +- .../dataformats/layers/N5DataLayers.scala | 2 +- .../layers/PrecomputedDataLayers.scala | 2 +- .../dataformats/layers/WKWDataLayers.scala | 2 +- .../dataformats/layers/Zarr3DataLayers.scala | 2 +- .../dataformats/layers/ZarrDataLayers.scala | 2 +- .../datastore/datareaders/DatasetArray.scala | 4 +- .../datastore/datareaders/n5/N5Array.scala | 6 +- .../precomputed/PrecomputedArray.scala | 6 +- .../datastore/datareaders/wkw/WKWArray.scala | 6 +- .../datareaders/zarr/ZarrArray.scala | 6 +- .../datareaders/zarr3/Zarr3Array.scala | 6 +- .../explore/ExploreLocalLayerService.scala | 16 +- .../explore/ExploreRemoteLayerService.scala | 4 +- .../datastore/models/UnfinishedUpload.scala | 4 +- .../models/annotation/AnnotationSource.scala | 2 +- .../models/datasource/DataLayer.scala | 2 +- .../models/datasource/DataSource.scala | 38 ++-- .../models/datasource/InboxDataSource.scala | 4 +- 
.../models/requests/DataServiceRequests.scala | 2 +- .../webknossos/datastore/rpc/RPCRequest.scala | 7 + .../services/AccessTokenService.scala | 22 +-- .../services/AgglomerateService.scala | 6 +- .../services/BinaryDataService.scala | 16 +- .../services/ConnectomeFileService.scala | 8 +- .../services/DSFullMeshService.scala | 20 +- .../services/DSRemoteWebknossosClient.scala | 20 +- .../services/DataSourceRepository.scala | 18 +- .../services/DataSourceService.scala | 10 +- .../services/DatasetIdRepository.scala | 6 +- .../datastore/services/MeshFileService.scala | 18 +- .../services/MeshMappingHelper.scala | 6 +- .../services/SegmentIndexFileService.scala | 48 ++--- .../services/uploading/ComposeService.scala | 8 +- .../services/uploading/UploadService.scala | 70 +++---- .../storage/AgglomerateFileCache.scala | 8 +- .../storage/BucketProviderCache.scala | 4 +- .../storage/ParsedMappingCache.scala | 4 +- .../RemoteSourceDescriptorService.scala | 12 +- ....scalableminds.webknossos.datastore.routes | 136 ++++++------- .../TSRemoteDatastoreClient.scala | 12 +- .../TSRemoteWebknossosClient.scala | 12 +- .../tracings/RemoteFallbackLayer.scala | 10 +- .../EditableMappingLayer.scala | 4 +- .../EditableMappingService.scala | 3 +- .../tracings/volume/VolumeTracingLayer.scala | 2 +- .../volume/Zarr3BucketStreamSink.scala | 4 +- 72 files changed, 709 insertions(+), 630 deletions(-) diff --git a/app/controllers/AiModelController.scala b/app/controllers/AiModelController.scala index 6500cc380bf..c22be24e686 100644 --- a/app/controllers/AiModelController.scala +++ b/app/controllers/AiModelController.scala @@ -163,11 +163,10 @@ class AiModelController @Inject()( for { _ <- userService.assertIsSuperUser(request.identity) organization <- organizationDAO.findOne(request.identity._organization) - dataset <- datasetDAO.findOneByNameAndOrganization(request.body.datasetName, organization._id) + dataset <- datasetDAO.findOneByPathAndOrganization(request.body.datasetName, organization._id) dataStore <- dataStoreDAO.findOneByName(dataset._dataStore) ?~> "dataStore.notFound" _ <- aiModelDAO.findOne(request.body.aiModelId) ?~> "aiModel.notFound" _ <- datasetService.assertValidDatasetName(request.body.newDatasetName) - _ <- datasetService.assertNewDatasetName(request.body.newDatasetName, organization._id) jobCommand = JobCommand.infer_with_model boundingBox <- BoundingBox.fromLiteral(request.body.boundingBox).toFox commandArgs = Json.obj( diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index a3e2bdf490c..46936f0d116 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -235,7 +235,7 @@ class AnnotationIOController @Inject()( organizationId <- Fox.fillOption(organizationIdOpt) { datasetDAO.getOrganizationIdForDataset(datasetName)(GlobalAccessContext) } ?~> Messages("dataset.noAccess", datasetName) ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> (if (wkUrl.nonEmpty && conf.Http.uri != wkUrl) { + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) ?~> (if (wkUrl.nonEmpty && conf.Http.uri != wkUrl) { Messages( "dataset.noAccess.wrongHost", datasetName, diff --git a/app/controllers/AuthenticationController.scala b/app/controllers/AuthenticationController.scala index f6609755e09..7288867d06d 100755 --- a/app/controllers/AuthenticationController.scala +++ b/app/controllers/AuthenticationController.scala @@ -8,6 +8,7 @@ import 
play.silhouette.api.util.{Credentials, PasswordInfo} import play.silhouette.api.{LoginInfo, Silhouette} import play.silhouette.impl.providers.CredentialsProvider import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import mail.{DefaultMails, MailchimpClient, MailchimpTag, Send} import models.analytics.{AnalyticsService, InviteEvent, JoinOrganizationEvent, SignupEvent} @@ -26,16 +27,8 @@ import play.api.data.validation.Constraints._ import play.api.i18n.Messages import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, Cookie, PlayBodyParsers, Request, Result} -import security.{ - CombinedAuthenticator, - OpenIdConnectClient, - OpenIdConnectUserInfo, - PasswordHasher, - TokenType, - WkEnv, - WkSilhouetteEnvironment -} -import utils.{ObjectId, WkConf} +import security.{CombinedAuthenticator, OpenIdConnectClient, OpenIdConnectUserInfo, PasswordHasher, TokenType, WkEnv, WkSilhouetteEnvironment} +import utils.WkConf import java.net.URLEncoder import java.nio.charset.StandardCharsets @@ -261,7 +254,7 @@ class AuthenticationController @Inject()( case (Some(organizationId), Some(datasetName), None, None) => for { organization <- organizationDAO.findOne(organizationId) - _ <- datasetDAO.findOneByNameAndOrganization(datasetName, organization._id) + _ <- datasetDAO.findOneByPathAndOrganization(datasetName, organization._id) } yield organization case (None, None, Some(annotationId), None) => for { @@ -314,7 +307,7 @@ class AuthenticationController @Inject()( } private def canAccessDataset(ctx: DBAccessContext, organizationId: String, datasetName: String): Fox[Boolean] = { - val foundFox = datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(ctx) + val foundFox = datasetDAO.findOneByPathAndOrganization(datasetName, organizationId)(ctx) foundFox.futureBox.map(_.isDefined) } diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index 0625a49e1c8..cf43c4db670 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -306,7 +306,7 @@ class DatasetController @Inject()(userService: UserService, def resolveDatasetNameToId(organizationId: String, datasetName: String): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND } yield Ok(Json.obj("datasetId" -> dataset._id)) } @@ -325,12 +325,12 @@ class DatasetController @Inject()(userService: UserService, } // Note that there exists also updatePartial (which will only expect the changed fields) - def update(datasetNameAndId: String): Action[JsValue] = + def update(datasetId: String): Action[JsValue] = sil.SecuredAction.async(parse.json) { implicit request => withJsonBodyUsing(datasetPublicReads) { case (description, displayName, sortingKey, isPublic, tags, metadata, folderId) => for { - parsedDatasetId <- ObjectId.fromString(datasetNameAndId) ?~> "Invalid dataset id" ~> NOT_FOUND + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND maybeUpdatedMetadata = 
metadata.getOrElse(dataset.metadata) _ <- assertNoDuplicateMetadataKeys(maybeUpdatedMetadata) @@ -416,7 +416,7 @@ class DatasetController @Inject()(userService: UserService, def getDatasetIdFromNameAndOrganization(datasetName: String, organizationId: String): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND } yield Ok( Json.obj("id" -> dataset._id, diff --git a/app/controllers/LegacyApiController.scala b/app/controllers/LegacyApiController.scala index f38876a4741..2c04f37e160 100644 --- a/app/controllers/LegacyApiController.scala +++ b/app/controllers/LegacyApiController.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType} import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions -import models.dataset.DatasetService +import models.dataset.{DatasetDAO, DatasetService} import models.organization.OrganizationDAO import javax.inject.Inject @@ -38,6 +38,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, organizationDAO: OrganizationDAO, datasetService: DatasetService, taskDAO: TaskDAO, + datasetDAO: DatasetDAO, taskService: TaskService, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller { @@ -92,7 +93,8 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, def readDatasetV6(organizationName: String, datasetName: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - result <- datasetController.read(organizationName, datasetName, sharingToken)(request) + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationName) + result <- datasetController.read(dataset._id.toString, sharingToken)(request) adaptedResult <- replaceInResult(replaceVoxelSize)(result) } yield adaptedResult } @@ -103,10 +105,44 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, organization <- organizationDAO.findOne(organizationName) // the old organizationName is now the organization id _ <- bool2Fox(organization._id == request.identity._organization) ~> FORBIDDEN _ <- datasetService.assertValidDatasetName(datasetName) - _ <- datasetService.assertNewDatasetName(datasetName, organization._id) ?~> "dataset.name.alreadyTaken" } yield Ok } + def updateDatasetV8(organizationId: String, datasetName: String): Action[JsValue] = + sil.SecuredAction.async(parse.json) { implicit request => + for { + _ <- Fox.successful(logVersioned(request)) + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) + result <- datasetController.update(dataset._id.toString)(request) + } yield result + } + + def getDatasetSharingTokenV8(organizationId: String, datasetName: String): Action[AnyContent] = + sil.SecuredAction.async { implicit request => + for { + _ <- Fox.successful(logVersioned(request)) + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) + sharingToken <- datasetController.getSharingToken(dataset._id.toString)(request) + } yield sharingToken + } + + def 
updateDatasetTeamsV8(organizationId: String, datasetName: String): Action[List[ObjectId]] = + sil.SecuredAction.async(validateJson[List[ObjectId]]) { implicit request => + for { + _ <- Fox.successful(logVersioned(request)) + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) + result <- datasetController.updateTeams(dataset._id.toString)(request) + } yield result + } + + def readDatasetV8(organizationId: String, datasetName: String, sharingToken: Option[String]): Action[AnyContent] = + sil.UserAwareAction.async { implicit request => + for { + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) + result <- datasetController.read(dataset._id.toString, sharingToken)(request) + } yield result + } + /* to provide v4 - replace new annotation layers by old tracing ids (changed in v5) */ diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index b7548d1c0bd..5d5628a32a8 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -3,7 +3,7 @@ package controllers import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services.AccessMode.AccessMode import com.scalableminds.webknossos.datastore.services.{ AccessMode, @@ -97,23 +97,23 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, case AccessResourceType.datasource => handleDataSourceAccess(accessRequest.resourceId, accessRequest.mode, userBox)(sharingTokenAccessCtx) case AccessResourceType.tracing => - handleTracingAccess(accessRequest.resourceId.name, accessRequest.mode, userBox, token) + handleTracingAccess(accessRequest.resourceId.path, accessRequest.mode, userBox, token) case AccessResourceType.jobExport => - handleJobExportAccess(accessRequest.resourceId.name, accessRequest.mode, userBox) + handleJobExportAccess(accessRequest.resourceId.path, accessRequest.mode, userBox) case _ => Fox.successful(UserAccessAnswer(granted = false, Some("Invalid access token."))) } } yield Ok(Json.toJson(answer)) } - private def handleDataSourceAccess(dataSourceId: LegacyDataSourceId, mode: AccessMode, userBox: Box[User])( + private def handleDataSourceAccess(dataSourceId: DataSourceId, mode: AccessMode, userBox: Box[User])( implicit ctx: DBAccessContext): Fox[UserAccessAnswer] = { // Write access is explicitly handled here depending on userBox, // Read access is ensured in findOneBySourceName, depending on the implicit DBAccessContext (to allow sharingTokens) def tryRead: Fox[UserAccessAnswer] = for { - dataSourceBox <- datasetDAO.findOneByNameAndOrganization(dataSourceId.name, dataSourceId.team).futureBox + dataSourceBox <- datasetDAO.findOneByPathAndOrganization(dataSourceId.path, dataSourceId.organizationId).futureBox } yield dataSourceBox match { case Full(_) => UserAccessAnswer(granted = true) @@ -122,7 +122,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, def tryWrite: Fox[UserAccessAnswer] = for { - dataset <- datasetDAO.findOneByNameAndOrganization(dataSourceId.name, dataSourceId.team) ?~> "datasource.notFound" + dataset <- datasetDAO.findOneByPathAndOrganization(dataSourceId.path, dataSourceId.organizationId) ?~> "datasource.notFound" user <- userBox.toFox ?~> 
"auth.token.noUser" isAllowed <- datasetService.isEditableBy(dataset, Some(user)) } yield UserAccessAnswer(isAllowed) @@ -132,9 +132,9 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, case Full(user) => for { // if dataSourceId is empty, the request asks if the user may administrate in *any* (i.e. their own) organization - relevantOrganization <- if (dataSourceId.team.isEmpty) + relevantOrganization <- if (dataSourceId.organizationId.isEmpty) Fox.successful(user._organization) - else organizationDAO.findOne(dataSourceId.team).map(_._id) + else organizationDAO.findOne(dataSourceId.organizationId).map(_._id) isTeamManagerOrAdmin <- userService.isTeamManagerOrAdminOfOrg(user, relevantOrganization) } yield UserAccessAnswer(isTeamManagerOrAdmin || user.isDatasetManager) case _ => Fox.successful(UserAccessAnswer(granted = false, Some("invalid access token"))) @@ -143,7 +143,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, def tryDelete: Fox[UserAccessAnswer] = for { _ <- bool2Fox(conf.Features.allowDeleteDatasets) ?~> "dataset.delete.disabled" - dataset <- datasetDAO.findOneByNameAndOrganization(dataSourceId.name, dataSourceId.team)(GlobalAccessContext) ?~> "datasource.notFound" + dataset <- datasetDAO.findOneByPathAndOrganization(dataSourceId.path, dataSourceId.organizationId)(GlobalAccessContext) ?~> "datasource.notFound" user <- userBox.toFox ?~> "auth.token.noUser" } yield UserAccessAnswer(user._organization == dataset._organization && user.isAdmin) diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index caa6b20de06..08fd203b44d 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.models.UnfinishedUpload -import com.scalableminds.webknossos.datastore.models.datasource.{DatasetIdWithPath, LegacyDataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{DatasetIdWithPath, DataSourceId} import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource} import com.scalableminds.webknossos.datastore.services.DataStoreStatus import com.scalableminds.webknossos.datastore.services.uploading.{LinkedLayerIdentifier, ReserveUploadInformation} @@ -59,14 +59,6 @@ class WKRemoteDataStoreController @Inject()( val bearerTokenService: WebknossosBearerTokenAuthenticatorService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService - def getDatasetIdWithPath(name: String, key: String, organizationId: String, datasetName: String): Action[AnyContent] = - Action.async { implicit request => - dataStoreService.validateAccess(name, key) { dataStore => - for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) ?~> Messages( - "dataset.notFound", - datasetName) ~> NOT_FOUND - } yield Ok(Json.toJson(DatasetIdWithPath(dataset._id, dataset.path))) def reserveDatasetUpload(name: String, key: String, token: String): Action[ReserveUploadInformation] = Action.async(validateJson[ReserveUploadInformation]) { implicit request => @@ -82,7 +74,6 @@ class WKRemoteDataStoreController @Inject()( bool2Fox(usedStorageBytes <= includedStorage)) ?~> "dataset.upload.storageExceeded" ~> FORBIDDEN _ <- 
bool2Fox(organization._id == user._organization) ?~> "notAllowed" ~> FORBIDDEN _ <- datasetService.assertValidDatasetName(uploadInfo.name) - _ <- datasetService.assertNewDatasetName(uploadInfo.name, organization._id) ?~> "dataset.name.alreadyTaken" _ <- bool2Fox(dataStore.onlyAllowedOrganization.forall(_ == organization._id)) ?~> "dataset.upload.Datastore.restricted" folderId <- ObjectId.fromString(uploadInfo.folderId.getOrElse(organization._rootFolder.toString)) ?~> "dataset.upload.folderId.invalid" _ <- folderDAO.assertUpdateAccess(folderId)(AuthorizedAccessContext(user)) ?~> "folder.noWriteAccess" @@ -91,7 +82,8 @@ class WKRemoteDataStoreController @Inject()( _ <- datasetDAO.updateFolder(dataset._id, folderId)(GlobalAccessContext) _ <- datasetService.addInitialTeams(dataset, uploadInfo.initialTeams, user)(AuthorizedAccessContext(user)) _ <- datasetService.addUploader(dataset, user._id)(AuthorizedAccessContext(user)) - } yield Ok + uploadInfo.path = dataset.path // Update path according to the newly created dataset. + } yield Ok(Json.toJson(uploadInfo)) } } @@ -129,7 +121,7 @@ class WKRemoteDataStoreController @Inject()( organization <- organizationDAO.findOne(layerIdentifier.getOrganizationId)(GlobalAccessContext) ?~> Messages( "organization.notFound", layerIdentifier.getOrganizationId) ~> NOT_FOUND - dataset <- datasetDAO.findOneByNameAndOrganization(layerIdentifier.dataSetName, organization._id)( + dataset <- datasetDAO.findOneByPathAndOrganization(layerIdentifier.dataSetName, organization._id)( AuthorizedAccessContext(requestingUser)) ?~> Messages("dataset.notFound", layerIdentifier.dataSetName) isTeamManagerOrAdmin <- userService.isTeamManagerOrAdminOfOrg(requestingUser, dataset._organization) _ <- Fox.bool2Fox(isTeamManagerOrAdmin || requestingUser.isDatasetManager || dataset.isPublic) ?~> "dataset.upload.linkRestricted" @@ -138,30 +130,31 @@ class WKRemoteDataStoreController @Inject()( def reportDatasetUpload(name: String, key: String, token: String, - datasetName: String, + datasetId: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean): Action[AnyContent] = Action.async { implicit request => dataStoreService.validateAccess(name, key) { dataStore => for { + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Could not parse dataset id" user <- bearerTokenService.userForToken(token) - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, user._organization)(GlobalAccessContext) ?~> Messages( + dataset <- datasetDAO.findOne(parsedDatasetId)(GlobalAccessContext) ?~> Messages( "dataset.notFound", - datasetName) ~> NOT_FOUND + datasetId) ~> NOT_FOUND _ <- Fox.runIf(!needsConversion && !viaAddRoute)(usedStorageService.refreshStorageReportForDataset(dataset)) - _ <- Fox.runIf(!needsConversion)(logUploadToSlack(user, datasetName, viaAddRoute)) + _ <- Fox.runIf(!needsConversion)(logUploadToSlack(user, parsedDatasetId, viaAddRoute)) _ = analyticsService.track(UploadDatasetEvent(user, dataset, dataStore, datasetSizeBytes)) _ = if (!needsConversion) mailchimpClient.tagUser(user, MailchimpTag.HasUploadedOwnDataset) } yield Ok } } - private def logUploadToSlack(user: User, datasetName: String, viaAddRoute: Boolean): Fox[Unit] = + private def logUploadToSlack(user: User, datasetId: ObjectId, viaAddRoute: Boolean): Fox[Unit] = for { organization <- organizationDAO.findOne(user._organization)(GlobalAccessContext) multiUser <- multiUserDAO.findOne(user._multiUser)(GlobalAccessContext) - resultLink = 
s"${conf.Http.uri}/datasets/${organization._id}/$datasetName" + resultLink = s"${conf.Http.uri}/datasets/$datasetId" addLabel = if (viaAddRoute) "(via explore+add)" else "(upload without conversion)" superUserLabel = if (multiUser.isSuperUser) " (for superuser)" else "" _ = slackNotificationService.info(s"Dataset added $addLabel$superUserLabel", @@ -225,9 +218,9 @@ class WKRemoteDataStoreController @Inject()( def deleteDataset(name: String, key: String): Action[JsValue] = Action.async(parse.json) { implicit request => dataStoreService.validateAccess(name, key) { _ => for { - datasourceId <- request.body.validate[LegacyDataSourceId].asOpt.toFox ?~> "dataStore.upload.invalid" + datasourceId <- request.body.validate[DataSourceId].asOpt.toFox ?~> "dataStore.upload.invalid" existingDataset = datasetDAO - .findOneByNameAndOrganization(datasourceId.name, datasourceId.team)(GlobalAccessContext) + .findOneByPathAndOrganization(datasourceId.path, datasourceId.organizationId)(GlobalAccessContext) .futureBox _ <- existingDataset.flatMap { diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 7d2b84d9718..e28092e51e1 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -3,7 +3,7 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.tracingstore.TracingUpdatesReport import javax.inject.Inject @@ -109,7 +109,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore annotation <- annotationInformationProvider.annotationForTracing(tracingId) ?~> s"No annotation for tracing $tracingId" dataset <- datasetDAO.findOne(annotation._dataset) organization <- organizationDAO.findOne(dataset._organization) - } yield Ok(Json.toJson(LegacyDataSourceId(dataset.name, organization._id))) + } yield Ok(Json.toJson(DataSourceId(dataset.name, organization._id))) } } @@ -124,7 +124,7 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore organizationIdWithFallback <- Fox.fillOption(organizationId) { datasetDAO.getOrganizationIdForDataset(datasetPath)(GlobalAccessContext) } ?~> Messages("dataset.noAccess", datasetPath) ~> FORBIDDEN - dataset <- datasetDAO.findOneByNameAndOrganization(datasetPath, organizationIdWithFallback) ?~> Messages( + dataset <- datasetDAO.findOneByPathAndOrganization(datasetPath, organizationIdWithFallback) ?~> Messages( "dataset.noAccess", datasetPath) ~> FORBIDDEN dataStore <- datasetService.dataStoreFor(dataset) diff --git a/app/controllers/WKRemoteWorkerController.scala b/app/controllers/WKRemoteWorkerController.scala index 8df45323cba..8535c6fdc1f 100644 --- a/app/controllers/WKRemoteWorkerController.scala +++ b/app/controllers/WKRemoteWorkerController.scala @@ -109,7 +109,7 @@ class WKRemoteWorkerController @Inject()(jobDAO: JobDAO, _ <- workerDAO.findOneByKey(key) ?~> "jobs.worker.notFound" jobIdParsed <- ObjectId.fromString(id) organizationId <- jobDAO.organizationIdForJobId(jobIdParsed) ?~> "job.notFound" - dataset <- datasetDAO.findOneByNameAndOrganization(request.body, organizationId) + dataset <- datasetDAO.findOneByPathAndOrganization(request.body, 
organizationId) aiInference <- aiInferenceDAO.findOneByJobId(jobIdParsed) ?~> "aiInference.notFound" _ <- aiInferenceDAO.updateDataset(aiInference._id, dataset._id) } yield Ok diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 655ed92dbec..ae638ac2919 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -135,7 +135,7 @@ class AnnotationService @Inject()( remoteDatastoreClient = new WKRemoteDataStoreClient(datasetDataStore, rpc) fallbackLayerHasSegmentIndex <- fallbackLayer match { case Some(layer) => - remoteDatastoreClient.hasSegmentIndexFile(datasetOrganizationId, dataSource.id.name, layer.name) + remoteDatastoreClient.hasSegmentIndexFile(datasetOrganizationId, dataSource.id.path, layer.name) case None => Fox.successful(false) } } yield @@ -143,7 +143,7 @@ class AnnotationService @Inject()( None, boundingBoxToProto(boundingBox.getOrElse(dataSource.boundingBox)), System.currentTimeMillis(), - dataSource.id.name, + dataSource.id.path, vec3IntToProto(startPosition.getOrElse(dataSource.center)), vec3DoubleToProto(startRotation.getOrElse(vec3DoubleFromProto(VolumeTracingDefaults.editRotation))), elementClassToProto( @@ -371,7 +371,7 @@ class AnnotationService @Inject()( dataset <- datasetDAO.findOne(datasetId) ?~> "dataset.noAccessById" dataSource <- datasetService.dataSourceFor(dataset) datasetOrganization <- organizationDAO.findOne(dataset._organization) - usableDataSource <- dataSource.toUsable ?~> Messages("dataset.notImported", dataSource.id.name) + usableDataSource <- dataSource.toUsable ?~> Messages("dataset.notImported", dataSource.id.path) annotationLayers <- createTracingsForExplorational(dataset, usableDataSource, annotationLayerParameters, @@ -540,7 +540,7 @@ class AnnotationService @Inject()( m: MessagesProvider): Fox[VolumeTracing] = for { organization <- organizationDAO.findOne(organizationId) - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> Messages("dataset.notFound", + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) ?~> Messages("dataset.notFound", datasetName) dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) @@ -933,7 +933,7 @@ class AnnotationService @Inject()( annotationSource = AnnotationSource( id = annotation.id, annotationLayers = annotation.annotationLayers, - datasetName = dataset.name, + datasetPath = dataset.path, organizationId = organization._id, dataStoreUrl = dataStore.publicUrl, tracingStoreUrl = tracingStore.publicUrl, diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index 442380c27bc..ef8337391e7 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -16,7 +16,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ Category, CoordinateTransformation, CoordinateTransformationType, - LegacyDataSourceId, + DataSourceId, ElementClass, ThinPlateSplineCorrespondences, DataLayerLike => DataLayer @@ -81,7 +81,7 @@ case class DatasetCompactInfo( colorLayerNames: List[String], segmentationLayerNames: List[String], ) { - def dataSourceId = new LegacyDataSourceId(name, owningOrganization) + def dataSourceId = new DataSourceId(path, owningOrganization) } object DatasetCompactInfo { @@ -405,7 +405,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA implicit ctx: DBAccessContext): 
Fox[Dataset] = getDatasetIdOrNameFromURIPath(idAndName) match { case (Some(validId), None) => findOneByIdAndOrganization(validId, organizationId) - case (None, Some(datasetName)) => findOneByNameAndOrganization(datasetName, organizationId) + case (None, Some(datasetName)) => findOneByPathAndOrganization(datasetName, organizationId) } private def getWhereClauseForDatasetIdOrName(datasetIdOrName: String): SqlToken = { @@ -416,7 +416,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA } } - def findOneByNameAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = + def findOneByPathAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = for { accessQuery <- readAccessQuery r <- run(q"""SELECT $columns @@ -440,13 +440,13 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA parsed <- parseFirst(r, s"$organizationId/$id") } yield parsed - def findAllByNamesAndOrganization(names: List[String], organizationId: String)( + def findAllByPathsAndOrganization(names: List[String], organizationId: String)( implicit ctx: DBAccessContext): Fox[List[Dataset]] = for { accessQuery <- readAccessQuery r <- run(q"""SELECT $columns FROM $existingCollectionName - WHERE name IN ${SqlToken.tupleFromList(names)} + WHERE path IN ${SqlToken.tupleFromList(names)} AND _organization = $organizationId AND $accessQuery""".as[DatasetsRow]).map(_.toList) parsed <- parseAll(r) @@ -610,7 +610,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA source: InboxDataSource, isUsable: Boolean)(implicit ctx: DBAccessContext): Fox[Unit] = for { - organization <- organizationDAO.findOne(source.id.team) + organization <- organizationDAO.findOne(source.id.organizationId) defaultViewConfiguration: Option[JsValue] = source.defaultViewConfiguration.map(Json.toJson(_)) _ <- run(q"""UPDATE webknossos.datasets SET diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 9e34c9a0004..f7edba98d5b 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -4,8 +4,15 @@ import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContex import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.datasource.inbox.{UnusableDataSource, InboxDataSourceLike => InboxDataSource} -import com.scalableminds.webknossos.datastore.models.datasource.{GenericDataSource, LegacyDataSourceId, DataLayerLike => DataLayer} +import com.scalableminds.webknossos.datastore.models.datasource.inbox.{ + UnusableDataSource, + InboxDataSourceLike => InboxDataSource +} +import com.scalableminds.webknossos.datastore.models.datasource.{ + GenericDataSource, + DataSourceId, + DataLayerLike => DataLayer +} import com.scalableminds.webknossos.datastore.rpc.RPC import com.typesafe.scalalogging.LazyLogging import models.folder.FolderDAO @@ -56,12 +63,19 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, _ <- bool2Fox(!name.startsWith(".")) ?~> "dataset.layer.name.invalid.startsWithDot" } yield () - def assertNewDatasetName(name: String, organizationId: String): Fox[Unit] = - datasetDAO.findOneByNameAndOrganization(name, organizationId)(GlobalAccessContext).reverse + def isNewDatasetName(name: String, organizationId: String): Fox[Boolean] = + 
datasetDAO + .findOneByPathAndOrganization(name, organizationId)(GlobalAccessContext) + .futureBox + .flatMap { + case Full(_) => Fox.successful(false) + case _ => Fox.successful(true) + } + .toFox def createPreliminaryDataset(datasetName: String, organizationId: String, dataStore: DataStore): Fox[Dataset] = { - val unreportedDatasource = UnusableDataSource(LegacyDataSourceId(datasetName, organizationId), notYetUploadedStatus) - createDataset(dataStore, organizationId, unreportedDatasource) + val unreportedDatasource = UnusableDataSource(DataSourceId(datasetName, organizationId), notYetUploadedStatus) + createDataset(dataStore, organizationId, datasetName, unreportedDatasource) } def getAllUnfinishedDatasetUploadsOfUser(userId: ObjectId, organizationId: String)( @@ -79,6 +93,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, private def createDataset( dataStore: DataStore, owningOrganization: String, + datasetName: String, dataSource: InboxDataSource, publication: Option[ObjectId] = None ): Fox[Dataset] = { @@ -97,11 +112,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, for { organization <- organizationDAO.findOne(owningOrganization) organizationRootFolder <- folderDAO.findOne(organization._rootFolder) - isNewDatasetName <- assertNewDatasetName(dataSource.id.name, organization._id).futureBox.map { - case Empty => true - case _ => false - } - datasetPath = if (isNewDatasetName) dataSource.id.name else newId.toString + datasetPath <- isNewDatasetName(datasetName, organization._id).map(if (_) datasetName else newId.toString) + dataSource.id.path = datasetPath // Sync path with dataSource dataset = Dataset( newId, dataStore.name, @@ -116,7 +128,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, path = datasetPath, isPublic = false, isUsable = dataSource.isUsable, - name = dataSource.id.name, + name = dataSource.id.path, voxelSize = dataSource.voxelSizeOpt, sharingToken = None, status = dataSource.statusOpt.getOrElse(""), @@ -132,7 +144,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, def updateDataSources(dataStore: DataStore, dataSources: List[InboxDataSource])( implicit ctx: DBAccessContext): Fox[List[ObjectId]] = { - val groupedByOrga = dataSources.groupBy(_.id.team).toList + val groupedByOrga = dataSources.groupBy(_.id.organizationId).toList Fox .serialCombined(groupedByOrga) { orgaTuple: (String, List[InboxDataSource]) => organizationDAO @@ -145,10 +157,10 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, Fox.successful(List.empty) case Full(organization) => for { - foundDatasets <- datasetDAO.findAllByNamesAndOrganization(orgaTuple._2.map(_.id.name), organization._id) - foundDatasetsByName = foundDatasets.groupBy(_.name) + foundDatasets <- datasetDAO.findAllByPathsAndOrganization(orgaTuple._2.map(_.id.path), organization._id) + foundDatasetsByPath = foundDatasets.groupBy(_.path) existingIds <- Fox.serialCombined(orgaTuple._2)(dataSource => - updateDataSource(dataStore, dataSource, foundDatasetsByName)) + updateDataSource(dataStore, dataSource, foundDatasetsByPath)) } yield existingIds.flatten case _ => logger.info( @@ -163,16 +175,20 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, private def updateDataSource( dataStore: DataStore, dataSource: InboxDataSource, - foundDatasets: Map[String, List[Dataset]] + foundDatasetsByPath: Map[String, List[Dataset]] )(implicit ctx: DBAccessContext): Fox[Option[ObjectId]] = { - val foundDatasetOpt = 
foundDatasets.get(dataSource.id.name).flatMap(_.headOption) + val foundDatasetOpt = foundDatasetsByPath.get(dataSource.id.path).flatMap(_.headOption) foundDatasetOpt match { case Some(foundDataset) if foundDataset._dataStore == dataStore.name => updateKnownDataSource(foundDataset, dataSource, dataStore).toFox.map(Some(_)) case Some(foundDataset) => // This only returns None for Datasets that are present on a normal Datastore but also got reported from a scratch Datastore updateDataSourceDifferentDataStore(foundDataset, dataSource, dataStore) case _ => - insertNewDataset(dataSource, dataStore).toFox.map(Some(_)) + val maybeParsedDatasetPath = ObjectId.fromStringSync(dataSource.id.path) + // Avoid using the path as name in case it is an ObjectId. + val newDatasetName = maybeParsedDatasetPath.map(_ => "Newly Discovered Dataset").getOrElse(dataSource.id.path) + insertNewDataset(dataSource, newDatasetName, dataStore).toFox + .map(Some(_)) // TODO: Discuss how to better handle this case } } @@ -217,9 +233,9 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, } }).flatten.futureBox - private def insertNewDataset(dataSource: InboxDataSource, dataStore: DataStore) = + private def insertNewDataset(dataSource: InboxDataSource, datasetName: String, dataStore: DataStore) = publicationForFirstDataset.flatMap { publicationId: Option[ObjectId] => - createDataset(dataStore, dataSource.id.team, dataSource, publicationId).map(_._id) + createDataset(dataStore, dataSource.id.organizationId, datasetName, dataSource, publicationId).map(_._id) }.futureBox private def publicationForFirstDataset: Fox[Option[ObjectId]] = @@ -255,7 +271,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, organizationDAO.findOne(dataset._organization)(GlobalAccessContext) ?~> "organization.notFound" } dataLayers <- datasetDataLayerDAO.findAllForDataset(dataset._id) - dataSourceId = LegacyDataSourceId(dataset.name, organization._id) + dataSourceId = DataSourceId(dataset.name, organization._id) } yield { if (dataset.isUsable) for { diff --git a/app/models/job/Job.scala b/app/models/job/Job.scala index c07221ca6c5..fdcdd66a250 100644 --- a/app/models/job/Job.scala +++ b/app/models/job/Job.scala @@ -51,6 +51,8 @@ case class Job( def datasetName: Option[String] = argAsStringOpt("dataset_name") + def datasetId: Option[String] = argAsStringOpt("dataset_id") + private def argAsStringOpt(key: String) = (commandArgs \ key).toOption.flatMap(_.asOpt[String]) def resultLink(organizationId: String): Option[String] = @@ -58,15 +60,15 @@ case class Job( else { command match { case JobCommand.convert_to_wkw | JobCommand.compute_mesh_file => - datasetName.map { dsName => - s"/datasets/$organizationId/$dsName/view" + datasetId.map { datasetId => + s"/datasets/$datasetId/view" // TODO: Adjust worker } case JobCommand.export_tiff | JobCommand.render_animation => Some(s"/api/jobs/${this._id}/export") case JobCommand.infer_nuclei | JobCommand.infer_neurons | JobCommand.materialize_volume_annotation | JobCommand.infer_with_model | JobCommand.infer_mitochondria | JobCommand.align_sections => - returnValue.map { resultDatasetName => - s"/datasets/$organizationId/$resultDatasetName/view" + returnValue.map { resultDatasetId => // TODO: Adjust worker + s"/datasets/$resultDatasetId/view" } case _ => None } diff --git a/app/models/job/JobService.scala b/app/models/job/JobService.scala index 28c6e371daf..4573a802eb0 100644 --- a/app/models/job/JobService.scala +++ b/app/models/job/JobService.scala @@ -153,7 +153,7 @@ class 
JobService @Inject()(wkConf: WkConf, for { datasetName <- commandArgs.get("dataset_name").map(_.as[String]).toFox organizationId <- commandArgs.get("organization_name").map(_.as[String]).toFox - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId)(GlobalAccessContext) + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId)(GlobalAccessContext) _ <- datasetDAO.deleteDataset(dataset._id) } yield () } else Fox.successful(()) diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index 49f98d11cf2..87fa37b9889 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -74,7 +74,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, for { taskTypeIdValidated <- ObjectId.fromString(taskParameters.taskTypeId) ?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" - dataset <- datasetDAO.findOneByNameAndOrganization(taskParameters.dataSet, organizationId) + dataset <- datasetDAO.findOneByPathAndOrganization(taskParameters.dataSet, organizationId) baseAnnotationIdValidated <- ObjectId.fromString(baseAnnotation.baseId) annotation <- resolveBaseAnnotationId(baseAnnotationIdValidated) tracingStoreClient <- tracingStoreService.clientFor(dataset) @@ -234,7 +234,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, private def addVolumeFallbackBoundingBox(volume: VolumeTracing, organizationId: String): Fox[VolumeTracing] = if (volume.boundingBox.isEmpty) { for { - dataset <- datasetDAO.findOneByNameAndOrganization(volume.datasetName, organizationId)(GlobalAccessContext) + dataset <- datasetDAO.findOneByPathAndOrganization(volume.datasetName, organizationId)(GlobalAccessContext) dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) } yield volume.copy(boundingBox = dataSource.boundingBox) } else Fox.successful(volume) @@ -387,7 +387,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, _ <- assertEachHasEitherSkeletonOrVolume(fullTasks) ?~> "task.create.needsEitherSkeletonOrVolume" firstDatasetName <- fullTasks.headOption.map(_._1.dataSet).toFox _ <- assertAllOnSameDataset(fullTasks, firstDatasetName) - dataset <- datasetDAO.findOneByNameAndOrganization(firstDatasetName, requestingUser._organization) ?~> Messages( + dataset <- datasetDAO.findOneByPathAndOrganization(firstDatasetName, requestingUser._organization) ?~> Messages( "dataset.notFound", firstDatasetName) _ = if (fullTasks.exists(task => task._1.baseAnnotation.isDefined)) diff --git a/app/opengraph/OpenGraphService.scala b/app/opengraph/OpenGraphService.scala index 0884fc0863a..19608595fa5 100644 --- a/app/opengraph/OpenGraphService.scala +++ b/app/opengraph/OpenGraphService.scala @@ -111,7 +111,7 @@ class OpenGraphService @Inject()(datasetDAO: DatasetDAO, datasetName: String, token: Option[String])(implicit ctx: DBAccessContext) = for { - dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) layers <- datasetLayerDAO.findAllForDataset(dataset._id) layerOpt = layers.find(_.category == Category.color) organization <- organizationDAO.findOne(dataset._organization) diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 2a1081e289e..07590d89c08 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -110,7 +110,7 @@ PUT 
/datastores/:name/datasources PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String) POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String) GET /datastores/:name/getUnfinishedUploadsForUser controllers.WKRemoteDataStoreController.getUnfinishedUploadsForUser(name: String, key: String, token: String, organizationName: String) -POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean, path: Option[String]) +POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetId: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean, path: Option[String]) POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String) GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: String) GET /datastores/:name/findCredential controllers.WKRemoteDataStoreController.findCredential(name: String, key: String, credentialId: String) @@ -118,7 +118,6 @@ POST /datastores/:name/validateUserAccess POST /datastores controllers.DataStoreController.create() DELETE /datastores/:name controllers.DataStoreController.delete(name: String) PUT /datastores/:name controllers.DataStoreController.update(name: String) -GET /datastores/:name/:organizationId/:datasetName/getDatasetId // TODO? controllers.WKRemoteDataStoreController.getDatasetIdWithPath(name: String, key: String, organizationId: String, datasetName: String) # Tracingstores GET /tracingstore controllers.TracingStoreController.listOne() diff --git a/conf/webknossos.versioned.routes b/conf/webknossos.versioned.routes index 48bb3dfe9fe..50311201f7a 100644 --- a/conf/webknossos.versioned.routes +++ b/conf/webknossos.versioned.routes @@ -13,22 +13,33 @@ # new in v3: annotation info and finish request now take timestamp # new in v2: annotation json contains visibility enum instead of booleans --> /v8/ webknossos.latest.Routes +-> /v9/ webknossos.latest.Routes # v8: support changes to v9 -PATCH || /datasets/:organizationId/:datasetNameAndId controllers.DatasetController.update(organizationId: String, datasetNameAndId: String) -GET || /datasets/:organizationId/:datasetNameAndId/sharingToken controllers.DatasetController.getSharingToken(organizationId: String, datasetNameAndId: String) -PATCH || /datasets/:organizationId/:datasetNameAndId/teams controllers.DatasetController.updateTeams(organizationId: String, datasetNameAndId: String) -GET || /datasets/:organizationId/:datasetNameAndId controllers.DatasetController.read(organizationId: String, datasetNameAndId: String, sharingToken: Option[String]) -GET /datasets/:datasetName/isValidNewName controllers.DatasetController.isValidNewName(datasetName: String) +PATCH /v8/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v8/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v8/datasets/:organizationId/:datasetName/teams 
controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String)
+GET /v8/datasets/:organizationId/:datasetName controllers.LegacyApiController.readDatasetV8(organizationId: String, datasetName: String, sharingToken: Option[String])
-> /v8/ webknossos.latest.Routes
+# v7: support changes to v9
+PATCH /v7/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String)
+GET /v7/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String)
+PATCH /v7/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String)
+GET /v7/datasets/:organizationId/:datasetName controllers.LegacyApiController.readDatasetV8(organizationId: String, datasetName: String, sharingToken: Option[String])
+
# v7: support changes to v8
GET /v7/datasets controllers.LegacyApiController.listDatasetsV7(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean])
+
-> /v7/ webknossos.latest.Routes
+# v6: support changes to v9
+PATCH /v6/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String)
+GET /v6/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String)
+PATCH /v6/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String)
+
# v6: support changes to v7
GET /v6/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean])
GET /v6/datasets/:organizationName/:datasetName controllers.LegacyApiController.readDatasetV6(organizationName: String, datasetName: String, sharingToken: Option[String])
@@ -36,6 +47,11 @@ GET /v6/datasets/:organizationName/:datasetName co
-> /v6/ webknossos.latest.Routes
+# v5: support changes to v9
+PATCH /v5/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String)
+GET /v5/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String)
+PATCH /v5/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String)
+
# v5: support changes to v7
GET /v5/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean])
GET /v5/datasets/:organizationName/:datasetName controllers.LegacyApiController.readDatasetV6(organizationName: String, datasetName: String, sharingToken: Option[String])
@@ -45,6 +61,10 @@ GET /v5/datasets/:organizationName/:datasetName/isValidNewName co
-> /v5/ webknossos.latest.Routes
+# v4: support changes to v9
+PATCH /v4/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String)
+GET /v4/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String)
+PATCH /v4/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String)
# v4: support changes to v7
GET /v4/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean])
@@ -70,6 +90,10 @@ POST /v4/datasets/:organizationName/:datasetName/createExplorational co
-> /v4/ webknossos.latest.Routes
+# v3: support changes to v9
+PATCH /v3/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String)
+GET /v3/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String)
+PATCH /v3/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String)
# v3: support changes to v7
GET /v3/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean])
diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx
index b4f235e9110..ff526cbd643 100644
--- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx
@@ -320,6 +320,7 @@ class DatasetUploadView extends React.Component {
uploadId,
organization: datasetId.owningOrganization,
name: datasetId.name,
+ path: datasetId.name,
totalFileCount: formValues.zipFile.length,
filePaths: filePaths,
layersToLink: [],
diff --git a/tools/postgres/schema.sql b/tools/postgres/schema.sql
index 0c467df5709..0e6b2de8e8a 100644
--- a/tools/postgres/schema.sql
+++ b/tools/postgres/schema.sql
@@ -128,7 +128,7 @@ CREATE TABLE webknossos.datasets(
tags VARCHAR(256)[] NOT NULL DEFAULT '{}',
created TIMESTAMPTZ NOT NULL DEFAULT NOW(),
isDeleted BOOLEAN NOT NULL DEFAULT false,
- UNIQUE (name, _organization),
+ UNIQUE (path, _organization),
CONSTRAINT defaultViewConfigurationIsJsonObject CHECK(jsonb_typeof(defaultViewConfiguration) = 'object'),
CONSTRAINT adminViewConfigurationIsJsonObject CHECK(jsonb_typeof(adminViewConfiguration) = 'object'),
CONSTRAINT metadataIsJsonArray CHECK(jsonb_typeof(metadata) = 'array')
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala
index 3903b1eb673..646f0e80912 100644
---
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -3,11 +3,10 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.image.{Color, JPEGWriter} -import com.scalableminds.util.requestparsing.DatasetURIParser import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.DataStoreConfig -import com.scalableminds.webknossos.datastore.helpers.{LegacyDatasetResolver, MissingBucketHeaders} +import com.scalableminds.webknossos.datastore.helpers.MissingBucketHeaders import com.scalableminds.webknossos.datastore.image.{ImageCreator, ImageCreatorParameters} import com.scalableminds.webknossos.datastore.models.DataRequestCollection._ import com.scalableminds.webknossos.datastore.models.datasource._ @@ -23,7 +22,6 @@ import net.liftweb.common.Box.tryo import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{AnyContent, _} - import scala.concurrent.duration.DurationInt import java.io.ByteArrayOutputStream import java.nio.{ByteBuffer, ByteOrder} @@ -38,16 +36,12 @@ class BinaryDataController @Inject()( slackNotificationService: DSSlackNotificationService, adHocMeshServiceHolder: AdHocMeshServiceHolder, findDataService: FindDataService, - remoteWebknossosClient: DSRemoteWebknossosClient, - datasetIdRepository: DatasetIdRepository )(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with MissingBucketHeaders { override def allowRemoteOrigin: Boolean = true - implicit val remoteWebknossosClientImplicit: DSRemoteWebknossosClient = remoteWebknossosClient - val binaryDataService: BinaryDataService = binaryDataServiceHolder.binaryDataService adHocMeshServiceHolder.dataStoreAdHocMeshConfig = (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) @@ -56,36 +50,28 @@ class BinaryDataController @Inject()( def requestViaWebknossos( token: Option[String], organizationId: String, - datasetIdOrName: String, + datasetPath: String, dataLayerName: String ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - for { - datasetIdWithPath <- datasetIdRepository - .getDatasetIdFromIdOrName(organizationId, datasetIdOrName) ~> NOT_FOUND ?~> Messages( - "dataset.notFoundByIdOrName", - datasetIdOrName) - datasourceId = LegacyDataSourceId(datasetIdWithPath.id.toString, "") - response <- accessTokenService.validateAccess( - UserAccessRequest.readDataSources(datasourceId), - urlOrHeaderToken(token, request) - ) { - logTime(slackNotificationService.noticeSlowRequest) { - val t = Instant.now - for { - (dataSource, dataLayer) <- dataSourceRepository - .getDataSourceAndDataLayer(dataSourceId, dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource, dataLayer, request.body) - duration = Instant.since(t) - _ = if (duration > (10 seconds)) - logger.info( - s"Complete data request for $organizationId/$datasetIdOrName/$dataLayerName took $duration." 
- + request.body.headOption - .map(firstReq => s" First of ${request.body.size} requests was $firstReq") - .getOrElse("")) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) - } + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), + urlOrHeaderToken(token, request)) { + logTime(slackNotificationService.noticeSlowRequest) { + val t = Instant.now + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetPath, + dataLayerName) ~> NOT_FOUND + (data, indices) <- requestData(dataSource, dataLayer, request.body) + duration = Instant.since(t) + _ = if (duration > (10 seconds)) + logger.info( + s"Complete data request for $organizationId/$datasetPath/$dataLayerName took $duration." + + request.body.headOption + .map(firstReq => s" First of ${request.body.size} requests was $firstReq") + .getOrElse("")) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) } - } yield response + } } /** @@ -94,7 +80,7 @@ class BinaryDataController @Inject()( def requestRawCuboid( token: Option[String], organizationId: String, - datasetIdOrName: String, + datasetPath: String, dataLayerName: String, // Mag1 coordinates of the top-left corner of the bounding box x: Int, @@ -110,24 +96,22 @@ class BinaryDataController @Inject()( halfByte: Boolean, mappingName: Option[String] ): Action[AnyContent] = Action.async { implicit request => - { - val dataSourceId = LegacyDataSourceId.fromDatasetIdOrNameAndOrganizationId(datasetIdOrName, organizationId) - accessTokenService.validateAccess(UserAccessRequest.readDataSources(dataSourceId), - urlOrHeaderToken(token, request)) { - for { - (dataSource, dataLayer) <- dataSourceRepository - .getDataSourceAndDataLayer(dataSourceId, dataLayerName) ~> NOT_FOUND - magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" - request = DataRequest( - VoxelPosition(x, y, z, magParsed), - width, - height, - depth, - DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName) - ) - (data, indices) <- requestData(dataSource, dataLayer, request) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) - } + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), + urlOrHeaderToken(token, request)) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetPath, + dataLayerName) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" + request = DataRequest( + VoxelPosition(x, y, z, magParsed), + width, + height, + depth, + DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName) + ) + (data, indices) <- requestData(dataSource, dataLayer, request) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) } } @@ -137,12 +121,11 @@ class BinaryDataController @Inject()( datasetPath: String, dataLayerName: String ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => - // TODO :D - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ~> NOT_FOUND 
(data, indices) <- requestData(dataSource, dataLayer, request.body) } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) @@ -154,18 +137,18 @@ class BinaryDataController @Inject()( */ def requestViaKnossos(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, resolution: Int, x: Int, y: Int, z: Int, cubeSize: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ~> NOT_FOUND request = DataRequest( VoxelPosition(x * cubeSize * resolution, @@ -196,12 +179,11 @@ class BinaryDataController @Inject()( intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - // TODO :D - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" @@ -241,15 +223,15 @@ class BinaryDataController @Inject()( def mappingJson( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ~> NOT_FOUND segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> Messages("dataLayer.notFound") mappingRequest = DataServiceMappingRequest(dataSource, segmentationLayer, mappingName) @@ -263,14 +245,14 @@ class BinaryDataController @Inject()( */ def requestAdHocMesh(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ~> NOT_FOUND segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation" adHocMeshRequest = AdHocMeshRequest( @@ -305,14 +287,14 @@ class BinaryDataController @Inject()( def findData(token: 
Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ~> NOT_FOUND positionAndResolutionOpt <- findDataService.findPositionWithData(dataSource, dataLayer) } yield @@ -324,14 +306,14 @@ class BinaryDataController @Inject()( def histogram(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND ?~> Messages("histogram.layerMissing", dataLayerName) listOfHistograms <- findDataService.createHistogram(dataSource, dataLayer) ?~> Messages("histogram.failed", diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index 7a2f768b6ff..d20d2881edc 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -2,7 +2,7 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services._ import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -25,20 +25,20 @@ class DSMeshController @Inject()( def listMeshFiles(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { - meshFiles <- meshFileService.exploreMeshFiles(organizationId, datasetName, dataLayerName) + meshFiles <- meshFileService.exploreMeshFiles(organizationId, datasetPath, dataLayerName) } yield Ok(Json.toJson(meshFiles)) } } def listMeshChunksForSegment(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, /* If targetMappingName is set, assume that meshfile contains meshes for the oversegmentation. 
Collect mesh chunks of all *unmapped* segment ids @@ -49,17 +49,17 @@ class DSMeshController @Inject()( targetMappingName: Option[String], editableMappingTracingId: Option[String]): Action[ListMeshChunksRequest] = Action.async(validateJson[ListMeshChunksRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { _ <- Fox.successful(()) mappingNameForMeshFile = meshFileService.mappingNameForMeshFile(organizationId, - datasetName, + datasetPath, dataLayerName, request.body.meshFile) segmentIds: List[Long] <- segmentIdsForAgglomerateIdIfNeeded( organizationId, - datasetName, + datasetPath, dataLayerName, targetMappingName, editableMappingTracingId, @@ -69,7 +69,7 @@ class DSMeshController @Inject()( urlOrHeaderToken(token, request) ) chunkInfos <- meshFileService.listMeshChunksForSegmentsMerged(organizationId, - datasetName, + datasetPath, dataLayerName, request.body.meshFile, segmentIds) @@ -79,13 +79,13 @@ class DSMeshController @Inject()( def readMeshChunk(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[MeshChunkDataRequestList] = Action.async(validateJson[MeshChunkDataRequestList]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { - (data, encoding) <- meshFileService.readMeshChunk(organizationId, datasetName, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" + (data, encoding) <- meshFileService.readMeshChunk(organizationId, datasetPath, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" } yield { if (encoding.contains("gzip")) { Ok(data).withHeaders("Content-Encoding" -> "gzip") @@ -96,15 +96,15 @@ class DSMeshController @Inject()( def loadFullMeshStl(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], organizationId, - datasetName, + datasetPath, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index f5a506b198e..2b8d59aa0a9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.requestparsing.ObjectId import 
com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong import com.scalableminds.webknossos.datastore.explore.{ @@ -16,7 +17,7 @@ import com.scalableminds.webknossos.datastore.helpers.{ SegmentStatisticsParameters } import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, LegacyDataSourceId, GenericDataSource} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId, GenericDataSource} import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.datastore.services.uploading.{ CancelUploadInformation, @@ -65,16 +66,16 @@ class DataSourceController @Inject()( override def allowRemoteOrigin: Boolean = true - def readInboxDataSource(token: Option[String], organizationId: String, datasetName: String): Action[AnyContent] = + def readInboxDataSource(token: Option[String], organizationId: String, datasetPath: String): Action[AnyContent] = Action.async { implicit request => { accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { // Read directly from file, not from repository to ensure recent changes are seen val dataSource: InboxDataSource = dataSourceService.dataSourceFromDir( - dataSourceService.dataBaseDir.resolve(organizationId).resolve(datasetName), + dataSourceService.dataBaseDir.resolve(organizationId).resolve(datasetPath), organizationId) Ok(Json.toJson(dataSource)) } @@ -97,7 +98,7 @@ class DataSourceController @Inject()( isKnownUpload <- uploadService.isKnownUpload(request.body.uploadId) _ <- if (!isKnownUpload) { (remoteWebknossosClient.reserveDataSourceUpload(request.body, urlOrHeaderToken(token, request)) ?~> "dataset.upload.validation.failed") - .flatMap(_ => uploadService.reserveUpload(request.body)) + .flatMap(reservedInfo => uploadService.reserveUpload(reservedInfo)) } else Fox.successful(()) } yield Ok } @@ -111,7 +112,7 @@ class DataSourceController @Inject()( unfinishedUploads <- remoteWebknossosClient.getUnfinishedUploadsForUser(urlOrHeaderToken(token, request), organizationName) unfinishedUploadsWithUploadIds <- uploadService.addUploadIdsToUnfinishedUploads(unfinishedUploads) - } yield Ok(Json.toJson(unfinishedUploadsWithUploadIds)) + } yield Ok(Json.toJson(unfinishedUploadsWithUploadIds)) // TODO: Adjust frontend accordingly } } @@ -126,6 +127,7 @@ class DataSourceController @Inject()( ReserveUploadInformation( "aManualUpload", request.body.datasetName, + request.body.datasetPath, request.body.organization, 0, List.empty, @@ -216,6 +218,7 @@ class DataSourceController @Inject()( urlOrHeaderToken(token, request)) { for { (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "finishUpload.failed" + // TODO: Store dataset id from upload information in redis and use it here to report the upload _ <- remoteWebknossosClient.reportUpload( dataSourceId, datasetSizeBytes, @@ -248,28 +251,28 @@ class DataSourceController @Inject()( def listMappings( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessForSyncBlock( - 
UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { addNoCacheHeaderFallback( - Ok(Json.toJson(dataSourceService.exploreMappings(organizationId, datasetName, dataLayerName)))) + Ok(Json.toJson(dataSourceService.exploreMappings(organizationId, datasetPath, dataLayerName)))) } } def listAgglomerates( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox - agglomerateList = agglomerateService.exploreAgglomerates(organizationId, datasetName, dataLayerName) + agglomerateList = agglomerateService.exploreAgglomerates(organizationId, datasetPath, dataLayerName) } yield Ok(Json.toJson(agglomerateList)) } } @@ -277,17 +280,17 @@ class DataSourceController @Inject()( def generateAgglomerateSkeleton( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox skeleton <- agglomerateService.generateSkeleton(organizationId, - datasetName, + datasetPath, dataLayerName, mappingName, agglomerateId) ?~> "agglomerateSkeleton.failed" @@ -298,17 +301,17 @@ class DataSourceController @Inject()( def agglomerateGraph( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateGraph <- agglomerateService.generateAgglomerateGraph( - AgglomerateFileKey(organizationId, datasetName, dataLayerName, mappingName), + AgglomerateFileKey(organizationId, datasetPath, dataLayerName, mappingName), agglomerateId) ?~> "agglomerateGraph.failed" } yield Ok(agglomerateGraph.toByteArray).as(protobufMimeType) } @@ -317,17 +320,17 @@ class DataSourceController @Inject()( def positionForSegmentViaAgglomerateFile( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox position <- agglomerateService.positionForSegmentId( - AgglomerateFileKey(organizationId, datasetName, dataLayerName, mappingName), + AgglomerateFileKey(organizationId, datasetPath, dataLayerName, mappingName), segmentId) ?~> "getSegmentPositionFromAgglomerateFile.failed" } yield Ok(Json.toJson(position)) } @@ -336,11 +339,11 @@ class DataSourceController @Inject()( def largestAgglomerateId( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -348,7 +351,7 @@ class DataSourceController @Inject()( .largestAgglomerateId( AgglomerateFileKey( organizationId, - datasetName, + datasetPath, dataLayerName, mappingName ) @@ -361,11 +364,11 @@ class DataSourceController @Inject()( def agglomerateIdsForSegmentIds( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -373,7 +376,7 @@ class DataSourceController @Inject()( .agglomerateIdsForSegmentIds( AgglomerateFileKey( organizationId, - datasetName, + datasetPath, dataLayerName, mappingName ), @@ -387,11 +390,11 @@ class DataSourceController @Inject()( def agglomerateIdsForAllSegmentIds( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox @@ -399,7 +402,7 @@ class DataSourceController @Inject()( .agglomerateIdsForAllSegmentIds( AgglomerateFileKey( organizationId, - datasetName, + datasetPath, dataLayerName, mappingName ) @@ -409,13 +412,13 @@ class DataSourceController @Inject()( } } - def update(token: Option[String], organizationId: String, datasetName: String): Action[DataSource] = + def update(token: Option[String], organizationId: String, datasetPath: String): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.writeDataSource(LegacyDataSourceId(datasetName, 
organizationId)), + accessTokenService.validateAccess(UserAccessRequest.writeDataSource(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { _ <- Fox.successful(()) - dataSource <- dataSourceRepository.find(LegacyDataSourceId(datasetName, organizationId)).toFox ?~> Messages( + dataSource <- dataSourceRepository.find(DataSourceId(datasetPath, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND _ <- dataSourceService.updateDataSource(request.body.copy(id = dataSource.id), expectExisting = true) } yield Ok @@ -430,12 +433,11 @@ class DataSourceController @Inject()( Action.async(validateJson[DataSource]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.administrateDataSources, urlOrHeaderToken(token, request)) { for { - _ <- bool2Fox(dataSourceRepository.find(LegacyDataSourceId(datasetName, organizationId)).isEmpty) ?~> Messages( - "dataSource.alreadyPresent") - _ <- remoteWebknossosClient.reserveDataSourceUpload( + reservedInfo <- remoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( - uploadId = "", + uploadId = "", // Set by core backend name = datasetName, + path = "", // Set by core backend organization = organizationId, totalFileCount = 1, filePaths = None, @@ -445,10 +447,12 @@ class DataSourceController @Inject()( ), urlOrHeaderToken(token, request) ) ?~> "dataset.upload.validation.failed" - _ <- dataSourceService.updateDataSource(request.body.copy(id = LegacyDataSourceId(datasetName, organizationId)), - expectExisting = false) + _ <- dataSourceService.updateDataSource( + request.body.copy(id = DataSourceId(reservedInfo.path, organizationId)), + expectExisting = false) + parsedUploadId <- ObjectId.fromString(reservedInfo.uploadId) ?~> "reportUpload.failed" _ <- remoteWebknossosClient.reportUpload( - LegacyDataSourceId(datasetName, organizationId), + parsedUploadId, 0L, needsConversion = false, viaAddRoute = true, @@ -471,7 +475,7 @@ class DataSourceController @Inject()( def measureUsedStorage(token: Option[String], organizationId: String, - datasetName: Option[String] = None): Action[AnyContent] = + datasetPath: Option[String] = None): Action[AnyContent] = Action.async { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId), @@ -479,10 +483,10 @@ class DataSourceController @Inject()( for { before <- Fox.successful(System.currentTimeMillis()) usedStorageInBytes: List[DirectoryStorageReport] <- storageUsageService.measureStorage(organizationId, - datasetName) + datasetPath) after = System.currentTimeMillis() _ = if (after - before > (10 seconds).toMillis) { - val datasetLabel = datasetName.map(n => s" dataset $n of").getOrElse("") + val datasetLabel = datasetPath.map(n => s" dataset $n of").getOrElse("") logger.info(s"Measuring storage for$datasetLabel orga $organizationId took ${after - before} ms.") } } yield Ok(Json.toJson(usedStorageInBytes)) @@ -492,23 +496,23 @@ class DataSourceController @Inject()( def reload(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, layerName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationId), urlOrHeaderToken(token, request)) { val (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) = - binaryDataServiceHolder.binaryDataService.clearCache(organizationId, datasetName, layerName) + 
binaryDataServiceHolder.binaryDataService.clearCache(organizationId, datasetPath, layerName) val reloadedDataSource = dataSourceService.dataSourceFromDir( - dataSourceService.dataBaseDir.resolve(organizationId).resolve(datasetName), + dataSourceService.dataBaseDir.resolve(organizationId).resolve(datasetPath), organizationId) - datasetErrorLoggingService.clearForDataset(organizationId, datasetName) + datasetErrorLoggingService.clearForDataset(organizationId, datasetPath) for { clearedVaultCacheEntriesBox <- dataSourceService.invalidateVaultCache(reloadedDataSource, layerName).futureBox _ = clearedVaultCacheEntriesBox match { case Full(clearedVaultCacheEntries) => logger.info( - s"Reloading ${layerName.map(l => s"layer '$l' of ").getOrElse("")}dataset $organizationId/$datasetName: closed $closedAgglomerateFileHandleCount agglomerate file handles, removed $clearedBucketProviderCount bucketProviders, $clearedVaultCacheEntries vault cache entries and $removedChunksCount image chunk cache entries.") + s"Reloading ${layerName.map(l => s"layer '$l' of ").getOrElse("")}dataset $organizationId/$datasetPath: closed $closedAgglomerateFileHandleCount agglomerate file handles, removed $clearedBucketProviderCount bucketProviders, $clearedVaultCacheEntries vault cache entries and $removedChunksCount image chunk cache entries.") case _ => () } _ <- dataSourceRepository.updateDataSource(reloadedDataSource) @@ -516,15 +520,15 @@ class DataSourceController @Inject()( } } - def deleteOnDisk(token: Option[String], organizationId: String, datasetName: String): Action[AnyContent] = + def deleteOnDisk(token: Option[String], organizationId: String, datasetPath: String): Action[AnyContent] = Action.async { implicit request => - val dataSourceId = LegacyDataSourceId(datasetName, organizationId) + val dataSourceId = DataSourceId(datasetPath, organizationId) accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId), urlOrHeaderToken(token, request)) { for { _ <- binaryDataServiceHolder.binaryDataService.deleteOnDisk( organizationId, - datasetName, + datasetPath, reason = Some("the user wants to delete the dataset")) ?~> "dataset.delete.failed" _ <- dataSourceRepository.cleanUpDataSource(dataSourceId) // also frees the name in the wk-side database } yield Ok @@ -539,7 +543,7 @@ class DataSourceController @Inject()( _ <- Fox.serialCombined(request.body.layers.map(_.datasetId).toList)( id => accessTokenService.assertUserAccess( - UserAccessRequest.readDataSources(LegacyDataSourceId(id.name, id.owningOrganization)), + UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)), userToken)) dataSource <- composeService.composeDataset(request.body, userToken) _ <- dataSourceRepository.updateDataSource(dataSource) @@ -549,17 +553,17 @@ class DataSourceController @Inject()( def listConnectomeFiles(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { val connectomeFileNames = - connectomeFileService.exploreConnectomeFiles(organizationId, datasetName, dataLayerName) + connectomeFileService.exploreConnectomeFiles(organizationId, datasetPath, dataLayerName) for { mappingNames <- 
Fox.serialCombined(connectomeFileNames.toList) { connectomeFileName => val path = - connectomeFileService.connectomeFilePath(organizationId, datasetName, dataLayerName, connectomeFileName) + connectomeFileService.connectomeFilePath(organizationId, datasetPath, dataLayerName, connectomeFileName) connectomeFileService.mappingNameForConnectomeFile(path) } connectomesWithMappings = connectomeFileNames @@ -571,15 +575,15 @@ class DataSourceController @Inject()( def getSynapsesForAgglomerates(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[ByAgglomerateIdsRequest] = Action.async(validateJson[ByAgglomerateIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { meshFilePath <- Fox.successful( connectomeFileService - .connectomeFilePath(organizationId, datasetName, dataLayerName, request.body.connectomeFile)) + .connectomeFilePath(organizationId, datasetPath, dataLayerName, request.body.connectomeFile)) synapses <- connectomeFileService.synapsesForAgglomerates(meshFilePath, request.body.agglomerateIds) } yield Ok(Json.toJson(synapses)) } @@ -587,16 +591,16 @@ class DataSourceController @Inject()( def getSynapticPartnerForSynapses(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, direction: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { meshFilePath <- Fox.successful( connectomeFileService - .connectomeFilePath(organizationId, datasetName, dataLayerName, request.body.connectomeFile)) + .connectomeFilePath(organizationId, datasetPath, dataLayerName, request.body.connectomeFile)) agglomerateIds <- connectomeFileService.synapticPartnerForSynapses(meshFilePath, request.body.synapseIds, direction) @@ -606,15 +610,15 @@ class DataSourceController @Inject()( def getSynapsePositions(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { meshFilePath <- Fox.successful( connectomeFileService - .connectomeFilePath(organizationId, datasetName, dataLayerName, request.body.connectomeFile)) + .connectomeFilePath(organizationId, datasetPath, dataLayerName, request.body.connectomeFile)) synapsePositions <- connectomeFileService.positionsForSynapses(meshFilePath, request.body.synapseIds) } yield Ok(Json.toJson(synapsePositions)) } @@ -622,15 +626,15 @@ class DataSourceController @Inject()( def getSynapseTypes(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[BySynapseIdsRequest] = 
Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { meshFilePath <- Fox.successful( connectomeFileService - .connectomeFilePath(organizationId, datasetName, dataLayerName, request.body.connectomeFile)) + .connectomeFilePath(organizationId, datasetPath, dataLayerName, request.body.connectomeFile)) synapseTypes <- connectomeFileService.typesForSynapses(meshFilePath, request.body.synapseIds) } yield Ok(Json.toJson(synapseTypes)) } @@ -638,13 +642,13 @@ class DataSourceController @Inject()( def checkSegmentIndexFile(token: Option[String], organizationId: String, - dataSetName: String, + datasetPath: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(dataSetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { val segmentIndexFileOpt = - segmentIndexFileService.getSegmentIndexFile(organizationId, dataSetName, dataLayerName).toOption + segmentIndexFileService.getSegmentIndexFile(organizationId, datasetPath, dataLayerName).toOption Future.successful(Ok(Json.toJson(segmentIndexFileOpt.isDefined))) } } @@ -655,16 +659,16 @@ class DataSourceController @Inject()( */ def getSegmentIndex(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, segmentId: String): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { segmentIds <- segmentIdsForAgglomerateIdIfNeeded( organizationId, - datasetName, + datasetPath, dataLayerName, request.body.mappingName, request.body.editableMappingTracingId, @@ -673,9 +677,9 @@ class DataSourceController @Inject()( omitMissing = false, urlOrHeaderToken(token, request) ) - fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetName, dataLayerName) + fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetPath, dataLayerName) topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => - segmentIndexFileService.readSegmentIndex(organizationId, datasetName, dataLayerName, sId)) + segmentIndexFileService.readSegmentIndex(organizationId, datasetPath, dataLayerName, sId)) topLefts: Array[Vec3Int] = topLeftsNested.toArray.flatten bucketPositions = segmentIndexFileService.topLeftsToDistinctBucketPositions(topLefts, request.body.mag, @@ -695,17 +699,17 @@ class DataSourceController @Inject()( */ def querySegmentIndex(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + 
accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => for { segmentIds <- segmentIdsForAgglomerateIdIfNeeded( organizationId, - datasetName, + datasetPath, dataLayerName, request.body.mappingName, request.body.editableMappingTracingId, @@ -714,9 +718,9 @@ class DataSourceController @Inject()( omitMissing = true, // assume agglomerate ids not present in the mapping belong to user-brushed segments urlOrHeaderToken(token, request) ) - fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetName, dataLayerName) + fileMag <- segmentIndexFileService.readFileMag(organizationId, datasetPath, dataLayerName) topLeftsNested: Seq[Array[Vec3Int]] <- Fox.serialCombined(segmentIds)(sId => - segmentIndexFileService.readSegmentIndex(organizationId, datasetName, dataLayerName, sId)) + segmentIndexFileService.readSegmentIndex(organizationId, datasetPath, dataLayerName, sId)) topLefts: Array[Vec3Int] = topLeftsNested.toArray.flatten bucketPositions = segmentIndexFileService.topLeftsToDistinctBucketPositions(topLefts, request.body.mag, @@ -729,17 +733,17 @@ class DataSourceController @Inject()( def getSegmentVolume(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { - _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) + _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetPath, dataLayerName) volumes <- Fox.serialCombined(request.body.segmentIds) { segmentId => segmentIndexFileService.getSegmentVolume( organizationId, - datasetName, + datasetPath, dataLayerName, segmentId, request.body.mag, @@ -752,16 +756,16 @@ class DataSourceController @Inject()( def getSegmentBoundingBox(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { - _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetName, dataLayerName) + _ <- segmentIndexFileService.assertSegmentIndexFileExists(organizationId, datasetPath, dataLayerName) boxes <- Fox.serialCombined(request.body.segmentIds) { segmentId => segmentIndexFileService.getSegmentBoundingBox(organizationId, - datasetName, + datasetPath, dataLayerName, segmentId, request.body.mag, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index 06ab27a165a..b707374ea67 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -52,14 +52,14 @@ class ZarrStreamingController @Inject()( def requestZAttrs( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND omeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(dataLayerName, dataSource.scale, dataLayer.resolutions) @@ -70,14 +70,14 @@ class ZarrStreamingController @Inject()( def requestZarrJson( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND omeNgffHeaderV0_5 = NgffMetadataV0_5.fromNameVoxelSizeAndMags(dataLayerName, @@ -105,7 +105,7 @@ class ZarrStreamingController @Inject()( orElse = annotationSource => for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(annotationSource.organizationId, - annotationSource.datasetName, + annotationSource.datasetPath, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND dataSourceOmeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(dataLayerName, @@ -131,7 +131,7 @@ class ZarrStreamingController @Inject()( orElse = annotationSource => for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(annotationSource.organizationId, - annotationSource.datasetName, + annotationSource.datasetPath, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND dataSourceOmeNgffHeader = NgffMetadataV0_5.fromNameVoxelSizeAndMags(dataLayerName, @@ -150,13 +150,13 @@ class ZarrStreamingController @Inject()( def requestDataSource( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { - dataSource <- dataSourceRepository.findUsable(LegacyDataSourceId(datasetName, organizationId)).toFox ~> NOT_FOUND + dataSource <- dataSourceRepository.findUsable(DataSourceId(datasetPath, organizationId)).toFox ~> NOT_FOUND dataLayers = dataSource.dataLayers zarrLayers = dataLayers.map(convertLayerToZarrLayer(_, zarrVersion)) zarrSource = 
GenericDataSource[DataLayer](dataSource.id, zarrLayers, dataSource.scale) @@ -211,7 +211,7 @@ class ZarrStreamingController @Inject()( else urlOrHeaderToken(token, request) volumeAnnotationLayers = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume) dataSource <- dataSourceRepository - .findUsable(LegacyDataSourceId(annotationSource.datasetName, annotationSource.organizationId)) + .findUsable(DataSourceId(annotationSource.datasetPath, annotationSource.organizationId)) .toFox ~> NOT_FOUND dataSourceLayers = dataSource.dataLayers .filter(dL => !volumeAnnotationLayers.exists(_.name == dL.name)) @@ -231,14 +231,14 @@ class ZarrStreamingController @Inject()( def requestRawZarrCube( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { - rawZarrCube(organizationId, datasetName, dataLayerName, mag, coordinates) + rawZarrCube(organizationId, datasetPath, dataLayerName, mag, coordinates) } } @@ -261,20 +261,20 @@ class ZarrStreamingController @Inject()( relevantToken) .map(Ok(_)), orElse = annotationSource => - rawZarrCube(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag, coordinates) + rawZarrCube(annotationSource.organizationId, annotationSource.datasetPath, dataLayerName, mag, coordinates) ) } private def rawZarrCube( organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mag: String, coordinates: String, )(implicit m: MessagesProvider): Fox[Result] = for { (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) ~> NOT_FOUND reorderedAdditionalAxes = dataLayer.additionalAxes.map(reorderAdditionalAxes) (x, y, z, additionalCoordinates) <- ZarrCoordinatesParser.parseNDimensionalDotCoordinates( @@ -303,20 +303,20 @@ class ZarrStreamingController @Inject()( def requestZArray(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { - zArray(organizationId, datasetName, dataLayerName, mag) + zArray(organizationId, datasetPath, dataLayerName, mag) } } - private def zArray(organizationId: String, datasetName: String, dataLayerName: String, mag: String)( + private def zArray(organizationId: String, datasetPath: String, dataLayerName: String, mag: String)( implicit m: MessagesProvider): Fox[Result] = for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, dataLayerName) ?~> Messages( + (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetPath, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- 
bool2Fox(dataLayer.containsResolution(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND @@ -325,20 +325,20 @@ class ZarrStreamingController @Inject()( def requestZarrJsonForMag(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { - zarrJsonForMag(organizationId, datasetName, dataLayerName, mag) + zarrJsonForMag(organizationId, datasetPath, dataLayerName, mag) } } - private def zarrJsonForMag(organizationId: String, datasetName: String, dataLayerName: String, mag: String)( + private def zarrJsonForMag(organizationId: String, datasetPath: String, dataLayerName: String, mag: String)( implicit m: MessagesProvider): Fox[Result] = for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, dataLayerName) ?~> Messages( + (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetPath, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(dataLayer.containsResolution(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND @@ -358,7 +358,7 @@ class ZarrStreamingController @Inject()( .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, relevantToken) .map(z => Ok(Json.toJson(z))), orElse = - annotationSource => zArray(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) + annotationSource => zArray(annotationSource.organizationId, annotationSource.datasetPath, dataLayerName, mag) ) } @@ -375,7 +375,7 @@ class ZarrStreamingController @Inject()( .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl, relevantToken) .map(z => Ok(Json.toJson(z))), orElse = annotationSource => - zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetName, dataLayerName, mag) + zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetPath, dataLayerName, mag) ) } @@ -398,24 +398,24 @@ class ZarrStreamingController @Inject()( def requestDataLayerMagFolderContents(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mag: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { - dataLayerMagFolderContents(organizationId, datasetName, dataLayerName, mag, zarrVersion) + dataLayerMagFolderContents(organizationId, datasetPath, dataLayerName, mag, zarrVersion) } } private def dataLayerMagFolderContents(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mag: String, zarrVersion: Int)(implicit m: MessagesProvider): Fox[Result] = for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, dataLayerName) ~> NOT_FOUND + (_, dataLayer) 
<- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetPath, dataLayerName) ~> NOT_FOUND magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true) ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND _ <- bool2Fox(dataLayer.containsResolution(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND additionalEntries = if (zarrVersion == 2) List(ZarrHeader.FILENAME_DOT_ZARRAY) @@ -424,7 +424,7 @@ class ZarrStreamingController @Inject()( Ok( views.html.datastoreZarrDatasourceDir( "Datastore", - "%s/%s/%s/%s".format(organizationId, datasetName, dataLayerName, mag), + "%s/%s/%s/%s".format(organizationId, datasetPath, dataLayerName, mag), additionalEntries )).withHeaders() @@ -455,7 +455,7 @@ class ZarrStreamingController @Inject()( )).withHeaders()), orElse = annotationSource => dataLayerMagFolderContents(annotationSource.organizationId, - annotationSource.datasetName, + annotationSource.datasetPath, dataLayerName, mag, zarrVersion) @@ -464,21 +464,21 @@ class ZarrStreamingController @Inject()( def requestDataLayerFolderContents(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { - dataLayerFolderContents(organizationId, datasetName, dataLayerName, zarrVersion) + dataLayerFolderContents(organizationId, datasetPath, dataLayerName, zarrVersion) } } private def dataLayerFolderContents(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, zarrVersion: Int)(implicit m: MessagesProvider): Fox[Result] = for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetName, dataLayerName) ?~> Messages( + (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, datasetPath, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND mags = dataLayer.resolutions additionalFiles = if (zarrVersion == 2) @@ -488,7 +488,7 @@ class ZarrStreamingController @Inject()( Ok( views.html.datastoreZarrDatasourceDir( "Datastore", - "%s/%s/%s".format(organizationId, datasetName, dataLayerName), + "%s/%s/%s".format(organizationId, datasetPath, dataLayerName), additionalFiles ++ mags.map(_.toMagLiteral(allowScalar = true)) )).withHeaders() @@ -517,7 +517,7 @@ class ZarrStreamingController @Inject()( )).withHeaders()), orElse = annotationSource => dataLayerFolderContents(annotationSource.organizationId, - annotationSource.datasetName, + annotationSource.datasetPath, dataLayerName, zarrVersion) ) @@ -525,13 +525,13 @@ class ZarrStreamingController @Inject()( def requestDataSourceFolderContents(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccess(UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { for { - dataSource <- dataSourceRepository.findUsable(LegacyDataSourceId(datasetName, organizationId)).toFox ?~> Messages( + dataSource <- 
dataSourceRepository.findUsable(DataSourceId(datasetPath, organizationId)).toFox ?~> Messages( "dataSource.notFound") ~> NOT_FOUND layerNames = dataSource.dataLayers.map((dataLayer: DataLayer) => dataLayer.name) additionalVersionDependantFiles = if (zarrVersion == 2) List(NgffGroupHeader.FILENAME_DOT_ZGROUP) @@ -539,7 +539,7 @@ class ZarrStreamingController @Inject()( } yield Ok(views.html.datastoreZarrDatasourceDir( "Datastore", - s"$organizationId/$datasetName", + s"$organizationId/$datasetPath", List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) ++ additionalVersionDependantFiles ++ layerNames )) } @@ -552,7 +552,7 @@ class ZarrStreamingController @Inject()( for { annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) dataSource <- dataSourceRepository - .findUsable(LegacyDataSourceId(annotationSource.datasetName, annotationSource.organizationId)) + .findUsable(DataSourceId(annotationSource.datasetPath, annotationSource.organizationId)) .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND annotationLayerNames = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume).map(_.name) dataSourceLayerNames = dataSource.dataLayers @@ -574,10 +574,10 @@ class ZarrStreamingController @Inject()( def requestZGroup(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessForSyncBlock( - UserAccessRequest.readDataSources(LegacyDataSourceId(datasetName, organizationId)), + UserAccessRequest.readDataSources(DataSourceId(datasetPath, organizationId)), urlOrHeaderToken(token, request)) { Ok(zGroupJson) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala index c0b907885f2..4638466d316 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala @@ -11,7 +11,7 @@ import com.scalableminds.webknossos.datastore.datareaders.wkw.WKWArray import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrArray import com.scalableminds.webknossos.datastore.datareaders.zarr3.Zarr3Array import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, DataLayer, LegacyDataSourceId, ElementClass} +import com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, DataLayer, DataSourceId, ElementClass} import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import com.typesafe.scalalogging.LazyLogging @@ -22,7 +22,7 @@ import ucar.ma2.{Array => MultiArray} import scala.concurrent.ExecutionContext class DatasetArrayBucketProvider(dataLayer: DataLayer, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], sharedChunkContentsCacheOpt: Option[AlfuCache[String, MultiArray]]) extends BucketProvider diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MappingProvider.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MappingProvider.scala index e8598a1bb01..a4c23adaa0b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MappingProvider.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/MappingProvider.scala @@ -12,8 +12,8 @@ class MappingProvider(layer: SegmentationLayer) { def load(readInstruction: MappingReadInstruction): Box[Array[Byte]] = { val mappingFile = readInstruction.baseDir - .resolve(readInstruction.dataSource.id.team) - .resolve(readInstruction.dataSource.id.name) + .resolve(readInstruction.dataSource.id.organizationId) + .resolve(readInstruction.dataSource.id.path) .resolve(layer.name) .resolve(MappingProvider.mappingsDir) .resolve(s"${readInstruction.mapping}.${MappingProvider.mappingFileExtension}") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala index 5474e633844..ba9d7db10e7 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala @@ -14,7 +14,7 @@ trait N5Layer extends DataLayerWithMagLocators { val dataFormat: DataFormat.Value = DataFormat.n5 def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]) = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala index 535b699f883..bb8e7ccda48 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala @@ -14,7 +14,7 @@ trait PrecomputedLayer extends DataLayerWithMagLocators { val dataFormat: DataFormat.Value = DataFormat.neuroglancerPrecomputed def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]) = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala index 27b3ac37c2e..ee117e5f234 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala @@ -20,7 +20,7 @@ trait WKWLayer extends DataLayer { val dataFormat: DataFormat.Value = DataFormat.wkw override def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, sharedChunkContentsCache: 
Option[AlfuCache[String, MultiArray]]): BucketProvider = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala index 04c2af1baa9..54a343879d4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala @@ -14,7 +14,7 @@ trait Zarr3Layer extends DataLayerWithMagLocators { val dataFormat: DataFormat.Value = DataFormat.zarr3 def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]) = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala index 0242b50d0eb..17810f5efb3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala @@ -12,7 +12,7 @@ import ucar.ma2.{Array => MultiArray} trait ZarrLayer extends DataLayerWithMagLocators { def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]) = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala index 06234ca8e3a..b04fa57ee80 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import net.liftweb.common.Box.tryo @@ -17,7 +17,7 @@ import scala.collection.immutable.NumericRange import scala.concurrent.ExecutionContext class DatasetArray(vaultPath: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, header: DatasetHeader, // axisOrder and additionalAxes match those from “outer” metadata, and can directly be used to compute chunk indices. 
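
Since every lookup in the hunks above now keys on DataSourceId(path, organizationId) instead of the old LegacyDataSourceId(name, team), the following is a minimal sketch of how such an id resolves to an on-disk layer directory, mirroring the MappingProvider change above. The base folder and the example organization, dataset path, and layer name are illustrative assumptions, not values taken from this patch.

import java.nio.file.{Path, Paths}

// Hypothetical stand-in for the renamed case class in DataSource.scala.
case class DataSourceId(path: String, organizationId: String)

object DataSourceIdLayoutSketch {
  // Assumed datastore base folder; in the datastore it comes from config.Datastore.baseFolder.
  private val baseDir: Path = Paths.get("/srv/webknossos/binaryData")

  // Resolves <baseDir>/<organizationId>/<path>/<layerName>, as MappingProvider now does.
  def layerDir(id: DataSourceId, layerName: String): Path =
    baseDir.resolve(id.organizationId).resolve(id.path).resolve(layerName)

  def main(args: Array[String]): Unit =
    // e.g. /srv/webknossos/binaryData/sample_organization/l4_sample/segmentation
    println(layerDir(DataSourceId("l4_sample", "sample_organization"), "segmentation"))
}

The companion helpers later in this patch (fromDatasetIdOrNameAndOrganizationId and friends) fall back to the raw string when no ObjectId can be parsed, so both id-based and name-based URIs end up as the same DataSourceId key used here.
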
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala index c0401118cae..27541d75dd1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.util.cache.AlfuCache import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkReader, DatasetArray, DatasetHeader} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.typesafe.scalalogging.LazyLogging import com.scalableminds.util.tools.Fox.box2Fox @@ -16,7 +16,7 @@ import scala.concurrent.ExecutionContext object N5Array extends LazyLogging { def open(path: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], @@ -40,7 +40,7 @@ object N5Array extends LazyLogging { } class N5Array(vaultPath: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, header: DatasetHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala index 29cbe7b00f0..5c26c43c333 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.io.ZipIO import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, DatasetArray} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box.tryo @@ -21,7 +21,7 @@ import ucar.ma2.{Array => MultiArray} object PrecomputedArray extends LazyLogging { def open( magPath: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], @@ -49,7 +49,7 @@ object PrecomputedArray extends LazyLogging { } class PrecomputedArray(vaultPath: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, header: PrecomputedScaleHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala index 3f874e31665..c29e42d399f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala +++ 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala @@ -8,7 +8,7 @@ import com.scalableminds.util.tools.JsonHelper.bool2Box import com.scalableminds.webknossos.datastore.dataformats.wkw.{MortonEncoding, WKWDataFormatHelper, WKWHeader} import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkUtils, DatasetArray} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, LegacyDataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataSourceId} import net.liftweb.common.Box import net.liftweb.common.Box.tryo import ucar.ma2.{Array => MultiArray} @@ -19,7 +19,7 @@ import scala.concurrent.ExecutionContext object WKWArray extends WKWDataFormatHelper { def open(path: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[WKWArray] = for { @@ -38,7 +38,7 @@ object WKWArray extends WKWDataFormatHelper { } class WKWArray(vaultPath: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, header: WKWHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala index c4b31765252..dbc1b5af04d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.cache.AlfuCache import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, DatasetArray, DatasetHeader} import ucar.ma2.{Array => MultiArray} import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box.tryo @@ -15,7 +15,7 @@ import scala.concurrent.ExecutionContext object ZarrArray extends LazyLogging { def open(path: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], @@ -42,7 +42,7 @@ object ZarrArray extends LazyLogging { } class ZarrArray(vaultPath: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, header: DatasetHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala index 6b95816192a..874dd1d38fd 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.tools.Fox.box2Fox import com.scalableminds.util.tools.{Fox, JsonHelper} import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkReader, ChunkUtils, DatasetArray} import 
com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, LegacyDataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataSourceId} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box.tryo import ucar.ma2.{Array => MultiArray} @@ -16,7 +16,7 @@ import scala.concurrent.ExecutionContext object Zarr3Array extends LazyLogging { def open(path: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, axisOrderOpt: Option[AxisOrder], channelIndex: Option[Int], @@ -39,7 +39,7 @@ object Zarr3Array extends LazyLogging { } class Zarr3Array(vaultPath: VaultPath, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, header: Zarr3ArrayHeader, axisOrder: AxisOrder, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala index 55b584d5bb1..461360fe92d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala @@ -7,7 +7,7 @@ import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header import com.scalableminds.webknossos.datastore.models.datasource.{ DataLayerWithMagLocators, DataSource, - LegacyDataSourceId, + DataSourceId, DataSourceWithMagLocators, GenericDataSource } @@ -26,7 +26,7 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) extends ExploreLayerUtils with FoxImplicits { - def exploreLocal(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String = "")( + def exploreLocal(path: Path, dataSourceId: DataSourceId, layerDirectory: String = "")( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = for { _ <- Fox.successful(()) @@ -40,7 +40,7 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) dataSource <- Fox.firstSuccess(explored) ?~> "Could not explore local data source" } yield dataSource - private def exploreLocalZarrArray(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String)( + private def exploreLocalZarrArray(path: Path, dataSourceId: DataSourceId, layerDirectory: String)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = for { magDirectories <- tryo(Files.list(path.resolve(layerDirectory)).iterator().asScala.toList).toFox ?~> s"Could not resolve color directory as child of $path" @@ -57,28 +57,28 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) dataSource = new DataSourceWithMagLocators(dataSourceId, relativeLayers, voxelSize) } yield dataSource - private def exploreLocalNgffArray(path: Path, dataSourceId: LegacyDataSourceId)( + private def exploreLocalNgffArray(path: Path, dataSourceId: DataSourceId)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = exploreLocalLayer( layers => layers.map(selectLastTwoDirectories), new NgffExplorer )(path, dataSourceId, "") - private def exploreLocalNeuroglancerPrecomputed(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String)( + private def exploreLocalNeuroglancerPrecomputed(path: Path, dataSourceId: DataSourceId, layerDirectory: String)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = exploreLocalLayer( layers => 
layers.map(selectLastDirectory), new PrecomputedExplorer )(path, dataSourceId, layerDirectory) - private def exploreLocalN5Multiscales(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String)( + private def exploreLocalN5Multiscales(path: Path, dataSourceId: DataSourceId, layerDirectory: String)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = exploreLocalLayer( layers => layers.map(selectLastDirectory), new N5MultiscalesExplorer )(path, dataSourceId, layerDirectory) - private def exploreLocalN5Array(path: Path, dataSourceId: LegacyDataSourceId)( + private def exploreLocalN5Array(path: Path, dataSourceId: DataSourceId)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = for { _ <- Fox.successful(()) @@ -111,7 +111,7 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) private def exploreLocalLayer( makeLayersRelative: List[DataLayerWithMagLocators] => List[DataLayerWithMagLocators], - explorer: RemoteLayerExplorer)(path: Path, dataSourceId: LegacyDataSourceId, layerDirectory: String)( + explorer: RemoteLayerExplorer)(path: Path, dataSourceId: DataSourceId, layerDirectory: String)( implicit ec: ExecutionContext): Fox[DataSourceWithMagLocators] = for { fullPath <- Fox.successful(path.resolve(layerDirectory)) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala index 1877b4f8bff..964a889a5ce 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.datastore.models.VoxelSize import com.scalableminds.webknossos.datastore.models.datasource.{ DataLayer, DataLayerWithMagLocators, - LegacyDataSourceId, + DataSourceId, GenericDataSource } import com.scalableminds.webknossos.datastore.services.DSRemoteWebknossosClient @@ -66,7 +66,7 @@ class ExploreRemoteLayerService @Inject()(dataVaultService: DataVaultService, _ <- bool2Fox(layersWithVoxelSizes.nonEmpty) ?~> "Detected zero layers" (layers, voxelSize) <- adaptLayersAndVoxelSize(layersWithVoxelSizes, preferredVoxelSize) dataSource = GenericDataSource[DataLayer]( - LegacyDataSourceId("", ""), // Frontend will prompt user for a good name + DataSourceId("", ""), // Frontend will prompt user for a good name layers, voxelSize ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/UnfinishedUpload.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/UnfinishedUpload.scala index c315bd6585d..8f8947b6a9c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/UnfinishedUpload.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/UnfinishedUpload.scala @@ -1,11 +1,11 @@ package com.scalableminds.webknossos.datastore.models import com.scalableminds.util.time.Instant -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import play.api.libs.json.{Format, Json} case class UnfinishedUpload(uploadId: String, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, folderId: String, created: Instant, filePaths: Option[List[String]], diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationSource.scala index 93c4230ab5b..fbfce5c8e2f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/annotation/AnnotationSource.scala @@ -4,7 +4,7 @@ import play.api.libs.json.{Json, OFormat} case class AnnotationSource(id: String, annotationLayers: List[AnnotationLayer], - datasetName: String, + datasetPath: String, organizationId: String, dataStoreUrl: String, tracingStoreUrl: String, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala index be4ad0b1a83..030c33ee148 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala @@ -198,7 +198,7 @@ trait DataLayer extends DataLayerLike { def lengthOfUnderlyingCubes(resolution: Vec3Int): Int def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider def bucketProviderCacheKey: String = this.name diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala index b42e449b76e..385191a6305 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala @@ -2,32 +2,48 @@ package com.scalableminds.webknossos.datastore.models import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.requestparsing.{DatasetURIParser, ObjectId} +import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrHeader +import com.scalableminds.webknossos.datastore.helpers.JsonImplicits import com.scalableminds.webknossos.datastore.models.datasource.DatasetViewConfiguration.DatasetViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource.inbox.GenericInboxDataSource +import play.api.libs.json.Json.WithDefaultValues import play.api.libs.json._ package object datasource { // here team is not (yet) renamed to organization to avoid migrating all jsons - case class LegacyDataSourceId(name: String, team: String) extends DatasetURIParser { - override def toString: String = s"DataSourceId($team/$name)" + case class DataSourceId(path: String, organizationId: String) extends DatasetURIParser { + override def toString: String = s"DataSourceId($organizationId/$path)" } - object LegacyDataSourceId extends DatasetURIParser { - implicit val dataSourceIdFormat: Format[LegacyDataSourceId] = Json.format[LegacyDataSourceId] + object DataSourceId extends JsonImplicits with DatasetURIParser { + implicit object DataSourceIdFormat extends Format[DataSourceId] { + override def reads(json: JsValue): JsResult[DataSourceId] = + (json \ "path").validate[String] flatMap { path => + (json \ 
"organization").validate[String].map { org => + DataSourceId(path, org) + } + } + + override def writes(datasetId: DataSourceId): JsValue = + Json.obj( + "name" -> datasetId.path, + "team" -> datasetId.organizationId, + ) + } - def fromDatasetNameAndIdAndOrganizationId(datasetNameAndId: String, organizationId: String): LegacyDataSourceId = { + def fromDatasetNameAndIdAndOrganizationId(datasetNameAndId: String, organizationId: String): DataSourceId = { val (maybeId, maybeDatasetName) = getDatasetIdOrNameFromURIPath(datasetNameAndId) maybeId match { - case Some(validId) => LegacyDataSourceId(validId.toString, organizationId) - case None => LegacyDataSourceId(maybeDatasetName.getOrElse(datasetNameAndId), organizationId) + case Some(validId) => DataSourceId(validId.toString, organizationId) + case None => DataSourceId(maybeDatasetName.getOrElse(datasetNameAndId), organizationId) } } - def fromDatasetIdOrNameAndOrganizationId(datasetIdOrName: String, organizationId: String): LegacyDataSourceId = { + def fromDatasetIdOrNameAndOrganizationId(datasetIdOrName: String, organizationId: String): DataSourceId = { val parsedId = ObjectId.fromStringSync(datasetIdOrName) parsedId match { - case Some(validId) => LegacyDataSourceId(validId.toString, organizationId) - case None => LegacyDataSourceId(datasetIdOrName, organizationId) + case Some(validId) => DataSourceId(validId.toString, organizationId) + case None => DataSourceId(datasetIdOrName, organizationId) } } } @@ -45,7 +61,7 @@ package object datasource { implicit val jsonFormat: Format[DatasetViewConfiguration] = Format.of[DatasetViewConfiguration] } - case class GenericDataSource[+T <: DataLayerLike](id: LegacyDataSourceId, + case class GenericDataSource[+T <: DataLayerLike](id: DataSourceId, dataLayers: List[T], scale: VoxelSize, defaultViewConfiguration: Option[DatasetViewConfiguration] = None) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala index 0b0425e6aa2..43d73851bb3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala @@ -8,7 +8,7 @@ package object inbox { trait GenericInboxDataSource[+T <: DataLayerLike] { - def id: LegacyDataSourceId + def id: DataSourceId def toUsable: Option[GenericDataSource[T]] @@ -35,7 +35,7 @@ package object inbox { } } - case class UnusableDataSource[+T <: DataLayerLike](id: LegacyDataSourceId, + case class UnusableDataSource[+T <: DataLayerLike](id: DataSourceId, status: String, scale: Option[VoxelSize] = None, existingDataSourceProperties: Option[JsValue] = None) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala index 774b781aec4..f0bdebc7cdb 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala @@ -28,7 +28,7 @@ case class DataReadInstruction( bucket: BucketPosition, version: Option[Long] = None ) { - def layerSummary: String = f"${dataSource.id.team}/${dataSource.id.name}/${dataLayer.name}" + def layerSummary: String = 
f"${dataSource.id.organizationId}/${dataSource.id.path}/${dataLayer.name}" } case class DataServiceMappingRequest( diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index e1b36d40e42..8d457a19963 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.rpc import com.scalableminds.util.mvc.MimeTypes import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.services.uploading.ReserveUploadInformation import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Failure, Full} import play.api.http.{HeaderNames, Status} @@ -126,6 +127,12 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient)(implicit ec: performRequest } + def postWithJsonResponse[TW: Writes, TR: Reads](body: TW = Json.obj()): Fox[TR] = { + request = + request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("POST") + parseJsonResponse(performRequest) + } + def put[T: Writes](body: T = Json.obj()): Fox[WSResponse] = { request = request.addHttpHeaders(HeaderNames.CONTENT_TYPE -> jsonMimeType).withBody(Json.toJson(body)).withMethod("PUT") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 57ca8c9e1de..2e89f193607 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -4,7 +4,7 @@ import com.google.inject.Inject import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.enumeration.ExtendedEnumeration import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import play.api.libs.json.{Json, OFormat} import play.api.mvc.Result import play.api.mvc.Results.Forbidden @@ -25,31 +25,31 @@ object AccessResourceType extends ExtendedEnumeration { case class UserAccessAnswer(granted: Boolean, msg: Option[String] = None) object UserAccessAnswer { implicit val jsonFormat: OFormat[UserAccessAnswer] = Json.format[UserAccessAnswer] } -case class UserAccessRequest(resourceId: LegacyDataSourceId, resourceType: AccessResourceType.Value, mode: AccessMode.Value) +case class UserAccessRequest(resourceId: DataSourceId, resourceType: AccessResourceType.Value, mode: AccessMode.Value) object UserAccessRequest { implicit val jsonFormat: OFormat[UserAccessRequest] = Json.format[UserAccessRequest] - def deleteDataSource(dataSourceId: LegacyDataSourceId): UserAccessRequest = + def deleteDataSource(dataSourceId: DataSourceId): UserAccessRequest = UserAccessRequest(dataSourceId, AccessResourceType.datasource, AccessMode.delete) def administrateDataSources: UserAccessRequest = - UserAccessRequest(LegacyDataSourceId("", ""), AccessResourceType.datasource, AccessMode.administrate) + UserAccessRequest(DataSourceId("", ""), AccessResourceType.datasource, AccessMode.administrate) def administrateDataSources(organizationId: String): 
UserAccessRequest = - UserAccessRequest(LegacyDataSourceId("", organizationId), AccessResourceType.datasource, AccessMode.administrate) - def readDataSources(dataSourceId: LegacyDataSourceId): UserAccessRequest = + UserAccessRequest(DataSourceId("", organizationId), AccessResourceType.datasource, AccessMode.administrate) + def readDataSources(dataSourceId: DataSourceId): UserAccessRequest = UserAccessRequest(dataSourceId, AccessResourceType.datasource, AccessMode.read) - def writeDataSource(dataSourceId: LegacyDataSourceId): UserAccessRequest = + def writeDataSource(dataSourceId: DataSourceId): UserAccessRequest = UserAccessRequest(dataSourceId, AccessResourceType.datasource, AccessMode.write) def readTracing(tracingId: String): UserAccessRequest = - UserAccessRequest(LegacyDataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.read) + UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.read) def writeTracing(tracingId: String): UserAccessRequest = - UserAccessRequest(LegacyDataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.write) + UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.write) def downloadJobExport(jobId: String): UserAccessRequest = - UserAccessRequest(LegacyDataSourceId(jobId, ""), AccessResourceType.jobExport, AccessMode.read) + UserAccessRequest(DataSourceId(jobId, ""), AccessResourceType.jobExport, AccessMode.read) def webknossos: UserAccessRequest = - UserAccessRequest(LegacyDataSourceId("webknossos", ""), AccessResourceType.webknossos, AccessMode.administrate) + UserAccessRequest(DataSourceId("webknossos", ""), AccessResourceType.webknossos, AccessMode.administrate) } trait AccessTokenService { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala index 0d3bab614e6..167a2cc610c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala @@ -29,6 +29,8 @@ class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverte private val dataBaseDir = Paths.get(config.Datastore.baseFolder) private val cumsumFileName = "cumsum.json" + // TODO: Also rename datasetName to datasetPath + lazy val agglomerateFileCache = new AgglomerateFileCache(config.Datastore.Cache.AgglomerateFile.maxFileHandleEntries) def exploreAgglomerates(organizationId: String, datasetName: String, dataLayerName: String): Set[String] = { @@ -136,7 +138,7 @@ class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverte } def generateSkeleton(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: String, agglomerateId: Long): Box[SkeletonTracing] = @@ -145,7 +147,7 @@ class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverte val hdfFile = dataBaseDir .resolve(organizationId) - .resolve(datasetName) + .resolve(datasetPath) .resolve(dataLayerName) .resolve(agglomerateDir) .resolve(s"$mappingName.$agglomerateFileExtension") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index 7abf60a1974..1a45347dd2c 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.tools.ExtendedTypes.ExtendedArraySeq import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.helpers.DatasetDeleter import com.scalableminds.webknossos.datastore.models.BucketPosition -import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayer, LegacyDataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayer, DataSourceId} import com.scalableminds.webknossos.datastore.models.requests.{DataReadInstruction, DataServiceDataRequest} import com.scalableminds.webknossos.datastore.storage._ import com.typesafe.scalalogging.LazyLogging @@ -87,7 +87,7 @@ class BinaryDataService(val dataBaseDir: Path, val readInstruction = DataReadInstruction(dataBaseDir, request.dataSource, request.dataLayer, bucket, request.settings.version) // dataSource is null and unused for volume tracings. Insert dummy DataSourceId (also unused in that case) - val dataSourceId = if (request.dataSource != null) request.dataSource.id else LegacyDataSourceId("", "") + val dataSourceId = if (request.dataSource != null) request.dataSource.id else DataSourceId("", "") val bucketProvider = bucketProviderCache.getOrLoadAndPut((dataSourceId, request.dataLayer.bucketProviderCacheKey))(_ => request.dataLayer.bucketProvider(remoteSourceDescriptorServiceOpt, dataSourceId, sharedChunkContentsCache)) @@ -98,11 +98,11 @@ class BinaryDataService(val dataBaseDir: Path, s"Caught internal error: $msg while loading a bucket for layer ${request.dataLayer.name} of dataset ${request.dataSource.id}") Fox.failure(e.getMessage) case f: Failure => - if (datasetErrorLoggingService.exists(_.shouldLog(request.dataSource.id.team, request.dataSource.id.name))) { + if (datasetErrorLoggingService.exists(_.shouldLog(request.dataSource.id.organizationId, request.dataSource.id.path))) { logger.error( - s"Bucket loading for layer ${request.dataLayer.name} of dataset ${request.dataSource.id.team}/${request.dataSource.id.name} at ${readInstruction.bucket} failed: ${Fox + s"Bucket loading for layer ${request.dataLayer.name} of dataset ${request.dataSource.id.organizationId}/${request.dataSource.id.path} at ${readInstruction.bucket} failed: ${Fox .failureChainAsString(f, includeStackTraces = true)}") - datasetErrorLoggingService.foreach(_.registerLogged(request.dataSource.id.team, request.dataSource.id.name)) + datasetErrorLoggingService.foreach(_.registerLogged(request.dataSource.id.organizationId, request.dataSource.id.path)) } f.toFox case Full(data) => @@ -175,14 +175,14 @@ class BinaryDataService(val dataBaseDir: Path, } def clearCache(organizationId: String, datasetName: String, layerName: Option[String]): (Int, Int, Int) = { - val dataSourceId = LegacyDataSourceId(datasetName, organizationId) + val dataSourceId = DataSourceId(datasetName, organizationId) def agglomerateFileMatchPredicate(agglomerateKey: AgglomerateFileKey) = agglomerateKey.datasetName == datasetName && agglomerateKey.organizationId == organizationId && layerName.forall( _ == agglomerateKey.layerName) - def bucketProviderPredicate(key: (LegacyDataSourceId, String)): Boolean = - key._1 == LegacyDataSourceId(datasetName, organizationId) && layerName.forall(_ == key._2) + def bucketProviderPredicate(key: (DataSourceId, String)): Boolean = + 
key._1 == DataSourceId(datasetName, organizationId) && layerName.forall(_ == key._2) val closedAgglomerateFileHandleCount = agglomerateServiceOpt.map(_.agglomerateFileCache.clear(agglomerateFileMatchPredicate)).getOrElse(0) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ConnectomeFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ConnectomeFileService.scala index 97d0c270830..1ca48c75146 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ConnectomeFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ConnectomeFileService.scala @@ -90,8 +90,8 @@ class ConnectomeFileService @Inject()(config: DataStoreConfig)(implicit ec: Exec private lazy val connectomeFileCache = new Hdf5FileCache(30) - def exploreConnectomeFiles(organizationId: String, datasetName: String, dataLayerName: String): Set[String] = { - val layerDir = dataBaseDir.resolve(organizationId).resolve(datasetName).resolve(dataLayerName) + def exploreConnectomeFiles(organizationId: String, datasetPath: String, dataLayerName: String): Set[String] = { + val layerDir = dataBaseDir.resolve(organizationId).resolve(datasetPath).resolve(dataLayerName) PathUtils .listFiles(layerDir.resolve(connectomesDir), silent = true, @@ -105,12 +105,12 @@ class ConnectomeFileService @Inject()(config: DataStoreConfig)(implicit ec: Exec } def connectomeFilePath(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, connectomeFileName: String): Path = dataBaseDir .resolve(organizationId) - .resolve(datasetName) + .resolve(datasetPath) .resolve(dataLayerName) .resolve(connectomesDir) .resolve(s"$connectomeFileName.$connectomeFileExtension") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala index 24c14630a91..74460560a32 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala @@ -51,13 +51,13 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, def loadFor(token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = fullMeshRequest.meshFileName match { case Some(_) => - loadFullMeshFromMeshfile(token, organizationId, datasetName, dataLayerName, fullMeshRequest) - case None => loadFullMeshFromAdHoc(organizationId, datasetName, dataLayerName, fullMeshRequest) + loadFullMeshFromMeshfile(token, organizationId, datasetPath, dataLayerName, fullMeshRequest) + case None => loadFullMeshFromAdHoc(organizationId, datasetPath, dataLayerName, fullMeshRequest) } private def loadFullMeshFromAdHoc( @@ -116,19 +116,19 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, private def loadFullMeshFromMeshfile( token: Option[String], organizationId: String, - datasetName: String, + datasetPath: String, layerName: String, fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = for { meshFileName <- fullMeshRequest.meshFileName.toFox ?~> "meshFileName.needed" before = Instant.now mappingNameForMeshFile = 
meshFileService.mappingNameForMeshFile(organizationId, - datasetName, + datasetPath, layerName, meshFileName) segmentIds <- segmentIdsForAgglomerateIdIfNeeded( organizationId, - datasetName, + datasetPath, layerName, fullMeshRequest.mappingName, fullMeshRequest.editableMappingTracingId, @@ -138,20 +138,20 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, token ) chunkInfos: WebknossosSegmentInfo <- meshFileService.listMeshChunksForSegmentsMerged(organizationId, - datasetName, + datasetPath, layerName, meshFileName, segmentIds) allChunkRanges: List[MeshChunk] = chunkInfos.chunks.lods.head.chunks stlEncodedChunks: Seq[Array[Byte]] <- Fox.serialCombined(allChunkRanges) { chunkRange: MeshChunk => - readMeshChunkAsStl(organizationId, datasetName, layerName, meshFileName, chunkRange, chunkInfos.transform) + readMeshChunkAsStl(organizationId, datasetPath, layerName, meshFileName, chunkRange, chunkInfos.transform) } stlOutput = combineEncodedChunksToStl(stlEncodedChunks) _ = logMeshingDuration(before, "meshfile", stlOutput.length) } yield stlOutput private def readMeshChunkAsStl(organizationId: String, - datasetName: String, + datasetPath: String, layerName: String, meshfileName: String, chunkInfo: MeshChunk, @@ -159,7 +159,7 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, for { (dracoMeshChunkBytes, encoding) <- meshFileService.readMeshChunk( organizationId, - datasetName, + datasetPath, layerName, MeshChunkDataRequestList(meshfileName, List(MeshChunkDataRequest(chunkInfo.byteOffset, chunkInfo.byteSize))) ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index b599bbe15c6..01e15723007 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -11,7 +11,7 @@ import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler import com.scalableminds.webknossos.datastore.models.UnfinishedUpload import com.scalableminds.webknossos.datastore.models.annotation.AnnotationSource -import com.scalableminds.webknossos.datastore.models.datasource.{DatasetIdWithPath, LegacyDataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{DatasetIdWithPath, DataSourceId} import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSourceLike import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.uploading.ReserveUploadInformation @@ -79,7 +79,7 @@ class DSRemoteWebknossosClient @Inject()( .getWithJsonResponse[List[UnfinishedUpload]] } yield unfinishedUploads - def reportUpload(dataSourceId: LegacyDataSourceId, + def reportUpload(datasetId: ObjectId, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean, @@ -87,7 +87,7 @@ class DSRemoteWebknossosClient @Inject()( for { _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reportDatasetUpload") .addQueryString("key" -> dataStoreKey) - .addQueryString("datasetName" -> dataSourceId.name) + .addQueryString("datasetId" -> datasetId.toString) .addQueryString("needsConversion" -> needsConversion.toString) .addQueryString("viaAddRoute" -> viaAddRoute.toString) 
.addQueryString("datasetSizeBytes" -> datasetSizeBytes.toString) @@ -101,16 +101,17 @@ class DSRemoteWebknossosClient @Inject()( .silent .put(dataSources) - def reserveDataSourceUpload(info: ReserveUploadInformation, userTokenOpt: Option[String]): Fox[Unit] = + def reserveDataSourceUpload(info: ReserveUploadInformation, + userTokenOpt: Option[String]): Fox[ReserveUploadInformation] = for { userToken <- option2Fox(userTokenOpt) ?~> "reserveUpload.noUserToken" - _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reserveUpload") + reserveUploadInfo <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reserveUpload") .addQueryString("key" -> dataStoreKey) .addQueryString("token" -> userToken) - .post(info) - } yield () + .postWithJsonResponse[ReserveUploadInformation, ReserveUploadInformation](info) + } yield reserveUploadInfo - def deleteDataSource(id: LegacyDataSourceId): Fox[_] = + def deleteDataSource(id: DataSourceId): Fox[_] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset").addQueryString("key" -> dataStoreKey).post(id) def getJobExportProperties(jobId: String): Fox[JobExportProperties] = @@ -168,7 +169,8 @@ class DSRemoteWebknossosClient @Inject()( def resolveDatasetNameToId(organizationId: String, datasetName: String): Fox[DatasetIdWithPath] = for { - datasetIdWithPath <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/:$organizationId/:$datasetName/getDatasetId") + datasetIdWithPath <- rpc( + s"$webknossosUri/api/datastores/$dataStoreName/:$organizationId/:$datasetName/getDatasetId") .addQueryString("key" -> dataStoreKey) .silent .getWithJsonResponse[DatasetIdWithPath] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala index 412cb67fa1d..19d20cddd8d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala @@ -4,7 +4,7 @@ import org.apache.pekko.actor.ActorSystem import com.google.inject.Inject import com.google.inject.name.Named import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DatasetIdWithPath, LegacyDataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId} import com.scalableminds.webknossos.datastore.storage.TemporaryStore import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging @@ -16,24 +16,24 @@ class DataSourceRepository @Inject()( remoteWebknossosClient: DSRemoteWebknossosClient, @Named("webknossos-datastore") val system: ActorSystem )(implicit ec: ExecutionContext) - extends TemporaryStore[DatasetIdWithPath, InboxDataSource](system) + extends TemporaryStore[DataSourceId, InboxDataSource](system) with LazyLogging with FoxImplicits { - def getDataSourceAndDataLayer(datasetId: DatasetIdWithPath, dataLayerName: String)( + def getDataSourceAndDataLayer(organizationId: String, datasetPath: String, dataLayerName: String)( implicit m: MessagesProvider): Fox[(DataSource, DataLayer)] = for { - dataSource <- findUsable(datasetId).toFox ?~> Messages("dataSource.notFound") + dataSource <- findUsable(DataSourceId(datasetPath, organizationId)).toFox ?~> Messages("dataSource.notFound") dataLayer <- 
dataSource.getDataLayer(dataLayerName) ?~> Messages("dataLayer.notFound", dataLayerName) } yield (dataSource, dataLayer) - def findUsable(datasetId: DatasetIdWithPath): Option[DataSource] = - find(datasetId).flatMap(_.toUsable) + def findUsable(id: DataSourceId): Option[DataSource] = + find(id).flatMap(_.toUsable) - def updateDataSource(datasetId: DatasetIdWithPath, dataSource: InboxDataSource): Fox[Unit] = + def updateDataSource(dataSource: InboxDataSource): Fox[Unit] = for { _ <- Fox.successful(()) - _ = insert(datasetId, dataSource) + _ = insert(dataSource.id, dataSource) _ <- remoteWebknossosClient.reportDataSource(dataSource) } yield () @@ -45,7 +45,7 @@ class DataSourceRepository @Inject()( _ <- remoteWebknossosClient.reportDataSources(dataSources) } yield () - def cleanUpDataSource(dataSourceId: LegacyDataSourceId): Fox[Unit] = + def cleanUpDataSource(dataSourceId: DataSourceId): Fox[Unit] = for { _ <- Fox.successful(remove(dataSourceId)) _ <- remoteWebknossosClient.deleteDataSource(dataSourceId) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala index 168f076b229..aacbcfbbd19 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala @@ -81,13 +81,13 @@ class DataSourceService @Inject()( s"Finished scanning inbox ($dataBaseDir): ${foundInboxSources.count(_.isUsable)} active, ${foundInboxSources .count(!_.isUsable)} inactive" val msg = if (verbose) { - val byTeam: Map[String, Seq[InboxDataSource]] = foundInboxSources.groupBy(_.id.team) + val byTeam: Map[String, Seq[InboxDataSource]] = foundInboxSources.groupBy(_.id.organizationId) shortForm + ". " + byTeam.keys.map { team => val byUsable: Map[Boolean, Seq[InboxDataSource]] = byTeam(team).groupBy(_.isUsable) team + ": [" + byUsable.keys.map { usable => val label = if (usable) "active: [" else "inactive: [" label + byUsable(usable).map { ds => - s"${ds.id.name}" + s"${ds.id.path}" }.mkString(" ") + "]" }.mkString(", ") + "]" }.mkString(", ") @@ -160,9 +160,9 @@ class DataSourceService @Inject()( } def updateDataSource(dataSource: DataSource, expectExisting: Boolean): Fox[Unit] = - for { + for { // TODO: Dangerous territory. Unsure whether this still works. Needs testing. 
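+      // The on-disk location is now resolved from organizationId and path (the dataset's directory,
+      // decoupled from its display name), so datasource-properties.json is expected under
+      // <dataBaseDir>/<organizationId>/<path>/.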
_ <- validateDataSource(dataSource).toFox - dataSourcePath = dataBaseDir.resolve(dataSource.id.team).resolve(dataSource.id.name) + dataSourcePath = dataBaseDir.resolve(dataSource.id.organizationId).resolve(dataSource.id.path) propertiesFile = dataSourcePath.resolve(propertiesFileName) _ <- Fox.runIf(!expectExisting)(ensureDirectoryBox(dataSourcePath)) _ <- Fox.runIf(!expectExisting)(bool2Fox(!Files.exists(propertiesFile))) ?~> "dataSource.alreadyPresent" @@ -209,7 +209,7 @@ class DataSourceService @Inject()( } def dataSourceFromDir(path: Path, organizationId: String): InboxDataSource = { - val id = LegacyDataSourceId(path.getFileName.toString, organizationId) + val id = DataSourceId(path.getFileName.toString, organizationId) val propertiesFile = path.resolve(propertiesFileName) if (new File(propertiesFile.toString).exists()) { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala index 2133acb94a4..d57dffddd3c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala @@ -5,7 +5,7 @@ import com.google.inject.name.Named import com.scalableminds.util.requestparsing.{DatasetURIParser, ObjectId} import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, LegacyDataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId} import com.scalableminds.webknossos.datastore.storage.TemporaryStore import com.typesafe.scalalogging.LazyLogging import org.apache.pekko.actor.ActorSystem @@ -19,13 +19,13 @@ class DatasetIdRepository @Inject()( remoteWebknossosClient: DSRemoteWebknossosClient, @Named("webknossos-datastore") val system: ActorSystem )(implicit ec: ExecutionContext) - extends TemporaryStore[LegacyDataSourceId, ObjectId](system) + extends TemporaryStore[DataSourceId, ObjectId](system) with LazyLogging with FoxImplicits with DatasetURIParser { def getDatasetIdFromIdOrName(datasetIdOrName: String, organizationId: String): Fox[ObjectId] = { - val dataSourceId = LegacyDataSourceId(datasetIdOrName, organizationId) + val dataSourceId = DataSourceId(datasetIdOrName, organizationId) find(dataSourceId) match { case Some(datasetId) => Fox.successful(datasetId) case None => diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala index 23ecd7be3ba..7d00d0f0d25 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala @@ -177,8 +177,8 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC private lazy val meshFileCache = new Hdf5FileCache(30) - def exploreMeshFiles(organizationId: String, datasetName: String, dataLayerName: String): Fox[Set[MeshFileInfo]] = { - val layerDir = dataBaseDir.resolve(organizationId).resolve(datasetName).resolve(dataLayerName) + def exploreMeshFiles(organizationId: String, datasetPath: String, dataLayerName: String): 
Fox[Set[MeshFileInfo]] = { + val layerDir = dataBaseDir.resolve(organizationId).resolve(datasetPath).resolve(dataLayerName) val meshFileNames = PathUtils .listFiles(layerDir.resolve(meshesDir), silent = true, PathUtils.fileExtensionFilter(hdf5FileExtension)) .map { paths => @@ -218,13 +218,13 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC // Same as above but this variant constructs the meshFilePath itself and converts null to None def mappingNameForMeshFile(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, meshFileName: String): Option[String] = { val meshFilePath = dataBaseDir .resolve(organizationId) - .resolve(datasetName) + .resolve(datasetPath) .resolve(dataLayerName) .resolve(meshesDir) .resolve(s"$meshFileName.$hdf5FileExtension") @@ -242,12 +242,12 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC def listMeshChunksForSegmentsMerged( organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, meshFileName: String, segmentIds: Seq[Long])(implicit m: MessagesProvider): Fox[WebknossosSegmentInfo] = { val meshChunksForUnmappedSegments = - listMeshChunksForSegmentsNested(organizationId, datasetName, dataLayerName, meshFileName, segmentIds) + listMeshChunksForSegmentsNested(organizationId, datasetPath, dataLayerName, meshFileName, segmentIds) for { _ <- bool2Fox(meshChunksForUnmappedSegments.nonEmpty) ?~> "zero chunks" ?~> Messages( "mesh.file.listChunks.failed", @@ -258,7 +258,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC } private def listMeshChunksForSegmentsNested(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, meshFileName: String, segmentIds: Seq[Long]): Seq[WebknossosSegmentInfo] = { @@ -369,7 +369,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC } def readMeshChunk(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, meshChunkDataRequests: MeshChunkDataRequestList, ): Fox[(Array[Byte], String)] = @@ -381,7 +381,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC val meshChunkDataRequest = requestAndIndex._1 val meshFilePath = dataBaseDir .resolve(organizationId) - .resolve(datasetName) + .resolve(datasetPath) .resolve(dataLayerName) .resolve(meshesDir) .resolve(s"${meshChunkDataRequests.meshFile}.$hdf5FileExtension") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala index 391c234fc6b..ae616378b89 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala @@ -15,7 +15,7 @@ trait MeshMappingHelper { protected def segmentIdsForAgglomerateIdIfNeeded( organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String], @@ -39,7 +39,7 @@ trait MeshMappingHelper { .segmentIdsForAgglomerateId( AgglomerateFileKey( organizationId, - datasetName, + datasetPath, dataLayerName, mappingName ), @@ -68,7 +68,7 @@ trait MeshMappingHelper { localSegmentIds <- agglomerateService.segmentIdsForAgglomerateId( AgglomerateFileKey( organizationId, - 
datasetName, + datasetPath, dataLayerName, mappingName ), diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala index 69391015eba..8046949ac72 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala @@ -39,10 +39,10 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, private lazy val fileHandleCache = new Hdf5FileCache(10) - def getSegmentIndexFile(organizationId: String, datasetName: String, dataLayerName: String): Box[Path] = + def getSegmentIndexFile(organizationId: String, datasetPath: String, dataLayerName: String): Box[Path] = for { _ <- Full("") - layerDir = dataBaseDir.resolve(organizationId).resolve(datasetName).resolve(dataLayerName) + layerDir = dataBaseDir.resolve(organizationId).resolve(datasetPath).resolve(dataLayerName) segmentIndexDir = layerDir.resolve(this.segmentIndexDir) files <- PathUtils.listFiles(segmentIndexDir, silent = true, PathUtils.fileExtensionFilter(hdf5FileExtension)) file <- files.headOption @@ -53,11 +53,11 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, * The bucket positions are the top left corners of the buckets that contain the segment in the file mag. */ def readSegmentIndex(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, segmentId: Long): Fox[Array[Vec3Int]] = for { - segmentIndexPath <- getSegmentIndexFile(organizationId, datasetName, dataLayerName).toFox + segmentIndexPath <- getSegmentIndexFile(organizationId, datasetPath, dataLayerName).toFox segmentIndex = fileHandleCache.withCache(segmentIndexPath)(CachedHdf5File.fromPath) hashFunction = getHashFunction(segmentIndex.reader.string().getAttr("/", "hash_function")) nBuckets = segmentIndex.reader.uint64().getAttr("/", "n_hash_buckets") @@ -76,9 +76,9 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, case None => Array.empty } - def readFileMag(organizationId: String, datasetName: String, dataLayerName: String): Fox[Vec3Int] = + def readFileMag(organizationId: String, datasetPath: String, dataLayerName: String): Fox[Vec3Int] = for { - segmentIndexPath <- getSegmentIndexFile(organizationId, datasetName, dataLayerName).toFox + segmentIndexPath <- getSegmentIndexFile(organizationId, datasetPath, dataLayerName).toFox segmentIndex = fileHandleCache.withCache(segmentIndexPath)(CachedHdf5File.fromPath) mag <- Vec3Int.fromArray(segmentIndex.reader.uint64().getArrayAttr("/", "mag").map(_.toInt)).toFox } yield mag @@ -116,7 +116,7 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, .distinct def getSegmentVolume(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, segmentId: Long, mag: Vec3Int, @@ -125,12 +125,12 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, segmentId, mag, None, // see #7556 - getBucketPositions(organizationId, datasetName, dataLayerName, mappingName), - getTypedDataForBucketPosition(organizationId, datasetName, dataLayerName, mappingName) + getBucketPositions(organizationId, datasetPath, dataLayerName, mappingName), + getTypedDataForBucketPosition(organizationId, datasetPath, dataLayerName, mappingName) ) def getSegmentBoundingBox(organizationId: String, - datasetName: String, + 
datasetPath: String, dataLayerName: String, segmentId: Long, mag: Vec3Int, @@ -141,16 +141,16 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, segmentId, mag, None, // see #7556 - getBucketPositions(organizationId, datasetName, dataLayerName, mappingName), - getTypedDataForBucketPosition(organizationId, datasetName, dataLayerName, mappingName) + getBucketPositions(organizationId, datasetPath, dataLayerName, mappingName), + getTypedDataForBucketPosition(organizationId, datasetPath, dataLayerName, mappingName) ) } yield bb - def assertSegmentIndexFileExists(organizationId: String, datasetName: String, dataLayerName: String): Fox[Path] = - Fox.box2Fox(getSegmentIndexFile(organizationId, datasetName, dataLayerName)) ?~> "segmentIndexFile.notFound" + def assertSegmentIndexFileExists(organizationId: String, datasetPath: String, dataLayerName: String): Fox[Path] = + Fox.box2Fox(getSegmentIndexFile(organizationId, datasetPath, dataLayerName)) ?~> "segmentIndexFile.notFound" private def getTypedDataForBucketPosition(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: Option[String])( bucketPosition: Vec3Int, @@ -159,7 +159,7 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, for { // Additional coordinates parameter ignored, see #7556 (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, + datasetPath, dataLayerName) data <- getDataForBucketPositions(dataSource, dataLayer, mag, Seq(bucketPosition * mag), mappingName) dataTyped: Array[UnsignedInteger] = UnsignedIntegerArray.fromByteArray(data, dataLayer.elementClass) @@ -167,33 +167,33 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, private def getBucketPositions( organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, mappingName: Option[String])(segmentOrAgglomerateId: Long, mag: Vec3Int): Fox[ListOfVec3IntProto] = for { segmentIds <- getSegmentIdsForAgglomerateIdIfNeeded(organizationId, - datasetName, + datasetPath, dataLayerName, segmentOrAgglomerateId, mappingName) positionsPerSegment <- Fox.serialCombined(segmentIds)(segmentId => - getBucketPositions(organizationId, datasetName, dataLayerName, segmentId, mag)) + getBucketPositions(organizationId, datasetPath, dataLayerName, segmentId, mag)) positionsCollected = positionsPerSegment.flatten.distinct } yield ListOfVec3IntProto.of(positionsCollected.map(vec3IntToProto)) private def getBucketPositions(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, segmentId: Long, mag: Vec3Int): Fox[Array[Vec3Int]] = for { - fileMag <- readFileMag(organizationId, datasetName, dataLayerName) - bucketPositionsInFileMag <- readSegmentIndex(organizationId, datasetName, dataLayerName, segmentId) + fileMag <- readFileMag(organizationId, datasetPath, dataLayerName) + bucketPositionsInFileMag <- readSegmentIndex(organizationId, datasetPath, dataLayerName, segmentId) bucketPositions = bucketPositionsInFileMag.map(_ / (mag / fileMag)) } yield bucketPositions private def getSegmentIdsForAgglomerateIdIfNeeded(organizationId: String, - datasetName: String, + datasetPath: String, dataLayerName: String, segmentOrAgglomerateId: Long, mappingNameOpt: Option[String]): Fox[List[Long]] = @@ -204,7 +204,7 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig, agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox agglomerateFileKey = 
AgglomerateFileKey( organizationId, - datasetName, + datasetPath, dataLayerName, mappingName ) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala index 87498a3ff87..40d7bbdeb7f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala @@ -89,7 +89,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, private def getLayerFromComposeLayer(composeLayer: ComposeRequestLayer, uploadDir: Path): Fox[DataLayer] = for { dataSourceId <- Fox.successful( - LegacyDataSourceId(composeLayer.datasetId.name, composeLayer.datasetId.owningOrganization)) + DataSourceId(composeLayer.datasetId.name, composeLayer.datasetId.owningOrganization)) dataSource <- Fox.option2Fox(dataSourceRepository.find(dataSourceId)) ds <- Fox.option2Fox(dataSource.toUsable) layer <- Fox.option2Fox(ds.dataLayers.find(_.name == composeLayer.sourceName)) @@ -144,7 +144,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, for { layers <- Fox.serialCombined(composeRequest.layers.toList)(getLayerFromComposeLayer(_, uploadDir)) dataSource = GenericDataSource( - LegacyDataSourceId(composeRequest.newDatasetName, organizationId), + DataSourceId(composeRequest.newDatasetName, organizationId), layers, composeRequest.voxelSize, None @@ -153,8 +153,8 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, } yield dataSource } - private def isLayerRemote(dataSourceId: LegacyDataSourceId, layerName: String) = { - val layerPath = dataBaseDir.resolve(dataSourceId.team).resolve(dataSourceId.name).resolve(layerName) + private def isLayerRemote(dataSourceId: DataSourceId, layerName: String) = { + val layerPath = dataBaseDir.resolve(dataSourceId.organizationId).resolve(dataSourceId.path).resolve(layerName) !Files.exists(layerPath) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index 196b8de6a46..25aee89c424 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -31,6 +31,7 @@ import scala.concurrent.ExecutionContext case class ReserveUploadInformation( uploadId: String, // upload id that was also used in chunk upload (this time without file paths) name: String, // dataset name + path: String, // dataset path organization: String, totalFileCount: Long, filePaths: Option[List[String]], @@ -41,6 +42,7 @@ object ReserveUploadInformation { implicit val reserveUploadInformation: OFormat[ReserveUploadInformation] = Json.format[ReserveUploadInformation] } case class ReserveManualUploadInformation(datasetName: String, + datasetPath: String, organization: String, initialTeamIds: List[String], folderId: Option[String]) @@ -120,7 +122,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, s"upload___${uploadId}___file___${fileName}___chunkCount" private def redisKeyForFileChunkSet(uploadId: String, fileName: String): String = s"upload___${uploadId}___file___${fileName}___chunkSet" - private 
def redisKeyForUploadId(datasourceId: LegacyDataSourceId): String = + private def redisKeyForUploadId(datasourceId: DataSourceId): String = s"upload___${Json.stringify(Json.toJson(datasourceId))}___datasourceId" private def redisKeyForFilePaths(uploadId: String): String = s"upload___${uploadId}___filePaths" @@ -139,30 +141,30 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, private def uploadDirectory(organizationId: String, uploadId: String): Path = dataBaseDir.resolve(organizationId).resolve(uploadingDir).resolve(uploadId) - def getDataSourceIdByUploadId(uploadId: String): Fox[LegacyDataSourceId] = - getObjectFromRedis[LegacyDataSourceId](redisKeyForDataSourceId(uploadId)) + def getDataSourceIdByUploadId(uploadId: String): Fox[DataSourceId] = + getObjectFromRedis[DataSourceId](redisKeyForDataSourceId(uploadId)) - def reserveUpload(reserveUploadInformation: ReserveUploadInformation): Fox[Unit] = + def reserveUpload(reservedInfoByWk: ReserveUploadInformation): Fox[Unit] = for { - _ <- dataSourceService.assertDataDirWritable(reserveUploadInformation.organization) - _ <- runningUploadMetadataStore.insert(redisKeyForFileCount(reserveUploadInformation.uploadId), - String.valueOf(reserveUploadInformation.totalFileCount)) + _ <- dataSourceService.assertDataDirWritable(reservedInfoByWk.organization) + _ <- runningUploadMetadataStore.insert(redisKeyForFileCount(reservedInfoByWk.uploadId), + String.valueOf(reservedInfoByWk.totalFileCount)) _ <- runningUploadMetadataStore.insert( - redisKeyForDataSourceId(reserveUploadInformation.uploadId), - Json.stringify(Json.toJson(LegacyDataSourceId(reserveUploadInformation.name, reserveUploadInformation.organization))) + redisKeyForDataSourceId(reservedInfoByWk.uploadId), + Json.stringify(Json.toJson(DataSourceId(reservedInfoByWk.path, reservedInfoByWk.organization))) ) _ <- runningUploadMetadataStore.insert( - redisKeyForUploadId(LegacyDataSourceId(reserveUploadInformation.name, reserveUploadInformation.organization)), - reserveUploadInformation.uploadId + redisKeyForUploadId(DataSourceId(reservedInfoByWk.path, reservedInfoByWk.organization)), + reservedInfoByWk.uploadId ) - filePaths = Json.stringify(Json.toJson(reserveUploadInformation.filePaths.getOrElse(List.empty))) - _ <- runningUploadMetadataStore.insert(redisKeyForFilePaths(reserveUploadInformation.uploadId), filePaths) + filePaths = Json.stringify(Json.toJson(reservedInfoByWk.filePaths.getOrElse(List.empty))) + _ <- runningUploadMetadataStore.insert(redisKeyForFilePaths(reservedInfoByWk.uploadId), filePaths) _ <- runningUploadMetadataStore.insert( - redisKeyForLinkedLayerIdentifier(reserveUploadInformation.uploadId), - Json.stringify(Json.toJson(LinkedLayerIdentifiers(reserveUploadInformation.layersToLink))) + redisKeyForLinkedLayerIdentifier(reservedInfoByWk.uploadId), + Json.stringify(Json.toJson(LinkedLayerIdentifiers(reservedInfoByWk.layersToLink))) ) _ = logger.info( - f"Reserving dataset upload of ${reserveUploadInformation.organization}/${reserveUploadInformation.name} with id ${reserveUploadInformation.uploadId}...") + f"Reserving dataset upload of ${reservedInfoByWk.organization}/${reservedInfoByWk.name} with id ${reservedInfoByWk.uploadId}...") } yield () def addUploadIdsToUnfinishedUploads( @@ -192,7 +194,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, val uploadId = extractDatasetUploadId(uploadFileId) for { dataSourceId <- getDataSourceIdByUploadId(uploadId) - uploadDir = uploadDirectory(dataSourceId.team, uploadId) + uploadDir = 
uploadDirectory(dataSourceId.organizationId, uploadId) filePathRaw = uploadFileId.split("/").tail.mkString("/") filePath = if (filePathRaw.charAt(0) == '/') filePathRaw.drop(1) else filePathRaw _ <- bool2Fox(!isOutsideUploadDir(uploadDir, filePath)) ?~> s"Invalid file path: $filePath" @@ -247,7 +249,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, case e: Exception => runningUploadMetadataStore.removeFromSet(redisKeyForFileChunkSet(uploadId, filePath), String.valueOf(currentChunkNumber)) - val errorMsg = s"Error receiving chunk $currentChunkNumber for upload ${dataSourceId.name}: ${e.getMessage}" + val errorMsg = s"Error receiving chunk $currentChunkNumber for upload ${dataSourceId.path}: ${e.getMessage}" logger.warn(errorMsg) Fox.failure(errorMsg) } @@ -261,24 +263,24 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, knownUpload <- isKnownUpload(uploadId) } yield if (knownUpload) { - logger.info(f"Cancelling dataset upload of ${dataSourceId.team}/${dataSourceId.name} with id $uploadId...") + logger.info(f"Cancelling dataset upload of ${dataSourceId.organizationId}/${dataSourceId.path} with id $uploadId...") removeFromRedis(uploadId).flatMap(_ => - PathUtils.deleteDirectoryRecursively(uploadDirectory(dataSourceId.team, uploadId))) + PathUtils.deleteDirectoryRecursively(uploadDirectory(dataSourceId.organizationId, uploadId))) } else { Fox.failure(s"Unknown upload") } } - def finishUpload(uploadInformation: UploadInformation, checkCompletion: Boolean = true): Fox[(LegacyDataSourceId, Long)] = { + def finishUpload(uploadInformation: UploadInformation, checkCompletion: Boolean = true): Fox[(DataSourceId, Long)] = { val uploadId = uploadInformation.uploadId for { dataSourceId <- getDataSourceIdByUploadId(uploadId) datasetNeedsConversion = uploadInformation.needsConversion.getOrElse(false) - uploadDir = uploadDirectory(dataSourceId.team, uploadId) + uploadDir = uploadDirectory(dataSourceId.organizationId, uploadId) unpackToDir = dataSourceDirFor(dataSourceId, datasetNeedsConversion) - _ = logger.info(s"Finishing dataset upload of ${dataSourceId.team}/${dataSourceId.name} with id $uploadId...") + _ = logger.info(s"Finishing dataset upload of ${dataSourceId.organizationId}/${dataSourceId.path} with id $uploadId...") _ <- Fox.runIf(checkCompletion)(ensureAllChunksUploaded(uploadId)) _ <- ensureDirectoryBox(unpackToDir.getParent) ?~> "dataset.import.fileAccessDenied" unpackResult <- unpackDataset(uploadDir, unpackToDir).futureBox @@ -296,7 +298,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, dataSourceId, datasetNeedsConversion, label = s"processing dataset at $unpackToDir") - dataSource = dataSourceService.dataSourceFromDir(unpackToDir, dataSourceId.team) + dataSource = dataSourceService.dataSourceFromDir(unpackToDir, dataSourceId.organizationId) _ <- dataSourceRepository.updateDataSource(dataSource) datasetSizeBytes <- tryo(FileUtils.sizeOfDirectoryAsBigInteger(new File(unpackToDir.toString)).longValue) } yield (dataSourceId, datasetSizeBytes) @@ -304,7 +306,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, private def postProcessUploadedDataSource(datasetNeedsConversion: Boolean, unpackToDir: Path, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layersToLink: Option[List[LinkedLayerIdentifier]]): Fox[Unit] = if (datasetNeedsConversion) Fox.successful(()) @@ -323,12 +325,12 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, case 
UploadedDataSourceType.WKW => addLayerAndResolutionDirIfMissing(unpackToDir).toFox } _ <- datasetSymlinkService.addSymlinksToOtherDatasetLayers(unpackToDir, layersToLink.getOrElse(List.empty)) - _ <- addLinkedLayersToDataSourceProperties(unpackToDir, dataSourceId.team, layersToLink.getOrElse(List.empty)) + _ <- addLinkedLayersToDataSourceProperties(unpackToDir, dataSourceId.organizationId, layersToLink.getOrElse(List.empty)) } yield () } private def exploreLocalDatasource(path: Path, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, typ: UploadedDataSourceType.Value): Fox[Unit] = for { _ <- Fox.runIf(typ == UploadedDataSourceType.ZARR)( @@ -338,7 +340,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, } yield () private def tryExploringMultipleLayers(path: Path, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, typ: UploadedDataSourceType.Value): Fox[Option[Path]] = for { layerDirs <- typ match { @@ -363,18 +365,18 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, } yield path private def cleanUpOnFailure[T](result: Box[T], - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, datasetNeedsConversion: Boolean, label: String): Fox[Unit] = result match { case Full(_) => Fox.successful(()) case Empty => - deleteOnDisk(dataSourceId.team, dataSourceId.name, datasetNeedsConversion, Some("the upload failed")) + deleteOnDisk(dataSourceId.organizationId, dataSourceId.path, datasetNeedsConversion, Some("the upload failed")) Fox.failure(s"Unknown error $label") case Failure(msg, e, _) => logger.warn(s"Error while $label: $msg, $e") - deleteOnDisk(dataSourceId.team, dataSourceId.name, datasetNeedsConversion, Some("the upload failed")) + deleteOnDisk(dataSourceId.organizationId, dataSourceId.path, datasetNeedsConversion, Some("the upload failed")) dataSourceRepository.cleanUpDataSource(dataSourceId) for { _ <- result ?~> f"Error while $label" @@ -399,12 +401,12 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, _ <- bool2Fox(list.forall(identity)) } yield () - private def dataSourceDirFor(dataSourceId: LegacyDataSourceId, datasetNeedsConversion: Boolean): Path = { + private def dataSourceDirFor(dataSourceId: DataSourceId, datasetNeedsConversion: Boolean): Path = { val dataSourceDir = if (datasetNeedsConversion) - dataBaseDir.resolve(dataSourceId.team).resolve(forConversionDir).resolve(dataSourceId.name) + dataBaseDir.resolve(dataSourceId.organizationId).resolve(forConversionDir).resolve(dataSourceId.path) else - dataBaseDir.resolve(dataSourceId.team).resolve(dataSourceId.name) + dataBaseDir.resolve(dataSourceId.organizationId).resolve(dataSourceId.path) dataSourceDir } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/AgglomerateFileCache.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/AgglomerateFileCache.scala index c1e5d056d98..7939bbd5dd6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/AgglomerateFileCache.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/AgglomerateFileCache.scala @@ -21,14 +21,14 @@ case class CachedAgglomerateFile(reader: IHDF5Reader, case class AgglomerateFileKey( organizationId: String, - datasetName: String, + datasetPath: String, layerName: String, mappingName: String ) { def path(dataBaseDir: Path, agglomerateDir: String, agglomerateFileExtension: String): Path = dataBaseDir .resolve(organizationId) - 
.resolve(datasetName) + .resolve(datasetPath) .resolve(layerName) .resolve(agglomerateDir) .resolve(s"$mappingName.$agglomerateFileExtension") @@ -36,8 +36,8 @@ case class AgglomerateFileKey( object AgglomerateFileKey { def fromDataRequest(dataRequest: DataServiceDataRequest): AgglomerateFileKey = - AgglomerateFileKey(dataRequest.dataSource.id.team, - dataRequest.dataSource.id.name, + AgglomerateFileKey(dataRequest.dataSource.id.organizationId, + dataRequest.dataSource.id.path, dataRequest.dataLayer.name, dataRequest.settings.appliedAgglomerate.get) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala index c12d4f8c641..82b5f053eaa 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/BucketProviderCache.scala @@ -2,6 +2,6 @@ package com.scalableminds.webknossos.datastore.storage import com.scalableminds.util.cache.LRUConcurrentCache import com.scalableminds.webknossos.datastore.dataformats.BucketProvider -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId -class BucketProviderCache(val maxEntries: Int) extends LRUConcurrentCache[(LegacyDataSourceId, String), BucketProvider] +class BucketProviderCache(val maxEntries: Int) extends LRUConcurrentCache[(DataSourceId, String), BucketProvider] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/ParsedMappingCache.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/ParsedMappingCache.scala index d50617ff96e..a1196eac811 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/ParsedMappingCache.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/ParsedMappingCache.scala @@ -19,8 +19,8 @@ case class CachedMapping( object CachedMapping { def fromMappingRequest(mappingRequest: DataServiceMappingRequest): CachedMapping = - storage.CachedMapping(mappingRequest.dataSource.id.team, - mappingRequest.dataSource.id.name, + storage.CachedMapping(mappingRequest.dataSource.id.organizationId, + mappingRequest.dataSource.id.path, mappingRequest.dataLayer.name, mappingRequest.mapping) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala index 4f2630f8965..e305821d28c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.tools.Fox.box2Fox import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.datavault.VaultPath -import com.scalableminds.webknossos.datastore.models.datasource.LegacyDataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services.DSRemoteWebknossosClient import net.liftweb.common.Box import net.liftweb.common.Box.tryo @@ -21,14 
+21,14 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote dataStoreConfig: DataStoreConfig, dataVaultService: DataVaultService) { - def vaultPathFor(baseDir: Path, datasetId: LegacyDataSourceId, layerName: String, magLocator: MagLocator)( + def vaultPathFor(baseDir: Path, datasetId: DataSourceId, layerName: String, magLocator: MagLocator)( implicit ec: ExecutionContext): Fox[VaultPath] = for { remoteSourceDescriptor <- remoteSourceDescriptorFor(baseDir, datasetId, layerName, magLocator) vaultPath <- dataVaultService.getVaultPath(remoteSourceDescriptor) } yield vaultPath - def removeVaultFromCache(baseDir: Path, datasetId: LegacyDataSourceId, layerName: String, magLocator: MagLocator)( + def removeVaultFromCache(baseDir: Path, datasetId: DataSourceId, layerName: String, magLocator: MagLocator)( implicit ec: ExecutionContext): Fox[Unit] = for { remoteSource <- remoteSourceDescriptorFor(baseDir, datasetId, layerName, magLocator) @@ -37,7 +37,7 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote private def remoteSourceDescriptorFor( baseDir: Path, - datasetId: LegacyDataSourceId, + datasetId: DataSourceId, layerName: String, magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[RemoteSourceDescriptor] = for { @@ -47,10 +47,10 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote } yield remoteSource private def uriForMagLocator(baseDir: Path, - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, layerName: String, magLocator: MagLocator): Box[URI] = tryo { - val localDatasetDir = baseDir.resolve(dataSourceId.team).resolve(dataSourceId.name) + val localDatasetDir = baseDir.resolve(dataSourceId.organizationId).resolve(dataSourceId.path) val localLayerDir = localDatasetDir.resolve(layerName) magLocator.path match { case Some(magLocatorPath) => diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index 996d563d6f3..56b08c5ab04 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -5,29 +5,29 @@ GET /health @com.scalableminds.webknossos.datastore.controllers.Application.health # Read image data -POST /datasets/:organizationId/:datasetIdOrName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(token: Option[String], organizationId: String, datasetIdOrName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) +GET 
/datasets/:organizationId/:datasetPath/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) # Knossos compatible routes -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mag:resolution/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, resolution: Int, x: Int, y: Int, z: Int, cubeSize: Int) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/mag:resolution/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, resolution: Int, x: Int, y: Int, z: Int, cubeSize: Int) # Zarr2 compatible routes -GET /zarr/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetName: String, dataLayerName="") -GET /zarr/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 2) -GET 
/zarr/:organizationId/:datasetName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /zarr/:organizationId/:datasetName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String) -GET /zarr/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr/:organizationId/:datasetPath @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetPath: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetPath/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetPath: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetPath/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetPath: String, dataLayerName="") +GET /zarr/:organizationId/:datasetPath/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetPath: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetPath/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetPath/:dataLayerName/ 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetPath/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +GET /zarr/:organizationId/:datasetPath/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +GET /zarr/:organizationId/:datasetPath/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetPath/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /zarr/:organizationId/:datasetPath/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mag: String) +GET /zarr/:organizationId/:datasetPath/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mag: String, coordinates: String) GET /annotations/zarr/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 2) @@ -43,63 +43,63 @@ GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Zarr3 compatible routes -GET /zarr3_experimental/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String) -GET /zarr3_experimental/:organizationId/:datasetName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mag: String, coordinates: String) - -GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) -GET 
/annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String) -GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr3_experimental/:organizationId/:datasetPath @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetPath: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetPath/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceFolderContents(token: Option[String], organizationId: String, datasetPath: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetPath/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(token: Option[String], organizationId: String, datasetPath: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetPath/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetPath/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerFolderContents(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetPath/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +GET /zarr3_experimental/:organizationId/:datasetPath/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetPath/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagFolderContents(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:organizationId/:datasetPath/:dataLayerName/:mag/zarr.json 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mag: String) +GET /zarr3_experimental/:organizationId/:datasetPath/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mag: String, coordinates: String) + +GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonWithAnnotationPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataLayerMagFolderContentsPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.zarrJsonPrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String) +GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(token: Option[String], accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Segmentation mappings -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -GET 
/datasets/:organizationId/:datasetName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) # Agglomerate files -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForAllSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForAllSegmentIds(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, mappingName: String, segmentId: Long) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mappingName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForAllSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForAllSegmentIds(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, mappingName: String, segmentId: Long) # Mesh files -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, 
targetMappingName: Option[String], editableMappingTracingId: Option[String]) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) # Connectome files -GET /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/types @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String, direction: String) -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/connectomes/synapses/types @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, direction: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) # Ad-Hoc Meshing -POST /datasets/:organizationId/:datasetName/layers/:dataLayerName/adHocMesh 
@com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(token: Option[String], organizationId: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) # Segment-Index files -GET /datasets/:organizationId/:dataSetName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String, segmentId: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) -POST /datasets/:organizationId/:dataSetName/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(token: Option[String], organizationId: String, dataSetName: String, dataLayerName: String) +GET /datasets/:organizationId/:datasetPath/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String, segmentId: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) +POST /datasets/:organizationId/:datasetPath/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(token: Option[String], organizationId: String, datasetPath: String, dataLayerName: String) # DataSource management GET /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.testChunk(token: Option[String], resumableChunkNumber: Int, resumableIdentifier: String) @@ -109,18 +109,18 @@ POST /datasets/reserveUpload POST /datasets/reserveManualUpload 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.reserveManualUpload(token: Option[String]) POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload(token: Option[String]) POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload(token: Option[String]) -GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(token: Option[String], organizationId: String, datasetName: Option[String]) -GET /datasets/:organizationId/:datasetName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(token: Option[String], organizationId: String, datasetName: String) -POST /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(token: Option[String], organizationId: String, datasetName: String) -PUT /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationId: String, datasetName: String, folderId: Option[String]) -DELETE /datasets/:organizationId/:datasetName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationId: String, datasetName: String) +GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(token: Option[String], organizationId: String, datasetPath: Option[String]) +GET /datasets/:organizationId/:datasetPath/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(token: Option[String], organizationId: String, datasetPath: String) +POST /datasets/:organizationId/:datasetPath @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(token: Option[String], organizationId: String, datasetPath: String) +PUT /datasets/:organizationId/:datasetPath @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationId: String, datasetPath: String, folderId: Option[String]) +DELETE /datasets/:organizationId/:datasetPath/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationId: String, datasetPath: String) POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose(token: Option[String]) POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset(token: Option[String]) # Actions POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking(token: Option[String]) POST /triggers/createOrganizationDirectory @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(token: Option[String], organizationId: String) -POST /triggers/reload/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(token: Option[String], organizationId: String, datasetName: String, layerName: Option[String]) +POST /triggers/reload/:organizationId/:datasetPath @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(token: Option[String], organizationId:
String, datasetPath: String, layerName: Option[String]) # Exports GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(token: Option[String], jobId: String) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala index 564e51d671a..53e7c49bafd 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala @@ -189,8 +189,8 @@ class TSRemoteDatastoreClient @Inject()( private def voxelSizeForTracing(tracingId: String, token: Option[String]): Fox[VoxelSize] = for { dataSourceId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) - dataStoreUri <- dataStoreUriWithCache(dataSourceId.team, dataSourceId.name) - result <- rpc(s"$dataStoreUri/data/datasets/${dataSourceId.team}/${dataSourceId.name}/readInboxDataSource") + dataStoreUri <- dataStoreUriWithCache(dataSourceId.organizationId, dataSourceId.path) + result <- rpc(s"$dataStoreUri/data/datasets/${dataSourceId.organizationId}/${dataSourceId.path}/readInboxDataSource") .addQueryStringOptional("token", token) .getWithJsonResponse[InboxDataSource] scale <- result.voxelSizeOpt ?~> "could not determine voxel size of dataset" @@ -198,13 +198,13 @@ class TSRemoteDatastoreClient @Inject()( private def getRemoteLayerUri(remoteLayer: RemoteFallbackLayer): Fox[String] = for { - datastoreUri <- dataStoreUriWithCache(remoteLayer.organizationId, remoteLayer.datasetName) + datastoreUri <- dataStoreUriWithCache(remoteLayer.organizationId, remoteLayer.datasetPath) } yield - s"$datastoreUri/data/datasets/${remoteLayer.organizationId}/${remoteLayer.datasetName}/layers/${remoteLayer.layerName}" + s"$datastoreUri/data/datasets/${remoteLayer.organizationId}/${remoteLayer.datasetPath}/layers/${remoteLayer.layerName}" - private def dataStoreUriWithCache(organizationId: String, datasetName: String): Fox[String] = + private def dataStoreUriWithCache(organizationId: String, datasetPath: String): Fox[String] = dataStoreUriCache.getOrLoad( - (organizationId, datasetName), + (organizationId, datasetPath), keyTuple => remoteWebknossosClient.getDataStoreUriForDataSource(keyTuple._1, keyTuple._2)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index d7cb1de801c..4f3780a09c3 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -4,7 +4,7 @@ import com.google.inject.Inject import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.models.datasource.{LegacyDataSourceId, DataSourceLike} +import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{ AccessTokenService, @@ -41,7 +41,7 @@ class TSRemoteWebknossosClient @Inject()( private val webknossosUri: String = config.Tracingstore.WebKnossos.uri - private lazy val 
dataSourceIdByTracingIdCache: AlfuCache[String, LegacyDataSourceId] = AlfuCache() + private lazy val dataSourceIdByTracingIdCache: AlfuCache[String, DataSourceId] = AlfuCache() def reportTracingUpdates(tracingUpdatesReport: TracingUpdatesReport): Fox[WSResponse] = rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") @@ -55,21 +55,21 @@ class TSRemoteWebknossosClient @Inject()( .addQueryString("key" -> tracingStoreKey) .getWithJsonResponse[DataSourceLike] - def getDataStoreUriForDataSource(organizationId: String, datasetName: String): Fox[String] = - rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataStoreUri/$datasetName") + def getDataStoreUriForDataSource(organizationId: String, datasetPath: String): Fox[String] = + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataStoreUri/$datasetPath") .addQueryString("organizationId" -> organizationId) .addQueryString("key" -> tracingStoreKey) .silent .getWithJsonResponse[String] - def getDataSourceIdForTracing(tracingId: String)(implicit ec: ExecutionContext): Fox[LegacyDataSourceId] = + def getDataSourceIdForTracing(tracingId: String)(implicit ec: ExecutionContext): Fox[DataSourceId] = dataSourceIdByTracingIdCache.getOrLoad( tracingId, tracingId => rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSourceId") .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) - .getWithJsonResponse[LegacyDataSourceId] + .getWithJsonResponse[DataSourceId] ) override def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala index e6f9464ee81..6594fb4bd50 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala @@ -7,20 +7,20 @@ import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.WebknossosDataRequest -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, LegacyDataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceId} import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.FallbackDataKey import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import scala.concurrent.ExecutionContext case class RemoteFallbackLayer(organizationId: String, - datasetName: String, + datasetPath: String, layerName: String, elementClass: ElementClassProto) object RemoteFallbackLayer extends ProtoGeometryImplicits { - def fromDataLayerAndDataSource(dataLayer: DataLayerLike, dataSource: LegacyDataSourceId): RemoteFallbackLayer = - RemoteFallbackLayer(dataSource.team, dataSource.name, dataLayer.name, dataLayer.elementClass) + def fromDataLayerAndDataSource(dataLayer: DataLayerLike, dataSource: DataSourceId): RemoteFallbackLayer = + RemoteFallbackLayer(dataSource.organizationId, dataSource.path, dataLayer.name, dataLayer.elementClass) } trait FallbackDataHelper { def remoteDatastoreClient: 
TSRemoteDatastoreClient @@ -34,7 +34,7 @@ trait FallbackDataHelper { for { layerName <- tracing.fallbackLayer.toFox ?~> "This feature is only defined on volume annotations with fallback segmentation layer." datasetId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) - } yield RemoteFallbackLayer(datasetId.team, datasetId.name, layerName, tracing.elementClass) + } yield RemoteFallbackLayer(datasetId.organizationId, datasetId.path, layerName, tracing.elementClass) def getFallbackDataFromDatastore( remoteFallbackLayer: RemoteFallbackLayer, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index fa4cfbb83a5..47965cee726 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -14,7 +14,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ CoordinateTransformation, DataFormat, DataLayer, - LegacyDataSourceId, + DataSourceId, ElementClass, SegmentationLayer } @@ -86,7 +86,7 @@ case class EditableMappingLayer(name: String, override def lengthOfUnderlyingCubes(resolution: Vec3Int): Int = DataLayer.bucketLength override def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider = new EditableMappingBucketProvider(layer = this) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index cd83ccee97e..94b935a8828 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -502,7 +502,8 @@ class EditableMappingService @Inject()( )) val skeleton = SkeletonTracingDefaults.createInstance.copy( - datasetName = remoteFallbackLayer.datasetName, + datasetName = remoteFallbackLayer.datasetPath, // TODO: Open question: How do we handle this? + // Migrate the name to datasetPath or not? Might get tricky with the fossilDB. 
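// --- Editor's aside (illustrative sketch, not part of this patch) -------------------------------
// The TODO above asks whether SkeletonTracing entries already stored in FossilDB should be
// migrated from dataset name to dataset path. One low-risk option, assuming old entries were
// written while name and path were still identical, is to leave FossilDB untouched: keep writing
// into the existing proto field (which now carries the path) and normalize at read time on the
// webknossos side, e.g.:
//
//   // hypothetical helper; findOneByNameAndOrganization is the legacy lookup this series replaces
//   private def resolveDatasetForStoredTracing(storedValue: String, organizationId: String)(
//       implicit ctx: DBAccessContext): Fox[Dataset] =
//     datasetDAO
//       .findOneByPathAndOrganization(storedValue, organizationId)
//       .orElse(datasetDAO.findOneByNameAndOrganization(storedValue, organizationId))
//
// Whether such a fallback is acceptable depends on how duplicate dataset names are resolved later
// in this series; treat this as a sketch of one option, not the chosen approach.
// -------------------------------------------------------------------------------------------------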
trees = trees ) skeleton.toByteArray diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index cf15645b63f..f8d620e1405 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -108,7 +108,7 @@ case class VolumeTracingLayer( new VolumeTracingBucketProvider(this) override def bucketProvider(remoteSourceDescriptorServiceOpt: Option[RemoteSourceDescriptorService], - dataSourceId: LegacyDataSourceId, + dataSourceId: DataSourceId, sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider = volumeBucketProvider diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala index 926b3f3afa1..f2baa1f9c80 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/Zarr3BucketStreamSink.scala @@ -17,7 +17,7 @@ import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.datasource.{ AdditionalAxis, DataLayer, - LegacyDataSourceId, + DataSourceId, GenericDataSource } import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition, VoxelSize} @@ -72,7 +72,7 @@ class Zarr3BucketStreamSink(val layer: VolumeTracingLayer, tracingHasFallbackLay MagLocator(mag = vec3IntToProto(mag), axisOrder = Some(AxisOrder.cAdditionalxyz(rank))) } GenericDataSource( - id = LegacyDataSourceId("", ""), + id = DataSourceId("", ""), dataLayers = List( Zarr3SegmentationLayer( defaultLayerName, From 0c111cb341f719c4e0a732add965163d8fa52d3d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 18 Sep 2024 18:08:17 +0200 Subject: [PATCH 005/129] WIP: Fix backend compilation --- app/controllers/AnnotationController.scala | 2 +- app/controllers/AnnotationIOController.scala | 3 +- app/controllers/ConfigurationController.scala | 1 - app/controllers/FolderController.scala | 3 +- app/controllers/InitialDataController.scala | 3 +- app/controllers/JobController.scala | 16 ++++---- app/controllers/LegacyApiController.scala | 3 +- app/controllers/ShortLinkController.scala | 3 +- app/controllers/UserTokenController.scala | 3 +- app/controllers/VoxelyticsController.scala | 3 +- .../WKRemoteDataStoreController.scala | 13 +++--- .../WKRemoteWorkerController.scala | 3 +- app/models/analytics/AnalyticsService.scala | 3 +- .../annotation/AnnotationMutexService.scala | 3 +- app/models/annotation/AnnotationService.scala | 2 +- app/models/dataset/DataStore.scala | 3 +- app/models/dataset/Dataset.scala | 8 ---- app/models/dataset/DatasetService.scala | 5 ++- .../explore/WKExploreRemoteLayerService.scala | 1 - app/models/job/JobService.scala | 3 +- app/models/job/Worker.scala | 3 +- .../organization/OrganizationService.scala | 3 +- app/models/task/TaskService.scala | 4 +- .../user/EmailVerificationService.scala | 3 +- app/models/user/Invite.scala | 5 ++- app/models/user/UserService.scala | 2 +- 
app/models/user/time/TimeSpanService.scala | 3 +- app/opengraph/OpenGraphService.scala | 3 +- ...ossosBearerTokenAuthenticatorService.scala | 9 ++--- conf/webknossos.latest.routes | 2 +- .../controllers/DataSourceController.scala | 7 ++-- .../models/datasource/DataSource.scala | 10 ----- .../webknossos/datastore/rpc/RPCRequest.scala | 1 - .../services/AgglomerateService.scala | 15 ++++--- .../services/BinaryDataService.scala | 2 +- .../services/DSRemoteWebknossosClient.scala | 17 ++------ .../services/DatasetIdRepository.scala | 40 ------------------- .../datastore/services/MeshFileService.scala | 2 +- .../services/uploading/ComposeService.scala | 16 ++++---- ....scalableminds.webknossos.datastore.routes | 2 +- 40 files changed, 89 insertions(+), 144 deletions(-) delete mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index c97b84a08c7..4b4d3558a0c 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -31,7 +31,7 @@ import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{URLSharing, UserAwareRequestLogging, WkEnv} import telemetry.SlackNotificationService -import utils.{ObjectId, WkConf} +import utils.{WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 46936f0d116..fb021ede853 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -9,6 +9,7 @@ import org.apache.pekko.stream.Materializer import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.io.ZipIO +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} @@ -52,7 +53,7 @@ import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData} import security.WkEnv -import utils.{ObjectId, WkConf} +import utils.{WkConf} import scala.concurrent.{ExecutionContext, Future} diff --git a/app/controllers/ConfigurationController.scala b/app/controllers/ConfigurationController.scala index 74dae0b3a0a..315bce1d1fb 100755 --- a/app/controllers/ConfigurationController.scala +++ b/app/controllers/ConfigurationController.scala @@ -3,7 +3,6 @@ package controllers import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.GlobalAccessContext import com.scalableminds.util.requestparsing.ObjectId -import com.scalableminds.util.tools.Fox import javax.inject.Inject import models.dataset.{DatasetDAO, DatasetService} diff --git a/app/controllers/FolderController.scala b/app/controllers/FolderController.scala index 5a848239714..8678d2978a4 100644 --- a/app/controllers/FolderController.scala +++ b/app/controllers/FolderController.scala @@ -1,5 +1,6 @@ package controllers +import com.scalableminds.util.requestparsing.ObjectId import play.silhouette.api.Silhouette import com.scalableminds.util.tools.{Fox, FoxImplicits} import 
models.dataset.DatasetDAO @@ -10,7 +11,7 @@ import models.user.UserService import play.api.libs.json.{JsArray, Json} import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.WkEnv -import utils.{MetadataAssertions, ObjectId} +import utils.{MetadataAssertions} import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/InitialDataController.scala b/app/controllers/InitialDataController.scala index d2c736a0648..6aa111776b2 100644 --- a/app/controllers/InitialDataController.scala +++ b/app/controllers/InitialDataController.scala @@ -2,6 +2,7 @@ package controllers import play.silhouette.api.{LoginInfo, Silhouette} import com.scalableminds.util.accesscontext.GlobalAccessContext +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging @@ -14,7 +15,7 @@ import models.team._ import models.user._ import net.liftweb.common.{Box, Full} import play.api.libs.json.{JsArray, Json} -import utils.{ObjectId, StoreModules, WkConf} +import utils.{StoreModules, WkConf} import javax.inject.Inject import models.organization.{Organization, OrganizationDAO, OrganizationService} diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala index 84cfa5bf4f2..c00f57adc0f 100644 --- a/app/controllers/JobController.scala +++ b/app/controllers/JobController.scala @@ -164,7 +164,7 @@ class JobController @Inject()( } yield Ok(js) } - def runComputeSegmentIndexFileJob(datasetId: String, layerName: String, ): Action[AnyContent] = + def runComputeSegmentIndexFileJob(datasetId: String, layerName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => for { parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND @@ -245,19 +245,19 @@ class JobController @Inject()( } } - def runInferMitochondriaJob(organizationId: String, - datasetNameAndId: String, + def runInferMitochondriaJob(datasetId: String, layerName: String, bbox: String, newDatasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { - organization <- organizationDAO.findOne(organizationId) ?~> Messages("organization.notFound", organizationId) - _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferMitochondria.notAllowed.organization" ~> FORBIDDEN - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organization._id) ?~> Messages( + parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages( "dataset.notFound", - datasetNameAndId) ~> NOT_FOUND + datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", dataset._organization) + _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferMitochondria.notAllowed.organization" ~> FORBIDDEN _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) multiUser <- multiUserDAO.findOne(request.identity._multiUser) @@ -265,7 +265,7 @@ class JobController @Inject()( _ <- Fox.runIf(!multiUser.isSuperUser)(jobService.assertBoundingBoxLimits(bbox, None)) command = JobCommand.infer_mitochondria commandArgs = Json.obj( - "organization_name" -> organizationId, + "organization_name" -> 
dataset._organization, "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset "new_dataset_name" -> newDatasetName, diff --git a/app/controllers/LegacyApiController.scala b/app/controllers/LegacyApiController.scala index 2c04f37e160..59033ee7306 100644 --- a/app/controllers/LegacyApiController.scala +++ b/app/controllers/LegacyApiController.scala @@ -270,7 +270,8 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, sil.SecuredAction.async(validateJson[LegacyCreateExplorationalParameters]) { implicit request => for { _ <- Fox.successful(logVersioned(request)) - result <- annotationController.createExplorational(organizationName, datasetName)( + dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationName) + result <- annotationController.createExplorational(dataset._id.toString)( request.withBody(replaceCreateExplorationalParameters(request))) adaptedResult <- replaceInResult(replaceAnnotationLayers)(result) } yield adaptedResult diff --git a/app/controllers/ShortLinkController.scala b/app/controllers/ShortLinkController.scala index 5c0194cc818..bacf8e6ecd8 100644 --- a/app/controllers/ShortLinkController.scala +++ b/app/controllers/ShortLinkController.scala @@ -1,12 +1,13 @@ package controllers +import com.scalableminds.util.requestparsing.ObjectId import play.silhouette.api.Silhouette import com.scalableminds.util.tools.FoxImplicits import models.shortlinks.{ShortLink, ShortLinkDAO} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import security.{RandomIDGenerator, WkEnv} -import utils.{ObjectId, WkConf} +import utils.{WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 5d5628a32a8..67ec8db2f55 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -2,6 +2,7 @@ package controllers import play.silhouette.api.Silhouette import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services.AccessMode.AccessMode @@ -23,7 +24,7 @@ import net.liftweb.common.{Box, Full} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers, Result} import security.{RandomIDGenerator, URLSharing, WkEnv, WkSilhouetteEnvironment} -import utils.{ObjectId, WkConf} +import utils.{WkConf} import scala.concurrent.ExecutionContext diff --git a/app/controllers/VoxelyticsController.scala b/app/controllers/VoxelyticsController.scala index 9cd27ab9230..b75762738f9 100644 --- a/app/controllers/VoxelyticsController.scala +++ b/app/controllers/VoxelyticsController.scala @@ -1,5 +1,6 @@ package controllers +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.organization.OrganizationDAO @@ -9,7 +10,7 @@ import play.api.mvc._ import play.silhouette.api.Silhouette import play.silhouette.api.actions.SecuredRequest import security.WkEnv -import utils.{ObjectId, WkConf} +import utils.{WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git 
a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 08fd203b44d..5f59a14eb7d 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.models.UnfinishedUpload -import com.scalableminds.webknossos.datastore.models.datasource.{DatasetIdWithPath, DataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId} import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource} import com.scalableminds.webknossos.datastore.services.DataStoreStatus import com.scalableminds.webknossos.datastore.services.uploading.{LinkedLayerIdentifier, ReserveUploadInformation} @@ -59,7 +59,6 @@ class WKRemoteDataStoreController @Inject()( val bearerTokenService: WebknossosBearerTokenAuthenticatorService = wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService - def reserveDatasetUpload(name: String, key: String, token: String): Action[ReserveUploadInformation] = Action.async(validateJson[ReserveUploadInformation]) { implicit request => dataStoreService.validateAccess(name, key) { dataStore => @@ -130,20 +129,20 @@ class WKRemoteDataStoreController @Inject()( def reportDatasetUpload(name: String, key: String, token: String, - datasetId: String, + datasetPath: String, + organizationId: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean): Action[AnyContent] = Action.async { implicit request => dataStoreService.validateAccess(name, key) { dataStore => for { - parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Could not parse dataset id" user <- bearerTokenService.userForToken(token) - dataset <- datasetDAO.findOne(parsedDatasetId)(GlobalAccessContext) ?~> Messages( + dataset <- datasetDAO.findOneByPathAndOrganization(datasetPath, organizationId)(GlobalAccessContext) ?~> Messages( "dataset.notFound", - datasetId) ~> NOT_FOUND + datasetPath) ~> NOT_FOUND _ <- Fox.runIf(!needsConversion && !viaAddRoute)(usedStorageService.refreshStorageReportForDataset(dataset)) - _ <- Fox.runIf(!needsConversion)(logUploadToSlack(user, parsedDatasetId, viaAddRoute)) + _ <- Fox.runIf(!needsConversion)(logUploadToSlack(user, dataset._id, viaAddRoute)) _ = analyticsService.track(UploadDatasetEvent(user, dataset, dataStore, datasetSizeBytes)) _ = if (!needsConversion) mailchimpClient.tagUser(user, MailchimpTag.HasUploadedOwnDataset) } yield Ok diff --git a/app/controllers/WKRemoteWorkerController.scala b/app/controllers/WKRemoteWorkerController.scala index 8535c6fdc1f..47f2eb9a825 100644 --- a/app/controllers/WKRemoteWorkerController.scala +++ b/app/controllers/WKRemoteWorkerController.scala @@ -1,6 +1,7 @@ package controllers import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox import models.aimodels.AiInferenceDAO import models.dataset.DatasetDAO @@ -12,7 +13,7 @@ import models.voxelytics.VoxelyticsDAO import net.liftweb.common.{Empty, Failure, Full} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} -import utils.{ObjectId, WkConf} +import utils.{WkConf} import scala.concurrent.ExecutionContext diff 
--git a/app/models/analytics/AnalyticsService.scala b/app/models/analytics/AnalyticsService.scala index 86b2cec45c1..8f4c5ad0817 100644 --- a/app/models/analytics/AnalyticsService.scala +++ b/app/models/analytics/AnalyticsService.scala @@ -1,6 +1,7 @@ package models.analytics import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox} import com.scalableminds.webknossos.datastore.rpc.RPC @@ -9,7 +10,7 @@ import models.user.{MultiUserDAO, UserDAO} import net.liftweb.common.Box.tryo import play.api.http.Status.UNAUTHORIZED import play.api.libs.json._ -import utils.{ObjectId, WkConf} +import utils.{WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/annotation/AnnotationMutexService.scala b/app/models/annotation/AnnotationMutexService.scala index 6495b1537cc..1a402ac0d46 100644 --- a/app/models/annotation/AnnotationMutexService.scala +++ b/app/models/annotation/AnnotationMutexService.scala @@ -2,6 +2,7 @@ package models.annotation import org.apache.pekko.actor.ActorSystem import com.scalableminds.util.accesscontext.GlobalAccessContext +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler @@ -11,7 +12,7 @@ import models.user.{UserDAO, UserService} import net.liftweb.common.Full import play.api.inject.ApplicationLifecycle import play.api.libs.json.{JsObject, Json} -import utils.{ObjectId, WkConf} +import utils.{WkConf} import utils.sql.{SimpleSQLDAO, SqlClient} import javax.inject.Inject diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index ae638ac2919..a3617354102 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -35,7 +35,7 @@ import net.liftweb.common.{Box, Full} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.json.{JsNull, JsObject, JsValue, Json} -import utils.{ObjectId, WkConf} +import utils.{WkConf} import java.io.{BufferedOutputStream, File, FileOutputStream} import javax.inject.Inject diff --git a/app/models/dataset/DataStore.scala b/app/models/dataset/DataStore.scala index 9f14f10ac7b..19982b6c23d 100644 --- a/app/models/dataset/DataStore.scala +++ b/app/models/dataset/DataStore.scala @@ -1,6 +1,7 @@ package models.dataset import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.schema.Tables._ import models.job.JobService @@ -12,7 +13,7 @@ import play.api.mvc.{Result, Results} import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep import utils.sql.{SQLDAO, SqlClient, SqlToken} -import utils.{ObjectId, WkConf} +import utils.{WkConf} import scala.concurrent.{ExecutionContext, Future} diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index ef8337391e7..c9a15e25566 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -408,14 +408,6 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA case (None, Some(datasetName)) => 
findOneByPathAndOrganization(datasetName, organizationId) } - private def getWhereClauseForDatasetIdOrName(datasetIdOrName: String): SqlToken = { - val (maybeId, maybeDatasetName) = getDatasetIdOrNameFromURIPath(datasetIdOrName) - maybeId match { - case Some(id) => q"_id = $id" - case None => q"name = $maybeDatasetName" - } - } - def findOneByPathAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = for { accessQuery <- readAccessQuery diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index f7edba98d5b..2cbe1d0f1c4 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -19,7 +19,7 @@ import models.folder.FolderDAO import models.organization.{Organization, OrganizationDAO} import models.team._ import models.user.{User, UserService} -import net.liftweb.common.{Box, Empty, Full} +import net.liftweb.common.{Box, Full} import play.api.libs.json.{JsObject, Json} import security.RandomIDGenerator import utils.WkConf @@ -113,7 +113,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, organization <- organizationDAO.findOne(owningOrganization) organizationRootFolder <- folderDAO.findOne(organization._rootFolder) datasetPath <- isNewDatasetName(datasetName, organization._id).map(if (_) datasetName else newId.toString) - dataSource.id.path = datasetPath // Sync path with dataSource + adjustedDataSourceId = dataSource.id.copy(path = datasetPath) // Sync path with dataSource + dataSource.id = dataSource.clone() dataset = Dataset( newId, dataStore.name, diff --git a/app/models/dataset/explore/WKExploreRemoteLayerService.scala b/app/models/dataset/explore/WKExploreRemoteLayerService.scala index 0c097da44ff..35c6f5363ba 100644 --- a/app/models/dataset/explore/WKExploreRemoteLayerService.scala +++ b/app/models/dataset/explore/WKExploreRemoteLayerService.scala @@ -111,7 +111,6 @@ class WKExploreRemoteLayerService @Inject()(credentialService: CredentialService organization <- organizationDAO.findOne(user._organization) dataStore <- dataStoreDAO.findOneWithUploadsAllowed _ <- datasetService.assertValidDatasetName(datasetName) - _ <- datasetService.assertNewDatasetName(datasetName, organization._id) ?~> "dataset.name.alreadyTaken" client = new WKRemoteDataStoreClient(dataStore, rpc) userToken <- bearerTokenService.createAndInitDataStoreTokenForUser(user) _ <- client.addDataSource(organization._id, datasetName, dataSource, folderId, userToken) diff --git a/app/models/job/JobService.scala b/app/models/job/JobService.scala index 4573a802eb0..84c82c30998 100644 --- a/app/models/job/JobService.scala +++ b/app/models/job/JobService.scala @@ -3,6 +3,7 @@ package models.job import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.mvc.Formatter +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging import mail.{DefaultMails, MailchimpClient, MailchimpTag, Send} @@ -15,7 +16,7 @@ import org.apache.pekko.actor.ActorSystem import play.api.libs.json.{JsObject, Json} import security.WkSilhouetteEnvironment import telemetry.SlackNotificationService -import utils.{ObjectId, WkConf} +import utils.{WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/job/Worker.scala b/app/models/job/Worker.scala index 
187c4ef6dd8..e5a0c25558f 100644 --- a/app/models/job/Worker.scala +++ b/app/models/job/Worker.scala @@ -3,6 +3,7 @@ package models.job import org.apache.pekko.actor.ActorSystem import com.scalableminds.util.accesscontext.GlobalAccessContext import com.scalableminds.util.mvc.Formatter +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler @@ -15,7 +16,7 @@ import play.api.libs.json.{JsObject, Json} import slick.lifted.Rep import telemetry.SlackNotificationService import utils.sql.{SQLDAO, SqlClient} -import utils.{ObjectId, WkConf} +import utils.{WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/organization/OrganizationService.scala b/app/models/organization/OrganizationService.scala index 6e105e18c2c..0e665d64799 100644 --- a/app/models/organization/OrganizationService.scala +++ b/app/models/organization/OrganizationService.scala @@ -1,6 +1,7 @@ package models.organization import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.rpc.RPC import com.typesafe.scalalogging.LazyLogging @@ -11,7 +12,7 @@ import models.folder.{Folder, FolderDAO, FolderService} import models.team.{PricingPlan, Team, TeamDAO} import models.user.{Invite, MultiUserDAO, User, UserDAO, UserService} import play.api.libs.json.{JsArray, JsObject, Json} -import utils.{ObjectId, WkConf} +import utils.{WkConf} import scala.concurrent.{ExecutionContext, Future} diff --git a/app/models/task/TaskService.scala b/app/models/task/TaskService.scala index 48a9d8852a7..12946bf7e41 100644 --- a/app/models/task/TaskService.scala +++ b/app/models/task/TaskService.scala @@ -1,7 +1,9 @@ package models.task import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits} + import javax.inject.Inject import models.annotation.{Annotation, AnnotationDAO, AnnotationType} import models.dataset.DatasetDAO @@ -10,7 +12,7 @@ import models.team.TeamDAO import models.user.{User, UserService} import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json.{JsObject, Json} -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.ExecutionContext diff --git a/app/models/user/EmailVerificationService.scala b/app/models/user/EmailVerificationService.scala index cd04217185c..344386dd596 100644 --- a/app/models/user/EmailVerificationService.scala +++ b/app/models/user/EmailVerificationService.scala @@ -2,12 +2,13 @@ package models.user import org.apache.pekko.actor.ActorSystem import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.typesafe.scalalogging.LazyLogging import mail.{DefaultMails, Send} import security.RandomIDGenerator -import utils.{ObjectId, WkConf} +import utils.{WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext diff --git a/app/models/user/Invite.scala b/app/models/user/Invite.scala index 84ebda8ff4d..66e546e9c2e 100644 --- a/app/models/user/Invite.scala +++ b/app/models/user/Invite.scala @@ -2,6 +2,7 @@ package 
models.user import org.apache.pekko.actor.ActorSystem import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.schema.Tables._ @@ -13,8 +14,8 @@ import models.organization.OrganizationDAO import security.RandomIDGenerator import slick.jdbc.PostgresProfile.api._ import slick.lifted.Rep -import utils.sql.{SqlClient, SQLDAO} -import utils.{ObjectId, WkConf} +import utils.sql.{SQLDAO, SqlClient} +import utils.WkConf import scala.concurrent.{ExecutionContext, Future} diff --git a/app/models/user/UserService.scala b/app/models/user/UserService.scala index 8e959ee6ce5..18e632c3d7f 100755 --- a/app/models/user/UserService.scala +++ b/app/models/user/UserService.scala @@ -24,7 +24,7 @@ import play.silhouette.api.util.PasswordInfo import play.silhouette.impl.providers.CredentialsProvider import security.{PasswordHasher, TokenDAO} import utils.sql.SqlEscaping -import utils.{ObjectId, WkConf} +import utils.{WkConf} import javax.inject.Inject import scala.concurrent.{ExecutionContext, Future} diff --git a/app/models/user/time/TimeSpanService.scala b/app/models/user/time/TimeSpanService.scala index 8e191f2d3c4..84add24bcb3 100644 --- a/app/models/user/time/TimeSpanService.scala +++ b/app/models/user/time/TimeSpanService.scala @@ -1,6 +1,7 @@ package models.user.time import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.typesafe.scalalogging.LazyLogging @@ -13,7 +14,7 @@ import models.task.TaskDAO import models.user.{User, UserService} import net.liftweb.common.{Box, Full} import org.apache.pekko.actor.{ActorSelection, ActorSystem} -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.collection.mutable import scala.concurrent.ExecutionContext diff --git a/app/opengraph/OpenGraphService.scala b/app/opengraph/OpenGraphService.scala index 19608595fa5..c6c0b900b68 100644 --- a/app/opengraph/OpenGraphService.scala +++ b/app/opengraph/OpenGraphService.scala @@ -4,6 +4,7 @@ import org.apache.pekko.http.scaladsl.model.Uri import com.google.inject.Inject import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.enumeration.ExtendedEnumeration +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayerLike} import models.annotation.AnnotationDAO @@ -13,7 +14,7 @@ import models.shortlinks.ShortLinkDAO import net.liftweb.common.Box.tryo import net.liftweb.common.Full import security.URLSharing -import utils.{ObjectId, WkConf} +import utils.{WkConf} import scala.concurrent.{ExecutionContext, Future} diff --git a/app/security/WebknossosBearerTokenAuthenticatorService.scala b/app/security/WebknossosBearerTokenAuthenticatorService.scala index 2c31b9a4aed..777fe35860b 100644 --- a/app/security/WebknossosBearerTokenAuthenticatorService.scala +++ b/app/security/WebknossosBearerTokenAuthenticatorService.scala @@ -5,17 +5,14 @@ import play.silhouette.api.exceptions.{AuthenticatorCreationException, Authentic import play.silhouette.api.services.AuthenticatorService.{CreateError, InitError} import play.silhouette.api.util.{Clock, IDGenerator} import 
play.silhouette.impl.authenticators.BearerTokenAuthenticatorService.ID -import play.silhouette.impl.authenticators.{ - BearerTokenAuthenticator, - BearerTokenAuthenticatorService, - BearerTokenAuthenticatorSettings -} +import play.silhouette.impl.authenticators.{BearerTokenAuthenticator, BearerTokenAuthenticatorService, BearerTokenAuthenticatorSettings} import com.scalableminds.util.accesscontext.GlobalAccessContext import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.user.{User, UserService} import TokenType.TokenType +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant -import utils.{ObjectId, WkConf} +import utils.WkConf import scala.concurrent.duration._ import scala.concurrent.{ExecutionContext, Future} diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index 07590d89c08..af9b4e17572 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -110,7 +110,7 @@ PUT /datastores/:name/datasources PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String) POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String) GET /datastores/:name/getUnfinishedUploadsForUser controllers.WKRemoteDataStoreController.getUnfinishedUploadsForUser(name: String, key: String, token: String, organizationName: String) -POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetId: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean, path: Option[String]) +POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetPath: String, organizationId: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean) POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String) GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: String) GET /datastores/:name/findCredential controllers.WKRemoteDataStoreController.findCredential(name: String, key: String, credentialId: String) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 2b8d59aa0a9..e5d6fb086ea 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -2,7 +2,6 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.geometry.Vec3Int -import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong import com.scalableminds.webknossos.datastore.explore.{ @@ -447,12 +446,12 @@ class DataSourceController @Inject()( ), urlOrHeaderToken(token, request) ) ?~> "dataset.upload.validation.failed" + datasourceId = DataSourceId(reservedInfo.path, organizationId) _ <- dataSourceService.updateDataSource( - request.body.copy(id = DataSourceId(reservedInfo.path, 
organizationId)), + request.body.copy(id = datasourceId), expectExisting = false) - parsedUploadId <- ObjectId.fromString(reservedInfo.uploadId) ?~> "reportUpload.failed" _ <- remoteWebknossosClient.reportUpload( - parsedUploadId, + datasourceId, 0L, needsConversion = false, viaAddRoute = true, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala index 385191a6305..5b35dccbee0 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala @@ -2,11 +2,9 @@ package com.scalableminds.webknossos.datastore.models import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.util.requestparsing.{DatasetURIParser, ObjectId} -import com.scalableminds.webknossos.datastore.datareaders.zarr.ZarrHeader import com.scalableminds.webknossos.datastore.helpers.JsonImplicits import com.scalableminds.webknossos.datastore.models.datasource.DatasetViewConfiguration.DatasetViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource.inbox.GenericInboxDataSource -import play.api.libs.json.Json.WithDefaultValues import play.api.libs.json._ package object datasource { @@ -48,14 +46,6 @@ package object datasource { } } - case class DatasetIdWithPath(id: ObjectId, path: String) { - override def toString: String = s"DatasetIdWithPath($id, $path)" - } - - object DatasetIdWithPath { - implicit val datasetIdWithPathFormat: Format[DatasetIdWithPath] = Json.format[DatasetIdWithPath] - } - object DatasetViewConfiguration { type DatasetViewConfiguration = Map[String, JsValue] implicit val jsonFormat: Format[DatasetViewConfiguration] = Format.of[DatasetViewConfiguration] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index 8d457a19963..7f94e778e96 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -2,7 +2,6 @@ package com.scalableminds.webknossos.datastore.rpc import com.scalableminds.util.mvc.MimeTypes import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.services.uploading.ReserveUploadInformation import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Failure, Full} import play.api.http.{HeaderNames, Status} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala index 167a2cc610c..c136552a635 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala @@ -25,16 +25,15 @@ import scala.collection.compat.immutable.ArraySeq class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverter with LazyLogging { private val agglomerateDir = "agglomerates" private val agglomerateFileExtension = "hdf5" - private val datasetName = "/segment_to_agglomerate" + private val datasetName = "/segment_to_agglomerate" // TODO: How 
does this work? There is no assignment to this val anywhere; it is a plain constant. private val dataBaseDir = Paths.get(config.Datastore.baseFolder) private val cumsumFileName = "cumsum.json" - // TODO: Also rename datasetName to datasetPath lazy val agglomerateFileCache = new AgglomerateFileCache(config.Datastore.Cache.AgglomerateFile.maxFileHandleEntries) - def exploreAgglomerates(organizationId: String, datasetName: String, dataLayerName: String): Set[String] = { - val layerDir = dataBaseDir.resolve(organizationId).resolve(datasetName).resolve(dataLayerName) + def exploreAgglomerates(organizationId: String, datasetPath: String, dataLayerName: String): Set[String] = { + val layerDir = dataBaseDir.resolve(organizationId).resolve(datasetPath).resolve(dataLayerName) PathUtils .listFiles(layerDir.resolve(agglomerateDir), silent = true, @@ -102,7 +101,7 @@ class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverte // We don't need to differentiate between the data types because the underlying library does the conversion for us reader.uint64().readArrayBlockWithOffset(hdf5Dataset, blockSize.toInt, segmentId) - // This uses the datasetName, which allows us to call it on the same hdf file in parallel. + // This uses the datasetPath, which allows us to call it on the same hdf file in parallel. private def readHDF(reader: IHDF5Reader, segmentId: Long, blockSize: Long) = reader.uint64().readArrayBlockWithOffset(datasetName, blockSize.toInt, segmentId) @@ -117,7 +116,7 @@ class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverte val cumsumPath = dataBaseDir .resolve(agglomerateFileKey.organizationId) - .resolve(agglomerateFileKey.datasetName) + .resolve(agglomerateFileKey.datasetPath) .resolve(agglomerateFileKey.layerName) .resolve(agglomerateDir) .resolve(cumsumFileName) @@ -210,7 +209,7 @@ class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverte )) val skeleton = SkeletonTracingDefaults.createInstance.copy( - datasetName = datasetName, + datasetName = datasetPath, trees = trees ) val duration = System.nanoTime() - startTime @@ -237,7 +236,7 @@ class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverte val hdfFile = dataBaseDir .resolve(agglomerateFileKey.organizationId) - .resolve(agglomerateFileKey.datasetName) + .resolve(agglomerateFileKey.datasetPath) .resolve(agglomerateFileKey.layerName) .resolve(agglomerateDir) .resolve(s"${agglomerateFileKey.mappingName}.$agglomerateFileExtension") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index 1a45347dd2c..c53b3703970 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -178,7 +178,7 @@ class BinaryDataService(val dataBaseDir: Path, val dataSourceId = DataSourceId(datasetName, organizationId) def agglomerateFileMatchPredicate(agglomerateKey: AgglomerateFileKey) = - agglomerateKey.datasetName == datasetName && agglomerateKey.organizationId == organizationId && layerName.forall( + agglomerateKey.datasetPath == datasetName && agglomerateKey.organizationId == organizationId && layerName.forall( _ == agglomerateKey.layerName) def bucketProviderPredicate(key: (DataSourceId, String)): Boolean = diff --git
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index 01e15723007..0186769071f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -4,14 +4,13 @@ import org.apache.pekko.actor.ActorSystem import com.google.inject.Inject import com.google.inject.name.Named import com.scalableminds.util.cache.AlfuCache -import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler import com.scalableminds.webknossos.datastore.models.UnfinishedUpload import com.scalableminds.webknossos.datastore.models.annotation.AnnotationSource -import com.scalableminds.webknossos.datastore.models.datasource.{DatasetIdWithPath, DataSourceId} +import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSourceLike import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.uploading.ReserveUploadInformation @@ -79,7 +78,7 @@ class DSRemoteWebknossosClient @Inject()( .getWithJsonResponse[List[UnfinishedUpload]] } yield unfinishedUploads - def reportUpload(datasetId: ObjectId, + def reportUpload(dataSourceId: DataSourceId, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean, @@ -87,7 +86,8 @@ class DSRemoteWebknossosClient @Inject()( for { _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reportDatasetUpload") .addQueryString("key" -> dataStoreKey) - .addQueryString("datasetId" -> datasetId.toString) + .addQueryString("datasetPath" -> dataSourceId.path) + .addQueryString("organizationId" -> dataSourceId.organizationId) .addQueryString("needsConversion" -> needsConversion.toString) .addQueryString("viaAddRoute" -> viaAddRoute.toString) .addQueryString("datasetSizeBytes" -> datasetSizeBytes.toString) @@ -166,13 +166,4 @@ class DSRemoteWebknossosClient @Inject()( .silent .getWithJsonResponse[DataVaultCredential] ) - - def resolveDatasetNameToId(organizationId: String, datasetName: String): Fox[DatasetIdWithPath] = - for { - datasetIdWithPath <- rpc( - s"$webknossosUri/api/datastores/$dataStoreName/:$organizationId/:$datasetName/getDatasetId") - .addQueryString("key" -> dataStoreKey) - .silent - .getWithJsonResponse[DatasetIdWithPath] - } yield datasetIdWithPath } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala deleted file mode 100644 index d57dffddd3c..00000000000 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetIdRepository.scala +++ /dev/null @@ -1,40 +0,0 @@ -package com.scalableminds.webknossos.datastore.services - -import com.google.inject.Inject -import com.google.inject.name.Named -import com.scalableminds.util.requestparsing.{DatasetURIParser, ObjectId} -import com.scalableminds.util.tools.{Fox, FoxImplicits} -import 
com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId} -import com.scalableminds.webknossos.datastore.storage.TemporaryStore -import com.typesafe.scalalogging.LazyLogging -import org.apache.pekko.actor.ActorSystem -import play.api.i18n.{Messages, MessagesProvider} - -import scala.concurrent.ExecutionContext - -/* This class is used to resolve legacy dataset addressing by caching a mapping from the datasource id - based on what is given by the URI path to a DatasetIdWithPath that contains the actual id of the dataset . */ -class DatasetIdRepository @Inject()( - remoteWebknossosClient: DSRemoteWebknossosClient, - @Named("webknossos-datastore") val system: ActorSystem -)(implicit ec: ExecutionContext) - extends TemporaryStore[DataSourceId, ObjectId](system) - with LazyLogging - with FoxImplicits - with DatasetURIParser { - - def getDatasetIdFromIdOrName(datasetIdOrName: String, organizationId: String): Fox[ObjectId] = { - val dataSourceId = DataSourceId(datasetIdOrName, organizationId) - find(dataSourceId) match { - case Some(datasetId) => Fox.successful(datasetId) - case None => - val (maybeId, _) = getDatasetIdOrNameFromURIPath(datasetIdOrName) - val resolvedId = maybeId match { - case Some(id) => Fox.successful(id) - case None => remoteWebknossosClient.resolveDatasetNameToId(organizationId, datasetIdOrName) - } - resolvedId.map(insert(dataSourceId, _)).flatMap(_ => resolvedId) - } - } -} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala index 7d00d0f0d25..18997921b4d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala @@ -265,7 +265,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC val meshFilePath = dataBaseDir .resolve(organizationId) - .resolve(datasetName) + .resolve(datasetPath) .resolve(dataLayerName) .resolve(meshesDir) .resolve(s"$meshFileName.$hdf5FileExtension") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala index 40d7bbdeb7f..ecb73ecafc4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala @@ -64,24 +64,25 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, val dataBaseDir: Path = datasetSymlinkService.dataBaseDir - private def uploadDirectory(organizationId: String, name: String): Path = - dataBaseDir.resolve(organizationId).resolve(name) + private def uploadDirectory(organizationId: String, datasetPath: String): Path = + dataBaseDir.resolve(organizationId).resolve(datasetPath) def composeDataset(composeRequest: ComposeRequest, userToken: Option[String]): Fox[DataSource] = for { _ <- dataSourceService.assertDataDirWritable(composeRequest.organizationId) reserveUploadInfo = ReserveUploadInformation("", composeRequest.newDatasetName, + "", // filled by core backend composeRequest.organizationId, 1, None, None, List(), 
Some(composeRequest.targetFolderId)) - _ <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "Failed to reserve upload." - directory = uploadDirectory(composeRequest.organizationId, composeRequest.newDatasetName) + reservedInfo <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "Failed to reserve upload." + directory = uploadDirectory(reservedInfo.organization, reservedInfo.path) _ = PathUtils.ensureDirectory(directory) - dataSource <- createDatasource(composeRequest, composeRequest.organizationId) + dataSource <- createDatasource(composeRequest, reservedInfo.path, reservedInfo.organization, directory) properties = Json.toJson(dataSource).toString().getBytes(StandardCharsets.UTF_8) _ = Files.write(directory.resolve(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON), properties) } yield dataSource @@ -139,12 +140,11 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, } } yield editedLayer - private def createDatasource(composeRequest: ComposeRequest, organizationId: String): Fox[DataSource] = { - val uploadDir = uploadDirectory(organizationId, composeRequest.newDatasetName) + private def createDatasource(composeRequest: ComposeRequest, datasetPath: String, organizationId: String, uploadDir: Path): Fox[DataSource] = { for { layers <- Fox.serialCombined(composeRequest.layers.toList)(getLayerFromComposeLayer(_, uploadDir)) dataSource = GenericDataSource( - DataSourceId(composeRequest.newDatasetName, organizationId), + DataSourceId(datasetPath, organizationId), layers, composeRequest.voxelSize, None diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index 56b08c5ab04..996859c11e1 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -112,7 +112,7 @@ POST /datasets/cancelUpload GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(token: Option[String], organizationId: String, datasetPath: Option[String]) GET /datasets/:organizationId/:datasetPath/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(token: Option[String], organizationId: String, datasetPath: String) POST /datasets/:organizationId/:datasetPath @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(token: Option[String], organizationId: String, datasetPath: String) -PUT /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationId: String, datasetPath: String, folderId: Option[String]) +PUT /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationId: String, datasetName: String, folderId: Option[String]) DELETE /datasets/:organizationId/:datasetPath/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationId: String, datasetPath: String) POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose(token: Option[String]) POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset(token: 
Option[String]) From d8ad983716cccb1238b15fc76dea88c3c03849b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 19 Sep 2024 10:17:05 +0200 Subject: [PATCH 006/129] Fix backend compilation --- app/models/dataset/Dataset.scala | 19 ------------------- app/models/dataset/DatasetService.scala | 15 +++++++-------- app/models/dataset/ThumbnailService.scala | 6 ------ .../models/datasource/DataSource.scala | 2 ++ .../models/datasource/InboxDataSource.scala | 3 +++ 5 files changed, 12 insertions(+), 33 deletions(-) diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index c9a15e25566..65b044cac20 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -401,13 +401,6 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA r <- rList.headOption } yield r - def findOneByIdOrNameAndOrganization(idAndName: String, organizationId: String)( - implicit ctx: DBAccessContext): Fox[Dataset] = - getDatasetIdOrNameFromURIPath(idAndName) match { - case (Some(validId), None) => findOneByIdAndOrganization(validId, organizationId) - case (None, Some(datasetName)) => findOneByPathAndOrganization(datasetName, organizationId) - } - def findOneByPathAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = for { accessQuery <- readAccessQuery @@ -420,18 +413,6 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA parsed <- parseFirst(r, s"$organizationId/$name") } yield parsed - private def findOneByIdAndOrganization(id: ObjectId, organizationId: String)( - implicit ctx: DBAccessContext): Fox[Dataset] = - for { - accessQuery <- readAccessQuery - r <- run(q"""SELECT $columns - FROM $existingCollectionName - WHERE _id = $id - AND _organization = $organizationId - AND $accessQuery""".as[DatasetsRow]) - parsed <- parseFirst(r, s"$organizationId/$id") - } yield parsed - def findAllByPathsAndOrganization(names: List[String], organizationId: String)( implicit ctx: DBAccessContext): Fox[List[Dataset]] = for { diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 2cbe1d0f1c4..cb349e3d4b9 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -113,8 +113,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, organization <- organizationDAO.findOne(owningOrganization) organizationRootFolder <- folderDAO.findOne(organization._rootFolder) datasetPath <- isNewDatasetName(datasetName, organization._id).map(if (_) datasetName else newId.toString) - adjustedDataSourceId = dataSource.id.copy(path = datasetPath) // Sync path with dataSource - dataSource.id = dataSource.clone() + newDataSource = dataSource.withUpdatedId(dataSource.id.copy(path = datasetPath)) // Sync path with dataSource dataset = Dataset( newId, dataStore.name, @@ -123,21 +122,21 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, None, organizationRootFolder._id, dataSourceHash, - dataSource.defaultViewConfiguration, + newDataSource.defaultViewConfiguration, adminViewConfiguration = None, description = None, path = datasetPath, isPublic = false, - isUsable = dataSource.isUsable, - name = dataSource.id.path, - voxelSize = dataSource.voxelSizeOpt, + isUsable = newDataSource.isUsable, + name = newDataSource.id.path, + voxelSize = newDataSource.voxelSizeOpt, sharingToken = None, - status = dataSource.statusOpt.getOrElse(""), + status = 
newDataSource.statusOpt.getOrElse(""), logoUrl = None, metadata = metadata ) _ <- datasetDAO.insertOne(dataset) - _ <- datasetDataLayerDAO.updateLayers(newId, dataSource) + _ <- datasetDataLayerDAO.updateLayers(newId, newDataSource) _ <- teamDAO.updateAllowedTeamsForDataset(newId, List()) } yield dataset } diff --git a/app/models/dataset/ThumbnailService.scala b/app/models/dataset/ThumbnailService.scala index 56884d725d0..254cb13e026 100644 --- a/app/models/dataset/ThumbnailService.scala +++ b/app/models/dataset/ThumbnailService.scala @@ -173,12 +173,6 @@ class ThumbnailCachingService @Inject()(datasetDAO: DatasetDAO, thumbnailDAO: Th } yield fromDbOrNew ) - def removeFromCache(organizationId: String, datasetNameAndId: String): Fox[Unit] = - for { - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetNameAndId, organizationId)(GlobalAccessContext) - _ <- removeFromCache(dataset._id) - } yield () - def removeFromCache(datasetId: ObjectId): Fox[Unit] = { inMemoryThumbnailCache.clear(keyTuple => keyTuple._1 == datasetId) thumbnailDAO.removeAllForDataset(datasetId) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala index 5b35dccbee0..9fb5634c1ef 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala @@ -79,6 +79,8 @@ package object datasource { def additionalAxesUnion: Option[Seq[AdditionalAxis]] = AdditionalAxis.merge(dataLayers.map(_.additionalAxes)) + def withUpdatedId(newId: DataSourceId): GenericDataSource[T] = copy(id = newId) + } object GenericDataSource { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala index 43d73851bb3..10e33493561 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/InboxDataSource.scala @@ -9,6 +9,7 @@ package object inbox { trait GenericInboxDataSource[+T <: DataLayerLike] { def id: DataSourceId + def withUpdatedId(newId: DataSourceId): GenericInboxDataSource[T] def toUsable: Option[GenericDataSource[T]] @@ -47,6 +48,8 @@ package object inbox { val statusOpt: Option[String] = Some(status) val defaultViewConfiguration: Option[DatasetViewConfiguration] = None + + def withUpdatedId(newId: DataSourceId): UnusableDataSource[T] = copy(id = newId) } object UnusableDataSource { From 10724e7a03e5c214705e58b0231030ec8f523549 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 19 Sep 2024 13:07:48 +0200 Subject: [PATCH 007/129] WIP: Adapt frontend to new api --- .../WKRemoteDataStoreController.scala | 3 +- frontend/javascripts/admin/admin_rest_api.ts | 65 +++++++------ frontend/javascripts/admin/api/mesh.ts | 4 +- .../04_configure_new_dataset.tsx | 95 ++++++++----------- .../dataset/composition_wizard/common.ts | 6 +- .../admin/dataset/dataset_add_remote_view.tsx | 12 +-- .../admin/dataset/dataset_add_view.tsx | 34 +++---- .../admin/dataset/dataset_upload_view.tsx | 18 ++-- .../dataset/dataset_settings_view.tsx | 2 +- .../segments_tab/segments_view_helper.tsx | 2 +- 
frontend/javascripts/types/api_flow_types.ts | 10 +- .../controllers/DataSourceController.scala | 15 +-- .../services/DSRemoteWebknossosClient.scala | 11 ++- .../services/uploading/ComposeService.scala | 32 ++++--- .../services/uploading/UploadService.scala | 1 + 15 files changed, 141 insertions(+), 169 deletions(-) diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 5f59a14eb7d..a57511c9f1f 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -82,6 +82,7 @@ class WKRemoteDataStoreController @Inject()( _ <- datasetService.addInitialTeams(dataset, uploadInfo.initialTeams, user)(AuthorizedAccessContext(user)) _ <- datasetService.addUploader(dataset, user._id)(AuthorizedAccessContext(user)) uploadInfo.path = dataset.path // Update path according to the newly created dataset. + uploadInfo.newDatasetId = dataset._id.toString // Update newDatasetId according to the newly created dataset. } yield Ok(Json.toJson(uploadInfo)) } } @@ -145,7 +146,7 @@ class WKRemoteDataStoreController @Inject()( _ <- Fox.runIf(!needsConversion)(logUploadToSlack(user, dataset._id, viaAddRoute)) _ = analyticsService.track(UploadDatasetEvent(user, dataset, dataStore, datasetSizeBytes)) _ = if (!needsConversion) mailchimpClient.tagUser(user, MailchimpTag.HasUploadedOwnDataset) - } yield Ok + } yield Ok(Json.obj("id" -> dataset._id)) } } diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index ff90597a51e..2e3e2f24d55 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -779,18 +779,18 @@ export async function getAnnotationCompoundInformation( } export function getEmptySandboxAnnotationInformation( - datasetId: APIDatasetId, + datasetId: string, tracingType: TracingType, sharingToken?: string | null | undefined, options: RequestOptions = {}, ): Promise { const sharingTokenSuffix = sharingToken != null ?
`?sharingToken=${sharingToken}` : ""; - const infoUrl = `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/sandbox/${tracingType}${sharingTokenSuffix}`; + const infoUrl = `/api/datasets/${datasetId}/sandbox/${tracingType}${sharingTokenSuffix}`; return Request.receiveJSON(infoUrl, options); } export function createExplorational( - datasetId: APIDatasetId, + datasetId: string, typ: TracingType, autoFallbackLayer: boolean, fallbackLayerName?: string | null | undefined, @@ -798,7 +798,7 @@ export function createExplorational( resolutionRestrictions?: APIResolutionRestrictions | null | undefined, options: RequestOptions = {}, ): Promise { - const url = `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/createExplorational`; + const url = `/api/datasets/${datasetId}/createExplorational`; let layers: Array = []; if (typ === "skeleton") { @@ -961,13 +961,13 @@ export function getNewestVersionForTracing( export function hasSegmentIndexInDataStore( dataStoreUrl: string, - dataSetName: string, + datasetPath: string, dataLayerName: string, organizationId: string, ) { return doWithToken((token) => Request.receiveJSON( - `${dataStoreUrl}/data/datasets/${organizationId}/${dataSetName}/layers/${dataLayerName}/hasSegmentIndex?token=${token}`, + `${dataStoreUrl}/data/datasets/${organizationId}/${datasetPath}/layers/${dataLayerName}/hasSegmentIndex?token=${token}`, ), ); } @@ -1125,19 +1125,19 @@ export async function getDatasets( export function readDatasetDatasource(dataset: APIDataset): Promise { return doWithToken((token) => Request.receiveJSON( - `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.name}/readInboxDataSource?token=${token}`, + `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.path}/readInboxDataSource?token=${token}`, ), ); } export async function updateDatasetDatasource( - datasetName: string, + datasetPath: string, dataStoreUrl: string, datasource: APIDataSource, ): Promise { await doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasource.id.team}/${datasetName}?token=${token}`, + `${dataStoreUrl}/data/datasets/${datasource.id.team}/${datasetPath}?token=${token}`, { data: datasource, }, @@ -1152,15 +1152,12 @@ export async function getActiveDatasetsOfMyOrganization(): Promise { const sharingTokenSuffix = sharingToken != null ? 
`?sharingToken=${sharingToken}` : ""; - return Request.receiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}${sharingTokenSuffix}`, - options, - ); + return Request.receiveJSON(`/api/datasets/${datasetId}${sharingTokenSuffix}`, options); } export type DatasetUpdater = { @@ -1174,16 +1171,13 @@ export type DatasetUpdater = { }; export function updateDatasetPartial( - datasetId: APIDatasetId, + datasetId: string, updater: DatasetUpdater, ): Promise { - return Request.sendJSONReceiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/updatePartial`, - { - method: "PATCH", - data: updater, - }, - ); + return Request.sendJSONReceiveJSON(`/api/datasets/${datasetId}/updatePartial`, { + method: "PATCH", + data: updater, + }); } export async function getDatasetViewConfiguration( @@ -1249,7 +1243,10 @@ type DatasetCompositionArgs = { layers: LayerLink[]; }; -export function createDatasetComposition(datastoreUrl: string, payload: DatasetCompositionArgs) { +export function createDatasetComposition( + datastoreUrl: string, + payload: DatasetCompositionArgs, +): Promise { return doWithToken((token) => Request.sendJSONReceiveJSON(`${datastoreUrl}/data/datasets/compose?token=${token}`, { data: payload, @@ -1289,8 +1286,10 @@ export function createResumableUpload(datastoreUrl: string, uploadId: string): P } type ReserveUploadInformation = { uploadId: string; - organization: string; name: string; + path: string; + newDatasetId: string; + organization: string; totalFileCount: number; filePaths: Array; initialTeams: Array; @@ -1339,10 +1338,14 @@ export function getUnfinishedUploads( }); } +type NewDatasetReply = { + newDatasetId: string; +}; + export function finishDatasetUpload( datastoreHost: string, uploadInformation: ArbitraryObject, -): Promise { +): Promise { return doWithToken((token) => Request.sendJSONReceiveJSON(`/data/datasets/finishUpload?token=${token}`, { data: uploadInformation, @@ -1407,7 +1410,7 @@ export async function storeRemoteDataset( organizationId: string, datasource: string, folderId: string | null, -): Promise { +): Promise { return doWithToken((token) => { const params = new URLSearchParams(); params.append("token", token); @@ -1426,16 +1429,12 @@ export async function storeRemoteDataset( } // Returns void if the name is valid. Otherwise, a string is returned which denotes the reason. 
-export async function isDatasetNameValid( - datasetId: APIDatasetId, -): Promise { - if (datasetId.name === "") { +export async function isDatasetNameValid(datasetName: string): Promise { + if (datasetName === "") { return "The dataset name must not be empty."; } - const response = await Request.receiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/isValidNewName`, - ); + const response = await Request.receiveJSON(`/api/datasets/${datasetName}/isValidNewName`); if (response.isValid) { return null; } else { diff --git a/frontend/javascripts/admin/api/mesh.ts b/frontend/javascripts/admin/api/mesh.ts index 880334bebd0..8e8d38a9c44 100644 --- a/frontend/javascripts/admin/api/mesh.ts +++ b/frontend/javascripts/admin/api/mesh.ts @@ -56,7 +56,7 @@ export function getMeshfileChunksForSegment( params.append("editableMappingTracingId", editableMappingTracingId); } return Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/meshes/chunks?${params}`, + `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.path}/layers/${layerName}/meshes/chunks?${params}`, { data: { meshFile, @@ -86,7 +86,7 @@ export function getMeshfileChunkData( ): Promise { return doWithToken(async (token) => { const dracoDataChunks = await Request.sendJSONReceiveArraybuffer( - `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/meshes/chunks/data?token=${token}`, + `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.path}/layers/${layerName}/meshes/chunks/data?token=${token}`, { data: batchDescription, useWebworkerForArrayBuffer: true, diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx index 7af7b572e39..c9658230e64 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx +++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx @@ -28,14 +28,7 @@ import { flatToNestedMatrix } from "oxalis/model/accessors/dataset_accessor"; import type { OxalisState } from "oxalis/store"; import React, { useState } from "react"; import { useSelector } from "react-redux"; -import { - type APIDataLayer, - type APIDataset, - type APIDatasetId, - type APITeam, - areDatasetsIdentical, - type LayerLink, -} from "types/api_flow_types"; +import type { APIDataLayer, APIDataset, APITeam, LayerLink } from "types/api_flow_types"; import { syncValidator } from "types/validation"; import type { WizardComponentProps } from "./common"; import { useEffectOnlyOnce } from "libs/react_hooks"; @@ -78,10 +71,8 @@ export function ConfigureNewDataset(props: WizardComponentProps) { ) as [APIDataset, APIDataLayer][] ).map( ([dataset, dataLayer]): LayerLink => ({ - datasetId: { - owningOrganization: dataset.owningOrganization, - name: dataset.name, - }, + datasetId: dataset.id, + datasetName: dataset.name, sourceName: dataLayer.name, newName: dataLayer.name, transformations: [], @@ -124,13 +115,14 @@ export function ConfigureNewDataset(props: WizardComponentProps) { if (useThinPlateSplines) { checkLandmarksForThinPlateSpline(sourcePoints, targetPoints); } - return layers.map((layer) => ({ - ...layer, - // The first dataset will be transformed to match the second. - transformations: areDatasetsIdentical(layer.datasetId, linkedDatasets[0]) - ? 
transformationArr - : [], - })); + return layers.map((layer) => { + const areDatasetsIdentical = layer.datasetId === linkedDatasets[0].id; + return { + ...layer, + // The first dataset will be transformed to match the second. + transformations: areDatasetsIdentical ? transformationArr : [], + }; + }); } // Don't check datastore.allowsUpdate for dataset composition @@ -172,7 +164,8 @@ export function ConfigureNewDataset(props: WizardComponentProps) { const newDatasetName = form.getFieldValue(["name"]); setIsLoading(true); try { - await createDatasetComposition(datastoreToUse.url, { + const { newDatasetId } = await createDatasetComposition(datastoreToUse.url, { + // keep identifying dataset at orgaId & path as this is a datastore request. newDatasetName, targetFolderId: form.getFieldValue(["targetFolderId"]), organizationId: activeUser.organization, @@ -180,40 +173,33 @@ export function ConfigureNewDataset(props: WizardComponentProps) { layers: layersWithTransforms, }); - const uniqueDatasets = _.uniqBy( - layersWithoutTransforms.map((layer) => layer.datasetId), - (id) => id.owningOrganization + "-" + id.name, - ); + const uniqueDatasets = _.uniqBy(layersWithoutTransforms, (layer) => layer.datasetId); const datasetMarkdownLinks = uniqueDatasets - .map((el) => `- [${el.name}](/datasets/${el.owningOrganization}/${el.name})`) + .map((el) => `- [${el.datasetName}](/datasets/${el.datasetId})`) .join("\n"); - await updateDatasetPartial( - { owningOrganization: activeUser.organization, name: newDatasetName }, - { - description: [ - "This dataset was composed from:", - datasetMarkdownLinks, - "", - "The layers were combined " + - (sourcePoints.length === 0 - ? "without any transforms" - : `with ${ - useThinPlateSplines - ? `Thin-Plate-Splines (${sourcePoints.length} correspondences)` - : `an affine transformation (mean error: ${formatNumber( - affineMeanError.meanError, - )} vx)` - }`) + - ".", - ].join("\n"), - }, - ); + await updateDatasetPartial(newDatasetId, { + description: [ + "This dataset was composed from:", + datasetMarkdownLinks, + "", + "The layers were combined " + + (sourcePoints.length === 0 + ? "without any transforms" + : `with ${ + useThinPlateSplines + ? `Thin-Plate-Splines (${sourcePoints.length} correspondences)` + : `an affine transformation (mean error: ${formatNumber( + affineMeanError.meanError, + )} vx)` + }`) + + ".", + ].join("\n"), + }); + props.onAdded(newDatasetId, false); } finally { setIsLoading(false); } - - props.onAdded(activeUser.organization, newDatasetName, false); }; return ( @@ -271,6 +257,7 @@ export function ConfigureNewDataset(props: WizardComponentProps) { void; form: FormInstance; - datasetId: APIDatasetId; + datasetId: string; + datasetName: string; }) { const layers = Form.useWatch(["layers"]); @@ -379,12 +368,8 @@ function LinkedLayerForm({ label="Layer Source" info="This is the layer which will be linked into the new dataset." 
> - - {datasetId.name} + + {datasetName} {" "} / {layer.sourceName} diff --git a/frontend/javascripts/admin/dataset/composition_wizard/common.ts b/frontend/javascripts/admin/dataset/composition_wizard/common.ts index 04776662a1e..1fd854d19bf 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/common.ts +++ b/frontend/javascripts/admin/dataset/composition_wizard/common.ts @@ -27,11 +27,7 @@ export type WizardComponentProps = { wizardContext: WizardContext; setWizardContext: React.Dispatch>; datastores: APIDataStore[]; - onAdded: ( - datasetOrganization: string, - uploadedDatasetName: string, - needsConversion?: boolean | null | undefined, - ) => Promise; + onAdded: (datasetId: string, needsConversion?: boolean | null | undefined) => Promise; }; export async function tryToFetchDatasetsByName( diff --git a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx index eb12f66bc18..27a717a892f 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx @@ -49,8 +49,7 @@ type FileList = UploadFile[]; type OwnProps = { onAdded: ( - datasetOrganization: string, - uploadedDatasetName: string, + uploadedDatasetId: string, needsConversion?: boolean | null | undefined, ) => Promise; datastores: APIDataStore[]; @@ -287,26 +286,23 @@ function DatasetAddRemoteView(props: Props) { let configJSON; try { configJSON = JSON.parse(dataSourceJsonStr); - const nameValidationResult = await isDatasetNameValid({ - name: configJSON.id.name, - owningOrganization: activeUser.organization, - }); + const nameValidationResult = await isDatasetNameValid(configJSON.id.name); if (nameValidationResult) { throw new Error(nameValidationResult); } - await storeRemoteDataset( + const { newDatasetId } = await storeRemoteDataset( datastoreToUse.url, configJSON.id.name, activeUser.organization, dataSourceJsonStr, targetFolderId, ); + onAdded(newDatasetId); } catch (e) { setShowLoadingOverlay(false); Toast.error(`The datasource config could not be stored. 
${e}`); return; } - onAdded(activeUser.organization, configJSON.id.name); } } diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx index 84e38b7ce83..0f9ab830284 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx @@ -33,24 +33,21 @@ const addTypeToVerb: Record = { function DatasetAddView({ history }: RouteComponentProps) { const datastores = useFetch(getDatastores, [], []); - const [datasetName, setDatasetName] = useState(""); - const [organization, setOrganization] = useState(""); + const [datasetId, setDatasetId] = useState(""); const [datasetNeedsConversion, setDatasetNeedsConversion] = useState(false); const [datasetAddType, setImportType] = useState(DatasetAddType.UPLOAD); const handleDatasetAdded = async ( datasetAddType: DatasetAddType, - datasetOrganization: string, - uploadedDatasetName: string, + datasetId: string, needsConversion: boolean | null | undefined, ): Promise => { - setOrganization(datasetOrganization); - setDatasetName(uploadedDatasetName); + setDatasetId(datasetId); setImportType(datasetAddType); if (needsConversion != null) setDatasetNeedsConversion(needsConversion); }; - const showAfterUploadContent = datasetName !== ""; + const showAfterUploadContent = datasetId !== ""; const getAfterUploadModalContent = () => { if (!showAfterUploadContent) { @@ -60,9 +57,8 @@ function DatasetAddView({ history }: RouteComponentProps) { return getPostUploadModal( datasetNeedsConversion, datasetAddType, - organization, - datasetName, - setDatasetName, + datasetId, + setDatasetId, history, ); }; @@ -265,9 +261,8 @@ export default connector(withRouter(DatasetAddView)); const getPostUploadModal = ( datasetNeedsConversion: boolean, datasetAddType: DatasetAddType, - organization: string, - datasetName: string, - setDatasetName: (arg0: string) => void, + datasetId: string, + setDatasetId: (arg0: string) => void, history: History, ) => { return ( @@ -286,8 +281,8 @@ const getPostUploadModal = ( display: "none", }, }} - onCancel={() => setDatasetName("")} - onOk={() => setDatasetName("")} + onCancel={() => setDatasetId("")} + onOk={() => setDatasetId("")} width={580} >
) : ( - - diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index ff526cbd643..8cf8b2f4692 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -82,7 +82,7 @@ const logRetryToAnalytics = _.throttle((datasetName: string) => { type OwnProps = { datastores: Array; withoutCard?: boolean; - onUploaded: (arg0: string, arg1: string, arg2: boolean) => Promise | void; + onUploaded: (arg0: string, arg2: boolean) => Promise | void; }; type StateProps = { activeUser: APIUser | null | undefined; @@ -303,7 +303,7 @@ class DatasetUploadView extends React.Component { // @ts-ignore window.onbeforeunload = beforeUnload; const datasetId: APIDatasetId = { - name: formValues.name, + path: formValues.name, owningOrganization: activeUser.organization, }; @@ -314,13 +314,13 @@ class DatasetUploadView extends React.Component { const uploadId = unfinishedUploadToContinue ? unfinishedUploadToContinue.uploadId - : `${dayjs(Date.now()).format("YYYY-MM-DD_HH-mm")}__${datasetId.name}__${getRandomString()}`; + : `${dayjs(Date.now()).format("YYYY-MM-DD_HH-mm")}__${datasetId.path}__${getRandomString()}`; const filePaths = formValues.zipFile.map((file) => file.path || ""); const reserveUploadInformation = { uploadId, + name: formValues.name, + path: datasetId.path, organization: datasetId.owningOrganization, - name: datasetId.name, - path: datasetId.name, totalFileCount: formValues.zipFile.length, filePaths: filePaths, layersToLink: [], @@ -350,7 +350,7 @@ class DatasetUploadView extends React.Component { isFinishing: true, }); finishDatasetUpload(datastoreUrl, uploadInfo).then( - async () => { + async ({ uploadedDatasetId }) => { trackAction("Upload dataset"); Toast.success(messages["dataset.upload_success"]); let maybeError; @@ -404,11 +404,7 @@ class DatasetUploadView extends React.Component { name: "", zipFile: [], }); - this.props.onUploaded( - activeUser.organization, - formValues.name, - this.state.needsConversion, - ); + this.props.onUploaded(uploadedDatasetId, this.state.needsConversion); } }, (error) => { diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx index 35ebe7f483e..23077ecbca6 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx @@ -48,7 +48,7 @@ import { defaultContext } from "@tanstack/react-query"; const FormItem = Form.Item; const notImportedYetStatus = "Not imported yet."; type OwnProps = { - datasetId: APIDatasetId; + datasetId: string; isEditingMode: boolean; onComplete: () => void; onCancel: () => void; diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx index 0440554ef95..2201d36a5c5 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx @@ -69,7 +69,7 @@ export async function hasSegmentIndex( if (maybeVolumeTracing == null) { segmentIndexInDataStore = await hasSegmentIndexInDataStore( dataset.dataStore.url, - dataset.name, + dataset.path, visibleSegmentationLayer.name, dataset.owningOrganization, ); diff --git a/frontend/javascripts/types/api_flow_types.ts 
b/frontend/javascripts/types/api_flow_types.ts index f5100c1bc59..80ec9980057 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -98,7 +98,8 @@ export type APIDataLayer = APIColorLayer | APISegmentationLayer; export type APISkeletonLayer = { category: "skeleton" }; export type LayerLink = { - datasetId: APIDatasetId; + datasetId: string; + datasetName: string; sourceName: string; newName: string; transformations: CoordinateTransformation[]; @@ -165,11 +166,8 @@ export type APIPublication = { }; export type MutableAPIDatasetId = { owningOrganization: string; - name: string; + path: string; }; -export function areDatasetsIdentical(a: APIDatasetId, b: APIDatasetId) { - return a.owningOrganization === b.owningOrganization && a.name === b.name; -} export type APIDatasetId = Readonly; export enum APIMetadataEnum { @@ -186,6 +184,8 @@ export type APIMetadata = { export type APIMetadataEntries = APIMetadata[]; type MutableAPIDatasetBase = MutableAPIDatasetId & { + readonly id: string; // Should never be changed. + name: string; isUnreported: boolean; folderId: string; allowedTeams: Array; diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index e5d6fb086ea..cff68410ea6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -127,6 +127,7 @@ class DataSourceController @Inject()( "aManualUpload", request.body.datasetName, request.body.datasetPath, + "newDatasetIdSetByCoreBackend", request.body.organization, 0, List.empty, @@ -217,14 +218,13 @@ class DataSourceController @Inject()( urlOrHeaderToken(token, request)) { for { (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "finishUpload.failed" - // TODO: Store dataset id from upload information in redis and use it here to report the upload - _ <- remoteWebknossosClient.reportUpload( + uploadedDatasetIdJson <- remoteWebknossosClient.reportUpload( dataSourceId, datasetSizeBytes, request.body.needsConversion.getOrElse(false), viaAddRoute = false, userToken = urlOrHeaderToken(token, request)) ?~> "reportUpload.failed" - } yield Ok + } yield Ok(Json.obj("newDatasetId" -> uploadedDatasetIdJson)) } } yield result } @@ -437,6 +437,7 @@ class DataSourceController @Inject()( uploadId = "", // Set by core backend name = datasetName, path = "", // Set by core backend + newDatasetId = "", // Set by core backend organization = organizationId, totalFileCount = 1, filePaths = None, @@ -450,13 +451,13 @@ class DataSourceController @Inject()( _ <- dataSourceService.updateDataSource( request.body.copy(id = datasourceId), expectExisting = false) - _ <- remoteWebknossosClient.reportUpload( + uploadedDatasetId <- remoteWebknossosClient.reportUpload( datasourceId, 0L, needsConversion = false, viaAddRoute = true, userToken = urlOrHeaderToken(token, request)) ?~> "reportUpload.failed" - } yield Ok + } yield Ok(Json.obj("newDatasetId" -> uploadedDatasetId)) } } @@ -544,9 +545,9 @@ class DataSourceController @Inject()( accessTokenService.assertUserAccess( UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)), userToken)) - dataSource <- composeService.composeDataset(request.body, userToken) + (dataSource, newDatasetId)
<- composeService.composeDataset(request.body, userToken) _ <- dataSourceRepository.updateDataSource(dataSource) - } yield Ok + } yield Ok(Json.obj("newDatasetId" -> newDatasetId)) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index 0186769071f..4b88c25ef47 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -17,7 +17,7 @@ import com.scalableminds.webknossos.datastore.services.uploading.ReserveUploadIn import com.scalableminds.webknossos.datastore.storage.DataVaultCredential import com.typesafe.scalalogging.LazyLogging import play.api.inject.ApplicationLifecycle -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.{JsValue, Json, OFormat} import scala.concurrent.ExecutionContext import scala.concurrent.duration._ @@ -82,9 +82,9 @@ class DSRemoteWebknossosClient @Inject()( datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean, - userToken: Option[String]): Fox[Unit] = + userToken: Option[String]): Fox[String] = for { - _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reportDatasetUpload") + uploadedDatasetIdJson <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reportDatasetUpload") .addQueryString("key" -> dataStoreKey) .addQueryString("datasetPath" -> dataSourceId.path) .addQueryString("organizationId" -> dataSourceId.organizationId) @@ -92,8 +92,9 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("viaAddRoute" -> viaAddRoute.toString) .addQueryString("datasetSizeBytes" -> datasetSizeBytes.toString) .addQueryStringOptional("token", userToken) - .post() - } yield () + .getWithJsonResponse[JsValue] + uploadedDatasetId <- (uploadedDatasetIdJson \ "id").validate[String].asOpt.toFox ?~> "uploadedDatasetId.invalid" + } yield uploadedDatasetId def reportDataSources(dataSources: List[InboxDataSourceLike]): Fox[_] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/datasources") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala index ecb73ecafc4..42c700c295a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala @@ -67,25 +67,29 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, private def uploadDirectory(organizationId: String, datasetPath: String): Path = dataBaseDir.resolve(organizationId).resolve(datasetPath) - def composeDataset(composeRequest: ComposeRequest, userToken: Option[String]): Fox[DataSource] = + def composeDataset(composeRequest: ComposeRequest, userToken: Option[String]): Fox[(DataSource, String)] = for { _ <- dataSourceService.assertDataDirWritable(composeRequest.organizationId) - reserveUploadInfo = ReserveUploadInformation("", - composeRequest.newDatasetName, - "", // filled by core backend - composeRequest.organizationId, - 1, - None, - None, - List(), - Some(composeRequest.targetFolderId)) + reserveUploadInfo = ReserveUploadInformation( + "", + composeRequest.newDatasetName, + "", // filled by core
backend + "", // filled by core backend + composeRequest.organizationId, + 1, + None, + None, + List(), + Some(composeRequest.targetFolderId) + ) reservedInfo <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "Failed to reserve upload." directory = uploadDirectory(reservedInfo.organization, reservedInfo.path) _ = PathUtils.ensureDirectory(directory) dataSource <- createDatasource(composeRequest, reservedInfo.path, reservedInfo.organization, directory) properties = Json.toJson(dataSource).toString().getBytes(StandardCharsets.UTF_8) _ = Files.write(directory.resolve(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON), properties) - } yield dataSource + newDatasetId = reservedInfo.newDatasetId + } yield (dataSource, newDatasetId) private def getLayerFromComposeLayer(composeLayer: ComposeRequestLayer, uploadDir: Path): Fox[DataLayer] = for { @@ -140,7 +144,10 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, } } yield editedLayer - private def createDatasource(composeRequest: ComposeRequest, datasetPath: String, organizationId: String, uploadDir: Path): Fox[DataSource] = { + private def createDatasource(composeRequest: ComposeRequest, + datasetPath: String, + organizationId: String, + uploadDir: Path): Fox[DataSource] = for { layers <- Fox.serialCombined(composeRequest.layers.toList)(getLayerFromComposeLayer(_, uploadDir)) dataSource = GenericDataSource( @@ -151,7 +158,6 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, ) } yield dataSource - } private def isLayerRemote(dataSourceId: DataSourceId, layerName: String) = { val layerPath = dataBaseDir.resolve(dataSourceId.organizationId).resolve(dataSourceId.path).resolve(layerName) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index 25aee89c424..0266f95fe0e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -32,6 +32,7 @@ case class ReserveUploadInformation( uploadId: String, // upload id that was also used in chunk upload (this time without file paths) name: String, // dataset name path: String, // dataset path + newDatasetId: String, organization: String, totalFileCount: Long, filePaths: Option[List[String]], From 1a8349aa24c27eb3d8e7d6a2d816da66564dc965 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 20 Sep 2024 10:07:24 +0200 Subject: [PATCH 008/129] WIP: adapt frontend to new routes --- frontend/javascripts/admin/admin_rest_api.ts | 117 +++++++++--------- frontend/javascripts/admin/api/mesh.ts | 6 +- .../composition_wizard/02_upload_files.tsx | 15 ++- .../composition_wizard/03_select_datasets.tsx | 11 +- .../dataset/composition_wizard/common.ts | 21 ++-- .../admin/dataset/dataset_components.tsx | 19 +-- .../admin/dataset/dataset_upload_view.tsx | 26 ++-- frontend/javascripts/admin/onboarding.tsx | 21 ++-- .../create_explorative_modal.tsx | 4 +- .../advanced_dataset/dataset_action_view.tsx | 8 +- .../advanced_dataset/dataset_table.tsx | 10 +- .../dataset/dataset_collection_context.tsx | 10 +- .../dataset/dataset_settings_data_tab.tsx | 17 +-- .../dataset/dataset_settings_delete_tab.tsx | 7 +- .../dataset/dataset_settings_sharing_tab.tsx | 8 +- 
.../dataset/dataset_settings_view.tsx | 27 ++-- .../dataset_settings_viewconfig_tab.tsx | 14 +-- .../javascripts/dashboard/dataset/queries.tsx | 6 +- .../oxalis/model_initialization.ts | 6 +- frontend/javascripts/oxalis/store.ts | 15 ++- .../connectome_tab/connectome_view.tsx | 4 +- frontend/javascripts/router.tsx | 41 +++--- .../puppeteer/dataset_rendering_helpers.ts | 18 +-- frontend/javascripts/types/api_flow_types.ts | 6 +- .../controllers/DataSourceController.scala | 6 +- 25 files changed, 215 insertions(+), 228 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 2e3e2f24d55..3ea572972a6 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -13,7 +13,7 @@ import type { APIDataSource, APIDataStore, APIDataset, - APIDatasetId, + APIDataSourceId, APIFeatureToggles, APIHistogramData, APIMapping, @@ -1160,6 +1160,19 @@ export function getDataset( return Request.receiveJSON(`/api/datasets/${datasetId}${sharingTokenSuffix}`, options); } +export function getDatasetLegacy( + datasetOrga: string, + datasetPath: string, + sharingToken?: string | null | undefined, + options: RequestOptions = {}, +): Promise { + const sharingTokenSuffix = sharingToken != null ? `?sharingToken=${sharingToken}` : ""; + return Request.receiveJSON( + `/api/datasets/${datasetOrga}/${datasetPath}${sharingTokenSuffix}`, + options, + ); +} + export type DatasetUpdater = { description?: string | null; displayName?: string | null; @@ -1187,7 +1200,7 @@ export async function getDatasetViewConfiguration( ): Promise { const sharingTokenSuffix = sharingToken != null ? `?sharingToken=${sharingToken}` : ""; const settings = await Request.sendJSONReceiveJSON( - `/api/datasetConfigurations/${dataset.owningOrganization}/${dataset.name}${sharingTokenSuffix}`, + `/api/datasetConfigurations/${dataset.id}${sharingTokenSuffix}`, { data: displayedVolumeTracings, method: "POST", @@ -1198,38 +1211,32 @@ export async function getDatasetViewConfiguration( } export function updateDatasetConfiguration( - datasetId: APIDatasetId, + datasetId: string, datasetConfig: PartialDatasetConfiguration, options: RequestOptions = {}, ): Promise> { - return Request.sendJSONReceiveJSON( - `/api/datasetConfigurations/${datasetId.owningOrganization}/${datasetId.name}`, - { ...options, method: "PUT", data: datasetConfig }, - ); + return Request.sendJSONReceiveJSON(`/api/datasetConfigurations/${datasetId}`, { + ...options, + method: "PUT", + data: datasetConfig, + }); } -export function getDatasetDefaultConfiguration( - datasetId: APIDatasetId, -): Promise { - return Request.receiveJSON( - `/api/datasetConfigurations/default/${datasetId.owningOrganization}/${datasetId.name}`, - ); +export function getDatasetDefaultConfiguration(datasetId: string): Promise { + return Request.receiveJSON(`/api/datasetConfigurations/default/${datasetId}`); } export function updateDatasetDefaultConfiguration( - datasetId: APIDatasetId, + datasetId: string, datasetConfiguration: DatasetConfiguration, ): Promise { - return Request.sendJSONReceiveJSON( - `/api/datasetConfigurations/default/${datasetId.owningOrganization}/${datasetId.name}`, - { - method: "PUT", - data: datasetConfiguration, - }, - ); + return Request.sendJSONReceiveJSON(`/api/datasetConfigurations/default/${datasetId}`, { + method: "PUT", + data: datasetConfiguration, + }); } -export function getDatasetAccessList(datasetId: APIDatasetId): Promise> { +export function 
getDatasetAccessList(datasetId: APIDataSourceId): Promise> { return Request.receiveJSON( `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/accessList`, ); @@ -1443,16 +1450,13 @@ export async function isDatasetNameValid(datasetName: string): Promise, ): Promise { - return Request.sendJSONReceiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/teams`, - { - method: "PATCH", - data: newTeams, - }, - ); + return Request.sendJSONReceiveJSON(`/api/datasets/${datasetId}/teams`, { + method: "PATCH", + data: newTeams, + }); } export async function triggerDatasetCheck(datastoreHost: string): Promise { @@ -1466,12 +1470,12 @@ export async function triggerDatasetCheck(datastoreHost: string): Promise export async function triggerDatasetClearCache( datastoreHost: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName?: string, ): Promise { await doWithToken((token) => Request.triggerRequest( - `/data/triggers/reload/${datasetId.owningOrganization}/${datasetId.name}?token=${token}${ + `/data/triggers/reload/${datasetId.owningOrganization}/${datasetId.path}?token=${token}${ layerName ? `&layerName=${layerName}` : "" }`, { @@ -1484,11 +1488,11 @@ export async function triggerDatasetClearCache( export async function deleteDatasetOnDisk( datastoreHost: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, ): Promise { await doWithToken((token) => Request.triggerRequest( - `/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/deleteOnDisk?token=${token}`, + `/data/datasets/${datasetId.owningOrganization}/${datasetId.path}/deleteOnDisk?token=${token}`, { host: datastoreHost, method: "DELETE", @@ -1497,7 +1501,7 @@ export async function deleteDatasetOnDisk( ); } -export async function triggerDatasetClearThumbnailCache(datasetId: APIDatasetId): Promise { +export async function triggerDatasetClearThumbnailCache(datasetId: APIDataSourceId): Promise { await Request.triggerRequest( `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/clearThumbnailCache`, { @@ -1514,23 +1518,20 @@ export async function clearCache(dataset: APIMaybeUnimportedDataset, layerName?: } export async function getDatasetSharingToken( - datasetId: APIDatasetId, + datasetId: string, options?: RequestOptions, ): Promise { const { sharingToken } = await Request.receiveJSON( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/sharingToken`, + `/api/datasets/${datasetId}/sharingToken`, options, ); return sharingToken; } -export async function revokeDatasetSharingToken(datasetId: APIDatasetId): Promise { - await Request.triggerRequest( - `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}/sharingToken`, - { - method: "DELETE", - }, - ); +export async function revokeDatasetSharingToken(datasetId: string): Promise { + await Request.triggerRequest(`/api/datasets/${datasetId}/sharingToken`, { + method: "DELETE", + }); } export async function getOrganizationForDataset(datasetName: string): Promise { @@ -1542,7 +1543,7 @@ export async function getOrganizationForDataset(datasetName: string): Promise { return doWithToken((token) => @@ -1590,7 +1591,7 @@ export async function getHistogramForLayer( export async function getMappingsForDatasetLayer( datastoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, ): Promise> { return doWithToken((token) => @@ -1602,7 +1603,7 @@ export async function getMappingsForDatasetLayer( export function fetchMapping( datastoreUrl: string, - datasetId: APIDatasetId, + 
datasetId: APIDataSourceId, layerName: string, mappingName: string, ): Promise { @@ -1638,7 +1639,7 @@ export function getEditableMappingInfo( export function getPositionForSegmentInAgglomerate( datastoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, mappingName: string, segmentId: number, @@ -1659,7 +1660,7 @@ export function getPositionForSegmentInAgglomerate( export async function getAgglomeratesForDatasetLayer( datastoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, ): Promise> { return doWithToken((token) => @@ -2070,7 +2071,7 @@ export function getBucketPositionsForAdHocMesh( export function getAgglomerateSkeleton( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, mappingId: string, agglomerateId: number, @@ -2090,7 +2091,7 @@ export function getAgglomerateSkeleton( export async function getAgglomeratesForSegmentsFromDatastore( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, mappingId: string, segmentIds: Array, @@ -2169,7 +2170,7 @@ export function getEditableAgglomerateSkeleton( export async function getMeshfilesForDatasetLayer( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, ): Promise> { const meshFiles: Array = await doWithToken((token) => @@ -2190,7 +2191,7 @@ export async function getMeshfilesForDatasetLayer( // ### Connectomes export function getConnectomeFilesForDatasetLayer( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, ): Promise> { return doWithToken((token) => @@ -2202,7 +2203,7 @@ export function getConnectomeFilesForDatasetLayer( export function getSynapsesOfAgglomerates( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, connectomeFile: string, agglomerateIds: Array, @@ -2227,7 +2228,7 @@ export function getSynapsesOfAgglomerates( function getSynapseSourcesOrDestinations( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, connectomeFile: string, synapseIds: Array, @@ -2258,7 +2259,7 @@ export function getSynapseDestinations(...args: any): Promise> { export function getSynapsePositions( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, connectomeFile: string, synapseIds: Array, @@ -2278,7 +2279,7 @@ export function getSynapsePositions( export function getSynapseTypes( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, connectomeFile: string, synapseIds: Array, diff --git a/frontend/javascripts/admin/api/mesh.ts b/frontend/javascripts/admin/api/mesh.ts index 8e8d38a9c44..3c765f6fcef 100644 --- a/frontend/javascripts/admin/api/mesh.ts +++ b/frontend/javascripts/admin/api/mesh.ts @@ -1,7 +1,7 @@ import Request from "libs/request"; import _ from "lodash"; import type { Vector3, Vector4 } from "oxalis/constants"; -import type { APIDatasetId } from "types/api_flow_types"; +import type { APIDataSourceId } from "types/api_flow_types"; import { doWithToken } from "./token"; export type MeshChunk = { @@ -32,7 +32,7 @@ type SegmentInfo = { export function getMeshfileChunksForSegment( dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, meshFile: string, segmentId: number, @@ -80,7 +80,7 @@ type MeshChunkDataRequestList = { export function getMeshfileChunkData( 
dataStoreUrl: string, - datasetId: APIDatasetId, + datasetId: APIDataSourceId, layerName: string, batchDescription: MeshChunkDataRequestList, ): Promise { diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx index dddc9160665..375472255c4 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx +++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx @@ -10,7 +10,7 @@ import type { Vector3 } from "oxalis/constants"; import { parseNml } from "oxalis/model/helpers/nml_helpers"; import React from "react"; import { - tryToFetchDatasetsByName, + tryToFetchDatasetsByNameOrId, type WizardComponentProps, type WizardContext, type FileList, @@ -167,10 +167,12 @@ async function parseNmlFiles(fileList: FileList): Promise throw new SoftError("NML files should not be empty."); } - const { trees: trees1, datasetName: datasetName1 } = await parseNml(nmlString1); - const { trees: trees2, datasetName: datasetName2 } = await parseNml(nmlString2); + // TODO: Now the datasetName stored in the nml is interpreted as the path of the dataset. -> call to legacy route is necessary. + // Discussion: how to handle this better? + const { trees: trees1, datasetName: datasetPath1 } = await parseNml(nmlString1); + const { trees: trees2, datasetName: datasetPath2 } = await parseNml(nmlString2); - if (!datasetName1 || !datasetName2) { + if (!datasetPath1 || !datasetPath2) { throw new SoftError("Could not extract dataset names."); } @@ -206,8 +208,9 @@ async function parseNmlFiles(fileList: FileList): Promise throw new SoftError("Each file should contain at least 3 nodes."); } - const datasets = await tryToFetchDatasetsByName( - [datasetName1, datasetName2], + const datasets = await tryToFetchDatasetsByNameOrId( + [datasetPath1, datasetPath2], // fetch by name + [], "Could not derive datasets from NML. Please specify these manually.", ); diff --git a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx index 9e42a94cccf..126d94a4bfd 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx +++ b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx @@ -4,7 +4,7 @@ import DatasetSelectionComponent, { type DatasetSelectionValue, } from "dashboard/dataset/dataset_selection_component"; import React, { useState } from "react"; -import { tryToFetchDatasetsByName, type WizardComponentProps } from "./common"; +import { tryToFetchDatasetsByNameOrId, type WizardComponentProps } from "./common"; import { useEffectOnlyOnce } from "libs/react_hooks"; export default function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) { @@ -18,12 +18,13 @@ export default function SelectDatasets({ wizardContext, setWizardContext }: Wiza })); }; const onNext = async () => { - const datasets = await tryToFetchDatasetsByName( - datasetValues.map((el) => el.value), + const datasets = await tryToFetchDatasetsByNameOrId( + [], + datasetValues.map((el) => el.value), // fetch by id "Could not find datasets. 
Please double-check your selection.", ); if (datasets == null) { - // An error message was already shown in tryToFetchDatasetsByName + // An error message was already shown in tryToFetchDatasetsByNameOrId return; } @@ -35,7 +36,7 @@ export default function SelectDatasets({ wizardContext, setWizardContext }: Wiza }; useEffectOnlyOnce(() => { - setDatasetValues(wizardContext.datasets.map((ds) => ({ value: ds.name, label: ds.name }))); + setDatasetValues(wizardContext.datasets.map((ds) => ({ value: ds.id, label: ds.name }))); }); // When not using any transforms, diff --git a/frontend/javascripts/admin/dataset/composition_wizard/common.ts b/frontend/javascripts/admin/dataset/composition_wizard/common.ts index 1fd854d19bf..117c80315ef 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/common.ts +++ b/frontend/javascripts/admin/dataset/composition_wizard/common.ts @@ -1,4 +1,4 @@ -import { getDataset } from "admin/admin_rest_api"; +import { getDataset, getDatasetLegacy } from "admin/admin_rest_api"; import type { UploadFile } from "antd"; import Toast from "libs/toast"; import type { Vector3 } from "oxalis/constants"; @@ -30,24 +30,19 @@ export type WizardComponentProps = { onAdded: (datasetId: string, needsConversion?: boolean | null | undefined) => Promise; }; -export async function tryToFetchDatasetsByName( +export async function tryToFetchDatasetsByNameOrId( names: string[], + ids: string[], userErrorMessage: string, ): Promise { const { activeUser } = Store.getState(); try { - const datasets = await Promise.all( - names.map((name) => - getDataset( - { - owningOrganization: activeUser?.organization || "", - name: name, - }, - null, - { showErrorToast: false }, - ), + const datasets = await Promise.all([ + ...names.map((name) => + getDatasetLegacy(activeUser?.organization || "", name, null, { showErrorToast: false }), ), - ); + ...ids.map((id) => getDataset(id, null, { showErrorToast: false })), + ]); return datasets; } catch (exception) { Toast.warning(userErrorMessage); diff --git a/frontend/javascripts/admin/dataset/dataset_components.tsx b/frontend/javascripts/admin/dataset/dataset_components.tsx index 8044ec861b2..2e1a47bcf2e 100644 --- a/frontend/javascripts/admin/dataset/dataset_components.tsx +++ b/frontend/javascripts/admin/dataset/dataset_components.tsx @@ -53,10 +53,7 @@ export const layerNameRules = [ }, ]; -export const getDatasetNameRules = ( - activeUser: APIUser | null | undefined, - allowRenaming: boolean = true, -) => [ +export const getDatasetNameRules = (activeUser: APIUser | null | undefined) => [ { required: true, message: messages["dataset.import.required.name"], @@ -64,16 +61,9 @@ export const getDatasetNameRules = ( { min: 3, message: messages["dataset.name_length"] }, ...layerNameRules, { - validator: async (_rule: any, value: string) => { - if (!allowRenaming) { - // Renaming is not allowed. No need to validate the (existing) name then.
- return Promise.resolve(); - } + validator: async (_rule: any, newName: string) => { if (!activeUser) throw new Error("Can't do operation if no user is logged in."); - const reasons = await isDatasetNameValid({ - name: value, - owningOrganization: activeUser.organization, - }); + const reasons = await isDatasetNameValid(newName); if (reasons != null) { return Promise.reject(reasons); @@ -88,7 +78,6 @@ export function DatasetNameFormItem({ activeUser, initialName, label, - allowDuplicate, disabled, }: { activeUser: APIUser | null | undefined; @@ -103,7 +92,7 @@ export function DatasetNameFormItem({ label={label || "Dataset Name"} hasFeedback initialValue={initialName} - rules={getDatasetNameRules(activeUser, !allowDuplicate)} + rules={getDatasetNameRules(activeUser)} validateFirst > diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index 8cf8b2f4692..7ccc7c2ace7 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -35,7 +35,7 @@ import { type APITeam, type APIDataStore, type APIUser, - type APIDatasetId, + type APIDataSourceId, type APIOrganization, APIJobType, } from "types/api_flow_types"; @@ -302,25 +302,23 @@ class DatasetUploadView extends React.Component { this.unblock = this.props.history.block(beforeUnload); // @ts-ignore window.onbeforeunload = beforeUnload; - const datasetId: APIDatasetId = { - path: formValues.name, - owningOrganization: activeUser.organization, - }; const getRandomString = () => { const randomBytes = window.crypto.getRandomValues(new Uint8Array(6)); return Array.from(randomBytes, (byte) => `0${byte.toString(16)}`.slice(-2)).join(""); }; + const newDatasetName = formValues.name; const uploadId = unfinishedUploadToContinue ? unfinishedUploadToContinue.uploadId - : `${dayjs(Date.now()).format("YYYY-MM-DD_HH-mm")}__${datasetId.path}__${getRandomString()}`; + : `${dayjs(Date.now()).format("YYYY-MM-DD_HH-mm")}__${newDatasetName}__${getRandomString()}`; const filePaths = formValues.zipFile.map((file) => file.path || ""); const reserveUploadInformation = { uploadId, - name: formValues.name, - path: datasetId.path, - organization: datasetId.owningOrganization, + name: newDatasetName, + path: "", + newDatasetId: "", + organization: activeUser.organization, totalFileCount: formValues.zipFile.length, filePaths: filePaths, layersToLink: [], @@ -350,7 +348,7 @@ class DatasetUploadView extends React.Component { isFinishing: true, }); finishDatasetUpload(datastoreUrl, uploadInfo).then( - async ({ uploadedDatasetId }) => { + async ({ newDatasetId }) => { trackAction("Upload dataset"); Toast.success(messages["dataset.upload_success"]); let maybeError; @@ -364,7 +362,7 @@ class DatasetUploadView extends React.Component { } await startConvertToWkwJob( - formValues.name, + newDatasetName, // TODO: likely needs dataset id / path to identify the dataset correctly. 
activeUser.organization, formValues.voxelSizeFactor, formValues.voxelSizeUnit, @@ -404,12 +402,12 @@ class DatasetUploadView extends React.Component { name: "", zipFile: [], }); - this.props.onUploaded(uploadedDatasetId, this.state.needsConversion); + this.props.onUploaded(newDatasetId, this.state.needsConversion); } }, (error) => { sendFailedRequestAnalyticsEvent("finish_dataset_upload", error, { - dataset_name: datasetId.name, + dataset_name: reserveUploadInformation.name, }); Toast.error(messages["dataset.upload_failed"]); this.setState({ @@ -439,7 +437,7 @@ class DatasetUploadView extends React.Component { }); }); resumableUpload.on("fileRetry", () => { - logRetryToAnalytics(datasetId.name); + logRetryToAnalytics(newDatasetName); this.setState({ isRetrying: true, }); diff --git a/frontend/javascripts/admin/onboarding.tsx b/frontend/javascripts/admin/onboarding.tsx index 09fcb9b3e9a..5f0846e0337 100644 --- a/frontend/javascripts/admin/onboarding.tsx +++ b/frontend/javascripts/admin/onboarding.tsx @@ -39,7 +39,7 @@ type State = { currentStep: number; datastores: Array; organizationId: string; - datasetNameToImport: string | null | undefined; + datasetIdToImport: string | null | undefined; isDatasetUploadModalVisible: boolean; isInviteModalVisible: boolean; }; @@ -395,7 +395,7 @@ class OnboardingView extends React.PureComponent { organizationId: "", isDatasetUploadModalVisible: false, isInviteModalVisible: false, - datasetNameToImport: null, + datasetIdToImport: null, }; componentDidMount() { @@ -416,7 +416,7 @@ class OnboardingView extends React.PureComponent { currentStep: prevState.currentStep + 1, isDatasetUploadModalVisible: false, isInviteModalVisible: false, - datasetNameToImport: null, + datasetIdToImport: null, })); }; renderCreateOrganization = () => ( @@ -498,13 +498,9 @@ class OnboardingView extends React.PureComponent { > { + onUploaded={async (uploadedDatasetId: string, needsConversion: boolean) => { this.setState({ - datasetNameToImport: datasetName, + datasetIdToImport: uploadedDatasetId, isDatasetUploadModalVisible: false, }); @@ -517,14 +513,11 @@ class OnboardingView extends React.PureComponent { /> )} - {this.state.datasetNameToImport != null && ( + {this.state.datasetIdToImport != null && ( diff --git a/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx b/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx index 5e64975c724..cece01c9cb5 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx @@ -2,7 +2,7 @@ import { InfoCircleOutlined } from "@ant-design/icons"; import { Link } from "react-router-dom"; import { Modal, Radio, Button, Tooltip, Slider, Spin } from "antd"; import React, { useEffect, useState } from "react"; -import type { APIDataset, APIDatasetId, APISegmentationLayer } from "types/api_flow_types"; +import type { APIDataset, APIDataSourceId, APISegmentationLayer } from "types/api_flow_types"; import { doesSupportVolumeWithFallback, getSomeResolutionInfoForDataset, @@ -15,7 +15,7 @@ import { useFetch } from "libs/react_helpers"; import type { ResolutionInfo } from "oxalis/model/helpers/resolution_info"; type Props = { - datasetId: APIDatasetId; + datasetId: APIDataSourceId; onClose: () => void; }; type RestrictResolutionSliderProps = { diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx 
index 3562aa7fed6..df0587661a7 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx @@ -11,7 +11,7 @@ import { import window from "libs/window"; import { Link, type LinkProps } from "react-router-dom"; import type * as React from "react"; -import type { APIDatasetId, APIDataset, APIDatasetCompact } from "types/api_flow_types"; +import type { APIDataSourceId, APIDataset, APIDatasetCompact } from "types/api_flow_types"; import { clearCache, deleteDatasetOnDisk, getDataset } from "admin/admin_rest_api"; import Toast from "libs/toast"; import messages from "messages"; @@ -86,7 +86,7 @@ function NewAnnotationLink({ type Props = { dataset: APIDatasetCompact; - reloadDataset: (arg0: APIDatasetId) => Promise; + reloadDataset: (arg0: APIDataSourceId) => Promise; }; function LinkWithDisabled({ @@ -283,7 +283,7 @@ function DatasetActionView(props: Props) { } const onClearCache = async ( dataset: APIDataset, - reloadDataset: (arg0: APIDatasetId) => Promise, + reloadDataset: (arg0: APIDataSourceId) => Promise, ) => { await clearCache(dataset); await reloadDataset(dataset); @@ -299,7 +299,7 @@ export function getDatasetActionContextMenu({ datasets, hideContextMenu, }: { - reloadDataset: (arg0: APIDatasetId) => Promise; + reloadDataset: (arg0: APIDataSourceId) => Promise; datasets: APIDatasetCompact[]; hideContextMenu: () => void; }): MenuProps { diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx index 80631b46f8e..63af2584f60 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx @@ -8,7 +8,7 @@ import { diceCoefficient as dice } from "dice-coefficient"; import type { OxalisState } from "oxalis/store"; import type { APIDatasetCompact, - APIDatasetId, + APIDataSourceId, APIMaybeUnimportedDataset, FolderItem, } from "types/api_flow_types"; @@ -57,8 +57,8 @@ type Props = { isUserAdmin: boolean; isUserDatasetManager: boolean; datasetFilteringMode: DatasetFilteringMode; - reloadDataset: (arg0: APIDatasetId) => Promise; - updateDataset: (id: APIDatasetId, updater: DatasetUpdater) => void; + reloadDataset: (arg0: APIDataSourceId) => Promise; + updateDataset: (id: APIDataSourceId, updater: DatasetUpdater) => void; addTagToSearch: (tag: string) => void; onSelectDataset: (dataset: APIDatasetCompact | null, multiSelect?: boolean) => void; onSelectFolder: (folder: FolderItem | null) => void; @@ -433,7 +433,7 @@ class DatasetTable extends React.PureComponent { }); }; - reloadSingleDataset = (datasetId: APIDatasetId): Promise => + reloadSingleDataset = (datasetId: APIDataSourceId): Promise => this.props.reloadDataset(datasetId); getFilteredDatasets() { @@ -762,7 +762,7 @@ export function DatasetTags({ }: { dataset: APIDatasetCompact; onClickTag?: (t: string) => void; - updateDataset: (id: APIDatasetId, updater: DatasetUpdater) => void; + updateDataset: (id: APIDataSourceId, updater: DatasetUpdater) => void; }) { const editTagFromDataset = ( shouldAddTag: boolean, diff --git a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx index fdf731f9508..e54b2e6545b 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx @@ -1,7 +1,7 @@ 
import type React from "react"; import { createContext, useCallback, useContext, useEffect, useMemo, useState } from "react"; import type { - APIDatasetId, + APIDataSourceId, APIDatasetCompact, APIDatasetCompactWithoutStatusAndLayerNames, FolderItem, @@ -31,10 +31,10 @@ export type DatasetCollectionContextValue = { checkDatasets: () => Promise; fetchDatasets: () => void; reloadDataset: ( - datasetId: APIDatasetId, + datasetId: APIDataSourceId, datasetsToUpdate?: Array, ) => Promise; - updateCachedDataset: (id: APIDatasetId, updater: DatasetUpdater) => Promise; + updateCachedDataset: (id: APIDataSourceId, updater: DatasetUpdater) => Promise; activeFolderId: string | null; setActiveFolderId: (id: string | null) => void; mostRecentlyUsedActiveFolderId: string | null; @@ -157,11 +157,11 @@ export default function DatasetCollectionContextProvider({ datasetSearchQuery.refetch(); } - async function reloadDataset(datasetId: APIDatasetId) { + async function reloadDataset(datasetId: APIDataSourceId) { await updateDatasetMutation.mutateAsync(datasetId); } - async function updateCachedDataset(id: APIDatasetId, updater: DatasetUpdater) { + async function updateCachedDataset(id: APIDataSourceId, updater: DatasetUpdater) { return await updateDatasetMutation.mutateAsync([id, updater]); } diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx index fbffcb22f0e..8222b713714 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx @@ -60,13 +60,11 @@ export const syncDataSourceFields = ( }; export default function DatasetSettingsDataTab({ - allowRenamingDataset, form, activeDataSourceEditMode, onChange, dataset, }: { - allowRenamingDataset: boolean; form: FormInstance; activeDataSourceEditMode: "simple" | "advanced"; onChange: (arg0: "simple" | "advanced") => void; @@ -113,12 +111,7 @@ export default function DatasetSettingsDataTab({ @@ -145,12 +138,10 @@ export default function DatasetSettingsDataTab({ } function SimpleDatasetForm({ - allowRenamingDataset, dataSource, form, dataset, }: { - allowRenamingDataset: boolean; dataSource: Record; form: FormInstance; dataset: APIDataset | null | undefined; @@ -190,15 +181,13 @@ function SimpleDatasetForm({ { if (!deleteDataset) { return; } + const dataSourceId = { owningOrganization: dataset.owningOrganization, path: dataset.path }; setIsDeleting(true); - await deleteDatasetOnDisk(dataset.dataStore.url, datasetId); + await deleteDatasetOnDisk(dataset.dataStore.url, dataSourceId); Toast.success( messages["dataset.delete_success"]({ datasetName: dataset.name, diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx index 0dcbccf9dab..0403258a6f5 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx @@ -4,7 +4,7 @@ import { type RouteComponentProps, withRouter } from "react-router-dom"; import { connect } from "react-redux"; import { Button, Input, Checkbox, Tooltip, type FormInstance, Collapse, Space } from "antd"; import { CopyOutlined, InfoCircleOutlined, RetweetOutlined } from "@ant-design/icons"; -import type { APIDataset, APIDatasetId, APIUser } from "types/api_flow_types"; +import type { APIDataset, APIDataSourceId, APIUser } from "types/api_flow_types"; 
import { AsyncButton } from "components/async_clickables"; import { getDatasetSharingToken, revokeDatasetSharingToken } from "admin/admin_rest_api"; import Toast from "libs/toast"; @@ -19,7 +19,7 @@ import { PricingEnforcedBlur } from "components/pricing_enforcers"; type Props = { form: FormInstance | null; - datasetId: APIDatasetId; + datasetId: string; dataset: APIDataset | null | undefined; activeUser: APIUser | null | undefined; }; @@ -77,9 +77,7 @@ function DatasetSettingsSharingTab({ form, datasetId, dataset, activeUser }: Pro const doesNeedToken = !form.getFieldValue("dataset.isPublic"); const tokenSuffix = `?token=${sharingToken}`; - return `${window.location.origin}/datasets/${datasetId.owningOrganization}/${ - datasetId.name - }/view${doesNeedToken ? tokenSuffix : ""}`; + return `${window.location.origin}/datasets/${datasetId}/view${doesNeedToken ? tokenSuffix : ""}`; } function getUserAccessList() { diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx index 23077ecbca6..c783ea6ea76 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx @@ -15,12 +15,11 @@ import type { APIDataSource, APIDataset, MutableAPIDataset, - APIDatasetId, APIMessage, } from "types/api_flow_types"; import { Unicode } from "oxalis/constants"; import type { DatasetConfiguration, OxalisState } from "oxalis/store"; -import { diffObjects, jsonStringify } from "libs/utils"; +import { diffObjects, jsonStringify, maybe } from "libs/utils"; import { getDataset, getDatasetDefaultConfiguration, @@ -183,7 +182,7 @@ class DatasetSettingsView extends React.PureComponent - {this.props.datasetId.name} - + {maybeStoredDatasetName} ) : ( - this.props.datasetId.name + maybeStoredDatasetName ); const confirmString = this.props.isEditingMode || @@ -521,7 +523,6 @@ class DatasetSettingsView extends React.PureComponent { @@ -576,7 +577,7 @@ class DatasetSettingsView extends React.PureComponent
); @@ -86,7 +86,7 @@ function NewAnnotationLink({ type Props = { dataset: APIDatasetCompact; - reloadDataset: (arg0: APIDataSourceId) => Promise; + reloadDataset: (arg0: APIDataset["id"]) => Promise; }; function LinkWithDisabled({ @@ -126,9 +126,9 @@ function DatasetActionView(props: Props) { const onClearCache = async (compactDataset: APIDatasetCompact) => { setIsReloading(true); - const dataset = await getDataset(compactDataset); + const dataset = await getDataset(compactDataset.id); await clearCache(dataset); - await props.reloadDataset(dataset); + await props.reloadDataset(dataset.id); Toast.success( messages["dataset.clear_cache_success"]({ datasetName: dataset.name, @@ -138,7 +138,7 @@ function DatasetActionView(props: Props) { }; const onDeleteDataset = async () => { - const dataset = await getDataset(props.dataset); + const dataset = await getDataset(props.dataset.id); const deleteDataset = await confirmAsync({ title: "Danger Zone", @@ -176,10 +176,7 @@ function DatasetActionView(props: Props) { if (oldItems == null) { return oldItems; } - return oldItems.filter( - (item) => - item.name !== dataset.name || item.owningOrganization !== dataset.owningOrganization, - ); + return oldItems.filter((item) => item.id !== dataset.id); }, ); queryClient.invalidateQueries({ queryKey: ["dataset", "search"] }); @@ -204,7 +201,7 @@ function DatasetActionView(props: Props) { const datasetSettingsLink = ( <> @@ -215,7 +212,7 @@ function DatasetActionView(props: Props) { ); const brokenDatasetActions = (
- + Settings @@ -261,7 +258,7 @@ function DatasetActionView(props: Props) { onCloseCreateExplorativeModal={() => setIsCreateExplorativeModalVisible(false)} /> @@ -283,10 +280,10 @@ function DatasetActionView(props: Props) { } const onClearCache = async ( dataset: APIDataset, - reloadDataset: (arg0: APIDataSourceId) => Promise, + reloadDataset: (arg0: APIDataset["id"]) => Promise, ) => { await clearCache(dataset); - await reloadDataset(dataset); + await reloadDataset(dataset.id); Toast.success( messages["dataset.clear_cache_success"]({ datasetName: dataset.name, @@ -299,7 +296,7 @@ export function getDatasetActionContextMenu({ datasets, hideContextMenu, }: { - reloadDataset: (arg0: APIDataSourceId) => Promise; + reloadDataset: (arg0: APIDataset["id"]) => Promise; datasets: APIDatasetCompact[]; hideContextMenu: () => void; }): MenuProps { @@ -320,7 +317,7 @@ export function getDatasetActionContextMenu({ key: "view", label: "View", onClick: () => { - window.location.href = `/datasets/${dataset.owningOrganization}/${dataset.name}/view`; + window.location.href = `/datasets/${dataset.id}/view`; }, } : null, @@ -329,7 +326,7 @@ export function getDatasetActionContextMenu({ key: "edit", label: "Open Settings", onClick: () => { - window.location.href = `/datasets/${dataset.owningOrganization}/${dataset.name}/edit`; + window.location.href = `/datasets/${dataset.id}/edit`; }, } : null, @@ -338,7 +335,7 @@ export function getDatasetActionContextMenu({ key: "reload", label: "Reload", onClick: async () => { - const fullDataset = await getDataset(dataset); + const fullDataset = await getDataset(dataset.id); return dataset.isActive ? onClearCache(fullDataset, reloadDataset) : null; }, }, diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx index 63af2584f60..22c6486dfee 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx @@ -7,6 +7,7 @@ import _ from "lodash"; import { diceCoefficient as dice } from "dice-coefficient"; import type { OxalisState } from "oxalis/store"; import type { + APIDataset, APIDatasetCompact, APIDataSourceId, APIMaybeUnimportedDataset, @@ -57,8 +58,8 @@ type Props = { isUserAdmin: boolean; isUserDatasetManager: boolean; datasetFilteringMode: DatasetFilteringMode; - reloadDataset: (arg0: APIDataSourceId) => Promise; - updateDataset: (id: APIDataSourceId, updater: DatasetUpdater) => void; + reloadDataset: (arg0: APIDataset["id"]) => Promise; + updateDataset: (id: APIDataset["id"], updater: DatasetUpdater) => void; addTagToSearch: (tag: string) => void; onSelectDataset: (dataset: APIDatasetCompact | null, multiSelect?: boolean) => void; onSelectFolder: (folder: FolderItem | null) => void; @@ -287,17 +288,13 @@ class DatasetRenderer { const selectedLayerName: string | null = this.data.colorLayerNames[0] || this.data.segmentationLayerNames[0]; const imgSrc = selectedLayerName - ? `/api/datasets/${this.data.owningOrganization}/${ - this.data.name - }/layers/${selectedLayerName}/thumbnail?w=${2 * THUMBNAIL_SIZE}&h=${2 * THUMBNAIL_SIZE}` + ? `/api/datasets/${this.data.id}/layers/${selectedLayerName}/thumbnail?w=${2 * THUMBNAIL_SIZE}&h=${2 * THUMBNAIL_SIZE}` : "/assets/images/inactive-dataset-thumbnail.svg"; const iconClassName = selectedLayerName ? "" : " icon-thumbnail"; + console.log("linking to", `/datasets/${this.data.id}/view`); return ( <> - +
{this.data.name} + + Test disambiguate + {this.renderTags()} {this.datasetTable.props.context.globalSearchQuery != null ? ( @@ -433,7 +437,7 @@ class DatasetTable extends React.PureComponent { }); }; - reloadSingleDataset = (datasetId: APIDataSourceId): Promise => + reloadSingleDataset = (datasetId: APIDataset["id"]): Promise => this.props.reloadDataset(datasetId); getFilteredDatasets() { @@ -735,7 +739,7 @@ class DatasetTable extends React.PureComponent { }, onDoubleClick: () => { if (isADataset) { - window.location.href = `/datasets/${data.owningOrganization}/${data.name}/view`; + window.location.href = `/datasets/${data.id}/view`; } else { context.setActiveFolderId(data.key); } @@ -762,7 +766,7 @@ export function DatasetTags({ }: { dataset: APIDatasetCompact; onClickTag?: (t: string) => void; - updateDataset: (id: APIDataSourceId, updater: DatasetUpdater) => void; + updateDataset: (id: APIDataset["id"], updater: DatasetUpdater) => void; }) { const editTagFromDataset = ( shouldAddTag: boolean, @@ -792,7 +796,7 @@ export function DatasetTags({ } trackAction("Edit dataset tag"); - updateDataset(dataset, updater); + updateDataset(dataset.id, updater); }; return ( diff --git a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx index e54b2e6545b..4997ede542c 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx @@ -31,10 +31,10 @@ export type DatasetCollectionContextValue = { checkDatasets: () => Promise; fetchDatasets: () => void; reloadDataset: ( - datasetId: APIDataSourceId, + datasetId: APIDataset["id"], datasetsToUpdate?: Array, ) => Promise; - updateCachedDataset: (id: APIDataSourceId, updater: DatasetUpdater) => Promise; + updateCachedDataset: (id: APIDataset["id"], updater: DatasetUpdater) => Promise; activeFolderId: string | null; setActiveFolderId: (id: string | null) => void; mostRecentlyUsedActiveFolderId: string | null; @@ -157,11 +157,11 @@ export default function DatasetCollectionContextProvider({ datasetSearchQuery.refetch(); } - async function reloadDataset(datasetId: APIDataSourceId) { + async function reloadDataset(datasetId: APIDataset["id"]) { await updateDatasetMutation.mutateAsync(datasetId); } - async function updateCachedDataset(id: APIDataSourceId, updater: DatasetUpdater) { + async function updateCachedDataset(id: APIDataset["id"], updater: DatasetUpdater) { return await updateDatasetMutation.mutateAsync([id, updater]); } diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx index 8222b713714..0159f264100 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx @@ -336,11 +336,7 @@ function SimpleLayerForm({ const startJobFn = dataset != null ? async () => { - const job = await startFindLargestSegmentIdJob( - dataset.name, - dataset.owningOrganization, - layer.name, - ); + const job = await startFindLargestSegmentIdJob(dataset.id, layer.name); Toast.info( "A job was scheduled to compute the largest segment ID. It will be automatically updated for the dataset. 
You may close this tab now.", ); diff --git a/frontend/javascripts/dashboard/dataset/queries.tsx b/frontend/javascripts/dashboard/dataset/queries.tsx index 7dc46ce3f59..87f9f145c1c 100644 --- a/frontend/javascripts/dashboard/dataset/queries.tsx +++ b/frontend/javascripts/dashboard/dataset/queries.tsx @@ -16,9 +16,8 @@ import { updateFolder, } from "admin/api/folders"; import Toast from "libs/toast"; -import React, { useEffect, useRef } from "react"; +import { useEffect, useRef } from "react"; import { - type APIDataSourceId, type APIDatasetCompact, type FlatFolderTreeItem, type Folder, @@ -57,7 +56,7 @@ export function useFolderQuery(folderId: string | null) { ); } -export function useDatasetQuery(datasetId: APIDataSourceId) { +export function useDatasetQuery(datasetId: APIDataset["id"]) { const queryKey = ["datasetById", datasetId]; return useQuery( queryKey, @@ -395,15 +394,15 @@ export function useUpdateDatasetMutation(folderId: string | null) { const mutationKey = ["datasetsByFolder", folderId]; return useMutation( - (params: [APIDataSourceId, DatasetUpdater] | APIDataSourceId) => { + (params: [APIDataset["id"], DatasetUpdater] | APIDataset["id"]) => { // If a APIDatasetId is provided, simply refetch the dataset // without any mutation so that it gets reloaded effectively. - if ("owningOrganization" in params) { - const datasetId = params; - return getDataset(datasetId); + if (Array.isArray(params)) { + const [id, updater] = params; + return updateDatasetPartial(id, updater); } - const [id, updater] = params; - return updateDatasetPartial(id, updater); + const datasetId = params; + return getDataset(datasetId); }, { mutationKey, @@ -421,12 +420,8 @@ export function useUpdateDatasetMutation(folderId: string | null) { }) .filter((dataset: APIDatasetCompact) => dataset.folderId === folderId), ); - const updatedDatasetId = { - name: updatedDataset.name, - owningOrganization: updatedDataset.owningOrganization, - }; // Also update the cached dataset under the key "datasetById". - queryClient.setQueryData(["datasetById", updatedDatasetId], updatedDataset); + queryClient.setQueryData(["datasetById", updatedDataset.id], updatedDataset); const targetFolderId = updatedDataset.folderId; if (targetFolderId !== folderId) { // The dataset was moved to another folder. Add the dataset to that target folder @@ -561,7 +556,7 @@ function getUnobtrusivelyUpdatedDatasets( * lastUsedByUser property, as this would change the ordering when the default sorting is used. */ - const idFn = (dataset: APIDatasetCompact) => `${dataset.owningOrganization}#${dataset.name}`; + const idFn = (dataset: APIDatasetCompact) => dataset.id; const newDatasetsById = _.keyBy(newDatasets, idFn); return oldDatasets.map((oldDataset) => { diff --git a/frontend/javascripts/dashboard/folders/details_sidebar.tsx b/frontend/javascripts/dashboard/folders/details_sidebar.tsx index ef656e04980..05eb4087c8e 100644 --- a/frontend/javascripts/dashboard/folders/details_sidebar.tsx +++ b/frontend/javascripts/dashboard/folders/details_sidebar.tsx @@ -90,12 +90,7 @@ function getMaybeSelectMessage(datasetCount: number) { function DatasetDetails({ selectedDataset }: { selectedDataset: APIDatasetCompact }) { const context = useDatasetCollectionContext(); - // exactDatasetId is needed to prevent refetching when some dataset property of selectedDataset was changed. 
- const exactDatasetId = { - owningOrganization: selectedDataset.owningOrganization, - name: selectedDataset.name, - }; - const { data: fullDataset, isFetching } = useDatasetQuery(exactDatasetId); + const { data: fullDataset, isFetching } = useDatasetQuery(selectedDataset.id); const activeUser = useSelector((state: OxalisState) => state.activeUser); const { data: owningOrganization } = useQuery( ["organizations", selectedDataset.owningOrganization], diff --git a/frontend/javascripts/dashboard/folders/folder_tree.tsx b/frontend/javascripts/dashboard/folders/folder_tree.tsx index 69d09fe02b0..f806c746b92 100644 --- a/frontend/javascripts/dashboard/folders/folder_tree.tsx +++ b/frontend/javascripts/dashboard/folders/folder_tree.tsx @@ -331,7 +331,7 @@ export function useDatasetDrop( let successCounter = 0; Promise.all( selectedDatasets.map((ds) => - context.queries.updateDatasetMutation.mutateAsync([ds, { folderId }]).then(() => { + context.queries.updateDatasetMutation.mutateAsync([ds.id, { folderId }]).then(() => { successCounter++; modal.update({ content: `Already moved ${successCounter} of ${selectedDatasets.length} datasets.`, @@ -358,7 +358,7 @@ export function useDatasetDrop( const dataset = context.datasets.find((ds) => ds.name === item.datasetName); if (dataset) { - context.queries.updateDatasetMutation.mutateAsync([dataset, { folderId }]); + context.queries.updateDatasetMutation.mutateAsync([dataset.id, { folderId }]); } else { Toast.error("Could not move dataset. Please try again."); } diff --git a/frontend/javascripts/dashboard/folders/metadata_table.tsx b/frontend/javascripts/dashboard/folders/metadata_table.tsx index 68d67c5ace0..2086ca4baef 100644 --- a/frontend/javascripts/dashboard/folders/metadata_table.tsx +++ b/frontend/javascripts/dashboard/folders/metadata_table.tsx @@ -163,7 +163,7 @@ const saveCurrentMetadata = async ( try { if (isADataset) { // In case of a dataset, update the dataset's metadata. - serverResponse = await context.updateCachedDataset(datasetOrFolderToUpdate, { + serverResponse = await context.updateCachedDataset(datasetOrFolderToUpdate.id, { metadata: metadataWithoutIndexAndError, }); } else { diff --git a/frontend/javascripts/dashboard/publication_card.tsx b/frontend/javascripts/dashboard/publication_card.tsx index 9d8b15ec44f..1274d00a7a7 100644 --- a/frontend/javascripts/dashboard/publication_card.tsx +++ b/frontend/javascripts/dashboard/publication_card.tsx @@ -69,7 +69,7 @@ function getExtendedDetails(item: PublicationItem): ExtendedDatasetDetails { function getUrl(item: PublicationItem): string { return item.type === PublicationItemType.ANNOTATION ? 
`/annotations/${item.annotation.id}` - : `/datasets/${item.dataset.owningOrganization}/${item.dataset.name}`; + : `/datasets/${item.dataset.id}`; } function ThumbnailOverlay({ details }: { details: ExtendedDatasetDetails }) { diff --git a/frontend/javascripts/libs/utils.ts b/frontend/javascripts/libs/utils.ts index 2a7af53dab6..3227219f9f3 100644 --- a/frontend/javascripts/libs/utils.ts +++ b/frontend/javascripts/libs/utils.ts @@ -1239,7 +1239,7 @@ export function notEmpty(value: TValue | null | undefined): value is TVa export function isNumberMap(x: Map): x is Map { const { value } = x.entries().next(); - return value && typeof value[0] === "number"; + return (value && typeof value[0] === "number") || false; } export function isBigInt(x: NumberLike): x is bigint { diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 515b20f0f31..265dba34058 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -121,6 +121,7 @@ const defaultState: OxalisState = { }, task: null, dataset: { + id: "dummy-dataset-id", name: "Loading", folderId: "dummy-folder-id", isUnreported: false, diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts index 3c6742cf6cf..8c6e6e3d232 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts @@ -574,8 +574,6 @@ export function getEnabledColorLayers( } export function getThumbnailURL(dataset: APIDataset): string { - const datasetName = dataset.name; - const organizationId = dataset.owningOrganization; const layers = dataset.dataSource.dataLayers; const colorLayer = _.find(layers, { @@ -583,18 +581,16 @@ export function getThumbnailURL(dataset: APIDataset): string { }); if (colorLayer) { - return `/api/datasets/${organizationId}/${datasetName}/layers/${colorLayer.name}/thumbnail`; + return `/api/datasets/${dataset.id}/layers/${colorLayer.name}/thumbnail`; } return ""; } export function getSegmentationThumbnailURL(dataset: APIDataset): string { - const datasetName = dataset.name; - const organizationId = dataset.owningOrganization; const segmentationLayer = getFirstSegmentationLayer(dataset); if (segmentationLayer) { - return `/api/datasets/${organizationId}/${datasetName}/layers/${segmentationLayer.name}/thumbnail`; + return `/api/datasets/${dataset.id}/layers/${segmentationLayer.name}/thumbnail`; } return ""; diff --git a/frontend/javascripts/oxalis/model/sagas/settings_saga.ts b/frontend/javascripts/oxalis/model/sagas/settings_saga.ts index da0666c79e3..b1698ed3c8c 100644 --- a/frontend/javascripts/oxalis/model/sagas/settings_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/settings_saga.ts @@ -40,7 +40,7 @@ function* pushDatasetSettingsAsync(originalDatasetSettings: DatasetConfiguration SETTINGS_MAX_RETRY_COUNT, SETTINGS_RETRY_DELAY, updateDatasetConfiguration, - dataset, + dataset.id, maybeMaskedDatasetConfiguration, ); } catch (error) { diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 8e43a9fcfcc..80ec43bf9f6 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -125,7 +125,7 @@ export async function initialize( > { Store.dispatch(setControlModeAction(initialCommandType.type)); let annotation: APIAnnotation | null | undefined; - let 
datasetId: APIDataSourceId; + let datasetId: APIDataset["id"]; if (initialCommandType.type === ControlModeEnum.TRACE) { const { annotationId } = initialCommandType; @@ -133,10 +133,7 @@ export async function initialize( initialMaybeCompoundType != null ? await getAnnotationCompoundInformation(annotationId, initialMaybeCompoundType) : await getAnnotationInformation(annotationId); - datasetId = { - name: annotation.dataSetName, - owningOrganization: annotation.organization, - }; + datasetId = annotation.datasetId; if (!annotation.restrictions.allowAccess) { Toast.error(messages["tracing.no_access"]); @@ -148,22 +145,14 @@ export async function initialize( }); Store.dispatch(setTaskAction(annotation.task)); } else if (initialCommandType.type === ControlModeEnum.SANDBOX) { - const { name, owningOrganization } = initialCommandType; - datasetId = { - name, - owningOrganization, - }; + datasetId = initialCommandType.datasetId; annotation = await getEmptySandboxAnnotationInformation( datasetId, initialCommandType.tracingType, getSharingTokenFromUrlParameters(), ); } else { - const { name, owningOrganization } = initialCommandType; - datasetId = { - name, - owningOrganization, - }; + datasetId = initialCommandType.datasetId; } const [dataset, initialUserSettings, serverTracings] = await fetchParallel( @@ -236,7 +225,7 @@ export async function initialize( async function fetchParallel( annotation: APIAnnotation | null | undefined, - datasetId: APIDataSourceId, + datasetId: APIDataset["id"], versions?: Versions, ): Promise<[APIDataset, UserConfiguration, Array]> { return Promise.all([ diff --git a/frontend/javascripts/oxalis/view/action-bar/create_animation_modal.tsx b/frontend/javascripts/oxalis/view/action-bar/create_animation_modal.tsx index 45f7e507c1e..92d0a8f1b73 100644 --- a/frontend/javascripts/oxalis/view/action-bar/create_animation_modal.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/create_animation_modal.tsx @@ -262,7 +262,7 @@ function CreateAnimationModal(props: Props) { ) return; - startRenderAnimationJob(state.dataset.owningOrganization, state.dataset.name, animationOptions); + startRenderAnimationJob(state.dataset.id, animationOptions); Toast.info( <> diff --git a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx index d832e045dbd..1d7074369e7 100644 --- a/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/download_modal_view.tsx @@ -364,8 +364,7 @@ function _DownloadModalView({ await Model.ensureSavedState(); await startJob(async () => { const job = await startExportTiffJob( - dataset.name, - dataset.owningOrganization, + dataset.id, computeArrayFromBoundingBox(selectedBoundingBox.boundingBox), selectedLayerInfos.layerName, mag.join("-"), diff --git a/frontend/javascripts/oxalis/view/action-bar/private_links_view.tsx b/frontend/javascripts/oxalis/view/action-bar/private_links_view.tsx index 98ab25bcce6..17459a7ba23 100644 --- a/frontend/javascripts/oxalis/view/action-bar/private_links_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/private_links_view.tsx @@ -146,7 +146,7 @@ export function useZarrLinkMenu(maybeAccessToken: string | null) { const baseUrl = maybeAccessToken ? 
`${dataStoreURL}/data/annotations/zarr/${maybeAccessToken}` - : `${dataStoreURL}/data/zarr/${dataset.owningOrganization}/${dataset.name}`; + : `${dataStoreURL}/data/zarr/${dataset.owningOrganization}/${dataset.path}`; const copyTokenToClipboard = async ({ key: layerName }: { key: string }) => { await navigator.clipboard.writeText(`${baseUrl}/${layerName}`); diff --git a/frontend/javascripts/oxalis/view/action-bar/share_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/share_modal_view.tsx index 7649a56ada1..d2c7f16d8bf 100644 --- a/frontend/javascripts/oxalis/view/action-bar/share_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/share_modal_view.tsx @@ -95,7 +95,7 @@ export function useDatasetSharingToken(dataset: APIDataset) { return; } try { - const sharingToken = await getDatasetSharingToken(dataset, { + const sharingToken = await getDatasetSharingToken(dataset.id, { doNotInvestigate: true, }); setDatasetToken(sharingToken); diff --git a/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx b/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx index 9f4428b504e..7aa2615ce5d 100644 --- a/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx @@ -666,12 +666,7 @@ export function NucleiDetectionForm() { title="AI Nuclei Segmentation" suggestedDatasetSuffix="with_nuclei" jobApiCall={async ({ newDatasetName, selectedLayer: colorLayer }) => - startNucleiInferralJob( - dataset.owningOrganization, - dataset.name, - colorLayer.name, - newDatasetName, - ) + startNucleiInferralJob(dataset.id, colorLayer.name, newDatasetName) } description={ <> @@ -710,13 +705,7 @@ export function NeuronSegmentationForm() { } const bbox = computeArrayFromBoundingBox(selectedBoundingBox.boundingBox); - return startNeuronInferralJob( - dataset.owningOrganization, - dataset.name, - colorLayer.name, - bbox, - newDatasetName, - ); + return startNeuronInferralJob(dataset.id, colorLayer.name, bbox, newDatasetName); }} description={ <> @@ -752,13 +741,7 @@ export function MitochondriaSegmentationForm() { } const bbox = computeArrayFromBoundingBox(selectedBoundingBox.boundingBox); - return startMitochondriaInferralJob( - dataset.owningOrganization, - dataset.name, - colorLayer.name, - bbox, - newDatasetName, - ); + return startMitochondriaInferralJob(dataset.id, colorLayer.name, bbox, newDatasetName); }} description={ <> @@ -865,13 +848,7 @@ export function AlignSectionsForm() { isBoundingBoxConfigurable={false} isSkeletonSelectable={true} jobApiCall={async ({ newDatasetName, selectedLayer: colorLayer, annotationId }) => - startAlignSectionsJob( - dataset.owningOrganization, - dataset.name, - colorLayer.name, - newDatasetName, - annotationId, - ) + startAlignSectionsJob(dataset.id, colorLayer.name, newDatasetName, annotationId) } description={ @@ -977,8 +954,7 @@ export function MaterializeVolumeAnnotationModal({ : null; const baseSegmentationName = getBaseSegmentationName(segmentationLayer); return startMaterializingVolumeAnnotationJob( - dataset.owningOrganization, - dataset.name, + dataset.id, baseSegmentationName, volumeLayerName, newDatasetName, diff --git a/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.tsx b/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.tsx index 0ddbca53318..a73f91a2e55 100644 --- a/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.tsx +++ 
b/frontend/javascripts/oxalis/view/action-bar/tracing_actions_view.tsx @@ -354,7 +354,7 @@ class TracingActionsView extends React.PureComponent { // volume tracings const fallbackLayer = sandboxTracing.volumes.length > 0 ? sandboxTracing.volumes[0].fallbackLayer : null; - const newAnnotation = await createExplorational(dataset, tracingType, false, fallbackLayer); + const newAnnotation = await createExplorational(dataset.id, tracingType, false, fallbackLayer); UrlManager.changeBaseUrl(`/annotations/${newAnnotation.typ}/${newAnnotation.id}`); await api.tracing.restart(null, newAnnotation.id, ControlModeEnum.TRACE, undefined, true); const sandboxSkeletonTracing = enforceSkeletonTracing(sandboxTracing); diff --git a/frontend/javascripts/oxalis/view/action_bar_view.tsx b/frontend/javascripts/oxalis/view/action_bar_view.tsx index e842fdd10d9..24fdfad3473 100644 --- a/frontend/javascripts/oxalis/view/action_bar_view.tsx +++ b/frontend/javascripts/oxalis/view/action_bar_view.tsx @@ -189,7 +189,7 @@ class ActionBarView extends React.PureComponent { } const annotation = await createExplorational( - dataset, + dataset.id, "hybrid", false, fallbackLayerName, diff --git a/frontend/javascripts/oxalis/view/largest_segment_id_modal.tsx b/frontend/javascripts/oxalis/view/largest_segment_id_modal.tsx index 988a0811123..9aa8f3e0bb7 100644 --- a/frontend/javascripts/oxalis/view/largest_segment_id_modal.tsx +++ b/frontend/javascripts/oxalis/view/largest_segment_id_modal.tsx @@ -76,11 +76,7 @@ export default function EnterLargestSegmentIdModal({ const editString = "edit the same property in the dataset"; const editLinkOrText = mayUserEditDataset(activeUser, dataset) ? ( - + {editString} ) : ( diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 757db35bf95..d57d7e43e48 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -524,11 +524,7 @@ class DatasetSettings extends React.PureComponent { if (!(this.props.isSuperUser && isSegmentation)) return <>; const triggerComputeSegmentIndexFileJob = async () => { - await startComputeSegmentIndexFileJob( - this.props.dataset.owningOrganization, - this.props.dataset.name, - layerName, - ); + await startComputeSegmentIndexFileJob(this.props.dataset.id, layerName); Toast.info( Started a job for computating a segment index file. 
@@ -1401,7 +1397,7 @@ class DatasetSettings extends React.PureComponent { ...completeDatasetConfiguration, layers: updatedLayers, }; - await updateDatasetDefaultConfiguration(dataset, updatedConfiguration); + await updateDatasetDefaultConfiguration(dataset.id, updatedConfiguration); Toast.success("Successfully saved the current view configuration as default."); } catch (error) { Toast.error( diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx index 269ed36697b..f5ceed8e59f 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/dataset_info_tab_view.tsx @@ -312,19 +312,16 @@ export class DatasetInfoTabView extends React.PureComponent { getDatasetName() { const { + id: datasetId, name: datasetName, description: datasetDescription, - owningOrganization, } = this.props.dataset; const { activeUser } = this.props; const getEditSettingsIcon = () => mayUserEditDataset(activeUser, this.props.dataset) ? ( - + @@ -362,7 +359,7 @@ export class DatasetInfoTabView extends React.PureComponent {
Dataset {getEditSettingsIcon()}
{ : undefined; const job = await startComputeMeshFileJob( - this.props.organization, - this.props.datasetName, + this.props.dataset.id, getBaseSegmentationName(this.props.visibleSegmentationLayer), meshfileResolution, maybeMappingName, diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx index 2201d36a5c5..8d994dee2f6 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segments_view_helper.tsx @@ -47,7 +47,7 @@ export function getVolumeRequestUrl( visibleSegmentationLayer: APISegmentationLayer | APIDataLayer, ) { if (tracing == null || tracingId == null) { - return `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.name}/layers/${visibleSegmentationLayer.name}`; + return `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.path}/layers/${visibleSegmentationLayer.name}`; } else { const tracingStoreHost = tracing?.tracingStore.url; return `${tracingStoreHost}/tracings/volume/${tracingId}`; diff --git a/frontend/javascripts/router.tsx b/frontend/javascripts/router.tsx index 72618cb3c8b..01e40aa6f4f 100644 --- a/frontend/javascripts/router.tsx +++ b/frontend/javascripts/router.tsx @@ -186,7 +186,7 @@ class ReactRouter extends React.Component { tracingViewModeLegacy = ({ match, location }: ContextRouter) => ( { - const datasetName = match.params.id || ""; + const datasetName = match.params.datasetName || ""; const organizationId = match.params.organizationId || ""; const datasetId = await getDatasetIdFromNameAndOrganization(datasetName, organizationId); return `/datasets/${datasetId}/view${location.search}${location.hash}`; @@ -614,18 +614,6 @@ class ReactRouter extends React.Component { render={this.tracingViewModeLegacy} /> - ( - { - const datasetName = match.params.id || ""; - const organizationId = await getOrganizationForDataset(datasetName); - return `/datasets/${organizationId}/${datasetName}/view${location.search}${location.hash}`; - }} - /> - )} - /> {/*maybe this also needs a legacy route?*/} { console.error("Unhandled rejection (promise: ", promise, ", reason: ", err, ")."); }); diff --git a/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts index ab2e5f0856f..b81326d2a7b 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts @@ -46,13 +46,13 @@ test("getDatasetAccessList", async (t) => { test("updateDatasetTeams", async (t) => { const [dataset, newTeams] = await Promise.all([getFirstDataset(), api.getEditableTeams()]); const updatedDataset = await api.updateDatasetTeams( - dataset, + dataset.id, newTeams.map((team) => team.id), ); t.snapshot(updatedDataset); // undo the Change await api.updateDatasetTeams( - dataset, + dataset.id, dataset.allowedTeams.map((team) => team.id), ); }); // test("getDatasetSharingToken and revokeDatasetSharingToken", async t => { diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 691f1b876d5..7bb8f31c247 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -156,6 
+156,7 @@ export const tracing: ServerSkeletonTracing = { }; export const annotation: APIAnnotation = { description: "", + datasetId: "id", // TODO: check whether this is correct. state: "Active", id: "598b52293c00009906f043e7", visibility: "Internal", diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 71a3e9f6896..6a85cbf6fb2 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -62,6 +62,7 @@ export const tracing: ServerSkeletonTracing = { id: "e90133de-b2db-4912-8261-8b6f84f7edab", }; export const annotation: APIAnnotation = { + datasetId: "id", modified: 1529066010230, state: "Active", id: "5b1fd1cf97000027049c67ee", diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index d13a74939d3..8d1d5baae13 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -67,6 +67,7 @@ export const tracing: ServerVolumeTracing = { ], }; export const annotation: APIAnnotation = { + datasetId: "id", description: "", state: "Active", id: "598b52293c00009906f043e7", diff --git a/frontend/javascripts/test/puppeteer/dataset_rendering.screenshot.ts b/frontend/javascripts/test/puppeteer/dataset_rendering.screenshot.ts index a3f260b002a..ca0e0f45c4d 100644 --- a/frontend/javascripts/test/puppeteer/dataset_rendering.screenshot.ts +++ b/frontend/javascripts/test/puppeteer/dataset_rendering.screenshot.ts @@ -15,6 +15,7 @@ import { WK_AUTH_TOKEN, checkBrowserstackCredentials, } from "./dataset_rendering_helpers"; +import { getDatasetIdFromNameAndOrganization } from "admin/api/disambiguate_legacy_routes"; if (!WK_AUTH_TOKEN) { throw new Error("No WK_AUTH_TOKEN specified."); @@ -106,19 +107,38 @@ const datasetConfigOverrides: Record = { }, }; +const datasetNameToId: Record = {}; +datasetNames.map(async (datasetName) => { + test.serial(`it should render dataset ${datasetName} correctly`, async (t) => { + await withRetry( + 3, + async () => { + const response = await fetch( + `${URL}/api/datasets/disambiguate/sample_organization/${datasetName}/toId`, + ); + const { datasetId } = await response.json(); + datasetNameToId[datasetName] = datasetId; + return true; + }, + (condition) => { + t.true( + condition, + `Dataset with name: "${datasetName}" does not look the same, see ${datasetName}.diff.png for the difference and ${datasetName}.new.png for the new screenshot.`, + ); + }, + ); + }); +}); + datasetNames.map(async (datasetName) => { test.serial(`it should render dataset ${datasetName} correctly`, async (t) => { await withRetry( 3, async () => { - const datasetId = { - name: datasetName, - owningOrganization: "sample_organization", - }; const { screenshot, width, height } = await screenshotDataset( await getNewPage(t.context.browser), URL, - datasetId, + datasetNameToId[datasetName], viewOverrides[datasetName], datasetConfigOverrides[datasetName], ); @@ -153,14 +173,10 @@ annotationSpecs.map(async (annotationSpec) => { await withRetry( 3, async () => { - const datasetId = { - name: datasetName, - owningOrganization: "sample_organization", - }; const { screenshot, width, height } = await screenshotAnnotation( await getNewPage(t.context.browser), URL, - datasetId, + datasetNameToId[datasetName], fallbackLayerName, 
viewOverrides[datasetName], datasetConfigOverrides[datasetName], @@ -191,14 +207,10 @@ test.serial("it should render a dataset with mappings correctly", async (t) => { await withRetry( 3, async () => { - const datasetId = { - name: datasetName, - owningOrganization: "sample_organization", - }; const { screenshot, width, height } = await screenshotDatasetWithMapping( await getNewPage(t.context.browser), URL, - datasetId, + datasetNameToId[datasetName], mappingName, ); const changedPixels = await compareScreenshot( @@ -226,14 +238,10 @@ test.serial( await withRetry( 3, async () => { - const datasetId = { - name: datasetName, - owningOrganization: "sample_organization", - }; const { screenshot, width, height } = await screenshotDatasetWithMappingLink( await getNewPage(t.context.browser), URL, - datasetId, + datasetNameToId[datasetName], viewOverride, ); const changedPixels = await compareScreenshot( @@ -262,14 +270,10 @@ test.serial( await withRetry( 3, async () => { - const datasetId = { - name: datasetName, - owningOrganization: "sample_organization", - }; const { screenshot, width, height } = await screenshotSandboxWithMappingLink( await getNewPage(t.context.browser), URL, - datasetId, + datasetNameToId[datasetName], viewOverride, ); const changedPixels = await compareScreenshot( @@ -298,14 +302,10 @@ test.serial( await withRetry( 3, async () => { - const datasetId = { - name: datasetName, - owningOrganization: "sample_organization", - }; const { screenshot, width, height } = await screenshotDataset( await getNewPage(t.context.browser), URL, - datasetId, + datasetNameToId[datasetName], viewOverride, ); const changedPixels = await compareScreenshot( diff --git a/frontend/javascripts/test/puppeteer/dataset_rendering.wkorg_screenshot.ts b/frontend/javascripts/test/puppeteer/dataset_rendering.wkorg_screenshot.ts index f2bc654e916..fd504eab7e1 100644 --- a/frontend/javascripts/test/puppeteer/dataset_rendering.wkorg_screenshot.ts +++ b/frontend/javascripts/test/puppeteer/dataset_rendering.wkorg_screenshot.ts @@ -45,10 +45,10 @@ test.serial(`it should render dataset ${demoDatasetName} correctly`, async (t) = await withRetry( 3, async () => { - const datasetId = { - name: demoDatasetName, - owningOrganization, - }; + const response = await fetch( + `${URL}/api/datasets/disambiguate/${owningOrganization}/${demoDatasetName}/toId`, + ); + const { datasetId } = await response.json(); const { screenshot, width, height } = await screenshotDatasetView( await getNewPage(t.context.browser), URL, diff --git a/frontend/javascripts/test/puppeteer/dataset_rendering_helpers.ts b/frontend/javascripts/test/puppeteer/dataset_rendering_helpers.ts index 20f8a88a4bd..17446c4d085 100644 --- a/frontend/javascripts/test/puppeteer/dataset_rendering_helpers.ts +++ b/frontend/javascripts/test/puppeteer/dataset_rendering_helpers.ts @@ -10,7 +10,7 @@ import mergeImg from "merge-img"; import pixelmatch from "pixelmatch"; import type { RequestOptions } from "libs/request"; import { bufferToPng, isPixelEquivalent } from "./screenshot_helpers"; -import type { APIDataSourceId } from "../../types/api_flow_types"; +import type { APIDataset, APIDataSourceId } from "../../types/api_flow_types"; import { createExplorational, updateDatasetConfiguration } from "../../admin/admin_rest_api"; import puppeteer from "puppeteer"; import { sleep } from "libs/utils"; @@ -39,7 +39,7 @@ function getDefaultRequestOptions(baseUrl: string): RequestOptions { export async function screenshotDataset( page: Page, baseUrl: string, - datasetId: 
APIDataSourceId, + datasetId: APIDataset["id"], optionalViewOverride?: string | null | undefined, optionalDatasetConfigOverride?: PartialDatasetConfiguration | null | undefined, ): Promise { @@ -57,7 +57,7 @@ export async function screenshotDataset( export async function screenshotAnnotation( page: Page, baseUrl: string, - datasetId: APIDataSourceId, + datasetId: APIDataset["id"], fallbackLayerName: string | null, optionalViewOverride?: string | null | undefined, optionalDatasetConfigOverride?: PartialDatasetConfiguration | null | undefined, @@ -76,7 +76,7 @@ export async function screenshotAnnotation( async function _screenshotAnnotationHelper( page: Page, baseUrl: string, - datasetId: APIDataSourceId, + datasetId: APIDataset["id"], typ: "skeleton" | "volume" | "hybrid", fallbackLayerName: string | null, optionalViewOverride?: string | null | undefined, @@ -104,10 +104,10 @@ async function _screenshotAnnotationHelper( export async function screenshotDatasetView( page: Page, baseUrl: string, - datasetId: APIDataSourceId, + datasetId: APIDataset["id"], optionalViewOverride?: string | null | undefined, ): Promise { - const url = `${baseUrl}/datasets/${datasetId.owningOrganization}/${datasetId.name}`; + const url = `${baseUrl}/datasets/${datasetId}`; await openDatasetView(page, url, optionalViewOverride); return screenshotTracingView(page); @@ -116,7 +116,7 @@ export async function screenshotDatasetView( export async function screenshotDatasetWithMapping( page: Page, baseUrl: string, - datasetId: APIDataSourceId, + datasetId: APIDataset["id"], mappingName: string, ): Promise { const options = getDefaultRequestOptions(baseUrl); @@ -139,7 +139,7 @@ export async function screenshotDatasetWithMapping( export async function screenshotDatasetWithMappingLink( page: Page, baseUrl: string, - datasetId: APIDataSourceId, + datasetId: APIDataset["id"], optionalViewOverride: string | null | undefined, ): Promise { const options = getDefaultRequestOptions(baseUrl); @@ -159,7 +159,7 @@ export async function screenshotDatasetWithMappingLink( export async function screenshotSandboxWithMappingLink( page: Page, baseUrl: string, - datasetId: APIDataSourceId, + datasetId: APIDataset["id"], optionalViewOverride: string | null | undefined, ): Promise { await openSandboxView(page, baseUrl, datasetId, optionalViewOverride); @@ -265,14 +265,11 @@ async function openDatasetView( async function openSandboxView( page: Page, baseUrl: string, - datasetId: APIDataSourceId, + datasetId: APIDataset["id"], optionalViewOverride: string | null | undefined, ) { const urlSlug = optionalViewOverride != null ? `#${optionalViewOverride}` : ""; - const url = urljoin( - baseUrl, - `/datasets/${datasetId.owningOrganization}/${datasetId.name}/sandbox/skeleton${urlSlug}`, - ); + const url = urljoin(baseUrl, `/datasets/${datasetId}/sandbox/skeleton${urlSlug}`); console.log(`Opening sandbox annotation view at ${url}`); await page.goto(url, { timeout: 0, diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 65e0cac0d4b..dc4a3b00a39 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -238,7 +238,7 @@ export type APIDatasetCompactWithoutStatusAndLayerNames = Pick< | "isUnreported" >; export type APIDatasetCompact = APIDatasetCompactWithoutStatusAndLayerNames & { - id?: string; + id: string; // Open question: Why was this optional?, The backend code clearly always returns an id ... 
:thinking: status: MutableAPIDataSourceBase["status"]; colorLayerNames: Array; segmentationLayerNames: Array; @@ -251,6 +251,7 @@ export function convertDatasetToCompact(dataset: APIDataset): APIDatasetCompact ).map((layers) => layers.map((layer) => layer.name).sort()); return { + id: dataset.id, owningOrganization: dataset.owningOrganization, name: dataset.name, folderId: dataset.folderId, @@ -465,6 +466,7 @@ export type EditableLayerProperties = Partial<{ }>; export type APIAnnotationInfo = { readonly annotationLayers: Array; + readonly datasetId: APIDataset["id"]; readonly dataSetName: string; readonly organization: string; readonly description: string; @@ -491,6 +493,7 @@ export function annotationToCompact(annotation: APIAnnotation): APIAnnotationInf description, modified, id, + datasetId, name, state, isLockedByOwner, @@ -504,6 +507,7 @@ export function annotationToCompact(annotation: APIAnnotation): APIAnnotationInf } = annotation; return { + datasetId, annotationLayers, dataSetName, organization, @@ -696,6 +700,7 @@ export enum APIJobType { export type APIJob = { readonly id: string; + readonly datasetId: string | null | undefined; // TODO: Adjust worker accordingly readonly datasetName: string | null | undefined; readonly exportFileName: string | null | undefined; readonly layerName: string | null | undefined; diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala index 9fb5634c1ef..51699cef324 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataSource.scala @@ -17,9 +17,9 @@ package object datasource { object DataSourceId extends JsonImplicits with DatasetURIParser { implicit object DataSourceIdFormat extends Format[DataSourceId] { override def reads(json: JsValue): JsResult[DataSourceId] = - (json \ "path").validate[String] flatMap { path => - (json \ "organization").validate[String].map { org => - DataSourceId(path, org) + (json \ "name").validate[String] flatMap { nameRenamedToPath => + (json \ "team").validate[String].map { teamRenamedToOrganization => + DataSourceId(nameRenamedToPath, teamRenamedToOrganization) } } From 62bbd756550336fb72c5a4cd4b78c6a70335b34b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 25 Sep 2024 10:01:18 +0200 Subject: [PATCH 011/129] Try update schema and evolution --- .../121-decouple-dataset-directory-from-name.sql | 10 ++++++++++ tools/postgres/schema.sql | 4 ++-- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/conf/evolutions/121-decouple-dataset-directory-from-name.sql b/conf/evolutions/121-decouple-dataset-directory-from-name.sql index 3cebe517504..653df6a7edc 100644 --- a/conf/evolutions/121-decouple-dataset-directory-from-name.sql +++ b/conf/evolutions/121-decouple-dataset-directory-from-name.sql @@ -2,11 +2,21 @@ START TRANSACTION; do $$ begin ASSERT (select schemaVersion from webknossos.releaseInformation) = 120, 'Previous schema version mismatch'; end; $$ LANGUAGE plpgsql; +DROP VIEW IF EXISTS webknossos.datasets_; + UPDATE webknossos.datasets SET displayName = name WHERE displayName IS NULL; ALTER TABLE webknossos.datasets RENAME COLUMN name TO path; ALTER TABLE webknossos.datasets RENAME COLUMN displayName TO name; ALTER TABLE webknossos.datasets ALTER COLUMN name SET NOT NULL; 
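-- After the two RENAMEs above, `path` holds the directory name on disk (the former `name`
-- column) and `name` holds the user-facing name (the former `displayName`, backfilled from
-- `name` where it was NULL). That is why the uniqueness constraint and index below move
-- from (name, _organization) to (path, _organization).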
+ALTER TABLE webknossos.datasets DROP CONSTRAINT IF EXISTS datasets_name__organization_key; +ALTER TABLE webknossos.datasets ADD CONSTRAINT datasets_path__organization_key UNIQUE(path, _organization); +-- TODO +-- CREATE INDEX ON webknossos.datasets(name); +-- DROP INDEX datasets_name_idx; +-- CREATE INDEX ON webknossos.datasets(path); + +CREATE VIEW webknossos.datasets_ AS SELECT * FROM webknossos.datasets WHERE NOT isDeleted; UPDATE webknossos.releaseInformation SET schemaVersion = 121; diff --git a/tools/postgres/schema.sql b/tools/postgres/schema.sql index 0e6b2de8e8a..a9fddc9f1ed 100644 --- a/tools/postgres/schema.sql +++ b/tools/postgres/schema.sql @@ -20,7 +20,7 @@ CREATE TABLE webknossos.releaseInformation ( schemaVersion BIGINT NOT NULL ); -INSERT INTO webknossos.releaseInformation(schemaVersion) values(120); +INSERT INTO webknossos.releaseInformation(schemaVersion) values(121); COMMIT TRANSACTION; @@ -741,7 +741,7 @@ CREATE INDEX ON webknossos.annotations(typ, state, isDeleted); CREATE INDEX ON webknossos.annotations(_user, _task, isDeleted); CREATE INDEX ON webknossos.annotations(_task, typ, isDeleted); CREATE INDEX ON webknossos.annotations(typ, isDeleted); -CREATE INDEX ON webknossos.datasets(name); +CREATE INDEX ON webknossos.datasets(path); CREATE INDEX ON webknossos.datasets(_folder); CREATE INDEX ON webknossos.tasks(_project); CREATE INDEX ON webknossos.tasks(isDeleted); From f74a7f19a47db2721d396492d6a225db9bee2fc5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 25 Sep 2024 12:29:51 +0200 Subject: [PATCH 012/129] fix evolution & add first version of reversion (needs to be tested) --- ...1-decouple-dataset-directory-from-name.sql | 6 ++---- ...1-decouple-dataset-directory-from-name.sql | 20 +++++++++++++++++++ 2 files changed, 22 insertions(+), 4 deletions(-) create mode 100644 conf/evolutions/reversions/121-decouple-dataset-directory-from-name.sql diff --git a/conf/evolutions/121-decouple-dataset-directory-from-name.sql b/conf/evolutions/121-decouple-dataset-directory-from-name.sql index 653df6a7edc..28cd310f737 100644 --- a/conf/evolutions/121-decouple-dataset-directory-from-name.sql +++ b/conf/evolutions/121-decouple-dataset-directory-from-name.sql @@ -11,10 +11,8 @@ ALTER TABLE webknossos.datasets ALTER COLUMN name SET NOT NULL; ALTER TABLE webknossos.datasets DROP CONSTRAINT IF EXISTS datasets_name__organization_key; ALTER TABLE webknossos.datasets ADD CONSTRAINT datasets_path__organization_key UNIQUE(path, _organization); --- TODO --- CREATE INDEX ON webknossos.datasets(name); --- DROP INDEX datasets_name_idx; --- CREATE INDEX ON webknossos.datasets(path); +DROP INDEX webknossos.datasets_name_idx; +CREATE INDEX ON webknossos.datasets(path); CREATE VIEW webknossos.datasets_ AS SELECT * FROM webknossos.datasets WHERE NOT isDeleted; diff --git a/conf/evolutions/reversions/121-decouple-dataset-directory-from-name.sql b/conf/evolutions/reversions/121-decouple-dataset-directory-from-name.sql new file mode 100644 index 00000000000..7016a7aea0a --- /dev/null +++ b/conf/evolutions/reversions/121-decouple-dataset-directory-from-name.sql @@ -0,0 +1,20 @@ +START TRANSACTION; + +do $$ begin ASSERT (select schemaVersion from webknossos.releaseInformation) = 121, 'Previous schema version mismatch'; end; $$ LANGUAGE plpgsql; + +DROP VIEW IF EXISTS webknossos.datasets_; + +ALTER TABLE webknossos.datasets RENAME COLUMN name TO displayName; +ALTER TABLE webknossos.datasets RENAME COLUMN path TO name; +ALTER TABLE webknossos.datasets ALTER COLUMN 
displayName DROP NOT NULL; + +ALTER TABLE webknossos.datasets DROP CONSTRAINT IF EXISTS datasets_path__organization_key; +ALTER TABLE webknossos.datasets ADD CONSTRAINT datasets_name__organization_key UNIQUE(name, _organization); +DROP INDEX webknossos.datasets_path_idx; +CREATE INDEX ON webknossos.datasets(name); + +CREATE VIEW webknossos.datasets_ AS SELECT * FROM webknossos.datasets WHERE NOT isDeleted; + +UPDATE webknossos.releaseInformation SET schemaVersion = 120; + +COMMIT TRANSACTION; From 3a203a802233244e1a28c6b14a5becfa9a6db630 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 25 Sep 2024 15:43:57 +0200 Subject: [PATCH 013/129] fix frontend tests --- .../test/fixtures/dataset_server_object.ts | 40 +++++++++++++++++-- .../skeletontracing_server_objects.ts | 2 +- .../fixtures/tasktracing_server_objects.ts | 2 +- .../fixtures/volumetracing_server_objects.ts | 2 +- .../javascripts/test/helpers/apiHelpers.ts | 3 +- frontend/javascripts/test/model/model.spec.ts | 28 ++++++------- 6 files changed, 54 insertions(+), 23 deletions(-) diff --git a/frontend/javascripts/test/fixtures/dataset_server_object.ts b/frontend/javascripts/test/fixtures/dataset_server_object.ts index 3a392dc7871..c1af74f1e34 100644 --- a/frontend/javascripts/test/fixtures/dataset_server_object.ts +++ b/frontend/javascripts/test/fixtures/dataset_server_object.ts @@ -1,6 +1,8 @@ import { UnitLong } from "oxalis/constants"; +import type { APIDataset } from "types/api_flow_types"; -export default { +const apiDataset: APIDataset = { + id: "66f3c82966010034942e9740", name: "ROI2017_wkw", dataSource: { id: { @@ -26,6 +28,7 @@ export default { [16, 16, 16], ], elementClass: "uint8", + additionalAxes: [], }, { name: "segmentation", @@ -54,7 +57,8 @@ export default { "mitochondria", "astrocyte-full", ], - tracingId: null, + tracingId: undefined, + additionalAxes: [], }, ], scale: { factor: [11.239999771118164, 11.239999771118164, 28], unit: UnitLong.nm }, @@ -62,13 +66,41 @@ export default { dataStore: { name: "localhost", url: "http://localhost:9000", - typ: "webknossos-store", + isScratch: false, + allowsUpload: true, + jobsSupportedByAvailableWorkers: [], + jobsEnabled: false, }, owningOrganization: "Connectomics department", - allowedTeams: ["Connectomics department"], + allowedTeams: [ + { + id: "5b1e45f9a00000a000abc2c3", + name: "Connectomics department", + organization: "Connectomics department", + }, + ], + allowedTeamsCumulative: [ + { + id: "5b1e45f9a00000a000abc2c3", + name: "Connectomics department", + organization: "Connectomics department", + }, + ], isActive: true, isPublic: false, description: null, created: 1502288550432, isEditable: true, + path: "ROI2017_wkw", + isUnreported: false, + tags: [], + folderId: "66f3c82466010002752e972c", + metadata: [], + logoUrl: "/assets/images/logo.svg", + lastUsedByUser: 1727268949322, + sortingKey: 1727252521746, + publication: null, + usedStorageBytes: 0, }; + +export default apiDataset; diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index 7bb8f31c247..4dacaf35406 100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -156,7 +156,7 @@ export const tracing: ServerSkeletonTracing = { }; export const annotation: APIAnnotation = { description: "", - datasetId: "id", // TODO: check whether this is correct. 
+ datasetId: "66f3c82966010034942e9740", state: "Active", id: "598b52293c00009906f043e7", visibility: "Internal", diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index 6a85cbf6fb2..071dbc1b92e 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -62,7 +62,7 @@ export const tracing: ServerSkeletonTracing = { id: "e90133de-b2db-4912-8261-8b6f84f7edab", }; export const annotation: APIAnnotation = { - datasetId: "id", + datasetId: "66f3c82966010034942e9740", modified: 1529066010230, state: "Active", id: "5b1fd1cf97000027049c67ee", diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index 8d1d5baae13..d5361eb30c3 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -67,7 +67,7 @@ export const tracing: ServerVolumeTracing = { ], }; export const annotation: APIAnnotation = { - datasetId: "id", + datasetId: "66f3c82966010034942e9740", description: "", state: "Active", id: "598b52293c00009906f043e7", diff --git a/frontend/javascripts/test/helpers/apiHelpers.ts b/frontend/javascripts/test/helpers/apiHelpers.ts index caea93554fd..8de61c806bf 100644 --- a/frontend/javascripts/test/helpers/apiHelpers.ts +++ b/frontend/javascripts/test/helpers/apiHelpers.ts @@ -171,7 +171,6 @@ export function __setupOxalis( }; t.context.setSlowCompression = setSlowCompression; const webknossos = new OxalisApi(Model); - const organizationId = "Connectomics Department"; const ANNOTATION = modelData[mode].annotation; Request.receiveJSON .withArgs( @@ -194,7 +193,7 @@ export function __setupOxalis( ) .returns(Promise.resolve({})); Request.receiveJSON - .withArgs(`/api/datasets/${organizationId}/${ANNOTATION.dataSetName}`) // Right now, initializeDataset() in model_initialization mutates the dataset to add a new + .withArgs(`/api/datasets/${ANNOTATION.datasetId}`) // Right now, initializeDataset() in model_initialization mutates the dataset to add a new // volume layer. Since this mutation should be isolated between different tests, we have to make // sure that each receiveJSON call returns its own clone. Without the following "onCall" line, // each __setupOxalis call would overwrite the current stub to receiveJSON. 
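The fixture and stub changes above, like the screenshot tests further down, all follow the same addressing switch: a dataset is now fetched via `/api/datasets/<datasetId>`, and legacy organization/name pairs are first resolved to an id through the disambiguation route. A minimal sketch of that pattern, assuming a generic `fetchJson` helper (the function names here are illustrative, not helpers from this codebase):

type DatasetId = string; // e.g. "66f3c82966010034942e9740"

declare function fetchJson(url: string): Promise<any>; // stand-in for the project's request helper

// New scheme: a dataset is addressed by its id alone.
async function fetchDataset(datasetId: DatasetId): Promise<any> {
  return fetchJson(`/api/datasets/${datasetId}`);
}

// Legacy organization/name pairs are resolved to an id first.
async function resolveLegacyAddress(organizationId: string, datasetName: string): Promise<DatasetId> {
  const { datasetId } = await fetchJson(
    `/api/datasets/disambiguate/${organizationId}/${datasetName}/toId`,
  );
  return datasetId;
}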
diff --git a/frontend/javascripts/test/model/model.spec.ts b/frontend/javascripts/test/model/model.spec.ts index a59cc4127a7..3f273988964 100644 --- a/frontend/javascripts/test/model/model.spec.ts +++ b/frontend/javascripts/test/model/model.spec.ts @@ -1,18 +1,22 @@ -// @ts-nocheck import _ from "lodash"; import "test/mocks/lz4"; import mockRequire from "mock-require"; import sinon from "sinon"; -import test from "ava"; +import anyTest, { type TestFn } from "ava"; import { ControlModeEnum } from "oxalis/constants"; import { tracing as TRACING, annotation as ANNOTATION, } from "../fixtures/skeletontracing_server_objects"; import DATASET from "../fixtures/dataset_server_object"; +import type { OxalisModel } from "oxalis/model"; + +const test = anyTest as TestFn<{ model: OxalisModel }>; function makeModelMock() { - class ModelMock {} + class ModelMock { + fetch = sinon.stub(); + } ModelMock.prototype.fetch = sinon.stub(); ModelMock.prototype.fetch.returns(Promise.resolve()); @@ -45,7 +49,7 @@ mockRequire("oxalis/model/bucket_data_handling/wkstore_adapter", {}); // Avoid node caching and make sure all mockRequires are applied const Model = mockRequire.reRequire("../../oxalis/model").OxalisModel; const { HANDLED_ERROR } = mockRequire.reRequire("../../oxalis/model_initialization"); -const ANNOTATION_TYPE = "annotationTypeValue"; +const ANNOTATION_TYPE = null; const ANNOTATION_ID = "annotationIdValue"; test.beforeEach((t) => { const model = new Model(); @@ -56,9 +60,6 @@ test.beforeEach((t) => { Request.receiveJSON .withArgs(`/api/annotations/${ANNOTATION_ID}/info?timestamp=${Date.now()}`) .returns(Promise.resolve(_.cloneDeep(ANNOTATION))); - Request.receiveJSON - .withArgs(`/api/datasets/${ANNOTATION.dataSetName}`) - .returns(Promise.resolve(_.cloneDeep(DATASET))); // The following code assumes a skeleton tracing (note that ANNOTATION is imported from // skeletontracing_server_objects.js) const contentType = "skeleton"; @@ -74,17 +75,17 @@ test("Model Initialization: should throw a model.HANDLED_ERROR for missing data const datasetObject = _.clone(DATASET); + // @ts-expect-error still delete dataLayers on the cloned object. 
delete datasetObject.dataSource.dataLayers; Request.receiveJSON - .withArgs(`/api/datasets/${ANNOTATION.organization}/${ANNOTATION.dataSetName}`) + .withArgs(`/api/datasets/${ANNOTATION.datasetId}`) .returns(Promise.resolve(_.cloneDeep(datasetObject))); return model .fetch( ANNOTATION_TYPE, { type: ControlModeEnum.VIEW, - name: ANNOTATION.dataSetName || "", - owningOrganization: ANNOTATION.organization || "", + datasetId: ANNOTATION.datasetId, }, true, ) @@ -100,15 +101,14 @@ test("Model Initialization: should throw an Error on unexpected failure", (t) => const { model } = t.context; const rejectedDatasetError = new Error("mocked dataset rejection"); Request.receiveJSON - .withArgs(`/api/datasets/Connectomics Department/${ANNOTATION.dataSetName}`) + .withArgs(`/api/datasets/${ANNOTATION.datasetId}`) .returns(Promise.reject(rejectedDatasetError)); return model .fetch( ANNOTATION_TYPE, { - name: ANNOTATION.dataSetName, - owningOrganization: "Connectomics Department", - type: "VIEW", + type: ControlModeEnum.VIEW, + datasetId: ANNOTATION.datasetId, }, true, ) From a65c7fb55febfcfbb84f9ea0c9f5172159140a58 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 30 Sep 2024 09:57:32 +0200 Subject: [PATCH 014/129] format backend --- app/controllers/AnnotationController.scala | 18 +++++------ .../AuthenticationController.scala | 23 +++++++------- app/controllers/DatasetController.scala | 6 +--- app/controllers/JobController.scala | 14 ++++----- app/controllers/UserTokenController.scala | 7 +++-- app/models/annotation/AnnotationService.scala | 30 ++++++++++++++++--- .../DatasetConfigurationService.scala | 11 +++---- app/models/dataset/DatasetService.scala | 2 +- .../dataset/WKRemoteDataStoreClient.scala | 3 +- .../credential/CredentialService.scala | 8 ++++- ...ossosBearerTokenAuthenticatorService.scala | 6 +++- .../requestparsing/DatasetURIParser.scala | 2 +- .../controllers/DSMeshController.scala | 2 +- .../controllers/DataSourceController.scala | 8 ++--- .../precomputed/PrecomputedArray.scala | 14 ++++----- .../services/AgglomerateService.scala | 1 - .../services/BinaryDataService.scala | 6 ++-- .../services/DSFullMeshService.scala | 4 +-- .../services/uploading/UploadService.scala | 10 +++++-- .../RemoteSourceDescriptorService.scala | 8 ++--- .../TSRemoteDatastoreClient.scala | 3 +- 21 files changed, 106 insertions(+), 80 deletions(-) diff --git a/app/controllers/AnnotationController.scala b/app/controllers/AnnotationController.scala index 4b4d3558a0c..b2ceb473e43 100755 --- a/app/controllers/AnnotationController.scala +++ b/app/controllers/AnnotationController.scala @@ -8,7 +8,11 @@ import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType.AnnotationLayerType -import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType} +import com.scalableminds.webknossos.datastore.models.annotation.{ + AnnotationLayer, + AnnotationLayerStatistics, + AnnotationLayerType +} import com.scalableminds.webknossos.datastore.models.datasource.AdditionalAxis import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.tracings.volume.ResolutionRestrictions @@ -243,9 +247,7 @@ class AnnotationController @Inject()( sil.SecuredAction.async(validateJson[List[AnnotationLayerParameters]]) { implicit request => 
for { parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND - dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages( - "dataset.notFound", - parsedDatasetId) ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", parsedDatasetId) ~> NOT_FOUND annotation <- annotationService.createExplorationalFor( request.identity, dataset._id, @@ -257,16 +259,12 @@ class AnnotationController @Inject()( } yield JsonOk(json) } - def getSandbox(datasetId: String, - typ: String, - sharingToken: Option[String]): Action[AnyContent] = + def getSandbox(datasetId: String, typ: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => val ctx = URLSharing.fallbackTokenAccessContext(sharingToken) // users with dataset sharing token may also get a sandbox annotation for { parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND - dataset <- datasetDAO.findOne(parsedDatasetId)(ctx) ?~> Messages( - "dataset.notFound", - parsedDatasetId) ~> NOT_FOUND + dataset <- datasetDAO.findOne(parsedDatasetId)(ctx) ?~> Messages("dataset.notFound", parsedDatasetId) ~> NOT_FOUND tracingType <- TracingType.fromString(typ).toFox _ <- bool2Fox(tracingType == TracingType.skeleton) ?~> "annotation.sandbox.skeletonOnly" annotation = Annotation( diff --git a/app/controllers/AuthenticationController.scala b/app/controllers/AuthenticationController.scala index 1c57855b243..c891a4edaf0 100755 --- a/app/controllers/AuthenticationController.scala +++ b/app/controllers/AuthenticationController.scala @@ -27,7 +27,15 @@ import play.api.data.validation.Constraints._ import play.api.i18n.Messages import play.api.libs.json._ import play.api.mvc.{Action, AnyContent, Cookie, PlayBodyParsers, Request, Result} -import security.{CombinedAuthenticator, OpenIdConnectClient, OpenIdConnectUserInfo, PasswordHasher, TokenType, WkEnv, WkSilhouetteEnvironment} +import security.{ + CombinedAuthenticator, + OpenIdConnectClient, + OpenIdConnectUserInfo, + PasswordHasher, + TokenType, + WkEnv, + WkSilhouetteEnvironment +} import utils.WkConf import java.net.URLEncoder @@ -235,10 +243,7 @@ class AuthenticationController @Inject()( selectedOrganization <- if (isSuperUser) accessibleBySwitchingForSuperUser(parsedDatasetId, annotationId, workflowHash) else - accessibleBySwitchingForMultiUser(request.identity._multiUser, - parsedDatasetId, - annotationId, - workflowHash) + accessibleBySwitchingForMultiUser(request.identity._multiUser, parsedDatasetId, annotationId, workflowHash) _ <- bool2Fox(selectedOrganization._id != request.identity._organization) // User is already in correct orga, but still could not see dataset. Assume this had a reason. 
selectedOrganizationJs <- organizationService.publicWrites(selectedOrganization) } yield Ok(selectedOrganizationJs) @@ -276,12 +281,8 @@ class AuthenticationController @Inject()( workflowHashOpt: Option[String]): Fox[Organization] = for { identities <- userDAO.findAllByMultiUser(multiUserId) - selectedIdentity <- Fox.find(identities)( - identity => - canAccessDatasetOrAnnotationOrWorkflow(identity, - datasetIdOpt, - annotationIdOpt, - workflowHashOpt)) + selectedIdentity <- Fox.find(identities)(identity => + canAccessDatasetOrAnnotationOrWorkflow(identity, datasetIdOpt, annotationIdOpt, workflowHashOpt)) selectedOrganization <- organizationDAO.findOne(selectedIdentity._organization)(GlobalAccessContext) } yield selectedOrganization diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index cf43c4db670..b4f979190a8 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -120,11 +120,7 @@ class DatasetController @Inject()(userService: UserService, for { parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND _ <- datasetDAO.findOne(parsedDatasetId)(ctx) ?~> notFoundMessage(datasetId) ~> NOT_FOUND // To check Access Rights - image <- thumbnailService.getThumbnailWithCache(parsedDatasetId, - dataLayerName, - w, - h, - mappingName) + image <- thumbnailService.getThumbnailWithCache(parsedDatasetId, dataLayerName, w, h, mappingName) } yield { addRemoteOriginHeaders(Ok(image)).as(jpegMimeType).withHeaders(CACHE_CONTROL -> "public, max-age=86400") } diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala index c00f57adc0f..7696b63d54b 100644 --- a/app/controllers/JobController.scala +++ b/app/controllers/JobController.scala @@ -186,9 +186,7 @@ class JobController @Inject()( } yield Ok(js) } - def runInferNucleiJob(datasetId: String, - layerName: String, - newDatasetName: String): Action[AnyContent] = + def runInferNucleiJob(datasetId: String, layerName: String, newDatasetName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { @@ -253,10 +251,9 @@ class JobController @Inject()( log(Some(slackNotificationService.noticeFailedJobRequest)) { for { parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND - dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages( - "dataset.notFound", - datasetId) ~> NOT_FOUND - organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", dataset._organization) + dataset <- datasetDAO.findOne(parsedDatasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND + organization <- organizationDAO.findOne(dataset._organization) ?~> Messages("organization.notFound", + dataset._organization) _ <- bool2Fox(request.identity._organization == organization._id) ?~> "job.inferMitochondria.notAllowed.organization" ~> FORBIDDEN _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(layerName) @@ -390,8 +387,7 @@ class JobController @Inject()( } } - def runFindLargestSegmentIdJob(datasetId: String, - layerName: String): Action[AnyContent] = + def runFindLargestSegmentIdJob(datasetId: String, layerName: String): Action[AnyContent] = sil.SecuredAction.async { implicit request => log(Some(slackNotificationService.noticeFailedJobRequest)) { for { diff --git a/app/controllers/UserTokenController.scala 
b/app/controllers/UserTokenController.scala index 67ec8db2f55..71d68579fdf 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -114,7 +114,9 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, def tryRead: Fox[UserAccessAnswer] = for { - dataSourceBox <- datasetDAO.findOneByPathAndOrganization(dataSourceId.path, dataSourceId.organizationId).futureBox + dataSourceBox <- datasetDAO + .findOneByPathAndOrganization(dataSourceId.path, dataSourceId.organizationId) + .futureBox } yield dataSourceBox match { case Full(_) => UserAccessAnswer(granted = true) @@ -144,7 +146,8 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, def tryDelete: Fox[UserAccessAnswer] = for { _ <- bool2Fox(conf.Features.allowDeleteDatasets) ?~> "dataset.delete.disabled" - dataset <- datasetDAO.findOneByPathAndOrganization(dataSourceId.path, dataSourceId.organizationId)(GlobalAccessContext) ?~> "datasource.notFound" + dataset <- datasetDAO.findOneByPathAndOrganization(dataSourceId.path, dataSourceId.organizationId)( + GlobalAccessContext) ?~> "datasource.notFound" user <- userBox.toFox ?~> "auth.token.noUser" } yield UserAccessAnswer(user._organization == dataset._organization && user.isAdmin) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index daac0442ddc..2b0af2c7a55 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -10,15 +10,37 @@ import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{BoxImplicits, Fox, FoxImplicits, TextUtils} import com.scalableminds.webknossos.datastore.SkeletonTracing._ import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} -import com.scalableminds.webknossos.datastore.geometry.{AdditionalCoordinateProto, ColorProto, NamedBoundingBoxProto, Vec3DoubleProto, Vec3IntProto} +import com.scalableminds.webknossos.datastore.geometry.{ + AdditionalCoordinateProto, + ColorProto, + NamedBoundingBoxProto, + Vec3DoubleProto, + Vec3IntProto +} import com.scalableminds.webknossos.datastore.helpers.{NodeDefaults, ProtoGeometryImplicits, SkeletonTracingDefaults} import com.scalableminds.webknossos.datastore.models.VoxelSize -import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerStatistics, AnnotationLayerType, AnnotationSource, FetchedAnnotationLayer} -import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, ElementClass, DataSourceLike => DataSource, SegmentationLayerLike => SegmentationLayer} +import com.scalableminds.webknossos.datastore.models.annotation.{ + AnnotationLayer, + AnnotationLayerStatistics, + AnnotationLayerType, + AnnotationSource, + FetchedAnnotationLayer +} +import com.scalableminds.webknossos.datastore.models.datasource.{ + AdditionalAxis, + ElementClass, + DataSourceLike => DataSource, + SegmentationLayerLike => SegmentationLayer +} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.tracingstore.tracings._ import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat -import com.scalableminds.webknossos.tracingstore.tracings.volume.{ResolutionRestrictions, VolumeDataZipFormat, VolumeTracingDefaults, VolumeTracingDownsampling} +import com.scalableminds.webknossos.tracingstore.tracings.volume.{ + ResolutionRestrictions, + VolumeDataZipFormat, + VolumeTracingDefaults, + 
VolumeTracingDownsampling +} import com.typesafe.scalalogging.LazyLogging import controllers.AnnotationLayerParameters import models.annotation.AnnotationState._ diff --git a/app/models/configuration/DatasetConfigurationService.scala b/app/models/configuration/DatasetConfigurationService.scala index d798cd27cba..d47559f36b9 100644 --- a/app/models/configuration/DatasetConfigurationService.scala +++ b/app/models/configuration/DatasetConfigurationService.scala @@ -20,10 +20,8 @@ class DatasetConfigurationService @Inject()(datasetService: DatasetService, datasetDAO: DatasetDAO, thumbnailCachingService: ThumbnailCachingService, datasetDataLayerDAO: DatasetLayerDAO)(implicit ec: ExecutionContext) { - def getDatasetViewConfigurationForUserAndDataset( - requestedVolumeIds: List[String], - user: User, - datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = + def getDatasetViewConfigurationForUserAndDataset(requestedVolumeIds: List[String], user: User, datasetId: ObjectId)( + implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { dataset <- datasetDAO.findOne(datasetId) @@ -33,9 +31,8 @@ class DatasetConfigurationService @Inject()(datasetService: DatasetService, layerConfigurations <- getLayerConfigurations(datasetLayers, requestedVolumeIds, dataset, Some(user)) } yield buildCompleteDatasetConfiguration(datasetViewConfiguration, layerConfigurations) - def getDatasetViewConfigurationForDataset( - requestedVolumeIds: List[String], - datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = + def getDatasetViewConfigurationForDataset(requestedVolumeIds: List[String], datasetId: ObjectId)( + implicit ctx: DBAccessContext): Fox[DatasetViewConfiguration] = for { dataset <- datasetDAO.findOne(datasetId) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index cb349e3d4b9..188cc102969 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -19,7 +19,7 @@ import models.folder.FolderDAO import models.organization.{Organization, OrganizationDAO} import models.team._ import models.user.{User, UserService} -import net.liftweb.common.{Box, Full} +import net.liftweb.common.{Box, Full} import play.api.libs.json.{JsObject, Json} import security.RandomIDGenerator import utils.WkConf diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index c056d38c70a..2cbebfd42e5 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -51,8 +51,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin .getWithBytesResponse } - def getLayerData( - dataset: Dataset, + def getLayerData(dataset: Dataset, layerName: String, mag1BoundingBox: BoundingBox, mag: Vec3Int, diff --git a/app/models/dataset/credential/CredentialService.scala b/app/models/dataset/credential/CredentialService.scala index 8af19c40608..541767be8e2 100644 --- a/app/models/dataset/credential/CredentialService.scala +++ b/app/models/dataset/credential/CredentialService.scala @@ -2,7 +2,13 @@ package models.dataset.credential import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.storage.{DataVaultCredential, DataVaultService, GoogleServiceAccountCredential, HttpBasicAuthCredential, S3AccessKeyCredential} +import com.scalableminds.webknossos.datastore.storage.{ + 
DataVaultCredential, + DataVaultService, + GoogleServiceAccountCredential, + HttpBasicAuthCredential, + S3AccessKeyCredential +} import net.liftweb.common.Box.tryo import play.api.libs.json.Json diff --git a/app/security/WebknossosBearerTokenAuthenticatorService.scala b/app/security/WebknossosBearerTokenAuthenticatorService.scala index 777fe35860b..1af0248ed6c 100644 --- a/app/security/WebknossosBearerTokenAuthenticatorService.scala +++ b/app/security/WebknossosBearerTokenAuthenticatorService.scala @@ -5,7 +5,11 @@ import play.silhouette.api.exceptions.{AuthenticatorCreationException, Authentic import play.silhouette.api.services.AuthenticatorService.{CreateError, InitError} import play.silhouette.api.util.{Clock, IDGenerator} import play.silhouette.impl.authenticators.BearerTokenAuthenticatorService.ID -import play.silhouette.impl.authenticators.{BearerTokenAuthenticator, BearerTokenAuthenticatorService, BearerTokenAuthenticatorSettings} +import play.silhouette.impl.authenticators.{ + BearerTokenAuthenticator, + BearerTokenAuthenticatorService, + BearerTokenAuthenticatorSettings +} import com.scalableminds.util.accesscontext.GlobalAccessContext import com.scalableminds.util.tools.{Fox, FoxImplicits} import models.user.{User, UserService} diff --git a/util/src/main/scala/com/scalableminds/util/requestparsing/DatasetURIParser.scala b/util/src/main/scala/com/scalableminds/util/requestparsing/DatasetURIParser.scala index 957ebfe2457..2aa46c3454d 100644 --- a/util/src/main/scala/com/scalableminds/util/requestparsing/DatasetURIParser.scala +++ b/util/src/main/scala/com/scalableminds/util/requestparsing/DatasetURIParser.scala @@ -7,7 +7,7 @@ trait DatasetURIParser { val maybeId = maybeIdStr.flatMap(ObjectId.fromStringSync) maybeId match { case Some(validId) => (Some(validId), None) - case None => (None, Some(datasetNameAndId)) + case None => (None, Some(datasetNameAndId)) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index d20d2881edc..8b78c96ad3c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -104,7 +104,7 @@ class DSMeshController @Inject()( for { data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], organizationId, - datasetPath, + datasetPath, dataLayerName, request.body) ?~> "mesh.file.loadChunk.failed" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 151a39646e0..9c815a3576b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -215,8 +215,8 @@ class DataSourceController @Inject()( dataSourceId <- uploadService .getDataSourceIdByUploadId(request.body.uploadId) ?~> "dataset.upload.validation.failed" response <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), - urlOrHeaderToken(token, request)) { - for { + urlOrHeaderToken(token, request)) { + for { (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "finishUpload.failed" 
uploadedDatasetIdJson <- remoteWebknossosClient.reportUpload( dataSourceId, @@ -448,9 +448,7 @@ class DataSourceController @Inject()( urlOrHeaderToken(token, request) ) ?~> "dataset.upload.validation.failed" datasourceId = DataSourceId(reservedInfo.path, organizationId) - _ <- dataSourceService.updateDataSource( - request.body.copy(id = datasourceId), - expectExisting = false) + _ <- dataSourceService.updateDataSource(request.body.copy(id = datasourceId), expectExisting = false) uploadedDatasetId <- remoteWebknossosClient.reportUpload( datasourceId, 0L, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala index 5c26c43c333..f7cc98ef8d1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala @@ -20,13 +20,13 @@ import ucar.ma2.{Array => MultiArray} object PrecomputedArray extends LazyLogging { def open( - magPath: VaultPath, - dataSourceId: DataSourceId, - layerName: String, - axisOrderOpt: Option[AxisOrder], - channelIndex: Option[Int], - additionalAxes: Option[Seq[AdditionalAxis]], - sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[PrecomputedArray] = + magPath: VaultPath, + dataSourceId: DataSourceId, + layerName: String, + axisOrderOpt: Option[AxisOrder], + channelIndex: Option[Int], + additionalAxes: Option[Seq[AdditionalAxis]], + sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[PrecomputedArray] = for { headerBytes <- (magPath.parent / PrecomputedHeader.FILENAME_INFO) .readBytes() ?~> s"Could not read header at ${PrecomputedHeader.FILENAME_INFO}" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala index c136552a635..e3cac72feac 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala @@ -29,7 +29,6 @@ class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverte private val dataBaseDir = Paths.get(config.Datastore.baseFolder) private val cumsumFileName = "cumsum.json" - lazy val agglomerateFileCache = new AgglomerateFileCache(config.Datastore.Cache.AgglomerateFile.maxFileHandleEntries) def exploreAgglomerates(organizationId: String, datasetPath: String, dataLayerName: String): Set[String] = { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index c53b3703970..fa32a5de150 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -98,11 +98,13 @@ class BinaryDataService(val dataBaseDir: Path, s"Caught internal error: $msg while loading a bucket for layer ${request.dataLayer.name} of dataset ${request.dataSource.id}") Fox.failure(e.getMessage) case f: Failure => - if 
(datasetErrorLoggingService.exists(_.shouldLog(request.dataSource.id.organizationId, request.dataSource.id.path))) { + if (datasetErrorLoggingService.exists( + _.shouldLog(request.dataSource.id.organizationId, request.dataSource.id.path))) { logger.error( s"Bucket loading for layer ${request.dataLayer.name} of dataset ${request.dataSource.id.organizationId}/${request.dataSource.id.path} at ${readInstruction.bucket} failed: ${Fox .failureChainAsString(f, includeStackTraces = true)}") - datasetErrorLoggingService.foreach(_.registerLogged(request.dataSource.id.organizationId, request.dataSource.id.path)) + datasetErrorLoggingService.foreach( + _.registerLogged(request.dataSource.id.organizationId, request.dataSource.id.path)) } f.toFox case Full(data) => diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala index 74460560a32..5706b6694ff 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala @@ -123,7 +123,7 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, meshFileName <- fullMeshRequest.meshFileName.toFox ?~> "meshFileName.needed" before = Instant.now mappingNameForMeshFile = meshFileService.mappingNameForMeshFile(organizationId, - datasetPath, + datasetPath, layerName, meshFileName) segmentIds <- segmentIdsForAgglomerateIdIfNeeded( @@ -138,7 +138,7 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, token ) chunkInfos: WebknossosSegmentInfo <- meshFileService.listMeshChunksForSegmentsMerged(organizationId, - datasetPath, + datasetPath, layerName, meshFileName, segmentIds) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index 0266f95fe0e..ef17c0a4c99 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -264,7 +264,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, knownUpload <- isKnownUpload(uploadId) } yield if (knownUpload) { - logger.info(f"Cancelling dataset upload of ${dataSourceId.organizationId}/${dataSourceId.path} with id $uploadId...") + logger.info( + f"Cancelling dataset upload of ${dataSourceId.organizationId}/${dataSourceId.path} with id $uploadId...") removeFromRedis(uploadId).flatMap(_ => PathUtils.deleteDirectoryRecursively(uploadDirectory(dataSourceId.organizationId, uploadId))) } else { @@ -281,7 +282,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, uploadDir = uploadDirectory(dataSourceId.organizationId, uploadId) unpackToDir = dataSourceDirFor(dataSourceId, datasetNeedsConversion) - _ = logger.info(s"Finishing dataset upload of ${dataSourceId.organizationId}/${dataSourceId.path} with id $uploadId...") + _ = logger.info( + s"Finishing dataset upload of ${dataSourceId.organizationId}/${dataSourceId.path} with id $uploadId...") _ <- Fox.runIf(checkCompletion)(ensureAllChunksUploaded(uploadId)) _ <- ensureDirectoryBox(unpackToDir.getParent) ?~> "dataset.import.fileAccessDenied" unpackResult <- 
unpackDataset(uploadDir, unpackToDir).futureBox @@ -326,7 +328,9 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, case UploadedDataSourceType.WKW => addLayerAndResolutionDirIfMissing(unpackToDir).toFox } _ <- datasetSymlinkService.addSymlinksToOtherDatasetLayers(unpackToDir, layersToLink.getOrElse(List.empty)) - _ <- addLinkedLayersToDataSourceProperties(unpackToDir, dataSourceId.organizationId, layersToLink.getOrElse(List.empty)) + _ <- addLinkedLayersToDataSourceProperties(unpackToDir, + dataSourceId.organizationId, + layersToLink.getOrElse(List.empty)) } yield () } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala index e305821d28c..cbdc435c85c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala @@ -36,10 +36,10 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemote } yield () private def remoteSourceDescriptorFor( - baseDir: Path, - datasetId: DataSourceId, - layerName: String, - magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[RemoteSourceDescriptor] = + baseDir: Path, + datasetId: DataSourceId, + layerName: String, + magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[RemoteSourceDescriptor] = for { credentialBox <- credentialFor(magLocator: MagLocator).futureBox uri <- uriForMagLocator(baseDir, datasetId, layerName, magLocator).toFox diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala index 53e7c49bafd..19752af409a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala @@ -190,7 +190,8 @@ class TSRemoteDatastoreClient @Inject()( for { dataSourceId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) dataStoreUri <- dataStoreUriWithCache(dataSourceId.organizationId, dataSourceId.path) - result <- rpc(s"$dataStoreUri/data/datasets/${dataSourceId.organizationId}/${dataSourceId.path}/readInboxDataSource") + result <- rpc( + s"$dataStoreUri/data/datasets/${dataSourceId.organizationId}/${dataSourceId.path}/readInboxDataSource") .addQueryStringOptional("token", token) .getWithJsonResponse[InboxDataSource] scale <- result.voxelSizeOpt ?~> "could not determine voxel size of dataset" From 2234b5b6e96d7b4d83e695bc2ca922abf436b3b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 1 Oct 2024 18:20:51 +0200 Subject: [PATCH 015/129] fix dataSets.csv --- test/db/dataSets.csv | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/test/db/dataSets.csv b/test/db/dataSets.csv index 6875b5f816f..864c6d009e8 100644 --- a/test/db/dataSets.csv +++ b/test/db/dataSets.csv @@ -1,7 +1,7 @@ -_id,_dataStore,_organization,_publication,_uploader,_folder,inboxSourceHash,defaultViewConfiguration,adminViewConfiguration,description,displayName,isPublic,isUsable,name,voxelSizeFactor,voxelSizeUnit,status,sharingToken,logoUrl,sortingKey,metadata,tags,created,isDeleted 
-'570b9f4e4bb848d0885ee711','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,,f,f,'2012-06-28_Cortex',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.082Z','[]',{},'2016-04-11T12:57:50.082Z',f -'570b9f4e4bb848d0885ee712','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,,f,f,'Experiment_001',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.079Z','[]',{},'2016-04-11T12:57:50.079Z',f -'570b9f4e4bb848d0885ee713','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,,f,f,'2012-09-28_ex145_07x2',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.080Z','[]',{},'2016-04-11T12:57:50.080Z',f -'570b9fd34bb848d0885ee716','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,,f,f,'rgb',,,'No longer available on datastore.',,,'2016-04-11T13:00:03.792Z',[],{},'2016-04-11T13:00:03.792Z',f -'59e9cfbdba632ac2ab8b23b3','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,,f,t,'confocal-multi_knossos','(22,22,44.599998474121094)','nanometer','',,,'2017-10-20T10:28:13.763Z','[{"key": "key","type": "number","value": 4}]',{},'2017-10-20T10:28:13.763Z',f -'59e9cfbdba632ac2ab8b23b5','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,,f,t,'l4_sample','(11.239999771118164,11.239999771118164,28)','nanometer','',,,'2017-10-20T10:28:13.789Z','[]',{},'2017-10-20T10:28:13.789Z',f +_id,_dataStore,_organization,_publication,_uploader,_folder,inboxSourceHash,defaultViewConfiguration,adminViewConfiguration,description,path,isPublic,isUsable,name,voxelSizeFactor,voxelSizeUnit,status,sharingToken,logoUrl,sortingKey,metadata,tags,created,isDeleted +'570b9f4e4bb848d0885ee711','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'2012-06-28_Cortex',f,f,'2012-06-28_Cortex',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.082Z','[]',{},'2016-04-11T12:57:50.082Z',f +'570b9f4e4bb848d0885ee712','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'Experiment_001',f,f,'Experiment_001',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.079Z','[]',{},'2016-04-11T12:57:50.079Z',f +'570b9f4e4bb848d0885ee713','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'2012-09-28_ex145_07x2',f,f,'2012-09-28_ex145_07x2',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.080Z','[]',{},'2016-04-11T12:57:50.080Z',f +'570b9fd34bb848d0885ee716','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'rgb',f,f,'rgb',,,'No longer available on datastore.',,,'2016-04-11T13:00:03.792Z',[],{},'2016-04-11T13:00:03.792Z',f +'59e9cfbdba632ac2ab8b23b3','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'confocal-multi_knossos',f,t,'confocal-multi_knossos','(22,22,44.599998474121094)','nanometer','',,,'2017-10-20T10:28:13.763Z','[{"key": "key","type": "number","value": 4}]',{},'2017-10-20T10:28:13.763Z',f +'59e9cfbdba632ac2ab8b23b5','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'l4_sample',f,t,'l4_sample','(11.239999771118164,11.239999771118164,28)','nanometer','',,,'2017-10-20T10:28:13.789Z','[]',{},'2017-10-20T10:28:13.789Z',f From 3570545b8ba7131ef54b5c572223c31145e3a20c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 2 Oct 2024 09:46:20 +0200 Subject: [PATCH 016/129] fix e2e tests --- .../backend-snapshot-tests/annotations.e2e.ts | 2 +- .../annotations.e2e.js.md | 8 ++++++++ .../annotations.e2e.js.snap | Bin 12618 -> 12693 bytes .../backend-snapshot-tests/datasets.e2e.js.md | 18 ++++++++++-------- .../datasets.e2e.js.snap | Bin 4101 -> 
4123 bytes .../backend-snapshot-tests/tasks.e2e.js.md | 2 ++ .../backend-snapshot-tests/tasks.e2e.js.snap | Bin 5358 -> 5433 bytes 7 files changed, 21 insertions(+), 9 deletions(-) diff --git a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts index e548f4e212c..86d103c0dae 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/annotations.e2e.ts @@ -20,7 +20,7 @@ import * as api from "admin/admin_rest_api"; import generateDummyTrees from "oxalis/model/helpers/generate_dummy_trees"; import test from "ava"; import { createSaveQueueFromUpdateActions } from "../helpers/saveHelpers"; -const datasetId = "id"; +const datasetId = "59e9cfbdba632ac2ab8b23b3"; process.on("unhandledRejection", (err, promise) => { console.error("Unhandled rejection (promise: ", promise, ", reason: ", err, ")."); }); diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md index 0df10c840a4..f0ecbebfc2b 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.md @@ -32,6 +32,7 @@ Generated by [AVA](https://avajs.dev). name: 'localhost', url: 'http://localhost:9000', }, + datasetId: '570b9f4e4bb848d0885ee711', description: '', id: '570ba0092a7c0e980056fe9b', isLockedByOwner: false, @@ -155,6 +156,7 @@ Generated by [AVA](https://avajs.dev). name: 'localhost', url: 'http://localhost:9000', }, + datasetId: '570b9f4e4bb848d0885ee711', description: '', id: '88135c192faeb34c0081c05d', isLockedByOwner: false, @@ -394,6 +396,7 @@ Generated by [AVA](https://avajs.dev). name: 'localhost', url: 'http://localhost:9000', }, + datasetId: '570b9f4e4bb848d0885ee711', description: '', id: 'id', isLockedByOwner: false, @@ -566,6 +569,7 @@ Generated by [AVA](https://avajs.dev). name: 'localhost', url: 'http://localhost:9000', }, + datasetId: '570b9f4e4bb848d0885ee711', description: '', id: 'id', isLockedByOwner: false, @@ -745,6 +749,7 @@ Generated by [AVA](https://avajs.dev). name: 'localhost', url: 'http://localhost:9000', }, + datasetId: '570b9f4e4bb848d0885ee711', description: '', id: 'id', isLockedByOwner: false, @@ -871,6 +876,7 @@ Generated by [AVA](https://avajs.dev). name: 'localhost', url: 'http://localhost:9000', }, + datasetId: '570b9f4e4bb848d0885ee711', description: '', id: 'id', isLockedByOwner: false, @@ -999,6 +1005,7 @@ Generated by [AVA](https://avajs.dev). name: 'localhost', url: 'http://localhost:9000', }, + datasetId: '570b9f4e4bb848d0885ee711', description: 'new description', id: 'id', isLockedByOwner: false, @@ -1122,6 +1129,7 @@ Generated by [AVA](https://avajs.dev). 
name: 'localhost', url: 'http://localhost:9000', }, + datasetId: '59e9cfbdba632ac2ab8b23b3', description: '', id: 'id', isLockedByOwner: false, diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/annotations.e2e.js.snap index 77707994e1d37e499fff3ee04c89105d66202058..a4838005eea59d7e1d5eb31e8a991fc4aa9b093b 100644 GIT binary patch literal 12693 zcmZvBWmFqX6E0rdUEAXBZly&^(c(^Uhd^<6ik0G4+&#EUlHyj}A%PdS6sOqbyZ7&% zb9T?p&NI*K%)gzH)02Cv?P>x1X7A?t)>{x86-ho?=#5jw)YmH--L>23FT?;Oh!CpD zwNQtV7p(f1tqMatCer^E)YCjaK-%Ebi~01XT~oJsC#uIdQ3?{)TSAChha4Dn-CA@s z#ai^l6;gEFW^`^Ob)?&}snwv>VAQ&3%!_lVDQm5NLZ|+og{kW?vlDpw2GR?vj zS*rHz-*lOMYqxhfF(G*Q9B8|@R~^^&!N%nv#kj}9ykn}nHKbM4UYb*(94du9OaxD=D(qGW5rsi(B&LV+tp!+3KIQQILeKrEg%dLOG1?mV zC3=@Ew)J`x;c{nmvtNgt!6iZ6E3r!-U$cao&WR zAvZ9;`wx%zqU9+?($6tiL0LDsy|TEtqjEXb+(c^SI8VYMrI=|`)8^08TYaVrn(*&K z6iG)@)4c3Jjk<~34AT95jeMH%TzJap^EH?4#!9Nq=-~^l{chcoRw(suiTbbMbp}~F=lrG?LTqAO@rmgnQP3+`SKfb|`0lOPIL*dL;hx?Gz!`-Q3 z<bYpIpZU%YJ~@!g7t`%0dg%y~vrX#bf02BmX|A+~S^ zi}{E?yyh~hT_Le+gy9Cmm^7OyaU7{L`m@qPCPu`WOG^1EyRixB0i3ZngE)`TuT6rV@}h+tr)Gd06EH`e6fW8(pkj7 zXlts#60CgE=H)@IAs;n_KFPr(-wZD21ZV-bJu9l#;p5_+P6cLd&;qVoHeBUfkGLtv z-UQ)9)nwBE5>A>?hw`nF zu?(I!Q-m08$MF%0@kb|`Bb2kn8TnKm{N%nrM`MMOCWfiLd`+&{I+1(#8)RO|f7t}Y zdYoL`;Ge9s7ZUOCE)XZ#&2=rKr!}eHei33W`jeFeo*=PgAETt7b>)0u8XjW^SNyQt~_Z;d3=1#;K=>mIB0x+qae697UX7ic%oU>UH0Kxrk_=)NI;X&Jn>~`@$RQPqF5q zP>P(je`Ll-H{CUBGSu&XR!wp1e0Y|t|B%BJYwPbnTUTY4e4K@fXov5}4amktRQq-o%xs{4n)Z-(WT0?Z5dc8J*16!7YYUcYAV|g_* z|t~sQa+BmcXY4$V9I2<-H(W0zA3S1C| zQ}%Wj``@-6x0bda2`m3g@ytrc*F6MUNUZ1aw9w~mt3C11-IPa6A(zmFcPP9wI}6pt z7u!PiiQU$Vx{Js6T|mZQP4L3r7KyqOV6AnBpsRN%kxqa8-hUFgln3wti)FseuOT#t zhwqpoq$wWdNzW?`%Ht+yi7t29aD@IrV=0Vm8!A?5QtPQ24bu>Bq}+*1NQvQvqIw^L zdv7une>xx3*wg*P=6wIjyzGwsCTh`la3pu(YG2uo&prSJBZ? 
zTHL|b|8~oGmz{KYQy!ggcu5|eP3kv_>Y*yKgjHB^pFVKjT#kl7ZK8c(kO-C@&$bQ}*SPri;=r-zoZ zT>6^vR|B2w0&}S5@UGY2t ze@A9)qRF@!KjRqg@qO^-ur|y`R-^_bBL`x1EF&e+z&?01aX~Sk52+f-`d`3ys}24 z|1$*Wc{J66QPjwnZNiqg04)$qPA*XR7Ph`u2G^RG&`6W)b+S#bo%GA@4D&dt%NMx} zam$gWnGJ=Ex>LsSe-ipfd7NokTx$*yYc2>$8d?zeV1A^@L$bCBI_h1iu9$&-3Tibx zc;mgCPHlLTK$qVRb@INOvtSyRkrJ@Z?vA%!DP(id-BdTq^ZswNsk_8~%W=uck#)Sh z9O`}uQ;s<=hvvN|vQ-WpKP+?~82Qh7#qv!paG(>vgHhb@M)J*xSLhU0NouOiuDZc) z^im6Of3*0|ZM4SsOBF&nabGH|hUT%-GcnfY$W5CP{_Z zvrc}D{W$b1X&4CTdJmxHqcBi^^ihs`;87S}Cvl+p7NYDqk?z;L$iL*(iKefN0HHCc z{Hzair#2`Hn-c;iV&pFk$l)Ed@+smpw*q^}k6M8dI$SoKrcNKCM!D&oxVbZ)A!9of$m=Qlv}3bc%VfjV_eWU5-$zR z3biAFZv&H+68@fX-6_(X2JY76qW04z{4a~x3fg}BxO=9hvsYW-HCrf8{9UTZL@i);mdPR3P&`(b^>^e}*T4_2oUJ&K2FGva zml6|`HC|YOvG+f7m!?TX8`_CQcoWO^g&MN#q4uaW&WBC_rP8o;L;TJ#bZ#t~WwH!; zypx&yl>yb1JujS-=HD8cE!*Kw)8u!b$(ug*jXcCn1O!%pm9s7YL}}Lo%Z1Hql%qz- z+0iBm>JW^`103WJJe=!xQCr4v?Z)M!$AGTlWmIRrc?_F2-#YwsOUXgY0?UXm$RC&v zy{<)m8s;9mAik@ydAB(>lBwvQ7C0m9XeiM=nDon*pMGOG9-DYp_9l){RP4okC}JP} z`NObwK6}KTZt;;~9v;4F{3f+DWCCAf1wQS4H-=v_oAOoZAF~5U054}2HY?;SKT-VF z_v*Y9945q94uyFdFcIryD?3nwcym=)^-Fk(2(sdsi=y&9pvQ>7G>lpMjgsH z8uoBu?!a^@CdUsdFSO}GOx2Hw82N>((w~()T$Zy;hr|2|q6-0^-ubjbcGfO4M5meY ze>M#%Qn90y+IowB7@I#*%RJ6zQC=gC`D|K(oBSoUv|>91Z=vl`;`dKJ2}l2lY16nL zM~{|Y+RN*3b@|7B97IF9ae?L>{8~xWu+h(|X+xOMM9?B;1y$55 z-5I4^BBv97+J*0^0#mU1tip}e5R%E!x}e^Fs4RB_Xyk&b-<{v-n7dI$B%4O-=fc?q zdz>(u(Xj{rFwd~l7UUEe5czR|Mo}^`Yc^GjHh-)!&S>x&!pzA>5}nCv+aULP9fR1@ z25_2t9~zw%7%t7ogXT@#aa(O%sLr|)L>@8ZPjI=v0nd)|wUerv)7sv*~Ji0Zud$R7-g#)P55owc)6PQ4p|k{WZ1SzXPXB%H~Td zlV8;aepem%!JKw`NpKjxi@z)8oN`qm3H$ZAaGnJJE_WotlRZnrDiaGo`9D5R$gi#(pN{kJuu*O6suhy$K-@T$Z;?=#VnO+W{k^SQY@NV$_a@0x;I|kwZq1g36I* z%!}N$SX3t?XI0sz6B#h*NNQF(-L?`6<9mDUFx^p?8hK^xkQM%+esUZ`5?@AaRgz z0|~eT?t&s3KICVIwB4dZcbH^#}%3A>jh&*!8Jafdw)9a4fPa@mlt4@Sn zi-V-!xQ>QVctqBslIHYHA-nMWw+mS}kqgZF*uz5c*a>v!S!a!7C?O0Hrq~>dn)WCP zk(CSV2p81FQqXCGGm?9+i0j5M4_qJ?s@ZHUM4HV-xhEVS=r*DZ%~O>dN3)nVZvTOI zpI~Rfh??NPb9rHoBtvr8!N;H}g4IB&hos1)eRFD%gGCm3Tq{V5#rxKsyQescSK@ew zDtUgm`3~{mTkYSFw7+L|xZ@|%m^55_w3D4Y%W}sZD=nqZe1(uo{sR?cYmBKVm6Vps zKf6GzTZ9&rZo~bSk^1=tf>B3GruZ0ZfBZtu{tab3_wSd@j@jpDl(LDOPT*Tcn^OaA zqE%$W$VnbWI%Dkmv7RkaQmKX%ZhBe&B824IhR*q*B(`^{R0-r+Y1J_m5ENnZ4z^c{ z*T0xc7zGzX1IJqFrahnzSWo6{1b5&JWPWi)O~O-e59{%}*qxV>@=)DW5&^Q@A|@K4 zA0>ZT#H^hnYDU0-RUt|mti+?Vn2J4nU*G!kuk`sb4^mRG*@Lq5rUV*G__ZzGnK2a| z(@;jQn3#QEU4rS(m@|0iOaB10x#1O=Aww|zl|Cku}S_r*N}=B3D=sWZ-tNqT*YJGQSq;3(LevBPod}^*!8fuf147Wv`?#~|K`cJE6=GJMn^(A>8&e~*QQJV z+KeFU(ZS04*}=++q0;oYA;O>hkrAgURnouQ272Nw#!NG%es<8wO>;IHz25}i`R@n4 z62A5Rhp+g~BVmHyU_WS=bJu4|M$xr-5=NpFM(L!lI|;|4I6W+UJd~z$I}yVscLEgA zSK|IZ0Ckph(l#5r2M!0{uCC5ltWrEPh5Mm&Pq4mVgb9JiRf`K&&SopO+~)Ge@>s82 zJ!{g{W}#uSpgF|ou94jA4+NRpz%%cPMos-lRN^n`>I7v2m-ipn9uP zYOq(1g6W2KV{u_8)%;6f;6+S~f8a$?LEBFjWy6hKB_q2~ysEooM_icbz%3pk6xmET zbPCfd^&weZAY3w=p=uE`HxU>^WcFiS#g-o|Mp7#RzWFb>^gI_vO(NwOE}I22=8vCE z5}L;?nc$noMHV!%h*p{ae(;p@T1cArHoEZilGQRX$`}!=Z5>c17c;(cjAa%7y!VD1 z@f{ss(d28Co`FNuPqO-E2=AaEmj|i&z(~ryJ#D9R>ot?oG@Efj3F|v>2N&26B<;;P z?Xcmse32Iv!(HQ5)91?MU1CSVQ1hrk^EiwVamu%XUs3K^+q5SoX~Oi8&CmZ{itK*% z)oS!#itHc)+bS8#^ps zIDuRt0h9V_qRhH^7#cwLwxIjb=YQH*ckDlLi#QT>(kFFniLXAax;C<;=Tu4`Hu|OYczGXdSj*s( zKIs3BKmHp0)uK_LD}GR76N$9~VmZosIZp+<7y*k1Fz#K*HkjUY%I=0$?c1vR!yXYO}$o2|fs}jcuhU)W+6#1ouJPrtuf~ zNaF`(4z5K`}wo2X4eqbeW9lo>mAGCs#Yfmea&3idAylve#Y6fa_b5WuxL% zqd{!u)~-_K;E_?sMcbKWqwBd<<9wH{xhQe>jNt40U+f}6)&PZc*r%X(!sN%ak|tTd z2K@y*{8z@keI=>lnHd-!lj(3B_?U#rogArYGcj;x0Sgw^jp~O)Tzae!o`&zU1{!U1 z25*w25{22mSf#hm8C0~+8TeF0&>PegXer=~1Yvs$120gZ(LlI$>Qh)W;$5 
zQHO$nIE(~&AK{U;SWBx8ViQ-{j^@m@@%C@hY~b9wOmBWI)MH@;9p1^Gfe30}In)CC zfyyX3EbseqQP0!4a1rtG1^>>cj#93rG3d0%f{)}lbJ;_$@r7luELW}m(Rzk;$~f~2 zkV&~-k|foHp=#4pR*X5tDabO#Nkw0|J}*|eemDtx^m^&6^Cq|!`9iy>qnrxRhRlHn ztVebg9H7CtONe#2IZOY|voy)$ zB$}e2ssNNj&{-5(S7jFP(nWQX4L{bY@69}HkRxYP2|f*4Mp@&2+ic*bh##EMAf4kJ zmC87bQt9%Lq$RpUhj(){_;T%Z9=Zoge20rwm|8;eGz5 zmXCd#20`($3F|KIC@usQ^NBBA$+}uDI|;k& z>R)n6+V)Pl2OggonU+HfG7BZ33|T}J^=SZUni2;6uXGlP5my+3r6@LsDM~3PBXmK> zXwO*mRrzz776}($*fd&Z4y^ALW=8V3iS{B8MoTZehAV8_A)u>usBe2$Mu}fZ=)bg0qd8DaJXSIV$wKPod`sZ>#Gk^0*{-W*w zwrU=JQl4aZ^e)Ij&C>SJnZ^VVVJdQ$j)z}9#MOE7(<#Qno3bFmkfRN%Dp}k9iBH&8 zc~j0eimtvH>JpP|*ws&yU3ObMky4WfXP3U1(E5~S`0s1?X69;kS;O3iTzsoJ3wH_U zFH>BmY54l3P>op&HxJuG^FxG<#D5Y1MUPjy0e!OA;4Vh-W+pe-8cl4l2eVzqe|Uj{ z?`Jf&l&82xY3|Ox%gYA+ahXVYZwgB%Jw&NVy94#gsF%*Q9xB?yTyzs=`UE*0IB$1fHd8OG_Oz?KC;9nV8_8OIGpUHA1p}C4~L)0Heg& z0Qc}Y+MmbmawVSlK+=kJz9S#eD=rDt(B{Hcx772ggs( z{m5BfTP+#d$SBM0vq|@E?sGY8Q(`df3aW84?J{Im-h(jK^l1O5WU3O-@f%5>8s>^B z#VxQiHn_f)?3DdH_iK{b^@HA-yTl@9Y655(GrhROn`$GH@I;kv8O zxPNx25Nz6cliJ`sJPHSKTRUqda-1nI zcID^g&HO?PwkG$l9~Ax{!d;vMU5BX!Swce6qH0Bt=*lmRgnVr^3oE`2i48+9*>iSA zg52f(%_SK{KWVC^eOzFRYD$prZwRS}WviAclWOkHcc!)x>eB~X{kejKmN z_HWJB>FpaAtU8Yk%`;b&Ect^7>wBOL7awx{h3zyrdVs;`n*fX^sweBe-Tv>>!dgSX zAIv05izFhM&)x!K?|2|Pa}yj6+s`i2EPV2N3^puc;`g2kGSYWkBqny@8^ylcgrUDl zoRn`@ToxI$3S$Y6%{B;PQuO4M>#WbPD+`KQt*sFX~dc|)-j{!){5 z-hIEnp+{J!i{X2Qa}vXh#}!)B3~Qs}abmxIEo(#K{|)0zEW#uQzmx2$UsrG;YSEwP zf1Q5&YT}0$K8G!?H_{Gy&rhk~Uiz~hf-S5kCh5V~>VFr1R-xxRkKJGZFJUU;VNtGV z4jaT6UIs)TxKExLXFO`~sIb)C{FC-ev3YoUlpX+qCqS;bHm{SEj33;$+Y;xCq>^`w z@F2P#Nr^kFm`(6Yu%~24U`MSA&Q1%TY%6|nHF> z;4<8lCCp}iQ6-RFil9w^t3e_{S(a7UI@Miw2x9nLD6$Gym5vyTqQ z(J#=*g`)~)ae)RTJ5vPfB)P5bJo~)0| z?$DSNcySV#fJeRzh`?)Gg(1^OQN*UIHbb=}eNTWtfOolj62xjdHsu^FrGIn`cWN%t zdba^KwHZpqB3+;Vp2o&k@Ta-r-CY_*MAmXsoBfyp#3zclN`GY*DQy(cq_E#06BOfa zGvKDkz^=@1a*!Ew>Zy*QS+V?Q^ULdDi7_uLCqmT=lrO}eowE9fsU}Q$`v+K zb$AixQnn$(XSBM8p{UYr2WV+A$0v1^@X$2goQ!w{GLTL>(b7(KI|5tEMe$`Mgr5LF zV+X-#c@@cTpIch`-p$OOQNEwQs(R<*=i)chb9_X`=(K$wxaD z^XU{(`mQV!H-<9?S`SA^el;Ottd=f7O9+FJLGQED`}* z*{Xr(jy_4D!-@mB#g*F}RHt@~uo|L2A=XWav5VWueBYRSl_2+%(;5kpL5%RVd9( z_%$Uk)+eWz62oGK2m1IC4g7o_TwJs`WLD^vMc9?!h9FT^@V-Wb$7S=O0i`<^mjpRS zQWV`V`ZI^%;1H?LB!J&&S;ENFECcBzfF(3!$DVDQem-O~hpCHLHVK7yIU+;?tRKPh za-P*6JqP8*g5O6f%veSpq?5o`V-Y{ujEV_^T;BIc6Uk60RIqcU7I86fts6IAY zJKrt0^A{(gwVc{!xtz(?+lK zK%Y;j;i=hQSxw*7wWN@-E0g3L%wBX%FkS5&m_ge z83ti&MDiLs-__Z|iir)F&RfJhcB;@0NE635KA=4zAE3%a^k)b8v;J4q3L+<}wshyiyXOzh8OBv=r=(h+@rBAd^LWqv9$ zKDVGC8|!=u?TL$007q{k9LCjQoq?oV6S^vlL;nliZ8H2tNS0zEdbjzV5DiZrp2kG< zQ!_k%1x4or@-kAqel zpJsqc0RcsU?BCtx6u22;YVbtfl0%koEFkX<8Zkw{b_Ta3}3^4{e_*{yisP0lU)p9blvEcb7G~Nhb=uDYBceY_}fdcRI+QdA=bs zR}Z4i2|rCoS=2}~Ft2sq01G;jaT%c7)rHgOgL(DAeYyZ+JQ6)9WC3Ln^-FEe0?Zsw zzb?8!SeC&AZ&8@f9goWhb!}+?&k0r}jIubLX5dyE=Cc^;%}v`(u5O02wg^ddg2P)8 zU?UJM0W^sMM1MMRDs_9u)160lmT%x;Dq1mRN)i#`xb#_O`MzJX5fP|{Q#nn_e*usi@uEu;aBIM#eKf22DN#9kc zH6nXsQ;mq)Z&T{z)Mz{m7p%POwma!twi+qMFA!4h5@~A`mS-HfFi8zpmRQ#W6GIq#0p)VOpyTPZe|M`X zt7l28ipJ;3RXo-!4=3~4yv1j~xW#8syYU*Ro~BOP&(v}8ruKMuQKm++F39}VWaZX= zL$Tp-%709-uzJ*-W3phVrp`*}p+nRp8TPX-?&ktlB$saFg!)_S>i5>aTh_de1N^%2 z8qSBjA9g;uw%08Nm$e%|0DO!B{=`-pc0c|$b3M%=>7H3#Dz!hg3+Qeo@T_RfV=c2i zU17cUa4HDY5;c0*Carric#TZ6;o$N#s$}cEua&nTCK(3PQ2=_P$gOA7CH>sQLgU94 zr+-&Rg6U9=d6VUB?CHVl*3PMZHsl?!_pW_S&uMFIt?nt%$LK+p-6-ISy?1@}o5rD{ zQ;fRu!g97c-y`IM!U9nUq{c^26?@Pj(APlhhXJ3XX@Oe+M!h} zv>J}^G7?O3U9@kkgcH@6Xx>#axD2>*1NIl<5SVP zr9QKop&iiKs+6~yxF`dke*5t&sz*W z{8?8gQ$uuamsqfHAQ|>HznVCy@E9%p?)?r?OWp^<2-+PYp}Y^8r99qumnl}gq<-4z*1KHHPWkFoQKq_;e8#}+XJz!>O3eio(r%L1Yw8XH5} 
zDE0y@A}HG8wow=GQ>nNd`ye>_0q|g6Dc+BGU+sgpa2WjMw9!#((EdsZPeSD<6?FOP zuord0%p)Zfi|!#D9ktl54dE5?f8mZfN^v<`wc)aQ;C)?KHF=>TyHHY1bhx8|2_F5| zz?Pi7#^SxX8u%~Ajv{J$;-VkA_M6&xZaPq~Bi{n%_RhSJ2BGl&!oUc1mMJQ0oF)lb z4pbP_i&0-2W?Jmqm$SDcKRG==a4C!k#tUTpIH_YlbqxkEsQv|G7rd2GQTtX^**Wwk zn8D;A!dSji9D6e90p`Haa~h$c@K->1@{%+L%ME3Tg|Z!M&yBcnk5j0i3#(cLOqB~W z`WhIPv!_tJ_aF-Xlna<EsCiZG^c5v3Wl) z5Va@i1m86BZ6m*l-yUBOY9JL3F~ixK|L96bI*+AULz**w4A7->u8z2r|GVIrqd_LT zrTM0P9=zj)h{oL2vwtqdm#K+p@uxK9{jOi>aPvLKZVCG6FV;%m7O!a*wMTaiG?kVz zAmIJJt?QWMBtLn+1dX{ayo3V!k#@E1d2XPpvb5(h7+(WZbM_>@UPc9XOc40|gF6sa zn-$&D$+>pa2{Q3#rL+Vnhd)F|y{6SmU?VIC6yd<6^20A$Xu(WN)E?UX+YIslh$tjc<4e zE!4*{9n*R3-{VI-r#iGY4!gZSP{g1#8Xx(~M zk2Qq$kUz4x3KnwDG1X&k`fR)e&}zQB<7lCXmR@z6-DcqaB+I88sW-yn8A!mxd%4pphRgV#hV#}D(EF(_D z|DEgWJ{NxUb0?W_4);jW#4n5>}rh=K)+A6<2K3E0#6=g{8dz-m}VHs6Oz z*Xrc=qnA5}*%%ipriIAs-dkbs+PQ?CyN2Z%!}y6#_P^{lxP-O056HHKy41XV@r7LC zZ0j}8JkasbGMBe(oC7;4HE4Rbz|LZ&4|GBfbQ_dK!RC>+`~1@V2}!tz_crT{`AL6J z$M3>A=0I;Uh};=02)L|zKt!r_n(Q|oOGDq{t~#k zKJE+NNF7^VYDn_z?`s+U|6sh=8v@j+B+r(lD-wd9x*E)HOaJFLmGSolb(U4BpGFlK G>Hh$DyR&lu literal 12618 zcmZv?Wl&tr^EQmT1b25QxVwAs;1G1N4est92=4Cgz6&>j;I4t};?ANWNO<`D->>hf znm$u=wanG0>Qqhl$QsJh>bY41JssV>XnpuG5aD`wwztB^>7zMd_ar7iZ>k~Rfh+Qn zZ*&~bWar^d^lLPUXmJ1cAYK&uTbYinJcJrOyQx~Vi_<-yifGxyrv10^CpZ#8w#(ME zl}j}Job#^Ink)JPExM^O1|h2{ozCP?CN67*jQ<8^2WMsUqCn+$T^}h+)AWPhn@lRE zlDXC|J9qWnj*(}n)Cxw&WTlYoeW6)4r(GyOl6WOwx|HOmTt4`@mfc%i&4v^scL zD?*YuG*Dbr32$hf6=Tqow?#9{uISF7X(HZVZ9dgOUxdK0z;GkDcBDwA92T$;dQ&Fk zBl=D{s5D#-n{!^K5=CnbluYhtjL72zxgwu;vQ8tmcP=%=bO1nwxNOx>vTcnGyB3V7 zzg0YzFP$i~G2F{}s4P00Or&?H1Z3~u{m0R<-o8SDA*>oyNNdf3i$YHq`Z<`9wc0+7 z&B?~eu(z&0=v!rxiRHPlku~^n1mS+j)x5ga{J80?c#ry3J5<<&nyf9&?hpX3tFVtVPZ9!wjfPSXD? z2W2X1^c?u7GN6E?YLaWk8mz!D^X?+rkEB$R3ZztOl<$=2{AaXly?+P&!A2;rWn4 zaTo>wQSM0yQL0h%4tXWn=qZ9=6oLXb{I>Nm&L73O5MqLtA4qQk;agaG(-1+-&s1`& ztF_#C#)0u5QaNJmRwey za-~Em-54J>8tFJy@Y4;_Pa8BN`>B4?;-D7g#=$kr4+Z8#@ynWI!ra{Al@%o=CAE{xUtO+O0$#3@{5^he>4-6y zT{^MrG)hCTQ@LzzY(HMo6X@MvUWaXN$mq;`CSVl>E1Dsf& zzf1IlRkgj}v$CH@p!+80n)vOJ`G~IFHTK_o|Ak4LRWi3$%~CvLOyJMVtkJ2W;$v+6 z3Jcz0l5N&@QOdQW(=HhR_|MbCMXBc>q@T4fKgnedrD3J7M+I(}5FSJYL9H|gCGeYF z?p2)|`6wIir|Yh>jh+!R>(~Q@k(^JRgEhZ+o3~Us#gXtErwOPCiJA^j{ygN=#7|$o zF=%{Pd87x+WXuRi7M`*CHvJT`JK|FfXvP-Wgve3+vW-L(vKComew9*`=2U`?mU(W{ zjpO|FA+>XVjpj=n$vEd-!8tqEVL@|Xd@dN9+$S8??en~7S;EQQf;u4RYjM;tBC6H@ za5N!u@5WK>8*M5$>2woZ5M7V(DfG$cVcxhKss_yM++B32r7y{zM~az;EdN(w;;W<1OIV9xC^UB$ zRpf+~wel#u@@#{CtxL)dc*!*QhBOGTtZ}u%6;PabDyaI@Or@z87~=pM!K(TB!6a4d zx{BM8aH9}j+W|CQ)}u&w36#qsI20}2WQ#K;+9eT#;UT7x!A8ayHb#0n%d)7KfD6rJ+Ld{uRf#)I)Ep#;4*> z)cHaUG{t>zOk-owb_C2Meu;_dlL?6!7??psJLtRX`3-Qpk9iGn1URj58Jl`x55^@m zM?{qOMjyVkscqS}-Ec&dzVtn{k%#(+UUG>?Gy}^b^S8-QxMc5;X+=gL?pa2dGl9rs<}~V9MSEXV z_9G}qUiLM`UY<&E+$g7TZ&y0%Vf0Pq%J>HV`s`)ohP%D8)Vp|PMv$>&ZYNaJ21XX@ z0i)1VEBlJt229|*UhvWrZ5!CeviTT)iIoZPS`+nIw~kf60B&1X^t(y(d1Y5edaa3n z?VDPV6Q}asKWWMn+4i0A-NDJ^}is4w1SE z4n@Gld6#`9pKQAcihM@KYfrdiG26^|=a5%wPns~zf$^9{U%}zaUfE07qHe~Jm`&=w z`dH_a(*C{;j<^ML|IDEYwc>G(IT{nkW?)+j*V-sLXI2wh4#G4_*1}UiW9Jk1*!VAk zxa8hpW~oD*Y@07l&vNM8-yl-xI4WtS;S1!(wc&b#z+99EXGjUkoe61aG?|Gp1=CQ| z;)ovobtN7u&mUQ?psHv}D$Ye&8PuaQ2shP0ZRDIcuNo^7~ceh@rjPdS+FtgPN!ROEn-;l-e<5#2+B_XiS79>nFV z=%ry&V^dZ0U#SfCWM35pb8?r0AE6?C-T%>~5F`GnQ~e3*%urp*&ERv2oW@|Dp;Jwq zo>XlRqho$qh4*RLelZ;IH^>XjTeEhvm{EsN9m=~|;4-;e3~O%=2xK)a4dCx0?_oC# zS5}8peM1qN@|eKpv{l^%aknu#Tj6@mpxCDiG@C%U)X&fp49!~JHV{HHqRW0Yv)}HB zt<=!A+3h|ua=c7zn$)XCPY|;q%@7$r*1}m4B?c!FU3QF|89r)oc8R?em9qNKP+)a{ zIZd4#wK8q`|FbE!jr4$*VLb8f6hhmAs8RglS>zL~8#ClI@4Q-v%n9*g?5h;1wFbr$ 
zEFbnB%LFD(kW?SW%9OH=r*(a9xXYdJqcVIWS%!{pcjGXtzLii>caIidz>=wAM0GzY zKPloEL&H!|%^+dVW1JWf%5PnWJHBBb5uY4D2;aU=s;Q3Fv_y82QQ4!KhB(n6ueTzY zZ>?b5-F)LoQ(dGepe2YVC}{bCp09YQ-Rwx3Ee<7TkvNFLEwmy&k%(3|l3qkc<~BZx zXyoqEolY-oyYpzzPV@kl2iYC%b0TVYlJjMlSgYs!^xax0_O?DK-ZkfFeN@VdeQEmDh~>pJ7onN_FWy&C|;5uk%5FEGFI2|kY=1PC=1rfO2}dkyeS)PB7P zQaZC7UC|sK%9a>9*^*Co2fTTxbc-3rKx4EvmyKM*G-Fn8&u|n)3jtwE<)1XQ$ss%_ z6M`dOZQ|1DCsi5TcN0ksjC9lg3NRGdMWH6chVX`j<)jHTgk_`Axj)3~BU{!FgOR%P zAfg08_}f3Khiv$W6@8x?g-6NkA9Kk0KPz^-oh@sO<=NLDE16o=wz$pUpI$U1 z{NVYSc=OB&v-j%XLF1Tik66PvGKPNTFm~nrg~IPbdxgBg)iltSk;PZnH%#i}orrze z0j{T?hEuG_1mg&aPj`$eiBH@^d6YSyo9R>7b%T>V)r zH`RZN{8_OQ0!PL4;$%k zLSjo!6M;PbR!?lx3ruApPIs*w-T`{j&3V4;9UW5|Z=%!>C0H{Azj_3z((jg{#7Or~ z3lM`s$+4K`65J41Z=Z*5r5(*Fky%1I2hw7;C#qJS*3m^X?msZWUvBHJGB| z{&9~Zw6R(PYkE=G4{#bBPfGvI9VQ@3p07xlB|OGSsrhCR#P2;+brmYGk?uvQHPWdf z+FloQu=4j?oz;4_`jq$sijQs54Cdd-$j;88u(c!2p)Mbl#f;Cj=onvmhPqY> z5_o#sgH|Xom^HaKd6#(Rs^r@DcjN6dP?w|To+~P>Z4_;Ca#Ux#^vO_G4_SPi&DFKB zILdv7Q@bBpH?CJhbbSQUISfk9Q(V?Fv3>1Fr#9H+Kfh&Q|F)OEqRh$4NSI#DJdiQK zDJVqjj@vEM*5{KdM&vmHNqd%Pb8E~XyK|pU6SX~jatz=kDVmJ_;|We6OP!94 zkT6b1%~!boLb#i~56<)lhEfl3CEyzJ7PEt+CWV6jLtxpeK5dCop&l|JV&a@WUQxwt z|2UX(Inq%5vtq+mBi-#2kMRN~AOO5EXrk@=RHsS$%=XwgDkdH0@6;sy%OdYt02YBF zH4ic(2Rd075NoZDIGL=MN5^iRRsQ9z%>Lo?I4Cfh+Lc{3U>e{wt9e=Zx3Mn!u)jm7 zESvGqLWBBg`9{&Q@~xH!&CFWiNjcf(XDRYw_=uB7$1ZY+GD^=5oHi*05z$sTaz*da zali1WR9`BOh8&8;@XI~QHyPH5_ml$>A@I`#h5%@0?wNZwke)SxynTNpqEoAQ`y_)K z$-45PV*C>Cf0@d&ax$0jZ^Xt0lgfJ5;pCX-9iV1Tn37lOUtW&sR=HfF*|w2#L*6Z` z8tkeKnH`Yd*8r)!;w=b?ke_I0s*(e0B9;Am=_HPWa_U;ZBo6BSeBZ;9qY^bX&5Ww~ zq;h-bxN=4T#lXYylcSi=`QB~eLHXahab*VYm{ex%6=nXT`Cx(4Hw2oQmBOQP|E6(e z&+p^VqI<^5WWiu@}9eM#dnKEmwqSkXw% zSX<$0${Uny%6sSqC}5Y>ANF4!*7-jSZyA(NrF1@^zoTsreE{y+n5i~sk&7QgXZ>Nm z%ltklj4~&4(?Bm5u~QUif%EIzropW)&cHXs;v<<5?(c^Y(j-3BJ}VNrn9rWjQL~mR zpOvJk$aw!2K3sC$e!#;axYHk|LOrmyS0@_LsphqEEIwtRx4u_*vZMKf@g(a{MCYKD z9*;;fW-q{I%OmAty^GRvH^uSbALXADWslI5if9Qgjl~FUOz}=&Byo{*^U;wso1}Cw z?(k9dqhnym$oAr`r@zPCoM~MVjdI?zCo$unSACzm54(x!;$d#mH#jAXEcnAapN)X` zLNXy@MJ3-*Sb0J0h*8+Uc=)0eYRm5<1j}=1;}thZ$sb3M7#@>>U+>T=j*@`$XdBEq zd7A_slSt$F5PgD|Wu#%^c2-H*P(HAwrj@{`AAf_us1X0Jv?dl-pF>!;y@9q)eTU08 z{)tW*Y0PPSTyEky`Lt;~OqX`hP!0dd?}bPXW)r9aM*&+A(zp7@7)6pn?^^`%I033sv4eHMnT=CUH1~DgZQp`_~DQK)}slVPPdh%fdzDLtu$?(A$39{{D_;BqV57%G{I9s zY#J+?>^2t3w6|<8W(a2jM*&OUmKiV`(;}|VL03d=A2H>*uk=4`miQ*JaGP^Lb?{x9 zl2e9~B#OpV*hgl-^S30~e-?DOD!%qsoCe?UYDSy`?sz?$gLFb z9&@)BmcB@7&B)(>90BP|EVD8s3CmUDaVJ?6bpA#|-A`i9NzaQlv5xR&UI#?fYr>2H z)TfdJL5>K6WIuAPd&#!yum$82e4gNO%cC+ zT(B>(%;O3R!~W+O8M>9n5~8ep!}t;K0ZoeJmHw{e%yZ+KtiiB;T4KCh&Xrl7TN2@%PBqigOpQZu$do}51z34F?yviu3Vn1QkG{`Eb z61%9G*YEb4MZQG6gE@(j^n{k9Cd0>&TBX3VV*`8PsWpScpQW*2?tgcD85;QH3h{i% zt&8z2ghza9d4SoYl{WdulFTCf_sEyRc@QB23FnB=pKJNK)^%j;i-Vw7K7KRTnDND4 zkU4Y9&c{fpcfAZw-W3Es+)m*DxVE6Jxlqn|>AqeI*V~|iAkk|_m^O(I^1oV#Qcmvg z?&5t$oxVx#0p;yYfVDp`P08&VhVA2={h%ceiiS>bt+$>0a&!&f_il55>9LWK}Rvf)wrK)Fwc z_w+0pUq{4P3i9%to@xo)yhaGL9urxc2K*(vo`W^K^a2xJckj7HR*>pYD9r%2d=MStr?WQP}r;=W;fm z^5R0}y=h5tQ1ib&Q?1lg-EEXIkoR{;iL!ZBva{XEnN*~Sin0kblvm8(>{sZ_e4>ly zK5JO76uyVKiFM462RtgE@Pd#K8$N-fCm4d?MrG(41OtjX%=EY)i++C=M8EkzK z(4mv>`JLLWCh@MaVE#rL^OaJVln<*~ zDnu{Iu(=ed2<`UX~~l>G1CMAm6f{bc*zDFm|dE?O#1l`5U30?FF<7Bi;X- zUX|ggEdAfANT>k9$*R^~4B-g^e}xK(U+I4A?0pOQMUw$8*Y_s)FC9AXtr}bJ+cdHw z3ar`;nax{#720QX5L@z_3o1371uEylwyP&2Or=FJqMp%DBSKn*4V}vLmI*_5s^tpi zH~8;x1$aU{<}ynrDgD{PAeNzMvrmX1Pq@ozSxep`ElrH~W2b;c)Mv2O#2Kz1{m^j} zD_+YhD(jW%1mN3vtbeIhusQ#^l;*Ar-M-#(e{yPW&^%DiFk8YXZ^MJR6XC}?1T*?e zRJh0TCNb73_k-YLUbSmq)Dl5PJ7W4J12*Mym?~zZ!aprhEk?LhKL?sVA*m|UAI;wX*Q>;E5DtofL-8?taJ<;xpYj;qhx||10t?dKcX;I 
z{^~%5&V!vEZz)edq70+j=?-!bRt}?xVZKGx^A)a~Qz7iALN=-%fAO52=t2`X7McGL1u?fbL zhhwW|oXZ6;W|^O|5{>GgZ#X!oppw{s0q~Qo03nUVQGLnjl~D}If|^*h@JzwlBpaN} zj*6*1$2|TOG0m&{6~($u@*^w*DSvAM^;Lo9-}p7oUl@(M*Nx~F(Vu2Q+%W+jp-$Q4 zzWG^hPgMq+-cRd`7mC3_Cl7-|{wK_qgif?wlj5o&%YvP;R4D6qVW#E2IF-iJgHB<- z*f6^{} zb;;2SmHSTC}{Zm|4MqIcimg3YqVCq~i zpD@L!u$I9=RF7W3ES#nV@SfOX#;>k1_qfVryJgx}JQF3&rTUo)z&%xw>Z<~h%NyTm zNbuHFjnQ4~K+=;!s6zIq>)Mu?5-B2THxYb2C=Kbd1;jrFXBd>P0BVvZ3Rx-PBkCeG zs2-6|j+G)RaytL7w#Bd@X)?sS8BdFUNiS_C zL&>%7AY;zAt|H4qYeiHQ4q+KiQMSwWJ?pU8PPh>9#eeL0MUvna$A6Y$S^imA?8b8Y)?`)#%_ z?3SQ>%in|+=~qrT`;G}I@4c~Af4QOeBNHd85fb}tIrkyszi+oo*Qqb9ga5u=rG5{s zNUaOCwH|+EAd0=D=~Wn$_E;-@RxXnEdW$RjO&Jln-%S3fye!MdD~bA(kZXE=mIL~1{q!-l0nq3l;d_5f9%V4!KJaa9Q6mVX z&d(TPT~8b~cW7=4NvTtpCeEaHAMdvB|Nft#3=YcTEDk0wWO`G>#XXUg(z$E@K1*rZ z1ph<^F-B|~TK0&5uhYOJGPN1n=6E^#p2n~Df1}8x_#%HI+;RfjkV9B!uZ=h$ zO`RZ3V!uTMcf~75G78Czl1+>wQ|n=(o%WGGyPU!82wFvjsV!g$iUOBNE}@<&l(}_C z2{xsVu?=c7)$9ixtEF?M4FL=LI(#_{%>#pf;1^(PqM6N zhecH;HY=dM=;SW(3qLe#L)EdIIB{2z@_ondd>M)BPY|?`%a_5eA!@tmt{!mtaoG!Z zUz{eo4R=U&m+P`#LY_T`0d}Mk1~=#*=tR^>o^-R~nco>`}ViSXm3^ z^}DQWHV{-tIEpg; zs}QAFi*R~?@EC`Pr2GXDOuc-w%(bA_KMalZ)&KSA zusq7!k_=dG?!Wxv1!E^}L%c_E$3|H*ij9j3+Ox3&f3>V(3->^^ge8t+s-teu^O2bL z65ae}u&?@{?rx92uyvW&`Ai-3zdT`%B^=nx4Qu3lM_1=BrPr%jN9s%en(&K^O`%byYBZC0Vg19`|-7-ZUp zwd=&KHlb4yq4wcS6exD1gx4N7jR2x8WzczQ9F6-&o4))#LziVyAeC$Vy zTzZ%(+~O_L{}~INyBB`)6mwnE@MN-Cv6zYJZiBYFv>bq}p!Qxf1~to?o}FlpIeCcu zo>pIR8m7YAjyuoddzNh5ZLU5UyJo1e?Up>As`-{`|ZfQ57ejsoa@d4y#Rd;?hPGVPe0THzx?)dvOK5YxmXL1&cnGN znd+~#l(-sw-Stm?-SankNL{--{ij@AMRT8;DdZ_Lb>ND)TX~Ngm+dnLS6Y>d8+Xuo zFn-%KmwGfvCtdU>)r)!cdLG%#`@GyX`M|&VY2Ua#VS!m9YeA9cnce+xACNAMBVd)KPcEuZl@E4-l)z zhL4GXD(u3|T~=rsWEm%x1~>LOb~Jw!+73m~mqylKMLU*6P*V=8zV^RD>v2aK5JdPVi4c<#^_QX4 zr2+Av@o#+VEQJWRZNxcloa*zUJ0;(R_rrfUl3&j6)N7R4r97LXG$|EoJ%4_ zb;7ddq)6d$FiF`lVRWJ<%YpVNG39-KjKSe_8Jy>{fO6EI}&He(4g=u}dSgIG0sbMhe%N4yGC zX9c5Xt9o>M3HoOVI`!jdgsjB{CCUMW%nw;0gI!`h0%G8Z^kwe3qhEA$j4(2j=hm`D z7%E{FY(eH2M;blmgs-J?UT+$HUzsTh72lNU0Qtz5pLKZE zKS*lQ^?4CD6d}`nx>LqjNVEr6wnM08_1n$O#MD-^?ZFi7kmt|5RzeNM*!S(wgbmo# z1`O=NJ01Qaim*zi)V>VEJ@#GqI#M^}px)m?M*Lxq5`Ll7LHybvG41#jUn#6ST+GAB;p@Loe_`uEk4r(1 zK2k~Ma8o3pAIJ!+T1>0{n2<7L;BN%K!Rw9Q4?j4gh%ie7=*&7lP@AK@tU7;2vdUdO8)@T08!UwMKz-$mr7N|RU zE+cECuCm3S&1`_M$QXl$Gx{nE!&sQT^@%4EJY}`OSK}5mWKH%GK=$HK7Bq~7ku^Gb zpwn&!>QjvG{33d8f`iK$`&(G5-w4!0)B-?WZVGcY1nqG^W{410dA=K4HahNr`CXW% zP0_cEMpihWYaGzVOe<65FGk=O1B6v`wT3!_55~yZ4H1@tQZ#1Bt1G;2f}FT5qhVx~k%sB0)kl zR#ujz2-iTOe0oSm4d-`>zG+69aJFL7^>lu>i@E;(&h0a?h0V&d#de?VJ~y`uszRrW z*u{>A@$K!+nEOJrfbwuBQ!X%KbpLu*@4Z`+ATr%pu2U%K5SJ(SZ(oOmnW` z{70Nl(wRr9G~cxUG)k-E!S3Dt$)(%HAMvHlCYlO|3o_~B!&Rwk)c1|tE_FOazK3pp z4}H%TS3OK#fo*0&n0TeRc40j!N>Ojr%mwh(~!L1FWH6C)jjdh~FOA=9X?H z*t$r?>D?AGeNK7z|N737=iMJHqp!Eqp?Eod6{q-nQD^hj9=yabC!@)X?W_g`)6lz<0UZ#7vk8 z6J-OEUM8)SiRP{GC(jTfti}j317HN2pjN4Gv7h9t9<_O$6 zh&Ub~PtLQ5=IPlwohIuU`T7Kqr>;0-5Q6pyHIp{uk(<$X3|Yt3euQdD2Qeqi=`Kkq zf6aj@0o|h(D6B7)-~ml?w`>%pHVg$_c-$moN3~8Cw1Ay%yvuDg9=E7e#hPf0nLa>^ z{%8px$Al}6M|WvJVR@jBT&pHVMev-kG>cu}w;8st{^-&Qtci696A*7;}$dX77Kn1RmFOA|jW!e*T%GbX^l5@M+)WOKMDR3z46InS?f zRmfJZaHk5_$$&g(g_$E1D+f3?91sD$exuy&7QCM{oFy_X2WuSavcOt=o-%4FFIr-x z4xOB{m^>hlq3n<(5@zy23O%AyE3D4Qk=hKE;mpdOs(>{?xW;j{ zfQ`DYDsdUyCAO}I>WgNa*G`!4?mf2*#`K0|i&-8<#(dF_X1De1{);9u!gCmab(gWy4>qzeI5p#+yRB8vaANHfHT+j|Qv3#STxx52B5#~+$|1?Pj@Q$g%%i?H5qw8++2`5REt?ZBh^uQFf z2hYkIX9l1i5_K|&qU>d+ZqnU=^^FmsSQ+#gm*EV#W$XK-!`kRb0Be0LO~OrgJ^QeH zX^^($d1GwgDHIv+T&waX^%QC)NlH#TESvVby6(j-`eqj;XBF;@f|fDjRkt=s8Sxwt zW=a83{q&F8;NTDY_u*~0D`Ai@c3S&qxPI;00V{ruO)Pa``ApW(;kp}CqRDRe7*Ce2!}w==8w1;>yM=$SH8RyI42>f 
z$>og{49i!E^Jak>Ydm|ehqRk>HX+> f_FiNAqCI?hvSwcUKf6J@m$%o+b6yTLc)0%u13-J; diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md index 719d80174c7..3c08354886f 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.md @@ -12,7 +12,6 @@ Generated by [AVA](https://avajs.dev). { colorLayerNames: [], created: 1460379470082, - displayName: null, folderId: '570b9f4e4bb848d0885ea917', id: '570b9f4e4bb848d0885ee711', isActive: false, @@ -21,6 +20,7 @@ Generated by [AVA](https://avajs.dev). lastUsedByUser: 0, name: '2012-06-28_Cortex', owningOrganization: 'Organization_X', + path: '2012-06-28_Cortex', segmentationLayerNames: [], status: 'No longer available on datastore.', tags: [], @@ -28,7 +28,6 @@ Generated by [AVA](https://avajs.dev). { colorLayerNames: [], created: 1460379470080, - displayName: null, folderId: '570b9f4e4bb848d0885ea917', id: '570b9f4e4bb848d0885ee713', isActive: false, @@ -37,6 +36,7 @@ Generated by [AVA](https://avajs.dev). lastUsedByUser: 0, name: '2012-09-28_ex145_07x2', owningOrganization: 'Organization_X', + path: '2012-09-28_ex145_07x2', segmentationLayerNames: [], status: 'No longer available on datastore.', tags: [], @@ -44,7 +44,6 @@ Generated by [AVA](https://avajs.dev). { colorLayerNames: [], created: 1460379470079, - displayName: null, folderId: '570b9f4e4bb848d0885ea917', id: '570b9f4e4bb848d0885ee712', isActive: false, @@ -53,6 +52,7 @@ Generated by [AVA](https://avajs.dev). lastUsedByUser: 0, name: 'Experiment_001', owningOrganization: 'Organization_X', + path: 'Experiment_001', segmentationLayerNames: [], status: 'No longer available on datastore.', tags: [], @@ -64,7 +64,6 @@ Generated by [AVA](https://avajs.dev). 'color_3', ], created: 1508495293763, - displayName: null, folderId: '570b9f4e4bb848d0885ea917', id: '59e9cfbdba632ac2ab8b23b3', isActive: true, @@ -73,6 +72,7 @@ Generated by [AVA](https://avajs.dev). lastUsedByUser: 0, name: 'confocal-multi_knossos', owningOrganization: 'Organization_X', + path: 'confocal-multi_knossos', segmentationLayerNames: [], status: '', tags: [], @@ -82,7 +82,6 @@ Generated by [AVA](https://avajs.dev). 'color', ], created: 1508495293789, - displayName: null, folderId: '570b9f4e4bb848d0885ea917', id: '59e9cfbdba632ac2ab8b23b5', isActive: true, @@ -91,6 +90,7 @@ Generated by [AVA](https://avajs.dev). lastUsedByUser: 0, name: 'l4_sample', owningOrganization: 'Organization_X', + path: 'l4_sample', segmentationLayerNames: [ 'segmentation', ], @@ -100,7 +100,6 @@ Generated by [AVA](https://avajs.dev). { colorLayerNames: [], created: 1460379603792, - displayName: null, folderId: '570b9f4e4bb848d0885ea917', id: '570b9fd34bb848d0885ee716', isActive: false, @@ -109,6 +108,7 @@ Generated by [AVA](https://avajs.dev). lastUsedByUser: 0, name: 'rgb', owningOrganization: 'Organization_X', + path: 'rgb', segmentationLayerNames: [], status: 'No longer available on datastore.', tags: [], @@ -287,8 +287,8 @@ Generated by [AVA](https://avajs.dev). url: 'http://localhost:9000', }, description: null, - displayName: null, folderId: '570b9f4e4bb848d0885ea917', + id: '59e9cfbdba632ac2ab8b23b3', isActive: true, isEditable: true, isPublic: false, @@ -304,6 +304,7 @@ Generated by [AVA](https://avajs.dev). 
], name: 'confocal-multi_knossos', owningOrganization: 'Organization_X', + path: 'confocal-multi_knossos', publication: null, sortingKey: 1508495293763, tags: [], @@ -435,8 +436,8 @@ Generated by [AVA](https://avajs.dev). url: 'http://localhost:9000', }, description: null, - displayName: null, folderId: '570b9f4e4bb848d0885ea917', + id: '59e9cfbdba632ac2ab8b23b5', isActive: true, isEditable: true, isPublic: false, @@ -446,6 +447,7 @@ Generated by [AVA](https://avajs.dev). metadata: [], name: 'l4_sample', owningOrganization: 'Organization_X', + path: 'l4_sample', publication: null, sortingKey: 1508495293789, tags: [], diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/datasets.e2e.js.snap index 054bb7f37c25cad554474ffe3861f47dddde6865..09888dc5417312a2ef4b289757795a3090f2970e 100644 GIT binary patch literal 4123 zcmV+$5ajPcRzVo(*Z9#z~h8doUqLa2`AjXSC#YbyCw2K2`}!rr8lrYO0|{ zQtyi0a^9HjLLp(CV~JQqOP&|We5l^o7aAQH>(P2fNBevFBccBOjhZ^p*(Y_hM2mOP z`Z_yhUQvB8Y()2JsSg&v)LatP&xu40bu=~y@JRqqRNoa(YEy}1W>5tXZ%oyVUAh(- zn%Sjm$@IvPKuTPl)G{w+U7^mdb)nvMUHv1&CguGSI*`~GkH*J$B*)cw^h(u;CgNFc zxgSR^lG#kD#zZkTULnrUnaxUF8=utTMvB^^lWNxu)tJ^ZuZ`Oi4Y5RgTuU~nd(~*n z)Lug(-e7t--AE+0^(j8l96pMnj_WxVb^Wp}isNlj-6w)YRivkSz?^Dre`n9ekx<|M zu47_Vi?Q+wao2IOtnU=Dte2N^1KL1%Y&0^e_I7uv;VyNwf3&N6bO|pd+re^d zNw&uiZcCNnM0_j}R%7cXr(;HRWKTSy>j_zvN5`fX<>eLPlCmSSEmBnW7O|YqE!%P~ zJ>HhH@kFp3SFC45S0|@p+A%SfT=HHa{_&{T$E!u`6Uh>-VDf}P3%7Z4SHfHd?22*CNPCFI z^I#|YbMj!;5YP*pwN&0Ko!8o$&3ipPm-ljtszCwg3eW_6M!WUi~~fC67u;Bf`sRKRJ2W*eMo z1Jwp!w80N-@M{~C+F`97cG$tN!#C~ltQ~%5hYknqaKIG~IN*Q>0NVs zPMa&$W_mS=v^%2d;be3w>yE9ZCvcSPjOrImkH(_mh4;dv6$&|GiSfj)WGwSuvq9C( zb+sWnsg7&I2S#eKfDLe_GcfriH5G+R%)l=~>lhm({b*V$15c&Ldn_m(B}Y?pl0V4ZBwS zvO3yFh!wl6KH>U+%j!6uTaI|~(=PbF1hUDI-%nid zstewAfu}^;3?1@5>}V>1juO~b0%(hW84d~rI8L}91M<7sun zT8Y3q8Pw~B(W8Er8Vd;y;PkK^OSoZx}>G#r9MCujdO^@W$QU|59?<^~wZ?i-rsc)y{e6#6c z)wJo-)KzEx;!n9+eb-bhp)PWKbVc=@;iPJWCl+=N?#mOS`Z;ma|IE7&uMp!o64aZR zozqikCo?oNm^re2G?Cns8b+~2Z*FcA3EhxT*6Dd?Q!!x}Q=2wy$YM7QghHVleO_Cd zuiuBH_cu!O_3igAuI~YFp}r&DKexU=>s=x^&)vBG!bh>QwXpu)2mkAXljW<~W|^*ammhZfL6@N|#TI(I>~3!N z!&m(9gdg6JNu69Un)m&nltEt^>@Jhw>kHsFm%(ji@Jt!}N`{9D##9%8<^Zgh4%uC@ zo(2MNW&pH+MDv^inzsbt3$rvIkfEE4>FK;Yj_F0V)cwwAm)aK&Y2BltP=7@2)xyP$ z)?sMs|<)!FjE-$gr?(BBkM!rYCJKi8Cr5d?OONDg-LVI1v1S{WrtypPfw0&GB?{^ zH8!2bC}t(jN}Ix!CQmlobMLB*N&R%rq?WRyMWf=wG@a`5Wt)yu*v~14t>rLL4mX#> zqvh~wx#W9XvYWF1u^bMSLrnz?RY0r)ZmC$R_Q(G93V65zo~wX&Dxjef&aRZY1q=ML zS_$Kou&)w6RhjQ>TP0^}|5_zHR0%(Uvs%c;{8aj%r9&!xyUa#XDE_%(m2|-B&$7Y3|X)bUYG`j}Il}${J@_HMH?W zGP{~H+0A?t)nb~N)*OzhdNzS?n~uhf{-Oq-&AfVr_|IH{lA4}~O`Ddc=g90kX~oE# zF5nnCX-S#)tsI%VQv&_NqB0jG#}zZNlbaOJ`H<$MI7FbQXZ^;3fq{Ykp1z*WuFlXv zPtNdPTLHt*(DDiqSY2eo>?z2(tCuHnUTXhv1h<=lcgsaB{+OB^*K}iNn%Z3cNwNO1 zueI11CGK7!u3U7eh4btXfzHnLUESt1_VsmkcJ_Dn_DEA-6f@&^ zSPgHj26u3&N>Y1A5H@yW!i_=rY7o9F^I28E=d~al z48m`N@LmvXHQ=ww=Tjl`3D!Vk4V+m6m({?&8i`L$0iVy;z#TPkPYryt1|F+{rzJjh zGN0#b;Kdqvy9UZ@VO_1nr>B6=)>`<>TG(9+W3_NaEnHQb&!<=Bb3-kBrWWq6g=cHw z=Q5wu3;2jSaMVFr9n{o8TOD-OQuDD<~=+35OT>aqpjzpu*{eAm^d3%-wz z)LZM1Ka$X>Z8(}zU)*p6Qh%}`BlW`#S*c%d$Vy$>n3Z}{ z<6%lIw@Okg=`2*^2b5aLk@~jAjMU$4%u4;6#;nxAHCd^*tT~!ePp>%wsqbEsk@|;g zvQq!wnyl0vO4OCU~L={=EralfVTP2fu5A zcbdT73{}nfV6O~rZ-({Fa7Huikib3}{IO;jYlgkeaDxQ)%isge@a0+R4>ZG*5?C%^ zP@ZdspESe4W_Viym&;&V3%FY#*aB-?^1+od`1BSSZh@UG5Rt%DGI+WL{(6@BO)c;x z30y6Mzup25&r*M?1zwQAC&}Q0E%2*Z>VIkhPisDSwG6Ipg{D^MYK0*QTq}b=(h3)~ 
z!bB^~NZ@)I{OMM>r4{aMg$E^YgAD%XR`}j5^_N=Vza?;^41TW_Xaj#6G`8h~*T~?m zHt27IEp2eI1a6YSm$$)G8(cFByj22kl)?A3!ToLUSQ~s_0{6+_pSHoz+u#pvptR?M z`(W_55GZJ`M2EWt+Kbxign+`Z6fzK{pK4f0u45^^9d7VKw{`&s0E zmIeDMaz91xr!3gdDfe^A{hSN-^UMAGazFov%KT87Cyp{Lc%+UwUo83Y@bOdW!u^(3 z>B9Y%R_VgyTUw5|8D-b)31|0ayW&nNkE5y5dk*{__lzT1iUAp(E?j6OO=JXMl7IN;0_Bs zWr24rP;P}@D_m-Y>#R#vw7BlD!d+H)(F%vG(51iy3M3V{U4eg+i&LdURH=EbDV@T! zNi~|i6|bJwwdBa)S?2w}dD}i2jYstL;l!kT#XlBJ>c;k*1f4zIbLlIiNczgCCo*?s zG$vPJMfJhRWHfyfR8U*sj_QN)M0{p4F;_`nP+j1U>T?Ejz8Y7@=Zd`wYs$=uu)G17 zAIt%Gg?LfPsf94oC4PsgyP;+4Zm5XvdY7ZSg1U=ith=F;?JmXWg ztY}EKxD#HN;hqBDdK50qOQX73G+cD~Epn;8 zlmG+${avHoeW6Hy|G;Q}PpB)TjYYcUWw4L{vJEdJK=(2Q=sg?(4o3q$C%_aeUx34z zg3h^^DLFkAQH{*6ZBjMp1rgIl2Xg`@TR(6HFb@0;@Yqs8lad$E6(5`QtD_=@DE$nn Zh{dFl*7^N$L1fR^{{s|6%h!%a001*{AS?g? literal 4101 zcmV+g5c=;yRzVxQT_v&8hN?JX3y(`=Mu-E%ujBV}1-d#h$4+uCm zw!ydof?2MlyK7;swCLW&I5-a+D3I1+Xu^znW#W*~x}-yA(o$aKSrXDR4U@LPXtLq`uNhEB&-9*}D$5XYe2P)ji+e`JLbSo!|ML-|w93gCmhdD!R+O>Fb6S zw-Uy#QNubw6?A_Y?>OkUU6EB(Eghl4azTq}S|!rOe~UB#`9H;{p58k$HUEepE7# zn`MwxyZ^u;86J}1B^ln4A>@E94%p{_gARDs0Y@C*QD9JkYZbUlfgdT8@;SFet}&vP zByZ!K}AO6!1|L2F60IUnZ z4FR}60IvrG)}dn7J|sVqUy|MrK&@>_YPA%IrV^?2CVh{Q-mH%qCQ^WTlAI&EqG?07 zj9BKk;*>XxsFz9t#v6;9;|YC_-9Gcq;S&(rNGcIC(i>vA7YkMngd;;E{YHNzGT1*D z3l9#iH1wgK0imHBFW9UNY7bG}|c*Q9nQ z^Sud zW##E$DDFi6u&IxYCycq-#^+9mZG5AQZ9F||+t|8o=Skj&Vtv9swCZ%&haBnA$lR>r zCv&ulW8HjyS86v}(V^Z^bf_<;DP?GC8JepMO)Eolm!Wyf(5lMNe8n_LsjY$f8tAQo zjWxwL9Dy>7f@NscLML*p2Cfm9&l5qPuYo@mK(%FhsVhU9Uxv1z3~ix!r{h4XvdxYu zmtlL9;hxF&k|Yp%a|Br+^kw)-B0n`lot{=zxDqCBvaTI9tgHu^`97OK`%HB_H%->%U1yfddLWp}=7U-d3Pag#i`DRk&S+zg6M; zD!iq_d?##k!k81j?1UGc@GB?OxnQYymvv^z{YqWNS6t$n@)cEjB9Yo{#4a`TF*8Hm zT|f;LQrAz6O(b-Cf{ra>^-I#d1-v}=3~ozJq@zZL_AdoAcZOzrvywf^Fr(@Cc;3yH z>`{NjU2Lx5iWWDwPDB#%==59KiHc@iiPUIndpeQ(-MB(G?d7u~KBkWv=8CcL_%a(} zE;p|m&8Xg~O{rcRGc4U!|A!k)7ti5%Mos$6v+OZbX)B%_-DvD7wX&sJ$=+AgI5=S% zG26QIQDfa6%P{kL%rA)xv~o)#q0Mng?3fbv4sGTo@wE2Qk~m3A;`hh9Bu>b&Va}_d zz_8}KXS)hCN$zvQkehN5qa?3)!v+DQiec9_H(c(92{+s(ra)ZcM(FyQ8y+~y_j{A+ z53dK#_CUlVH0>0d%3t=tU7liby32$>OU3cr?}0-CgJmM<+a7pS0Id)Q{hSBB=Yb<0 zcwdyfr)1D}E+^!LAun9+g&Vx^Eie2)%lfVP~}f50{MTo@zK)4G&fq>#0}N)5F#9L^Zr7@;twUr_Vku*TB*m z0s8TBdfHIbV|I~MN{>W(^?_*E=!=BIgE4)T5iMu5F3ZrzhA$KI5ml*U+3Qa=c|5sd zjf-n?=i*yy@^u|g)nw~Be3`%JdNNRKLCa}I&N!F%YNnivb3~6?_WeOIqlskP%KMO{ zo=lAymXV%TyN+FxVbbJtiA;OO^TTi^C&nU%*qeHto|wpD6uX?|RGY$6O`iN!OY6Lx zNnJ57Yf_c$XvT2*7)|HSd9yYhO>#av4_=rDN9KXI7FO26<+XzEaf@!sd0j2+t%bX4 z;i+2qWi9yXDizQ;&!~fN9b8lgW*yvD2hWJj8cO`~&+6b8b?{CdsG(wK+aWkxXLkrz zgkVz$(jmAv1W$&9KKe=`(;tW6)ew9Tf~E7}Pv*nt=fk1-LesNL#QV;CcyT`b+J-JD z7ViSlGpm_NYg;fiY2HX`A{mP(N7tpq;u9^ZTgGTAonOtltHojyH4=t>r?@_$oB7*w zbt0a$2Fn_J{@T+oNuMkfC~cUj#Dr~WW`WFqe(K2Fn-S=#$}+!m>d4%e5vX(FOfr|; zOe<2T_(r%zGhi}!x!}ah? zJ-k>CFV`1)G9d*esjt<;oAuyqfc6GBr$OjvUP(V!HbA5S5)H7s0roY({)Xay>coBy zG{9FI;E4wKaRa;~_S0C>Pfa5%XoQwVSk?%Gjj+D4xSvI0Kbsrj(nd%(!X1t9*Ns9y z{U!ap&RO>lLS(9g$8`uS`V+|&eLYJ$6( z;2TZwP*ZU~=ZZUxBF1zvW8BIfdKDQlC+^&7#7B2pIdV3(c&us4id|M}e3LOEo?6x2 z@z@ksIb+{yCIEV)DHjUWF3LyOYZv9Krjv{ERnzw@%2rLE7^z=aGzY2QU6hl$WpQ5W zEsOI~?_Hdi`q1JNDfJH*&p~QOb581Tb6)Bl&3UQ6*qoR8ADWL*YLO~Ptz>VZnvYXz zr9kTHB{`|jS(2A}*OIBa`Xza(pI>q!rG9J49Hd^{l9PHF$?r*n((pC%(ir~67XlR3_ZE&^#7OMx8i`rmY8${Y5Er4r9@V+*< zwG9rm!65-062XtP!P9N(GKBuSkn#{3*dz! zIMNO~+u{0lxJ>}pi{QK4;cM;iP&+&&fSW|{_uJt=+Tln$yeEJci(pp=_&T7e1D1Cb zgPTS0xgBtR2W;If6;Il;V>MmH*1s8WgQ~;kNf+xD*hA#NCF1S|! 
zuMxq2(*@t|g6F#6-v#hm5qzWz{_80B-*-V(cQJUK2yW_z)^6zQhVumQdJ(+68$Q(y zySibo06wpL?T($oD~gg4B4VaJR!QQqN)iKUY1($O*iIJP${yGCxx0>7z`UDxqP)8@V_h{_s>feY?skoxWXVl}@ut!btOm{lwp1$M%$_5z$QN+lH}wJXD`bxx zn&fVkpj(1r32u?#F$oS!@V^o)k>NtQQeCEdSO!Cedt`V{hWBKsb-*eI>~O$Mj!G3R z?t3P`W`5ZLe{evr0$UYGPkzn(v{)S~TtxZow5D(hGsg6IJ}bUp!ZgyuYuDKMe>>Yg z7Ei{^<%OHG!smsA%7CHE~7g|owe@P&sulu%jj<4r0PzD&7iyW8QpbHqdQGfHmh){ z3RkHRS1VM~St@*50Eu-a@=YpyQ2=?T_l#drbDr@zHSZbidhto$o_(>zSzL{@)tPg~ zpL4>^PI$x#KXF#9rm@|)(*^c_jj%(kI#)~JH@e^!7d+;IpH=)17Zh0oO@p&E7}hE! zVU;gvaGM5CXz+>%uPX7aCm><&i<@@!Z`tKHLsGqx07HX=y^+3waBOgJC^Fa|?hP9w zu|9DbOecV7!_x`SH%kFl9g6_RqJjR?UvkhoCzmV6Ve z;5y#Lk{^o*00000000B+oqL>HWtqpnC+8&RumU&@XpvlD$+;|_*T*Dj{8KO( zMDifXNM1==S1#<8r2EYOrP9Yv2_%PEW%yDFu8`n!5`15Rzeo^sz=;kRaKL*Uu+;%S zaX_mKD`e1R7?a_88Sa;@Zn#cMY%r2V$x-f314!;b|?J72@#jjgxA)@;VwAZ1*f}UqYFOng8N*s%LQQ-j!_|@ z!euIKQ{ne2cr`dwgEwoC)`TWnYn$*Rg^+@@NRC$X!8*f|H<267rqbDg<+;tJYqDRE z2v$j)X(W3~*HuwUP38@~XrxNt71!}kZTFrn2{OKtOgfi6G25T3-xBi4&zG!Sl3P{q08dPdEQt9I9xkB2kxpepQ;@!9AihB8OO=FI9YW!nt zVPDsR-bH#}cfW2V7A(@^@jiXwf`#3s3LmYiFqAcnl#yDwdBn)4jcn2oDs_+KbLIm- zv9|7oiT+f%?ykl0uD-5heBmPTRahlY)rXBr_tzSQ;`-CeHw1ZCA(>B)6e|xhAW4dQ zcSl`?qFx*=2!)iQp$}J{w)tiK*eak9pxnFWlyZZGx~O2+QB`!VkUh ztXEioo#Gm=M0^nQL9Y+a^a0BwQVk@&=69~OWk18{5rP7;Lu;;`2R;GF@uBp`GmVC%#U0k|mu54=?5=c0Jf zHtZJz;0(gjAmoFvB?$Kh;h7-3J_M(RgepR|7S9jCMIrb=2yP73j-*W-_7_5MR|vil zf*%UP?c%VX48dcj!|E^$iQ+Nau&)fm2gC5+VfaTFj*7sl2%H~* z+avI3M5v;pwu&-d_L)IH$JN{MH6G=hBxlzxI z8r5~kVjX(2nBHiKNTO}l`qSBTVQ}1lOD~FsdBi9!0@iI$sR>QA87ZS|j?%eGU)xU@ zG?GJ|#lc*`=u8@!OrbM3T0EqY6PSjySqPRWQ?M@fXyylq_HZBx2Ihb z`}_As6OZ3pP3&FN^Kvw?cX3}|cVfY!c&e{&aiXs$-W@mkQ$1pzC(^_T(dBEJXg8XI zD!ONgQM_PQ8bwV~zuW>}ZGoS(fYJ(wwZbW_u&Gs8+cgm$sJFJlZLP4q6`pN{Xd5hT zgLIovG;AyS@izEW8*FQXr`o{X4qfeVZo5!)j?HD{D!q^Qw^wad^h_qV$w-}&OBv$x zZAs+yY;y4QTsm7UEGx^E?m2>jq-WU34;Xo~lCnUk)X?2k4YlWuLM}6E>c(4*LNT9C znjdXV7IMS-37K?qD4iW>*yNmzxys2{KjL87YZKj1Mm#S% zMeq2uX&$jADZ^1HMB(Zv+!C!t{O(%BUpqlOHjsaR6dsDg&!g}+p{huus#O zumf&!z)lDJ*#SNo;xfFod2nG+hO7)9l;O)V{KN`09Ie2~R-oZ}1@2eiZwegYgjG&> zhZ8Pw!cES`!G+o7RJ+I7$f@>g@$SAl)mmLM#HqH_HJMJetZVkV|8I9S(x{(_clR}* zB<%tQaS?yJ^qV z;B6X=Y49;o-0r5`s=+o5p3~q+H>`6*(G9n{;R!dWo~CZv1s+)Jfwy?zY)=z6?Vty; z9{3jzT;*xvroG++pYgy$Vkhiw+Fy9!*B)?s;V{veZFkeI@WM%6xXcS*@xrgXfDd|n zFyw=ehz{=1Z1z{aN_OV=`)1?Le0HhgtHpKx_gSfnnxroD!xBH7y2=xOQ;A-Fw z;75R0f+Hk&vjpc$@JR`NEP=}bhdN-11J*m>VuvtYCEIirHK6(4YZ^A|*Et$(t-c}N z-Pg_fA00DfYjuD;DO)S6_SJH|t<}{pUA;zz&&Y76TsvVM;vVBc8NMaMBQiWCH{OTI z&&lwD44sOgwTjJZ9H+o?1=cHYp(t+KiMel3;3frrra;&U$2(!26Rvi`Lr!>FbbYyA z+hff5Jw}IXmg2g#E?DP+%`W(~3m$aA?_AKPHg*tHpvo$Bil9nCopz}5Ar-Du;SLqP zEqH>oM)<-#x%k3D18FPep&ky<{8cz0ya5;iJ`8LH9s^zk7D}*2g7YM}NrL~900+b! 
zaEb#A2VCKR&o##av&g;wKtsgv`&Sw<%#~*dVpugfh{1Bkj@CsCTV>cL!wwl9Z-yA2 zmSL9+4h14g6T~o2fx{IzsX1ckS0JOn6$<=EbHwmX1-_$z=7c3q*x-cAoUqji&p06} zX5zet7-l|VIAOLThIhN*G7Bwicfr#ZT3EWLh+$-^5X1G;j~MP%;Q@ zHf4=`Ma64v%y2H1?oUs|K~}Q6pN@FXm&*_6+4T8(wE+*DR&{ax*+LP2t~hAq3#aH~ zD^uxW^}PQ|)gyN1%>I^r^VKgiUt+P4!o~S{uAET}UU=nRKzToM|QNSTawVb;e%ab zH3!+=&g=Z}MnAmM5AXBCy?%Jq56}ByUI0!M14Reh7RT~hW9EJ_jx=iRO9^Kb&KSw$ z^a)mUce=19nb(WS!TQ$CyCIh-tjy|(Ow|$}kfe_Uc2A&#O_?>LBO|80Pc0u?woy-K z%o1ni@LKapxtE<}>4;+Xjopkz7ZN*-VLe?b z7&E8#`enx#l6t8_!|80QaC9;^EIx()biPotj=9Puo5SXm1)EbA1QoL<)O2B4YS`LJ z*dD((U09aQWygkdqlNk}Ae1hwC@+hr=vjSWd=(Na(lWX*0P})50Mqma>_*mau2n*3 zO6syKnKTN8`mOj&5-W0>vJ-1mxt3L>?M)+zGe%P5%Ze#c0m_dBrU)pzr#T8(rn}an z+j{Ux0J&BBl2;=5G|7~{Hv}7bk=BZL_m$Fjad3uskv<=s%(&A}gElWx9Vf?YhIpzC z@j)TGA=V`4>Ja>M2sVb`su28-m{%=2;~MR#CFQRn_=iaf2ty=X3zj^h@PT1CGz@PF z!&!oGsL@{CmffV}!Y~?!%ffJtP?as$`=&5_HVk)%;ah^RE!X?eFg#&ypTx{-d#?9^ z5jZpg%OaqQ;`Utciz0A&1ilu5T@jeq0>`z$NDJK30^ey)M3}KNU};7R~Qpr1#G6=}mRb*LA&b z{=z;qKXSuvs^ z2V82UxqhoTDJ496NGVS=B&GP|y>ll_NJ?2;RiQR)6bkx)QSQh)W(r;9ib| zj*H_t7rY>9ux&e|n%T(5sj$vE1NmkvL-?;^MAB_riEJ8B7~LIhFvo1Jo1NCh{?SZk ztkX2(Myd*L9sEh!tJe*EFo9axN z3N<1AQNJ@$V$NN_BQJdpNN|({CrL11ohSaF1b>zw?0_RIQtrDZJu#>1`{aU}4>trg z_rH9mts|W(75(2y$->e10HoBMZ`{)yr9>wOr3lw-{N3G<#e_*8kl{udzSxXm`+y7& z%dm4#a&rDA!;3N;(wt%YCIwa~pet}ubB67W3f!!~6Bc+m(g`b_Fy@3WI^mI*os(l4 z=o(~EiBOJ13-eE%a*9~aDVB4+0vN>X-sBz;DNh~-DNV|kk2aK9iZ)d9n4Fa^NV?!E zi{$iU6Rhqjqqb|R7`5-1en#!PRJcS1lid5|{f|*QoHx!JH3}2v;dH#NYT2J_=HGl2 zxW`)ZBNEKFmisFuxJ7~;5`cK(gUhHCHak4*=O6a`{y7gqYp6E+-FGwtC zZnzroV(k`cZy2!E7aDl=CY1~OzWQ9)Z#U$^zMwR>v=eh-7uV;)o>i9%dr@63>=RBq zJHY(yhw78mUJe)5hJGfD`|c*jzJ98Rv1u3NTn&L%%{`|`j2&~qg;qw!*In?Wm65ST zTtDpRL}gUSt6-jS^*t4ywtm59z6NVGxIiSv+K%^deprK#YH*JRKhpq_7;BHb%`-`` zZdm1p5jT9u4R^cYm(4h}O?Ve_s|V(IV3|OS6%K!pLg#uw_rRqd_^bzZy!2W2g84%Y zOTF+mFTC3eTfOibiy(WD50?AjJdx2V9-CRy^Z_CYeyAH3!<>&XYd)tq(f= z@D}Sw7R3;IsX!b=Y!B0f<+-nh>Tac5xP)Chuq~Mcz+144Z#-S2rRcK{P_^v5rQ2d_>~|W zY_ylQ|GLo&AyC558ivCJ5^Jm2Zf_Wl3&Y7_czY9iteg!)F$~v=KUdUl>%^TFi*{!i zo^ASLNba@>%#FYq5y(eiO9bwXz%vneeG8l_GG1+@S>^l|xTpoLY=M7ksU69|`@nep zREvENRXZ(gjMv>)8fn!lKHevVQMgJJ@3e{E9)+(&;fGOpP87GZ zUJr=DycjHvL9z+!wK1R0uEUeY9o(~~UJ;hyD>tfY66$+nQ?z6HQfzv6Os}cTKB|l+ jDL;t8BQe+&17Alie3{b-0M4Dbm@fS literal 5358 zcmVa zA(5Xt0UwJ900000000B+oq3!bWtqpnb#ztr-91;&5RL${fDo8TCX)%Ul7obVD=Y~Z zqY`VnYbKrP=^na!5{956!p9G~22mCl6e5a3*xHJ0zKz%m9CcuBzv~s-Ak@-}5}br{>1qo>V5@pZm~ft$Z?{vex%m`Bi4F zKX=s8$z0kT%&p7hbC=$jNSJx^9j$Q%9s57Ykkn4#1mF~41<)vIQOUC~n>B|eY2;Tp z97ggXDM)@vT2pN7m!$3X|E0punG#5{-DUKH5?mp{trGl5g1<`8D#LOa`ee9VhHWzZ zRE8!6PEo*AU|50c6}Ve*rlGCqS#QPjl3e;3i9`xRJf^_U6nI&I!&Nv{h4m_YN`*UB z*r9^Y1IK!x*8`vQz<+w+ArJh~0}Yxmgx@v9Q5qbl!Py#Y(BL`^wrlWb4Wc?6uS1Ux zm+5eq4u86-$xZf+^+Y9qPi82cNT&N1WiBk7Q-Xp- zxJ=?aE8bN&uZvQ8JZqVGD^WOBwFNsL@~@TznLt4%nMp5C_hzaOrrYYW(#nqcgq2uw;h>dGTIskYbm|?9DlWY6anOrWDD=cupS4*$16R+;3Ud@en%r8Fs&UkzK$g>Yg()WE+ z#8UmqH=dU2P`^v93`tG+VZ9%Q{cwf95_c#SxWkatYyGgr54ZW@EcpgB~zl7P7EBSLUY2u=vWX@YQ2T=tp}ye9;ghJ;CkT$8vV1UH4?-q)HuD~g9* z%YHQko-mvohHMya48ym=@M0MLDFSCjgf1ek5ig3sB@wtX0$U=LD`^&&eR~A9M&P>< zcvKK>5tsdZ1b!QVKvbAS%r%LlqHtUkPLD#rDBkK?_J^bJu_*jk6kd+Pu??`a0WNBQ zuQtF>8iX#|D!VA+<$(Q)P5)Ul?1>8!o(*Pd z$SUtc5&JOX`Q!#iL=tVY(VI*sbL&P7xb&)MmEe7zm>A`_69b+bdlPsC|;g) zP3-O66HQ#Or<&L`uk-b2V%Plc?%6$a=CvofyXW_Gcec-Nw|Wzu;+)6Q#4*w3o0{l0 znu02Nr-)J9G%byyA?bHC!nYgYr;VUC!4XYxW)o~|683gOga`U9O>kQiJk$h#Yl2ua zoZJk_W}#`+)%3b%_)Ih0)eJkE!P^2IE%443q3Hpxkdan+9{<`>=Bk*fRA!@3nWsQLb=J4hxdL0V~^QW$jLi0+GT%ca{y*lC^S~)R3(k=UBOX zHW{~nx`xbU2F%5&WV}C_?yEWE0UI)@p#f`oI&Wod4Y7NxXi&c59pk^^a4~8V+fP<} 
zSZsox@rp@au_37gG04SWa}2%^t3>?XO2l9Jg7{n@|Lz#vAA@IO@RHC~L#?hFU0p?6 zA=V1BTjBK9%C2VA>gvEcj)46vGTHpPOkXB#rYd(b+nQy1QppR3tjaCTHkZArWZ$~{ zROr#kHAGgH^zqavORpBDy+w!Hbl9%L!+M=C?Nd5Dr^BCgkc~QF+6Dt!4LCu}le)vS z=Na%W1BMN_Rup%KX}1}0mjSyBIK~TWypZ?8EnaxW3%ajvn0Ag2=KElo4_5o?glX6L zAnk+q`rxC!I$_%DeegLS+%HbT9j5)c4}R?fj~|W@gW2vd?J0gZ%@3FP;TwMVl^+N| zX8`&G@Ch-%9huJY$~TB-es^Fxdgjwh72hbK^S@6^T{I+pZV*lk!aIWS!64iegom6& zUY#h>ks+8Bg407V6oP9)a90R+ifOm#7@?lF@C1bcmWKC}z(-=DR(#bYuh>p?hGAG%OB`k+5o7JZ2H0x4iK z@HOBuz%Rkk5}Yc*MG|~kg2yG$WH?-g6J=N{!v|#Hb(LJNtEd6>eXmJy);}rNa;?5A zUfo+~{eR>s;#wW3jEigK^nSWh&9&P6+S#iW_?!Y?S1MmvS@bdPQ{Vvwo={+?Qrizx zb}8_R0y9-XYgL!kI7x*?Dy&uE{i3+b6Z77n!c8hXtwPiT3p}vK1Dic?zXx6rLtomP zKE{;yG1|0gitAQsuttLmHTbLs_i6Au4Vv}Z0fG`#S*lMERLSX+4ppww;gdRiO@{}C zNRUwrUwFqCUpQzW>6AQF!vRLH3n9&Ed`pLWb$C*TzwUpCVPl__ zUzkoi8&+;X!~6mevpp^X4hjd6pJEH8fkREeZT|2?*Oj^ZK0YxUA&nA0@@|motGkyn2l)qeKWOd(G)}^=5ET74W)}@Lm6HTTY zN_4h6P{Jxgi7v79DxA!nk%{+PiABRJHm0p?Nl7cM#6TvI>`jivB39F*YZR{qGTA;e zoxI2_H^HG5Whd9J7McVz`E^z{ccwYKB$3RQzq!L@H_DT-=UX`a?ea&Gt$2+?lJufl z-{N{~m{NL7+QP!VRIL0eV`&tmik&X=l%o){uXy%?%CoutVi8?=7q)pV3je;G|KXU| zst>+e=%hkI$qx^fhFvu>Y^gK$)02?nJ;ZfM!~)n z42&Vkwx9SOttO1BxTXmZOJA?S;AVw5XuDnsWWs!ILAnt?9eegveJmiCyeK5lh^Zk(W!)8DHzz@6p&>VnS z0r-~yv+Hyf+A+3c~gvJRbzV znCdyiwY?R0i^D>N-J*yUD(y=FDdf%_Ol8b5_GE7|w=$kJ^YL}nt($*+rYE-~ZT6(f zHtCQgof{f`fex`{Rt^mg+EzQUXn5fUGnukmygifcw_L`*+#*Welg|IL?6U_WP5$h3QOscpx*BtNs8Y$=oSs-ppC~ zGtIQwH?j+f9T_Q8Sb%-PUVv?SLvAA*wD&4uG6i*67>`@IT=h|c1&LEK8`EQJRI!(( z1qY>;#JPisk!{76C_&vDLK6ga9}G<{)GZQLEAeSH5G26VGS6aPq>sj#l=quZtvFCi zc+cWMW0LX~ho?v!C>R~)EHnBq-CJkf^u*|6IX8L811YuXT_NSt7V-)VIU#-D^*)^-PE738_G|lc=bGyws-5m+DGcmWlhJ#c#fzoH2q^6N_ ziuR_`XPeeG6+hcF0j!TH9k_j?aQHSiO~=S>=^c2W^bYJAeFu(gc0OHJmxtaA?`noC zo8jwXnWyUuV(N}&c&Zs9Ezl{Jb_QJHt6Sjw7PzVfwu<6GmwWz`7I>xwUT%RSW3Vg+ zgE6=+CNw?3HPVAI_)!ehRye9v5I@uuE_anOkL<12wnv3i~{&MfE07wKjf^2uZ3eQcP@E#ww@WZ2!7UPeM}p@iI9P_WWayXSgHEC91NCVo(dk1gd8Q_< zB%thBIAKg$$^5bojR7l{GyAOKM9!Zogp(`Am2k4HD&gcG3I&OXT1F-vi*VAeLYE3l zR5-T|;Y7An=of@sC4xgLY;uaNYuEg#cRKahPpa^LqPRPwGQ$Ihdf=T7cXhi5c6h+A z!6FUTi>wr*j?o!6=cbe$xJDbv4(#tNmBuOJ>ujH~d#xyPo(Y9o9Hs zG5)+$0-l0@#c&2T}twb5#Jj$3Lym_uMxp>o% z)SOeje6;~zHfl!lYI>F91a>YwxQGCpN}8EwIx#b0E7X|yM}K6j(weQn6R-UaNpP$L zr%BM~d=dOUhvgEL;b@0ed-1rJ)|4HOFR1x=O;B_9>*vbK$wZMwF)np9=5BzLiZ2v* z)<-F^@j)rVITr(W6tdVb=}HB*DDahf9NBvn_`U)=cBiW5B?Vqp;L!RUS)1#0iVCI* zm(=ITZc*X$Dm>$Wmt#Dz!~??~_=*Rfc->Vswt=od7Nrd32(+;OsnSjr+j+!x+CIP_ z_VmX0Bah1XVUWU0`TEg@I!)1rZhz=zi3V{EKB~cO8a!@;)!pU9c1#o}_Wa4`#9pk! 
zr8?O3+&lI^PV7L|x?sr4jaf<4`m3^S{{VZtzZ$p$c+}bQ?P}UHBuGkdr3Bj~cvJ#i zhGS${B*S}TxJZT@WY{XhgEIVFhFvnWD6m8UOMy*Jbz~iQP$acJQkT^F=kln+>m;?- z+V;cyp4sZQF?+&cEk98sE;p`h*3+u9SwC8n&3eyXv04A2y7R4e8Dmv8>!PY`)*Gv{ zS-0;go7IJ1#?0J~Qd@VH&ul!Yt;cB4q1A*+71ul>we>^|7HW`isv~aJV21`T*QxC> zB=2!Lbn38PhmY%UyW@kts6&eZ|145lU7y(StTJGY0T&za83Vo}HgV_9o;TpP1{hwL z?S++I*yM%J*JH)jVNNKId*MkhC<3)r_yB_xImicx`(TL=Qa-rS`6SeHK07@L2m4`x zA1?C4tq#TYSwE-&m?Lsh#ZOmO41J)8W0wbDMF57xZVqxiotp#jr2zaQ0KOm`8H6Q4 z=nKN;AZ!&sfO4>FmnsIY?60LvIJNcF;2WIU>fS~62kz92sYGI3Hc=$jq5Y7=Iwt5D zPbTYxMQc7$lhJyQcy;%qH7|swO0=eRe8{WSqBW0H8bnL$@l)%BOXdA8TpFC-MAT&^BI?FbBI?aeRf(wX{K8{R@UtecNvsEo zL{#^u0T(pG(q@P^!-wkgR&Q;FuQtOY&G08t+{IhfTUy}27C4~=5-o6f3w*f+c8DBS zH{;%jK_~{t#9)QUTb=0=xBo=Ma15@A!8b*5H*fXn7(5>XsTGc>!&|M*h;!@kxUv6s zZ>W93^V@f;DyNYyZ=E2IX+!JedQ5Ms%wDRDA*oll!gZ~%wG|!_{=~`wwE*CZ@lA*P MKP|LnN>P0P0A)K Date: Wed, 2 Oct 2024 13:12:57 +0200 Subject: [PATCH 017/129] format backend --- .../datastore/controllers/DataSourceController.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 5bb738d9759..33c2484d0c6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -218,7 +218,8 @@ class DataSourceController @Inject()( response <- accessTokenService.validateAccess(UserAccessRequest.writeDataSource(dataSourceId), urlOrHeaderToken(token, request)) { for { - (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) ?~> "dataset.upload.finishFailed" + (dataSourceId, datasetSizeBytes) <- uploadService + .finishUpload(request.body) ?~> "dataset.upload.finishFailed" uploadedDatasetIdJson <- remoteWebknossosClient.reportUpload( dataSourceId, datasetSizeBytes, From 90990b5b9ff6ee27ef69e9abf5b1244a47793253 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 2 Oct 2024 13:13:57 +0200 Subject: [PATCH 018/129] fix frontend --- frontend/javascripts/admin/dataset/dataset_upload_view.tsx | 1 - .../advanced_dataset/create_explorative_modal.tsx | 2 +- .../dashboard/advanced_dataset/dataset_action_view.tsx | 2 +- .../dashboard/advanced_dataset/dataset_table.tsx | 1 - .../dashboard/dataset/dataset_collection_context.tsx | 1 - .../dashboard/dataset/dataset_settings_delete_tab.tsx | 2 +- .../dashboard/dataset/dataset_settings_sharing_tab.tsx | 2 +- .../dashboard/dataset/dataset_settings_view.tsx | 2 +- frontend/javascripts/oxalis/model_initialization.ts | 1 - frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx | 5 +---- frontend/javascripts/router.tsx | 7 ++----- .../test/puppeteer/dataset_rendering.screenshot.ts | 1 - .../test/puppeteer/dataset_rendering_helpers.ts | 2 +- 13 files changed, 9 insertions(+), 20 deletions(-) diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index 66b938166dc..af18147d0de 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -35,7 +35,6 @@ import { type APITeam, type APIDataStore, type APIUser, - type APIDataSourceId, type APIOrganization, APIJobType, } from "types/api_flow_types"; diff --git 
a/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx b/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx index 632438a5d5c..45e2c910962 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/create_explorative_modal.tsx @@ -2,7 +2,7 @@ import { InfoCircleOutlined } from "@ant-design/icons"; import { Link } from "react-router-dom"; import { Modal, Radio, Button, Tooltip, Slider, Spin } from "antd"; import React, { useEffect, useState } from "react"; -import type { APIDataset, APIDataSourceId, APISegmentationLayer } from "types/api_flow_types"; +import type { APIDataset, APISegmentationLayer } from "types/api_flow_types"; import { doesSupportVolumeWithFallback, getSomeResolutionInfoForDataset, diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx index e32e006a1ef..389d57852e4 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx @@ -11,7 +11,7 @@ import { import window from "libs/window"; import { Link, type LinkProps } from "react-router-dom"; import type * as React from "react"; -import type { APIDataSourceId, APIDataset, APIDatasetCompact } from "types/api_flow_types"; +import type { APIDataset, APIDatasetCompact } from "types/api_flow_types"; import { clearCache, deleteDatasetOnDisk, getDataset } from "admin/admin_rest_api"; import Toast from "libs/toast"; import messages from "messages"; diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx index 22c6486dfee..af71eabacb9 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx @@ -9,7 +9,6 @@ import type { OxalisState } from "oxalis/store"; import type { APIDataset, APIDatasetCompact, - APIDataSourceId, APIMaybeUnimportedDataset, FolderItem, } from "types/api_flow_types"; diff --git a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx index 4997ede542c..bdd0aa34fc1 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx @@ -1,7 +1,6 @@ import type React from "react"; import { createContext, useCallback, useContext, useEffect, useMemo, useState } from "react"; import type { - APIDataSourceId, APIDatasetCompact, APIDatasetCompactWithoutStatusAndLayerNames, FolderItem, diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx index b379c694be0..6eba97cff15 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx @@ -1,6 +1,6 @@ import { Button } from "antd"; import { useState, useEffect } from "react"; -import type { APIDataset, APIDataSourceId } from "types/api_flow_types"; +import type { APIDataset } from "types/api_flow_types"; import { getDataset, deleteDatasetOnDisk } from "admin/admin_rest_api"; import Toast from "libs/toast"; import messages from "messages"; diff --git 
a/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx index 0403258a6f5..00385a5344e 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_sharing_tab.tsx @@ -4,7 +4,7 @@ import { type RouteComponentProps, withRouter } from "react-router-dom"; import { connect } from "react-redux"; import { Button, Input, Checkbox, Tooltip, type FormInstance, Collapse, Space } from "antd"; import { CopyOutlined, InfoCircleOutlined, RetweetOutlined } from "@ant-design/icons"; -import type { APIDataset, APIDataSourceId, APIUser } from "types/api_flow_types"; +import type { APIDataset, APIUser } from "types/api_flow_types"; import { AsyncButton } from "components/async_clickables"; import { getDatasetSharingToken, revokeDatasetSharingToken } from "admin/admin_rest_api"; import Toast from "libs/toast"; diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx index c783ea6ea76..470db65a177 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx @@ -19,7 +19,7 @@ import type { } from "types/api_flow_types"; import { Unicode } from "oxalis/constants"; import type { DatasetConfiguration, OxalisState } from "oxalis/store"; -import { diffObjects, jsonStringify, maybe } from "libs/utils"; +import { diffObjects, jsonStringify } from "libs/utils"; import { getDataset, getDatasetDefaultConfiguration, diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index 13e8e349e76..5740cf5dda9 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -1,7 +1,6 @@ import _ from "lodash"; import type { APIAnnotation, - APIDataSourceId, APIDataset, MutableAPIDataset, APIDataLayer, diff --git a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx index 6dada26125f..f685cd99230 100644 --- a/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx +++ b/frontend/javascripts/oxalis/view/jobs/train_ai_model.tsx @@ -473,10 +473,7 @@ function AnnotationsCsvInput({ const newAnnotationsWithDatasets = await Promise.all( newItems.map(async (item) => { const annotation = await getAnnotationInformation(item.annotationId); - const dataset = await getDataset({ - owningOrganization: annotation.organization, - name: annotation.dataSetName, - }); + const dataset = await getDataset(annotation.datasetId); const volumeServerTracings: ServerVolumeTracing[] = await Promise.all( annotation.annotationLayers diff --git a/frontend/javascripts/router.tsx b/frontend/javascripts/router.tsx index 01e40aa6f4f..c70b9429a5d 100644 --- a/frontend/javascripts/router.tsx +++ b/frontend/javascripts/router.tsx @@ -45,7 +45,7 @@ import { trackAction } from "oxalis/model/helpers/analytics"; import type { OxalisState } from "oxalis/store"; import HelpButton from "oxalis/view/help_modal"; import TracingLayoutView from "oxalis/view/layouting/tracing_layout_view"; -import React, { useEffect } from "react"; +import React from "react"; import { connect } from "react-redux"; // @ts-expect-error ts-migrate(2305) FIXME: Module '"react-router-dom"' has no exported member... 
Remove this comment to see the full error message import { type ContextRouter, Link, type RouteProps } from "react-router-dom"; @@ -65,10 +65,7 @@ import loadable from "libs/lazy_loader"; import type { EmptyObject } from "types/globals"; import { DatasetURLImport } from "admin/dataset/dataset_url_import"; import AiModelListView from "admin/voxelytics/ai_model_list_view"; -import { - getDatasetIdFromNameAndOrganization, - getOrganizationForDataset, -} from "admin/api/disambiguate_legacy_routes"; +import { getDatasetIdFromNameAndOrganization } from "admin/api/disambiguate_legacy_routes"; const { Content } = Layout; diff --git a/frontend/javascripts/test/puppeteer/dataset_rendering.screenshot.ts b/frontend/javascripts/test/puppeteer/dataset_rendering.screenshot.ts index ca0e0f45c4d..a29c425a1a5 100644 --- a/frontend/javascripts/test/puppeteer/dataset_rendering.screenshot.ts +++ b/frontend/javascripts/test/puppeteer/dataset_rendering.screenshot.ts @@ -15,7 +15,6 @@ import { WK_AUTH_TOKEN, checkBrowserstackCredentials, } from "./dataset_rendering_helpers"; -import { getDatasetIdFromNameAndOrganization } from "admin/api/disambiguate_legacy_routes"; if (!WK_AUTH_TOKEN) { throw new Error("No WK_AUTH_TOKEN specified."); diff --git a/frontend/javascripts/test/puppeteer/dataset_rendering_helpers.ts b/frontend/javascripts/test/puppeteer/dataset_rendering_helpers.ts index 17446c4d085..3d71aa7a9fd 100644 --- a/frontend/javascripts/test/puppeteer/dataset_rendering_helpers.ts +++ b/frontend/javascripts/test/puppeteer/dataset_rendering_helpers.ts @@ -10,7 +10,7 @@ import mergeImg from "merge-img"; import pixelmatch from "pixelmatch"; import type { RequestOptions } from "libs/request"; import { bufferToPng, isPixelEquivalent } from "./screenshot_helpers"; -import type { APIDataset, APIDataSourceId } from "../../types/api_flow_types"; +import type { APIDataset } from "../../types/api_flow_types"; import { createExplorational, updateDatasetConfiguration } from "../../admin/admin_rest_api"; import puppeteer from "puppeteer"; import { sleep } from "libs/utils"; From 1b0aec3ee9f701fcf6605390801b4db726e13bb7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 7 Oct 2024 14:15:35 +0200 Subject: [PATCH 019/129] remove occurences of displayName access / variables in context of a dataset object --- app/controllers/DatasetController.scala | 9 ++++-- app/controllers/JobController.scala | 42 ++++++++++++------------- app/models/dataset/Dataset.scala | 6 ++-- 3 files changed, 30 insertions(+), 27 deletions(-) diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index b4f979190a8..c67f625b56b 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -34,7 +34,7 @@ import scala.concurrent.{ExecutionContext, Future} case class DatasetUpdateParameters( description: Option[Option[String]] = Some(None), - displayName: Option[Option[String]] = Some(None), + name: Option[Option[String]] = Some(None), sortingKey: Option[Instant], isPublic: Option[Boolean], tags: Option[List[String]], @@ -94,6 +94,7 @@ class DatasetController @Inject()(userService: UserService, private val datasetPublicReads = ((__ \ "description").readNullable[String] and + (__ \ "name").readNullable[String] and (__ \ "displayName").readNullable[String] and (__ \ "sortingKey").readNullable[Instant] and (__ \ "isPublic").read[Boolean] and @@ -324,7 +325,8 @@ class DatasetController @Inject()(userService: UserService, def update(datasetId: String): 
Action[JsValue] = sil.SecuredAction.async(parse.json) { implicit request => withJsonBodyUsing(datasetPublicReads) { - case (description, displayName, sortingKey, isPublic, tags, metadata, folderId) => + case (description, datasetName, legacyDatasetDisplayName, sortingKey, isPublic, tags, metadata, folderId) => { + val name = if(datasetName.isDefined) datasetName else legacyDatasetDisplayName for { parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND @@ -334,7 +336,7 @@ class DatasetController @Inject()(userService: UserService, _ <- datasetDAO.updateFields( dataset._id, description, - displayName, + name, sortingKey.getOrElse(dataset.created), isPublic, tags, @@ -345,6 +347,7 @@ class DatasetController @Inject()(userService: UserService, _ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated)) js <- datasetService.publicWrites(updated, Some(request.identity)) } yield Ok(Json.toJson(js)) + } } } diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala index 161ba82a0a2..ed60b221f65 100644 --- a/app/controllers/JobController.scala +++ b/app/controllers/JobController.scala @@ -131,8 +131,8 @@ class JobController @Inject()( commandArgs = Json.obj( "organization_name" -> organization._id, "organization_display_name" -> organization.name, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "voxel_size_factor" -> voxelSize.factor.toUriLiteral, "voxel_size_unit" -> voxelSize.unit ) @@ -158,8 +158,8 @@ class JobController @Inject()( command = JobCommand.compute_mesh_file commandArgs = Json.obj( "organization_name" -> organization._id, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "layer_name" -> layerName, "mag" -> mag, "agglomerate_view" -> agglomerateView @@ -182,8 +182,8 @@ class JobController @Inject()( command = JobCommand.compute_segment_index_file commandArgs = Json.obj( "organization_name" -> dataset._organization, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "segmentation_layer_name" -> layerName, ) job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunSegmentIndexFile" @@ -206,8 +206,8 @@ class JobController @Inject()( command = JobCommand.infer_nuclei commandArgs = Json.obj( "organization_name" -> dataset._organization, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "layer_name" -> layerName, "new_dataset_name" -> newDatasetName ) @@ -236,8 +236,8 @@ class JobController @Inject()( command = JobCommand.infer_neurons commandArgs = Json.obj( "organization_name" -> organization._id, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use 
correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "new_dataset_name" -> newDatasetName, "layer_name" -> layerName, "bbox" -> bbox, @@ -268,8 +268,8 @@ class JobController @Inject()( command = JobCommand.infer_mitochondria commandArgs = Json.obj( "organization_name" -> dataset._organization, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "new_dataset_name" -> newDatasetName, "layer_name" -> layerName, "bbox" -> bbox, @@ -300,8 +300,8 @@ class JobController @Inject()( command = JobCommand.align_sections commandArgs = Json.obj( "organization_name" -> organization._id, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "new_dataset_name" -> newDatasetName, "layer_name" -> layerName, "annotation_id" -> annotationId @@ -352,7 +352,7 @@ class JobController @Inject()( else s"${formatDateForFilename(new Date())}__${dataset.name}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.zip" commandArgs = Json.obj( - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_path" -> dataset.path, "organization_name" -> organization._id, "dataset_name" -> dataset.name, "nd_bbox" -> ndBoundingBox.toWkLibsDict, @@ -391,8 +391,8 @@ class JobController @Inject()( _ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName) commandArgs = Json.obj( "organization_name" -> organization._id, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "fallback_layer_name" -> fallbackLayerName, "annotation_id" -> annotationId, "output_segmentation_layer_name" -> outputSegmentationLayerName, @@ -420,8 +420,8 @@ class JobController @Inject()( command = JobCommand.find_largest_segment_id commandArgs = Json.obj( "organization_name" -> organization._id, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "layer_name" -> layerName ) job <- jobService.submitJob(command, commandArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunFindLargestSegmentId" @@ -453,8 +453,8 @@ class JobController @Inject()( command = JobCommand.render_animation commandArgs = Json.obj( "organization_name" -> organization._id, - "dataset_name" -> dataset.name, // TODOM: Adjust worker to use correct dataset - "dataset_path" -> dataset.path, // TODOM: Adjust worker to use correct dataset + "dataset_name" -> dataset.name, + "dataset_path" -> dataset.path, "export_file_name" -> exportFileName, "layer_name" -> animationJobOptions.layerName, "bounding_box" -> animationJobOptions.boundingBox.toLiteral, diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index 65b044cac20..79a219ece46 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -479,7 +479,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA def updatePartial(datasetId: ObjectId, params: 
DatasetUpdateParameters)(implicit ctx: DBAccessContext): Fox[Unit] = { val setQueries = List( params.description.map(d => q"description = $d"), - params.displayName.map(v => q"displayName = $v"), + params.name.map(v => q"name = $v"), params.sortingKey.map(v => q"sortingKey = $v"), params.isPublic.map(v => q"isPublic = $v"), params.tags.map(v => q"tags = $v"), @@ -503,7 +503,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA def updateFields(datasetId: ObjectId, description: Option[String], - displayName: Option[String], + name: Option[String], sortingKey: Instant, isPublic: Boolean, tags: List[String], @@ -511,7 +511,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA folderId: ObjectId)(implicit ctx: DBAccessContext): Fox[Unit] = { val updateParameters = new DatasetUpdateParameters( description = Some(description), - displayName = Some(displayName), + name = Some(name), sortingKey = Some(sortingKey), isPublic = Some(isPublic), tags = Some(tags), From 7a79ea47f2bab64a5d12579657ec942a5305d58b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 7 Oct 2024 14:15:46 +0200 Subject: [PATCH 020/129] fixed verion routes --- conf/webknossos.versioned.routes | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/conf/webknossos.versioned.routes b/conf/webknossos.versioned.routes index 50311201f7a..0b171a2c0de 100644 --- a/conf/webknossos.versioned.routes +++ b/conf/webknossos.versioned.routes @@ -24,8 +24,8 @@ GET /v8/datasets/:organizationId/:datasetName co -> /v8/ webknossos.latest.Routes #v7: support changes to v9 -PATCH /v8/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) -GET /v8/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v7/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v7/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) PATCH /v7/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) GET /v7/datasets/:organizationId/:datasetName controllers.LegacyApiController.readDatasetV8(organizationId: String, datasetName: String, sharingToken: Option[String]) @@ -36,8 +36,8 @@ GET /v7/datasets co -> /v7/ webknossos.latest.Routes # v6: support changes to v9 -PATCH /v8/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) -GET /v8/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v6/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v6/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) PATCH /v6/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) # v6: support changes to v7 @@ -48,9 +48,9 @@ GET /v6/datasets/:organizationName/:datasetName 
co # v5: support changes to v9 -PATCH /v8/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) -GET /v8/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) -PATCH /v8/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) +PATCH /v5/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v5/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v5/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) # v5: support changes to v7 GET /v5/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) @@ -62,9 +62,9 @@ GET /v5/datasets/:organizationName/:datasetName/isValidNewName co -> /v5/ webknossos.latest.Routes # v4: support changes to v9 -PATCH /v8/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) -GET /v8/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) -PATCH /v8/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) +PATCH /v4/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET /v4/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v4/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) # v4: support changes to v7 GET /v4/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) @@ -91,9 +91,9 @@ POST /v4/datasets/:organizationName/:datasetName/createExplorational co -> /v4/ webknossos.latest.Routes # v3: support changes to v9 -PATCH /v8/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) -GET /v8/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) -PATCH /v8/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) +PATCH /v3/datasets/:organizationId/:datasetName controllers.LegacyApiController.updateDatasetV8(organizationId: String, datasetName: String) +GET 
/v3/datasets/:organizationId/:datasetName/sharingToken controllers.LegacyApiController.getDatasetSharingTokenV8(organizationId: String, datasetName: String) +PATCH /v3/datasets/:organizationId/:datasetName/teams controllers.LegacyApiController.updateDatasetTeamsV8(organizationId: String, datasetName: String) # v3: support changes to v7 GET /v3/datasets controllers.LegacyApiController.listDatasetsV6(isActive: Option[Boolean], isUnreported: Option[Boolean], organizationName: Option[String], onlyMyOrganization: Option[Boolean], uploaderId: Option[String], folderId: Option[String], includeSubfolders: Option[Boolean], searchQuery: Option[String], limit: Option[Int], compact: Option[Boolean]) From 5443912d6f7fb2c608af469621111d7ba71cd537 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 7 Oct 2024 16:08:08 +0200 Subject: [PATCH 021/129] fix reserveUploadRoute --- app/controllers/WKRemoteDataStoreController.scala | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index a57511c9f1f..687d2efd8b9 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -81,9 +81,8 @@ class WKRemoteDataStoreController @Inject()( _ <- datasetDAO.updateFolder(dataset._id, folderId)(GlobalAccessContext) _ <- datasetService.addInitialTeams(dataset, uploadInfo.initialTeams, user)(AuthorizedAccessContext(user)) _ <- datasetService.addUploader(dataset, user._id)(AuthorizedAccessContext(user)) - uploadInfo.path = dataset.path // Update path according to the newly created dataset. - uploadInfo.newDatasetId = dataset._id.toString // Update newDatasetId according to the newly created dataset. - } yield Ok(Json.toJson(uploadInfo)) + updatedInfo = uploadInfo.copy(newDatasetId = dataset._id.toString, path = dataset.path) // Update newDatasetId and path according to the newly created dataset. 
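      // ReserveUploadInformation is an immutable case class, so the removed lines above could not simply
      // reassign its fields; copy(...) builds a fresh instance carrying the id and directory path of the
      // dataset row that was just created, and that instance is what gets serialized in the response below.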
+ } yield Ok(Json.toJson(updatedInfo)) } } From 8224f6a6294c4d6e18a94659235a420072bdd547 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 8 Oct 2024 11:55:18 +0200 Subject: [PATCH 022/129] rename orga_name in jobs to orga_id --- app/controllers/AiModelController.scala | 4 ++-- app/controllers/JobController.scala | 22 +++++++++++----------- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/app/controllers/AiModelController.scala b/app/controllers/AiModelController.scala index c22be24e686..87dfeac205e 100644 --- a/app/controllers/AiModelController.scala +++ b/app/controllers/AiModelController.scala @@ -133,7 +133,7 @@ class AiModelController @Inject()( jobCommand = JobCommand.train_model commandArgs = Json.obj( "training_annotations" -> Json.toJson(trainingAnnotations), - "organization_name" -> organization._id, + "organization_id" -> organization._id, "model_id" -> modelId, "custom_workflow_provided_by_user" -> request.body.workflowYaml ) @@ -170,7 +170,7 @@ class AiModelController @Inject()( jobCommand = JobCommand.infer_with_model boundingBox <- BoundingBox.fromLiteral(request.body.boundingBox).toFox commandArgs = Json.obj( - "organization_name" -> organization._id, + "organization_id" -> organization._id, "dataset_name" -> dataset.name, "color_layer_name" -> request.body.colorLayerName, "bounding_box" -> boundingBox.toLiteral, diff --git a/app/controllers/JobController.scala b/app/controllers/JobController.scala index ed60b221f65..0f979cead4d 100644 --- a/app/controllers/JobController.scala +++ b/app/controllers/JobController.scala @@ -129,7 +129,7 @@ class JobController @Inject()( _ <- bool2Fox(request.identity._organization == organization._id) ~> FORBIDDEN command = JobCommand.convert_to_wkw commandArgs = Json.obj( - "organization_name" -> organization._id, + "organization_id" -> organization._id, "organization_display_name" -> organization.name, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, @@ -157,7 +157,7 @@ class JobController @Inject()( _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.compute_mesh_file commandArgs = Json.obj( - "organization_name" -> organization._id, + "organization_id" -> organization._id, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, "layer_name" -> layerName, @@ -181,7 +181,7 @@ class JobController @Inject()( _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.compute_segment_index_file commandArgs = Json.obj( - "organization_name" -> dataset._organization, + "organization_id" -> dataset._organization, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, "segmentation_layer_name" -> layerName, @@ -205,7 +205,7 @@ class JobController @Inject()( _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.infer_nuclei commandArgs = Json.obj( - "organization_name" -> dataset._organization, + "organization_id" -> dataset._organization, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, "layer_name" -> layerName, @@ -235,7 +235,7 @@ class JobController @Inject()( _ <- Fox.runIf(!multiUser.isSuperUser)(jobService.assertBoundingBoxLimits(bbox, None)) command = JobCommand.infer_neurons commandArgs = Json.obj( - "organization_name" -> organization._id, + "organization_id" -> organization._id, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, "new_dataset_name" -> newDatasetName, @@ -267,7 +267,7 @@ class JobController @Inject()( _ <- 
Fox.runIf(!multiUser.isSuperUser)(jobService.assertBoundingBoxLimits(bbox, None)) command = JobCommand.infer_mitochondria commandArgs = Json.obj( - "organization_name" -> dataset._organization, + "organization_id" -> dataset._organization, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, "new_dataset_name" -> newDatasetName, @@ -299,7 +299,7 @@ class JobController @Inject()( _ <- bool2Fox(multiUser.isSuperUser) ?~> "job.alignSections.notAllowed.onlySuperUsers" command = JobCommand.align_sections commandArgs = Json.obj( - "organization_name" -> organization._id, + "organization_id" -> organization._id, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, "new_dataset_name" -> newDatasetName, @@ -353,7 +353,7 @@ class JobController @Inject()( s"${formatDateForFilename(new Date())}__${dataset.name}__${annotationLayerName.map(_ => "volume").getOrElse(layerName.getOrElse(""))}.zip" commandArgs = Json.obj( "dataset_path" -> dataset.path, - "organization_name" -> organization._id, + "organization_id" -> organization._id, "dataset_name" -> dataset.name, "nd_bbox" -> ndBoundingBox.toWkLibsDict, "export_file_name" -> exportFileName, @@ -390,7 +390,7 @@ class JobController @Inject()( _ <- datasetService.assertValidDatasetName(newDatasetName) _ <- datasetService.assertValidLayerNameLax(outputSegmentationLayerName) commandArgs = Json.obj( - "organization_name" -> organization._id, + "organization_id" -> organization._id, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, "fallback_layer_name" -> fallbackLayerName, @@ -419,7 +419,7 @@ class JobController @Inject()( _ <- datasetService.assertValidLayerNameLax(layerName) command = JobCommand.find_largest_segment_id commandArgs = Json.obj( - "organization_name" -> organization._id, + "organization_id" -> organization._id, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, "layer_name" -> layerName @@ -452,7 +452,7 @@ class JobController @Inject()( exportFileName = s"webknossos_animation_${formatDateForFilename(new Date())}__${dataset.name}__$layerName.mp4" command = JobCommand.render_animation commandArgs = Json.obj( - "organization_name" -> organization._id, + "organization_id" -> organization._id, "dataset_name" -> dataset.name, "dataset_path" -> dataset.path, "export_file_name" -> exportFileName, From 35d62a94ca320453873f0e515fc582cd8089d380 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 8 Oct 2024 11:57:30 +0200 Subject: [PATCH 023/129] format code --- app/controllers/DatasetController.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index c67f625b56b..9f1c666f24a 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -326,7 +326,7 @@ class DatasetController @Inject()(userService: UserService, sil.SecuredAction.async(parse.json) { implicit request => withJsonBodyUsing(datasetPublicReads) { case (description, datasetName, legacyDatasetDisplayName, sortingKey, isPublic, tags, metadata, folderId) => { - val name = if(datasetName.isDefined) datasetName else legacyDatasetDisplayName + val name = if (datasetName.isDefined) datasetName else legacyDatasetDisplayName for { parsedDatasetId <- ObjectId.fromString(datasetId) ?~> "Invalid dataset id" ~> NOT_FOUND dataset <- datasetDAO.findOne(parsedDatasetId) ?~> notFoundMessage(parsedDatasetId.toString) ~> NOT_FOUND From 1b6ff5c187aea3a6cd6cbbd3cde28c2e6a97cd44 Mon Sep 17 
00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 8 Oct 2024 12:28:41 +0200 Subject: [PATCH 024/129] fix finishUploadRoute --- app/controllers/WKRemoteDataStoreController.scala | 2 +- .../webknossos/datastore/controllers/DataSourceController.scala | 2 +- .../datastore/services/DSRemoteWebknossosClient.scala | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 687d2efd8b9..3e3b14faeb5 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -145,7 +145,7 @@ class WKRemoteDataStoreController @Inject()( _ <- Fox.runIf(!needsConversion)(logUploadToSlack(user, dataset._id, viaAddRoute)) _ = analyticsService.track(UploadDatasetEvent(user, dataset, dataStore, datasetSizeBytes)) _ = if (!needsConversion) mailchimpClient.tagUser(user, MailchimpTag.HasUploadedOwnDataset) - } yield Ok(Json.toJson("id" -> dataset._id)) + } yield Ok(Json.obj("id" -> dataset._id)) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 33c2484d0c6..f33c90dcbe3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -226,7 +226,7 @@ class DataSourceController @Inject()( request.body.needsConversion.getOrElse(false), viaAddRoute = false, userToken = urlOrHeaderToken(token, request)) ?~> "reportUpload.failed" - } yield Ok(Json.toJson("newDatasetId" -> uploadedDatasetIdJson)) + } yield Ok(Json.obj("newDatasetId" -> uploadedDatasetIdJson)) } } yield response } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index 4b88c25ef47..eede55dea8a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -92,7 +92,7 @@ class DSRemoteWebknossosClient @Inject()( .addQueryString("viaAddRoute" -> viaAddRoute.toString) .addQueryString("datasetSizeBytes" -> datasetSizeBytes.toString) .addQueryStringOptional("token", userToken) - .getWithJsonResponse[JsValue] + .postWithJsonResponse[JsValue] uploadedDatasetId <- (uploadedDatasetIdJson \ "id").validate[String].asOpt.toFox ?~> "uploadedDatasetId.invalid" } yield uploadedDatasetId From e320a6ca56721fbb9d09fc0bbcd796a0ed82fa1e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 8 Oct 2024 19:09:44 +0200 Subject: [PATCH 025/129] allow duplicate names when uploading a new dataset --- frontend/javascripts/admin/dataset/dataset_upload_view.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index af18147d0de..587a34c2ddf 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -823,7 +823,7 @@ class DatasetUploadView extends React.Component { From 
72715f10fe4da0d30cf834102e48951861fea055 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Tue, 8 Oct 2024 19:22:50 +0200 Subject: [PATCH 026/129] fix job list view --- frontend/javascripts/admin/api/jobs.ts | 3 ++- frontend/javascripts/types/api_flow_types.ts | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/admin/api/jobs.ts b/frontend/javascripts/admin/api/jobs.ts index b5529acda8b..042adeae3a7 100644 --- a/frontend/javascripts/admin/api/jobs.ts +++ b/frontend/javascripts/admin/api/jobs.ts @@ -20,7 +20,8 @@ function transformBackendJobToAPIJob(job: any): APIJob { datasetId: job.commandArgs.datasetId, type: job.command, datasetName: job.commandArgs.dataset_name, - organizationId: job.commandArgs.organization_name, + datasetPath: job.commandArgs.dataset_path, + organizationId: job.commandArgs.organization_id, layerName: job.commandArgs.layer_name || job.commandArgs.volume_layer_name, annotationLayerName: job.commandArgs.annotation_layer_name, boundingBox: job.commandArgs.bbox, diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 0f95b204fca..16ea20d0ca1 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -715,8 +715,9 @@ export type WkLibsNdBoundingBox = BoundingBoxObject & { export type APIJob = { readonly id: string; - readonly datasetId: string | null | undefined; // TODO: Adjust worker accordingly + readonly datasetId: string | null | undefined; readonly datasetName: string | null | undefined; + readonly datasetPath: string | null | undefined; readonly exportFileName: string | null | undefined; readonly layerName: string | null | undefined; readonly annotationLayerName: string | null | undefined; From d5a7fdc09b436d70f2ad6efd3950fe9c2058e986 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 9 Oct 2024 09:10:05 +0200 Subject: [PATCH 027/129] fix some datastore requests --- frontend/javascripts/oxalis/api/api_latest.ts | 2 +- .../oxalis/model/bucket_data_handling/wkstore_adapter.ts | 4 ++-- frontend/javascripts/oxalis/model/sagas/mesh_saga.ts | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index 404d508f660..4899b9dc750 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -2111,7 +2111,7 @@ class DataApi { const magString = resolution.join("-"); return ( - `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.name}/layers/${layerName}/data?mag=${magString}&` + + `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.path}/layers/${layerName}/data?mag=${magString}&` + `token=${token}&` + `x=${Math.floor(topLeft[0])}&` + `y=${Math.floor(topLeft[1])}&` + diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index 4db8385e75b..640d469748f 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -100,13 +100,13 @@ export async function requestWithFallback( batch: Array, ): Promise> { const state = Store.getState(); - const datasetName = state.dataset.name; + const datasetPath = state.dataset.path; const organization = 
state.dataset.owningOrganization; const dataStoreHost = state.dataset.dataStore.url; const tracingStoreHost = state.tracing.tracingStore.url; const getDataStoreUrl = (optLayerName?: string) => - `${dataStoreHost}/data/datasets/${organization}/${datasetName}/layers/${ + `${dataStoreHost}/data/datasets/${organization}/${datasetPath}/layers/${ optLayerName || layerInfo.name }`; diff --git a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts index 772a23a763e..4056035295c 100644 --- a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts @@ -464,9 +464,9 @@ function* maybeLoadMeshChunk( const scaleFactor = yield* select((state) => state.dataset.dataSource.scale.factor); const dataStoreHost = yield* select((state) => state.dataset.dataStore.url); const owningOrganization = yield* select((state) => state.dataset.owningOrganization); - const datasetName = yield* select((state) => state.dataset.name); + const datasetPath = yield* select((state) => state.dataset.path); const tracingStoreHost = yield* select((state) => state.tracing.tracingStore.url); - const dataStoreUrl = `${dataStoreHost}/data/datasets/${owningOrganization}/${datasetName}/layers/${ + const dataStoreUrl = `${dataStoreHost}/data/datasets/${owningOrganization}/${datasetPath}/layers/${ layer.fallbackLayer != null ? layer.fallbackLayer : layer.name }`; const tracingStoreUrl = `${tracingStoreHost}/tracings/volume/${layer.name}`; From 24ddfd34659f27eb5e3e8866fc8f39c21ec8d083 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 9 Oct 2024 11:14:10 +0200 Subject: [PATCH 028/129] further minor fixes --- app/models/dataset/DatasetService.scala | 2 +- frontend/javascripts/admin/job/job_list_view.tsx | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 188cc102969..e6453774d7e 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -128,7 +128,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, path = datasetPath, isPublic = false, isUsable = newDataSource.isUsable, - name = newDataSource.id.path, + name = datasetName, voxelSize = newDataSource.voxelSizeOpt, sharingToken = None, status = newDataSource.statusOpt.getOrElse(""), diff --git a/frontend/javascripts/admin/job/job_list_view.tsx b/frontend/javascripts/admin/job/job_list_view.tsx index 767e034c3a9..cfba27dce86 100644 --- a/frontend/javascripts/admin/job/job_list_view.tsx +++ b/frontend/javascripts/admin/job/job_list_view.tsx @@ -116,7 +116,7 @@ function JobListView() { const linkToDataset = job.datasetId != null ? `/datasets/${job.datasetId}/view` // prefer updated link over legacy link. 
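          // Jobs without a datasetId (for example jobs created before datasets were addressed by id) fall back
          // to the legacy organization/dataset URL; datasetPath is preferred over datasetName because names
          // may now be duplicated within an organization while the directory path stays unique.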
- : `/datasets/${job.organizationId || ""}/${job.datasetName || ""}/view`; + : `/datasets/${job.organizationId || ""}/${job.datasetPath || job.datasetName}/view`; if (job.type === APIJobType.CONVERT_TO_WKW && job.datasetName) { return {`Conversion to WKW of ${job.datasetName}`}; } else if (job.type === APIJobType.EXPORT_TIFF && job.organizationId && job.datasetName) { From 503f11489eb7d5fb8c63797d5d50188083eeeb51 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 9 Oct 2024 11:27:42 +0200 Subject: [PATCH 029/129] make add remote dataset path a post request as it always creates a new dataset (even when the name is already taken) --- frontend/javascripts/admin/admin_rest_api.ts | 2 +- .../conf/com.scalableminds.webknossos.datastore.routes | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index e74645b2054..eefbd0af100 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1429,7 +1429,7 @@ export async function storeRemoteDataset( return Request.sendJSONReceiveJSON( `${datastoreUrl}/data/datasets/${organizationId}/${datasetName}?${params}`, { - method: "PUT", + method: "POST", data: datasource, }, ); diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index 996859c11e1..de75c8cfc6a 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -112,7 +112,7 @@ POST /datasets/cancelUpload GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(token: Option[String], organizationId: String, datasetPath: Option[String]) GET /datasets/:organizationId/:datasetPath/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(token: Option[String], organizationId: String, datasetPath: String) POST /datasets/:organizationId/:datasetPath @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(token: Option[String], organizationId: String, datasetPath: String) -PUT /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationId: String, datasetName: String, folderId: Option[String]) +POST /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationId: String, datasetName: String, folderId: Option[String]) DELETE /datasets/:organizationId/:datasetPath/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationId: String, datasetPath: String) POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose(token: Option[String]) POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset(token: Option[String]) From 3b41f86b86c3b82c0155d5e1d7c94c996d917866 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 9 Oct 2024 16:21:22 +0200 Subject: [PATCH 030/129] WIP: replace missed code parts where dataset address was still wrong / not backwards compatible --- 
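The TODO comments in DatasetController, AnnotationIOController and TaskCreationService below all circle around the same question: how to resolve a dataset that is still addressed by a plain string now that names are no longer unique. One possible shape for such a lookup, reusing the DatasetDAO methods introduced or touched in this series (the helper name and the exact fallback order are assumptions, not part of this patch):

def findOneByIdOrNameAndOrganization(datasetIdOrName: String, organizationId: String)(
    implicit ctx: DBAccessContext): Fox[Dataset] =
  ObjectId.fromStringSync(datasetIdOrName) match {
    // a parseable ObjectId addresses the dataset directly, independent of its (possibly duplicated) name
    case Some(datasetId) => findOne(datasetId)
    // legacy addressing: try the directory path first (unique per organization), then the dataset name
    case None =>
      findOneByPathAndOrganization(datasetIdOrName, organizationId)
        .orElse(findOneByNameAndOrganization(datasetIdOrName, organizationId))
  }
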
app/controllers/AiModelController.scala | 4 +- app/controllers/AnnotationIOController.scala | 1 + app/controllers/DatasetController.scala | 11 +---- app/controllers/LegacyApiController.scala | 12 ++--- .../WKRemoteDataStoreController.scala | 1 + .../WKRemoteWorkerController.scala | 3 +- app/models/dataset/Dataset.scala | 34 +++++++++++++- app/models/dataset/DatasetService.scala | 11 +---- app/models/job/JobService.scala | 4 +- app/models/task/TaskCreationParameters.scala | 2 +- app/models/task/TaskCreationService.scala | 1 + app/opengraph/OpenGraphService.scala | 47 +++++++++++++------ conf/application.conf | 4 +- frontend/javascripts/admin/api/jobs.ts | 2 +- .../04_configure_new_dataset.tsx | 2 +- .../dataset/dataset_selection_component.tsx | 2 +- .../view/action-bar/starting_job_modals.tsx | 2 +- .../services/uploading/UploadService.scala | 2 +- 18 files changed, 92 insertions(+), 53 deletions(-) diff --git a/app/controllers/AiModelController.scala b/app/controllers/AiModelController.scala index 87dfeac205e..fe659e68990 100644 --- a/app/controllers/AiModelController.scala +++ b/app/controllers/AiModelController.scala @@ -40,7 +40,7 @@ object RunTrainingParameters { case class RunInferenceParameters(annotationId: Option[ObjectId], aiModelId: ObjectId, - datasetName: String, + datasetPath: String, colorLayerName: String, boundingBox: String, newDatasetName: String, @@ -163,7 +163,7 @@ class AiModelController @Inject()( for { _ <- userService.assertIsSuperUser(request.identity) organization <- organizationDAO.findOne(request.identity._organization) - dataset <- datasetDAO.findOneByPathAndOrganization(request.body.datasetName, organization._id) + dataset <- datasetDAO.findOneByPathAndOrganization(request.body.datasetPath, organization._id) dataStore <- dataStoreDAO.findOneByName(dataset._dataStore) ?~> "dataStore.notFound" _ <- aiModelDAO.findOne(request.body.aiModelId) ?~> "aiModel.notFound" _ <- datasetService.assertValidDatasetName(request.body.newDatasetName) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index fb021ede853..ba0c1e60086 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -236,6 +236,7 @@ class AnnotationIOController @Inject()( organizationId <- Fox.fillOption(organizationIdOpt) { datasetDAO.getOrganizationIdForDataset(datasetName)(GlobalAccessContext) } ?~> Messages("dataset.noAccess", datasetName) ~> FORBIDDEN + // TODO: Check whether thats ok, the datasets are addressed via the name by the annotations. This looks broken to me. dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) ?~> (if (wkUrl.nonEmpty && conf.Http.uri != wkUrl) { Messages( "dataset.noAccess.wrongHost", diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index 9f1c666f24a..c9fef455e80 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -299,14 +299,6 @@ class DatasetController @Inject()(userService: UserService, } yield Ok("Ok") } - // TODO: Maybe no longer needed. Remove? 
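  // (Superseded by getDatasetIdFromNameAndOrganization further down, which resolves a legacy
  //  organizationId/datasetName pair to the new dataset id.)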
- def resolveDatasetNameToId(organizationId: String, datasetName: String): Action[AnyContent] = - sil.UserAwareAction.async { implicit request => - for { - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND - } yield Ok(Json.obj("datasetId" -> dataset._id)) - } - def updatePartial(datasetId: String): Action[DatasetUpdateParameters] = sil.SecuredAction.async(validateJson[DatasetUpdateParameters]) { implicit request => for { @@ -415,7 +407,8 @@ class DatasetController @Inject()(userService: UserService, def getDatasetIdFromNameAndOrganization(datasetName: String, organizationId: String): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND + // TODO: Make this first by path and then by name if the path is not found + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) ?~> notFoundMessage(datasetName) ~> NOT_FOUND } yield Ok( Json.obj("id" -> dataset._id, diff --git a/app/controllers/LegacyApiController.scala b/app/controllers/LegacyApiController.scala index 59033ee7306..6a5a482c228 100644 --- a/app/controllers/LegacyApiController.scala +++ b/app/controllers/LegacyApiController.scala @@ -93,7 +93,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, def readDatasetV6(organizationName: String, datasetName: String, sharingToken: Option[String]): Action[AnyContent] = sil.UserAwareAction.async { implicit request => for { - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationName) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationName) result <- datasetController.read(dataset._id.toString, sharingToken)(request) adaptedResult <- replaceInResult(replaceVoxelSize)(result) } yield adaptedResult @@ -112,7 +112,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, sil.SecuredAction.async(parse.json) { implicit request => for { _ <- Fox.successful(logVersioned(request)) - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) result <- datasetController.update(dataset._id.toString)(request) } yield result } @@ -121,7 +121,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, sil.SecuredAction.async { implicit request => for { _ <- Fox.successful(logVersioned(request)) - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) sharingToken <- datasetController.getSharingToken(dataset._id.toString)(request) } yield sharingToken } @@ -130,7 +130,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, sil.SecuredAction.async(validateJson[List[ObjectId]]) { implicit request => for { _ <- Fox.successful(logVersioned(request)) - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) result <- datasetController.updateTeams(dataset._id.toString)(request) } yield result } @@ -138,7 +138,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, def readDatasetV8(organizationId: String, datasetName: String, sharingToken: Option[String]): Action[AnyContent] 
= sil.UserAwareAction.async { implicit request => for { - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) result <- datasetController.read(dataset._id.toString, sharingToken)(request) } yield result } @@ -270,7 +270,7 @@ class LegacyApiController @Inject()(annotationController: AnnotationController, sil.SecuredAction.async(validateJson[LegacyCreateExplorationalParameters]) { implicit request => for { _ <- Fox.successful(logVersioned(request)) - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationName) + dataset <- datasetDAO.findOneByNameAndOrganization(datasetName, organizationName) result <- annotationController.createExplorational(dataset._id.toString)( request.withBody(replaceCreateExplorationalParameters(request))) adaptedResult <- replaceInResult(replaceAnnotationLayers)(result) diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 3e3b14faeb5..85b10fad9e6 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -114,6 +114,7 @@ class WKRemoteDataStoreController @Inject()( } } + // TODOM: I think this is not used anywhere? Got replaced with the compose route I would say. private def validateLayerToLink(layerIdentifier: LinkedLayerIdentifier, requestingUser: User)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Unit] = for { diff --git a/app/controllers/WKRemoteWorkerController.scala b/app/controllers/WKRemoteWorkerController.scala index 47f2eb9a825..fb479b3477f 100644 --- a/app/controllers/WKRemoteWorkerController.scala +++ b/app/controllers/WKRemoteWorkerController.scala @@ -103,6 +103,7 @@ class WKRemoteWorkerController @Inject()(jobDAO: JobDAO, } yield Ok } + // TODOM: What tool is using this route? 
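  // Called by a worker (authenticated via its key) to attach the dataset written by an AI inference job
  // to the corresponding AiInference entry; the worker still reports the dataset by its name string, so the
  // lookup below uses findOneByNameAndOrganization, which also matches the directory path and, if several
  // datasets share the name, picks the oldest one.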
def attachDatasetToInference(key: String, id: String): Action[String] = Action.async(validateJson[String]) { implicit request => implicit val ctx: DBAccessContext = GlobalAccessContext @@ -110,7 +111,7 @@ class WKRemoteWorkerController @Inject()(jobDAO: JobDAO, _ <- workerDAO.findOneByKey(key) ?~> "jobs.worker.notFound" jobIdParsed <- ObjectId.fromString(id) organizationId <- jobDAO.organizationIdForJobId(jobIdParsed) ?~> "job.notFound" - dataset <- datasetDAO.findOneByPathAndOrganization(request.body, organizationId) + dataset <- datasetDAO.findOneByNameAndOrganization(request.body, organizationId) aiInference <- aiInferenceDAO.findOneByJobId(jobIdParsed) ?~> "aiInference.notFound" _ <- aiInferenceDAO.updateDataset(aiInference._id, dataset._id) } yield Ok diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index 79a219ece46..2ac7d3633fe 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -26,6 +26,7 @@ import controllers.DatasetUpdateParameters import javax.inject.Inject import models.organization.OrganizationDAO +import net.liftweb.common.Full import play.api.libs.json._ import play.utils.UriEncoding import slick.jdbc.PostgresProfile.api._ @@ -401,15 +402,44 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA r <- rList.headOption } yield r - def findOneByPathAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = + def findOneByPathAndOrganization(path: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = for { accessQuery <- readAccessQuery r <- run(q"""SELECT $columns FROM $existingCollectionName - WHERE path = $name + WHERE path = $path AND _organization = $organizationId AND $accessQuery LIMIT 1""".as[DatasetsRow]) + parsed <- parseFirst(r, s"$organizationId/$path") + } yield parsed + + def doesDatasetNameExistInOrganization(name: String, organizationId: String)( + implicit ctx: DBAccessContext): Fox[Boolean] = + for { + accessQuery <- readAccessQuery + r <- run(q"""SELECT 1 + FROM $existingCollectionName + WHERE name = $name + AND _organization = $organizationId + AND $accessQuery + LIMIT 1""".as[DatasetsRow]) + exists <- parseFirst(r, s"$organizationId/$name").futureBox.map { + case Full(_) => true + case _ => false + } + } yield exists + + def findOneByNameAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = + for { + accessQuery <- readAccessQuery + r <- run(q"""SELECT $columns + FROM $existingCollectionName + WHERE (path = $name OR name = $name) + AND _organization = $organizationId + AND $accessQuery + ORDER BY created ASC + LIMIT 1""".as[DatasetsRow]) parsed <- parseFirst(r, s"$organizationId/$name") } yield parsed diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index e6453774d7e..1caf8b3e962 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -63,15 +63,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, _ <- bool2Fox(!name.startsWith(".")) ?~> "dataset.layer.name.invalid.startsWithDot" } yield () - def isNewDatasetName(name: String, organizationId: String): Fox[Boolean] = - datasetDAO - .findOneByPathAndOrganization(name, organizationId)(GlobalAccessContext) - .futureBox - .flatMap { - case Full(_) => Fox.successful(false) - case _ => Fox.successful(true) - } - .toFox + private def isNewDatasetName(name: String, organizationId: String): Fox[Boolean] 
= + datasetDAO.doesDatasetNameExistInOrganization(name, organizationId)(GlobalAccessContext) def createPreliminaryDataset(datasetName: String, organizationId: String, dataStore: DataStore): Fox[Dataset] = { val unreportedDatasource = UnusableDataSource(DataSourceId(datasetName, organizationId), notYetUploadedStatus) diff --git a/app/models/job/JobService.scala b/app/models/job/JobService.scala index 26b7119d360..2100c59cc86 100644 --- a/app/models/job/JobService.scala +++ b/app/models/job/JobService.scala @@ -152,9 +152,9 @@ class JobService @Inject()(wkConf: WkConf, logger.info(s"WKW conversion job ${job._id} failed. Deleting dataset from the database, freeing the name...") val commandArgs = job.commandArgs.value for { - datasetName <- commandArgs.get("dataset_name").map(_.as[String]).toFox + datasetPath <- commandArgs.get("dataset_path").map(_.as[String]).toFox organizationId <- commandArgs.get("organization_name").map(_.as[String]).toFox - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId)(GlobalAccessContext) + dataset <- datasetDAO.findOneByPathAndOrganization(datasetPath, organizationId)(GlobalAccessContext) _ <- datasetDAO.deleteDataset(dataset._id) } yield () } else Fox.successful(()) diff --git a/app/models/task/TaskCreationParameters.scala b/app/models/task/TaskCreationParameters.scala index e9594cecb2b..31526d7a671 100644 --- a/app/models/task/TaskCreationParameters.scala +++ b/app/models/task/TaskCreationParameters.scala @@ -10,7 +10,7 @@ case class TaskParameters(taskTypeId: String, projectName: String, scriptId: Option[String], boundingBox: Option[BoundingBox], - dataSet: String, + dataSet: String, // TODOM: Make this a dataSetId editPosition: Vec3Int, editRotation: Vec3Double, creationInfo: Option[String], diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index 87fa37b9889..a2441c8f910 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -234,6 +234,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, private def addVolumeFallbackBoundingBox(volume: VolumeTracing, organizationId: String): Fox[VolumeTracing] = if (volume.boundingBox.isEmpty) { for { + // TODOM: Here the dataset also needs to be identified by either path & orga or id dataset <- datasetDAO.findOneByPathAndOrganization(volume.datasetName, organizationId)(GlobalAccessContext) dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) } yield volume.copy(boundingBox = dataSource.boundingBox) diff --git a/app/opengraph/OpenGraphService.scala b/app/opengraph/OpenGraphService.scala index c6c0b900b68..d59bce27070 100644 --- a/app/opengraph/OpenGraphService.scala +++ b/app/opengraph/OpenGraphService.scala @@ -40,8 +40,10 @@ class OpenGraphService @Inject()(datasetDAO: DatasetDAO, // This should match the frontend-side routes, not api routes, since those are the links people send around private val shortLinkRouteRegex = "^/links/(.*)".r - private val datasetRoute1Regex = "^/datasets/([^/^#]+)/([^/^#]+)/view".r - private val datasetRoute2Regex = "^/datasets/([^/^#]+)/([^/^#]+)".r + private val datasetRoute1Regex = "^/datasets/([^/^#]+)/view".r + private val datasetRoute2Regex = "^/datasets/([^/^#]+)".r + private val datasetRoute1LegacyRegex = "^/datasets/([^/^#]+)/([^/^#]+)/view".r + private val datasetRoute2LegacyRegex = "^/datasets/([^/^#]+)/([^/^#]+)".r private val workflowRouteRegex = "^/workflows/([^/^#]+)".r private val annotationRouteRegex = 
"^/annotations/([^/^#]+)".r @@ -92,27 +94,44 @@ class OpenGraphService @Inject()(datasetDAO: DatasetDAO, private def detectPageType(uriPath: String) = uriPath match { - case datasetRoute1Regex(_, _) | datasetRoute2Regex(_, _) => OpenGraphPageType.dataset - case annotationRouteRegex(_) => OpenGraphPageType.annotation - case workflowRouteRegex(_) => OpenGraphPageType.workflow - case _ => OpenGraphPageType.unknown + case datasetRoute1Regex(_, _) | datasetRoute2Regex(_, _) | datasetRoute1LegacyRegex(_, _) | + datasetRoute2LegacyRegex(_, _) => + OpenGraphPageType.dataset + case annotationRouteRegex(_) => OpenGraphPageType.annotation + case workflowRouteRegex(_) => OpenGraphPageType.workflow + case _ => OpenGraphPageType.unknown } private def datasetOpenGraphTags(uriPath: String, token: Option[String])(implicit ec: ExecutionContext, ctx: DBAccessContext): Fox[OpenGraphTags] = uriPath match { - case datasetRoute1Regex(organizationId, datasetName) => - datasetOpenGraphTagsWithOrganizationId(organizationId, datasetName, token) - case datasetRoute2Regex(organizationId, datasetName) => - datasetOpenGraphTagsWithOrganizationId(organizationId, datasetName, token) + case datasetRoute1Regex(datasetId) => + datasetOpenGraphTagsWithOrganizationId(Some(datasetId), None, None, token) + case datasetRoute2Regex(datasetId) => + datasetOpenGraphTagsWithOrganizationId(Some(datasetId), None, None, token) + case datasetRoute1LegacyRegex(organizationId, datasetName) => + datasetOpenGraphTagsWithOrganizationId(None, Some(organizationId), Some(datasetName), token) + case datasetRoute2LegacyRegex(organizationId, datasetName) => + datasetOpenGraphTagsWithOrganizationId(None, Some(organizationId), Some(datasetName), token) case _ => Fox.failure("not a matching uri") } - private def datasetOpenGraphTagsWithOrganizationId(organizationId: String, - datasetName: String, - token: Option[String])(implicit ctx: DBAccessContext) = + private def datasetOpenGraphTagsWithOrganizationId( + datasetIdOpt: Option[String], + organizationIdOpt: Option[String], + datasetNameOpt: Option[String], + token: Option[String])(implicit ec: ExecutionContext, ctx: DBAccessContext) = for { - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) + dataset <- (datasetIdOpt, organizationIdOpt, datasetNameOpt) match { + case (Some(datasetId), None, None) => + ObjectId + .fromStringSync(datasetId) + .map(datasetIdParsed => datasetDAO.findOne(datasetIdParsed)) + .getOrElse(Fox.failure("Invalid dataset id")) + case (None, Some(organizationId), Some(datasetName)) => + datasetDAO.findOneByNameAndOrganization(datasetName, organizationId) + case _ => Fox.failure("Could not find dataset") + } layers <- datasetLayerDAO.findAllForDataset(dataset._id) layerOpt = layers.find(_.category == Category.color) organization <- organizationDAO.findOne(dataset._organization) diff --git a/conf/application.conf b/conf/application.conf index 07d8b5d2dd1..85db10f41cf 100644 --- a/conf/application.conf +++ b/conf/application.conf @@ -151,8 +151,8 @@ features { taskReopenAllowedInSeconds = 30 allowDeleteDatasets = true # to enable jobs for local development, use "yarn enable-jobs" to also activate it in the database - jobsEnabled = false - voxelyticsEnabled = false + jobsEnabled = true + voxelyticsEnabled = true # For new users, the dashboard will show a banner which encourages the user to check out the following dataset. # If isWkorgInstance == true, `/createExplorative/hybrid/true` is appended to the URL so that a new tracing is opened. 
# If isWkorgInstance == false, `/view` is appended to the URL so that it's opened in view mode (since the user might not diff --git a/frontend/javascripts/admin/api/jobs.ts b/frontend/javascripts/admin/api/jobs.ts index 0e04cc85666..fd38eb230e6 100644 --- a/frontend/javascripts/admin/api/jobs.ts +++ b/frontend/javascripts/admin/api/jobs.ts @@ -316,7 +316,7 @@ export function runTraining(params: RunTrainingParameters) { type RunInferenceParameters = { annotationId?: string; aiModelId: string; - datasetName: string; + datasetPath: string; colorLayerName: string; boundingBox: Vector6; newDatasetName: string; diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx index 7da7c59a9d7..1d5c8a90028 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx +++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx @@ -307,7 +307,7 @@ function LinkedLayerForm({ datasetId: string; datasetName: string; }) { - const layers = Form.useWatch(["layers"]); + const layers = Form.useWatch(["layers"]) || []; // biome-ignore lint/correctness/useExhaustiveDependencies: See comment below React.useEffect(() => { diff --git a/frontend/javascripts/dashboard/dataset/dataset_selection_component.tsx b/frontend/javascripts/dashboard/dataset/dataset_selection_component.tsx index 920f069c2c3..39a0a6f7186 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_selection_component.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_selection_component.tsx @@ -12,7 +12,7 @@ async function fetchDatasets(query: string): Promise { return datasets.map((d) => ({ label: d.name, - value: d.name, + value: d.id, })); } diff --git a/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx b/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx index 6287fa9676b..1f384d87f5d 100644 --- a/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/starting_job_modals.tsx @@ -811,7 +811,7 @@ function CustomAiModelInferenceForm() { ...maybeAnnotationId, aiModelId: form.getFieldValue("aiModel"), workflowYaml: useCustomWorkflow ? form.getFieldValue("workflowYaml") : undefined, - datasetName: dataset.name, + datasetPath: dataset.path, colorLayerName: colorLayer.name, boundingBox, newDatasetName: newDatasetName, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index ef17c0a4c99..4657ed45209 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -36,7 +36,7 @@ case class ReserveUploadInformation( organization: String, totalFileCount: Long, filePaths: Option[List[String]], - layersToLink: Option[List[LinkedLayerIdentifier]], + layersToLink: Option[List[LinkedLayerIdentifier]], // TODOM: This is not used anywhere? Got replaced with the compose route I would say. 
initialTeams: List[String], // team ids folderId: Option[String]) object ReserveUploadInformation { From e0ecf8b8897d950700942d1f8c6620f10b216485 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Wed, 9 Oct 2024 18:01:21 +0200 Subject: [PATCH 031/129] WIP: replace missed code parts where dataset address was still wrong / not backwards compatible --- app/models/annotation/AnnotationService.scala | 11 ++-- app/models/task/TaskCreationParameters.scala | 6 +- app/models/task/TaskCreationService.scala | 63 +++++++++++++------ .../admin/task/task_create_bulk_view.tsx | 3 +- .../admin/task/task_create_form_view.tsx | 18 +++--- .../javascripts/admin/task/task_list_view.tsx | 8 +-- .../explorative_annotations_view.tsx | 2 +- frontend/javascripts/oxalis/api/api_latest.ts | 2 +- .../view/action-bar/merge_modal_view.tsx | 2 +- .../skeletontracing_server_objects.ts | 2 +- .../fixtures/tasktracing_server_objects.ts | 4 +- .../fixtures/volumetracing_server_objects.ts | 2 +- frontend/javascripts/types/api_flow_types.ts | 9 +-- .../helpers/SkeletonElementDefaults.scala | 7 ++- .../proto/SkeletonTracing.proto | 27 ++++---- 15 files changed, 102 insertions(+), 64 deletions(-) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 9f443721861..fe2af56b5dd 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -525,10 +525,10 @@ class AnnotationService @Inject()( } yield result } - def createSkeletonTracingBase(datasetName: String, + def createSkeletonTracingBase(datasetId: ObjectId, boundingBox: Option[BoundingBox], startPosition: Vec3Int, - startRotation: Vec3Double): SkeletonTracing = { + startRotation: Vec3Double)(implicit ctx: DBAccessContext): Fox[SkeletonTracing] = { val initialNode = NodeDefaults.createInstance.withId(1).withPosition(startPosition).withRotation(startRotation) val initialTree = Tree( 1, @@ -540,8 +540,11 @@ class AnnotationService @Inject()( "", System.currentTimeMillis() ) - SkeletonTracingDefaults.createInstance.copy( - datasetName = datasetName, + for { + dataset <- datasetDAO.findOne(datasetId) + } yield SkeletonTracingDefaults.createInstance.copy( + datasetName = dataset.name, + datasetId = dataset._id.toString, boundingBox = boundingBox.flatMap { box => if (box.isEmpty) None else Some(box) }, diff --git a/app/models/task/TaskCreationParameters.scala b/app/models/task/TaskCreationParameters.scala index 31526d7a671..b32a8c2e31b 100644 --- a/app/models/task/TaskCreationParameters.scala +++ b/app/models/task/TaskCreationParameters.scala @@ -1,6 +1,7 @@ package models.task -import com.scalableminds.util.geometry.{BoundingBox, Vec3Int, Vec3Double} +import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.requestparsing.ObjectId import models.user.Experience import play.api.libs.json.{Format, Json} @@ -10,7 +11,8 @@ case class TaskParameters(taskTypeId: String, projectName: String, scriptId: Option[String], boundingBox: Option[BoundingBox], - dataSet: String, // TODOM: Make this a dataSetId + datasetName: String, + datasetId: ObjectId, // TODOM: Make this a dataSetId editPosition: Vec3Int, editRotation: Vec3Double, creationInfo: Option[String], diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index a2441c8f910..a9dd0b1ad2d 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -22,6 +22,7 @@ import 
play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json.{JsObject, Json} import telemetry.SlackNotificationService import com.scalableminds.util.requestparsing.ObjectId +import play.api.http.Status.FORBIDDEN import scala.concurrent.ExecutionContext @@ -74,7 +75,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, for { taskTypeIdValidated <- ObjectId.fromString(taskParameters.taskTypeId) ?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" - dataset <- datasetDAO.findOneByPathAndOrganization(taskParameters.dataSet, organizationId) + dataset <- datasetDAO.findOne(taskParameters.datasetId) baseAnnotationIdValidated <- ObjectId.fromString(baseAnnotation.baseId) annotation <- resolveBaseAnnotationId(baseAnnotationIdValidated) tracingStoreClient <- tracingStoreService.clientFor(dataset) @@ -118,9 +119,10 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, }).flatten // Used in create (without files) in case of base annotation - private def duplicateOrCreateSkeletonBase(baseAnnotation: Annotation, - params: TaskParameters, - tracingStoreClient: WKRemoteTracingStoreClient): Fox[String] = + private def duplicateOrCreateSkeletonBase( + baseAnnotation: Annotation, + params: TaskParameters, + tracingStoreClient: WKRemoteTracingStoreClient)(implicit ctx: DBAccessContext): Fox[String] = for { baseSkeletonTracingIdOpt <- baseAnnotation.skeletonTracingId newTracingId <- baseSkeletonTracingIdOpt @@ -131,13 +133,14 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, editRotation = Some(params.editRotation), boundingBox = params.boundingBox)) .getOrElse( - tracingStoreClient.saveSkeletonTracing( - annotationService.createSkeletonTracingBase( - params.dataSet, + annotationService + .createSkeletonTracingBase( + params.datasetId, params.boundingBox, params.editPosition, params.editRotation - ))) + ) + .flatMap(tracingStoreClient.saveSkeletonTracing)) } yield newTracingId // Used in create (without files) in case of base annotation @@ -172,15 +175,17 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, for { taskTypeIdValidated <- ObjectId.fromString(params.taskTypeId) ?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" - skeletonTracingOpt = if ((taskType.tracingType == TracingType.skeleton || taskType.tracingType == TracingType.hybrid) && params.baseAnnotation.isEmpty) { - Some( - annotationService.createSkeletonTracingBase( - params.dataSet, + skeletonTracingOpt <- if ((taskType.tracingType == TracingType.skeleton || taskType.tracingType == TracingType.hybrid) && params.baseAnnotation.isEmpty) { + + annotationService + .createSkeletonTracingBase( + params.datasetId, params.boundingBox, params.editPosition, params.editRotation - )) - } else None + ) + .map(Some(_)) + } else Fox.successful(None) } yield skeletonTracingOpt } @@ -317,9 +322,26 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, case (paramBox, skeleton, volume) => paramBox match { case Full(params) => + for { + skeletonFox <- if(skeleton.isDefined) Fox.successful(skeleton) else annotationService.createSkeletonTracingBase(params.datasetId, + params.boundingBox, + params.editPosition, + params.editRotation) + volumeFox <- if(volume.isDefined) Fox.successful(volume) else annotationService + .createVolumeTracingBase( + params.datasetId.toString, + organizationId, + params.boundingBox, + params.editPosition, + params.editRotation, + 
volumeShowFallbackLayer = false, + resolutionRestrictions = taskType.settings.resolutionRestrictions + ) + .map(v => (v, None)) + } yield (Full(skeletonFox), Full(volumeFox)) val skeletonBox = Full( skeleton.openOr( - annotationService.createSkeletonTracingBase(params.dataSet, + annotationService.createSkeletonTracingBase(params.datasetId, params.boundingBox, params.editPosition, params.editRotation))) @@ -328,7 +350,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, .openOr( annotationService .createVolumeTracingBase( - params.dataSet, + params.datasetId.toString, organizationId, params.boundingBox, params.editPosition, @@ -386,11 +408,12 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } else { for { _ <- assertEachHasEitherSkeletonOrVolume(fullTasks) ?~> "task.create.needsEitherSkeletonOrVolume" - firstDatasetName <- fullTasks.headOption.map(_._1.dataSet).toFox - _ <- assertAllOnSameDataset(fullTasks, firstDatasetName) - dataset <- datasetDAO.findOneByPathAndOrganization(firstDatasetName, requestingUser._organization) ?~> Messages( + firstDatasetId <- fullTasks.headOption.map(_._1.datasetId).toFox // TODOM continue + _ <- assertAllOnSameDataset(fullTasks, firstDatasetId) + dataset <- datasetDAO.findOneByPathAndOrganization(firstDatasetId, requestingUser._organization) ?~> Messages( "dataset.notFound", - firstDatasetName) + firstDatasetId) + _ <- bool2Fox(dataset._organization == requestingUser._organization) ?~> FORBIDDEN _ = if (fullTasks.exists(task => task._1.baseAnnotation.isDefined)) slackNotificationService.noticeBaseAnnotationTaskCreation(fullTasks.map(_._1.taskTypeId).distinct, fullTasks.count(_._1.baseAnnotation.isDefined)) diff --git a/frontend/javascripts/admin/task/task_create_bulk_view.tsx b/frontend/javascripts/admin/task/task_create_bulk_view.tsx index da323050da6..fa81497aa7b 100644 --- a/frontend/javascripts/admin/task/task_create_bulk_view.tsx +++ b/frontend/javascripts/admin/task/task_create_bulk_view.tsx @@ -16,7 +16,8 @@ const { TextArea } = Input; export const NUM_TASKS_PER_BATCH = 100; export type NewTask = { readonly boundingBox: BoundingBoxObject | null | undefined; - readonly dataSet: string; + readonly datasetId: string; + readonly datasetName: string; readonly editPosition: Vector3; readonly editRotation: Vector3; readonly neededExperience: { diff --git a/frontend/javascripts/admin/task/task_create_form_view.tsx b/frontend/javascripts/admin/task/task_create_form_view.tsx index 075086defad..64ecebbeb77 100644 --- a/frontend/javascripts/admin/task/task_create_form_view.tsx +++ b/frontend/javascripts/admin/task/task_create_form_view.tsx @@ -76,7 +76,7 @@ export function taskToShortText(task: APITask) { export function taskToText(task: APITask) { const { id, - dataSet, + datasetId: dataSet, type, neededExperience, editPosition, @@ -304,7 +304,8 @@ type FormValues = { editPosition: Vector3; editRotation: Vector3; nmlFiles: UploadFile[]; - dataSet: string; + datasetId: string; + datasetName: string; projectName: string; neededExperience: NewTask["neededExperience"]; }; @@ -491,9 +492,10 @@ function TaskCreateFormView({ taskId, history }: Props) { }), )); - if (annotationResponse?.dataSetName != null) { + if (annotationResponse?.datasetName != null) { form.setFieldsValue({ - dataSet: annotationResponse.dataSetName, + datasetName: annotationResponse.datasetName, + datasetId: annotationResponse.datasetId, }); return Promise.resolve(); } @@ -505,7 +507,7 @@ function TaskCreateFormView({ taskId, history }: Props) { ); if ( 
- taskResponse?.dataSet != null && + taskResponse?.datasetId != null && _.isEqual(taskResponse.status, { pending: 0, active: 0, @@ -513,13 +515,15 @@ function TaskCreateFormView({ taskId, history }: Props) { }) ) { form.setFieldsValue({ - dataSet: taskResponse.dataSet, + datasetName: taskResponse.datasetName, + datasetId: taskResponse.datasetId, }); return Promise.resolve(); } form.setFieldsValue({ - dataSet: undefined, + datasetName: undefined, + datasetId: undefined, }); return Promise.reject(new Error("Invalid base annotation id.")); }, diff --git a/frontend/javascripts/admin/task/task_list_view.tsx b/frontend/javascripts/admin/task/task_list_view.tsx index c110b6c72cc..637763933c3 100644 --- a/frontend/javascripts/admin/task/task_list_view.tsx +++ b/frontend/javascripts/admin/task/task_list_view.tsx @@ -158,7 +158,7 @@ function TaskListView({ initialFieldValues }: Props) { "team", "projectName", "id", - "dataSet", + "datasetName", "created", "type", (task) => task.neededExperience.domain, @@ -267,9 +267,9 @@ function TaskListView({ initialFieldValues }: Props) { }, { title: "Dataset", - dataIndex: "dataSet", - key: "dataSet", - sorter: Utils.localeCompareBy((task) => task.dataSet), + dataIndex: "datasetName", + key: "datasetName", + sorter: Utils.localeCompareBy((task) => task.datasetName), }, { title: "Stats", diff --git a/frontend/javascripts/dashboard/explorative_annotations_view.tsx b/frontend/javascripts/dashboard/explorative_annotations_view.tsx index 6710eba5b37..a4a875e90d3 100644 --- a/frontend/javascripts/dashboard/explorative_annotations_view.tsx +++ b/frontend/javascripts/dashboard/explorative_annotations_view.tsx @@ -731,7 +731,7 @@ class ExplorativeAnnotationsView extends React.PureComponent { onClose={_.partial(this.editTagFromAnnotation, annotation, false, tag)} tag={tag} closable={ - !(tag === annotation.dataSetName || AnnotationContentTypes.includes(tag)) && + !(tag === annotation.datasetName || AnnotationContentTypes.includes(tag)) && !this.state.shouldShowArchivedTracings } /> diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index 4899b9dc750..5ae4726e4fc 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -1072,7 +1072,7 @@ class TracingApi { try { const annotation = await requestTask(); - const isDifferentDataset = state.dataset.name !== annotation.dataSetName; + const isDifferentDataset = state.dataset.name !== annotation.datasetName; const isDifferentTaskType = annotation.task.type.id !== task.type.id; const involvesVolumeTask = state.tracing.volumes.length > 0 || getVolumeDescriptors(annotation).length > 0; diff --git a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx index 9f3f5f52d9d..63448e937e4 100644 --- a/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/merge_modal_view.tsx @@ -151,7 +151,7 @@ class _MergeModalView extends PureComponent { }; async mergeAnnotationIntoActiveTracing(annotation: APIAnnotation): Promise { - if (annotation.dataSetName !== Store.getState().dataset.name) { + if (annotation.datasetName !== Store.getState().dataset.name) { Toast.error(messages["merge.different_dataset"]); return; } diff --git a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts index e94e30eba78..8fda5d985bc 
100644 --- a/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/skeletontracing_server_objects.ts @@ -182,7 +182,7 @@ export const annotation: APIAnnotation = { stats: {}, }, ], - dataSetName: "ROI2017_wkw", + datasetName: "ROI2017_wkw", organization: "Connectomics Department", dataStore: { name: "localhost", diff --git a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts index da08d1ce260..410332b956b 100644 --- a/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/tasktracing_server_objects.ts @@ -92,7 +92,7 @@ export const annotation: APIAnnotation = { recommendedConfiguration: null, tracingType: "skeleton", }, - dataSet: "ROI2017_wkw", + datasetId: "66f3c82966010034942e9740", neededExperience: { domain: "oxalis", value: 1, @@ -124,7 +124,7 @@ export const annotation: APIAnnotation = { stats: {}, }, ], - dataSetName: "ROI2017_wkw", + datasetName: "ROI2017_wkw", organization: "Connectomics Department", dataStore: { name: "localhost", diff --git a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts index d5361eb30c3..886879d6916 100644 --- a/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts +++ b/frontend/javascripts/test/fixtures/volumetracing_server_objects.ts @@ -91,7 +91,7 @@ export const annotation: APIAnnotation = { stats: {}, }, ], - dataSetName: "ROI2017_wkw", + datasetName: "ROI2017_wkw", organization: "Connectomics Department", dataStore: { name: "localhost", diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index ff4516e01a5..d930f18cbae 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -448,7 +448,8 @@ export type APITask = { readonly boundingBoxVec6?: Vector6; readonly created: number; readonly creationInfo: string | null | undefined; - readonly dataSet: string; + readonly datasetId: APIDataset["id"]; + readonly datasetName: string; readonly editPosition: Vector3; readonly editRotation: Vector3; readonly id: string; @@ -477,7 +478,7 @@ export type EditableLayerProperties = Partial<{ export type APIAnnotationInfo = { readonly annotationLayers: Array; readonly datasetId: APIDataset["id"]; - readonly dataSetName: string; + readonly datasetName: string; readonly organization: string; readonly description: string; readonly modified: number; @@ -499,7 +500,7 @@ export type APIAnnotationInfo = { export function annotationToCompact(annotation: APIAnnotation): APIAnnotationInfo { const { - dataSetName, + datasetName, description, modified, id, @@ -519,7 +520,7 @@ export function annotationToCompact(annotation: APIAnnotation): APIAnnotationInf return { datasetId, annotationLayers, - dataSetName, + datasetName, organization, description, modified, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SkeletonElementDefaults.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SkeletonElementDefaults.scala index 2f6602d498b..57a56ddd72d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SkeletonElementDefaults.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SkeletonElementDefaults.scala @@ -1,10 +1,12 @@ package com.scalableminds.webknossos.datastore.helpers 
-import com.scalableminds.util.geometry.{Vec3Int, Vec3Double} +import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.webknossos.datastore.SkeletonTracing.{Node, SkeletonTracing} object SkeletonTracingDefaults extends ProtoGeometryImplicits { private val datasetName = "" + private val datasetId = "" private val trees = Seq() private def createdTimestamp = System.currentTimeMillis() private val boundingBox = None @@ -16,7 +18,8 @@ object SkeletonTracingDefaults extends ProtoGeometryImplicits { private val userBoundingBox = None def createInstance: SkeletonTracing = - SkeletonTracing(datasetName, + SkeletonTracing(datasetId, + datasetName, trees, createdTimestamp, boundingBox, diff --git a/webknossos-datastore/proto/SkeletonTracing.proto b/webknossos-datastore/proto/SkeletonTracing.proto index 7f9ebc91e38..7ea08f8a6b4 100644 --- a/webknossos-datastore/proto/SkeletonTracing.proto +++ b/webknossos-datastore/proto/SkeletonTracing.proto @@ -62,19 +62,20 @@ message TreeGroup { } message SkeletonTracing { - required string datasetName = 1; // used when parsing and handling nmls, not used in tracing store anymore, do not rely on correct values - repeated Tree trees = 2; - required int64 createdTimestamp = 3; - optional BoundingBoxProto boundingBox = 4; - optional int32 activeNodeId = 5; - required Vec3IntProto editPosition = 6; - required Vec3DoubleProto editRotation = 7; - required double zoomLevel = 8; - required int64 version = 9; - optional BoundingBoxProto userBoundingBox = 10; - repeated TreeGroup treeGroups = 11; - repeated NamedBoundingBoxProto userBoundingBoxes = 12; - optional string organizationId = 13; // used when parsing and handling nmls, not used in tracing store anymore, do not rely on correct values + required string datasetId = 1; + required string datasetName = 2; // used when parsing and handling nmls, not used in tracing store anymore, do not rely on correct values + repeated Tree trees = 3; + required int64 createdTimestamp = 4; + optional BoundingBoxProto boundingBox = 5; + optional int32 activeNodeId = 6; + required Vec3IntProto editPosition = 7; + required Vec3DoubleProto editRotation = 8; + required double zoomLevel = 9; + required int64 version = 10; + optional BoundingBoxProto userBoundingBox = 11; + repeated TreeGroup treeGroups = 12; + repeated NamedBoundingBoxProto userBoundingBoxes = 13; + optional string organizationId = 14; // used when parsing and handling nmls, not used in tracing store anymore, do not rely on correct values repeated AdditionalCoordinateProto editPositionAdditionalCoordinates = 21; repeated AdditionalAxisProto additionalAxes = 22; // Additional axes for which this tracing is defined } From 852df001d506bd18e68c82b31d9987b74dda551c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Thu, 10 Oct 2024 17:32:39 +0200 Subject: [PATCH 032/129] WIP: adapt annotation upload & task upload to use datasetId --- app/controllers/AnnotationIOController.scala | 143 ++++++------- app/controllers/TaskController.scala | 5 +- .../WKRemoteWorkerController.scala | 2 +- app/models/annotation/AnnotationService.scala | 14 +- .../annotation/AnnotationUploadService.scala | 197 ++++++++++-------- app/models/annotation/nml/NmlParser.scala | 88 ++++---- app/models/annotation/nml/NmlResults.scala | 11 +- app/models/dataset/Dataset.scala | 10 + app/models/organization/Organization.scala | 11 +- app/models/task/TaskCreationParameters.scala | 2 +- 
app/models/task/TaskCreationService.scala | 19 +- .../admin/task/task_create_form_view.tsx | 7 +- .../helpers/SkeletonElementDefaults.scala | 5 +- .../services/uploading/UploadService.scala | 2 +- .../proto/SkeletonTracing.proto | 27 ++- 15 files changed, 299 insertions(+), 244 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index ba0c1e60086..2a5dd5210d1 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -53,7 +53,7 @@ import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData} import security.WkEnv -import utils.{WkConf} +import utils.WkConf import scala.concurrent.{ExecutionContext, Future} @@ -102,58 +102,52 @@ class AnnotationIOController @Inject()( log() { val shouldCreateGroupForEachFile: Boolean = request.body.dataParts("createGroupForEachFile").headOption.contains("true") - val overwritingDatasetName: Option[String] = - request.body.dataParts.get("datasetName").flatMap(_.headOption) + val overwritingDatasetId: Option[String] = + request.body.dataParts.get("datasetId").flatMap(_.headOption) val overwritingOrganizationId: Option[String] = request.body.dataParts.get("organizationId").flatMap(_.headOption) val attachedFiles = request.body.files.map(f => (f.ref.path.toFile, f.filename)) - val parsedFiles = - annotationUploadService.extractFromFiles(attachedFiles, - useZipName = true, - overwritingDatasetName, - overwritingOrganizationId) - val parsedFilesWrapped = - annotationUploadService.wrapOrPrefixGroups(parsedFiles.parseResults, shouldCreateGroupForEachFile) - val parseResultsFiltered: List[NmlParseResult] = parsedFilesWrapped.filter(_.succeeded) - - if (parseResultsFiltered.isEmpty) { - returnError(parsedFiles) - } else { - for { - parseSuccesses <- Fox.serialCombined(parseResultsFiltered)(r => r.toSuccessBox) - name = nameForUploaded(parseResultsFiltered.map(_.fileName)) - description = descriptionForNMLs(parseResultsFiltered.map(_.description)) - wkUrl = wkUrlsForNMLs(parseResultsFiltered.map(_.wkUrl)) - _ <- assertNonEmpty(parseSuccesses) - skeletonTracings = parseSuccesses.flatMap(_.skeletonTracing) - // Create a list of volume layers for each uploaded (non-skeleton-only) annotation. 
- // This is what determines the merging strategy for volume layers - volumeLayersGroupedRaw = parseSuccesses.map(_.volumeLayers).filter(_.nonEmpty) - dataset <- findDatasetForUploadedAnnotations(skeletonTracings, - volumeLayersGroupedRaw.flatten.map(_.tracing), - wkUrl) - dataSource <- datasetService.dataSourceFor(dataset) ?~> Messages("dataset.notImported", dataset.name) - usableDataSource <- dataSource.toUsable.toFox ?~> Messages("dataset.notImported", dataset.name) - volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataset, usableDataSource) - tracingStoreClient <- tracingStoreService.clientFor(dataset) - mergedVolumeLayers <- mergeAndSaveVolumeLayers(volumeLayersGrouped, - tracingStoreClient, - parsedFiles.otherFiles, - usableDataSource) - mergedSkeletonLayers <- mergeAndSaveSkeletonLayers(skeletonTracings, tracingStoreClient) - annotation <- annotationService.createFrom(request.identity, - dataset, - mergedSkeletonLayers ::: mergedVolumeLayers, - AnnotationType.Explorational, - name, - description) - _ = analyticsService.track(UploadAnnotationEvent(request.identity, annotation)) - } yield - JsonOk( - Json.obj("annotation" -> Json.obj("typ" -> annotation.typ, "id" -> annotation.id)), - Messages("nml.file.uploadSuccess") - ) - } + for { + parsedFiles <- annotationUploadService.extractFromFiles( + attachedFiles, + SharedParsingParameters(useZipName = true, overwritingDatasetId, overwritingOrganizationId)) + parsedFilesWrapped = annotationUploadService.wrapOrPrefixGroups(parsedFiles.parseResults, + shouldCreateGroupForEachFile) + parseResultsFiltered: List[NmlParseResult] = parsedFilesWrapped.filter(_.succeeded) + // _ <- bool2Fox(parseResultsFiltered.isEmpty) ?~> returnError(parsedFiles)) TODOM: Find a proper way to return an error when parseResultsFiltered.isEmpty + parseSuccesses <- Fox.serialCombined(parseResultsFiltered)(r => r.toSuccessBox) + name = nameForUploaded(parseResultsFiltered.map(_.fileName)) + description = descriptionForNMLs(parseResultsFiltered.map(_.description)) + wkUrl = wkUrlsForNMLs(parseResultsFiltered.map(_.wkUrl)) + _ <- assertNonEmpty(parseSuccesses) + skeletonTracingsWithDatasetId = parseSuccesses.flatMap(_.skeletonTracingOpt) + // Create a list of volume layers for each uploaded (non-skeleton-only) annotation. 
+ // This is what determines the merging strategy for volume layers + volumeLayersGroupedRaw = parseSuccesses.map(_.volumeLayers).filter(_.nonEmpty) + dataset <- findDatasetForUploadedAnnotations(skeletonTracingsWithDatasetId, + volumeLayersGroupedRaw.flatten, + wkUrl) + dataSource <- datasetService.dataSourceFor(dataset) ?~> Messages("dataset.notImported", dataset.name) + usableDataSource <- dataSource.toUsable.toFox ?~> Messages("dataset.notImported", dataset.name) + volumeLayersGrouped <- adaptVolumeTracingsToFallbackLayer(volumeLayersGroupedRaw, dataset, usableDataSource) + tracingStoreClient <- tracingStoreService.clientFor(dataset) + mergedVolumeLayers <- mergeAndSaveVolumeLayers(volumeLayersGrouped, + tracingStoreClient, + parsedFiles.otherFiles, + usableDataSource) + mergedSkeletonLayers <- mergeAndSaveSkeletonLayers(skeletonTracingsWithDatasetId, tracingStoreClient) + annotation <- annotationService.createFrom(request.identity, + dataset, + mergedSkeletonLayers ::: mergedVolumeLayers, + AnnotationType.Explorational, + name, + description) + _ = analyticsService.track(UploadAnnotationEvent(request.identity, annotation)) + } yield + JsonOk( + Json.obj("annotation" -> Json.obj("typ" -> annotation.typ, "id" -> annotation.id)), + Messages("nml.file.uploadSuccess") + ) } } @@ -200,14 +194,14 @@ class AnnotationIOController @Inject()( )) } - private def mergeAndSaveSkeletonLayers(skeletonTracings: List[SkeletonTracing], + private def mergeAndSaveSkeletonLayers(skeletonTracings: List[SkeletonTracingWithDatasetId], tracingStoreClient: WKRemoteTracingStoreClient): Fox[List[AnnotationLayer]] = if (skeletonTracings.isEmpty) Fox.successful(List()) else { for { mergedTracingId <- tracingStoreClient.mergeSkeletonTracingsByContents( - SkeletonTracings(skeletonTracings.map(t => SkeletonTracingOpt(Some(t)))), + SkeletonTracings(skeletonTracings.map(t => SkeletonTracingOpt(Some(t.skeletonTracing)))), persistTracing = true) } yield List( @@ -218,14 +212,14 @@ class AnnotationIOController @Inject()( } private def assertNonEmpty(parseSuccesses: List[NmlParseSuccess]) = - bool2Fox(parseSuccesses.exists(p => p.skeletonTracing.nonEmpty || p.volumeLayers.nonEmpty)) ?~> "nml.file.noFile" + bool2Fox(parseSuccesses.exists(p => p.skeletonTracingOpt.nonEmpty || p.volumeLayers.nonEmpty)) ?~> "nml.file.noFile" private def findDatasetForUploadedAnnotations( - skeletonTracings: List[SkeletonTracing], - volumeTracings: List[VolumeTracing], + skeletonTracings: List[SkeletonTracingWithDatasetId], + volumeTracings: List[UploadedVolumeLayer], wkUrl: String)(implicit mp: MessagesProvider, ctx: DBAccessContext): Fox[Dataset] = for { - datasetName <- assertAllOnSameDataset(skeletonTracings, volumeTracings) ?~> "nml.file.differentDatasets" + datasetId <- assertAllOnSameDataset(skeletonTracings, volumeTracings) ?~> "nml.file.differentDatasets" organizationIdOpt <- assertAllOnSameOrganization(skeletonTracings, volumeTracings) ?~> "nml.file.differentDatasets" organizationIdOpt <- Fox.runOptional(organizationIdOpt) { organizationDAO.findOne(_)(GlobalAccessContext).map(_._id) @@ -234,20 +228,17 @@ class AnnotationIOController @Inject()( } else { Messages("organization.notFound", organizationIdOpt.getOrElse("")) }) ~> NOT_FOUND organizationId <- Fox.fillOption(organizationIdOpt) { - datasetDAO.getOrganizationIdForDataset(datasetName)(GlobalAccessContext) - } ?~> Messages("dataset.noAccess", datasetName) ~> FORBIDDEN - // TODO: Check whether thats ok, the datasets are addressed via the name by the annotations. 
This looks broken to me. - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) ?~> (if (wkUrl.nonEmpty && conf.Http.uri != wkUrl) { - Messages( - "dataset.noAccess.wrongHost", - datasetName, - wkUrl, - conf.Http.uri) - } else { - Messages( - "dataset.noAccess", - datasetName) - }) ~> FORBIDDEN + organizationDAO.findOrganizationForDataset(datasetId)(GlobalAccessContext) + } ?~> Messages("dataset.noAccess", datasetId) ~> FORBIDDEN + dataset <- datasetDAO.findOne(datasetId) ?~> (if (wkUrl.nonEmpty && conf.Http.uri != wkUrl) { + Messages("dataset.noAccess.wrongHost", + datasetId, + wkUrl, + conf.Http.uri) + } else { + Messages("dataset.noAccess", datasetId) + }) ~> FORBIDDEN + _ <- bool2Fox(organizationId == dataset._organization) ?~> Messages("dataset.noAccess", datasetId) ~> FORBIDDEN } yield dataset private def nameForUploaded(fileNames: Seq[String]) = @@ -274,13 +265,15 @@ class AnnotationIOController @Inject()( Future.successful(JsonBadRequest(Messages("nml.file.noFile"))) } - private def assertAllOnSameDataset(skeletons: List[SkeletonTracing], volumes: List[VolumeTracing]): Fox[String] = - SequenceUtils.findUniqueElement(volumes.map(_.datasetName) ++ skeletons.map(_.datasetName)).toFox + private def assertAllOnSameDataset(skeletons: List[SkeletonTracingWithDatasetId], + volumes: List[UploadedVolumeLayer]): Fox[ObjectId] = + SequenceUtils.findUniqueElement(volumes.map(_.datasetId) ++ skeletons.map(_.datasetId)).toFox - private def assertAllOnSameOrganization(skeletons: List[SkeletonTracing], - volumes: List[VolumeTracing]): Fox[Option[String]] = { + private def assertAllOnSameOrganization(skeletons: List[SkeletonTracingWithDatasetId], + volumes: List[UploadedVolumeLayer]): Fox[Option[String]] = { // Note that organizationIds are optional. 
Tracings with no organization attribute are ignored here - val organizationIds = skeletons.flatMap(_.organizationId) ::: volumes.flatMap(_.organizationId) + val organizationIds = skeletons.flatMap(_.skeletonTracing.organizationId) ::: volumes.flatMap( + _.tracing.organizationId) for { _ <- Fox.runOptional(organizationIds.headOption)(name => bool2Fox(organizationIds.forall(_ == name))) } yield organizationIds.headOption diff --git a/app/controllers/TaskController.scala b/app/controllers/TaskController.scala index bcfe1323e4a..f13e45a7fe4 100755 --- a/app/controllers/TaskController.scala +++ b/app/controllers/TaskController.scala @@ -86,9 +86,8 @@ class TaskController @Inject()(taskCreationService: TaskCreationService, project <- projectDAO .findOneByNameAndOrganization(params.projectName, request.identity._organization) ?~> "project.notFound" ~> NOT_FOUND _ <- Fox.assertTrue(userService.isTeamManagerOrAdminOf(request.identity, project._team)) - extractedFiles = nmlService.extractFromFiles(inputFiles.map(f => (f.ref.path.toFile, f.filename)), - useZipName = false, - isTaskUpload = true) + extractedFiles <- nmlService.extractFromFiles(inputFiles.map(f => (f.ref.path.toFile, f.filename)), + SharedParsingParameters(useZipName = false, isTaskUpload = true)) extractedTracingBoxesRaw: List[TracingBoxContainer] = extractedFiles.toBoxes extractedTracingBoxes: List[TracingBoxContainer] <- taskCreationService .addVolumeFallbackBoundingBoxes(extractedTracingBoxesRaw, request.identity._organization) diff --git a/app/controllers/WKRemoteWorkerController.scala b/app/controllers/WKRemoteWorkerController.scala index fb479b3477f..2cf0d59367f 100644 --- a/app/controllers/WKRemoteWorkerController.scala +++ b/app/controllers/WKRemoteWorkerController.scala @@ -111,7 +111,7 @@ class WKRemoteWorkerController @Inject()(jobDAO: JobDAO, _ <- workerDAO.findOneByKey(key) ?~> "jobs.worker.notFound" jobIdParsed <- ObjectId.fromString(id) organizationId <- jobDAO.organizationIdForJobId(jobIdParsed) ?~> "job.notFound" - dataset <- datasetDAO.findOneByNameAndOrganization(request.body, organizationId) + dataset <- datasetDAO.findOneByNameAndOrganization(request.body, organizationId) // TODOM: findOneByPathAndOrganization might be the correct method aiInference <- aiInferenceDAO.findOneByJobId(jobIdParsed) ?~> "aiInference.notFound" _ <- aiInferenceDAO.updateDataset(aiInference._id, dataset._id) } yield Ok diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index fe2af56b5dd..50c83e5d27d 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -525,10 +525,12 @@ class AnnotationService @Inject()( } yield result } - def createSkeletonTracingBase(datasetId: ObjectId, + def createSkeletonTracingBase(datasetIdOpt: Option[ObjectId], + datasetName: String, + organizationId: String, boundingBox: Option[BoundingBox], startPosition: Vec3Int, - startRotation: Vec3Double)(implicit ctx: DBAccessContext): Fox[SkeletonTracing] = { + startRotation: Vec3Double)(implicit ctx: DBAccessContext, m: MessagesProvider) : Fox[SkeletonTracing] = { val initialNode = NodeDefaults.createInstance.withId(1).withPosition(startPosition).withRotation(startRotation) val initialTree = Tree( 1, @@ -541,10 +543,9 @@ class AnnotationService @Inject()( System.currentTimeMillis() ) for { - dataset <- datasetDAO.findOne(datasetId) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetIdOpt, datasetName, organizationId) } yield 
SkeletonTracingDefaults.createInstance.copy( datasetName = dataset.name, - datasetId = dataset._id.toString, boundingBox = boundingBox.flatMap { box => if (box.isEmpty) None else Some(box) }, @@ -555,7 +556,8 @@ class AnnotationService @Inject()( ) } - def createVolumeTracingBase(datasetName: String, + def createVolumeTracingBase(datasetIdOpt: Option[ObjectId], + datasetName: String, organizationId: String, boundingBox: Option[BoundingBox], startPosition: Vec3Int, @@ -565,7 +567,7 @@ class AnnotationService @Inject()( m: MessagesProvider): Fox[VolumeTracing] = for { organization <- organizationDAO.findOne(organizationId) - dataset <- datasetDAO.findOneByPathAndOrganization(datasetName, organizationId) ?~> Messages("dataset.notFound", + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetIdOpt,datasetName, organizationId) ?~> Messages("dataset.notFound", datasetName) dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) diff --git a/app/models/annotation/AnnotationUploadService.scala b/app/models/annotation/AnnotationUploadService.scala index bceaee9d59d..a9a133f423e 100644 --- a/app/models/annotation/AnnotationUploadService.scala +++ b/app/models/annotation/AnnotationUploadService.scala @@ -1,8 +1,12 @@ package models.annotation +import com.scalableminds.util.accesscontext.DBAccessContext + import java.io.{File, FileInputStream, InputStream} import java.nio.file.{Files, Path, StandardCopyOption} import com.scalableminds.util.io.ZipIO +import com.scalableminds.util.requestparsing.ObjectId +import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, TreeGroup} import com.scalableminds.webknossos.datastore.VolumeTracing.{SegmentGroup, VolumeTracing} import com.typesafe.scalalogging.LazyLogging @@ -15,19 +19,30 @@ import net.liftweb.common.{Box, Empty, Failure, Full} import net.liftweb.common.Box.tryo import play.api.i18n.MessagesProvider -case class UploadedVolumeLayer(tracing: VolumeTracing, dataZipLocation: String, name: Option[String]) { +import scala.concurrent.ExecutionContext + +case class UploadedVolumeLayer(tracing: VolumeTracing, + datasetId: ObjectId, + dataZipLocation: String, + name: Option[String]) { def getDataZipFrom(otherFiles: Map[String, File]): Option[File] = otherFiles.get(dataZipLocation) } -class AnnotationUploadService @Inject()(tempFileService: TempFileService) extends LazyLogging { +case class SkeletonTracingWithDatasetId(skeletonTracing: SkeletonTracing, datasetId: ObjectId) + +case class SharedParsingParameters(useZipName: Boolean, + overwritingDatasetId: Option[String] = None, + overwritingOrganizationId: Option[String] = None, + isTaskUpload: Boolean = false) - private def extractFromNmlFile(file: File, - name: String, - overwritingDatasetName: Option[String], - overwritingOrganizationId: Option[String], - isTaskUpload: Boolean)(implicit m: MessagesProvider): NmlParseResult = - extractFromNml(new FileInputStream(file), name, overwritingDatasetName, overwritingOrganizationId, isTaskUpload) +class AnnotationUploadService @Inject()(tempFileService: TempFileService, nmlParser: NmlParser) extends LazyLogging { + + private def extractFromNmlFile(file: File, name: String, sharedParsingParameters: SharedParsingParameters)( + implicit m: MessagesProvider, + ec: ExecutionContext, + ctx: DBAccessContext): Fox[NmlParseResult] = + extractFromNml(new FileInputStream(file), name, sharedParsingParameters) private def 
formatChain(chain: Box[Failure]): String = chain match { case Full(failure) => @@ -37,44 +52,46 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend private def extractFromNml(inputStream: InputStream, name: String, - overwritingDatasetName: Option[String], - overwritingOrganizationId: Option[String], - isTaskUpload: Boolean, - basePath: Option[String] = None)(implicit m: MessagesProvider): NmlParseResult = - NmlParser.parse(name, inputStream, overwritingDatasetName, overwritingOrganizationId, isTaskUpload, basePath) match { - case Full((skeletonTracing, uploadedVolumeLayers, description, wkUrl)) => - NmlParseSuccess(name, skeletonTracing, uploadedVolumeLayers, description, wkUrl) + sharedParsingParameters: SharedParsingParameters, + basePath: Option[String] = None)(implicit m: MessagesProvider, + ec: ExecutionContext, + ctx: DBAccessContext): Fox[NmlParseResult] = { + val parserOutput = + nmlParser.parse( + name, + inputStream, + sharedParsingParameters.overwritingDatasetId, + sharedParsingParameters.overwritingOrganizationId, + sharedParsingParameters.isTaskUpload, + basePath + ) + parserOutput.futureBox.map { + case Full((skeletonTracingOpt, uploadedVolumeLayers, description, wkUrl)) => + NmlParseSuccess(name, skeletonTracingOpt, uploadedVolumeLayers, description, wkUrl) case Failure(msg, _, chain) => NmlParseFailure(name, msg + chain.map(_ => formatChain(chain)).getOrElse("")) case Empty => NmlParseEmpty(name) } + } - private def extractFromZip(file: File, - zipFileName: Option[String], - useZipName: Boolean, - overwritingDatasetName: Option[String], - overwritingOrganizationId: Option[String], - isTaskUpload: Boolean)(implicit m: MessagesProvider): MultiNmlParseResult = { + private def extractFromZip(file: File, zipFileName: Option[String], sharedParsingParameters: SharedParsingParameters, + )(implicit m: MessagesProvider, ec: ExecutionContext, ctx: DBAccessContext): Fox[MultiNmlParseResult] = { val name = zipFileName getOrElse file.getName var otherFiles = Map.empty[String, File] - var parseResults = List.empty[NmlParseResult] + var pendingResults = List.empty[Fox[NmlParseResult]] ZipIO.withUnziped(file) { (filename, inputStream) => if (filename.toString.endsWith(".nml")) { - val result = - extractFromNml(inputStream, - filename.toString, - overwritingDatasetName, - overwritingOrganizationId, - isTaskUpload, - Some(file.getPath)) - parseResults ::= (if (useZipName) result.withName(name) else result) + val parsedResult = for { + result <- extractFromNml(inputStream, filename.toString, sharedParsingParameters, Some(file.getPath)) + } yield if (sharedParsingParameters.useZipName) result.withName(name) else result + pendingResults ::= parsedResult } else { val tempFile: Path = tempFileService.create(file.getPath.replaceAll("/", "_") + filename.toString) Files.copy(inputStream, tempFile, StandardCopyOption.REPLACE_EXISTING) otherFiles += (file.getPath + filename.toString -> tempFile.toFile) } } - MultiNmlParseResult(parseResults, otherFiles) + Fox.combined(pendingResults).map(parsedResults => MultiNmlParseResult(parsedResults, otherFiles)) } def wrapOrPrefixGroups(parseResults: List[NmlParseResult], @@ -85,17 +102,23 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend addPrefixesToGroupItemNames(parseResults) private def addPrefixesToGroupItemNames(parseResults: List[NmlParseResult]): List[NmlParseResult] = { - def renameTrees(name: String, tracing: SkeletonTracing): SkeletonTracing = { + def renameTrees(name: String, 
tracingWithId: SkeletonTracingWithDatasetId): SkeletonTracingWithDatasetId = { val prefix = name.replaceAll("\\.[^.]*$", "") + "_" - tracing.copy(trees = tracing.trees.map(tree => tree.copy(name = prefix + tree.name))) + tracingWithId.copy( + skeletonTracing = tracingWithId.skeletonTracing.copy(trees = tracingWithId.skeletonTracing.trees.map(tree => + tree.copy(name = prefix + tree.name)))) } // Segments are not renamed in this case. Segment ids are adjusted in the separate merge step. if (parseResults.length > 1) { parseResults.map { - case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description, wkUrl) => - NmlParseSuccess(name, Some(renameTrees(name, skeletonTracing)), uploadedVolumeLayers, description, wkUrl) + case NmlParseSuccess(name, Some(skeletonTracingWithDatasetId), uploadedVolumeLayers, description, wkUrl) => + NmlParseSuccess(name, + Some(renameTrees(name, skeletonTracingWithDatasetId)), + uploadedVolumeLayers, + description, + wkUrl) case r => r } } else { @@ -112,11 +135,13 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend if (segmentGroups.isEmpty) 0 else Math.max(segmentGroups.map(_.groupId).max, getMaximumSegmentGroupId(segmentGroups.flatMap(_.children))) - def wrapTreesInGroup(name: String, tracing: SkeletonTracing): SkeletonTracing = { - val unusedGroupId = getMaximumTreeGroupId(tracing.treeGroups) + 1 - val newTrees = tracing.trees.map(tree => tree.copy(groupId = Some(tree.groupId.getOrElse(unusedGroupId)))) - val newTreeGroups = Seq(TreeGroup(name, unusedGroupId, tracing.treeGroups, isExpanded = Some(true))) - tracing.copy(trees = newTrees, treeGroups = newTreeGroups) + def wrapTreesInGroup(name: String, tracing: SkeletonTracingWithDatasetId): SkeletonTracingWithDatasetId = { + val unusedGroupId = getMaximumTreeGroupId(tracing.skeletonTracing.treeGroups) + 1 + val newTrees = + tracing.skeletonTracing.trees.map(tree => tree.copy(groupId = Some(tree.groupId.getOrElse(unusedGroupId)))) + val newTreeGroups = Seq( + TreeGroup(name, unusedGroupId, tracing.skeletonTracing.treeGroups, isExpanded = Some(true))) + tracing.copy(skeletonTracing = tracing.skeletonTracing.copy(trees = newTrees, treeGroups = newTreeGroups)) } def wrapSegmentsInGroup(name: String, tracing: VolumeTracing): VolumeTracing = { @@ -131,9 +156,9 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend volumeLayers.map(v => v.copy(tracing = wrapSegmentsInGroup(name, v.tracing))) parseResults.map { - case NmlParseSuccess(name, Some(skeletonTracing), uploadedVolumeLayers, description, wkUrl) => + case NmlParseSuccess(name, Some(skeletonTracingWithDatasetId), uploadedVolumeLayers, description, wkUrl) => NmlParseSuccess(name, - Some(wrapTreesInGroup(name, skeletonTracing)), + Some(wrapTreesInGroup(name, skeletonTracingWithDatasetId)), wrapVolumeLayers(name, uploadedVolumeLayers), description, wkUrl) @@ -141,58 +166,54 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService) extend } } - def extractFromFiles(files: Seq[(File, String)], - useZipName: Boolean, - overwritingDatasetName: Option[String] = None, - overwritingOrganizationId: Option[String] = None, - isTaskUpload: Boolean = false)(implicit m: MessagesProvider): MultiNmlParseResult = - files.foldLeft(NmlResults.MultiNmlParseResult()) { - case (acc, (file, name)) => - if (name.endsWith(".zip")) - tryo(new java.util.zip.ZipFile(file)).map(ZipIO.forallZipEntries(_)(_.getName.endsWith(".zip"))) match { - case Full(allZips) => - if (allZips) - 
acc.combineWith( - extractFromFiles( - extractFromZip(file, - Some(name), - useZipName, - overwritingDatasetName, - overwritingOrganizationId, - isTaskUpload).otherFiles.toSeq.map(tuple => (tuple._2, tuple._1)), - useZipName, - overwritingDatasetName, - overwritingOrganizationId, - isTaskUpload - )) - else - acc.combineWith( - extractFromFile(file, - name, - useZipName, - overwritingDatasetName, - overwritingOrganizationId, - isTaskUpload)) - case _ => acc + def extractFromFiles(files: Seq[(File, String)], sharedParams: SharedParsingParameters)( + implicit m: MessagesProvider, + ec: ExecutionContext, + ctx: DBAccessContext): Fox[MultiNmlParseResult] = { + val accResult = NmlResults.MultiNmlParseResult() + Fox + .serialCombined(files.iterator) { + case (file, name) => + if (name.endsWith(".zip")) { + tryo(new java.util.zip.ZipFile(file)).map(ZipIO.forallZipEntries(_)(_.getName.endsWith(".zip"))) match { + case Full(allZips) => + if (allZips) { + for { + parsedZipResult <- extractFromZip(file, Some(name), sharedParams) + otherFiles = parsedZipResult.otherFiles.toSeq.map(tuple => (tuple._2, tuple._1)) + parsedFileResults <- extractFromFiles(otherFiles, sharedParams) + _ = accResult.combineWith(parsedFileResults) + } yield () + } else { + for { + parsedFile <- extractFromFile(file, name, sharedParams) + _ = accResult.combineWith(parsedFile) + } yield () + } + case _ => Fox.successful(()) + } } else - acc.combineWith( - extractFromFile(file, name, useZipName, overwritingDatasetName, overwritingOrganizationId, isTaskUpload)) - } + for { + parsedFromFile <- extractFromFile(file, name, sharedParams) + _ = accResult.combineWith(parsedFromFile) + } yield () + } + .map(_ => accResult) + + } - private def extractFromFile(file: File, - fileName: String, - useZipName: Boolean, - overwritingDatasetName: Option[String], - overwritingOrganizationId: Option[String], - isTaskUpload: Boolean)(implicit m: MessagesProvider): MultiNmlParseResult = + private def extractFromFile(file: File, fileName: String, sharedParsingParameters: SharedParsingParameters)( + implicit m: MessagesProvider, + ec: ExecutionContext, + ctx: DBAccessContext): Fox[MultiNmlParseResult] = if (fileName.endsWith(".zip")) { logger.trace("Extracting from Zip file") - extractFromZip(file, Some(fileName), useZipName, overwritingDatasetName, overwritingOrganizationId, isTaskUpload) + extractFromZip(file, Some(fileName), sharedParsingParameters) } else { logger.trace("Extracting from Nml file") - val parseResult = - extractFromNmlFile(file, fileName, overwritingDatasetName, overwritingOrganizationId, isTaskUpload) - MultiNmlParseResult(List(parseResult), Map.empty) + for { + parseResult <- extractFromNmlFile(file, fileName, sharedParsingParameters) + } yield MultiNmlParseResult(List(parseResult), Map.empty) } } diff --git a/app/models/annotation/nml/NmlParser.scala b/app/models/annotation/nml/NmlParser.scala index b3567fd2079..a08426d46e9 100755 --- a/app/models/annotation/nml/NmlParser.scala +++ b/app/models/annotation/nml/NmlParser.scala @@ -1,7 +1,10 @@ package models.annotation.nml +import com.scalableminds.util.accesscontext.DBAccessContext import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int} +import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.ExtendedTypes.{ExtendedDouble, ExtendedString} +import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.JsonHelper.bool2Box import com.scalableminds.webknossos.datastore.SkeletonTracing._ import 
com.scalableminds.webknossos.datastore.MetadataEntry.MetadataEntryProto @@ -20,16 +23,19 @@ import com.scalableminds.webknossos.tracingstore.tracings.ColorGenerator import com.scalableminds.webknossos.tracingstore.tracings.skeleton.updating.TreeType import com.scalableminds.webknossos.tracingstore.tracings.skeleton.{MultiComponentTreeSplitter, TreeValidator} import com.typesafe.scalalogging.LazyLogging -import models.annotation.UploadedVolumeLayer +import models.annotation.{SkeletonTracingWithDatasetId, UploadedVolumeLayer} +import models.dataset.DatasetDAO import net.liftweb.common.Box._ import net.liftweb.common.{Box, Empty, Failure, Full} import play.api.i18n.{Messages, MessagesProvider} import java.io.InputStream +import javax.inject.Inject import scala.collection.{immutable, mutable} +import scala.concurrent.ExecutionContext import scala.xml.{Attribute, NodeSeq, XML, Node => XMLNode} -object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGenerator { +class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with ProtoGeometryImplicits with ColorGenerator { private val DEFAULT_TIME = 0L private val DEFAULT_VIEWPORT = 0 @@ -41,12 +47,12 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener def parse(name: String, nmlInputStream: InputStream, - overwritingDatasetName: Option[String], + overwritingDatasetId: Option[String], overwritingOrganizationId: Option[String], isTaskUpload: Boolean, - basePath: Option[String] = None)( - implicit m: MessagesProvider): Box[(Option[SkeletonTracing], List[UploadedVolumeLayer], String, Option[String])] = - try { + basePath: Option[String] = None)(implicit m: MessagesProvider, ec: ExecutionContext, ctx: DBAccessContext) + : Fox[(Option[SkeletonTracingWithDatasetId], List[UploadedVolumeLayer], String, Option[String])] = { + val foxInABox = try { val data = XML.load(nmlInputStream) for { parameters <- (data \ "parameters").headOption ?~ Messages("nml.parameters.notFound") @@ -63,9 +69,8 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener treeGroupsAfterSplit = treesAndGroupsAfterSplitting._2 _ <- TreeValidator.validateTrees(treesSplit, treeGroupsAfterSplit, branchPoints, comments) additionalAxisProtos <- parseAdditionalAxes(parameters \ "additionalAxes") - datasetName = overwritingDatasetName.getOrElse(parseDatasetName(parameters \ "experiment")) - organizationId = if (overwritingDatasetName.isDefined) overwritingOrganizationId - else parseOrganizationId(parameters \ "experiment") + datasetNameFromNml = parseDatasetName(parameters \ "experiment") + organizationId = overwritingOrganizationId.getOrElse(parseOrganizationId(parameters \ "experiment")) } yield { val description = parseDescription(parameters \ "experiment") val wkUrl = parseWkUrl(parameters \ "experiment") @@ -84,14 +89,16 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener logger.debug(s"Parsed NML file. 
Trees: ${treesSplit.size}, Volumes: ${volumes.size}") - val volumeLayers: List[UploadedVolumeLayer] = - volumes.toList.map { v => + for { + parsedDatasetIdOpt <- Fox.runOptional(overwritingDatasetId)(ObjectId.fromString) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(parsedDatasetIdOpt, datasetNameFromNml, organizationId) + volumeLayers: List[UploadedVolumeLayer] = volumes.toList.map { v => UploadedVolumeLayer( VolumeTracing( activeSegmentId = None, boundingBox = boundingBoxToProto(taskBoundingBox.getOrElse(BoundingBox.empty)), // Note: this property may be adapted later in adaptPropertiesToFallbackLayer createdTimestamp = timestamp, - datasetName = datasetName, + datasetName = dataset.name, editPosition = editPosition, editRotation = editRotation, elementClass = ElementClass.uint32, // Note: this property may be adapted later in adaptPropertiesToFallbackLayer @@ -101,7 +108,7 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener zoomLevel = zoomLevel, userBoundingBox = None, userBoundingBoxes = userBoundingBoxes, - organizationId = organizationId, + organizationId = Some(organizationId), segments = v.segments, mappingName = v.mappingName, mappingIsLocked = v.mappingIsLocked, @@ -110,35 +117,37 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener editPositionAdditionalCoordinates = editPositionAdditionalCoordinates, additionalAxes = additionalAxisProtos ), + dataset._id, basePath.getOrElse("") + v.dataZipPath, v.name, ) } - - val skeletonTracingOpt: Option[SkeletonTracing] = - if (treesSplit.isEmpty && userBoundingBoxes.isEmpty) None + skeletonTracingOpt: Option[SkeletonTracingWithDatasetId] = if (treesSplit.isEmpty && userBoundingBoxes.isEmpty) + None else Some( - SkeletonTracing( - datasetName, - treesSplit, - timestamp, - taskBoundingBox, - activeNodeId, - editPosition, - editRotation, - zoomLevel, - version = 0, - None, - treeGroupsAfterSplit, - userBoundingBoxes, - organizationId, - editPositionAdditionalCoordinates, - additionalAxes = additionalAxisProtos + SkeletonTracingWithDatasetId( + SkeletonTracing( + dataset.name, + treesSplit, + timestamp, + taskBoundingBox, + activeNodeId, + editPosition, + editRotation, + zoomLevel, + version = 0, + None, + treeGroupsAfterSplit, + userBoundingBoxes, + Some(organizationId), + editPositionAdditionalCoordinates, + additionalAxes = additionalAxisProtos + ), + dataset._id ) ) - - (skeletonTracingOpt, volumeLayers, description, wkUrl) + } yield (skeletonTracingOpt, volumeLayers, description, wkUrl) // TODO: to fox and flatten } } catch { case e: org.xml.sax.SAXParseException if e.getMessage.startsWith("Premature end of file") => @@ -153,6 +162,13 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener logger.error(s"Failed to parse NML $name due to " + e) Failure(s"Failed to parse NML '$name': " + e.toString) } + foxInABox match { + case Full(value) => value + case Failure(message, cause, _chain) => + logger.error(s"Failed to parse NML $name due to " + cause) + Failure(s"Failed to parse NML '$name': " + message) + } + } private def extractTreeGroups(treeGroupContainerNodes: NodeSeq)( implicit m: MessagesProvider): Box[List[TreeGroup]] = { @@ -336,8 +352,8 @@ object NmlParser extends LazyLogging with ProtoGeometryImplicits with ColorGener private def parseWkUrl(nodes: NodeSeq): Option[String] = nodes.headOption.map(node => getSingleAttribute(node, "wkUrl")) - private def parseOrganizationId(nodes: NodeSeq): Option[String] = - 
nodes.headOption.flatMap(node => getSingleAttributeOpt(node, "organization")) + private def parseOrganizationId(nodes: NodeSeq): String = + nodes.headOption.map(node => getSingleAttribute(node, "organization")).getOrElse("") private def parseActiveNode(nodes: NodeSeq): Option[Int] = nodes.headOption.flatMap(node => getSingleAttribute(node, "id").toIntOpt) diff --git a/app/models/annotation/nml/NmlResults.scala b/app/models/annotation/nml/NmlResults.scala index a89b6ffac40..da03288fc3f 100644 --- a/app/models/annotation/nml/NmlResults.scala +++ b/app/models/annotation/nml/NmlResults.scala @@ -1,11 +1,10 @@ package models.annotation.nml import java.io.File - import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.typesafe.scalalogging.LazyLogging -import models.annotation.UploadedVolumeLayer +import models.annotation.{SkeletonTracingWithDatasetId, UploadedVolumeLayer} import net.liftweb.common.{Box, Empty, Failure, Full} object NmlResults extends LazyLogging { @@ -31,7 +30,7 @@ object NmlResults extends LazyLogging { } case class NmlParseSuccess(fileName: String, - skeletonTracing: Option[SkeletonTracing], + skeletonTracingOpt: Option[SkeletonTracingWithDatasetId], volumeLayers: List[UploadedVolumeLayer], _description: String, _wkUrl: Option[String]) @@ -72,7 +71,7 @@ object NmlResults extends LazyLogging { val successBox = parseResult.toSuccessBox val skeletonBox = successBox match { case Full(success) => - success.skeletonTracing match { + success.skeletonTracingOpt match { case Some(skeleton) => Full(skeleton) case None => Empty } @@ -82,7 +81,7 @@ object NmlResults extends LazyLogging { val volumeBox = successBox match { case Full(success) if success.volumeLayers.length <= 1 => success.volumeLayers.headOption match { - case Some(UploadedVolumeLayer(tracing, dataZipLocation, _)) => + case Some(UploadedVolumeLayer(tracing, datasetId,dataZipLocation, _)) => Full((tracing, otherFiles.get(dataZipLocation))) case None => Empty } @@ -97,7 +96,7 @@ object NmlResults extends LazyLogging { case class TracingBoxContainer(fileName: Box[String], description: Box[Option[String]], - skeleton: Box[SkeletonTracing], + skeleton: Box[SkeletonTracingWithDatasetId], volume: Box[(VolumeTracing, Option[File])]) } diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index 2ac7d3633fe..a0f27b6a52d 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -27,6 +27,7 @@ import controllers.DatasetUpdateParameters import javax.inject.Inject import models.organization.OrganizationDAO import net.liftweb.common.Full +import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json._ import play.utils.UriEncoding import slick.jdbc.PostgresProfile.api._ @@ -443,6 +444,15 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA parsed <- parseFirst(r, s"$organizationId/$name") } yield parsed + def findOneByIdOrNameAndOrganization(datasetIdOpt: Option[ObjectId], datasetName: String, organizationId: String)( + implicit ctx: DBAccessContext, + m: MessagesProvider): Fox[Dataset] = + datasetIdOpt + .map(datasetId => this.findOne(datasetId)) + .getOrElse(this.findOneByNameAndOrganization(datasetName, organizationId)) ?~> Messages( + "dataset.notFoundByIdOrName", + datasetIdOpt.map(_.toString).getOrElse(datasetName)) + def findAllByPathsAndOrganization(names: List[String], organizationId: String)( implicit ctx: DBAccessContext): 
Fox[List[Dataset]] = for { diff --git a/app/models/organization/Organization.scala b/app/models/organization/Organization.scala index 3faaa9ffd4d..e2573d4d7d3 100644 --- a/app/models/organization/Organization.scala +++ b/app/models/organization/Organization.scala @@ -88,7 +88,7 @@ class OrganizationDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionCont for { accessQuery <- readAccessQuery r <- run( - q"select $columns from $existingCollectionName where _id = $organizationId and $accessQuery" + q"SELECT $columns FROM $existingCollectionName WHERE _id = $organizationId AND $accessQuery" .as[OrganizationsRow]) parsed <- parseFirst(r, organizationId) } yield parsed @@ -129,6 +129,15 @@ class OrganizationDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionCont r <- rList.headOption.toFox } yield r + def findOrganizationForDataset(datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[String] = + for { + accessQuery <- readAccessQuery + rList <- run(q"""SELECT o._id FROM webknossos.organizations_ o + JOIN webknossos.datasets_ d ON o._id = d._organization + WHERE d._id = $datasetId AND $accessQuery""".as[String]) + r <- rList.headOption.toFox + } yield r + def updateFields(organizationId: String, name: String, newUserMailingList: String)( implicit ctx: DBAccessContext): Fox[Unit] = for { diff --git a/app/models/task/TaskCreationParameters.scala b/app/models/task/TaskCreationParameters.scala index b32a8c2e31b..080828ecdcc 100644 --- a/app/models/task/TaskCreationParameters.scala +++ b/app/models/task/TaskCreationParameters.scala @@ -12,7 +12,7 @@ case class TaskParameters(taskTypeId: String, scriptId: Option[String], boundingBox: Option[BoundingBox], datasetName: String, - datasetId: ObjectId, // TODOM: Make this a dataSetId + datasetId: Option[ObjectId], // TODOM: Make this a dataSetId editPosition: Vec3Int, editRotation: Vec3Double, creationInfo: Option[String], diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index a9dd0b1ad2d..07a4a486469 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -22,6 +22,7 @@ import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.json.{JsObject, Json} import telemetry.SlackNotificationService import com.scalableminds.util.requestparsing.ObjectId +import models.organization.OrganizationDAO import play.api.http.Status.FORBIDDEN import scala.concurrent.ExecutionContext @@ -30,6 +31,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, taskTypeDAO: TaskTypeDAO, annotationService: AnnotationService, taskDAO: TaskDAO, + organizationDAO: OrganizationDAO, taskService: TaskService, userService: UserService, teamDAO: TeamDAO, @@ -75,12 +77,13 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, for { taskTypeIdValidated <- ObjectId.fromString(taskParameters.taskTypeId) ?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" - dataset <- datasetDAO.findOne(taskParameters.datasetId) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(taskParameters.datasetId, taskParameters.datasetName, organizationId) + _ <- bool2Fox(dataset._organization == organizationId) ?~> "dataset" baseAnnotationIdValidated <- ObjectId.fromString(baseAnnotation.baseId) annotation <- resolveBaseAnnotationId(baseAnnotationIdValidated) tracingStoreClient <- tracingStoreService.clientFor(dataset) newSkeletonId <- if (taskType.tracingType == TracingType.skeleton || 
taskType.tracingType == TracingType.hybrid) - duplicateOrCreateSkeletonBase(annotation, taskParameters, tracingStoreClient).map(Some(_)) + duplicateOrCreateSkeletonBase(annotation, taskParameters, organizationId, tracingStoreClient).map(Some(_)) else Fox.successful(None) newVolumeId <- if (taskType.tracingType == TracingType.volume || taskType.tracingType == TracingType.hybrid) duplicateOrCreateVolumeBase(annotation, @@ -122,7 +125,8 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, private def duplicateOrCreateSkeletonBase( baseAnnotation: Annotation, params: TaskParameters, - tracingStoreClient: WKRemoteTracingStoreClient)(implicit ctx: DBAccessContext): Fox[String] = + organizationId: String, + tracingStoreClient: WKRemoteTracingStoreClient)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[String] = for { baseSkeletonTracingIdOpt <- baseAnnotation.skeletonTracingId newTracingId <- baseSkeletonTracingIdOpt @@ -136,6 +140,8 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, annotationService .createSkeletonTracingBase( params.datasetId, + params.datasetName, + organizationId, params.boundingBox, params.editPosition, params.editRotation @@ -157,7 +163,8 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, .getOrElse( annotationService .createVolumeTracingBase( - params.dataSet, + params.datasetId, + params.datasetName, organizationId, params.boundingBox, params.editPosition, @@ -180,6 +187,8 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, annotationService .createSkeletonTracingBase( params.datasetId, + params.datasetName, + organizationId, params.boundingBox, params.editPosition, params.editRotation @@ -285,7 +294,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } // used in createFromFiles route - def fillInMissingTracings(skeletons: List[Box[SkeletonTracing]], + def fillInMissingTracings(skeletons: List[Box[SkeletonTracingWithDatasetId]], volumes: List[Box[(VolumeTracing, Option[File])]], fullParams: List[Box[TaskParameters]], taskType: TaskType, diff --git a/frontend/javascripts/admin/task/task_create_form_view.tsx b/frontend/javascripts/admin/task/task_create_form_view.tsx index 64ecebbeb77..cadbdb29dcc 100644 --- a/frontend/javascripts/admin/task/task_create_form_view.tsx +++ b/frontend/javascripts/admin/task/task_create_form_view.tsx @@ -59,7 +59,7 @@ const fullWidth = { const maxDisplayedTasksCount = 50; const TASK_CSV_HEADER = - "taskId,dataSet,taskTypeId,experienceDomain,minExperience,x,y,z,rotX,rotY,rotZ,instances,minX,minY,minZ,width,height,depth,project,scriptId,creationInfo"; + "taskId,datasetId,datasetName,taskTypeId,experienceDomain,minExperience,x,y,z,rotX,rotY,rotZ,instances,minX,minY,minZ,width,height,depth,project,scriptId,creationInfo"; export enum SpecificationEnum { Manual = "Manual", @@ -76,7 +76,8 @@ export function taskToShortText(task: APITask) { export function taskToText(task: APITask) { const { id, - datasetId: dataSet, + datasetId, + datasetName, type, neededExperience, editPosition, @@ -96,7 +97,7 @@ export function taskToText(task: APITask) { const scriptId = script ? 
`${script.id}` : ""; const creationInfoOrEmpty = creationInfo || ""; const taskAsString = - `${id},${dataSet},${type.id},${neededExperienceAsString},${editPositionAsString},` + + `${id},${datasetId},${datasetName},${type.id},${neededExperienceAsString},${editPositionAsString},` + `${editRotationAsString},${totalNumberOfInstances},${boundingBoxAsString},${projectName},${scriptId},${creationInfoOrEmpty}`; return taskAsString; } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SkeletonElementDefaults.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SkeletonElementDefaults.scala index 57a56ddd72d..398cda1e860 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SkeletonElementDefaults.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/SkeletonElementDefaults.scala @@ -1,12 +1,10 @@ package com.scalableminds.webknossos.datastore.helpers import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} -import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.webknossos.datastore.SkeletonTracing.{Node, SkeletonTracing} object SkeletonTracingDefaults extends ProtoGeometryImplicits { private val datasetName = "" - private val datasetId = "" private val trees = Seq() private def createdTimestamp = System.currentTimeMillis() private val boundingBox = None @@ -18,8 +16,7 @@ object SkeletonTracingDefaults extends ProtoGeometryImplicits { private val userBoundingBox = None def createInstance: SkeletonTracing = - SkeletonTracing(datasetId, - datasetName, + SkeletonTracing(datasetName, trees, createdTimestamp, boundingBox, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index 4657ed45209..0825e0e529b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -36,7 +36,7 @@ case class ReserveUploadInformation( organization: String, totalFileCount: Long, filePaths: Option[List[String]], - layersToLink: Option[List[LinkedLayerIdentifier]], // TODOM: This is not used anywhere? Got replaced with the compose route I would say. 
+ layersToLink: Option[List[LinkedLayerIdentifier]], // TODOM: This is used by wk libs, should use the legacy heuristic to identify the dataset initialTeams: List[String], // team ids folderId: Option[String]) object ReserveUploadInformation { diff --git a/webknossos-datastore/proto/SkeletonTracing.proto b/webknossos-datastore/proto/SkeletonTracing.proto index 7ea08f8a6b4..7f9ebc91e38 100644 --- a/webknossos-datastore/proto/SkeletonTracing.proto +++ b/webknossos-datastore/proto/SkeletonTracing.proto @@ -62,20 +62,19 @@ message TreeGroup { } message SkeletonTracing { - required string datasetId = 1; - required string datasetName = 2; // used when parsing and handling nmls, not used in tracing store anymore, do not rely on correct values - repeated Tree trees = 3; - required int64 createdTimestamp = 4; - optional BoundingBoxProto boundingBox = 5; - optional int32 activeNodeId = 6; - required Vec3IntProto editPosition = 7; - required Vec3DoubleProto editRotation = 8; - required double zoomLevel = 9; - required int64 version = 10; - optional BoundingBoxProto userBoundingBox = 11; - repeated TreeGroup treeGroups = 12; - repeated NamedBoundingBoxProto userBoundingBoxes = 13; - optional string organizationId = 14; // used when parsing and handling nmls, not used in tracing store anymore, do not rely on correct values + required string datasetName = 1; // used when parsing and handling nmls, not used in tracing store anymore, do not rely on correct values + repeated Tree trees = 2; + required int64 createdTimestamp = 3; + optional BoundingBoxProto boundingBox = 4; + optional int32 activeNodeId = 5; + required Vec3IntProto editPosition = 6; + required Vec3DoubleProto editRotation = 7; + required double zoomLevel = 8; + required int64 version = 9; + optional BoundingBoxProto userBoundingBox = 10; + repeated TreeGroup treeGroups = 11; + repeated NamedBoundingBoxProto userBoundingBoxes = 12; + optional string organizationId = 13; // used when parsing and handling nmls, not used in tracing store anymore, do not rely on correct values repeated AdditionalCoordinateProto editPositionAdditionalCoordinates = 21; repeated AdditionalAxisProto additionalAxes = 22; // Additional axes for which this tracing is defined } From 0d6b2e753d3e405cdc06a4b2ff3ec8e90b5f2270 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 11 Oct 2024 12:26:19 +0200 Subject: [PATCH 033/129] WIP: adjust backend part of task upload to use new dataset addressing --- app/controllers/AnnotationIOController.scala | 2 +- app/controllers/TaskController.scala | 6 +- app/models/annotation/AnnotationService.scala | 39 ++--- app/models/annotation/nml/NmlParser.scala | 3 + app/models/annotation/nml/NmlResults.scala | 8 +- app/models/task/TaskCreationService.scala | 146 +++++++++++------- .../view/layouting/tracing_layout_view.tsx | 4 +- 7 files changed, 124 insertions(+), 84 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 2a5dd5210d1..071ac725ea0 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -11,7 +11,7 @@ import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContex import com.scalableminds.util.io.ZipIO import com.scalableminds.util.requestparsing.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} -import com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracing, SkeletonTracingOpt, SkeletonTracings} +import 
com.scalableminds.webknossos.datastore.SkeletonTracing.{SkeletonTracingOpt, SkeletonTracings} import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.annotation.{ diff --git a/app/controllers/TaskController.scala b/app/controllers/TaskController.scala index f13e45a7fe4..56a410b8482 100755 --- a/app/controllers/TaskController.scala +++ b/app/controllers/TaskController.scala @@ -49,8 +49,8 @@ class TaskController @Inject()(taskCreationService: TaskCreationService, _ <- taskCreationService.assertBatchLimit(request.body.length, request.body.map(_.taskTypeId)) taskParameters <- taskCreationService.createTracingsFromBaseAnnotations(request.body, request.identity._organization) - skeletonBaseOpts: List[Option[SkeletonTracing]] <- taskCreationService.createTaskSkeletonTracingBases( - taskParameters) + skeletonBaseOpts: List[Option[SkeletonTracing]] <- taskCreationService + .createTaskSkeletonTracingBases(taskParameters, request.identity._organization) volumeBaseOpts: List[Option[(VolumeTracing, Option[File])]] <- taskCreationService .createTaskVolumeTracingBases(taskParameters, request.identity._organization) paramsWithTracings = taskParameters.lazyZip(skeletonBaseOpts).lazyZip(volumeBaseOpts).map { @@ -87,7 +87,7 @@ class TaskController @Inject()(taskCreationService: TaskCreationService, .findOneByNameAndOrganization(params.projectName, request.identity._organization) ?~> "project.notFound" ~> NOT_FOUND _ <- Fox.assertTrue(userService.isTeamManagerOrAdminOf(request.identity, project._team)) extractedFiles <- nmlService.extractFromFiles(inputFiles.map(f => (f.ref.path.toFile, f.filename)), - SharedParsingParameters(useZipName = false, isTaskUpload = true)) + SharedParsingParameters(useZipName = false, isTaskUpload = true)) extractedTracingBoxesRaw: List[TracingBoxContainer] = extractedFiles.toBoxes extractedTracingBoxes: List[TracingBoxContainer] <- taskCreationService .addVolumeFallbackBoundingBoxes(extractedTracingBoxesRaw, request.identity._organization) diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index 50c83e5d27d..2e0a45239fc 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -525,12 +525,13 @@ class AnnotationService @Inject()( } yield result } - def createSkeletonTracingBase(datasetIdOpt: Option[ObjectId], - datasetName: String, - organizationId: String, - boundingBox: Option[BoundingBox], - startPosition: Vec3Int, - startRotation: Vec3Double)(implicit ctx: DBAccessContext, m: MessagesProvider) : Fox[SkeletonTracing] = { + def createSkeletonTracingBase( + datasetIdOpt: Option[ObjectId], + datasetName: String, + organizationId: String, + boundingBox: Option[BoundingBox], + startPosition: Vec3Int, + startRotation: Vec3Double)(implicit ctx: DBAccessContext, m: MessagesProvider): Fox[SkeletonTracing] = { val initialNode = NodeDefaults.createInstance.withId(1).withPosition(startPosition).withRotation(startRotation) val initialTree = Tree( 1, @@ -544,16 +545,17 @@ class AnnotationService @Inject()( ) for { dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetIdOpt, datasetName, organizationId) - } yield SkeletonTracingDefaults.createInstance.copy( - datasetName = dataset.name, - boundingBox = boundingBox.flatMap { box => - if (box.isEmpty) None else Some(box) - }, - editPosition = 
startPosition, - editRotation = startRotation, - activeNodeId = Some(1), - trees = Seq(initialTree) - ) + } yield + SkeletonTracingDefaults.createInstance.copy( + datasetName = dataset.name, + boundingBox = boundingBox.flatMap { box => + if (box.isEmpty) None else Some(box) + }, + editPosition = startPosition, + editRotation = startRotation, + activeNodeId = Some(1), + trees = Seq(initialTree) + ) } def createVolumeTracingBase(datasetIdOpt: Option[ObjectId], @@ -567,8 +569,9 @@ class AnnotationService @Inject()( m: MessagesProvider): Fox[VolumeTracing] = for { organization <- organizationDAO.findOne(organizationId) - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetIdOpt,datasetName, organizationId) ?~> Messages("dataset.notFound", - datasetName) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(datasetIdOpt, datasetName, organizationId) ?~> Messages( + "dataset.notFound", + datasetName) dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) dataStore <- dataStoreDAO.findOneByName(dataset._dataStore.trim) fallbackLayer = if (volumeShowFallbackLayer) { diff --git a/app/models/annotation/nml/NmlParser.scala b/app/models/annotation/nml/NmlParser.scala index a08426d46e9..1b6e4b8ca50 100755 --- a/app/models/annotation/nml/NmlParser.scala +++ b/app/models/annotation/nml/NmlParser.scala @@ -167,6 +167,9 @@ class NmlParser @Inject()(datasetDAO: DatasetDAO) extends LazyLogging with Proto case Failure(message, cause, _chain) => logger.error(s"Failed to parse NML $name due to " + cause) Failure(s"Failed to parse NML '$name': " + message) + case Empty => + logger.error(s"Failed to parse NML $name. Parser returned empty") + Failure(s"Failed to parse NML '$name': Parser returned empty") } } diff --git a/app/models/annotation/nml/NmlResults.scala b/app/models/annotation/nml/NmlResults.scala index da03288fc3f..a005d5090d0 100644 --- a/app/models/annotation/nml/NmlResults.scala +++ b/app/models/annotation/nml/NmlResults.scala @@ -1,8 +1,6 @@ package models.annotation.nml import java.io.File -import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing -import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.typesafe.scalalogging.LazyLogging import models.annotation.{SkeletonTracingWithDatasetId, UploadedVolumeLayer} import net.liftweb.common.{Box, Empty, Failure, Full} @@ -81,8 +79,8 @@ object NmlResults extends LazyLogging { val volumeBox = successBox match { case Full(success) if success.volumeLayers.length <= 1 => success.volumeLayers.headOption match { - case Some(UploadedVolumeLayer(tracing, datasetId,dataZipLocation, _)) => - Full((tracing, otherFiles.get(dataZipLocation))) + case Some(volumeLayer) => + Full((volumeLayer, otherFiles.get(volumeLayer.dataZipLocation))) case None => Empty } case Full(success) if success.volumeLayers.length > 1 => @@ -97,6 +95,6 @@ object NmlResults extends LazyLogging { case class TracingBoxContainer(fileName: Box[String], description: Box[Option[String]], skeleton: Box[SkeletonTracingWithDatasetId], - volume: Box[(VolumeTracing, Option[File])]) + volume: Box[(UploadedVolumeLayer, Option[File])]) } diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index 07a4a486469..088a31431ce 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -77,7 +77,9 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, for { taskTypeIdValidated <- ObjectId.fromString(taskParameters.taskTypeId) 
?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" - dataset <- datasetDAO.findOneByIdOrNameAndOrganization(taskParameters.datasetId, taskParameters.datasetName, organizationId) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization(taskParameters.datasetId, + taskParameters.datasetName, + organizationId) _ <- bool2Fox(dataset._organization == organizationId) ?~> "dataset" baseAnnotationIdValidated <- ObjectId.fromString(baseAnnotation.baseId) annotation <- resolveBaseAnnotationId(baseAnnotationIdValidated) @@ -176,8 +178,9 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } yield newVolumeTracingId // Used in create (without files). If base annotations were used, this does nothing. - def createTaskSkeletonTracingBases(paramsList: List[TaskParameters])( - implicit ctx: DBAccessContext): Fox[List[Option[SkeletonTracing]]] = + def createTaskSkeletonTracingBases(paramsList: List[TaskParameters], organizationId: String)( + implicit ctx: DBAccessContext, + m: MessagesProvider): Fox[List[Option[SkeletonTracing]]] = Fox.serialCombined(paramsList) { params => for { taskTypeIdValidated <- ObjectId.fromString(params.taskTypeId) ?~> "taskType.id.invalid" @@ -209,7 +212,8 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, volumeTracingOpt <- if ((taskType.tracingType == TracingType.volume || taskType.tracingType == TracingType.hybrid) && params.baseAnnotation.isEmpty) { annotationService .createVolumeTracingBase( - params.dataSet, + params.datasetId, + params.datasetName, organizationId, params.boundingBox, params.editPosition, @@ -245,29 +249,40 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } // Used in createFromFiles. Called once per requested task if volume tracing is passed - private def addVolumeFallbackBoundingBox(volume: VolumeTracing, organizationId: String): Fox[VolumeTracing] = - if (volume.boundingBox.isEmpty) { + private def addVolumeFallbackBoundingBox(volume: UploadedVolumeLayer, + organizationId: String): Fox[UploadedVolumeLayer] = + if (volume.tracing.boundingBox.isEmpty) { for { - // TODOM: Here the dataset also needs to be identified by either path & orga or id - dataset <- datasetDAO.findOneByPathAndOrganization(volume.datasetName, organizationId)(GlobalAccessContext) + dataset <- datasetDAO.findOne(volume.datasetId)(GlobalAccessContext) dataSource <- datasetService.dataSourceFor(dataset).flatMap(_.toUsable) - } yield volume.copy(boundingBox = dataSource.boundingBox) + } yield volume.copy(tracing = volume.tracing.copy(boundingBox = dataSource.boundingBox)) } else Fox.successful(volume) // Used in createFromFiles. 
Called once per requested task private def buildFullParamsFromFilesForSingleTask( nmlFormParams: NmlTaskParameters, - skeletonTracing: Box[SkeletonTracing], - volumeTracing: Box[VolumeTracing], + skeletonTracing: Box[SkeletonTracingWithDatasetId], + uploadedVolumeLayer: Box[UploadedVolumeLayer], fileName: Box[String], description: Box[Option[String]])(implicit m: MessagesProvider): Box[TaskParameters] = { - val paramBox: Box[(Option[BoundingBox], String, Vec3Int, Vec3Double)] = skeletonTracing match { - case Full(tracing) => Full((tracing.boundingBox, tracing.datasetName, tracing.editPosition, tracing.editRotation)) - case f: Failure => f + val paramBox: Box[(Option[BoundingBox], String, ObjectId, Vec3Int, Vec3Double)] = skeletonTracing match { + case Full(tracing) => + Full( + (tracing.skeletonTracing.boundingBox, + tracing.skeletonTracing.datasetName, + tracing.datasetId, + tracing.skeletonTracing.editPosition, + tracing.skeletonTracing.editRotation)) + case f: Failure => f case Empty => - volumeTracing match { - case Full(tracing) => - Full((Some(tracing.boundingBox), tracing.datasetName, tracing.editPosition, tracing.editRotation)) + uploadedVolumeLayer match { + case Full(layer) => + Full( + (Some(layer.tracing.boundingBox), + layer.tracing.datasetName, + layer.datasetId, + layer.tracing.editPosition, + layer.tracing.editRotation)) case f: Failure => f case Empty => Failure(Messages("task.create.needsEitherSkeletonOrVolume")) } @@ -284,8 +299,9 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, nmlFormParams.scriptId, bbox, params._2, - params._3, + Some(params._3), params._4, + params._5, fileName, description.toOption.flatten, None @@ -295,7 +311,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, // used in createFromFiles route def fillInMissingTracings(skeletons: List[Box[SkeletonTracingWithDatasetId]], - volumes: List[Box[(VolumeTracing, Option[File])]], + volumes: List[Box[(UploadedVolumeLayer, Option[File])]], fullParams: List[Box[TaskParameters]], taskType: TaskType, organizationId: String)( @@ -309,7 +325,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, case (skeletonTracingBox, volumeTracingBox) => volumeTracingBox match { case Full(_) => (Failure(Messages("taskType.mismatch", "skeleton", "volume")), Empty) - case _ => (skeletonTracingBox, Empty) + case _ => (skeletonTracingBox.map(_.skeletonTracing), Empty) } } .unzip) @@ -321,7 +337,7 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, case (skeletonTracingBox, volumeTracingBox) => skeletonTracingBox match { case Full(_) => (Empty, Failure(Messages("taskType.mismatch", "volume", "skeleton"))) - case _ => (Empty, volumeTracingBox) + case _ => (Empty, volumeTracingBox.map(box => (box._1.tracing, box._2))) } } .unzip) @@ -332,34 +348,20 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, paramBox match { case Full(params) => for { - skeletonFox <- if(skeleton.isDefined) Fox.successful(skeleton) else annotationService.createSkeletonTracingBase(params.datasetId, - params.boundingBox, - params.editPosition, - params.editRotation) - volumeFox <- if(volume.isDefined) Fox.successful(volume) else annotationService - .createVolumeTracingBase( - params.datasetId.toString, - organizationId, - params.boundingBox, - params.editPosition, - params.editRotation, - volumeShowFallbackLayer = false, - resolutionRestrictions = taskType.settings.resolutionRestrictions - ) - .map(v => (v, None)) - } yield (Full(skeletonFox), 
Full(volumeFox)) - val skeletonBox = Full( - skeleton.openOr( + skeletonFox <- if (skeleton.isDefined) Fox.successful(skeleton) + else annotationService.createSkeletonTracingBase(params.datasetId, + params.datasetName, + organizationId, params.boundingBox, params.editPosition, - params.editRotation))) - val volumeFox = volume - .map(Fox.successful(_)) - .openOr( + params.editRotation) + volumeFox <- if (volume.isDefined) Fox.successful(volume) + else annotationService .createVolumeTracingBase( - params.datasetId.toString, + params.datasetId, + params.datasetName, organizationId, params.boundingBox, params.editPosition, @@ -367,9 +369,36 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, volumeShowFallbackLayer = false, resolutionRestrictions = taskType.settings.resolutionRestrictions ) - .map(v => (v, None))) - - volumeFox.map(v => (skeletonBox, Full(v))) + .map(v => (v, None)) + } yield (Full(skeletonFox), Full(volumeFox)) + val skeletonFox = + skeleton + .map(s => Fox.successful(s.skeletonTracing)) + .openOr( + annotationService.createSkeletonTracingBase(params.datasetId, + params.datasetName, + organizationId, + params.boundingBox, + params.editPosition, + params.editRotation)) + val volumeFox = volume + .map(v => Fox.successful(v._1.tracing, v._2)) + .openOr(annotationService + .createVolumeTracingBase( + params.datasetId, + params.datasetName, + organizationId, + params.boundingBox, + params.editPosition, + params.editRotation, + volumeShowFallbackLayer = false, + resolutionRestrictions = taskType.settings.resolutionRestrictions + ) + .map(v => (v, None))) + for { + skeleton <- skeletonFox + volume <- volumeFox + } yield (Full(skeleton), Full(volume)) case f: Failure => Fox.failure(f.msg, Empty, f.chain) case _ => Fox.failure("") } @@ -418,11 +447,13 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, for { _ <- assertEachHasEitherSkeletonOrVolume(fullTasks) ?~> "task.create.needsEitherSkeletonOrVolume" firstDatasetId <- fullTasks.headOption.map(_._1.datasetId).toFox // TODOM continue - _ <- assertAllOnSameDataset(fullTasks, firstDatasetId) - dataset <- datasetDAO.findOneByPathAndOrganization(firstDatasetId, requestingUser._organization) ?~> Messages( - "dataset.notFound", - firstDatasetId) - _ <- bool2Fox(dataset._organization == requestingUser._organization) ?~> FORBIDDEN + firstDatasetName <- fullTasks.headOption.map(_._1.datasetName).toFox // TODOM continue + _ <- assertAllOnSameDataset(fullTasks, firstDatasetId, firstDatasetName) + dataset <- datasetDAO.findOneByIdOrNameAndOrganization( + firstDatasetId, + firstDatasetName, + requestingUser._organization) ?~> Messages("dataset.notFound", firstDatasetId) + _ <- bool2Fox(dataset._organization == requestingUser._organization) ~> FORBIDDEN _ = if (fullTasks.exists(task => task._1.baseAnnotation.isDefined)) slackNotificationService.noticeBaseAnnotationTaskCreation(fullTasks.map(_._1.taskTypeId).distinct, fullTasks.count(_._1.baseAnnotation.isDefined)) @@ -480,17 +511,22 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, private def assertAllOnSameDataset( requestedTasks: List[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])], + firstDatasetIdOpt: Option[ObjectId], firstDatasetName: String)(implicit mp: MessagesProvider): Fox[String] = { @scala.annotation.tailrec def allOnSameDatasetIter( requestedTasksRest: List[(TaskParameters, Option[SkeletonTracing], Option[(VolumeTracing, Option[File])])], + datasetIdOpt: Option[ObjectId], datasetName: 
String): Boolean = requestedTasksRest match { - case List() => true - case head :: tail => head._1.dataSet == datasetName && allOnSameDatasetIter(tail, datasetName) + case List() => true + case head :: tail => + val areIdsEqual = datasetIdOpt.forall(_ == head._1.datasetId.getOrElse(ObjectId.dummyId)) + val areNamesEqual = head._1.datasetName == datasetName + areIdsEqual && areNamesEqual && allOnSameDatasetIter(tail, datasetIdOpt, datasetName) } - if (allOnSameDatasetIter(requestedTasks, firstDatasetName)) + if (allOnSameDatasetIter(requestedTasks, firstDatasetIdOpt, firstDatasetName)) Fox.successful(firstDatasetName) else Fox.failure(Messages("task.notOnSameDataset")) diff --git a/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx b/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx index 7362b366cd8..a2edaaab1ef 100644 --- a/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx +++ b/frontend/javascripts/oxalis/view/layouting/tracing_layout_view.tsx @@ -269,7 +269,7 @@ class TracingLayoutView extends React.PureComponent { data: { nmlFile: files, createGroupForEachFile, - datasetName: this.props.datasetName, + datasetId: this.props.datasetId, organizationId: this.props.organization, }, }); @@ -405,7 +405,7 @@ function mapStateToProps(state: OxalisState) { showVersionRestore: state.uiInformation.showVersionRestore, storedLayouts: state.uiInformation.storedLayouts, isDatasetOnScratchVolume: state.dataset.dataStore.isScratch, - datasetName: state.dataset.name, + datasetId: state.dataset.id, is2d: is2dDataset(state.dataset), displayName: state.tracing.name ? state.tracing.name : state.dataset.name, organization: state.dataset.owningOrganization, From f4c16e3fe240916dc6a0c93826da11edbfa7fa08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 11 Oct 2024 17:30:58 +0200 Subject: [PATCH 034/129] Finish adapting task & annotation upload to new format --- app/controllers/AnnotationIOController.scala | 5 +- .../WKRemoteDataStoreController.scala | 12 ++--- app/models/dataset/Dataset.scala | 17 +++---- app/models/task/TaskCreationParameters.scala | 7 ++- app/models/task/TaskCreationService.scala | 4 +- .../admin/task/task_create_bulk_view.tsx | 50 ++++++++++--------- .../services/uploading/UploadService.scala | 13 +++-- 7 files changed, 58 insertions(+), 50 deletions(-) diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 071ac725ea0..15ba6d51016 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -48,6 +48,7 @@ import models.organization.OrganizationDAO import models.project.ProjectDAO import models.task._ import models.user._ +import net.liftweb.common.Failure import play.api.i18n.{Messages, MessagesProvider} import play.api.libs.Files.{TemporaryFile, TemporaryFileCreator} import play.api.libs.json.Json @@ -253,7 +254,7 @@ class AnnotationIOController @Inject()( private def wkUrlsForNMLs(wkUrls: Seq[Option[String]]) = if (wkUrls.toSet.size == 1) wkUrls.headOption.flatten.getOrElse("") else "" - private def returnError(zipParseResult: NmlResults.MultiNmlParseResult)(implicit messagesProvider: MessagesProvider) = + /*private def returnError(zipParseResult: NmlResults.MultiNmlParseResult)(implicit messagesProvider: MessagesProvider) = if (zipParseResult.containsFailure) { val errors = zipParseResult.parseResults.flatMap { case result: NmlResults.NmlParseFailure => @@ -263,7 +264,7 @@ class 
AnnotationIOController @Inject()( Future.successful(JsonBadRequest(errors)) } else { Future.successful(JsonBadRequest(Messages("nml.file.noFile"))) - } + }*/ private def assertAllOnSameDataset(skeletons: List[SkeletonTracingWithDatasetId], volumes: List[UploadedVolumeLayer]): Fox[ObjectId] = diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 85b10fad9e6..1493dad48e4 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -76,12 +76,12 @@ class WKRemoteDataStoreController @Inject()( _ <- bool2Fox(dataStore.onlyAllowedOrganization.forall(_ == organization._id)) ?~> "dataset.upload.Datastore.restricted" folderId <- ObjectId.fromString(uploadInfo.folderId.getOrElse(organization._rootFolder.toString)) ?~> "dataset.upload.folderId.invalid" _ <- folderDAO.assertUpdateAccess(folderId)(AuthorizedAccessContext(user)) ?~> "folder.noWriteAccess" - _ <- Fox.serialCombined(uploadInfo.layersToLink.getOrElse(List.empty))(l => validateLayerToLink(l, user)) ?~> "dataset.upload.invalidLinkedLayers" + layersToLinkWithDatasetId <- Fox.serialCombined(uploadInfo.layersToLink.getOrElse(List.empty))(l => validateLayerToLink(l, user)) ?~> "dataset.upload.invalidLinkedLayers" dataset <- datasetService.createPreliminaryDataset(uploadInfo.name, uploadInfo.organization, dataStore) ?~> "dataset.name.alreadyTaken" _ <- datasetDAO.updateFolder(dataset._id, folderId)(GlobalAccessContext) _ <- datasetService.addInitialTeams(dataset, uploadInfo.initialTeams, user)(AuthorizedAccessContext(user)) _ <- datasetService.addUploader(dataset, user._id)(AuthorizedAccessContext(user)) - updatedInfo = uploadInfo.copy(newDatasetId = dataset._id.toString, path = dataset.path) // Update newDatasetId and path according to the newly created dataset. + updatedInfo = uploadInfo.copy(newDatasetId = dataset._id.toString, path = dataset.path, layersToLink = Some(layersToLinkWithDatasetId)) // Update newDatasetId and path according to the newly created dataset. } yield Ok(Json.toJson(updatedInfo)) } } @@ -114,18 +114,18 @@ class WKRemoteDataStoreController @Inject()( } } - // TODOM: I think this is not used anywhere? Got replaced with the compose route I would say. private def validateLayerToLink(layerIdentifier: LinkedLayerIdentifier, - requestingUser: User)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Unit] = + requestingUser: User)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[LinkedLayerIdentifier] = for { organization <- organizationDAO.findOne(layerIdentifier.getOrganizationId)(GlobalAccessContext) ?~> Messages( "organization.notFound", layerIdentifier.getOrganizationId) ~> NOT_FOUND - dataset <- datasetDAO.findOneByPathAndOrganization(layerIdentifier.dataSetName, organization._id)( + // TODOM: Consider to interpret dataSetName as the datasets path, both variations have scenarios in which the dataset might not be found. 
+ dataset <- datasetDAO.findOneByNameAndOrganization(layerIdentifier.dataSetName, organization._id)( AuthorizedAccessContext(requestingUser)) ?~> Messages("dataset.notFound", layerIdentifier.dataSetName) isTeamManagerOrAdmin <- userService.isTeamManagerOrAdminOfOrg(requestingUser, dataset._organization) _ <- Fox.bool2Fox(isTeamManagerOrAdmin || requestingUser.isDatasetManager || dataset.isPublic) ?~> "dataset.upload.linkRestricted" - } yield () + } yield layerIdentifier.copy(datasetPath = Some(dataset.path)) def reportDatasetUpload(name: String, key: String, diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index a0f27b6a52d..f7a6040a21b 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -415,20 +415,17 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA parsed <- parseFirst(r, s"$organizationId/$path") } yield parsed - def doesDatasetNameExistInOrganization(name: String, organizationId: String)( + def doesDatasetPathExistInOrganization(path: String, organizationId: String)( implicit ctx: DBAccessContext): Fox[Boolean] = for { accessQuery <- readAccessQuery - r <- run(q"""SELECT 1 + r <- run(q"""SELECT EXISTS(SELECT 1 FROM $existingCollectionName - WHERE name = $name + WHERE path = $path AND _organization = $organizationId AND $accessQuery - LIMIT 1""".as[DatasetsRow]) - exists <- parseFirst(r, s"$organizationId/$name").futureBox.map { - case Full(_) => true - case _ => false - } + LIMIT 1)""".as[Boolean]) + exists <- r.headOption } yield exists def findOneByNameAndOrganization(name: String, organizationId: String)(implicit ctx: DBAccessContext): Fox[Dataset] = @@ -453,13 +450,13 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA "dataset.notFoundByIdOrName", datasetIdOpt.map(_.toString).getOrElse(datasetName)) - def findAllByPathsAndOrganization(names: List[String], organizationId: String)( + def findAllByPathsAndOrganization(paths: List[String], organizationId: String)( implicit ctx: DBAccessContext): Fox[List[Dataset]] = for { accessQuery <- readAccessQuery r <- run(q"""SELECT $columns FROM $existingCollectionName - WHERE path IN ${SqlToken.tupleFromList(names)} + WHERE path IN ${SqlToken.tupleFromList(paths)} AND _organization = $organizationId AND $accessQuery""".as[DatasetsRow]).map(_.toList) parsed <- parseAll(r) diff --git a/app/models/task/TaskCreationParameters.scala b/app/models/task/TaskCreationParameters.scala index 080828ecdcc..1f318e79cb1 100644 --- a/app/models/task/TaskCreationParameters.scala +++ b/app/models/task/TaskCreationParameters.scala @@ -11,13 +11,16 @@ case class TaskParameters(taskTypeId: String, projectName: String, scriptId: Option[String], boundingBox: Option[BoundingBox], - datasetName: String, - datasetId: Option[ObjectId], // TODOM: Make this a dataSetId + dataSet: String, + datasetId: Option[ObjectId], editPosition: Vec3Int, editRotation: Vec3Double, creationInfo: Option[String], description: Option[String], baseAnnotation: Option[BaseAnnotation]) +{ + lazy val datasetName: String = dataSet +} object TaskParameters { implicit val taskParametersFormat: Format[TaskParameters] = Json.format[TaskParameters] diff --git a/app/models/task/TaskCreationService.scala b/app/models/task/TaskCreationService.scala index 088a31431ce..cdff27c5c79 100644 --- a/app/models/task/TaskCreationService.scala +++ b/app/models/task/TaskCreationService.scala @@ -446,8 +446,8 @@ class TaskCreationService @Inject()(taskTypeService: TaskTypeService, } 
else { for { _ <- assertEachHasEitherSkeletonOrVolume(fullTasks) ?~> "task.create.needsEitherSkeletonOrVolume" - firstDatasetId <- fullTasks.headOption.map(_._1.datasetId).toFox // TODOM continue - firstDatasetName <- fullTasks.headOption.map(_._1.datasetName).toFox // TODOM continue + firstDatasetId <- fullTasks.headOption.map(_._1.datasetId).toFox + firstDatasetName <- fullTasks.headOption.map(_._1.datasetName).toFox _ <- assertAllOnSameDataset(fullTasks, firstDatasetId, firstDatasetName) dataset <- datasetDAO.findOneByIdOrNameAndOrganization( firstDatasetId, diff --git a/frontend/javascripts/admin/task/task_create_bulk_view.tsx b/frontend/javascripts/admin/task/task_create_bulk_view.tsx index fa81497aa7b..bcb5e1c6b08 100644 --- a/frontend/javascripts/admin/task/task_create_bulk_view.tsx +++ b/frontend/javascripts/admin/task/task_create_bulk_view.tsx @@ -17,7 +17,7 @@ export const NUM_TASKS_PER_BATCH = 100; export type NewTask = { readonly boundingBox: BoundingBoxObject | null | undefined; readonly datasetId: string; - readonly datasetName: string; + readonly dataSet: string; readonly editPosition: Vector3; readonly editRotation: Vector3; readonly neededExperience: { @@ -118,24 +118,25 @@ function TaskCreateBulkView() { function parseLine(line: string): NewTask { const words = splitToWords(line); - const dataSet = words[0]; - const taskTypeId = words[1]; - const experienceDomain = words[2]; - const minExperience = Number.parseInt(words[3]); - const x = Number.parseInt(words[4]); - const y = Number.parseInt(words[5]); - const z = Number.parseInt(words[6]); - const rotX = Number.parseInt(words[7]); - const rotY = Number.parseInt(words[8]); - const rotZ = Number.parseInt(words[9]); - const pendingInstances = Number.parseInt(words[10]); - const boundingBoxX = Number.parseInt(words[11]); - const boundingBoxY = Number.parseInt(words[12]); - const boundingBoxZ = Number.parseInt(words[13]); - const width = Number.parseInt(words[14]); - const height = Number.parseInt(words[15]); - const depth = Number.parseInt(words[16]); - const projectName = words[17]; + const datasetName = words[0]; + const datasetId = words[1]; + const taskTypeId = words[2]; + const experienceDomain = words[3]; + const minExperience = Number.parseInt(words[4]); + const x = Number.parseInt(words[5]); + const y = Number.parseInt(words[6]); + const z = Number.parseInt(words[7]); + const rotX = Number.parseInt(words[8]); + const rotY = Number.parseInt(words[9]); + const rotZ = Number.parseInt(words[10]); + const pendingInstances = Number.parseInt(words[11]); + const boundingBoxX = Number.parseInt(words[12]); + const boundingBoxY = Number.parseInt(words[13]); + const boundingBoxZ = Number.parseInt(words[14]); + const width = Number.parseInt(words[15]); + const height = Number.parseInt(words[16]); + const depth = Number.parseInt(words[17]); + const projectName = words[18]; // mapOptional takes care of treating empty strings as null function mapOptional(word: string, fn: (arg0: string) => U): U | null | undefined { @@ -157,7 +158,8 @@ function TaskCreateBulkView() { depth, }; return { - dataSet, + dataSet: datasetName, + datasetId, taskTypeId, scriptId, pendingInstances, @@ -265,10 +267,10 @@ function TaskCreateBulkView() { Specify each new task on a separate line as comma separated values (CSV) in the following format:
- dataset, taskTypeId, experienceDomain, - minExperience, x, y, z, rotX, rotY, rotZ, instances, minX, minY, minZ, width, height, - depth, project, scriptId (optional), - baseAnnotationId (optional) + datasetName, datasetId, taskTypeId, + experienceDomain, minExperience, x, y, z, rotX, rotY, rotZ, instances, minX, minY, minZ, + width, height, depth, project, scriptId{" "} + (optional), baseAnnotationId (optional)
If you want to define some (but not all) of the optional values, please list all optional values and use an empty value for the ones you do not want to set (e.g., diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index 0825e0e529b..5201b18430a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -36,7 +36,7 @@ case class ReserveUploadInformation( organization: String, totalFileCount: Long, filePaths: Option[List[String]], - layersToLink: Option[List[LinkedLayerIdentifier]], // TODOM: This is used by wk libs, should use the legacy heuristic to identify the dataset + layersToLink: Option[List[LinkedLayerIdentifier]], initialTeams: List[String], // team ids folderId: Option[String]) object ReserveUploadInformation { @@ -54,15 +54,20 @@ object ReserveManualUploadInformation { case class LinkedLayerIdentifier(organizationId: Option[String], organizationName: Option[String], + // Filled by backend after identifying the dataset by name. Afterwards this updated value is stored in the redis database. + datasetPath: Option[String], dataSetName: String, layerName: String, newLayerName: Option[String] = None) { def this(organizationId: String, dataSetName: String, layerName: String, newLayerName: Option[String]) = - this(Some(organizationId), None, dataSetName, layerName, newLayerName) + this(Some(organizationId), None, None, dataSetName, layerName, newLayerName) def getOrganizationId: String = this.organizationId.getOrElse(this.organizationName.getOrElse("")) - def pathIn(dataBaseDir: Path): Path = dataBaseDir.resolve(getOrganizationId).resolve(dataSetName).resolve(layerName) + def pathIn(dataBaseDir: Path): Path = { + val datasetPath = this.datasetPath.getOrElse(dataSetName) + dataBaseDir.resolve(getOrganizationId).resolve(datasetPath).resolve(layerName) + } } object LinkedLayerIdentifier { @@ -70,7 +75,7 @@ object LinkedLayerIdentifier { dataSetName: String, layerName: String, newLayerName: Option[String]): LinkedLayerIdentifier = - new LinkedLayerIdentifier(Some(organizationId), None, dataSetName, layerName, newLayerName) + new LinkedLayerIdentifier(Some(organizationId), None, None, dataSetName, layerName, newLayerName) implicit val jsonFormat: OFormat[LinkedLayerIdentifier] = Json.format[LinkedLayerIdentifier] } From d85fc5a3a74f435fe8d4ea33cc21b69cc5cec3bd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Fri, 11 Oct 2024 17:31:09 +0200 Subject: [PATCH 035/129] Fix inserting dataset into database --- app/models/dataset/DatasetService.scala | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 1caf8b3e962..7754cac4ebd 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -63,8 +63,6 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, _ <- bool2Fox(!name.startsWith(".")) ?~> "dataset.layer.name.invalid.startsWithDot" } yield () - private def isNewDatasetName(name: String, organizationId: String): Fox[Boolean] = - datasetDAO.doesDatasetNameExistInOrganization(name, organizationId)(GlobalAccessContext) def createPreliminaryDataset(datasetName: String, 
organizationId: String, dataStore: DataStore): Fox[Dataset] = { val unreportedDatasource = UnusableDataSource(DataSourceId(datasetName, organizationId), notYetUploadedStatus) @@ -105,7 +103,9 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, for { organization <- organizationDAO.findOne(owningOrganization) organizationRootFolder <- folderDAO.findOne(organization._rootFolder) - datasetPath <- isNewDatasetName(datasetName, organization._id).map(if (_) datasetName else newId.toString) + datasetPath <- datasetDAO + .doesDatasetPathExistInOrganization(datasetName, organization._id) + .map(if (_) newId.toString else datasetName) newDataSource = dataSource.withUpdatedId(dataSource.id.copy(path = datasetPath)) // Sync path with dataSource dataset = Dataset( newId, @@ -177,10 +177,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, case Some(foundDataset) => // This only returns None for Datasets that are present on a normal Datastore but also got reported from a scratch Datastore updateDataSourceDifferentDataStore(foundDataset, dataSource, dataStore) case _ => - val maybeParsedDatasetPath = ObjectId.fromStringSync(dataSource.id.path) - // Avoid using the path as name in case it is an ObjectId. - val newDatasetName = maybeParsedDatasetPath.map(_ => "Newly Discovered Dataset").getOrElse(dataSource.id.path) - insertNewDataset(dataSource, newDatasetName, dataStore).toFox + insertNewDataset(dataSource, dataSource.id.path, dataStore).toFox .map(Some(_)) // TODO: Discuss how to better handle this case } } From 6c8663eeb5dd43ced5d285a677e60f9cb442fd54 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 14 Oct 2024 09:33:13 +0200 Subject: [PATCH 036/129] fix nml annotation upload --- .../annotation/AnnotationUploadService.scala | 58 +++++++++---------- .../com/scalableminds/util/tools/Fox.scala | 16 +++++ 2 files changed, 42 insertions(+), 32 deletions(-) diff --git a/app/models/annotation/AnnotationUploadService.scala b/app/models/annotation/AnnotationUploadService.scala index a9a133f423e..cdf5d93cd81 100644 --- a/app/models/annotation/AnnotationUploadService.scala +++ b/app/models/annotation/AnnotationUploadService.scala @@ -169,38 +169,32 @@ class AnnotationUploadService @Inject()(tempFileService: TempFileService, nmlPar def extractFromFiles(files: Seq[(File, String)], sharedParams: SharedParsingParameters)( implicit m: MessagesProvider, ec: ExecutionContext, - ctx: DBAccessContext): Fox[MultiNmlParseResult] = { - val accResult = NmlResults.MultiNmlParseResult() - Fox - .serialCombined(files.iterator) { - case (file, name) => - if (name.endsWith(".zip")) { - tryo(new java.util.zip.ZipFile(file)).map(ZipIO.forallZipEntries(_)(_.getName.endsWith(".zip"))) match { - case Full(allZips) => - if (allZips) { - for { - parsedZipResult <- extractFromZip(file, Some(name), sharedParams) - otherFiles = parsedZipResult.otherFiles.toSeq.map(tuple => (tuple._2, tuple._1)) - parsedFileResults <- extractFromFiles(otherFiles, sharedParams) - _ = accResult.combineWith(parsedFileResults) - } yield () - } else { - for { - parsedFile <- extractFromFile(file, name, sharedParams) - _ = accResult.combineWith(parsedFile) - } yield () - } - case _ => Fox.successful(()) - } - } else - for { - parsedFromFile <- extractFromFile(file, name, sharedParams) - _ = accResult.combineWith(parsedFromFile) - } yield () - } - .map(_ => accResult) - - } + ctx: DBAccessContext): Fox[MultiNmlParseResult] = + Fox.foldLeft(files.iterator, NmlResults.MultiNmlParseResult()) { + case 
(collectedResults, (file, name)) =>
+        if (name.endsWith(".zip")) {
+          tryo(new java.util.zip.ZipFile(file)).map(ZipIO.forallZipEntries(_)(_.getName.endsWith(".zip"))) match {
+            case Full(allZips) =>
+              if (allZips) {
+                for {
+                  parsedZipResult <- extractFromZip(file, Some(name), sharedParams)
+                  otherFiles = parsedZipResult.otherFiles.toSeq.map(tuple => (tuple._2, tuple._1))
+                  parsedFileResults <- extractFromFiles(otherFiles, sharedParams)
+                } yield collectedResults.combineWith(parsedFileResults)
+              } else {
+                for {
+                  parsedFile <- extractFromFile(file, name, sharedParams)
+                } yield collectedResults.combineWith(parsedFile)
+              }
+            case _ => Fox.successful(collectedResults)
+          }
+        } else {
+          for {
+            parsedFromFile <- extractFromFile(file, name, sharedParams)
+          } yield collectedResults.combineWith(parsedFromFile)
+
+        }
+    }

   private def extractFromFile(file: File, fileName: String, sharedParsingParameters: SharedParsingParameters)(
       implicit m: MessagesProvider,
diff --git a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala
index 59af5b50d28..be404109f47 100644
--- a/util/src/main/scala/com/scalableminds/util/tools/Fox.scala
+++ b/util/src/main/scala/com/scalableminds/util/tools/Fox.scala
@@ -151,6 +151,22 @@ object Fox extends FoxImplicits {
     runNext(Nil)
   }

+  def foldLeft[A, B](l: List[A], initial: B)(f: (B, A) => Fox[B])(implicit ec: ExecutionContext): Fox[B] =
+    foldLeft(l.iterator, initial)(f)
+
+  def foldLeft[A, B](it: Iterator[A], initial: B)(f: (B, A) => Fox[B])(implicit ec: ExecutionContext): Fox[B] = {
+    def runNext(collectedResult: B): Fox[B] =
+      if (it.hasNext) {
+        for {
+          currentResult <- f(collectedResult, it.next())
+          results <- runNext(currentResult)
+        } yield results
+      } else {
+        Fox.successful(collectedResult)
+      }
+    runNext(initial)
+  }
+
   // run in sequence, drop everything that isn’t full
   def sequenceOfFulls[T](seq: Seq[Fox[T]])(implicit ec: ExecutionContext): Future[List[T]] =
     Future.sequence(seq.map(_.futureBox)).map { results =>

From e9b7c25ce502599a88491b6e249e48c29f7b0b80 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?=
Date: Mon, 14 Oct 2024 09:34:07 +0200
Subject: [PATCH 037/129] format backend

---
 app/controllers/WKRemoteDataStoreController.scala | 13 +++++++++----
 app/models/dataset/DatasetService.scala           |  1 -
 app/models/task/TaskCreationParameters.scala      |  3 +--
 3 files changed, 10 insertions(+), 7 deletions(-)

diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala
index 1493dad48e4..d6611270939 100644
--- a/app/controllers/WKRemoteDataStoreController.scala
+++ b/app/controllers/WKRemoteDataStoreController.scala
@@ -76,12 +76,16 @@ class WKRemoteDataStoreController @Inject()(
         _ <- bool2Fox(dataStore.onlyAllowedOrganization.forall(_ == organization._id)) ?~> "dataset.upload.Datastore.restricted"
         folderId <- ObjectId.fromString(uploadInfo.folderId.getOrElse(organization._rootFolder.toString)) ?~> "dataset.upload.folderId.invalid"
         _ <- folderDAO.assertUpdateAccess(folderId)(AuthorizedAccessContext(user)) ?~> "folder.noWriteAccess"
-        layersToLinkWithDatasetId <- Fox.serialCombined(uploadInfo.layersToLink.getOrElse(List.empty))(l => validateLayerToLink(l, user)) ?~> "dataset.upload.invalidLinkedLayers"
+        layersToLinkWithDatasetId <- Fox.serialCombined(uploadInfo.layersToLink.getOrElse(List.empty))(l =>
+          validateLayerToLink(l, user)) ?~> "dataset.upload.invalidLinkedLayers"
         dataset <- 
datasetService.createPreliminaryDataset(uploadInfo.name, uploadInfo.organization, dataStore) ?~> "dataset.name.alreadyTaken" _ <- datasetDAO.updateFolder(dataset._id, folderId)(GlobalAccessContext) _ <- datasetService.addInitialTeams(dataset, uploadInfo.initialTeams, user)(AuthorizedAccessContext(user)) _ <- datasetService.addUploader(dataset, user._id)(AuthorizedAccessContext(user)) - updatedInfo = uploadInfo.copy(newDatasetId = dataset._id.toString, path = dataset.path, layersToLink = Some(layersToLinkWithDatasetId)) // Update newDatasetId and path according to the newly created dataset. + updatedInfo = uploadInfo.copy( + newDatasetId = dataset._id.toString, + path = dataset.path, + layersToLink = Some(layersToLinkWithDatasetId)) // Update newDatasetId and path according to the newly created dataset. } yield Ok(Json.toJson(updatedInfo)) } } @@ -114,8 +118,9 @@ class WKRemoteDataStoreController @Inject()( } } - private def validateLayerToLink(layerIdentifier: LinkedLayerIdentifier, - requestingUser: User)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[LinkedLayerIdentifier] = + private def validateLayerToLink(layerIdentifier: LinkedLayerIdentifier, requestingUser: User)( + implicit ec: ExecutionContext, + m: MessagesProvider): Fox[LinkedLayerIdentifier] = for { organization <- organizationDAO.findOne(layerIdentifier.getOrganizationId)(GlobalAccessContext) ?~> Messages( "organization.notFound", diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 7754cac4ebd..2926e9fd4e3 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -63,7 +63,6 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, _ <- bool2Fox(!name.startsWith(".")) ?~> "dataset.layer.name.invalid.startsWithDot" } yield () - def createPreliminaryDataset(datasetName: String, organizationId: String, dataStore: DataStore): Fox[Dataset] = { val unreportedDatasource = UnusableDataSource(DataSourceId(datasetName, organizationId), notYetUploadedStatus) createDataset(dataStore, organizationId, datasetName, unreportedDatasource) diff --git a/app/models/task/TaskCreationParameters.scala b/app/models/task/TaskCreationParameters.scala index 1f318e79cb1..3c09d812af5 100644 --- a/app/models/task/TaskCreationParameters.scala +++ b/app/models/task/TaskCreationParameters.scala @@ -17,8 +17,7 @@ case class TaskParameters(taskTypeId: String, editRotation: Vec3Double, creationInfo: Option[String], description: Option[String], - baseAnnotation: Option[BaseAnnotation]) -{ + baseAnnotation: Option[BaseAnnotation]) { lazy val datasetName: String = dataSet } From 486ae67d9f000503acbe3aed3c9099fd5eaa5569 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20B=C3=BC=C3=9Femeyer?= Date: Mon, 14 Oct 2024 09:35:23 +0200 Subject: [PATCH 038/129] add hint about new parameter datasetId to csv / bulk task upload --- frontend/javascripts/admin/task/task_create_bulk_view.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/javascripts/admin/task/task_create_bulk_view.tsx b/frontend/javascripts/admin/task/task_create_bulk_view.tsx index bcb5e1c6b08..cab83fae700 100644 --- a/frontend/javascripts/admin/task/task_create_bulk_view.tsx +++ b/frontend/javascripts/admin/task/task_create_bulk_view.tsx @@ -309,7 +309,7 @@ function TaskCreateBulkView() { >