diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 90d1529a616..f26c1a0416a 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -11,6 +11,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released [Commits](https://github.com/scalableminds/webknossos/compare/22.12.0...HEAD) ### Added +- Added sign in via OIDC. [#6534](https://github.com/scalableminds/webknossos/pull/6534) - Added a new datasets tab to the dashboard which supports managing datasets in folders. Folders can be organized hierarchically and datasets can be moved into these folders. Selecting a dataset will show dataset details in a sidebar. [#6591](https://github.com/scalableminds/webknossos/pull/6591) ### Changed diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index f921aec8990..3d0a151aae9 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -11,4 +11,6 @@ User-facing changes are documented in the [changelog](CHANGELOG.released.md). - Bulk task creation now needs the taskTypeId, the task type summary will no longer be accepted. If you have scripts generating CSVs for bulk task creation, they should not output task type summaries. [#6640](https://github.com/scalableminds/webknossos/pull/6640) ### Postgres Evolutions: + - [091-folders.sql](conf/evolutions/091-folders.sql) +- [092-oidc.sql](conf/evolutions/092-oidc.sql) diff --git a/app/controllers/AuthenticationController.scala b/app/controllers/AuthenticationController.scala index 93ee19c12be..4daa72a6717 100755 --- a/app/controllers/AuthenticationController.scala +++ b/app/controllers/AuthenticationController.scala @@ -4,10 +4,11 @@ import akka.actor.ActorSystem import com.mohiva.play.silhouette.api.actions.SecuredRequest import com.mohiva.play.silhouette.api.exceptions.ProviderException import com.mohiva.play.silhouette.api.services.AuthenticatorResult -import com.mohiva.play.silhouette.api.util.Credentials +import com.mohiva.play.silhouette.api.util.{Credentials, PasswordInfo} import com.mohiva.play.silhouette.api.{LoginInfo, Silhouette} import com.mohiva.play.silhouette.impl.providers.CredentialsProvider import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.tools.JsonHelper.validateJsValue import com.scalableminds.util.tools.{Fox, FoxImplicits, TextUtils} import models.analytics.{AnalyticsService, InviteEvent, JoinOrganizationEvent, SignupEvent} import models.annotation.AnnotationState.Cancelled @@ -27,7 +28,7 @@ import play.api.data.Forms.{email, _} import play.api.data.validation.Constraints._ import play.api.i18n.Messages import play.api.libs.json._ -import play.api.mvc.{Action, AnyContent, PlayBodyParsers} +import play.api.mvc.{Action, AnyContent, Cookie, PlayBodyParsers, Request, Result} import utils.{ObjectId, WkConf} import java.net.URLEncoder @@ -55,6 +56,7 @@ class AuthenticationController @Inject()( annotationDAO: AnnotationDAO, voxelyticsDAO: VoxelyticsDAO, wkSilhouetteEnvironment: WkSilhouetteEnvironment, + openIdConnectClient: OpenIdConnectClient, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with AuthForms @@ -83,7 +85,7 @@ class AuthenticationController @Inject()( errors ::= Messages("user.lastName.invalid") "" } - multiUserDAO.findOneByEmail(email)(GlobalAccessContext).toFox.futureBox.flatMap { + multiUserDAO.findOneByEmail(email)(GlobalAccessContext).futureBox.flatMap { case Full(_) => 
errors ::= Messages("user.email.alreadyInUse") Fox.successful(BadRequest(Json.obj("messages" -> Json.toJson(errors.map(t => Json.obj("error" -> t)))))) @@ -98,25 +100,15 @@ class AuthenticationController @Inject()( inviteBox.toOption, organizationName)(GlobalAccessContext) ?~> Messages("organization.notFound", signUpData.organization) autoActivate = inviteBox.toOption.map(_.autoActivate).getOrElse(organization.enableAutoVerify) - user <- userService.insert(organization._id, - email, - firstName, - lastName, - autoActivate, - passwordHasher.hash(signUpData.password)) ?~> "user.creation.failed" - multiUser <- multiUserDAO.findOne(user._multiUser)(GlobalAccessContext) - _ = analyticsService.track(SignupEvent(user, inviteBox.isDefined)) - _ <- Fox.runOptional(inviteBox.toOption)(i => - inviteService.deactivateUsedInvite(i)(GlobalAccessContext)) - brainDBResult <- brainTracing.registerIfNeeded(user, signUpData.password).toFox + _ <- createUser(organization, + email, + firstName, + lastName, + autoActivate, + Option(signUpData.password), + inviteBox, + registerBrainDB = true) } yield { - if (conf.Features.isDemoInstance) { - mailchimpClient.registerUser(user, multiUser, tag = MailchimpTag.RegisteredAsUser) - } else { - Mailer ! Send(defaultMails.newUserMail(user.name, email, brainDBResult, autoActivate)) - } - Mailer ! Send( - defaultMails.registerAdminNotifyerMail(user.name, email, brainDBResult, organization, autoActivate)) Ok } } @@ -126,6 +118,35 @@ class AuthenticationController @Inject()( ) } + private def createUser(organization: Organization, + email: String, + firstName: String, + lastName: String, + autoActivate: Boolean, + password: Option[String], + inviteBox: Box[Invite] = Empty, + registerBrainDB: Boolean = false)(implicit request: Request[AnyContent]): Fox[User] = { + val passwordInfo: PasswordInfo = + password.map(passwordHasher.hash).getOrElse(userService.getOpenIdConnectPasswordInfo) + for { + user <- userService.insert(organization._id, email, firstName, lastName, autoActivate, passwordInfo) ?~> "user.creation.failed" + multiUser <- multiUserDAO.findOne(user._multiUser)(GlobalAccessContext) + _ = analyticsService.track(SignupEvent(user, inviteBox.isDefined)) + _ <- Fox.runIf(inviteBox.isDefined)(Fox.runOptional(inviteBox.toOption)(i => + inviteService.deactivateUsedInvite(i)(GlobalAccessContext))) + brainDBResult <- Fox.runIf(registerBrainDB)(brainTracing.registerIfNeeded(user, password.getOrElse(""))) + _ = if (conf.Features.isDemoInstance) { + mailchimpClient.registerUser(user, multiUser, tag = MailchimpTag.RegisteredAsUser) + } else { + Mailer ! Send(defaultMails.newUserMail(user.name, email, brainDBResult.flatten, autoActivate)) + } + _ = Mailer ! 
Send( + defaultMails.registerAdminNotifyerMail(user.name, email, brainDBResult.flatten, organization, autoActivate)) + } yield { + user + } + } + def authenticate: Action[AnyContent] = Action.async { implicit request => signInForm.bindFromRequest.fold( bogusForm => Future.successful(BadRequest(bogusForm.toString)), @@ -430,7 +451,7 @@ class AuthenticationController @Inject()( request.identity match { case Some(user) => // logged in - // Check if the request we recieved was signed using our private sso-key + // Check if the request we received was signed using our private sso-key if (shaHex(ssoKey, sso) == sig) { val payload = new String(Base64.decodeBase64(sso)) val values = play.core.parsers.FormUrlEncodedParser.parse(payload) @@ -457,6 +478,58 @@ class AuthenticationController @Inject()( } } + lazy val absoluteOpenIdConnectCallbackURL = s"${conf.Http.uri}/api/auth/oidc/callback" + + def loginViaOpenIdConnect(): Action[AnyContent] = sil.UserAwareAction.async { implicit request => + openIdConnectClient.getRedirectUrl(absoluteOpenIdConnectCallbackURL).map(url => Ok(Json.obj("redirect_url" -> url))) + } + + private def loginUser(loginInfo: LoginInfo)(implicit request: Request[AnyContent]): Future[Result] = + userService.retrieve(loginInfo).flatMap { + case Some(user) if !user.isDeactivated => + for { + authenticator: CombinedAuthenticator <- combinedAuthenticatorService.create(loginInfo) + value: Cookie <- combinedAuthenticatorService.init(authenticator) + result: AuthenticatorResult <- combinedAuthenticatorService.embed(value, Redirect("/dashboard")) + _ <- multiUserDAO.updateLastLoggedInIdentity(user._multiUser, user._id)(GlobalAccessContext) + _ = userDAO.updateLastActivity(user._id)(GlobalAccessContext) + } yield result + case None => + Future.successful(BadRequest(Messages("error.noUser"))) + case Some(_) => Future.successful(BadRequest(Messages("user.deactivated"))) + } + + // Is called after user was successfully authenticated + def loginOrSignupViaOidc(oidc: OpenIdConnectClaimSet): Request[AnyContent] => Future[Result] = { + implicit request: Request[AnyContent] => + userService.userFromMultiUserEmail(oidc.email)(GlobalAccessContext).futureBox.flatMap { + case Full(user) => + val loginInfo = LoginInfo("credentials", user._id.toString) + loginUser(loginInfo) + case Empty => + for { + organization: Organization <- organizationService.findOneByInviteByNameOrDefault(None, None)( + GlobalAccessContext) + user <- createUser(organization, oidc.email, oidc.given_name, oidc.family_name, autoActivate = true, None) + // After registering, also login + loginInfo = LoginInfo("credentials", user._id.toString) + loginResult <- loginUser(loginInfo) + } yield loginResult + case _ => Future.successful(InternalServerError) + } + } + + def openIdCallback(): Action[AnyContent] = Action.async { implicit request => + for { + code <- openIdConnectClient.getToken( + absoluteOpenIdConnectCallbackURL, + request.queryString.get("code").flatMap(_.headOption).getOrElse("missing code"), + ) + oidc: OpenIdConnectClaimSet <- validateJsValue[OpenIdConnectClaimSet](code).toFox + user_result <- loginOrSignupViaOidc(oidc)(request) + } yield user_result + } + private def shaHex(key: String, valueToDigest: String): String = new HmacUtils(HmacAlgorithms.HMAC_SHA_256, key).hmacHex(valueToDigest) @@ -476,7 +549,7 @@ class AuthenticationController @Inject()( errors ::= Messages("user.lastName.invalid") "" } - multiUserDAO.findOneByEmail(email)(GlobalAccessContext).toFox.futureBox.flatMap { + 
multiUserDAO.findOneByEmail(email)(GlobalAccessContext).futureBox.flatMap { case Full(_) => errors ::= Messages("user.email.alreadyInUse") Fox.successful(BadRequest(Json.obj("messages" -> Json.toJson(errors.map(t => Json.obj("error" -> t)))))) diff --git a/app/controllers/InitialDataController.scala b/app/controllers/InitialDataController.scala index 6b5b1fbc292..5d9e5567d60 100644 --- a/app/controllers/InitialDataController.scala +++ b/app/controllers/InitialDataController.scala @@ -57,6 +57,7 @@ class InitialDataService @Inject()(userService: UserService, implicit val ctx: GlobalAccessContext.type = GlobalAccessContext private val defaultUserEmail = conf.WebKnossos.SampleOrganization.User.email + private val defaultUserEmail2 = conf.WebKnossos.SampleOrganization.User.email2 private val defaultUserPassword = conf.WebKnossos.SampleOrganization.User.password private val defaultUserToken = conf.WebKnossos.SampleOrganization.User.token private val additionalInformation = """**Sample Organization** @@ -75,9 +76,11 @@ Samplecountry PricingPlan.Custom, ObjectId.generate) private val organizationTeam = - Team(organizationTeamId, defaultOrganization._id, defaultOrganization.name, isOrganizationTeam = true) + Team(organizationTeamId, defaultOrganization._id, "Default", isOrganizationTeam = true) private val userId = ObjectId.generate private val multiUserId = ObjectId.generate + private val userId2 = ObjectId.generate + private val multiUserId2 = ObjectId.generate private val defaultMultiUser = MultiUser( multiUserId, defaultUserEmail, @@ -99,6 +102,27 @@ Samplecountry isDeactivated = false, lastTaskTypeId = None ) + private val defaultMultiUser2 = MultiUser( + multiUserId2, + defaultUserEmail2, + userService.createPasswordInfo(defaultUserPassword), + isSuperUser = false, + ) + private val defaultUser2 = User( + userId2, + multiUserId2, + defaultOrganization._id, + "Non-Admin", + "User", + System.currentTimeMillis(), + Json.obj(), + userService.createLoginInfo(userId2), + isAdmin = false, + isDatasetManager = false, + isUnlisted = false, + isDeactivated = false, + lastTaskTypeId = None + ) private val defaultPublication = Publication( ObjectId("5c766bec6c01006c018c7459"), Some(System.currentTimeMillis()), @@ -119,7 +143,8 @@ Samplecountry _ <- insertRootFolder() _ <- insertOrganization() _ <- insertTeams() - _ <- insertDefaultUser() + _ <- insertDefaultUser(defaultUserEmail, defaultMultiUser, defaultUser, true) + _ <- insertDefaultUser(defaultUserEmail2, defaultMultiUser2, defaultUser2, false) _ <- insertToken() _ <- insertTaskType() _ <- insertProject() @@ -143,19 +168,23 @@ Samplecountry case _ => folderDAO.insertAsRoot(Folder(defaultOrganization._rootFolder, folderService.defaultRootName)) } - private def insertDefaultUser(): Fox[Unit] = + private def insertDefaultUser(userEmail: String, + multiUser: MultiUser, + user: User, + isTeamManager: Boolean): Fox[Unit] = userService - .userFromMultiUserEmail(defaultUserEmail) + .userFromMultiUserEmail(userEmail) .futureBox .flatMap { case Full(_) => Fox.successful(()) case _ => for { - _ <- multiUserDAO.insertOne(defaultMultiUser) - _ <- userDAO.insertOne(defaultUser) - _ <- userExperiencesDAO.updateExperiencesForUser(defaultUser, Map("sampleExp" -> 10)) - _ <- userTeamRolesDAO.insertTeamMembership(defaultUser._id, - TeamMembership(organizationTeam._id, isTeamManager = true)) + _ <- multiUserDAO.insertOne(multiUser) + _ <- userDAO.insertOne(user) + _ <- userExperiencesDAO.updateExperiencesForUser(user, Map("sampleExp" -> 10)) + _ <- 
userTeamRolesDAO.insertTeamMembership( + user._id, + TeamMembership(organizationTeam._id, isTeamManager = isTeamManager)) _ = logger.info("Inserted default user") } yield () } diff --git a/app/controllers/ShortLinkController.scala b/app/controllers/ShortLinkController.scala index 17b70f876db..2a110e8b046 100644 --- a/app/controllers/ShortLinkController.scala +++ b/app/controllers/ShortLinkController.scala @@ -2,6 +2,7 @@ package controllers import com.mohiva.play.silhouette.api.Silhouette import com.scalableminds.util.tools.FoxImplicits +import io.swagger.annotations.{Api, ApiOperation, ApiParam} import models.shortlinks.{ShortLink, ShortLinkDAO} import oxalis.security.{RandomIDGenerator, WkEnv} import play.api.libs.json.Json @@ -11,12 +12,14 @@ import utils.{ObjectId, WkConf} import javax.inject.Inject import scala.concurrent.ExecutionContext +@Api class ShortLinkController @Inject()(shortLinkDAO: ShortLinkDAO, sil: Silhouette[WkEnv], wkConf: WkConf)( implicit ec: ExecutionContext, val bodyParsers: PlayBodyParsers) extends Controller with FoxImplicits { + @ApiOperation(hidden = true, value = "") def create: Action[String] = sil.SecuredAction.async(validateJson[String]) { implicit request => val longLink = request.body val _id = ObjectId.generate @@ -28,7 +31,12 @@ class ShortLinkController @Inject()(shortLinkDAO: ShortLinkDAO, sil: Silhouette[ } yield Ok(Json.toJson(inserted)) } - def getByKey(key: String): Action[AnyContent] = Action.async { implicit request => + @ApiOperation(value = "Information about a short link, including the original long link.", + nickname = "shortLinkByKey") + def getByKey( + @ApiParam(value = "key of the shortLink, this is the short random string identifying the link.", + example = "aU7yv5Aja99T0829") + key: String): Action[AnyContent] = Action.async { implicit request => for { shortLink <- shortLinkDAO.findOneByKey(key) } yield Ok(Json.toJson(shortLink)) diff --git a/app/controllers/TaskController.scala b/app/controllers/TaskController.scala index 3e502fb45f9..558194ea049 100755 --- a/app/controllers/TaskController.scala +++ b/app/controllers/TaskController.scala @@ -105,7 +105,7 @@ Expects: file.filename.toLowerCase.endsWith(".nml") || file.filename.toLowerCase.endsWith(".zip")) _ <- bool2Fox(inputFiles.nonEmpty) ?~> "nml.file.notFound" jsonString <- body.dataParts.get("formJSON").flatMap(_.headOption) ?~> "format.json.missing" - params <- JsonHelper.parseJsonToFox[NmlTaskParameters](jsonString) ?~> "task.create.failed" + params <- JsonHelper.parseAndValidateJson[NmlTaskParameters](jsonString) ?~> "task.create.failed" _ <- taskCreationService.assertBatchLimit(inputFiles.length, List(params.taskTypeId)) taskTypeIdValidated <- ObjectId.fromString(params.taskTypeId) ?~> "taskType.id.invalid" taskType <- taskTypeDAO.findOne(taskTypeIdValidated) ?~> "taskType.notFound" ~> NOT_FOUND diff --git a/app/models/annotation/Annotation.scala b/app/models/annotation/Annotation.scala index 5cb716b704e..be1e16deb2d 100755 --- a/app/models/annotation/Annotation.scala +++ b/app/models/annotation/Annotation.scala @@ -151,7 +151,7 @@ class AnnotationDAO @Inject()(sqlClient: SQLClient, annotationLayerDAO: Annotati for { state <- AnnotationState.fromString(r.state).toFox typ <- AnnotationType.fromString(r.typ).toFox - viewconfigurationOpt <- Fox.runOptional(r.viewconfiguration)(JsonHelper.parseJsonToFox[JsObject](_)) + viewconfigurationOpt <- Fox.runOptional(r.viewconfiguration)(JsonHelper.parseAndValidateJson[JsObject](_)) visibility <- 
AnnotationVisibility.fromString(r.visibility).toFox annotationLayers <- annotationLayerDAO.findAnnotationLayersFor(ObjectId(r._Id)) } yield { diff --git a/app/models/binary/DataSet.scala b/app/models/binary/DataSet.scala index 59c3b56c5ae..cc99914bf8e 100755 --- a/app/models/binary/DataSet.scala +++ b/app/models/binary/DataSet.scala @@ -81,10 +81,10 @@ class DataSetDAO @Inject()(sqlClient: SQLClient, for { scale <- parseScaleOpt(r.scale) defaultViewConfigurationOpt <- Fox.runOptional(r.defaultviewconfiguration)( - JsonHelper.parseJsonToFox[DataSetViewConfiguration](_)) + JsonHelper.parseAndValidateJson[DataSetViewConfiguration](_)) adminViewConfigurationOpt <- Fox.runOptional(r.adminviewconfiguration)( - JsonHelper.parseJsonToFox[DataSetViewConfiguration](_)) - details <- Fox.runOptional(r.details)(JsonHelper.parseJsonToFox[JsObject](_)) + JsonHelper.parseAndValidateJson[DataSetViewConfiguration](_)) + details <- Fox.runOptional(r.details)(JsonHelper.parseAndValidateJson[JsObject](_)) } yield { DataSet( ObjectId(r._Id), @@ -452,9 +452,9 @@ class DataSetDataLayerDAO @Inject()(sqlClient: SQLClient, dataSetResolutionsDAO: resolutions <- Fox.fillOption(standinResolutions)( dataSetResolutionsDAO.findDataResolutionForLayer(dataSetId, row.name) ?~> "Could not find resolution for layer") defaultViewConfigurationOpt <- Fox.runOptional(row.defaultviewconfiguration)( - JsonHelper.parseJsonToFox[LayerViewConfiguration](_)) + JsonHelper.parseAndValidateJson[LayerViewConfiguration](_)) adminViewConfigurationOpt <- Fox.runOptional(row.adminviewconfiguration)( - JsonHelper.parseJsonToFox[LayerViewConfiguration](_)) + JsonHelper.parseAndValidateJson[LayerViewConfiguration](_)) } yield { category match { case Category.segmentation => diff --git a/app/models/binary/explore/RemoteLayerExplorer.scala b/app/models/binary/explore/RemoteLayerExplorer.scala index 1e1b8852886..58a609bc77e 100644 --- a/app/models/binary/explore/RemoteLayerExplorer.scala +++ b/app/models/binary/explore/RemoteLayerExplorer.scala @@ -26,7 +26,7 @@ trait RemoteLayerExplorer extends FoxImplicits { protected def parseJsonFromPath[T: Reads](path: Path): Fox[T] = for { fileAsString <- tryo(new String(Files.readAllBytes(path), StandardCharsets.UTF_8)).toFox ?~> "Failed to read remote file" - parsed <- JsonHelper.parseJsonToFox[T](fileAsString) ?~> "Failed to validate json against data schema" + parsed <- JsonHelper.parseAndValidateJson[T](fileAsString) ?~> "Failed to validate json against data schema" } yield parsed protected def looksLikeSegmentationLayer(layerName: String, elementClass: ElementClass.Value): Boolean = diff --git a/app/models/organization/OrganizationService.scala b/app/models/organization/OrganizationService.scala index 868b879c21a..7f4e6115645 100644 --- a/app/models/organization/OrganizationService.scala +++ b/app/models/organization/OrganizationService.scala @@ -45,12 +45,12 @@ class OrganizationService @Inject()(organizationDAO: OrganizationDAO, ) } - def findOneByInviteByNameOrDefault(inviteOpt: Option[Invite], organizatioNameOpt: Option[String])( + def findOneByInviteByNameOrDefault(inviteOpt: Option[Invite], organizationNameOpt: Option[String])( implicit ctx: DBAccessContext): Fox[Organization] = inviteOpt match { case Some(invite) => organizationDAO.findOne(invite._organization) case None => - organizatioNameOpt match { + organizationNameOpt match { case Some(organizationName) => organizationDAO.findOneByName(organizationName) case None => for { diff --git a/app/models/user/MultiUser.scala 
b/app/models/user/MultiUser.scala index 4af4292fc09..3941adb0ba5 100644 --- a/app/models/user/MultiUser.scala +++ b/app/models/user/MultiUser.scala @@ -35,7 +35,7 @@ class MultiUserDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext def parse(r: MultiusersRow): Fox[MultiUser] = for { - novelUserExperienceInfos <- JsonHelper.parseJsonToFox[JsObject](r.noveluserexperienceinfos).toFox + novelUserExperienceInfos <- JsonHelper.parseAndValidateJson[JsObject](r.noveluserexperienceinfos).toFox theme <- Theme.fromString(r.selectedtheme).toFox } yield { MultiUser( diff --git a/app/models/user/User.scala b/app/models/user/User.scala index 9e39d6465f2..68a9708b543 100755 --- a/app/models/user/User.scala +++ b/app/models/user/User.scala @@ -2,7 +2,7 @@ package models.user import com.mohiva.play.silhouette.api.{Identity, LoginInfo} import com.scalableminds.util.accesscontext._ -import com.scalableminds.util.tools.JsonHelper.parseJsonToFox +import com.scalableminds.util.tools.JsonHelper.parseAndValidateJson import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.datasource.DataSetViewConfiguration.DataSetViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration @@ -65,7 +65,7 @@ class UserDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext) def parse(r: UsersRow): Fox[User] = for { - userConfiguration <- parseJsonToFox[JsObject](r.userconfiguration) + userConfiguration <- parseAndValidateJson[JsObject](r.userconfiguration) } yield { User( ObjectId(r._Id), diff --git a/app/models/user/UserService.scala b/app/models/user/UserService.scala index 6f9e7f8d6ab..7b5ed485fba 100755 --- a/app/models/user/UserService.scala +++ b/app/models/user/UserService.scala @@ -200,6 +200,9 @@ class UserService @Inject()(conf: WkConf, _ <- multiUserDAO.updatePasswordInfo(user._multiUser, passwordInfo)(GlobalAccessContext) } yield passwordInfo + def getOpenIdConnectPasswordInfo: PasswordInfo = + PasswordInfo("Empty", "") + def updateUserConfiguration(user: User, configuration: JsObject)(implicit ctx: DBAccessContext): Fox[Unit] = userDAO.updateUserConfiguration(user._id, configuration).map { result => userCache.invalidateUser(user._id) diff --git a/app/oxalis/security/OpenIdConnectClient.scala b/app/oxalis/security/OpenIdConnectClient.scala new file mode 100644 index 00000000000..c753a1f7d14 --- /dev/null +++ b/app/oxalis/security/OpenIdConnectClient.scala @@ -0,0 +1,139 @@ +package oxalis.security + +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{bool2Fox, jsResult2Fox, try2Fox} +import com.scalableminds.webknossos.datastore.rpc.RPC +import play.api.libs.json.{JsObject, Json, OFormat} +import pdi.jwt.{JwtJson, JwtOptions} +import play.api.libs.ws._ +import utils.WkConf + +import java.net.URLEncoder +import java.nio.charset.StandardCharsets +import java.security.spec.X509EncodedKeySpec +import java.security.{KeyFactory, PublicKey} +import java.util.Base64 +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +class OpenIdConnectClient @Inject()(rpc: RPC, conf: WkConf)(implicit executionContext: ExecutionContext) { + + lazy val oidcConfig: OpenIdConnectConfig = + OpenIdConnectConfig(conf.SingleSignOn.OpenIdConnect.providerUrl, conf.SingleSignOn.OpenIdConnect.clientId) + + /* + Build redirect URL to redirect to OIDC provider for auth request (https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest) + */ + def 
getRedirectUrl(callbackUrl: String): Fox[String] = + for { + _ <- bool2Fox(conf.Features.openIdConnectEnabled) ?~> "oidc.disabled" + _ <- bool2Fox(oidcConfig.isValid) ?~> "oidc.configuration.invalid" + redirectUrl <- discover.map { serverInfos => + def queryParams: Map[String, String] = Map( + "client_id" -> oidcConfig.clientId, + "redirect_uri" -> callbackUrl, + "scope" -> oidcConfig.scope, + "response_type" -> "code", + ) + serverInfos.authorization_endpoint + "?" + + queryParams.map(v => v._1 + "=" + URLEncoder.encode(v._2, StandardCharsets.UTF_8.toString)).mkString("&") + } + } yield redirectUrl + + /* + Fetches token from the oidc provider (https://openid.net/specs/openid-connect-core-1_0.html#TokenRequest), + fields described by https://www.rfc-editor.org/rfc/rfc6749#section-4.4.2 + */ + def getToken(redirectUrl: String, code: String): Fox[JsObject] = + for { + _ <- bool2Fox(conf.Features.openIdConnectEnabled) ?~> "oidc.disabled" + _ <- bool2Fox(oidcConfig.isValid) ?~> "oidc.configuration.invalid" + serverInfos <- discover + tokenResponse <- rpc(serverInfos.token_endpoint).postFormParseJson[OpenIdConnectTokenResponse]( + Map( + "grant_type" -> "authorization_code", + "client_id" -> oidcConfig.clientId, + "redirect_uri" -> redirectUrl, + "code" -> code + )) + newToken <- validateOpenIdConnectTokenResponse(tokenResponse) ?~> "failed to parse JWT" + } yield newToken + + /* + Discover endpoints of the provider (https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfig) + */ + def discover: Fox[OpenIdConnectProviderInfo] = + for { + response: WSResponse <- rpc(oidcConfig.discoveryUrl).get + serverInfo <- response.json.validate[OpenIdConnectProviderInfo](OpenIdConnectProviderInfo.format) + } yield serverInfo + + private def validateOpenIdConnectTokenResponse(tr: OpenIdConnectTokenResponse) = + publicKey match { + case Some(pk) => JwtJson.decodeJson(tr.access_token, pk).toFox + case None => + JwtJson.decodeJson(tr.access_token, JwtOptions.DEFAULT.copy(signature = false)).toFox + } + + lazy val publicKey: Option[PublicKey] = { + if (conf.SingleSignOn.OpenIdConnect.publicKey.isEmpty || conf.SingleSignOn.OpenIdConnect.publicKeyAlgorithm.isEmpty) { + None + } else { + val kf = KeyFactory.getInstance("RSA") + val base64EncodedKey = conf.SingleSignOn.OpenIdConnect.publicKey + val key = Base64.getDecoder.decode(base64EncodedKey.getBytes) + val spec = new X509EncodedKeySpec(key) + Some(kf.generatePublic(spec)) + } + + } + +} + +// Fields as specified by https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata +case class OpenIdConnectProviderInfo( + authorization_endpoint: String, + token_endpoint: String, +) + +object OpenIdConnectProviderInfo { + implicit val format: OFormat[OpenIdConnectProviderInfo] = Json.format[OpenIdConnectProviderInfo] +} + +case class OpenIdConnectConfig( + baseUrl: String, + clientId: String, + scope: String = "openid profile" +) { + + lazy val discoveryUrl: String = baseUrl + ".well-known/openid-configuration" + + def isValid: Boolean = + baseUrl.nonEmpty +} + +// Fields as specified by https://www.rfc-editor.org/rfc/rfc6749#section-5.1 +case class OpenIdConnectTokenResponse( + access_token: String, + token_type: String, + refresh_token: Option[String], + scope: Option[String] +) + +object OpenIdConnectTokenResponse { + implicit val format: OFormat[OpenIdConnectTokenResponse] = Json.format[OpenIdConnectTokenResponse] +} + +// Claims from https://openid.net/specs/openid-connect-core-1_0.html#StandardClaims +case class 
OpenIdConnectClaimSet(iss: String, + sub: String, + preferred_username: String, + given_name: String, + family_name: String, + email: String) { + def username: String = preferred_username +} + +object OpenIdConnectClaimSet { + implicit val format = Json.format[OpenIdConnectClaimSet] +} diff --git a/app/utils/WkConf.scala b/app/utils/WkConf.scala index 36b9afe1186..99f7ab61cd6 100644 --- a/app/utils/WkConf.scala +++ b/app/utils/WkConf.scala @@ -52,6 +52,7 @@ class WkConf @Inject()(configuration: Configuration) extends ConfigReader with L object User { val email: String = get[String]("webKnossos.sampleOrganization.user.email") + val email2: String = get[String]("webKnossos.sampleOrganization.user.email2") val password: String = get[String]("webKnossos.sampleOrganization.user.password") val token: String = get[String]("webKnossos.sampleOrganization.user.token") val isSuperUser: Boolean = get[Boolean]("webKnossos.sampleOrganization.user.isSuperUser") @@ -64,6 +65,15 @@ class WkConf @Inject()(configuration: Configuration) extends ConfigReader with L val children = List(User, Tasks, Cache, SampleOrganization) } + object SingleSignOn { + object OpenIdConnect { + val providerUrl: String = get[String]("singleSignOn.openIdConnect.providerUrl") + val clientId: String = get[String]("singleSignOn.openIdConnect.clientId") + val publicKey: String = get[String]("singleSignOn.openIdConnect.publicKey") + val publicKeyAlgorithm: String = get[String]("singleSignOn.openIdConnect.publicKeyAlgorithm") + } + } + object Features { val isDemoInstance: Boolean = get[Boolean]("features.isDemoInstance") val jobsEnabled: Boolean = get[Boolean]("features.jobsEnabled") @@ -73,6 +83,7 @@ class WkConf @Inject()(configuration: Configuration) extends ConfigReader with L val publicDemoDatasetUrl: String = get[String]("features.publicDemoDatasetUrl") val exportTiffMaxVolumeMVx: Long = get[Long]("features.exportTiffMaxVolumeMVx") val exportTiffMaxEdgeLengthVx: Long = get[Long]("features.exportTiffMaxEdgeLengthVx") + val openIdConnectEnabled: Boolean = get[Boolean]("features.openIdConnectEnabled") } object Datastore { diff --git a/conf/application.conf b/conf/application.conf index dccbe48860b..f5743ed1539 100644 --- a/conf/application.conf +++ b/conf/application.conf @@ -63,6 +63,7 @@ webKnossos { enabled = true user { email = "sample@scm.io" + email2 = "sample2@scm.io" password = "secret" token = "secretSampleUserToken" isSuperUser = true @@ -75,6 +76,16 @@ webKnossos { """ } +singleSignOn { + openIdConnect { + providerUrl = "http://localhost:8080/auth/realms/master/" + clientId = "myclient" + # Public Key to validate claim, for keycloak see Realm settings > keys + publicKey = "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAscUZB3Y5fiOfIdLC/31N1GufZ26bmB21V8D9Crg2bAHPD3g8qofRMg5Uo1+WuKuT5CJrCu+x0hIbA50GYb6E1V78MkYOaCbCT+xE+ec+Jv6zUJAaNJugx71oXI+X5e9kW/O8JSwIicSUYDz7LKvCklwn9/QmgetqGsBrAEOG+4WlwPnrZiKRaQl9V0vBOcwzD946Cbrgg3iLnryJ0pGVKHvWePsXR7Pt8hdA0FeA9V9hVd6gVHR2pHqg46kyPItNMwWTXENqJ4lbhgaoZ9sZpoMXIy1kjh3GXSXGOG+GeOOtOinr1K24I8HG9wsnEefjVSPDB6EvflPrhLKXMfI/JQIDAQAB" + publicKeyAlgorithm = "RSA" + } +} + # This part of the config is exposed as JSON via /api/features features { discussionBoard = "https://forum.image.sc/tag/webknossos" @@ -96,6 +107,7 @@ features { defaultToLegacyBindings = false # The Only valid item value is currently "ConnectomeView": optInTabs = [] + openIdConnectEnabled = false } # Serve annotations. 
Only active if the corresponding play module is enabled diff --git a/conf/evolutions/092-oidc.sql b/conf/evolutions/092-oidc.sql new file mode 100644 index 00000000000..bcbedd540dd --- /dev/null +++ b/conf/evolutions/092-oidc.sql @@ -0,0 +1,35 @@ +BEGIN transaction; + +DROP VIEW webknossos.userInfos; +DROP VIEW webknossos.multiUsers_; + +-- Cannot alter enum in transaction block, workaround required +ALTER TABLE webknossos.multiusers ALTER COLUMN passwordInfo_hasher TYPE VARCHAR(255); +ALTER TABLE webknossos.multiusers ALTER COLUMN passwordinfo_hasher SET DEFAULT 'SCrypt'; +DROP TYPE IF EXISTS webknossos.USER_PASSWORDINFO_HASHERS; +CREATE TYPE webknossos.USER_PASSWORDINFO_HASHERS AS ENUM ( + 'SCrypt', + 'Empty' + ); +ALTER TABLE webknossos.multiusers + ALTER COLUMN passwordInfo_hasher DROP DEFAULT, + ALTER COLUMN passwordInfo_hasher TYPE webknossos.USER_PASSWORDINFO_HASHERS + USING (passwordInfo_hasher::text::webknossos.USER_PASSWORDINFO_HASHERS), + ALTER COLUMN passwordinfo_hasher SET DEFAULT 'SCrypt'; + +UPDATE webknossos.releaseInformation SET schemaVersion = 92; + + +-- recreate dropped views +CREATE VIEW webknossos.multiUsers_ AS SELECT * FROM webknossos.multiUsers WHERE NOT isDeleted; +CREATE VIEW webknossos.userInfos AS +SELECT + u._id AS _user, m.email, u.firstName, u.lastname, o.displayName AS organization_displayName, + u.isDeactivated, u.isDatasetManager, u.isAdmin, m.isSuperUser, + u._organization, o.name AS organization_name, u.created AS user_created, + m.created AS multiuser_created, u._multiUser, m._lastLoggedInIdentity, u.lastActivity +FROM webknossos.users_ u + JOIN webknossos.organizations_ o ON u._organization = o._id + JOIN webknossos.multiUsers_ m on u._multiUser = m._id; + +COMMIT; diff --git a/conf/evolutions/reversions/092-oidc.sql b/conf/evolutions/reversions/092-oidc.sql new file mode 100644 index 00000000000..7a22a5347be --- /dev/null +++ b/conf/evolutions/reversions/092-oidc.sql @@ -0,0 +1,9 @@ +BEGIN transaction; + +UPDATE webknossos.releaseInformation SET schemaVersion = 91; + +-- Delete OIDC users +DELETE FROM webknossos.multiUsers WHERE passwordInfo_hasher = 'Empty'; +-- Enum is not altered (See evolution) + +COMMIT; diff --git a/conf/messages b/conf/messages index 2202e8d3e43..cb785412d32 100644 --- a/conf/messages +++ b/conf/messages @@ -58,6 +58,9 @@ user.id.notFound=We could not find a user id in the request. user.id.invalid=The provided user id is invalid. user.creation.failed=Failed to create user +oidc.disabled=OIDC is disabled +oidc.configuration.invalid=OIDC configuration is invalid + braintracing.new=An account on braintracing.org was created for you. You can use the same credentials as on webKnossos to login. braintracing.error=We could not atomatically create an account for you on braintracing.org. Please do it on your own. braintracing.exists=Great, you already have an account on braintracing.org. Please double check that you have uploaded all requested information. 
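
For reference on how the new `singleSignOn` block above is consumed: `OpenIdConnectConfig.discoveryUrl` is built by plain string concatenation, so `providerUrl` must keep its trailing slash, and `getRedirectUrl` URL-encodes the query parameters of the OIDC auth request. A minimal standalone Scala sketch of both constructions; the Keycloak-style authorization endpoint path and the `localhost:9000` callback host used in `main` are illustrative assumptions, not values from this patch:

```scala
import java.net.URLEncoder
import java.nio.charset.StandardCharsets

object OidcUrlSketch {
  // Mirrors OpenIdConnectConfig.discoveryUrl: plain concatenation, hence the
  // trailing slash on singleSignOn.openIdConnect.providerUrl in application.conf.
  def discoveryUrl(providerUrl: String): String =
    providerUrl + ".well-known/openid-configuration"

  // Mirrors the query construction in OpenIdConnectClient.getRedirectUrl
  // (https://openid.net/specs/openid-connect-core-1_0.html#AuthRequest).
  def authRequestUrl(authorizationEndpoint: String, clientId: String, callbackUrl: String): String = {
    val queryParams = Map(
      "client_id" -> clientId,
      "redirect_uri" -> callbackUrl,
      "scope" -> "openid profile",
      "response_type" -> "code"
    )
    authorizationEndpoint + "?" + queryParams
      .map { case (key, value) => key + "=" + URLEncoder.encode(value, StandardCharsets.UTF_8.toString) }
      .mkString("&")
  }

  def main(args: Array[String]): Unit = {
    // Prints http://localhost:8080/auth/realms/master/.well-known/openid-configuration
    println(discoveryUrl("http://localhost:8080/auth/realms/master/"))
    // The authorization endpoint path below is Keycloak-typical and assumed here;
    // in the patch it comes from the discovery document's authorization_endpoint field.
    println(
      authRequestUrl("http://localhost:8080/auth/realms/master/protocol/openid-connect/auth",
                     "myclient",
                     "http://localhost:9000/api/auth/oidc/callback"))
  }
}
```
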
diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index d95360548ff..056036d403e 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -27,6 +27,9 @@ POST /auth/changePassword POST /auth/resetPassword controllers.AuthenticationController.handleResetPassword GET /auth/logout controllers.AuthenticationController.logout GET /auth/sso controllers.AuthenticationController.singleSignOn(sso: String, sig: String) +GET /auth/oidc/login controllers.AuthenticationController.loginViaOpenIdConnect +# /auth/oidc/callback route is used literally in code +GET /auth/oidc/callback controllers.AuthenticationController.openIdCallback POST /auth/createOrganizationWithAdmin controllers.AuthenticationController.createOrganizationWithAdmin # Configurations diff --git a/docs/data_formats.md b/docs/data_formats.md index 4c3f5eb15ad..61cf5e84908 100644 --- a/docs/data_formats.md +++ b/docs/data_formats.md @@ -349,25 +349,23 @@ my_dataset # Dataset root All segment IDs belonging to the same super-voxel need to be listed in an array: ``` { - { - "name": "astrocytes", - "classes": [ - [ - 69381, - 69445, - 138248 - ], - [ - 138307, - 343831 - ], - [ - 348348, - 132432, - 387433, - 338330 - ] + "name": "astrocytes", + "classes": [ + [ + 69381, + 69445, + 138248 + ], + [ + 138307, + 343831 + ], + [ + 348348, + 132432, + 387433, + 338330 ] - } + ] } ``` diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 68a38ab5773..469ee38c032 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -2402,3 +2402,7 @@ export function sendHelpEmail(message: string) { method: "POST", }); } + +export function requestSingleSignOnLogin() { + return Request.receiveJSON("/api/auth/oidc/login"); +} diff --git a/frontend/javascripts/admin/auth/login_form.tsx b/frontend/javascripts/admin/auth/login_form.tsx index efd095d6854..c3e173e0eaf 100644 --- a/frontend/javascripts/admin/auth/login_form.tsx +++ b/frontend/javascripts/admin/auth/login_form.tsx @@ -3,12 +3,15 @@ import { LockOutlined, MailOutlined } from "@ant-design/icons"; import { Link } from "react-router-dom"; import React from "react"; import { getIsInIframe } from "libs/utils"; -import { loginUser } from "admin/admin_rest_api"; +import { loginUser, requestSingleSignOnLogin } from "admin/admin_rest_api"; import { setActiveUserAction } from "oxalis/model/actions/user_actions"; import Store from "oxalis/store"; import messages from "messages"; +import features from "features"; + const FormItem = Form.Item; const { Password } = Input; + type Props = { layout: "horizontal" | "vertical" | "inline"; onLoggedIn?: () => unknown; @@ -16,6 +19,10 @@ type Props = { style?: Record; }; +const DEFAULT_STYLE = { + maxWidth: 500, +}; + function LoginForm({ layout, onLoggedIn, hideFooter, style }: Props) { const [form] = Form.useForm(); const linkStyle = @@ -35,6 +42,7 @@ function LoginForm({ layout, onLoggedIn, hideFooter, style }: Props) { onLoggedIn(); } }; + const { openIdConnectEnabled } = features(); const iframeWarning = getIsInIframe() ? ( ) : null; return ( -
+
{iframeWarning}
- - - +
+ + + + {openIdConnectEnabled && ( + + + + )} +
{hideFooter ? null : ( - +
diff --git a/frontend/javascripts/dashboard/dashboard_view.tsx b/frontend/javascripts/dashboard/dashboard_view.tsx index 64de0a1d15d..4bd7122dd84 100644 --- a/frontend/javascripts/dashboard/dashboard_view.tsx +++ b/frontend/javascripts/dashboard/dashboard_view.tsx @@ -265,7 +265,7 @@ class DashboardView extends PureComponent { return ( {whatsNextBanner} -
+
{userHeader} diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index e8faf3a5092..8afe44d5c2c 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -72,6 +72,7 @@ import { getResolutionInfo, getVisibleSegmentationLayer, getMappingInfo, + ResolutionInfo, } from "oxalis/model/accessors/dataset_accessor"; import { getPosition, @@ -1443,7 +1444,7 @@ class DataApi { const minBucket = globalPositionToBucketPosition(bbox.min, resolutions, zoomStep); const topLeft = (bucketAddress: Vector4) => - bucketPositionToGlobalAddress(bucketAddress, resolutions); + bucketPositionToGlobalAddress(bucketAddress, new ResolutionInfo(resolutions)); const nextBucketInDim = (bucket: Vector4, dim: 0 | 1 | 2) => { const copy = bucket.slice(); diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts index ca193b6f9be..3970fc56f38 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts @@ -54,14 +54,12 @@ function minValue(array: Array): number { } export class ResolutionInfo { - resolutions: ReadonlyArray; - resolutionMap: Map; + readonly resolutions: ReadonlyArray; + readonly resolutionMap: ReadonlyMap; constructor(resolutions: Array) { this.resolutions = resolutions; - this.resolutionMap = new Map(); - - this._buildResolutionMap(); + this.resolutionMap = this._buildResolutionMap(); } _buildResolutionMap() { @@ -71,23 +69,21 @@ export class ResolutionInfo { // Therefore, the largest dim for each resolution has to be unique across all resolutions. // This function creates a map which maps from powerOfTwo (2**index) to resolution. 
const { resolutions } = this; + const resolutionMap = new Map(); if (resolutions.length !== _.uniq(resolutions.map(maxValue)).length) { throw new Error("Max dimension in resolutions is not unique."); } for (const resolution of resolutions) { - this.resolutionMap.set(maxValue(resolution), resolution); + resolutionMap.set(maxValue(resolution), resolution); } + return resolutionMap; } - getDenseResolutions(): Array { - return convertToDenseResolution(this.getResolutionList()); - } + getDenseResolutions = memoizeOne(() => convertToDenseResolution(this.getResolutionList())); - getResolutionList(): Array { - return Array.from(this.resolutionMap.values()); - } + getResolutionList = memoizeOne(() => Array.from(this.resolutionMap.values())); getResolutionsWithIndices(): Array<[number, Vector3]> { return _.sortBy( @@ -197,7 +193,7 @@ export class ResolutionInfo { return this.getResolutionsWithIndices().map((entry) => entry[0]); } - getClosestExistingIndex(index: number): number { + getClosestExistingIndex(index: number, errorMessage: string | null = null): number { if (this.hasIndex(index)) { return index; } @@ -220,7 +216,7 @@ export class ResolutionInfo { const bestIndexWithDistance = _.head(_.sortBy(indicesWithDistances, (entry) => entry[1])); if (bestIndexWithDistance == null) { - throw new Error("Couldn't find any resolution."); + throw new Error(errorMessage || "Couldn't find any resolution."); } return bestIndexWithDistance[0]; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts index b704bddb642..dabcee283f0 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts @@ -193,9 +193,10 @@ export class DataBucket { } getBoundingBox(): BoundingBoxType { - const resolutions = getResolutions(Store.getState().dataset); - const min = bucketPositionToGlobalAddress(this.zoomedAddress, resolutions); - const bucketResolution = resolutions[this.zoomedAddress[3]]; + const min = bucketPositionToGlobalAddress(this.zoomedAddress, this.cube.resolutionInfo); + const bucketResolution = this.cube.resolutionInfo.getResolutionByIndexOrThrow( + this.zoomedAddress[3], + ); const max: Vector3 = [ min[0] + Constants.BUCKET_WIDTH * bucketResolution[0], min[1] + Constants.BUCKET_WIDTH * bucketResolution[1], @@ -208,8 +209,7 @@ export class DataBucket { } getGlobalPosition(): Vector3 { - const resolutions = getResolutions(Store.getState().dataset); - return bucketPositionToGlobalAddress(this.zoomedAddress, resolutions); + return bucketPositionToGlobalAddress(this.zoomedAddress, this.cube.resolutionInfo); } getTopLeftInMag(): Vector3 { @@ -694,10 +694,9 @@ export class DataBucket { const zoomStep = getRequestLogZoomStep(Store.getState()); if (this.zoomedAddress[3] === zoomStep) { - const resolutions = getResolutions(Store.getState().dataset); // @ts-ignore this.visualizedMesh = window.addBucketMesh( - bucketPositionToGlobalAddress(this.zoomedAddress, resolutions), + bucketPositionToGlobalAddress(this.zoomedAddress, this.cube.resolutionInfo), this.zoomedAddress[3], this.visualizationColor, ); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_traversals.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_traversals.ts index 12ded6098b3..3e062d731c7 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_traversals.ts +++ 
b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket_traversals.ts @@ -26,7 +26,7 @@ export default function traverse( const lastBucket = globalPositionToBucketPosition(endPosition, resolutions, zoomStep); // The integer variables X and Y are initialized to the starting voxel coordinates. let [X, Y, Z] = uBucket; - const voxelSize = getBucketExtent(resolutions, zoomStep); + const voxelSize = getBucketExtent(resolutions[zoomStep]); // In addition, the variables stepX and stepY are initialized to either 1 or -1 indicating whether X and Y are // incremented or decremented as the ray crosses voxel boundaries (this is determined by the sign of the x and y components of → v). const [stepX, stepY, stepZ] = v.map((el) => Math.sign(el)); diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts index 929ceed8067..cd8f1fc9b27 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts @@ -546,8 +546,10 @@ class DataCube { // Create an array saving the labeled voxel of the current slice for the current bucket, if there isn't already one. const currentLabeledVoxelMap = bucketsWithLabeledVoxelsMap.get(currentBucket.zoomedAddress) || new Map(); - const resolutions = getResolutions(Store.getState().dataset); - const currentResolution = resolutions[currentBucket.zoomedAddress[3]]; + + const currentResolution = this.resolutionInfo.getResolutionByIndexOrThrow( + currentBucket.zoomedAddress[3], + ); const markUvwInSliceAsLabeled = ([firstCoord, secondCoord, thirdCoord]: Vector3) => { // Convert bucket local W coordinate to global W (both mag-dependent) diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index a1d3130553a..059cfdaed90 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -3,10 +3,11 @@ import { bucketPositionToGlobalAddress } from "oxalis/model/helpers/position_con import { createWorker } from "oxalis/workers/comlink_wrapper"; import { doWithToken } from "admin/admin_rest_api"; import { - getResolutions, isSegmentationLayer, getByteCountFromLayer, getMappingInfo, + ResolutionInfo, + getResolutionInfo, } from "oxalis/model/accessors/dataset_accessor"; import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; import { parseAsMaybe } from "libs/utils"; @@ -47,12 +48,12 @@ type RequestBucketInfo = SendBucketInfo & { // object as expected by the server on bucket request const createRequestBucketInfo = ( zoomedAddress: Vector4, - resolutions: Array, + resolutionInfo: ResolutionInfo, fourBit: boolean, applyAgglomerate: string | null | undefined, version: number | null | undefined, ): RequestBucketInfo => ({ - ...createSendBucketInfo(zoomedAddress, resolutions), + ...createSendBucketInfo(zoomedAddress, resolutionInfo), fourBit, ...(applyAgglomerate != null ? 
{ @@ -66,10 +67,13 @@ const createRequestBucketInfo = ( : {}), }); -function createSendBucketInfo(zoomedAddress: Vector4, resolutions: Array): SendBucketInfo { +function createSendBucketInfo( + zoomedAddress: Vector4, + resolutionInfo: ResolutionInfo, +): SendBucketInfo { return { - position: bucketPositionToGlobalAddress(zoomedAddress, resolutions), - mag: resolutions[zoomedAddress[3]], + position: bucketPositionToGlobalAddress(zoomedAddress, resolutionInfo), + mag: resolutionInfo.getResolutionByIndexOrThrow(zoomedAddress[3]), cubeSize: constants.BUCKET_WIDTH, }; } @@ -160,13 +164,13 @@ export async function requestFromStore( activeMapping.mappingType === "HDF5" ? activeMapping.mappingName : null; - const resolutions = getResolutions(state.dataset); + const resolutionInfo = getResolutionInfo(layerInfo.resolutions); const version = !isVolumeFallback && isSegmentation && maybeVolumeTracing != null ? maybeVolumeTracing.version : null; const bucketInfo = batch.map((zoomedAddress) => - createRequestBucketInfo(zoomedAddress, resolutions, fourBit, applyAgglomerates, version), + createRequestBucketInfo(zoomedAddress, resolutionInfo, fourBit, applyAgglomerates, version), ); try { @@ -234,10 +238,7 @@ export async function sendToStore(batch: Array, tracingId: string): const items: Array = await Promise.all( batch.map(async (bucket): Promise => { const data = bucket.getCopyOfData(); - const bucketInfo = createSendBucketInfo( - bucket.zoomedAddress, - getResolutions(Store.getState().dataset), - ); + const bucketInfo = createSendBucketInfo(bucket.zoomedAddress, bucket.cube.resolutionInfo); const byteArray = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); const compressedBase64 = await compressionPool.submit(byteArray); return updateBucket(bucketInfo, compressedBase64); diff --git a/frontend/javascripts/oxalis/model/helpers/position_converter.ts b/frontend/javascripts/oxalis/model/helpers/position_converter.ts index 70efde43ece..8dcb803706e 100644 --- a/frontend/javascripts/oxalis/model/helpers/position_converter.ts +++ b/frontend/javascripts/oxalis/model/helpers/position_converter.ts @@ -65,9 +65,9 @@ export function upsampleResolution(resolutions: Array, resolutionIndex: } export function bucketPositionToGlobalAddress( [x, y, z, resolutionIndex]: Vector4, - resolutions: Array, + resolutionInfo: ResolutionInfo, ): Vector3 { - const resolution = resolutions[resolutionIndex]; + const resolution = resolutionInfo.getResolutionByIndexOrThrow(resolutionIndex); return [ x * constants.BUCKET_WIDTH * resolution[0], y * constants.BUCKET_WIDTH * resolution[1], @@ -137,8 +137,12 @@ export function zoomedAddressToAnotherZoomStepWithInfo( targetResolutionIndex, ]; } -export function getBucketExtent(resolutions: Vector3[], resolutionIndex: number): Vector3 { - return bucketPositionToGlobalAddress([1, 1, 1, resolutionIndex], resolutions); +export function getBucketExtent(resolution: Vector3): Vector3 { + return [ + constants.BUCKET_WIDTH * resolution[0], + constants.BUCKET_WIDTH * resolution[1], + constants.BUCKET_WIDTH * resolution[2], + ]; } // This function returns all bucket addresses for which the fallback bucket // is the provided bucket. 
diff --git a/frontend/javascripts/oxalis/model/sagas/quick_select_saga.ts b/frontend/javascripts/oxalis/model/sagas/quick_select_saga.ts index f19acf5f640..ed86953e474 100644 --- a/frontend/javascripts/oxalis/model/sagas/quick_select_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/quick_select_saga.ts @@ -16,7 +16,10 @@ import type { Saga } from "oxalis/model/sagas/effect-generators"; import { call, put, takeEvery, race, take } from "typed-redux-saga"; import { select } from "oxalis/model/sagas/effect-generators"; import { V2, V3 } from "libs/mjs"; -import { getActiveSegmentationTracing } from "oxalis/model/accessors/volumetracing_accessor"; +import { + getActiveSegmentationTracing, + getSegmentationLayerForTracing, +} from "oxalis/model/accessors/volumetracing_accessor"; import { CancelQuickSelectAction, ComputeQuickSelectForRectAction, @@ -70,6 +73,7 @@ export default function* listenToQuickSelect(): Saga { ErrorHandling.notify(ex as Error); console.error(ex); } finally { + action.quickSelectGeometry.setCoordinates([0, 0, 0], [0, 0, 0]); yield* put(setIsQuickSelectActiveAction(false)); } }, @@ -136,10 +140,20 @@ function* performQuickSelect(action: ComputeQuickSelectForRectAction): Saga + getSegmentationLayerForTracing(state, volumeTracing), + ); const requestedZoomStep = yield* select((store) => getRequestLogZoomStep(store)); - const resolutionInfo = getResolutionInfo(colorLayer.resolutions); - const labeledZoomStep = resolutionInfo.getClosestExistingIndex(requestedZoomStep); + const resolutionInfo = getResolutionInfo( + // Ensure that a magnification is used which exists in the color layer as well as the + // target segmentation layer. + _.intersectionBy(colorLayer.resolutions, volumeLayer.resolutions, (mag) => mag.join("-")), + ); + const labeledZoomStep = resolutionInfo.getClosestExistingIndex( + requestedZoomStep, + "The visible color layer and the active segmentation layer don't have any magnifications in common. 
Cannot select segment.", + ); const labeledResolution = resolutionInfo.getResolutionByIndexOrThrow(labeledZoomStep); const boundingBoxTarget = boundingBoxMag1.fromMag1ToMag(labeledResolution); diff --git a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts index 8aefc3b4731..d56a93cf08c 100644 --- a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts +++ b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts @@ -1,7 +1,7 @@ // @ts-nocheck import _ from "lodash"; import "test/model/binary/layers/wkstore_adapter.mock.js"; -import { getBitDepth } from "oxalis/model/accessors/dataset_accessor"; +import { ResolutionInfo, getBitDepth } from "oxalis/model/accessors/dataset_accessor"; import { byteArrayToLz4Base64 } from "oxalis/workers/byte_array_to_lz4_base64.worker"; import datasetServerObject from "test/fixtures/dataset_server_object"; import mockRequire from "mock-require"; @@ -21,6 +21,10 @@ function setFourBit(bool) { const mockedCube = { isSegmentation: true, + resolutionInfo: new ResolutionInfo([ + [1, 1, 1], + [2, 2, 2], + ]), }; const StoreMock = { getState: () => ({ diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.md b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.md index f99aea5089c..f88e34177c6 100644 --- a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.md +++ b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.md @@ -76,6 +76,7 @@ Generated by [AVA](https://avajs.dev). hideNavbarLogin: false, isDemoInstance: false, jobsEnabled: false, + openIdConnectEnabled: false, optInTabs: [], publicDemoDatasetUrl: 'https://webknossos.org/datasets/scalable_minds/l4dense_motta_et_al_demo', taskReopenAllowedInSeconds: 30, diff --git a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.snap b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.snap index 84c560ec4ad..38ac85bcad4 100644 Binary files a/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.snap and b/frontend/javascripts/test/snapshots/public-test/test-bundle/test/backend-snapshot-tests/misc.e2e.js.snap differ diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 5357bb03832..8e33944bbff 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -563,6 +563,7 @@ export type APIFeatureToggles = { readonly exportTiffMaxEdgeLengthVx: number; readonly defaultToLegacyBindings: boolean; readonly optInTabs?: Array; + readonly openIdConnectEnabled?: boolean; }; export type APIJobCeleryState = "SUCCESS" | "PENDING" | "STARTED" | "FAILURE" | null; export type APIJobManualState = "SUCCESS" | "FAILURE" | null; diff --git a/frontend/stylesheets/_dashboard.less b/frontend/stylesheets/_dashboard.less index 2efc977a23b..d5a12a0550c 100644 --- a/frontend/stylesheets/_dashboard.less +++ b/frontend/stylesheets/_dashboard.less @@ -34,7 +34,7 @@ .welcome-header-content { max-width: 1600px; margin: auto; - padding: 80px 0px 20px; + padding: 20px 0px 20px; .wk-logo { width: 100%; diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 
173a23829fc..077787dbe74 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -54,6 +54,7 @@ object Dependencies { private val tika = "org.apache.tika" % "tika-core" % "1.5" private val jackson = "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.12.7" private val commonsCompress = "org.apache.commons" % "commons-compress" % "1.21" + private val jwt = "com.github.jwt-scala" %% "jwt-play-json" % "9.1.1" private val sql = Seq( "com.typesafe.slick" %% "slick" % "3.3.3", @@ -122,7 +123,8 @@ object Dependencies { trireme, triremeNode, xmlWriter, - woodstoxXml + woodstoxXml, + jwt ) ++ sql } diff --git a/tools/postgres/schema.sql b/tools/postgres/schema.sql index 11866c9ef4d..0a1628e70c7 100644 --- a/tools/postgres/schema.sql +++ b/tools/postgres/schema.sql @@ -19,7 +19,7 @@ START TRANSACTION; CREATE TABLE webknossos.releaseInformation ( schemaVersion BIGINT NOT NULL ); -INSERT INTO webknossos.releaseInformation(schemaVersion) values(91); +INSERT INTO webknossos.releaseInformation(schemaVersion) values(92); COMMIT TRANSACTION; @@ -290,7 +290,7 @@ CREATE TABLE webknossos.organizations( isDeleted BOOLEAN NOT NULL DEFAULT false ); -CREATE TYPE webknossos.USER_PASSWORDINFO_HASHERS AS ENUM ('SCrypt'); +CREATE TYPE webknossos.USER_PASSWORDINFO_HASHERS AS ENUM ('SCrypt', 'Empty'); CREATE TABLE webknossos.users( _id CHAR(24) PRIMARY KEY, _multiUser CHAR(24) NOT NULL, diff --git a/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala b/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala index c247ad7ca90..01ca1945691 100644 --- a/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala +++ b/util/src/main/scala/com/scalableminds/util/mvc/ExtendedController.scala @@ -177,6 +177,7 @@ trait MimeTypes { val xmlMimeType: String = "application/xml" val zipMimeType: String = "application/zip" val jsonMimeType: String = "application/json" + val formUrlEncodedMimeType: String = "application/x-www-form-urlencoded" } trait JsonResults extends JsonResultAttribues { diff --git a/util/src/main/scala/com/scalableminds/util/tools/JsonHelper.scala b/util/src/main/scala/com/scalableminds/util/tools/JsonHelper.scala index b703b108137..de61b262985 100644 --- a/util/src/main/scala/com/scalableminds/util/tools/JsonHelper.scala +++ b/util/src/main/scala/com/scalableminds/util/tools/JsonHelper.scala @@ -102,8 +102,11 @@ object JsonHelper extends BoxImplicits with LazyLogging { } } - def parseJsonToFox[T: Reads](s: String): Box[T] = - Json.parse(s).validate[T] match { + def parseAndValidateJson[T: Reads](s: String): Box[T] = + validateJsValue[T](Json.parse(s)) + + def validateJsValue[T: Reads](o: JsValue): Box[T] = + o.validate[T] match { case JsSuccess(parsed, _) => Full(parsed) case errors: JsError => diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala index 34260b4857a..1598526130e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/rpc/RPCRequest.scala @@ -91,6 +91,11 @@ class RPCRequest(val id: Int, val url: String, wsClient: WSClient) performRequest } + def postFormParseJson[T: Reads](parameters: Map[String, String]): Fox[T] = { + request = request.withBody(parameters).withMethod("POST") + parseJsonResponse(performRequest) + } + def postWithJsonResponse[T: Reads]: Fox[T] = { request = 
request.withMethod("POST") parseJsonResponse(performRequest) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 778d7b1c389..db22503d47a 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -95,7 +95,7 @@ class VolumeTracingService @Inject()( case a: UpdateBucketVolumeAction => if (tracing.getMappingIsEditable) { Fox.failure("Cannot mutate volume data in annotation with editable mapping.") - } else updateBucket(tracingId, tracing, a, updateGroup.version) + } else updateBucket(tracingId, tracing, a, updateGroup.version) ?~> "Failed to save volume data." case a: UpdateTracingVolumeAction => Fox.successful( tracing.copy( @@ -114,7 +114,7 @@ class VolumeTracingService @Inject()( case Empty => Fox.empty case f: Failure => - Fox.failure(f.msg) + f.toFox } } _ <- save(updatedTracing.copy(version = updateGroup.version), Some(tracingId), updateGroup.version) @@ -129,7 +129,7 @@ class VolumeTracingService @Inject()( action: UpdateBucketVolumeAction, updateGroupVersion: Long): Fox[VolumeTracing] = for { - _ <- assertMagIsValid(volumeTracing, action.mag) + _ <- assertMagIsValid(volumeTracing, action.mag) ?~> s"Received a mag-${action.mag.toMagLiteral(allowScalar = true)} bucket, which is invalid for this annotation." bucket = BucketPosition(action.position.x, action.position.y, action.position.z, action.mag) _ <- saveBucket(volumeTracingLayer(tracingId, volumeTracing), bucket, action.data, updateGroupVersion) } yield volumeTracing diff --git a/yarn.lock b/yarn.lock index aa61a171742..344f6b16ad8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -10857,12 +10857,7 @@ qs@6.9.7: resolved "https://registry.yarnpkg.com/qs/-/qs-6.9.7.tgz#4610846871485e1e048f44ae3b94033f0e675afe" integrity sha512-IhMFgUmuNpyRfxA90umL7ByLlgRXu6tIfKPpF5TmcfRLlLCckfP/g3IQmju6jjpu+Hh8rA+2p6A27ZSPOOHdKw== -qs@^6.4.0: - version "6.9.4" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.9.4.tgz#9090b290d1f91728d3c22e54843ca44aea5ab687" - integrity sha512-A1kFqHekCTM7cz0udomYUoYNWjBebHm/5wzU/XqrBRBNWectVH0QIiN+NEcZ0Dte5hvzHwbr8+XQmguPhJ6WdQ== - -qs@^6.9.4: +qs@^6.4.0, qs@^6.9.4: version "6.10.2" resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.2.tgz#c1431bea37fc5b24c5bdbafa20f16bdf2a4b9ffe" integrity sha512-mSIdjzqznWgfd4pMii7sHtaYF8rx8861hBO80SraY5GT0XQibWZWJSid0avzHGkDIZLImux2S5mXO0Hfct2QCw== @@ -10870,9 +10865,9 @@ qs@^6.9.4: side-channel "^1.0.4" qs@~6.5.2: - version "6.5.2" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" - integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== + version "6.5.3" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.3.tgz#3aeeffc91967ef6e35c0e488ef46fb296ab76aad" + integrity sha512-qxXIEh4pCGfHICj1mAJQ2/2XVZkjCDTcEgfoSQxc/fYivUZxTkk7L3bDBJSoNrEzXI17oUO5Dp07ktqE5KzczA== query-string@^6.13.8: version "6.14.1"
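
Taken together, the new flow introduced by this patch is: `GET /api/auth/oidc/login` returns a `redirect_url` pointing at the provider, the provider redirects back to `/api/auth/oidc/callback?code=...`, `openIdCallback` exchanges the code for a token, and the decoded JWT payload is validated into an `OpenIdConnectClaimSet` before `loginOrSignupViaOidc` signs the user in (creating the account with the new `Empty` password hasher if it does not exist yet). A minimal standalone sketch of that validation step, assuming a local copy of the claim-set case class and made-up claim values:

```scala
import play.api.libs.json.{JsError, JsSuccess, Json, OFormat}

object ClaimSetValidationSketch {
  // Local mirror of OpenIdConnectClaimSet from this patch, for illustration only.
  case class ClaimSet(iss: String,
                      sub: String,
                      preferred_username: String,
                      given_name: String,
                      family_name: String,
                      email: String)
  implicit val claimSetFormat: OFormat[ClaimSet] = Json.format[ClaimSet]

  def main(args: Array[String]): Unit = {
    // Example token payload shaped like the standard OIDC claims the callback consumes
    // (values are made up, not taken from this patch).
    val payload = Json.obj(
      "iss" -> "http://localhost:8080/auth/realms/master",
      "sub" -> "1234-abcd-5678",
      "preferred_username" -> "sample",
      "given_name" -> "Sample",
      "family_name" -> "User",
      "email" -> "sample@scm.io"
    )
    // JsonHelper.validateJsValue wraps exactly this validate call into a Box,
    // which openIdCallback then lifts into a Fox.
    payload.validate[ClaimSet] match {
      case JsSuccess(claims, _) => println(s"would look up or create user for ${claims.email}")
      case e: JsError           => println(s"claim set rejected: $e")
    }
  }
}
```
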