Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Allow to continue upload after browser restart for same user #7981

Merged
merged 29 commits into from
Aug 27, 2024
Merged
Show file tree
Hide file tree
Changes from 17 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
daf0b18
implement test route checking if chunk is present
MichaelBuessemeyer Aug 2, 2024
81e321a
Merge branch 'master' of github.com:scalableminds/webknossos into mor…
MichaelBuessemeyer Aug 7, 2024
74062d5
WIP: Implement ongoing uploads listing route
MichaelBuessemeyer Aug 7, 2024
f11e681
implement first version of robust resumable upload
MichaelBuessemeyer Aug 8, 2024
c68cb23
update build backend script command to remove autogenerated message f…
MichaelBuessemeyer Aug 8, 2024
10b4dc1
fix frontend typing
MichaelBuessemeyer Aug 8, 2024
e1b8ce3
refactor code
MichaelBuessemeyer Aug 8, 2024
da162dd
format backend
MichaelBuessemeyer Aug 8, 2024
675d320
Merge branch 'master' into more-robust-resumable-upload
MichaelBuessemeyer Aug 9, 2024
2eba2b9
fix test file route
MichaelBuessemeyer Aug 9, 2024
2cd362a
clear uploadId after successful upload in frontend;
MichaelBuessemeyer Aug 9, 2024
7730af5
format backend
MichaelBuessemeyer Aug 9, 2024
dad68b1
apply feedback
MichaelBuessemeyer Aug 13, 2024
9bc5844
Merge branch 'master' of github.com:scalableminds/webknossos into mor…
MichaelBuessemeyer Aug 16, 2024
9b903c6
fix expected format from backend when requesting ongoinguploads
MichaelBuessemeyer Aug 16, 2024
f5abfba
Merge branch 'master' of github.com:scalableminds/webknossos into mor…
MichaelBuessemeyer Aug 19, 2024
a1e6d8e
apply feedback
MichaelBuessemeyer Aug 21, 2024
2f2572c
remove unused import
MichaelBuessemeyer Aug 21, 2024
b127dc9
consistent renaming to unfinished uploads
MichaelBuessemeyer Aug 21, 2024
dc2c13e
Merge branch 'master' of github.com:scalableminds/webknossos into mor…
MichaelBuessemeyer Aug 22, 2024
1b32460
add sanity check that file names must be equal to initial upload
MichaelBuessemeyer Aug 23, 2024
9fc50b0
do not require same order of files
MichaelBuessemeyer Aug 26, 2024
015e89f
include file names in error to make searching for these files more eas…
MichaelBuessemeyer Aug 26, 2024
3173885
Merge branch 'master' of github.com:scalableminds/webknossos into mor…
MichaelBuessemeyer Aug 26, 2024
57162e1
Merge branch 'master' of github.com:scalableminds/webknossos into mor…
MichaelBuessemeyer Aug 27, 2024
8550faa
fix "files not matching initial upload try" error message
MichaelBuessemeyer Aug 27, 2024
8839129
Merge branch 'master' into more-robust-resumable-upload
MichaelBuessemeyer Aug 27, 2024
c42b737
add changelog entry
MichaelBuessemeyer Aug 27, 2024
e958434
Merge branch 'more-robust-resumable-upload' of github.com:scalablemin…
MichaelBuessemeyer Aug 27, 2024
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion app/controllers/DatasetController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -176,7 +176,7 @@ class DatasetController @Inject()(userService: UserService,
searchQuery,
request.identity.map(_._id),
recursive.getOrElse(false),
limit
limitOpt = limit
)
} yield Json.toJson(datasetInfos)
} else {
Expand Down
29 changes: 28 additions & 1 deletion app/controllers/WKRemoteDataStoreController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,11 @@ package controllers
import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, GlobalAccessContext}
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.controllers.JobExportProperties
import com.scalableminds.webknossos.datastore.models.OngoingUpload
import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource}
import com.scalableminds.webknossos.datastore.services.uploading.{LinkedLayerIdentifier, ReserveUploadInformation}
import com.scalableminds.webknossos.datastore.services.DataStoreStatus
import com.scalableminds.webknossos.datastore.services.uploading.{LinkedLayerIdentifier, ReserveUploadInformation}
import com.typesafe.scalalogging.LazyLogging
import mail.{MailchimpClient, MailchimpTag}

Expand All @@ -19,6 +20,7 @@ import models.folder.FolderDAO
import models.job.JobDAO
import models.organization.OrganizationDAO
import models.storage.UsedStorageService
import models.team.TeamDAO
import models.user.{MultiUserDAO, User, UserDAO, UserService}
import net.liftweb.common.Full
import play.api.i18n.{Messages, MessagesProvider}
Expand All @@ -41,6 +43,7 @@ class WKRemoteDataStoreController @Inject()(
datasetDAO: DatasetDAO,
userDAO: UserDAO,
folderDAO: FolderDAO,
teamDAO: TeamDAO,
jobDAO: JobDAO,
multiUserDAO: MultiUserDAO,
credentialDAO: CredentialDAO,
Expand Down Expand Up @@ -82,6 +85,30 @@ class WKRemoteDataStoreController @Inject()(
}
}

// Lists the requesting user's unfinished ("not yet uploaded") datasets so the
// datastore can offer to resume interrupted uploads (e.g. after a browser restart).
// Access is validated twice: the datastore authenticates itself via name/key,
// and the user via the bearer token; the user must belong to the given organization.
def getOngoingUploadsForUser(name: String, key: String, token: String, organizationName: String): Action[AnyContent] =
  Action.async { implicit request =>
    dataStoreService.validateAccess(name, key) { _ =>
      for {
        user <- bearerTokenService.userForToken(token)
        organization <- organizationDAO.findOneByName(organizationName)(GlobalAccessContext) ?~> Messages(
          "organization.notFound",
          user._organization) ~> NOT_FOUND
        // Users may only list unfinished uploads of their own organization.
        _ <- bool2Fox(organization._id == user._organization) ?~> "notAllowed" ~> FORBIDDEN
        datasets <- datasetService.getAllNotYetUploadedDatasetOfUser(user._id, user._organization)(
          GlobalAccessContext) ?~> "dataset.upload.couldNotLoadInProgressUploads"
        // Resolve the allowed team ids per dataset so the client can restore them on resume.
        teamIdsPerDataset <- Fox.combined(datasets.map(dataset => teamDAO.findAllowedTeamIdsForDataset(dataset.id)))
        ongoingUploads = datasets.zip(teamIdsPerDataset).map {
          case (d, teamIds) =>
            // The uploadId is known only to the datastore, which replaces this placeholder.
            new OngoingUpload("<filled-in by datastore>",
                              d.dataSourceId,
                              d.folderId.toString,
                              d.created,
                              teamIds.map(_.toString))
        }
      } yield Ok(Json.toJson(ongoingUploads))
    }
  }

private def validateLayerToLink(layerIdentifier: LinkedLayerIdentifier,
requestingUser: User)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Unit] =
for {
Expand Down
42 changes: 29 additions & 13 deletions app/models/dataset/Dataset.scala
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{
Category,
CoordinateTransformation,
CoordinateTransformationType,
DataSourceId,
ElementClass,
ThinPlateSplineCorrespondences,
DataLayerLike => DataLayer
Expand Down Expand Up @@ -79,7 +80,9 @@ case class DatasetCompactInfo(
isUnreported: Boolean,
colorLayerNames: List[String],
segmentationLayerNames: List[String],
)
) {
def dataSourceId = new DataSourceId(name, owningOrganization)
}

object DatasetCompactInfo {
implicit val jsonFormat: Format[DatasetCompactInfo] = Json.format[DatasetCompactInfo]
Expand Down Expand Up @@ -211,29 +214,36 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
folderIdOpt,
uploaderIdOpt,
searchQuery,
includeSubfolders)
includeSubfolders,
None,
None)
limitQuery = limitOpt.map(l => q"LIMIT $l").getOrElse(q"")
r <- run(q"SELECT $columns FROM $existingCollectionName WHERE $selectionPredicates $limitQuery".as[DatasetsRow])
parsed <- parseAll(r)
} yield parsed

def findAllCompactWithSearch(isActiveOpt: Option[Boolean],
isUnreported: Option[Boolean],
organizationIdOpt: Option[ObjectId],
folderIdOpt: Option[ObjectId],
uploaderIdOpt: Option[ObjectId],
searchQuery: Option[String],
requestingUserIdOpt: Option[ObjectId],
includeSubfolders: Boolean,
limitOpt: Option[Int])(implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] =
def findAllCompactWithSearch(isActiveOpt: Option[Boolean] = None,
isUnreported: Option[Boolean] = None,
organizationIdOpt: Option[ObjectId] = None,
folderIdOpt: Option[ObjectId] = None,
uploaderIdOpt: Option[ObjectId] = None,
searchQuery: Option[String] = None,
requestingUserIdOpt: Option[ObjectId] = None,
includeSubfolders: Boolean = false,
statusOpt: Option[String] = None,
createdSinceOpt: Option[Instant] = None,
limitOpt: Option[Int] = None,
)(implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] =
for {
selectionPredicates <- buildSelectionPredicates(isActiveOpt,
isUnreported,
organizationIdOpt,
folderIdOpt,
uploaderIdOpt,
searchQuery,
includeSubfolders)
includeSubfolders,
statusOpt,
createdSinceOpt)
limitQuery = limitOpt.map(l => q"LIMIT $l").getOrElse(q"")
query = q"""
SELECT
Expand Down Expand Up @@ -325,7 +335,9 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
folderIdOpt: Option[ObjectId],
uploaderIdOpt: Option[ObjectId],
searchQuery: Option[String],
includeSubfolders: Boolean)(implicit ctx: DBAccessContext): Fox[SqlToken] =
includeSubfolders: Boolean,
statusOpt: Option[String],
createdSinceOpt: Option[Instant])(implicit ctx: DBAccessContext): Fox[SqlToken] =
for {
accessQuery <- readAccessQuery
folderPredicate = folderIdOpt match {
Expand All @@ -339,6 +351,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
organizationPredicate = organizationIdOpt
.map(organizationId => q"_organization = $organizationId")
.getOrElse(q"TRUE")
statusPredicate = statusOpt.map(status => q"status = $status").getOrElse(q"TRUE")
createdSincePredicate = createdSinceOpt.map(createdSince => q"created >= $createdSince").getOrElse(q"TRUE")
searchPredicate = buildSearchPredicate(searchQuery)
isUnreportedPredicate = buildIsUnreportedPredicate(isUnreported)
} yield q"""
Expand All @@ -348,6 +362,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
AND ($isActivePredicate)
AND ($isUnreportedPredicate)
AND ($organizationPredicate)
AND ($statusPredicate)
AND ($createdSincePredicate)
AND $accessQuery
"""

Expand Down
17 changes: 15 additions & 2 deletions app/models/dataset/DatasetService.scala
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ import security.RandomIDGenerator
import utils.{ObjectId, WkConf}

import javax.inject.Inject
import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}

class DatasetService @Inject()(organizationDAO: OrganizationDAO,
Expand Down Expand Up @@ -69,6 +70,18 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
createDataset(dataStore, organizationName, unreportedDatasource)
}

// Returns compact info for the user's datasets whose upload was reserved but never
// finished (status matches notYetUploadedStatus), scoped to the given organization.
// Only uploads started within the last two weeks are listed; older reservations
// are considered stale and omitted.
def getAllNotYetUploadedDatasetOfUser(userId: ObjectId, organizationId: ObjectId)(
    implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] =
  datasetDAO.findAllCompactWithSearch(
    uploaderIdOpt = Some(userId),
    organizationIdOpt = Some(organizationId),
    isActiveOpt = Some(false),
    includeSubfolders = true,
    statusOpt = Some(notYetUploadedStatus),
    // Only list pending uploads from the last two weeks.
    createdSinceOpt = Some(Instant.now - (14 days))
  ) ?~> "dataset.list.fetchFailed"

private def createDataset(
dataStore: DataStore,
owningOrganization: String,
Expand All @@ -82,14 +95,14 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
val dataSourceHash = if (dataSource.isUsable) Some(dataSource.hashCode()) else None
for {
organization <- organizationDAO.findOneByName(owningOrganization)
orbanizationRootFolder <- folderDAO.findOne(organization._rootFolder)
organizationRootFolder <- folderDAO.findOne(organization._rootFolder)
MichaelBuessemeyer marked this conversation as resolved.
Show resolved Hide resolved
dataset = Dataset(
newId,
dataStore.name,
organization._id,
publication,
None,
orbanizationRootFolder._id,
organizationRootFolder._id,
dataSourceHash,
dataSource.defaultViewConfiguration,
adminViewConfiguration = None,
Expand Down
1 change: 1 addition & 0 deletions conf/messages
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ dataset.upload.Datastore.restricted=Your organization is not allowed to upload d
dataset.upload.validation.failed=Failed to validate Dataset information for upload.
dataset.upload.linkRestricted=Can only link layers of datasets that are either public or allowed to be administrated by your account
dataset.upload.invalidLinkedLayers=Could not link all requested layers
dataset.upload.couldNotLoadInProgressUploads=Could not load unfinished uploads of user.
dataset.upload.noFiles=Tried to finish upload with no files. May be a retry of a failed finish request, see previous errors.
dataset.upload.storageExceeded=Cannot upload dataset because the storage quota of the organization is exceeded.
dataset.upload.finishFailed=Failed to finalize dataset upload.
Expand Down
1 change: 1 addition & 0 deletions conf/webknossos.latest.routes
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@ PUT /datastores/:name/datasource
PUT /datastores/:name/datasources controllers.WKRemoteDataStoreController.updateAll(name: String, key: String)
PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String)
POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String)
GET /datastores/:name/getOngoingUploadsForUser controllers.WKRemoteDataStoreController.getOngoingUploadsForUser(name: String, key: String, token: String, organizationName: String)
POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean)
POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String)
GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: String)
Expand Down
31 changes: 30 additions & 1 deletion frontend/javascripts/admin/admin_rest_api.ts
Original file line number Diff line number Diff line change
Expand Up @@ -1273,7 +1273,7 @@ export function createResumableUpload(datastoreUrl: string, uploadId: string): P
(token) =>
// @ts-expect-error ts-migrate(2739) FIXME: Type 'Resumable' is missing the following properti... Remove this comment to see the full error message
new ResumableJS({
testChunks: false,
testChunks: true,
target: `${datastoreUrl}/data/datasets?token=${token}`,
chunkSize: 10 * 1024 * 1024, // 10MB
permanentErrors: [400, 403, 404, 409, 415, 500, 501],
Expand Down Expand Up @@ -1307,6 +1307,35 @@ export function reserveDatasetUpload(
);
}

// Shape of one unfinished upload as consumed by the frontend.
export type OngoingUpload = {
  uploadId: string;
  datasetId: { name: string; organizationName: string };
  folderId: string;
  created: number;
  allowedTeams: Array<string>;
};

// Legacy id shape still sent by the backend ("team" instead of "organizationName").
type OldDatasetIdFormat = { name: string; team: string };

// Fetches the user's unfinished uploads from the given datastore so they can be resumed.
export function getOngoingUploads(
  datastoreHost: string,
  organizationName: string,
): Promise<OngoingUpload[]> {
  return doWithToken(async (token) => {
    const ongoingUploads = (await Request.receiveJSON(
      `/data/datasets/getOngoingUploads?token=${token}&organizationName=${organizationName}`,
      {
        host: datastoreHost,
      },
    )) as Array<OngoingUpload & { dataSourceId: OldDatasetIdFormat }>;
    // Rename "team" to "organizationName" as this is the actual current naming.
    return ongoingUploads.map(({ dataSourceId: { name, team }, ...rest }) => ({
      ...rest,
      datasetId: { name, organizationName: team },
    }));
  });
}

export function finishDatasetUpload(
datastoreHost: string,
uploadInformation: ArbitraryObject,
Expand Down
19 changes: 17 additions & 2 deletions frontend/javascripts/admin/dataset/dataset_components.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -88,30 +88,36 @@ export function DatasetNameFormItem({
activeUser,
initialName,
label,
allowDuplicate,
disabled,
}: {
activeUser: APIUser | null | undefined;
initialName?: string;
label?: string;
allowDuplicate?: boolean;
disabled?: boolean;
}) {
return (
<FormItem
name="name"
label={label || "Dataset Name"}
hasFeedback
initialValue={initialName}
rules={getDatasetNameRules(activeUser)}
rules={getDatasetNameRules(activeUser, !allowDuplicate)}
validateFirst
>
<Input />
<Input disabled={disabled} />
</FormItem>
);
}
export function DatastoreFormItem({
datastores,
hidden,
disabled,
}: {
datastores: Array<APIDataStore>;
hidden?: boolean;
disabled?: boolean;
}) {
return (
<FormItem
Expand All @@ -131,6 +137,7 @@ export function DatastoreFormItem({
showSearch
placeholder="Select a Datastore"
optionFilterProp="label"
disabled={disabled}
style={{
width: "100%",
}}
Expand All @@ -147,12 +154,16 @@ export function AllowedTeamsFormItem({
isDatasetManagerOrAdmin,
selectedTeams,
setSelectedTeams,
afterFetchedTeams,
formRef,
disabled,
}: {
isDatasetManagerOrAdmin: boolean;
selectedTeams: APITeam | Array<APITeam>;
setSelectedTeams: (teams: APITeam | Array<APITeam>) => void;
afterFetchedTeams?: (arg0: Array<APITeam>) => void;
formRef: React.RefObject<FormInstance<any>>;
disabled?: boolean;
}) {
return (
<FormItemWithInfo
Expand All @@ -165,6 +176,7 @@ export function AllowedTeamsFormItem({
mode="multiple"
value={selectedTeams}
allowNonEditableTeams={isDatasetManagerOrAdmin}
disabled={disabled}
onChange={(selectedTeams) => {
if (formRef.current == null) return;

Expand All @@ -179,6 +191,9 @@ export function AllowedTeamsFormItem({
setSelectedTeams(selectedTeams);
}}
afterFetchedTeams={(fetchedTeams) => {
if (afterFetchedTeams) {
afterFetchedTeams(fetchedTeams);
}
if (!features().isWkorgInstance) {
return;
}
Expand Down
Loading