Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Allow to add datasets through wk-connect #3843

Merged
merged 19 commits into from
Mar 13, 2019
Merged
Show file tree
Hide file tree
Changes from 18 commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.md).
### Added
- webKnossos now comes with a list of sample datasets that can be automatically downloaded and imported from the menu. [#3725](https://github.com/scalableminds/webknossos/pull/3725)
- Added a shortcut (Q) and button in the actions dropdown to screenshot the tracing views. The screenshots will contain everything that is visible in the tracing views, so feel free to disable the crosshairs in the settings or toggle the tree visibility using the (1) and (2) shortcuts before triggering the screenshot. [#3834](https://github.com/scalableminds/webknossos/pull/3834)
- Neuroglancer precomputed datasets can now be added to webKnossos using the webknossos-connect (wk-connect) service. To set up a wk-connect datastore, follow the instructions in the [Readme](https://github.com/scalableminds/webknossos-connect). Afterwards, datasets can be added through "Add Dataset" - "Add Dataset via wk-connect". [#3843](https://github.com/scalableminds/webknossos/pull/3843)
- The dataset settings within the tracing view now allow selecting between different loading strategies ("best quality first" and "progressive quality"). Additionally, the rendering can use different magnifications as a fallback (instead of only one magnification). [#3801](https://github.com/scalableminds/webknossos/pull/3801)
- The mapping selection dropdown is now sorted alphabetically. [#3864](https://github.com/scalableminds/webknossos/pull/3864)

Expand Down
2 changes: 1 addition & 1 deletion MIGRATIONS.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ User-facing changes are documented in the [changelog](CHANGELOG.md).
- To ensure that the existing behavior for loading data is preserved ("best quality first" as opposed to the new "progressive quality" default) execute: `update webknossos.user_datasetconfigurations set configuration = configuration || jsonb '{"loadingStrategy":"BEST_QUALITY_FIRST"}'`. See [#3801](https://github.com/scalableminds/webknossos/pull/3801) for additional context.

### Postgres Evolutions:
-
- [041-add-datastore-isconnector.sql](conf/evolutions/041-add-datastore-isconnector.sql)


## [19.03.0](https://github.com/scalableminds/webknossos/releases/tag/19.03.0) - 2019-03-04
Expand Down
4 changes: 2 additions & 2 deletions app/controllers/InitialDataController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ Samplecountry
if (conf.Datastore.enabled) {
dataStoreDAO.findOneByName("localhost").futureBox.map { maybeStore =>
if (maybeStore.isEmpty) {
logger.info("inserting local datastore");
logger.info("inserting local datastore")
dataStoreDAO.insertOne(DataStore("localhost", conf.Http.uri, conf.Datastore.key))
}
}
Expand All @@ -215,7 +215,7 @@ Samplecountry
if (conf.Tracingstore.enabled) {
tracingStoreDAO.findOneByName("localhost").futureBox.map { maybeStore =>
if (maybeStore.isEmpty) {
logger.info("inserting local tracingstore");
logger.info("inserting local tracingstore")
tracingStoreDAO.insertOne(TracingStore("localhost", conf.Http.uri, conf.Tracingstore.key))
}
}
Expand Down
2 changes: 1 addition & 1 deletion app/models/binary/DataSetService.scala
Original file line number Diff line number Diff line change
Expand Up @@ -108,7 +108,7 @@ class DataSetService @Inject()(organizationDAO: OrganizationDAO,
.getWithJsonResponse[InboxDataSource]

def addForeignDataStore(name: String, url: String)(implicit ctx: DBAccessContext): Fox[Unit] = {
val dataStore = DataStore(name, url, "", isForeign = true) // the key can be "" because keys are only important for own DataStore. Own Datastores have a key that is not ""
val dataStore = DataStore(name, url, "", isForeign = true, isConnector = false) // the key can be "" because keys are only important for own DataStore. Own Datastores have a key that is not ""
for {
_ <- dataStoreDAO.insertOne(dataStore)
} yield ()
Expand Down
13 changes: 8 additions & 5 deletions app/models/binary/DataStore.scala
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,8 @@ case class DataStore(
key: String,
isScratch: Boolean = false,
isDeleted: Boolean = false,
isForeign: Boolean = false
isForeign: Boolean = false,
isConnector: Boolean = false
)

class DataStoreService @Inject()(dataStoreDAO: DataStoreDAO)(implicit ec: ExecutionContext)
Expand All @@ -32,7 +33,8 @@ class DataStoreService @Inject()(dataStoreDAO: DataStoreDAO)(implicit ec: Execut
"name" -> dataStore.name,
"url" -> dataStore.url,
"isForeign" -> dataStore.isForeign,
"isScratch" -> dataStore.isScratch
"isScratch" -> dataStore.isScratch,
"isConnector" -> dataStore.isConnector
))

def validateAccess[A](name: String)(block: (DataStore) => Future[Result])(implicit request: Request[A],
Expand Down Expand Up @@ -61,7 +63,8 @@ class DataStoreDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext
r.key,
r.isscratch,
r.isdeleted,
r.isforeign
r.isforeign,
r.isconnector
))

def findOneByKey(key: String)(implicit ctx: DBAccessContext): Fox[DataStore] =
Expand Down Expand Up @@ -95,8 +98,8 @@ class DataStoreDAO @Inject()(sqlClient: SQLClient)(implicit ec: ExecutionContext

def insertOne(d: DataStore): Fox[Unit] =
for {
_ <- run(sqlu"""insert into webknossos.dataStores(name, url, key, isScratch, isDeleted, isForeign)
values(${d.name}, ${d.url}, ${d.key}, ${d.isScratch}, ${d.isDeleted}, ${d.isForeign})""")
_ <- run(sqlu"""insert into webknossos.dataStores(name, url, key, isScratch, isDeleted, isForeign, isConnector)
values(${d.name}, ${d.url}, ${d.key}, ${d.isScratch}, ${d.isDeleted}, ${d.isForeign}, ${d.isConnector})""")
} yield ()

}
13 changes: 13 additions & 0 deletions conf/evolutions/041-add-datastore-isconnector.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
-- https://github.com/scalableminds/webknossos/pull/3843

-- Evolution 041: add an isConnector flag to dataStores so that
-- webknossos-connect (wk-connect) datastores can be distinguished
-- from regular datastores.

START TRANSACTION;

-- The view selects * from dataStores, so it must be dropped before
-- altering the table and recreated afterwards.
DROP VIEW webknossos.dataStores_;

-- Existing rows default to false (regular, non-connector datastores).
ALTER TABLE webknossos.dataStores ADD COLUMN isConnector BOOLEAN NOT NULL DEFAULT false;

-- Recreate the soft-delete view over the extended table.
CREATE VIEW webknossos.dataStores_ AS SELECT * FROM webknossos.dataStores WHERE NOT isDeleted;

UPDATE webknossos.releaseInformation SET schemaVersion = 41;

COMMIT TRANSACTION;
11 changes: 11 additions & 0 deletions conf/evolutions/reversions/041-add-datastore-isconnector.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
-- https://github.com/scalableminds/webknossos/pull/3843

-- Reversion of evolution 041: drop the isConnector column again and
-- restore the previous schema version.

START TRANSACTION;

-- The view selects * from dataStores, so it must be dropped before
-- altering the table and recreated afterwards.
DROP VIEW webknossos.dataStores_;

ALTER TABLE webknossos.dataStores DROP COLUMN isConnector;

-- Recreate the soft-delete view over the reverted table.
CREATE VIEW webknossos.dataStores_ AS SELECT * FROM webknossos.dataStores WHERE NOT isDeleted;

UPDATE webknossos.releaseInformation SET schemaVersion = 40;

COMMIT TRANSACTION;
18 changes: 16 additions & 2 deletions frontend/javascripts/admin/admin_rest_api.js
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ import {
type ServerSkeletonTracing,
type ServerTracing,
type ServerVolumeTracing,
type WkConnectDatasetConfig,
} from "admin/api_flow_types";
import type { DatasetConfiguration } from "oxalis/store";
import type { NewTask, TaskCreationResponse } from "admin/task/task_create_bulk_view";
Expand Down Expand Up @@ -770,15 +771,28 @@ export function getDatasetAccessList(datasetId: APIDatasetId): Promise<Array<API
);
}

export async function addDataset(datasetConfig: DatasetConfig): Promise<void> {
await doWithToken(token =>
export function addDataset(datasetConfig: DatasetConfig): Promise<void> {
return doWithToken(token =>
Request.sendMultipartFormReceiveJSON(`/data/datasets?token=${token}`, {
data: datasetConfig,
host: datasetConfig.datastore,
}),
);
}

export function addWkConnectDataset(
datastoreHost: string,
datasetConfig: WkConnectDatasetConfig,
): Promise<void> {
return doWithToken(token =>
Request.sendJSONReceiveJSON(`/data/datasets?token=${token}`, {
data: datasetConfig,
host: datastoreHost,
method: "POST",
}),
);
}

export async function addForeignDataSet(
dataStoreName: string,
url: string,
Expand Down
19 changes: 18 additions & 1 deletion frontend/javascripts/admin/api_flow_types.js
Original file line number Diff line number Diff line change
Expand Up @@ -74,8 +74,9 @@ export type APIDataSource = APIDataSourceBase & {
export type APIDataStore = {
+name: string,
+url: string,
+isForeign?: boolean,
+isForeign: boolean,
+isScratch: boolean,
+isConnector: boolean,
};

export type APITracingStore = {
Expand Down Expand Up @@ -369,6 +370,22 @@ export type DatasetConfig = {
+zipFile: File,
};

type WkConnectLayer = {
// This is the source URL of the layer, should start with gs://, http:// or https://
source: string,
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It's not clear to me what source is. Is it an url? Then I'd suggest sourceUrl.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Unfortunately this is coming from Neuroglancer and wk-connect adopted the same format for simplicity's sake, but yes it is the URL.
If we really want to we could change it, but I think adding a comment will be easier

type: "image" | "segmentation",
};

export type WkConnectDatasetConfig = {
neuroglancer: {
[organizationName: string]: {
[datasetName: string]: {
layers: { [layerName: string]: WkConnectLayer },
},
},
},
};

export type APITimeTracking = {
time: string,
timestamp: number,
Expand Down
108 changes: 71 additions & 37 deletions frontend/javascripts/admin/dataset/dataset_add_view.js
Original file line number Diff line number Diff line change
Expand Up @@ -5,14 +5,17 @@ import React from "react";
import { connect } from "react-redux";
import _ from "lodash";

import type { APIUser } from "admin/api_flow_types";
import type { APIUser, APIDataStore } from "admin/api_flow_types";
import type { OxalisState } from "oxalis/store";
import { enforceActiveUser } from "oxalis/model/accessors/user_accessor";
import DatasetAddForeignView from "admin/dataset/dataset_add_foreign_view";
import DatasetAddWkConnectView from "admin/dataset/dataset_add_wk_connect_view";
import DatasetUploadView from "admin/dataset/dataset_upload_view";
import SampleDatasetsModal from "dashboard/dataset/sample_datasets_modal";
import features from "features";
import { getDatastores } from "admin/admin_rest_api";
import renderIndependently from "libs/render_independently";
import { useFetch } from "libs/react_helpers";

const { TabPane } = Tabs;

Expand All @@ -34,49 +37,80 @@ const renderSampleDatasetsModal = (user: APIUser, history: RouterHistory) => {
));
};

const DatasetAddView = ({ history, activeUser }: PropsWithRouter) => (
<React.Fragment>
<Tabs defaultActiveKey="1" className="container">
<TabPane
tab={
<span>
<Icon type="upload" />
Upload Dataset
</span>
}
key="1"
>
<DatasetUploadView
onUploaded={(organization: string, datasetName: string) => {
const url = `/datasets/${organization}/${datasetName}/import`;
history.push(url);
}}
/>
</TabPane>
{features().addForeignDataset ? (
const fetchCategorizedDatastores = async (): Promise<{
own: Array<APIDataStore>,
wkConnect: Array<APIDataStore>,
}> => {
const fetchedDatastores = await getDatastores();
return {
own: fetchedDatastores.filter(ds => !ds.isForeign && !ds.isConnector),
wkConnect: fetchedDatastores.filter(ds => ds.isConnector),
};
};

const DatasetAddView = ({ history, activeUser }: PropsWithRouter) => {
const datastores = useFetch(fetchCategorizedDatastores, { own: [], wkConnect: [] }, []);

const handleDatasetAdded = (organization: string, datasetName: string) => {
const url = `/datasets/${organization}/${datasetName}/import`;
history.push(url);
};

return (
<React.Fragment>
<Tabs defaultActiveKey="1" className="container">
<TabPane
tab={
<span>
<Icon type="bars" />
Add foreign Dataset
<Icon type="upload" />
Upload Dataset
</span>
}
key="2"
key="1"
>
<DatasetAddForeignView onAdded={() => history.push("/dashboard")} />
<DatasetUploadView datastores={datastores.own} onUploaded={handleDatasetAdded} />
</TabPane>
) : null}
</Tabs>
<div style={{ textAlign: "center" }}>
<p>or</p>
<p>
<a href="#" onClick={() => renderSampleDatasetsModal(activeUser, history)}>
Add a Sample Dataset
</a>
</p>
</div>
</React.Fragment>
);
{datastores.wkConnect.length > 0 && (
<TabPane
tab={
<span>
<Icon type="plus" />
Add Dataset via wk-connect
</span>
}
key="2"
>
<DatasetAddWkConnectView
datastores={datastores.wkConnect}
onAdded={handleDatasetAdded}
/>
</TabPane>
)}
{features().addForeignDataset && (
<TabPane
tab={
<span>
<Icon type="bars" />
Add Foreign Dataset
</span>
}
key="3"
>
<DatasetAddForeignView onAdded={() => history.push("/dashboard")} />
</TabPane>
)}
</Tabs>
<div style={{ textAlign: "center" }}>
<p>or</p>
<p>
<a href="#" onClick={() => renderSampleDatasetsModal(activeUser, history)}>
Add a Sample Dataset
</a>
</p>
</div>
</React.Fragment>
);
};

const mapStateToProps = (state: OxalisState) => ({
activeUser: enforceActiveUser(state.activeUser),
Expand Down
Loading