Offload gzip compression and bucket loading/decoding/serializing to web workers #3162

Merged
13 commits merged on Sep 10, 2018
Changes from 10 commits
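For orientation, this is the pattern the PR introduces throughout the codebase: a CPU-heavy, pure function is moved into its own *.worker.js module and wrapped with expose() from comlink_wrapper.js (see, for example, compress.worker.js at the end of this diff), while main-thread code obtains an asynchronous proxy to it via createWorker(). A minimal caller-side sketch, mirroring the usage added to libs/request.js below:

// @flow
// Sketch (not part of this diff): calling a worker-backed function from the main thread.
import { createWorker } from "oxalis/workers/comlink_wrapper";
import CompressWorker from "oxalis/workers/compress.worker";

const compress = createWorker(CompressWorker);

async function buildCompressedBody(payload: Object): Promise<Uint8Array> {
  // The proxy always returns a Promise, even though the exposed function is synchronous;
  // the actual gzip work happens inside the web worker.
  return compress(JSON.stringify(payload));
}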
1 change: 1 addition & 0 deletions .eslintrc.json
@@ -57,6 +57,7 @@
"no-restricted-properties": ["off", { "object": "Math", "property": "pow" }],
"no-restricted-syntax": "warn",
"no-restricted-syntax": ["error", "ForInStatement"],
"no-restricted-globals": "warn",
"no-underscore-dangle": "off",
"no-unused-vars": ["error", { "argsIgnorePattern": "^_" }],
"no-use-before-define": ["error", { "functions": false, "classes": false }],
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -22,6 +22,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.md).
- The fallback segmentation layer attribute of volume tracings is now persisted to NML/ZIP files. Upon re-upload, only volume tracings with this attribute will show a fallback layer. Use `tools/volumeAddFallbackLayer.py` to add this attribute to existing volume tracings. [#3088](https://github.com/scalableminds/webknossos/pull/3088)
- When splitting a tree, the split part that contains the initial node will now keep the original tree name and id. [#3145](https://github.com/scalableminds/webknossos/pull/3145)
- The welcome header will now also show on the default page if there are no existing organisations. [#3133](https://github.com/scalableminds/webknossos/pull/3133)
- Improved general performance of the tracing view by leveraging web workers. [#3162](https://github.com/scalableminds/webknossos/pull/3162)

### Fixed

10 changes: 10 additions & 0 deletions app/assets/javascripts/libs/handle_http_status.js
@@ -0,0 +1,10 @@
// @flow

const handleStatus = (response: Response): Promise<Response> => {
if (response.status >= 200 && response.status < 400) {
return Promise.resolve(response);
}
return Promise.reject(response);
};

export default handleStatus;
36 changes: 22 additions & 14 deletions app/assets/javascripts/libs/request.js
@@ -5,8 +5,14 @@

import _ from "lodash";
import Toast from "libs/toast";
import * as Utils from "libs/utils";
import { pingDataStoreIfAppropriate, pingMentionedDataStores } from "admin/datastore_health_check";
import { createWorker } from "oxalis/workers/comlink_wrapper";
import handleStatus from "libs/handle_http_status";
import FetchBufferWorker from "oxalis/workers/fetch_buffer.worker";
import CompressWorker from "oxalis/workers/compress.worker";

const fetchBufferViaWorker = createWorker(FetchBufferWorker);
const compress = createWorker(CompressWorker);

type methodType = "GET" | "POST" | "DELETE" | "HEAD" | "OPTIONS" | "PUT" | "PATCH";

@@ -15,6 +21,7 @@ type RequestOptions = {
method?: methodType,
timeout?: number,
compress?: boolean,
useWebworkerForArrayBuffer?: boolean,
};

export type RequestOptionsWithData<T> = RequestOptions & {
@@ -47,7 +54,7 @@ class Request {
let body = _.isString(options.data) ? options.data : JSON.stringify(options.data);

if (options.compress) {
body = await Utils.compress(body);
body = await compress(body);
if (options.headers == null) {
options.headers = {
"Content-Encoding": "gzip",
@@ -147,8 +154,11 @@ class Request {
receiveArraybuffer = (url: string, options: RequestOptions = {}): Promise<ArrayBuffer> =>
this.triggerRequest(
url,
_.defaultsDeep(options, { headers: { Accept: "application/octet-stream" } }),
response => response.arrayBuffer(),
_.defaultsDeep(options, {
headers: { Accept: "application/octet-stream" },
useWebworkerForArrayBuffer: true,
}),
// response => response.arrayBuffer(),
);

// IN: JSON
@@ -201,9 +211,14 @@
}
options.headers = headers;

let fetchPromise = fetch(url, options).then(this.handleStatus);
if (responseDataHandler != null) {
fetchPromise = fetchPromise.then(responseDataHandler);
let fetchPromise;
if (options.useWebworkerForArrayBuffer) {
fetchPromise = fetchBufferViaWorker(url, options);
} else {
fetchPromise = fetch(url, options).then(handleStatus);
if (responseDataHandler != null) {
fetchPromise = fetchPromise.then(responseDataHandler);
}
}

if (!options.doNotCatch) {
@@ -228,13 +243,6 @@
setTimeout(() => resolve("timeout"), timeout);
});

handleStatus = (response: Response): Promise<Response> => {
if (response.status >= 200 && response.status < 400) {
return Promise.resolve(response);
}
return Promise.reject(response);
};

handleError = (requestedUrl: string, error: Response | Error): Promise<void> => {
// Check whether this request failed due to a problematic
// datastore
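The new useWebworkerForArrayBuffer path delegates the fetch itself to fetch_buffer.worker.js, which is imported above but is not among the commits shown here. A plausible sketch, assuming the worker simply performs the fetch, applies the same status handling, and returns the body as an ArrayBuffer (this is also why a transfer handler for Headers is registered in comlink_wrapper.js further down, so request headers survive the trip into the worker):

// @flow
// Hypothetical sketch of oxalis/workers/fetch_buffer.worker.js; the real file is not shown in this diff.
import handleStatus from "libs/handle_http_status";
import { expose } from "./comlink_wrapper";

async function fetchBufferViaWorker(url: string, options: Object): Promise<ArrayBuffer> {
  const response = await fetch(url, options);
  // Rejects with the response for non-2xx/3xx statuses, mirroring libs/request.js.
  await handleStatus(response);
  return response.arrayBuffer();
}

export default expose(fetchBufferViaWorker);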
17 changes: 0 additions & 17 deletions app/assets/javascripts/libs/utils.js
@@ -3,7 +3,6 @@ import _ from "lodash";
import type { Vector3, Vector4, Vector6, BoundingBoxType } from "oxalis/constants";
import Maybe from "data.maybe";
import window, { document, location } from "libs/window";
import pako from "pako";
import naturalSort from "javascript-natural-sort";
import type { APIUserType } from "admin/api_flow_types";

@@ -406,22 +405,6 @@ export function addEventListenerWithDelegation(
return { [eventName]: wrapperFunc };
}

export async function compress(data: Uint8Array | string): Promise<Uint8Array> {
const DEFLATE_PUSH_SIZE = 65536;

const deflator = new pako.Deflate({ gzip: true });
for (let offset = 0; offset < data.length; offset += DEFLATE_PUSH_SIZE) {
// The second parameter to push indicates whether this is the last chunk to be deflated
deflator.push(
data.slice(offset, offset + DEFLATE_PUSH_SIZE),
offset + DEFLATE_PUSH_SIZE >= data.length,
);
// eslint-disable-next-line no-await-in-loop
await sleep(1);
}
return deflator.result;
}

export function median8(dataArray: Array<number>): number {
// Returns the median of an already *sorted* array of size 8 (e.g., with sortArray8)
return Math.round((dataArray[3] + dataArray[4]) / 2);
17 changes: 17 additions & 0 deletions app/assets/javascripts/oxalis/model/accessors/dataset_accessor.js
@@ -6,6 +6,8 @@ import constants, { Vector3Indicies, ModeValues } from "oxalis/constants";
import type { APIDatasetType } from "admin/api_flow_types";
import type { Vector3 } from "oxalis/constants";
import type { SettingsType, DataLayerType } from "oxalis/store";
import Maybe from "data.maybe";
import memoizeOne from "memoize-one";

export function getResolutions(dataset: APIDatasetType): Vector3[] {
// Different layers can have different resolutions. At the moment,
@@ -32,6 +34,21 @@ export function getResolutions(dataset: APIDatasetType): Vector3[] {
return mostExtensiveResolutions.concat(extendedResolutions);
}

function _getMaxZoomStep(maybeDataset: ?APIDatasetType): number {
const minimumZoomStepCount = 1;
const maxZoomstep = Maybe.fromNullable(maybeDataset)
.map(dataset =>
Math.max(
minimumZoomStepCount,
Math.max(0, ...getResolutions(dataset).map(r => Math.max(r[0], r[1], r[2]))),
),
)
.getOrElse(2 ** (minimumZoomStepCount + constants.DOWNSAMPLED_ZOOM_STEP_COUNT - 1));
return maxZoomstep;
}

export const getMaxZoomStep = memoizeOne(_getMaxZoomStep);

function getDataLayers(dataset: APIDatasetType): DataLayerType[] {
return dataset.dataSource.dataLayers;
}
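Moving getMaxZoomStep out of the flycam accessor also changes its argument from the whole OxalisState to just the dataset, which is what makes memoizeOne effective here: memoize-one caches the most recent result and returns it as long as the arguments are reference-equal, and the dataset object changes far less often than the state as a whole. A small sketch of that caching behavior (the numbers are made up):

// @flow
// Sketch (not part of this diff): how memoize-one caches by argument identity.
import memoizeOne from "memoize-one";

let computations = 0;
const maxResolution = memoizeOne((resolutions: Array<number>): number => {
  computations++;
  return Math.max(...resolutions);
});

const resolutions = [1, 2, 4, 8];
maxResolution(resolutions); // computed, computations === 1
maxResolution(resolutions); // same array reference, cached, computations still 1
maxResolution([1, 2, 4, 8]); // new array reference, recomputed, computations === 2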
18 changes: 2 additions & 16 deletions app/assets/javascripts/oxalis/model/accessors/flycam_accessor.js
@@ -2,14 +2,13 @@
import type { Vector3, OrthoViewType, OrthoViewMapType } from "oxalis/constants";
import type { FlycamType, OxalisState } from "oxalis/store";
import constants, { OrthoViews } from "oxalis/constants";
import Maybe from "data.maybe";
import Dimensions from "oxalis/model/dimensions";
import * as scaleInfo from "oxalis/model/scaleinfo";
import * as Utils from "libs/utils";
import type { Matrix4x4 } from "libs/mjs";
import { M4x4 } from "libs/mjs";
import * as THREE from "three";
import { getResolutions } from "oxalis/model/accessors/dataset_accessor";
import { getMaxZoomStep } from "oxalis/model/accessors/dataset_accessor";

// All methods in this file should use constants.PLANE_WIDTH instead of constants.VIEWPORT_WIDTH
// as the area that is rendered is only of size PLANE_WIDTH.
@@ -55,21 +54,8 @@ export function getZoomedMatrix(flycam: FlycamType): Matrix4x4 {
return M4x4.scale1(flycam.zoomStep, flycam.currentMatrix);
}

export function getMaxZoomStep(state: OxalisState): number {
const minimumZoomStepCount = 1;
const maxZoomstep = Maybe.fromNullable(state.dataset)
.map(dataset =>
Math.max(
minimumZoomStepCount,
Math.max(0, ...getResolutions(dataset).map(r => Math.max(r[0], r[1], r[2]))),
),
)
.getOrElse(2 ** (minimumZoomStepCount + constants.DOWNSAMPLED_ZOOM_STEP_COUNT - 1));
return maxZoomstep;
}

export function getRequestLogZoomStep(state: OxalisState): number {
const maxLogZoomStep = Math.log2(getMaxZoomStep(state));
const maxLogZoomStep = Math.log2(getMaxZoomStep(state.dataset));
const min = Math.min(state.datasetConfiguration.quality, maxLogZoomStep);
const value =
Math.ceil(Math.log2(state.flycam.zoomStep / MAX_ZOOM_STEP_DIFF)) +
@@ -1,18 +1,22 @@
// @flow

import Base64 from "base64-js";
import Request from "libs/request";
import Store from "oxalis/store";
import { pushSaveQueueAction } from "oxalis/model/actions/save_actions";
import { updateBucket } from "oxalis/model/sagas/update_actions";
import * as Utils from "libs/utils";
import { doWithToken } from "admin/admin_rest_api";
import type { DataBucket } from "oxalis/model/bucket_data_handling/bucket";
import type { Vector3, Vector4 } from "oxalis/constants";
import type { DataLayerType } from "oxalis/store";
import { getResolutions, isSegmentationLayer } from "oxalis/model/accessors/dataset_accessor.js";
import { getResolutions, isSegmentationLayer } from "oxalis/model/accessors/dataset_accessor";
import { bucketPositionToGlobalAddress } from "oxalis/model/helpers/position_converter";
import constants from "oxalis/constants";
import { createWorker } from "oxalis/workers/comlink_wrapper";
import DecodeFourBitWorker from "oxalis/workers/decode_four_bit.worker";
import ByteArrayToBase64Worker from "oxalis/workers/byte_array_to_base64.worker";

const decodeFourBit = createWorker(DecodeFourBitWorker);
const byteArrayToBase64 = createWorker(ByteArrayToBase64Worker);

export const REQUEST_TIMEOUT = 30000;

@@ -71,45 +75,25 @@ export async function requestFromStore(
},
);

let result = new Uint8Array(responseBuffer);
let resultBuffer = responseBuffer;
if (fourBit) {
result = decodeFourBit(result);
resultBuffer = await decodeFourBit(resultBuffer);
}
return result;
return new Uint8Array(resultBuffer);
});
}

function decodeFourBit(bufferArray: Uint8Array): Uint8Array {
// Expand 4-bit data
const newColors = new Uint8Array(bufferArray.length << 1);

let index = 0;
while (index < newColors.length) {
const value = bufferArray[index >> 1];
newColors[index] = value & 0b11110000;
index++;
newColors[index] = value << 4;
index++;
}

return newColors;
}

export async function sendToStore(batch: Array<DataBucket>): Promise<void> {
const YIELD_AFTER_X_BUCKETS = 3;
let counter = 0;
const items = [];
for (const bucket of batch) {
counter++;
// Do not block the main thread for too long as Base64.fromByteArray is performance heavy
// eslint-disable-next-line no-await-in-loop
if (counter % YIELD_AFTER_X_BUCKETS === 0) await Utils.sleep(1);
const bucketData = bucket.getData();
const bucketInfo = createSendBucketInfo(
bucket.zoomedAddress,
getResolutions(Store.getState().dataset),
);
items.push(updateBucket(bucketInfo, Base64.fromByteArray(bucketData)));
// eslint-disable-next-line no-await-in-loop
const base64 = await byteArrayToBase64(bucketData);
items.push(updateBucket(bucketInfo, base64));
}
Store.dispatch(pushSaveQueueAction(items, "volume"));
}
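The inline decodeFourBit helper removed above now runs in a web worker; decode_four_bit.worker.js is imported at the top of this file but is not among the commits shown here. A sketch under the assumption that the worker wraps the same expansion logic, operating on the ArrayBuffer that is awaited in requestFromStore:

// @flow
// Hypothetical sketch of oxalis/workers/decode_four_bit.worker.js, assumed to reuse the
// decodeFourBit logic deleted from this file.
import { expose } from "./comlink_wrapper";

function decodeFourBit(buffer: ArrayBuffer): ArrayBuffer {
  // Expand 4-bit data: every input byte becomes two output bytes.
  const bufferArray = new Uint8Array(buffer);
  const newColors = new Uint8Array(bufferArray.length << 1);
  let index = 0;
  while (index < newColors.length) {
    const value = bufferArray[index >> 1];
    newColors[index] = value & 0b11110000;
    index++;
    newColors[index] = value << 4;
    index++;
  }
  return newColors.buffer;
}

export default expose(decodeFourBit);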
@@ -2,7 +2,7 @@
import update from "immutability-helper";
import type { OxalisState } from "oxalis/store";
import type { ActionType } from "oxalis/model/actions/actions";
import { getMaxZoomStep } from "oxalis/model/accessors/flycam_accessor";
import { getMaxZoomStep } from "oxalis/model/accessors/dataset_accessor";
import { getBaseVoxelFactors } from "oxalis/model/scaleinfo";
import { M4x4 } from "libs/mjs";
import type { Matrix4x4 } from "libs/mjs";
@@ -114,7 +114,7 @@ function moveReducer(state: OxalisState, vector: Vector3): OxalisState {
export function zoomReducer(state: OxalisState, zoomStep: number): OxalisState {
return update(state, {
flycam: {
zoomStep: { $set: Utils.clamp(ZOOM_STEP_MIN, zoomStep, getMaxZoomStep(state)) },
zoomStep: { $set: Utils.clamp(ZOOM_STEP_MIN, zoomStep, getMaxZoomStep(state.dataset)) },
},
});
}
@@ -28,7 +28,7 @@ import {
LogSliderSetting,
} from "oxalis/view/settings/setting_input_views";
import { setUserBoundingBoxAction } from "oxalis/model/actions/annotation_actions";
import { getMaxZoomStep } from "oxalis/model/accessors/flycam_accessor";
import { getMaxZoomStep } from "oxalis/model/accessors/dataset_accessor";
import {
enforceSkeletonTracing,
getActiveNode,
@@ -369,7 +369,7 @@ const mapStateToProps = (state: OxalisState) => ({
userConfiguration: state.userConfiguration,
tracing: state.tracing,
zoomStep: state.flycam.zoomStep,
maxZoomStep: getMaxZoomStep(state),
maxZoomStep: getMaxZoomStep(state.dataset),
viewMode: state.temporaryConfiguration.viewMode,
controlMode: state.temporaryConfiguration.controlMode,
brushSize: state.temporaryConfiguration.brushSize,
10 changes: 10 additions & 0 deletions app/assets/javascripts/oxalis/workers/byte_array_to_base64.worker.js
@@ -0,0 +1,10 @@
// @flow
import Base64 from "base64-js";

import { expose } from "./comlink_wrapper";

function byteArrayToBase64(byteArray: Uint8Array): string {
return Base64.fromByteArray(byteArray);
}

export default expose(byteArrayToBase64);
50 changes: 50 additions & 0 deletions app/assets/javascripts/oxalis/workers/comlink_wrapper.js
@@ -0,0 +1,50 @@
import headersTransferHandler from "oxalis/workers/headers_transfer_handler";

const isNodeContext = typeof process !== "undefined" && process.title !== "browser";

function importComlink() {
if (!isNodeContext) {
// eslint-disable-next-line global-require
const { proxy, transferHandlers, expose: _expose } = require("comlinkjs");
return { proxy, transferHandlers, _expose };
} else {
return {
proxy: null,
transferHandlers: new Map(),
_expose: null,
};
}
}

const { proxy, transferHandlers, _expose } = importComlink();

// It's important that transferHandlers are registered in this wrapper module and
// not from another file. Otherwise, callers would need to register the handler
// in the main thread as well as in the web worker.
// Since this wrapper is imported from both sides, the handlers are also registered on both sides.
transferHandlers.set("Headers", headersTransferHandler);

export const createWorker: <Fn>(fn: Fn) => Fn = WorkerClass => {
if (isNodeContext) {
// In a node context (e.g., when executing tests), we don't create web workers
return WorkerClass;
}

return proxy(
// When importing a worker module, flow doesn't know that a special Worker class
// is imported. Instead, flow thinks that the declared function is
// directly imported. We exploit this by simply typing this createWorker function as an identity function
// (T => T). That way, we gain proper flow typing for functions executed in web workers. However,
// we need to suppress the following flow error for that to work.
// $FlowFixMe
new WorkerClass(),
);
};

export const expose = <T>(fn: T): T => {
// In a node context (e.g., when executing tests), we don't create web workers
if (!isNodeContext) {
_expose(fn, self);
}
return fn;
};
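The Headers transfer handler registered above lives in oxalis/workers/headers_transfer_handler.js, which is not included in these commits. In comlinkjs, a transfer handler is an object with canHandle, serialize, and deserialize; a hypothetical handler that round-trips a Headers object through a plain array of entries could look like this:

// @flow
// Hypothetical sketch of oxalis/workers/headers_transfer_handler.js; the real file is not shown here.
const headersTransferHandler = {
  canHandle(obj: mixed): boolean {
    return obj instanceof Headers;
  },
  serialize(headers: Headers): Array<[string, string]> {
    // Headers objects cannot be structured-cloned, so convert to a cloneable array of pairs.
    return Array.from(headers.entries());
  },
  deserialize(entries: Array<[string, string]>): Headers {
    return new Headers(entries);
  },
};

export default headersTransferHandler;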
10 changes: 10 additions & 0 deletions app/assets/javascripts/oxalis/workers/compress.worker.js
@@ -0,0 +1,10 @@
// @flow

import pako from "pako";
import { expose } from "./comlink_wrapper";

function compress(data: Uint8Array | string): Promise<Uint8Array> {
return pako.gzip(data);
}

export default expose(compress);
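Compared to the Utils.compress implementation deleted from libs/utils.js above, this worker no longer needs to chunk the deflate and sleep between pushes: that dance only existed to keep the main thread responsive, and inside a worker a single blocking call is fine.

// Sketch (not part of this diff): inside a worker, one blocking call replaces the chunked deflate.
import pako from "pako";

const gzipped: Uint8Array = pako.gzip("some payload"); // an equivalent gzip stream, no yielding needed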