Skip to content

Commit

Permalink
Allow batch reading of mesh chunks (#7001)
Browse files Browse the repository at this point in the history
* Implement reading of mesh chunks with list input

* Remove length of jumptable

* Move meshfile parameter out of list of requests

* refactor mesh loading saga into sub sagas

* use new batched mesh loading (still fails)

* debug and fix incorrect jumptable

* clean up

* refactor jump table decoding

* implement dynamic batching and add benchmark code

* clean up

* remove comment

* add spec for chunkDynamically

* don't compute jump table in back-end as front-end can compute this on its own

* remove only modifier

* update draco decoder to v1.5.6 to fix buggy error propagation

* better error reporting; also avoid crashing the entire batch if one chunk fails

* clean up

* clean up

* wording

* clean up

* update changelog

* add timing code

* use serialCombined

* add timing code

* temporarily disable most CI checks

* Revert "temporarily disable most CI checks"

This reverts commit da694d8.

* temporarily disable most CI checks

* format

* Revert "temporarily disable most CI checks"

This reverts commit da54174.

* add comment explaining where draco file came from

* fix typo and remove timing code

* Apply suggestions from code review

---------

Co-authored-by: Philipp Otto <[email protected]>
Co-authored-by: Philipp Otto <[email protected]>
  • Loading branch information
3 people authored May 8, 2023
1 parent 3a3795a commit 58bce4e
Show file tree
Hide file tree
Showing 14 changed files with 543 additions and 316 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- In addition to drag and drop, the selected tree(s) in the Skeleton tab can also be moved into another group by right-clicking the target group and selecting "Move selected tree(s) here". [#7005](https://github.com/scalableminds/webknossos/pull/7005)

### Changed
- Loading of precomputed meshes got significantly faster (especially when using a mesh file for an oversegmentation with an applied agglomerate mapping). [#7001](https://github.com/scalableminds/webknossos/pull/7001)
- Improved speed of proofreading by only reloading affected areas after a split or merge. [#7050](https://github.com/scalableminds/webknossos/pull/7050)

### Fixed
Expand Down
49 changes: 37 additions & 12 deletions frontend/javascripts/admin/api/mesh_v3.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import Request from "libs/request";
import _ from "lodash";
import { Vector3, Vector4 } from "oxalis/constants";
import { APIDatasetId } from "types/api_flow_types";
import { doWithToken } from "./token";
Expand Down Expand Up @@ -62,26 +63,50 @@ export function getMeshfileChunksForSegment(
});
}

// Describes a single chunk to read from a mesh file, addressed by its
// byte range within that file.
type MeshChunkDataRequestV3 = {
  byteOffset: number;
  byteSize: number;
};

// A batch of chunk requests that all target the same mesh file.
// The mesh file name is hoisted out of the individual requests since
// it is shared by the whole batch.
type MeshChunkDataRequestV3List = {
  // Fixed: use the primitive type `string`, not the boxed wrapper `String`.
  meshFile: string;
  requests: MeshChunkDataRequestV3[];
};

export function getMeshfileChunkData(
dataStoreUrl: string,
datasetId: APIDatasetId,
layerName: string,
meshFile: string,
byteOffset: number,
byteSize: number,
): Promise<ArrayBuffer> {
batchDescription: MeshChunkDataRequestV3List,
): Promise<ArrayBuffer[]> {
return doWithToken(async (token) => {
const data = await Request.sendJSONReceiveArraybufferWithHeaders(
const dracoDataChunks = await Request.sendJSONReceiveArraybuffer(
`${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/meshes/formatVersion/3/chunks/data?token=${token}`,
{
data: {
meshFile,
byteOffset,
byteSize,
},
useWebworkerForArrayBuffer: false,
data: batchDescription,
useWebworkerForArrayBuffer: true,
},
);
return data;
const chunkCount = batchDescription.requests.length;
const jumpPositionsForChunks = [];
let cumsum = 0;
for (const req of batchDescription.requests) {
jumpPositionsForChunks.push(cumsum);
cumsum += req.byteSize;
}
jumpPositionsForChunks.push(cumsum);

const dataEntries = [];
for (let chunkIdx = 0; chunkIdx < chunkCount; chunkIdx++) {
// slice() creates a copy of the data, but working with TypedArray Views would cause
// issues when transferring the data to a webworker.
const dracoData = dracoDataChunks.slice(
jumpPositionsForChunks[chunkIdx],
jumpPositionsForChunks[chunkIdx + 1],
);
dataEntries.push(dracoData);
}

return dataEntries;
});
}
116 changes: 74 additions & 42 deletions frontend/javascripts/libs/DRACOLoader.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,15 @@
// @ts-nocheck
/* eslint-disable */
// Copied from node_modules/three/examples/jsm/loaders/DRACOLoader.js to fix ERR_REQUIRE_ESM error.
import { BufferAttribute, BufferGeometry, FileLoader, Loader } from "three";
// Copied from https://github.com/mrdoob/three.js/pull/25475 / DRACOLoader.js to fix ERR_REQUIRE_ESM error.
import {
BufferAttribute,
BufferGeometry,
Color,
FileLoader,
Loader,
LinearSRGBColorSpace,
SRGBColorSpace,
} from "three";

const _taskCache = new WeakMap();

Expand Down Expand Up @@ -62,44 +70,35 @@ class DRACOLoader extends Loader {
loader.load(
url,
(buffer) => {
const taskConfig = {
attributeIDs: this.defaultAttributeIDs,
attributeTypes: this.defaultAttributeTypes,
useUniqueIDs: false,
};

this.decodeGeometry(buffer, taskConfig).then(onLoad).catch(onError);
this.parse(buffer, onLoad, onError);
},
onProgress,
onError,
);
}

/** @deprecated Kept for backward-compatibility with previous DRACOLoader versions. */
decodeDracoFile(buffer, callback, attributeIDs, attributeTypes) {
parse(buffer, onLoad, onError) {
this.decodeDracoFile(buffer, onLoad, null, null, SRGBColorSpace).catch(onError);
}

decodeDracoFile(
buffer,
callback,
attributeIDs,
attributeTypes,
vertexColorSpace = LinearSRGBColorSpace,
) {
const taskConfig = {
attributeIDs: attributeIDs || this.defaultAttributeIDs,
attributeTypes: attributeTypes || this.defaultAttributeTypes,
useUniqueIDs: !!attributeIDs,
vertexColorSpace: vertexColorSpace,
};

this.decodeGeometry(buffer, taskConfig).then(callback);
return this.decodeGeometry(buffer, taskConfig).then(callback);
}

decodeGeometry(buffer, taskConfig) {
// TODO: For backward-compatibility, support 'attributeTypes' objects containing
// references (rather than names) to typed array constructors. These must be
// serialized before sending them to the worker.
for (const attribute in taskConfig.attributeTypes) {
const type = taskConfig.attributeTypes[attribute];

if (type.BYTES_PER_ELEMENT !== undefined) {
taskConfig.attributeTypes[attribute] = type.name;
}
}

//

const taskKey = JSON.stringify(taskConfig);

// Check for an existing task using this buffer. A transferred buffer cannot be transferred
Expand Down Expand Up @@ -172,17 +171,39 @@ class DRACOLoader extends Loader {
}

for (let i = 0; i < geometryData.attributes.length; i++) {
const attribute = geometryData.attributes[i];
const name = attribute.name;
const array = attribute.array;
const itemSize = attribute.itemSize;
const result = geometryData.attributes[i];
const name = result.name;
const array = result.array;
const itemSize = result.itemSize;

const attribute = new BufferAttribute(array, itemSize);

if (name === "color") {
this._assignVertexColorSpace(attribute, result.vertexColorSpace);
}

geometry.setAttribute(name, new BufferAttribute(array, itemSize));
geometry.setAttribute(name, attribute);
}

return geometry;
}

_assignVertexColorSpace(attribute, inputColorSpace) {
// While .drc files do not specify colorspace, the only 'official' tooling
// is PLY and OBJ converters, which use sRGB. We'll assume sRGB when a .drc
// file is passed into .load() or .parse(). GLTFLoader uses internal APIs
// to decode geometry, and vertex colors are already Linear-sRGB in there.

if (inputColorSpace !== SRGBColorSpace) return;

const _color = new Color();

for (let i = 0, il = attribute.count; i < il; i++) {
_color.fromBufferAttribute(attribute, i).convertSRGBToLinear();
attribute.setXYZ(i, _color.r, _color.g, _color.b);
}
}

_loadLibrary(url, responseType) {
const loader = new FileLoader(this.manager);
loader.setPath(this.decoderPath);
Expand Down Expand Up @@ -298,6 +319,10 @@ class DRACOLoader extends Loader {

this.workerPool.length = 0;

if (this.workerSourceURL !== "") {
URL.revokeObjectURL(this.workerSourceURL);
}

return this;
}
}
Expand All @@ -320,7 +345,7 @@ function DRACOWorker() {
resolve({ draco: draco });
};

DracoDecoderModule(decoderConfig);
DracoDecoderModule(decoderConfig); // eslint-disable-line no-undef
});
break;

Expand All @@ -330,11 +355,9 @@ function DRACOWorker() {
decoderPending.then((module) => {
const draco = module.draco;
const decoder = new draco.Decoder();
const decoderBuffer = new draco.DecoderBuffer();
decoderBuffer.Init(new Int8Array(buffer), buffer.byteLength);

try {
const geometry = decodeGeometry(draco, decoder, decoderBuffer, taskConfig);
const geometry = decodeGeometry(draco, decoder, new Int8Array(buffer), taskConfig);

const buffers = geometry.attributes.map((attr) => attr.array.buffer);

Expand All @@ -346,29 +369,27 @@ function DRACOWorker() {

self.postMessage({ type: "error", id: message.id, error: error.message });
} finally {
draco.destroy(decoderBuffer);
draco.destroy(decoder);
}
});
break;
}
};

function decodeGeometry(draco, decoder, decoderBuffer, taskConfig) {
function decodeGeometry(draco, decoder, array, taskConfig) {
const attributeIDs = taskConfig.attributeIDs;
const attributeTypes = taskConfig.attributeTypes;

let dracoGeometry;
let decodingStatus;

const geometryType = decoder.GetEncodedGeometryType(decoderBuffer);
const geometryType = decoder.GetEncodedGeometryType(array);

if (geometryType === draco.TRIANGULAR_MESH) {
dracoGeometry = new draco.Mesh();
decodingStatus = decoder.DecodeBufferToMesh(decoderBuffer, dracoGeometry);
decodingStatus = decoder.DecodeArrayToMesh(array, array.byteLength, dracoGeometry);
} else if (geometryType === draco.POINT_CLOUD) {
dracoGeometry = new draco.PointCloud();
decodingStatus = decoder.DecodeBufferToPointCloud(decoderBuffer, dracoGeometry);
decodingStatus = decoder.DecodeArrayToPointCloud(array, array.byteLength, dracoGeometry);
} else {
throw new Error("THREE.DRACOLoader: Unexpected geometry type.");
}
Expand Down Expand Up @@ -401,9 +422,20 @@ function DRACOWorker() {
attribute = decoder.GetAttribute(dracoGeometry, attributeID);
}

geometry.attributes.push(
decodeAttribute(draco, decoder, dracoGeometry, attributeName, attributeType, attribute),
const attributeResult = decodeAttribute(
draco,
decoder,
dracoGeometry,
attributeName,
attributeType,
attribute,
);

if (attributeName === "color") {
attributeResult.vertexColorSpace = taskConfig.vertexColorSpace;
}

geometry.attributes.push(attributeResult);
}

// Add index.
Expand Down
6 changes: 3 additions & 3 deletions frontend/javascripts/libs/draco.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@ let _dracoLoader: CustomDRACOLoader | null;

class CustomDRACOLoader extends DRACOLoader {
// Subclass to create a promise-based API and add typing
decodeDracoFileAsync = (buffer: ArrayBuffer, ...args: any[]): Promise<BufferGeometry> =>
new Promise((resolve) => {
decodeDracoFileAsync = (buffer: ArrayBuffer): Promise<BufferGeometry> =>
new Promise((resolve, reject) => {
if (_dracoLoader == null) {
throw new Error("DracoLoader not instantiated.");
}
// @ts-ignore
_dracoLoader.decodeDracoFile(buffer, resolve, ...args);
_dracoLoader.parse(buffer, resolve, reject);
});
}

Expand Down
10 changes: 5 additions & 5 deletions frontend/javascripts/libs/task_pool.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import type { Saga, Task } from "oxalis/model/sagas/effect-generators";
import { join, call, fork } from "typed-redux-saga";

/*
Given an array of async tasks, processTaskWithPool
allows to execute at most ${poolSize} tasks concurrently.
Expand All @@ -11,8 +12,7 @@ export default function* processTaskWithPool(
): Saga<void> {
const startedTasks: Array<Task<void>> = [];
let isFinalResolveScheduled = false;
// @ts-expect-error ts-migrate(7034) FIXME: Variable 'error' implicitly has type 'any' in some... Remove this comment to see the full error message
let error = null;
let error: Error | null = null;

// @ts-expect-error ts-migrate(7006) FIXME: Parameter 'fn' implicitly has an 'any' type.
function* forkSafely(fn): Saga<void> {
Expand All @@ -22,7 +22,7 @@ export default function* processTaskWithPool(
try {
yield* call(fn);
} catch (e) {
error = e;
error = e as Error;
}
}

Expand All @@ -34,7 +34,6 @@ export default function* processTaskWithPool(
// awaited now together.
// @ts-expect-error ts-migrate(2769) FIXME: No overload matches this call.
yield* join(startedTasks);
// @ts-expect-error ts-migrate(7005) FIXME: Variable 'error' implicitly has an 'any' type.
if (error != null) throw error;
}

Expand All @@ -53,6 +52,7 @@ export default function* processTaskWithPool(

for (let i = 0; i < poolSize; i++) {
yield* fork(startNextTask);
} // The saga will wait for all forked tasks to terminate before returning, because
}
// The saga will wait for all forked tasks to terminate before returning, because
// fork() creates attached forks (in contrast to spawn()).
}
27 changes: 27 additions & 0 deletions frontend/javascripts/libs/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -964,6 +964,33 @@ export function chunkIntoTimeWindows<T>(
);
}

// chunkDynamically takes an array of input elements and splits these
// into batches. Instead of using a constant batch size, the elements
// of a batch are measured with measureFn. Each batch is filled until
// its accumulated measure exceeds the provided minThreshold.
// Note that the threshold will be exceeded by each batch
// (except for the last batch, which may contain less).
// An empty input yields an empty list of batches.
export function chunkDynamically<T>(
  elements: T[],
  minThreshold: number,
  measureFn: (el: T) => number,
): Array<T[]> {
  // Explicitly typed to avoid implicitly-any arrays under strict mode.
  const batches: Array<T[]> = [];
  let currentBatch: T[] = [];
  let currentSize = 0;

  for (let i = 0; i < elements.length; i++) {
    currentBatch.push(elements[i]);
    currentSize += measureFn(elements[i]);
    // Close the batch as soon as the threshold is exceeded, or when the
    // input is exhausted so that trailing elements are not dropped.
    if (currentSize > minThreshold || i === elements.length - 1) {
      currentSize = 0;
      batches.push(currentBatch);
      currentBatch = [];
    }
  }
  return batches;
}

export function convertBufferToImage(
buffer: Uint8Array,
width: number,
Expand Down
Loading

0 comments on commit 58bce4e

Please sign in to comment.