Skip to content

Commit

Permalink
Add host file stream (#497)
Browse files Browse the repository at this point in the history
  • Loading branch information
lutzroeder committed Dec 20, 2020
1 parent d751b33 commit f11c219
Show file tree
Hide file tree
Showing 10 changed files with 379 additions and 244 deletions.
4 changes: 2 additions & 2 deletions source/barracuda.js
Original file line number Diff line number Diff line change
Expand Up @@ -468,7 +468,7 @@ barracuda.NNModel = class {
}
for (const layer of this._layers) {
for (const tensor of layer.tensors) {
tensor.data = reader.bytes(tensor.offset * tensor.itemsize, tensor.length * tensor.itemsize);
tensor.data = reader.read(tensor.offset * tensor.itemsize, tensor.length * tensor.itemsize);
}
}
}
Expand Down Expand Up @@ -518,7 +518,7 @@ barracuda.BinaryReader = class {
}
}

bytes(offset, length) {
read(offset, length) {
const start = this._position + offset;
const end = start + length;
if (end > this._buffer.length) {
Expand Down
64 changes: 6 additions & 58 deletions source/darknet.js
Original file line number Diff line number Diff line change
Expand Up @@ -1108,13 +1108,14 @@ darknet.Weights = class {

static open(stream) {
if (stream && stream.length >= 20) {
const reader = new darknet.BinaryReader(stream.read(12));
const major = reader.int32();
const minor = reader.int32();
reader.int32(); // revision
((major * 10 + minor) >= 2) ? stream.skip(8) : stream.skip(4); // seen
const buffer = stream.peek(12);
const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
const major = view.getInt32(0, true);
const minor = view.getInt32(4, true);
view.getInt32(8, true); // revision
const transpose = (major > 1000) || (minor > 1000);
if (!transpose) {
stream.skip(12 + (((major * 10 + minor) >= 2) ? 8 : 4));
return new darknet.Weights(stream);
}
// else {
Expand All @@ -1139,59 +1140,6 @@ darknet.Weights = class {
}
};

darknet.BinaryReader = class {

    // Little-endian binary reader over a Uint8Array.
    constructor(buffer) {
        this._buffer = buffer;
        this._length = buffer.length;
        this._position = 0;
        this._view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
    }

    get position() {
        return this._position;
    }

    get length() {
        return this._length;
    }

    // Negative positions are interpreted relative to the end of the buffer.
    seek(position) {
        this._position = position < 0 ? this._length + position : position;
    }

    skip(offset) {
        this._position += offset;
    }

    // Return `length` bytes at the current position without consuming them.
    // Omitting `length` peeks to the end of the buffer.
    peek(length) {
        // Fast path: peeking everything from the start needs no subarray.
        if (length === undefined && this._position === 0) {
            return this._buffer;
        }
        const start = this._position;
        this.skip(length === undefined ? this._length - this._position : length);
        const end = this._position;
        this.seek(start);
        return this._buffer.subarray(start, end);
    }

    // Return `length` bytes and advance the position past them.
    // Omitting `length` reads to the end of the buffer.
    read(length) {
        // Fast path: reading everything from the start returns the buffer itself.
        if (length === undefined && this._position === 0) {
            this._position = this._length;
            return this._buffer;
        }
        const start = this._position;
        this.skip(length === undefined ? this._length - this._position : length);
        return this._buffer.subarray(start, this._position);
    }

    // Read a little-endian signed 32-bit integer.
    int32() {
        const offset = this._position;
        this.skip(4);
        return this._view.getInt32(offset, true);
    }
};

darknet.Metadata = class {

static open(host) {
Expand Down
66 changes: 51 additions & 15 deletions source/electron.js
Original file line number Diff line number Diff line change
Expand Up @@ -292,7 +292,7 @@ host.ElectronHost = class {
reject(new Error("The file '" + file + "' size (" + stats.size.toString() + ") for encoding '" + encoding + "' is greater than 2 GB."));
}
else {
resolve(new host.ElectronHost.FileStream(pathname, stats.size));
resolve(new host.ElectronHost.FileStream(pathname, 0, stats.size, stats.mtimeMs));
}
});
});
Expand Down Expand Up @@ -555,10 +555,12 @@ host.ElectronHost.BinaryStream = class {

host.ElectronHost.FileStream = class {

constructor(file, length) {
constructor(file, start, length, mtime) {
this._file = file;
this._start = start;
this._length = length;
this._position = 0;
this._mtime = mtime;
}

get position() {
Expand All @@ -570,8 +572,11 @@ host.ElectronHost.FileStream = class {
}

stream(length) {
const buffer = this.read(length);
return new host.ElectronHost.BinaryStream(buffer);
const file = new host.ElectronHost.FileStream(this._file, this._position, length, this._mtime);
this.skip(length);
return file;
// const buffer = this.read(length);
// return new host.ElectronHost.BinaryStream(buffer);
}

seek(position) {
Expand All @@ -580,39 +585,70 @@ host.ElectronHost.FileStream = class {

skip(offset) {
this._position += offset;
if (this._position > this._length) {
throw new Error('Expected ' + (this._position - this._length) + ' more bytes. The file might be corrupted. Unexpected end of file.');
}
}

peek(length) {
length = length !== undefined ? length : this._length - this._position;
if (length < 0x10000000) {
const position = this._fill(length);
this._position -= length;
return this._buffer.subarray(position, position + length);
}
const position = this._position;
this.skip(length);
this.seek(position);
const descriptor = fs.openSync(this._file, 'r');
const buffer = new Uint8Array(length);
fs.readSync(descriptor, buffer, 0, length, position);
fs.closeSync(descriptor);
this._read(buffer, position);
return buffer;
}

read(length) {
length = length !== undefined ? length : this._length - this._position;
if (length < 0x10000000) {
const position = this._fill(length);
return this._buffer.subarray(position, position + length);
}
const position = this._position;
this.skip(length);
const descriptor = fs.openSync(this._file, 'r');
const buffer = new Uint8Array(length);
fs.readSync(descriptor, buffer, 0, length, position);
fs.closeSync(descriptor);
this._read(buffer, position);
return buffer;
}

byte() {
const position = this._fill(1);
return this.buffer[position];
}

_fill(length) {
if (this._position + length > this._length) {
throw new Error('Expected ' + (this._position + length - this._length) + ' more bytes. The file might be corrupted. Unexpected end of file.');
}
if (!this._buffer || this._position < this._offset || this._position + length > this._offset + this._buffer.length) {
this._offset = this._position;
this._buffer = new Uint8Array(Math.min(0x10000000, this._length - this._offset));
this._read(this._buffer, this._offset);
}
const position = this._position;
this.skip(1);
this._position += length;
return position - this._offset;
}

_read(buffer, offset) {
const descriptor = fs.openSync(this._file, 'r');
const buffer = new Uint8Array(1);
fs.readSync(descriptor, buffer, 0, 1, position);
fs.closeSync(descriptor);
return buffer[0];
const stat = fs.statSync(this._file);
if (stat.mtimeMs != this._mtime) {
throw new Error("File '" + this._file + "' last modified time changed.");
}
try {
fs.readSync(descriptor, buffer, 0, buffer.length, offset + this._start);
}
finally {
fs.closeSync(descriptor);
}
}
};

Expand Down
42 changes: 21 additions & 21 deletions source/gzip.js
Original file line number Diff line number Diff line change
Expand Up @@ -98,35 +98,16 @@ gzip.InflaterStream = class {
return this._length;
}

inflate() {
if (this._buffer === undefined) {
const compressed = this._stream.peek();
if (typeof process === 'object' && typeof process.versions == 'object' && typeof process.versions.node !== 'undefined') {
this._buffer = require('zlib').inflateRawSync(compressed);
}
else if (typeof pako !== 'undefined') {
this._buffer = pako.inflateRaw(compressed);
}
else {
this._buffer = new require('./zip').Inflater().inflateRaw(compressed);
}
if (this._buffer.length !== this._length) {
throw new gzip.Error('Invalid size.');
}
delete this._stream;
}
}

seek(position) {
if (this._buffer === undefined) {
this.inflate();
this._inflate();
}
this._position = position >= 0 ? position : this._length + position;
}

skip(offset) {
if (this._buffer === undefined) {
this.inflate();
this._inflate();
}
this._position += offset;
}
Expand Down Expand Up @@ -162,6 +143,25 @@ gzip.InflaterStream = class {
this.skip(1);
return this._buffer[position];
}

_inflate() {
if (this._buffer === undefined) {
const compressed = this._stream.peek();
if (typeof process === 'object' && typeof process.versions == 'object' && typeof process.versions.node !== 'undefined') {
this._buffer = require('zlib').inflateRawSync(compressed);
}
else if (typeof pako !== 'undefined') {
this._buffer = pako.inflateRaw(compressed);
}
else {
this._buffer = new require('./zip').Inflater().inflateRaw(compressed);
}
if (this._buffer.length !== this._length) {
throw new gzip.Error('Invalid size.');
}
delete this._stream;
}
}
};

gzip.BinaryReader = class {
Expand Down
3 changes: 2 additions & 1 deletion source/npz.js
Original file line number Diff line number Diff line change
Expand Up @@ -150,7 +150,8 @@ npz.ModelFactory = class {
modulesMap.set(moduleName, newModule);
}
const module = modulesMap.get(moduleName);
let array = new numpy.Array(entry.data);
const data = entry.data;
let array = new numpy.Array(data);
if (array.byteOrder === '|') {
if (array.dataType !== 'O') {
throw new npz.Error("Invalid data type '" + array.dataType + "'.");
Expand Down
20 changes: 10 additions & 10 deletions source/numpy.js
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ numpy.Array = class {
if (buffer) {
const reader = new numpy.BinaryReader(buffer);
const signature = [ 0x93, 0x4E, 0x55, 0x4D, 0x50, 0x59 ];
if (!reader.bytes(6).every((v, i) => v == signature[i])) {
if (!reader.read(6).every((v, i) => v == signature[i])) {
throw new numpy.Error('Invalid signature.');
}
const major = reader.byte();
Expand All @@ -18,7 +18,7 @@ numpy.Array = class {
}
const size = major >= 2 ? reader.uint32() : reader.uint16();
const encoding = major >= 3 ? 'utf-8' : 'ascii';
const header_content = new TextDecoder(encoding).decode(reader.bytes(size));
const header_content = new TextDecoder(encoding).decode(reader.read(size));
const header = numpy.HeaderReader.create(header_content).read();
if (!header.descr || header.descr.length < 2) {
throw new numpy.Error("Missing property 'descr'.");
Expand All @@ -31,7 +31,7 @@ numpy.Array = class {
switch (this._byteOrder) {
case '|': {
this._dataType = header.descr.substring(1);
this._data = reader.bytes(reader.size - reader.position);
this._data = reader.read(reader.size - reader.position);
break;
}
case '>':
Expand All @@ -41,7 +41,7 @@ numpy.Array = class {
}
this._dataType = header.descr.substring(1);
const size = parseInt(header.descr[2], 10) * this._shape.reduce((a, b) => a * b, 1);
this._data = reader.bytes(size);
this._data = reader.read(size);
break;
}
default:
Expand Down Expand Up @@ -90,7 +90,7 @@ numpy.Array = class {

const writer = new numpy.BinaryWriter();

writer.bytes([ 0x93, 0x4E, 0x55, 0x4D, 0x50, 0x59 ]); // '\\x93NUMPY'
writer.write([ 0x93, 0x4E, 0x55, 0x4D, 0x50, 0x59 ]); // '\\x93NUMPY'
writer.byte(1); // major
writer.byte(0); // minor

Expand Down Expand Up @@ -137,7 +137,7 @@ numpy.Array = class {
context.data = new Uint8Array(size);
context.view = new DataView(context.data.buffer, context.data.byteOffset, size);
numpy.Array._encodeDimension(context, this._data, 0);
writer.bytes(context.data);
writer.write(context.data);

return writer.toBuffer();
}
Expand Down Expand Up @@ -212,7 +212,7 @@ numpy.BinaryReader = class {
return this._buffer[this._position++];
}

bytes(size) {
read(size) {
const value = this._buffer.slice(this._position, this._position + size);
this._position += size;
return value;
Expand All @@ -232,14 +232,14 @@ numpy.BinaryWriter = class {
}

byte(value) {
this.bytes([ value ]);
this.write([ value ]);
}

uint16(value) {
this.bytes([ value & 0xff, (value >> 8) & 0xff ]);
this.write([ value & 0xff, (value >> 8) & 0xff ]);
}

bytes(values) {
write(values) {
const array = new Uint8Array(values.length);
for (let i = 0; i < values.length; i++) {
array[i] = values[i];
Expand Down
Loading

0 comments on commit f11c219

Please sign in to comment.