zlib: expose amount of data read for engines #13088

Closed · wants to merge 5 commits
11 changes: 11 additions & 0 deletions doc/api/zlib.md
@@ -385,6 +385,17 @@ added: v0.5.8
Not exported by the `zlib` module. It is documented here because it is the base
class of the compressor/decompressor classes.

### zlib.bytesRead

Member commented:

Can you add

<!-- YAML
added: REPLACEME
-->

directly below this heading?

Contributor (author) replied:

Added

<!-- YAML
added: REPLACEME
-->

* {number}

The `zlib.bytesRead` property specifies the number of bytes read by the engine
before the bytes are processed (compressed or decompressed, as appropriate for
the derived class).
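
For illustration only (not part of this diff), a minimal sketch of how the new property could be observed once a stream has finished:

```js
const zlib = require('zlib');

const gzip = zlib.createGzip();
gzip.on('data', () => {});   // Drain the output so 'end' can fire.
gzip.on('end', () => {
  // bytesRead counts the input bytes the engine consumed before compressing,
  // so for an 11-byte input it reports 11 here.
  console.log(gzip.bytesRead);
});
gzip.end('hello world');     // 11 bytes of input.
```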

### zlib.flush([kind], callback)
<!-- YAML
added: v0.5.8
4 changes: 4 additions & 0 deletions lib/zlib.js
@@ -168,6 +168,8 @@ class Zlib extends Transform {
    opts = opts || {};
    super(opts);

    this.bytesRead = 0;

    this._opts = opts;
    this._chunkSize = opts.chunkSize || constants.Z_DEFAULT_CHUNK;

@@ -408,6 +410,8 @@ class Zlib extends Transform {
      var have = availOutBefore - availOutAfter;
      assert(have >= 0, 'have should not go down');

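      // availInBefore - availInAfter is the amount of input the engine
      // consumed on this pass; accumulate it into the public bytesRead counter.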
      self.bytesRead += availInBefore - availInAfter;

      if (have > 0) {
        var out = self._buffer.slice(self._offset, self._offset + have);
        self._offset += have;
93 changes: 93 additions & 0 deletions test/parallel/test-zlib-bytes-read.js
@@ -0,0 +1,93 @@
'use strict';
const common = require('../common');
const assert = require('assert');
const zlib = require('zlib');

const expectStr = 'abcdefghijklmnopqrstuvwxyz'.repeat(2);
const expectBuf = Buffer.from(expectStr);

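// Feeds `buffer` into `target` one byte at a time, flushing after each write
// so intermediate bytesRead values can be asserted on every 'data' event;
// writer.size tracks how many bytes have been written so far.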
function createWriter(target, buffer) {
  const writer = { size: 0 };
  const write = () => {
    target.write(Buffer.from([buffer[writer.size++]]), () => {
      if (writer.size < buffer.length) {
        target.flush(write);
      } else {
        target.end();
      }
    });
  };
  write();
  return writer;
}

for (const method of [
  ['createGzip', 'createGunzip', false],
  ['createGzip', 'createUnzip', false],
  ['createDeflate', 'createInflate', true],
  ['createDeflateRaw', 'createInflateRaw', true]
]) {
  let compWriter;
  let compData = Buffer.alloc(0);

  const comp = zlib[method[0]]();
  comp.on('data', function(d) {
    compData = Buffer.concat([compData, d]);
    assert.strictEqual(this.bytesRead, compWriter.size,
                       `Should get write size on ${method[0]} data.`);
  });
  comp.on('end', common.mustCall(function() {
    assert.strictEqual(this.bytesRead, compWriter.size,
                       `Should get write size on ${method[0]} end.`);
    assert.strictEqual(this.bytesRead, expectStr.length,
                       `Should get data size on ${method[0]} end.`);

    {
      let decompWriter;
      let decompData = Buffer.alloc(0);

      const decomp = zlib[method[1]]();
      decomp.on('data', function(d) {
        decompData = Buffer.concat([decompData, d]);
        assert.strictEqual(this.bytesRead, decompWriter.size,
                           `Should get write size on ${method[0]}/` +
                           `${method[1]} data.`);
      });
      decomp.on('end', common.mustCall(function() {
        assert.strictEqual(this.bytesRead, compData.length,
                           `Should get compressed size on ${method[0]}/` +
                           `${method[1]} end.`);
        assert.strictEqual(decompData.toString(), expectStr,
                           `Should get original string on ${method[0]}/` +
                           `${method[1]} end.`);
      }));
      decompWriter = createWriter(decomp, compData);
    }

    // Some methods should allow extra data after the compressed data
    if (method[2]) {
      const compDataExtra = Buffer.concat([compData, Buffer.from('extra')]);

      let decompWriter;
      let decompData = Buffer.alloc(0);

      const decomp = zlib[method[1]]();
      decomp.on('data', function(d) {
        decompData = Buffer.concat([decompData, d]);
        assert.strictEqual(this.bytesRead, decompWriter.size,
                           `Should get write size on ${method[0]}/` +
                           `${method[1]} data.`);
      });
      decomp.on('end', common.mustCall(function() {
        assert.strictEqual(this.bytesRead, compData.length,
                           `Should get compressed size on ${method[0]}/` +
                           `${method[1]} end.`);
        assert.strictEqual(decompData.toString(), expectStr,
                           `Should get original string on ${method[0]}/` +
                           `${method[1]} end.`);
      }));
      decompWriter = createWriter(decomp, compDataExtra);
    }
  }));
  compWriter = createWriter(comp, expectBuf);
}