[tfjs-node] fixed summary writer memory leak #7490

Merged 2 commits on Mar 16, 2023
42 changes: 28 additions & 14 deletions tfjs-node/src/nodejs_kernel_backend.ts
@@ -459,32 +459,41 @@ export class NodeJSKernelBackend extends KernelBackend {
   private getMappedInputTensorIds(
       inputs: Tensor[], inputTensorInfos: ModelTensorInfo[]) {
     const tensorIds = this.getInputTensorIds(inputs);
+    const newTensors = [];
     for (let i = 0; i < inputs.length; i++) {
       if (inputTensorInfos[i] != null) {
         if (inputTensorInfos[i].tfDtype === 'DT_UINT8') {
           const data = Uint8Array.from(inputs[i].dataSync());
           const inputTensorId = this.binding.createTensor(
               inputs[i].shape, this.binding.TF_UINT8, data);
           tensorIds[i] = inputTensorId;
+          newTensors.push(i);
         } else if (inputTensorInfos[i].tfDtype === 'DT_INT64') {
           const data =
               encodeInt32ArrayAsInt64(inputs[i].dataSync() as Int32Array);
           const inputTensorId = this.binding.createTensor(
               inputs[i].shape, this.binding.TF_INT64, data);
           tensorIds[i] = inputTensorId;
+          newTensors.push(i);
         }
       }
     }
-    return tensorIds;
+    return {tensorIds, newTensors};
   }

   runSavedModel(
       id: number, inputs: Tensor[], inputTensorInfos: ModelTensorInfo[],
       outputOpNames: string[]): Tensor[] {
+    const {tensorIds, newTensors} =
+        this.getMappedInputTensorIds(inputs, inputTensorInfos);
     const outputMetadata = this.binding.runSavedModel(
-        id, this.getMappedInputTensorIds(inputs, inputTensorInfos),
-        inputTensorInfos.map(info => info.name).join(','),
+        id, tensorIds, inputTensorInfos.map(info => info.name).join(','),
         outputOpNames.join(','));
+    for (let i = 0; i < tensorIds.length; i++) {
+      if (newTensors.includes(i)) {
+        this.binding.deleteTensor(tensorIds[i]);
+      }
+    }
     return outputMetadata.map(m => this.createOutputTensor(m));
   }

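For `DT_UINT8` and `DT_INT64` inputs, `getMappedInputTensorIds` allocates fresh native tensors through `binding.createTensor`, and nothing in tfjs' own memory tracking frees them; the change above records which input slots were newly allocated and deletes exactly those once the model has run. A minimal sketch of that create-then-free pattern follows. The `NativeBinding` interface and `withTempNativeTensors` helper are hypothetical illustrations, not the actual tfjs-node binding API.

```ts
// Hypothetical helper sketching the pattern: allocate temporary native
// tensors, hand their ids to the caller, and always free them afterwards.
interface NativeBinding {
  deleteTensor(id: number): void;
}

function withTempNativeTensors<T>(
    binding: NativeBinding,
    allocate: () => number[],            // creates temporary native tensors, returns their ids
    run: (tempIds: number[]) => T): T {  // consumes the temporaries
  const tempIds = allocate();
  try {
    return run(tempIds);
  } finally {
    // Free every temporary even if `run` throws, so native memory cannot
    // accumulate across repeated calls.
    for (const id of tempIds) {
      binding.deleteTensor(id);
    }
  }
}
```

The actual change inlines this logic in `runSavedModel` rather than introducing a helper, but the invariant is the same: every index recorded in `newTensors` gets a matching `deleteTensor` call.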
@@ -542,9 +551,10 @@ export class NodeJSKernelBackend extends KernelBackend {
       }
       const opAttrs: TFEOpAttr[] =
           [{name: 'T', type: this.binding.TF_ATTR_TYPE, value: typeAttr}];

-      this.binding.executeOp(
-          'WriteScalarSummary', opAttrs, this.getInputTensorIds(inputArgs), 0);
+      const ids = this.getInputTensorIds(inputArgs);
+      this.binding.executeOp('WriteScalarSummary', opAttrs, ids, 0);
+      // release the tensorflow tensor for Int64Scalar value of step
+      this.binding.deleteTensor(ids[1]);
     });
   }

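The tensor released here backs the `step` argument: steps travel to TensorFlow as int64 scalars, which JavaScript numbers cannot represent directly, so the binding stores them as pairs of 32-bit words (the `encodeInt32ArrayAsInt64` call in the first hunk performs the analogous conversion for model inputs). Because that native tensor is not a regular tfjs tensor, `tidy` never disposes it, which appears to be why the explicit `deleteTensor(ids[1])` is needed. A hedged sketch of the widening idea, assuming non-negative values and a low-word/high-word layout; `widenInt32ToInt64Words` is an illustrative name, not the library's function:

```ts
// Illustrative only: widen 32-bit integers into int64 storage for a native
// binding that expects two 32-bit words per value.
function widenInt32ToInt64Words(values: Int32Array): Int32Array {
  const out = new Int32Array(values.length * 2);
  for (let i = 0; i < values.length; i++) {
    out[i * 2] = values[i];  // low 32 bits
    out[i * 2 + 1] = 0;      // high 32 bits; assumes values[i] >= 0 (negative
                             // values would need the high word sign-extended)
  }
  return out;
}
```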
@@ -561,9 +571,10 @@
       // and places the values in 30 buckets, while WriteSummary expects a
       // tensor which already describes the bucket widths and counts.
       //
-      // If we were to use WriteHistogramSummary, we wouldn't have to implement
-      // the "bucketization" of the input tensor, but we also wouldn't have
-      // control over the number of buckets, or the description of the graph.
+      // If we were to use WriteHistogramSummary, we wouldn't have to
+      // implement the "bucketization" of the input tensor, but we also
+      // wouldn't have control over the number of buckets, or the description
+      // of the graph.
       //
       // Therefore, we instead use WriteSummary, which makes it possible to
       // support these features. However, the trade-off is that we have to
@@ -594,8 +605,10 @@
       const typeAttr = this.typeAttributeFromTensor(buckets);
       const opAttrs: TFEOpAttr[] =
           [{name: 'T', type: this.binding.TF_ATTR_TYPE, value: typeAttr}];
-      this.binding.executeOp(
-          'WriteSummary', opAttrs, this.getInputTensorIds(inputArgs), 0);
+      const ids = this.getInputTensorIds(inputArgs);
+      this.binding.executeOp('WriteSummary', opAttrs, ids, 0);
+      // release the tensorflow tensor for Int64Scalar value of step
+      this.binding.deleteTensor(ids[1]);
     });
   }

@@ -609,9 +622,10 @@
    *
    * @param data A `Tensor` of any shape. Must be castable to `float32`
    * @param bucketCount Optional positive `number`
-   * @returns A `Tensor` of shape `[k, 3]` and type `float32`. The `i`th row is
-   * a triple `[leftEdge, rightEdge, count]` for a single bucket. The value of
-   * `k` is either `bucketCount`, `1` or `0`.
+   * @returns A `Tensor` of shape `[k, 3]` and type `float32`. The `i`th row
+   * is
+   * a triple `[leftEdge, rightEdge, count]` for a single bucket. The value
+   * of `k` is either `bucketCount`, `1` or `0`.
   */
   private buckets(data: Tensor, bucketCount?: number): Tensor<tf.Rank> {
     if (data.size === 0) {
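As the comment in the hunk above explains, using `WriteSummary` means the backend has to bucketize the data itself into `[leftEdge, rightEdge, count]` rows. The sketch below is a plain-TypeScript illustration of that idea; it mirrors the shape contract documented for `buckets()` (k rows of three values, with k equal to `bucketCount`, `1`, or `0`), but it is not the backend's actual tensor-op implementation.

```ts
// Illustrative bucketization: reduce a flat array of numbers to `bucketCount`
// rows of [leftEdge, rightEdge, count].
function bucketize(
    values: number[], bucketCount: number): Array<[number, number, number]> {
  if (values.length === 0 || bucketCount <= 0) {
    return [];  // k === 0
  }
  const min = Math.min(...values);
  const max = Math.max(...values);
  if (min === max) {
    // All values identical: a single bucket holding everything (k === 1).
    return [[min, max, values.length]];
  }
  const width = (max - min) / bucketCount;
  const counts = new Array<number>(bucketCount).fill(0);
  for (const v of values) {
    // Clamp so that v === max lands in the last bucket rather than past it.
    const idx = Math.min(bucketCount - 1, Math.floor((v - min) / width));
    counts[idx]++;
  }
  return counts.map(
      (count, i): [number, number, number] =>
          [min + i * width, min + (i + 1) * width, count]);
}
```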
19 changes: 19 additions & 0 deletions tfjs-node/src/tensorboard_test.ts
@@ -90,6 +90,16 @@ describe('tensorboard', () => {
     expect(fileNames.length).toEqual(1);
   });

+  it('Writing tf.Scalar no memory leak', () => {
+    const writer = tfn.node.summaryFileWriter(tmpLogDir);
+    const beforeNumTFTensors = writer.backend.getNumOfTFTensors();
+    const value = scalar(42);
+    writer.scalar('foo', value, 0);
+    writer.flush();
+    value.dispose();
+    expect(writer.backend.getNumOfTFTensors()).toBe(beforeNumTFTensors);
+  });
+
   it('No crosstalk between two summary writers', () => {
     const logDir1 = path.join(tmpLogDir, '1');
     const writer1 = tfn.node.summaryFileWriter(logDir1);
@@ -180,6 +190,15 @@ describe('tensorboard', () => {
     expect(fileSize2 - fileSize1).toEqual(2 * incrementPerScalar);
   });

+  it('summaryFileWriter no memory leak', () => {
+    const writer = tfn.node.summaryFileWriter(tmpLogDir);
+    const beforeNumTFTensors = writer.backend.getNumOfTFTensors();
+    const value = tensor1d([1, 2, 3, 4, 5], 'int32');
+    writer.histogram('foo', value, 0, 5);
+    writer.flush();
+    value.dispose();
+    expect(writer.backend.getNumOfTFTensors()).toBe(beforeNumTFTensors);
+  });
   it('Can create multiple normal distribution', () => {
     const writer = tfn.node.summaryFileWriter(tmpLogDir);
     tf.tidy(() => {
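Both new tests check a single write; a longer-running variant makes the effect of the leak more visible, since before this fix each `scalar()` or `histogram()` call left at least one untracked native tensor behind. The snippet below is an illustrative check along those lines, not part of the PR; it uses only APIs that appear in the tests above, and `tmpLogDir` here is just a placeholder path rather than the test suite's fixture.

```ts
import * as tfn from '@tensorflow/tfjs-node';

const tmpLogDir = '/tmp/tfjs_tensorboard_demo';  // placeholder log directory
const writer = tfn.node.summaryFileWriter(tmpLogDir);
const before = writer.backend.getNumOfTFTensors();

for (let step = 0; step < 1000; step++) {
  const value = tfn.scalar(Math.random());
  writer.scalar('loss', value, step);
  value.dispose();  // the caller still owns the tfjs-side tensor
}
writer.flush();

// With the fix, the native tensor count returns to its starting point.
console.log(writer.backend.getNumOfTFTensors() === before);
```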