Skip to content

Commit

Permalink
Revert "Skip logging the input tensors to the loss block. (aws#64)" (aws#87)
Browse files Browse the repository at this point in the history

This reverts commit 8ad99b6.
  • Loading branch information
rahul003 authored Dec 4, 2019
1 parent 09c9638 commit 7b31c99
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 6 deletions.
5 changes: 2 additions & 3 deletions smdebug/mxnet/hook.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,9 +154,8 @@ def forward_hook(self, block, inputs, outputs):
# This overwhelms the logs; turn back on if you really need it
# logger.debug("Processing the global step {0} for block {1}".format(self.step, block_name))

# Output input tensor if it is not a loss block
if isinstance(block, mx.gluon.loss.Loss) is False:
self._write_inputs(block_name, inputs)
# Output input tensor
self._write_inputs(block_name, inputs)

# Output output tensors
self._write_outputs(block_name, outputs)
Expand Down
3 changes: 0 additions & 3 deletions tests/mxnet/test_hook_loss_collection.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,6 @@ def test_loss_collection_default():
loss_val = loss_tensor.value(step_num=1)
assert len(loss_val) > 0

# Assert that we are not logging the inputs to loss block.
input_loss_tensors = tr.tensor_names(regex=".*loss._input*")
assert len(input_loss_tensors) == 0
shutil.rmtree(out_dir)


Expand Down

0 comments on commit 7b31c99

Please sign in to comment.