diff --git a/nemo/nemo/core/neural_modules.py b/nemo/nemo/core/neural_modules.py
index 24b4efb938a5..1ab63787bc68 100644
--- a/nemo/nemo/core/neural_modules.py
+++ b/nemo/nemo/core/neural_modules.py
@@ -216,8 +216,12 @@ def __call__(self, **kwargs):
             )
 
         # Creating ad-hoc class for returning from module's forward pass.
+        output_class_name = f'{self.__class__.__name__}Output'
         field_names = list(output_port_defs)
-        result_type = collections.namedtuple('NmOutput', field_names)
+        result_type = collections.namedtuple(
+            typename=output_class_name,
+            field_names=field_names,
+        )
 
         # Tie tuple of output tensors with corresponding names.
         result = result_type(*result)
diff --git a/tests/test_pytorch_trainers.py b/tests/test_pytorch_trainers.py
index cd3d1e851a53..77baeb3bb27e 100644
--- a/tests/test_pytorch_trainers.py
+++ b/tests/test_pytorch_trainers.py
@@ -33,6 +33,11 @@ def test_simple_train_named_output(self):
         loss = nemo.backends.pytorch.tutorials.MSELoss()
 
         data = data_source()
+        self.assertEqual(
+            first=type(data).__name__,
+            second='RealFunctionDataLayerOutput',
+            msg='Check output class naming coherence.',
+        )
 
         y_pred = trainable_module(x=data.x)
         loss_tensor = loss(predictions=y_pred, target=data.y)
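
Note (illustration only, not part of the patch): the change names the ad-hoc output namedtuple after the calling module's class instead of the generic 'NmOutput', which is what the new test asserts via 'RealFunctionDataLayerOutput'. Below is a minimal, self-contained sketch of that naming behavior; the RealFunctionDataLayer stand-in and its x/y fields are simplified assumptions, not the actual NeMo module.

import collections

class RealFunctionDataLayer:
    """Simplified stand-in for a NeMo data layer; only the naming logic mirrors the patch."""
    def __call__(self):
        # Name the output type after the concrete class, as the patched __call__ does.
        output_class_name = f'{self.__class__.__name__}Output'
        result_type = collections.namedtuple(
            typename=output_class_name,
            field_names=['x', 'y'],
        )
        return result_type(x=[0.0], y=[1.0])

data = RealFunctionDataLayer()()
assert type(data).__name__ == 'RealFunctionDataLayerOutput'
print(data.x, data.y)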