From d25b82c214054189cb5d9fab3dfdd856e009a5a7 Mon Sep 17 00:00:00 2001
From: Ethan Harris
Date: Tue, 1 Mar 2022 11:03:24 +0000
Subject: [PATCH 1/2] Fix loss function buffer support

---
 CHANGELOG.md                       | 2 ++
 flash/core/utilities/apply_func.py | 6 +++++-
 tests/core/test_model.py           | 9 +++++++++
 3 files changed, 16 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 8822d05c38..41ac1f55dd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -22,6 +22,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Fixed DDP support for `VideoClassifier` ([#1189](https://github.com/PyTorchLightning/lightning-flash/pull/1189))
 
+- Fixed a bug where buffers in loss functions were not correctly registered in the `Task`
+
 ## [0.7.0] - 2022-02-15
 
 ### Added
diff --git a/flash/core/utilities/apply_func.py b/flash/core/utilities/apply_func.py
index c218b23976..2b08cecbb3 100644
--- a/flash/core/utilities/apply_func.py
+++ b/flash/core/utilities/apply_func.py
@@ -13,12 +13,16 @@
 # limitations under the License.
 from typing import Callable, Dict, Mapping, Sequence, Type, Union
 
+from torch import nn
+
 
 def get_callable_name(fn_or_class: Union[Callable, object]) -> str:
     return getattr(fn_or_class, "__name__", fn_or_class.__class__.__name__).lower()
 
 
-def get_callable_dict(fn: Union[Callable, Mapping, Sequence]) -> Union[Dict, Mapping]:
+def get_callable_dict(fn: Union[nn.Module, Callable, Mapping, Sequence]) -> Union[Dict, Mapping]:
+    if isinstance(fn, nn.Module):
+        return nn.ModuleDict({get_callable_name(fn): fn})
     if isinstance(fn, Mapping):
         return fn
     if isinstance(fn, Sequence):
diff --git a/tests/core/test_model.py b/tests/core/test_model.py
index ce3f62fc44..76dddeddfa 100644
--- a/tests/core/test_model.py
+++ b/tests/core/test_model.py
@@ -488,3 +488,12 @@ def on_test_end(self, trainer: "pl.Trainer", pl_module: "pl.LightningModule") ->
     trainer = flash.Trainer(max_epochs=1, callbacks=CheckAccuracy(), gpus=torch.cuda.device_count())
     trainer.fit(task, train_dataloader=DataLoader(train_dataset), val_dataloaders=DataLoader(val_dataset))
     trainer.test(task, DataLoader(test_dataset))
+
+
+def test_loss_fn_buffer():
+    weight = torch.rand(10)
+    model = Task(loss_fn=nn.CrossEntropyLoss(weight=weight))
+    state_dict = model.state_dict()
+
+    assert len(state_dict) == 1
+    assert torch.allclose(state_dict["loss_fn.crossentropyloss.weight"], weight)

From 3bf6ad0a20ad3808c5c8f8ab31283240a8db3c71 Mon Sep 17 00:00:00 2001
From: Ethan Harris
Date: Tue, 1 Mar 2022 11:22:43 +0000
Subject: [PATCH 2/2] Update CHANGELOG.md

---
 CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 41ac1f55dd..f2d8809093 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -22,7 +22,7 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Fixed DDP support for `VideoClassifier` ([#1189](https://github.com/PyTorchLightning/lightning-flash/pull/1189))
 
-- Fixed a bug where buffers in loss functions were not correctly registered in the `Task`
+- Fixed a bug where buffers in loss functions were not correctly registered in the `Task` ([#1203](https://github.com/PyTorchLightning/lightning-flash/pull/1203))
 
 ## [0.7.0] - 2022-02-15
 
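
Note (not part of the patches above): a minimal standalone sketch of the mechanism the fix
relies on. Storing the loss inside an nn.ModuleDict, as the patched get_callable_dict does,
makes it a proper submodule, so its buffers (e.g. the class weights of nn.CrossEntropyLoss)
appear in state_dict() and follow .to(device) and checkpointing, whereas a plain dict would
hide them from PyTorch. TinyTask below is a hypothetical stand-in for the flash Task, used
only to illustrate the behaviour.

    import torch
    from torch import nn


    class TinyTask(nn.Module):
        """Hypothetical stand-in for flash's Task, showing the registration pattern."""

        def __init__(self, loss_fn: nn.Module):
            super().__init__()
            # Mirror the patched get_callable_dict: key the loss by its lowercased class
            # name inside an nn.ModuleDict so it is registered as a submodule.
            self.loss_fn = nn.ModuleDict({loss_fn.__class__.__name__.lower(): loss_fn})


    weight = torch.rand(10)
    task = TinyTask(nn.CrossEntropyLoss(weight=weight))

    # The weight buffer is now tracked under "loss_fn.crossentropyloss.weight",
    # matching the key asserted in the new test above.
    assert torch.allclose(task.state_dict()["loss_fn.crossentropyloss.weight"], weight)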