From 60fe41d01955d7e85372e289c4c4daa973e21349 Mon Sep 17 00:00:00 2001
From: DuYicong515
Date: Thu, 24 Feb 2022 23:00:50 -0800
Subject: [PATCH] Remove AcceleratorConnector.use_dp

---
 CHANGELOG.md                                          | 3 +++
 pytorch_lightning/trainer/configuration_validator.py  | 3 ++-
 .../trainer/connectors/accelerator_connector.py       | 5 -----
 3 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 058c92fe20e4d..83bc8b5a7027f 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -598,6 +598,9 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 
 - Removed the `AcceleratorConnector.device_type` property ([#12081](https://github.com/PyTorchLightning/pytorch-lightning/pull/12081))
 
+- Removed `AcceleratorConnector.use_dp` property ([#12112](https://github.com/PyTorchLightning/pytorch-lightning/pull/12112))
+
+
 ### Fixed
 
 - Fixed an issue where `HorovodStrategy.teardown()` did not complete gracefully if an exception was thrown during callback setup [#11752](https://github.com/PyTorchLightning/pytorch-lightning/pull/11752)

diff --git a/pytorch_lightning/trainer/configuration_validator.py b/pytorch_lightning/trainer/configuration_validator.py
index ff910865b7676..7e03d98f7debe 100644
--- a/pytorch_lightning/trainer/configuration_validator.py
+++ b/pytorch_lightning/trainer/configuration_validator.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import pytorch_lightning as pl
+from pytorch_lightning.strategies import DataParallelStrategy
 from pytorch_lightning.trainer.states import TrainerFn
 from pytorch_lightning.utilities.exceptions import MisconfigurationException
 from pytorch_lightning.utilities.model_helpers import is_overridden
@@ -208,7 +209,7 @@ def __verify_dp_batch_transfer_support(trainer: "pl.Trainer", model: "pl.Lightni
     batch_transfer_hooks = ("on_before_batch_transfer", "transfer_batch_to_device", "on_after_batch_transfer")
     datahook_selector = trainer._data_connector._datahook_selector
     for hook in batch_transfer_hooks:
-        if trainer._accelerator_connector.use_dp and (
+        if isinstance(trainer.strategy, DataParallelStrategy) and (
             is_overridden(hook, datahook_selector.model) or is_overridden(hook, datahook_selector.datamodule)
         ):
             raise MisconfigurationException(f"Overriding `{hook}` is not supported in DP mode.")

diff --git a/pytorch_lightning/trainer/connectors/accelerator_connector.py b/pytorch_lightning/trainer/connectors/accelerator_connector.py
index 42f2b78cb0f6f..c1e2ac21d5823 100644
--- a/pytorch_lightning/trainer/connectors/accelerator_connector.py
+++ b/pytorch_lightning/trainer/connectors/accelerator_connector.py
@@ -47,7 +47,6 @@
     TorchElasticEnvironment,
 )
 from pytorch_lightning.strategies import (
-    DataParallelStrategy,
     DDP2Strategy,
     DDPFullyShardedStrategy,
     DDPShardedStrategy,
@@ -859,7 +858,3 @@ def use_ipu(self) -> bool:
     @property
     def has_tpu(self) -> bool:
         return isinstance(self.accelerator, TPUAccelerator)
-
-    @property
-    def use_dp(self) -> bool:
-        return isinstance(self.strategy, DataParallelStrategy)
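
Migration note: downstream code that previously read the private `trainer._accelerator_connector.use_dp` can switch to the same public check this patch introduces in `configuration_validator.py`. A minimal sketch, assuming an already-configured `pytorch_lightning.Trainer`; the `uses_dp` helper name is hypothetical and not part of the patch:

```python
from pytorch_lightning import Trainer
from pytorch_lightning.strategies import DataParallelStrategy


def uses_dp(trainer: Trainer) -> bool:
    """Hypothetical replacement for the removed `AcceleratorConnector.use_dp`."""
    # The removed property returned isinstance(self.strategy, DataParallelStrategy);
    # the public `trainer.strategy` attribute exposes the same strategy object.
    return isinstance(trainer.strategy, DataParallelStrategy)


# Illustrative usage only (the "dp" strategy requires multiple GPUs):
# trainer = Trainer(accelerator="gpu", devices=2, strategy="dp")
# assert uses_dp(trainer)
```

Checking `isinstance(trainer.strategy, ...)` keeps the call site on the public `Trainer` API rather than the private `_accelerator_connector`, which is why `__verify_dp_batch_transfer_support` was updated in place instead of keeping the property.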