diff --git a/CHANGELOG.md b/CHANGELOG.md
index 00bd7c84b0fb9..a014b6a2cab72 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -252,6 +252,10 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
 - Changed `HorovodPlugin.all_gather` to return a `torch.Tensor` instead of a list ([#9696](https://github.com/PyTorchLightning/pytorch-lightning/pull/9696))
 
 
+- Changed Trainer connectors to be protected attributes:
+  * Configuration Validator ([#9779](https://github.com/PyTorchLightning/pytorch-lightning/pull/9779))
+
+
 - Restore `current_epoch` and `global_step` irrespective of trainer task ([#9413](https://github.com/PyTorchLightning/pytorch-lightning/pull/9413))
 
 
diff --git a/pytorch_lightning/trainer/trainer.py b/pytorch_lightning/trainer/trainer.py
index af225e708e343..a33462defb3c7 100644
--- a/pytorch_lightning/trainer/trainer.py
+++ b/pytorch_lightning/trainer/trainer.py
@@ -391,7 +391,7 @@ def __init__(
         gpu_ids, tpu_cores = self._parse_devices(gpus, auto_select_gpus, tpu_cores)
 
         # init connectors
-        self.config_validator = ConfigValidator(self)
+        self._config_validator = ConfigValidator(self)
 
         self.data_connector = DataConnector(self, multiple_trainloader_mode)
         self.optimizer_connector = OptimizerConnector(self)
@@ -978,7 +978,7 @@ def _run(self, model: "pl.LightningModule") -> Optional[Union[_EVALUATE_OUTPUT,
         if hasattr(model, "hparams"):
            parsing.clean_namespace(model.hparams)
 
-        self.config_validator.verify_loop_configurations(model)
+        self._config_validator.verify_loop_configurations(model)
 
         # attach model log function to callback
         self.callback_connector.attach_model_logging_functions(model)
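
For context, a minimal, self-contained sketch of the rename this diff applies. It is simplified and hypothetical, not the library's actual implementation: the `ConfigValidator` connector becomes a protected attribute of the `Trainer`, so it is created and called only through `self._config_validator` internally.

```python
# Simplified sketch of the public-to-protected attribute rename in this diff.
# Class bodies are placeholders; only the attribute-naming pattern is the point.


class ConfigValidator:
    def __init__(self, trainer):
        self.trainer = trainer

    def verify_loop_configurations(self, model):
        # The real connector checks that the model defines the hooks required
        # for the current trainer task; elided here.
        pass


class Trainer:
    def __init__(self):
        # The leading underscore marks the connector as internal API:
        # external code should no longer rely on `trainer.config_validator`.
        self._config_validator = ConfigValidator(self)

    def _run(self, model):
        # Internal call sites switch to the protected name as well.
        self._config_validator.verify_loop_configurations(model)
```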