removing legacy profiler arg (#9178)
Co-authored-by: Adrian Wälchli <[email protected]>
Tshimanga and awaelchli authored Aug 30, 2021
1 parent 46b00a7 commit f79993a
Showing 5 changed files with 5 additions and 38 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -231,6 +231,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).
- Removed `teardown` from `ParallelPlugin` ([#8943](https://github.com/PyTorchLightning/pytorch-lightning/pull/8943))


- Removed deprecated `profiled_functions` argument from `PyTorchProfiler` ([#9178](https://github.com/PyTorchLightning/pytorch-lightning/pull/9178))

### Fixed

- Fixed save/load/resume from checkpoint for DeepSpeed Plugin (
2 changes: 1 addition & 1 deletion pytorch_lightning/core/hooks.py
@@ -20,7 +20,7 @@

from pytorch_lightning.utilities import move_data_to_device
from pytorch_lightning.utilities.types import EVAL_DATALOADERS, STEP_OUTPUT, TRAIN_DATALOADERS
from pytorch_lightning.utilities.warnings import rank_zero_deprecation, rank_zero_warn
from pytorch_lightning.utilities.warnings import rank_zero_deprecation


class ModelHooks:
28 changes: 2 additions & 26 deletions pytorch_lightning/profiler/pytorch.py
@@ -24,7 +24,7 @@
from torch.autograd.profiler import record_function

from pytorch_lightning.profiler.base import BaseProfiler
from pytorch_lightning.utilities import rank_zero_deprecation, rank_zero_warn
from pytorch_lightning.utilities import rank_zero_warn
from pytorch_lightning.utilities.exceptions import MisconfigurationException
from pytorch_lightning.utilities.imports import _KINETO_AVAILABLE

@@ -222,7 +222,6 @@ def __init__(
sort_by_key: Optional[str] = None,
record_functions: Set[str] = None,
record_module_names: bool = True,
profiled_functions: Optional[List] = None,
output_filename: Optional[str] = None,
**profiler_kwargs: Any,
) -> None:
@@ -277,14 +276,12 @@ def __init__(
"""
super().__init__(dirpath=dirpath, filename=filename, output_filename=output_filename)

record_functions = self.__deprecation_check(profiled_functions, record_functions)

self._group_by_input_shapes = group_by_input_shapes and profiler_kwargs.get("record_shapes", False)
self._emit_nvtx = emit_nvtx
self._export_to_chrome = export_to_chrome
self._row_limit = row_limit
self._sort_by_key = sort_by_key or f"{'cuda' if profiler_kwargs.get('use_cuda', False) else 'cpu'}_time_total"
self._user_record_functions = record_functions
self._user_record_functions = record_functions or set()
self._record_functions_start = self._user_record_functions | self.START_RECORD_FUNCTIONS
self._record_functions = self._user_record_functions | self.RECORD_FUNCTIONS
self._record_module_names = record_module_names
@@ -331,27 +328,6 @@ def _init_kineto(self, profiler_kwargs: Any) -> None:
with_stack = profiler_kwargs.get("with_stack", False) or self._export_to_flame_graph
self._profiler_kwargs["with_stack"] = with_stack

def __deprecation_check(
self, profiled_functions: Optional[List[str]], record_functions: Optional[Set[str]]
) -> Set[str]:
if record_functions is None:
record_functions = set()

if profiled_functions is not None:
rank_zero_deprecation(
"`PyTorchProfiler.profiled_functions` has been renamed to"
" `record_functions` in v1.3 and will be removed in v1.5"
)
if not record_functions:
record_functions |= set(profiled_functions)
else:
raise MisconfigurationException(
"You set `PytorchProfiler.profiled_functions` and `PyTorchProfiler.record_functions`."
" Please use only the later."
)

return record_functions

@staticmethod
def _default_schedule() -> Optional[callable]:
if _KINETO_AVAILABLE:
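
For downstream users, a minimal migration sketch (assuming PyTorch Lightning ≥ 1.5, where the `profiled_functions` argument no longer exists; the recorded function names and paths below are illustrative):

```python
# Minimal migration sketch: `profiled_functions` is removed, so the set of
# regions to record is passed via `record_functions` instead.
from pytorch_lightning import Trainer
from pytorch_lightning.profiler import PyTorchProfiler

profiler = PyTorchProfiler(
    dirpath=".",
    filename="perf_logs",
    # Illustrative hook names to record in the profiler output.
    record_functions={"training_step", "validation_step"},
)

trainer = Trainer(profiler=profiler, max_epochs=1)
```
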
5 changes: 0 additions & 5 deletions tests/deprecated_api/test_remove_1-5.py
@@ -25,11 +25,6 @@
from tests.helpers.utils import no_warning_call


def test_v1_5_0_legacy_profiler_argument():
with pytest.deprecated_call(match="renamed to `record_functions` in v1.3"):
PyTorchProfiler(profiled_functions=[])


def test_v1_5_0_running_sanity_check():
trainer = Trainer()
with pytest.deprecated_call(match="has been renamed to `Trainer.sanity_checking`"):
6 changes: 0 additions & 6 deletions tests/profiler/test_profiler.py
@@ -258,12 +258,6 @@ def test_pytorch_profiler_describe(pytorch_profiler):
assert len(data) > 0


def test_pytorch_profiler_raises(pytorch_profiler):
"""Ensure errors are raised where expected."""
with pytest.raises(MisconfigurationException, match="profiled_functions` and `PyTorchProfiler.record"):
PyTorchProfiler(profiled_functions=["a"], record_functions=["b"])


def test_advanced_profiler_cprofile_deepcopy(tmpdir):
"""Checks for pickle issue reported in #6522"""
model = BoringModel()
