Remove deprecated compute_on_step in Regression (#967)
* Remove deprecated `compute_on_step` in Regression
* update changelog
* fix flake8
vumichien authored Apr 20, 2022
1 parent 23b2e45 commit 10b3a85
Showing 12 changed files with 23 additions and 101 deletions.
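For downstream users, here is a minimal, illustrative before/after sketch of what this removal means in practice. It is not part of the diff; it assumes the standard `update()`/`compute()`/`forward()` Metric API and the 0.8 behaviour described in the deprecated docstrings below, and the example tensors are arbitrary.

```python
import torch
from torchmetrics import MeanSquaredError

preds = torch.tensor([2.5, 0.0, 2.0, 8.0])
target = torch.tensor([3.0, -0.5, 2.0, 7.0])

# Before v0.9 one could write MeanSquaredError(compute_on_step=False) so that
# forward() returned None; that argument is now removed and no longer accepted.
metric = MeanSquaredError()

# forward() returns the value for the current batch and also accumulates state ...
batch_mse = metric(preds, target)

# ... while compute() returns the value accumulated over all batches seen so far.
epoch_mse = metric.compute()
metric.reset()
```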
2 changes: 1 addition & 1 deletion CHANGELOG.md
@@ -38,7 +38,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- Removed deprecated `compute_on_step` argument ([#962](https://github.com/PyTorchLightning/metrics/pull/962))


-
- Removed deprecated `compute_on_step` argument in Regression ([#967](https://github.com/PyTorchLightning/metrics/pull/967))


### Fixed
11 changes: 2 additions & 9 deletions torchmetrics/regression/cosine_similarity.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List

import torch
from torch import Tensor
@@ -40,12 +40,6 @@ class CosineSimilarity(Metric):
Args:
reduction: how to reduce over the batch dimension using 'sum', 'mean' or 'none' (taking the individual scores)
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Example:
@@ -65,10 +59,9 @@ class CosineSimilarity(Metric):
def __init__(
self,
reduction: Literal["mean", "sum", "none", None] = "sum",
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)
allowed_reduction = ("sum", "mean", "none", None)
if reduction not in allowed_reduction:
raise ValueError(f"Expected argument `reduction` to be one of {allowed_reduction} but got {reduction}")
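A short usage sketch for the constructor kept above (illustrative, not part of the diff); `reduction` controls how the per-sample cosine scores are combined, with choices taken from the docstring:

```python
import torch
from torchmetrics import CosineSimilarity

# 'sum' (the default), 'mean' or 'none' — 'none' keeps the individual scores.
cosine = CosineSimilarity(reduction="mean")

preds = torch.tensor([[0.0, 1.0], [1.0, 1.0]])
target = torch.tensor([[0.0, 1.0], [0.0, 1.0]])

cosine.update(preds, target)
print(cosine.compute())  # mean of the two per-sample cosine similarities
```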
11 changes: 2 additions & 9 deletions torchmetrics/regression/explained_variance.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional, Sequence, Union
from typing import Any, Dict, Sequence, Union

import torch
from torch import Tensor, tensor
@@ -47,12 +47,6 @@ class ExplainedVariance(Metric):
* ``'uniform_average'`` scores are uniformly averaged
* ``'variance_weighted'`` scores are weighted by their individual variances
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Raises:
@@ -85,10 +79,9 @@
def __init__(
self,
multioutput: str = "uniform_average",
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)
allowed_multioutput = ("raw_values", "uniform_average", "variance_weighted")
if multioutput not in allowed_multioutput:
raise ValueError(
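For reference, an illustrative sketch of the `multioutput` options documented above (not part of the diff; the example tensors are arbitrary):

```python
import torch
from torchmetrics import ExplainedVariance

target = torch.tensor([[0.5, 1.0], [-1.0, 1.0], [7.0, -6.0]])
preds = torch.tensor([[0.0, 2.0], [-1.0, 2.0], [8.0, -5.0]])

# 'raw_values' returns one score per output dimension instead of averaging.
ev_raw = ExplainedVariance(multioutput="raw_values")
print(ev_raw(preds, target))  # tensor with one explained-variance score per column

# 'uniform_average' (the default) collapses them into a single number.
ev_avg = ExplainedVariance()
print(ev_avg(preds, target))
```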
11 changes: 2 additions & 9 deletions torchmetrics/regression/log_mse.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional
from typing import Any, Dict

import torch
from torch import Tensor, tensor
@@ -28,12 +28,6 @@ class MeanSquaredLogError(Metric):
Where :math:`y` is a tensor of target values, and :math:`\hat{y}` is a tensor of predictions.
Args:
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Example:
@@ -55,10 +49,9 @@ class MeanSquaredLogError(Metric):

def __init__(
self,
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)

self.add_state("sum_squared_log_error", default=tensor(0.0), dist_reduce_fx="sum")
self.add_state("total", default=tensor(0), dist_reduce_fx="sum")
11 changes: 2 additions & 9 deletions torchmetrics/regression/mae.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional
from typing import Any, Dict

import torch
from torch import Tensor, tensor
@@ -28,12 +28,6 @@ class MeanAbsoluteError(Metric):
Where :math:`y` is a tensor of target values, and :math:`\hat{y}` is a tensor of predictions.
Args:
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Example:
@@ -51,10 +45,9 @@

def __init__(
self,
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)

self.add_state("sum_abs_error", default=tensor(0.0), dist_reduce_fx="sum")
self.add_state("total", default=tensor(0), dist_reduce_fx="sum")
11 changes: 2 additions & 9 deletions torchmetrics/regression/mape.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional
from typing import Any, Dict

import torch
from torch import Tensor, tensor
@@ -31,12 +31,6 @@ class MeanAbsolutePercentageError(Metric):
Where :math:`y` is a tensor of target values, and :math:`\hat{y}` is a tensor of predictions.
Args:
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Note:
@@ -63,10 +57,9 @@

def __init__(
self,
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)

self.add_state("sum_abs_per_error", default=tensor(0.0), dist_reduce_fx="sum")
self.add_state("total", default=tensor(0.0), dist_reduce_fx="sum")
11 changes: 2 additions & 9 deletions torchmetrics/regression/mse.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional
from typing import Any, Dict

import torch
from torch import Tensor, tensor
@@ -29,12 +29,6 @@ class MeanSquaredError(Metric):
Args:
squared: If True returns MSE value, if False returns RMSE value.
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Example:
@@ -54,10 +48,9 @@
def __init__(
self,
squared: bool = True,
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)

self.add_state("sum_squared_error", default=tensor(0.0), dist_reduce_fx="sum")
self.add_state("total", default=tensor(0), dist_reduce_fx="sum")
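An illustrative sketch of the `squared` flag documented above (not part of the diff; the example tensors are arbitrary):

```python
import torch
from torchmetrics import MeanSquaredError

preds = torch.tensor([2.5, 0.0, 2.0, 8.0])
target = torch.tensor([3.0, -0.5, 2.0, 7.0])

mse = MeanSquaredError()                # squared=True  -> mean squared error
rmse = MeanSquaredError(squared=False)  # squared=False -> root mean squared error

print(mse(preds, target))   # 0.375
print(rmse(preds, target))  # sqrt(0.375) ≈ 0.6124
```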
11 changes: 2 additions & 9 deletions torchmetrics/regression/pearson.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, List, Optional, Tuple
from typing import Any, Dict, List, Tuple

import torch
from torch import Tensor
@@ -66,12 +66,6 @@ class PearsonCorrCoef(Metric):
- ``target``(float tensor): ``(N,)``
Args:
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Example:
@@ -96,10 +90,9 @@

def __init__(
self,
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)

self.add_state("mean_x", default=torch.tensor(0.0), dist_reduce_fx=None)
self.add_state("mean_y", default=torch.tensor(0.0), dist_reduce_fx=None)
12 changes: 2 additions & 10 deletions torchmetrics/regression/r2.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional
from typing import Any, Dict

import torch
from torch import Tensor, tensor
@@ -49,13 +49,6 @@ class R2Score(Metric):
* ``'raw_values'`` returns full set of scores
* ``'uniform_average'`` scores are uniformly averaged
* ``'variance_weighted'`` scores are weighted by their individual variances
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Raises:
@@ -91,10 +84,9 @@ def __init__(
num_outputs: int = 1,
adjusted: int = 0,
multioutput: str = "uniform_average",
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)

self.num_outputs = num_outputs

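An illustrative sketch of the constructor arguments kept above, `num_outputs`, `adjusted` and `multioutput` (not part of the diff; the example tensors are arbitrary):

```python
import torch
from torchmetrics import R2Score

# Multi-output targets: declare num_outputs and keep one score per output column.
r2 = R2Score(num_outputs=2, multioutput="raw_values")

target = torch.tensor([[0.5, 1.0], [-1.0, 1.0], [7.0, -6.0]])
preds = torch.tensor([[0.0, 2.0], [-1.0, 2.0], [8.0, -5.0]])

print(r2(preds, target))  # one R^2 score per output column

# adjusted > 0 computes adjusted R^2 for that number of independent regressors.
r2_adj = R2Score(adjusted=1)
```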
11 changes: 2 additions & 9 deletions torchmetrics/regression/spearman.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, List, Optional
from typing import Any, Dict, List

import torch
from torch import Tensor
@@ -33,12 +33,6 @@ class SpearmanCorrCoef(Metric):
on the rank variables.
Args:
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Example:
@@ -57,10 +51,9 @@

def __init__(
self,
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)
rank_zero_warn(
"Metric `SpearmanCorrcoef` will save all targets and predictions in the buffer."
" For large datasets, this may lead to large memory footprint."
11 changes: 2 additions & 9 deletions torchmetrics/regression/symmetric_mape.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional
from typing import Any, Dict

from torch import Tensor, tensor

@@ -30,12 +30,6 @@ class SymmetricMeanAbsolutePercentageError(Metric):
Where :math:`y` is a tensor of target values, and :math:`\hat{y}` is a tensor of predictions.
Args:
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Note:
@@ -60,10 +54,9 @@

def __init__(
self,
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)

self.add_state("sum_abs_per_error", default=tensor(0.0), dist_reduce_fx="sum")
self.add_state("total", default=tensor(0.0), dist_reduce_fx="sum")
11 changes: 2 additions & 9 deletions torchmetrics/regression/tweedie_deviance.py
@@ -11,7 +11,7 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Any, Dict, Optional
from typing import Any, Dict

import torch
from torch import Tensor
@@ -54,12 +54,6 @@ class TweedieDevianceScore(Metric):
- power = 3 : Inverse Gaussian distribution. (Requires: targets > 0 and preds > 0.)
- otherwise : Positive stable distribution. (Requires: targets > 0 and preds > 0.)
compute_on_step:
Forward only calls ``update()`` and returns None if this is set to False.
.. deprecated:: v0.8
Argument has no use anymore and will be removed v0.9.
kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
Example:
@@ -79,10 +73,9 @@
def __init__(
self,
power: float = 0.0,
compute_on_step: Optional[bool] = None,
**kwargs: Dict[str, Any],
) -> None:
super().__init__(compute_on_step=compute_on_step, **kwargs)
super().__init__(**kwargs)
if 0 < power < 1:
raise ValueError(f"Deviance Score is not defined for power={power}.")

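An illustrative sketch of the `power` argument documented above (not part of the diff); note the domain constraints the docstring lists for each distribution, and that the example values are arbitrary but chosen to satisfy them:

```python
import torch
from torchmetrics import TweedieDevianceScore

targets = torch.tensor([1.0, 2.0, 3.0, 4.0])
preds = torch.tensor([4.0, 3.0, 2.0, 1.0])

# power=0 -> normal distribution (plain squared error), power=1 -> Poisson,
# power=2 -> Gamma; values in (0, 1) raise a ValueError, as enforced above.
deviance = TweedieDevianceScore(power=2)
print(deviance(preds, targets))
```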
