diff --git a/CHANGELOG.md b/CHANGELOG.md
index b5137f2b3cb..8064ef60063 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -44,6 +44,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Removed deprecated `compute_on_step` argument in Image ([#979](https://github.com/PyTorchLightning/metrics/pull/979))
 
 
+- Removed deprecated `compute_on_step` argument in Wrappers ([#991](https://github.com/PyTorchLightning/metrics/pull/991))
+
+
 ### Fixed
 
 - Fixed "Sort currently does not support bool dtype on CUDA" error in MAP for empty preds ([#983](https://github.com/PyTorchLightning/metrics/pull/983))
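
Migration note for the wrapper removals above: passing ``compute_on_step`` to a wrapper now raises a ``TypeError`` at construction. A minimal sketch of the replacement pattern, assuming the advice from the v0.8 deprecation notice (call ``update()`` directly when no per-step value is wanted); ``BootStrapper`` and ``MeanSquaredError`` here are stand-ins for any wrapper/base-metric pair:

    import torch
    from torchmetrics import MeanSquaredError
    from torchmetrics.wrappers import BootStrapper

    # v0.8 (deprecated): BootStrapper(MeanSquaredError(), compute_on_step=False)
    # v0.9: the argument is gone; accumulate with update() and compute once.
    metric = BootStrapper(MeanSquaredError())
    metric.update(torch.randn(100), torch.randn(100))  # accumulates state, returns None
    result = metric.compute()
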
diff --git a/torchmetrics/wrappers/bootstrapping.py b/torchmetrics/wrappers/bootstrapping.py
index be939b8e067..c1fcafa88dd 100644
--- a/torchmetrics/wrappers/bootstrapping.py
+++ b/torchmetrics/wrappers/bootstrapping.py
@@ -66,12 +66,6 @@ class basically keeps multiple copies of the same base metric in memory and when
             will be given by :math:`n\sim Poisson(\lambda=1)`, which approximates the true bootstrap distribution
             when the number of samples is large. If ``'multinomial'`` is chosen, we will apply true bootstrapping
             at the batch level to approximate bootstrapping over the whole dataset.
-        compute_on_step:
-            Forward only calls ``update()`` and returns None if this is set to False.
-
-            .. deprecated:: v0.8
-                Argument has no use anymore and will be removed v0.9.
-
         kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
 
     Example::
@@ -96,10 +90,9 @@ def __init__(
         quantile: Optional[Union[float, Tensor]] = None,
         raw: bool = False,
         sampling_strategy: str = "poisson",
-        compute_on_step: Optional[bool] = None,
         **kwargs: Dict[str, Any],
     ) -> None:
-        super().__init__(compute_on_step=compute_on_step, **kwargs)
+        super().__init__(**kwargs)
         if not isinstance(base_metric, Metric):
             raise ValueError(
                 "Expected base metric to be an instance of torchmetrics.Metric" f" but received {base_metric}"
diff --git a/torchmetrics/wrappers/minmax.py b/torchmetrics/wrappers/minmax.py
index 3f5f75223a6..faaa3b006c9 100644
--- a/torchmetrics/wrappers/minmax.py
+++ b/torchmetrics/wrappers/minmax.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Any, Dict, Optional, Union
+from typing import Any, Dict, Union
 
 import torch
 from torch import Tensor
@@ -27,12 +27,6 @@ class MinMaxMetric(Metric):
     Args:
         base_metric:
             The metric of which you want to keep track of its maximum and minimum values.
-        compute_on_step:
-            Forward only calls ``update()`` and returns None if this is set to False.
-
-            .. deprecated:: v0.8
-                Argument has no use anymore and will be removed v0.9.
-
         kwargs: Additional keyword arguments, see :ref:`Metric kwargs` for more info.
 
     Raises:
@@ -63,10 +57,9 @@ class MinMaxMetric(Metric):
     def __init__(
         self,
         base_metric: Metric,
-        compute_on_step: Optional[bool] = None,
         **kwargs: Dict[str, Any],
     ) -> None:
-        super().__init__(compute_on_step=compute_on_step, **kwargs)
+        super().__init__(**kwargs)
         if not isinstance(base_metric, Metric):
             raise ValueError(
                 f"Expected base metric to be an instance of `torchmetrics.Metric` but received {base_metric}"