From d8abf9ac0ff053c2fc23f3902edb9aabebbe1ebf Mon Sep 17 00:00:00 2001
From: Zaida Zhou <58739961+zhouzaida@users.noreply.github.com>
Date: Thu, 23 Feb 2023 15:22:33 +0800
Subject: [PATCH] fix typo (#955)

---
 mmengine/registry/build_functions.py | 1 +
 mmengine/registry/registry.py        | 2 +-
 mmengine/runner/runner.py            | 2 +-
 3 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/mmengine/registry/build_functions.py b/mmengine/registry/build_functions.py
index 283fc8c0cb..821c337c35 100644
--- a/mmengine/registry/build_functions.py
+++ b/mmengine/registry/build_functions.py
@@ -140,6 +140,7 @@ def build_from_cfg(
 def build_runner_from_cfg(cfg: Union[dict, ConfigDict, Config],
                           registry: Registry) -> 'Runner':
     """Build a Runner object.
+
     Examples:
         >>> from mmengine.registry import Registry, build_runner_from_cfg
         >>> RUNNERS = Registry('runners', build_func=build_runner_from_cfg)
diff --git a/mmengine/registry/registry.py b/mmengine/registry/registry.py
index 19693a132a..5e0dbd4ff9 100644
--- a/mmengine/registry/registry.py
+++ b/mmengine/registry/registry.py
@@ -424,7 +424,7 @@ def get(self, key: str) -> Optional[Type]:
             >>> # hierarchical registry
             >>> DETECTORS = Registry('detector', parent=MODELS, scope='det')
             >>> # `ResNet` does not exist in `DETECTORS` but `get` method
-            >>> # will try to search from its parenet or ancestors
+            >>> # will try to search from its parents or ancestors
             >>> resnet_cls = DETECTORS.get('ResNet')
             >>> CLASSIFIER = Registry('classifier', parent=MODELS, scope='cls')
             >>> @CLASSIFIER.register_module()
diff --git a/mmengine/runner/runner.py b/mmengine/runner/runner.py
index 79b2ad4d45..f525463793 100644
--- a/mmengine/runner/runner.py
+++ b/mmengine/runner/runner.py
@@ -1078,7 +1078,7 @@ def build_optim_wrapper(
         else:
             # if `optimizer` is not defined, it should be the case of
             # training with multiple optimizers. If `constructor` is not
-            # defined either, Each value of `optim_wrapper` must be an
+            # defined either, each value of `optim_wrapper` must be an
             # `OptimWrapper` instance since `DefaultOptimizerConstructor`
             # will not handle the case of training with multiple
             # optimizers. `build_optim_wrapper` will directly build the
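
The docstring touched in registry.py describes hierarchical lookup: a child
registry that misses a key falls back to its parent and ancestors. A minimal
sketch of that behavior, assuming mmengine is installed and reusing the names
from the doctest in the hunk above:

    # A minimal sketch, assuming mmengine is installed; the registry
    # names mirror the doctest in the patched docstring.
    from mmengine.registry import Registry

    MODELS = Registry('model')
    DETECTORS = Registry('detector', parent=MODELS, scope='det')

    @MODELS.register_module()
    class ResNet:
        pass

    # `ResNet` is registered in MODELS, not DETECTORS, but `get` walks
    # up to the parent registry and finds it there.
    resnet_cls = DETECTORS.get('ResNet')
    assert resnet_cls is ResNet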