From 9d5decd80a77203fe167636d5173c61baed89637 Mon Sep 17 00:00:00 2001 From: Tomasz Kornuta Date: Fri, 16 Oct 2020 14:18:46 -0700 Subject: [PATCH 1/8] Initial version of dataloader Signed-off-by: Tomasz Kornuta --- conf/torch.utils.data.yaml | 23 +++++++++++++++++++++++ config/torch/utils/data.py | 31 +++++++++++++++++++++++++++++++ 2 files changed, 54 insertions(+) create mode 100644 conf/torch.utils.data.yaml create mode 100644 config/torch/utils/data.py diff --git a/conf/torch.utils.data.yaml b/conf/torch.utils.data.yaml new file mode 100644 index 0000000..db11cce --- /dev/null +++ b/conf/torch.utils.data.yaml @@ -0,0 +1,23 @@ +configen: + # output directory + output_dir: ${hydra:runtime.cwd} + + header: | + # Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved + # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. + # SPDX-License-Identifier: MIT + # + # Generated by configen, do not edit. + # See https://github.com/facebookresearch/hydra/tree/master/tools/configen + # fmt: off + # isort:skip_file + # flake8: noqa + + module_path_pattern: 'config/{{module_path}}.py' + + # list of modules to generate configs for + modules: + - name: torch.utils.data + # for each module, a list of classes + classes: + - DataLoader diff --git a/config/torch/utils/data.py b/config/torch/utils/data.py new file mode 100644 index 0000000..ead9567 --- /dev/null +++ b/config/torch/utils/data.py @@ -0,0 +1,31 @@ +# Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved +# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. +# SPDX-License-Identifier: MIT +# +# Generated by configen, do not edit. +# See https://github.com/facebookresearch/hydra/tree/master/tools/configen +# fmt: off +# isort:skip_file +# flake8: noqa + +from dataclasses import dataclass, field +from omegaconf import MISSING +from typing import Any + + +@dataclass +class DataLoaderConf: + _target_: str = "torch.utils.data.DataLoader" + dataset: Any = MISSING + batch_size: Any = 1 + shuffle: Any = False + sampler: Any = None + batch_sampler: Any = None + num_workers: Any = 0 + collate_fn: Any = None + pin_memory: Any = False + drop_last: Any = False + timeout: Any = 0 + worker_init_fn: Any = None + multiprocessing_context: Any = None + generator: Any = None From 42c0da88e714e73961e953afbb86da5fb862fa02 Mon Sep 17 00:00:00 2001 From: Tomasz Kornuta Date: Fri, 16 Oct 2020 14:49:05 -0700 Subject: [PATCH 2/8] added license to yaml Signed-off-by: Tomasz Kornuta --- conf/torch.utils.data.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/conf/torch.utils.data.yaml b/conf/torch.utils.data.yaml index db11cce..c843372 100644 --- a/conf/torch.utils.data.yaml +++ b/conf/torch.utils.data.yaml @@ -1,3 +1,7 @@ +# Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved +# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. 
+# SPDX-License-Identifier: MIT
+
 configen:
   # output directory
   output_dir: ${hydra:runtime.cwd}

From 31bd18993516b8ce65a5602a6fee7934529f4d88 Mon Sep 17 00:00:00 2001
From: Tomasz Kornuta
Date: Tue, 20 Oct 2020 13:22:41 -0700
Subject: [PATCH 3/8] fixed headers, reorganized folders, used modified
 configen to regenerate configs with additional URL links

Signed-off-by: Tomasz Kornuta
---
 config/torch/optim/adadelta.py              |  2 ++
 config/torch/optim/adagrad.py               |  4 ++++
 config/torch/optim/adam.py                  |  4 ++++
 config/torch/optim/adamax.py                |  4 ++++
 config/torch/optim/adamw.py                 |  4 ++++
 config/torch/optim/asgd.py                  |  4 ++++
 config/torch/optim/lbfgs.py                 |  4 ++++
 config/torch/optim/lr_scheduler.py          | 11 +++++++++++
 config/torch/optim/rmsprop.py               |  4 ++++
 config/torch/optim/rprop.py                 |  4 ++++
 config/torch/optim/sgd.py                   |  4 ++++
 config/torch/optim/sparse_adam.py           |  4 ++++
 config/torch/utils/data.py                  |  4 +++-
 {conf => sources/torch.optim}/configen.yaml | 15 +++++++++++++++
 .../torch.utils.data/configen.yaml          |  3 +--
 15 files changed, 72 insertions(+), 3 deletions(-)
 rename {conf => sources/torch.optim}/configen.yaml (68%)
 rename conf/torch.utils.data.yaml => sources/torch.utils.data/configen.yaml (84%)

diff --git a/config/torch/optim/adadelta.py b/config/torch/optim/adadelta.py
index 317361d..4193444 100644
--- a/config/torch/optim/adadelta.py
+++ b/config/torch/optim/adadelta.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,7 @@

 @dataclass
 class AdadeltaConf:
+
     _target_: str = "torch.optim.adadelta.Adadelta"
     params: Any = MISSING
     lr: Any = 1.0
diff --git a/config/torch/optim/adagrad.py b/config/torch/optim/adagrad.py
index c3f351f..8340596 100644
--- a/config/torch/optim/adagrad.py
+++ b/config/torch/optim/adagrad.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class AdagradConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.Adagrad
+    """
     _target_: str = "torch.optim.adagrad.Adagrad"
     params: Any = MISSING
     lr: Any = 0.01
diff --git a/config/torch/optim/adam.py b/config/torch/optim/adam.py
index 72f8473..f79c6af 100644
--- a/config/torch/optim/adam.py
+++ b/config/torch/optim/adam.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class AdamConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.Adam
+    """
     _target_: str = "torch.optim.adam.Adam"
     params: Any = MISSING
     lr: Any = 0.001
diff --git a/config/torch/optim/adamax.py b/config/torch/optim/adamax.py
index e93e4df..3bf343a 100644
--- a/config/torch/optim/adamax.py
+++ b/config/torch/optim/adamax.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class AdamaxConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.Adamax
+    """
     _target_: str = "torch.optim.adamax.Adamax"
     params: Any = MISSING
     lr: Any = 0.002
diff --git a/config/torch/optim/adamw.py b/config/torch/optim/adamw.py
index 853bda7..0820da8 100644
--- a/config/torch/optim/adamw.py
+++ b/config/torch/optim/adamw.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class AdamWConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.AdamW
+    """
     _target_: str = "torch.optim.adamw.AdamW"
     params: Any = MISSING
     lr: Any = 0.001
diff --git a/config/torch/optim/asgd.py b/config/torch/optim/asgd.py
index 3179c6b..a9ef961 100644
--- a/config/torch/optim/asgd.py
+++ b/config/torch/optim/asgd.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class ASGDConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.ASGD
+    """
     _target_: str = "torch.optim.asgd.ASGD"
     params: Any = MISSING
     lr: Any = 0.01
diff --git a/config/torch/optim/lbfgs.py b/config/torch/optim/lbfgs.py
index ce6c582..83909a9 100644
--- a/config/torch/optim/lbfgs.py
+++ b/config/torch/optim/lbfgs.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class LBFGSConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.LBFGS
+    """
     _target_: str = "torch.optim.lbfgs.LBFGS"
     params: Any = MISSING
     lr: Any = 1
diff --git a/config/torch/optim/lr_scheduler.py b/config/torch/optim/lr_scheduler.py
index e85b040..58aeb08 100644
--- a/config/torch/optim/lr_scheduler.py
+++ b/config/torch/optim/lr_scheduler.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,7 @@

 @dataclass
 class LambdaLRConf:
+
     _target_: str = "torch.optim.lr_scheduler.LambdaLR"
     optimizer: Any = MISSING
     lr_lambda: Any = MISSING
@@ -21,6 +23,7 @@ class LambdaLRConf:

 @dataclass
 class MultiplicativeLRConf:
+
     _target_: str = "torch.optim.lr_scheduler.MultiplicativeLR"
     optimizer: Any = MISSING
     lr_lambda: Any = MISSING
@@ -29,6 +32,7 @@ class MultiplicativeLRConf:

 @dataclass
 class StepLRConf:
+
     _target_: str = "torch.optim.lr_scheduler.StepLR"
     optimizer: Any = MISSING
     step_size: Any = MISSING
@@ -38,6 +42,7 @@ class StepLRConf:

 @dataclass
 class MultiStepLRConf:
+
     _target_: str = "torch.optim.lr_scheduler.MultiStepLR"
     optimizer: Any = MISSING
     milestones: Any = MISSING
@@ -47,6 +52,7 @@ class MultiStepLRConf:

 @dataclass
 class ExponentialLRConf:
+
     _target_: str = "torch.optim.lr_scheduler.ExponentialLR"
     optimizer: Any = MISSING
     gamma: Any = MISSING
@@ -55,6 +61,7 @@ class ExponentialLRConf:

 @dataclass
 class CosineAnnealingLRConf:
+
     _target_: str = "torch.optim.lr_scheduler.CosineAnnealingLR"
     optimizer: Any = MISSING
     T_max: Any = MISSING
@@ -64,6 +71,7 @@ class CosineAnnealingLRConf:

 @dataclass
 class ReduceLROnPlateauConf:
+
     _target_: str = "torch.optim.lr_scheduler.ReduceLROnPlateau"
     optimizer: Any = MISSING
     mode: Any = min
@@ -79,6 +87,7 @@ class ReduceLROnPlateauConf:

 @dataclass
 class CyclicLRConf:
+
     _target_: str = "torch.optim.lr_scheduler.CyclicLR"
     optimizer: Any = MISSING
     base_lr: Any = MISSING
@@ -97,6 +106,7 @@ class CyclicLRConf:

 @dataclass
 class CosineAnnealingWarmRestartsConf:
+
     _target_: str = "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts"
     optimizer: Any = MISSING
     T_0: Any = MISSING
@@ -107,6 +117,7 @@ class CosineAnnealingWarmRestartsConf:

 @dataclass
 class OneCycleLRConf:
+
     _target_: str = "torch.optim.lr_scheduler.OneCycleLR"
     optimizer: Any = MISSING
     max_lr: Any = MISSING
diff --git a/config/torch/optim/rmsprop.py b/config/torch/optim/rmsprop.py
index 414597f..fa33ce3 100644
--- a/config/torch/optim/rmsprop.py
+++ b/config/torch/optim/rmsprop.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class RMSpropConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.RMSprop
+    """
     _target_: str = "torch.optim.rmsprop.RMSprop"
     params: Any = MISSING
     lr: Any = 0.01
diff --git a/config/torch/optim/rprop.py b/config/torch/optim/rprop.py
index abe0d71..eb0ea20 100644
--- a/config/torch/optim/rprop.py
+++ b/config/torch/optim/rprop.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class RpropConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.Rprop
+    """
     _target_: str = "torch.optim.rprop.Rprop"
     params: Any = MISSING
     lr: Any = 0.01
diff --git a/config/torch/optim/sgd.py b/config/torch/optim/sgd.py
index 79eb0c2..e048fa5 100644
--- a/config/torch/optim/sgd.py
+++ b/config/torch/optim/sgd.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class SGDConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.SGD
+    """
     _target_: str = "torch.optim.sgd.SGD"
     params: Any = MISSING
     lr: Any = MISSING  # _RequiredParameter
diff --git a/config/torch/optim/sparse_adam.py b/config/torch/optim/sparse_adam.py
index 46a17b2..b5a3157 100644
--- a/config/torch/optim/sparse_adam.py
+++ b/config/torch/optim/sparse_adam.py
@@ -1,4 +1,5 @@
 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
 # See https://github.com/facebookresearch/hydra/tree/master/tools/configen
@@ -13,6 +14,9 @@

 @dataclass
 class SparseAdamConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.SparseAdam
+    """
     _target_: str = "torch.optim.sparse_adam.SparseAdam"
     params: Any = MISSING
     lr: Any = 0.001
diff --git a/config/torch/utils/data.py b/config/torch/utils/data.py
index ead9567..44f6fd7 100644
--- a/config/torch/utils/data.py
+++ b/config/torch/utils/data.py
@@ -1,5 +1,4 @@
 # Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved
-# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
@@ -15,6 +14,9 @@

 @dataclass
 class DataLoaderConf:
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/data.html#torch.utils.data.DataLoader
+    """
     _target_: str = "torch.utils.data.DataLoader"
     dataset: Any = MISSING
     batch_size: Any = 1
diff --git a/conf/configen.yaml b/sources/torch.optim/configen.yaml
similarity index 68%
rename from conf/configen.yaml
rename to sources/torch.optim/configen.yaml
index dd0959b..b95402d 100644
--- a/conf/configen.yaml
+++ b/sources/torch.optim/configen.yaml
@@ -1,9 +1,13 @@
+# Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved
+# SPDX-License-Identifier: MIT
+
 configen:
   # output directory
   output_dir: ${hydra:runtime.cwd}

   header: |
     # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+    # SPDX-License-Identifier: MIT
     #
     # Generated by configen, do not edit.
# See https://github.com/facebookresearch/hydra/tree/master/tools/configen @@ -21,56 +25,67 @@ configen: - Adadelta - name: torch.optim.adagrad + url: https://pytorch.org/docs/stable/optim.html#torch.optim.Adagrad # for each module, a list of classes classes: - Adagrad - name: torch.optim.adam + url: https://pytorch.org/docs/stable/optim.html#torch.optim.Adam # for each module, a list of classes classes: - Adam - name: torch.optim.adamw + url: https://pytorch.org/docs/stable/optim.html#torch.optim.AdamW # for each module, a list of classes classes: - AdamW - name: torch.optim.sparse_adam + url: https://pytorch.org/docs/stable/optim.html#torch.optim.SparseAdam # for each module, a list of classes classes: - SparseAdam - name: torch.optim.adamax + url: https://pytorch.org/docs/stable/optim.html#torch.optim.Adamax # for each module, a list of classes classes: - Adamax - name: torch.optim.asgd + url: https://pytorch.org/docs/stable/optim.html#torch.optim.ASGD # for each module, a list of classes classes: - ASGD - name: torch.optim.sgd + url: https://pytorch.org/docs/stable/optim.html#torch.optim.SGD # for each module, a list of classes classes: - SGD - name: torch.optim.rprop + url: https://pytorch.org/docs/stable/optim.html#torch.optim.Rprop # for each module, a list of classes classes: - Rprop - name: torch.optim.rmsprop + url: https://pytorch.org/docs/stable/optim.html#torch.optim.RMSprop # for each module, a list of classes classes: - RMSprop - name: torch.optim.lbfgs + url: https://pytorch.org/docs/stable/optim.html#torch.optim.LBFGS # for each module, a list of classes classes: - LBFGS - name: torch.optim.lr_scheduler + url: classes: # Schedulers - LambdaLR diff --git a/conf/torch.utils.data.yaml b/sources/torch.utils.data/configen.yaml similarity index 84% rename from conf/torch.utils.data.yaml rename to sources/torch.utils.data/configen.yaml index c843372..5758f8f 100644 --- a/conf/torch.utils.data.yaml +++ b/sources/torch.utils.data/configen.yaml @@ -1,5 +1,4 @@ # Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved -# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. # SPDX-License-Identifier: MIT configen: @@ -8,7 +7,6 @@ configen: header: | # Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved - # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. # SPDX-License-Identifier: MIT # # Generated by configen, do not edit. 
@@ -22,6 +20,7 @@ configen:
   # list of modules to generate configs for
   modules:
     - name: torch.utils.data
+      url: https://pytorch.org/docs/stable/data.html#torch.utils.data.DataLoader
       # for each module, a list of classes
       classes:
         - DataLoader

From 3554c17703a6e20ea2400bad75e4b944c59dafec Mon Sep 17 00:00:00 2001
From: Tomasz Kornuta
Date: Tue, 20 Oct 2020 14:00:27 -0700
Subject: [PATCH 4/8] regenerated adadelta - with url

Signed-off-by: Tomasz Kornuta
---
 config/torch/optim/adadelta.py     |  4 +++-
 config/torch/optim/lr_scheduler.py | 10 ----------
 sources/torch.optim/configen.yaml  |  1 +
 3 files changed, 4 insertions(+), 11 deletions(-)

diff --git a/config/torch/optim/adadelta.py b/config/torch/optim/adadelta.py
index 4193444..02d48f1 100644
--- a/config/torch/optim/adadelta.py
+++ b/config/torch/optim/adadelta.py
@@ -14,7 +14,9 @@

 @dataclass
 class AdadeltaConf:
-
+    """For more details on parameters please refer to the original documentation:
+    https://pytorch.org/docs/stable/optim.html#torch.optim.Adadelta
+    """
     _target_: str = "torch.optim.adadelta.Adadelta"
     params: Any = MISSING
     lr: Any = 1.0
diff --git a/config/torch/optim/lr_scheduler.py b/config/torch/optim/lr_scheduler.py
index 58aeb08..33bb856 100644
--- a/config/torch/optim/lr_scheduler.py
+++ b/config/torch/optim/lr_scheduler.py
@@ -14,7 +14,6 @@

 @dataclass
 class LambdaLRConf:
-
     _target_: str = "torch.optim.lr_scheduler.LambdaLR"
     optimizer: Any = MISSING
     lr_lambda: Any = MISSING
@@ -23,7 +22,6 @@ class LambdaLRConf:

 @dataclass
 class MultiplicativeLRConf:
-
     _target_: str = "torch.optim.lr_scheduler.MultiplicativeLR"
     optimizer: Any = MISSING
     lr_lambda: Any = MISSING
@@ -32,7 +30,6 @@ class MultiplicativeLRConf:

 @dataclass
 class StepLRConf:
-
     _target_: str = "torch.optim.lr_scheduler.StepLR"
     optimizer: Any = MISSING
     step_size: Any = MISSING
@@ -42,7 +39,6 @@ class StepLRConf:

 @dataclass
 class MultiStepLRConf:
-
     _target_: str = "torch.optim.lr_scheduler.MultiStepLR"
     optimizer: Any = MISSING
     milestones: Any = MISSING
@@ -52,7 +48,6 @@ class MultiStepLRConf:

 @dataclass
 class ExponentialLRConf:
-
     _target_: str = "torch.optim.lr_scheduler.ExponentialLR"
     optimizer: Any = MISSING
     gamma: Any = MISSING
@@ -61,7 +56,6 @@ class ExponentialLRConf:

 @dataclass
 class CosineAnnealingLRConf:
-
     _target_: str = "torch.optim.lr_scheduler.CosineAnnealingLR"
     optimizer: Any = MISSING
     T_max: Any = MISSING
@@ -71,7 +65,6 @@ class CosineAnnealingLRConf:

 @dataclass
 class ReduceLROnPlateauConf:
-
     _target_: str = "torch.optim.lr_scheduler.ReduceLROnPlateau"
     optimizer: Any = MISSING
     mode: Any = min
@@ -87,7 +80,6 @@ class ReduceLROnPlateauConf:

 @dataclass
 class CyclicLRConf:
-
     _target_: str = "torch.optim.lr_scheduler.CyclicLR"
     optimizer: Any = MISSING
     base_lr: Any = MISSING
@@ -106,7 +98,6 @@ class CyclicLRConf:

 @dataclass
 class CosineAnnealingWarmRestartsConf:
-
     _target_: str = "torch.optim.lr_scheduler.CosineAnnealingWarmRestarts"
     optimizer: Any = MISSING
     T_0: Any = MISSING
@@ -117,7 +108,6 @@ class CosineAnnealingWarmRestartsConf:

 @dataclass
 class OneCycleLRConf:
-
     _target_: str = "torch.optim.lr_scheduler.OneCycleLR"
     optimizer: Any = MISSING
     max_lr: Any = MISSING
diff --git a/sources/torch.optim/configen.yaml b/sources/torch.optim/configen.yaml
index b95402d..14276b8 100644
--- a/sources/torch.optim/configen.yaml
+++ b/sources/torch.optim/configen.yaml
@@ -20,6 +20,7 @@ configen:
   # list of modules to generate configs for
   modules:
     - name: torch.optim.adadelta
+      url: https://pytorch.org/docs/stable/optim.html#torch.optim.Adadelta
       # for each module, a list of classes
       classes:
         - Adadelta

From ced1ecb075108195cbe2d4e58df8ed6a35a1490f Mon Sep 17 00:00:00 2001
From: Tomasz Kornuta
Date: Tue, 20 Oct 2020 14:02:19 -0700
Subject: [PATCH 5/8] regenerated files - with tiny fix in headers, only
 facebook header + SPDX identifier

Signed-off-by: Tomasz Kornuta
---
 config/torch/optim/adadelta.py         | 2 +-
 config/torch/optim/adagrad.py          | 2 +-
 config/torch/optim/adam.py             | 2 +-
 config/torch/optim/adamax.py           | 2 +-
 config/torch/optim/adamw.py            | 2 +-
 config/torch/optim/asgd.py             | 2 +-
 config/torch/optim/lbfgs.py            | 2 +-
 config/torch/optim/lr_scheduler.py     | 2 +-
 config/torch/optim/rmsprop.py          | 2 +-
 config/torch/optim/rprop.py            | 2 +-
 config/torch/optim/sgd.py              | 2 +-
 config/torch/optim/sparse_adam.py      | 2 +-
 config/torch/utils/data.py             | 2 +-
 sources/torch.optim/configen.yaml      | 4 ++--
 sources/torch.utils.data/configen.yaml | 4 ++--
 15 files changed, 17 insertions(+), 17 deletions(-)

diff --git a/config/torch/optim/adadelta.py b/config/torch/optim/adadelta.py
index 02d48f1..0cd6262 100644
--- a/config/torch/optim/adadelta.py
+++ b/config/torch/optim/adadelta.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/adagrad.py b/config/torch/optim/adagrad.py
index 8340596..a00fc08 100644
--- a/config/torch/optim/adagrad.py
+++ b/config/torch/optim/adagrad.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/adam.py b/config/torch/optim/adam.py
index f79c6af..96164a1 100644
--- a/config/torch/optim/adam.py
+++ b/config/torch/optim/adam.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/adamax.py b/config/torch/optim/adamax.py
index 3bf343a..9cb88d3 100644
--- a/config/torch/optim/adamax.py
+++ b/config/torch/optim/adamax.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/adamw.py b/config/torch/optim/adamw.py
index 0820da8..2144c01 100644
--- a/config/torch/optim/adamw.py
+++ b/config/torch/optim/adamw.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/asgd.py b/config/torch/optim/asgd.py
index a9ef961..ec8f33d 100644
--- a/config/torch/optim/asgd.py
+++ b/config/torch/optim/asgd.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/lbfgs.py b/config/torch/optim/lbfgs.py
index 83909a9..125574a 100644
--- a/config/torch/optim/lbfgs.py
+++ b/config/torch/optim/lbfgs.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/lr_scheduler.py b/config/torch/optim/lr_scheduler.py
index 33bb856..aed67bc 100644
--- a/config/torch/optim/lr_scheduler.py
+++ b/config/torch/optim/lr_scheduler.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/rmsprop.py b/config/torch/optim/rmsprop.py
index fa33ce3..1ed98ab 100644
--- a/config/torch/optim/rmsprop.py
+++ b/config/torch/optim/rmsprop.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/rprop.py b/config/torch/optim/rprop.py
index eb0ea20..6800480 100644
--- a/config/torch/optim/rprop.py
+++ b/config/torch/optim/rprop.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/sgd.py b/config/torch/optim/sgd.py
index e048fa5..4418026 100644
--- a/config/torch/optim/sgd.py
+++ b/config/torch/optim/sgd.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/optim/sparse_adam.py b/config/torch/optim/sparse_adam.py
index b5a3157..1b32945 100644
--- a/config/torch/optim/sparse_adam.py
+++ b/config/torch/optim/sparse_adam.py
@@ -1,4 +1,4 @@
-# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/config/torch/utils/data.py b/config/torch/utils/data.py
index 44f6fd7..e933719 100644
--- a/config/torch/utils/data.py
+++ b/config/torch/utils/data.py
@@ -1,4 +1,4 @@
-# Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT
 #
 # Generated by configen, do not edit.
diff --git a/sources/torch.optim/configen.yaml b/sources/torch.optim/configen.yaml
index 14276b8..a651e14 100644
--- a/sources/torch.optim/configen.yaml
+++ b/sources/torch.optim/configen.yaml
@@ -1,4 +1,4 @@
-# Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT

 configen:
@@ -6,7 +6,7 @@ configen:
   output_dir: ${hydra:runtime.cwd}

   header: |
-    # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+    # Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
     # SPDX-License-Identifier: MIT
     #
     # Generated by configen, do not edit.
diff --git a/sources/torch.utils.data/configen.yaml b/sources/torch.utils.data/configen.yaml
index 5758f8f..f11340a 100644
--- a/sources/torch.utils.data/configen.yaml
+++ b/sources/torch.utils.data/configen.yaml
@@ -1,4 +1,4 @@
-# Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
 # SPDX-License-Identifier: MIT

 configen:
@@ -6,7 +6,7 @@ configen:
   output_dir: ${hydra:runtime.cwd}

   header: |
-    # Copyright (c) 2020 Facebook, Inc. and its affiliates. All Rights Reserved
+    # Copyright (c) 2020, Facebook, Inc. and its affiliates. All Rights Reserved
     # SPDX-License-Identifier: MIT
     #
     # Generated by configen, do not edit.

From f596a5ffd3b15a1bea983c4cd015743371e159e1 Mon Sep 17 00:00:00 2001
From: Tomasz Kornuta
Date: Tue, 20 Oct 2020 14:11:42 -0700
Subject: [PATCH 6/8] regeneration after rebase

Signed-off-by: Tomasz Kornuta
---
 config/torch/optim/lr_scheduler.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/config/torch/optim/lr_scheduler.py b/config/torch/optim/lr_scheduler.py
index a43f5ab..aed67bc 100644
--- a/config/torch/optim/lr_scheduler.py
+++ b/config/torch/optim/lr_scheduler.py
@@ -67,12 +67,12 @@ class CosineAnnealingLRConf:
 class ReduceLROnPlateauConf:
     _target_: str = "torch.optim.lr_scheduler.ReduceLROnPlateau"
     optimizer: Any = MISSING
-    mode: str = 'min'
+    mode: Any = min
     factor: Any = 0.1
     patience: Any = 10
     verbose: Any = False
     threshold: Any = 0.0001
-    threshold_mode: str = 'rel'
+    threshold_mode: Any = rel
     cooldown: Any = 0
     min_lr: Any = 0
     eps: Any = 1e-08
@@ -86,10 +86,10 @@ class CyclicLRConf:
     max_lr: Any = MISSING
     step_size_up: Any = 2000
     step_size_down: Any = None
-    mode: str = 'triangular'
+    mode: Any = triangular
     gamma: Any = 1.0
     scale_fn: Any = None
-    scale_mode: str = 'cycle'
+    scale_mode: Any = cycle
     cycle_momentum: Any = True
     base_momentum: Any = 0.8
     max_momentum: Any = 0.9
@@ -115,7 +115,7 @@ class OneCycleLRConf:
     epochs: Any = None
     steps_per_epoch: Any = None
     pct_start: Any = 0.3
-    anneal_strategy: str = 'cos'
+    anneal_strategy: Any = cos
     cycle_momentum: Any = True
     base_momentum: Any = 0.85
     max_momentum: Any = 0.95

From e074842ee39221541b6f301d5f2543ff1028ce73 Mon Sep 17 00:00:00 2001
From: Tomasz Kornuta
Date: Tue, 20 Oct 2020 14:40:07 -0700
Subject: [PATCH 7/8] reverted min -> 'min'

Signed-off-by: Tomasz Kornuta
---
 config/torch/optim/lr_scheduler.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/config/torch/optim/lr_scheduler.py b/config/torch/optim/lr_scheduler.py
index aed67bc..23f7744 100644
--- a/config/torch/optim/lr_scheduler.py
+++ b/config/torch/optim/lr_scheduler.py
@@ -67,7 +67,7 @@ class CosineAnnealingLRConf:
 class ReduceLROnPlateauConf:
     _target_: str = "torch.optim.lr_scheduler.ReduceLROnPlateau"
     optimizer: Any = MISSING
-    mode: Any = min
+    mode: Any = 'min'
     factor: Any = 0.1
     patience: Any = 10
     verbose: Any = False

From 752fc1e227a22099150e6e96f1b4c215869b92d8 Mon Sep 17 00:00:00 2001
From: Tomasz Kornuta
Date: Tue, 20 Oct 2020 14:48:09 -0700
Subject: [PATCH 8/8] regenerated torch.optim with latest + modified configen

Signed-off-by: Tomasz Kornuta
---
 config/torch/optim/lr_scheduler.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/config/torch/optim/lr_scheduler.py b/config/torch/optim/lr_scheduler.py
index 23f7744..fb441b0 100644
--- a/config/torch/optim/lr_scheduler.py
+++ b/config/torch/optim/lr_scheduler.py
@@ -67,12 +67,12 @@ class CosineAnnealingLRConf:
 class ReduceLROnPlateauConf:
     _target_: str = "torch.optim.lr_scheduler.ReduceLROnPlateau"
     optimizer: Any = MISSING
-    mode: Any = 'min'
+    mode: Any = "min"
     factor: Any = 0.1
     patience: Any = 10
     verbose: Any = False
     threshold: Any = 0.0001
-    threshold_mode: Any = rel
+    threshold_mode: Any = "rel"
     cooldown: Any = 0
     min_lr: Any = 0
     eps: Any = 1e-08
@@ -86,10 +86,10 @@ class CyclicLRConf:
     max_lr: Any = MISSING
     step_size_up: Any = 2000
     step_size_down: Any = None
-    mode: Any = triangular
+    mode: Any = "triangular"
     gamma: Any = 1.0
     scale_fn: Any = None
-    scale_mode: Any = cycle
+    scale_mode: Any = "cycle"
     cycle_momentum: Any = True
     base_momentum: Any = 0.8
     max_momentum: Any = 0.9
@@ -115,7 +115,7 @@ class OneCycleLRConf:
     epochs: Any = None
     steps_per_epoch: Any = None
     pct_start: Any = 0.3
-    anneal_strategy: Any = cos
+    anneal_strategy: Any = "cos"
    cycle_momentum: Any = True
     base_momentum: Any = 0.85
     max_momentum: Any = 0.95
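
Usage note (reviewer sketch, not part of the patches above): a minimal example of how the generated dataclasses are meant to be consumed, assuming Hydra >= 1.0 and OmegaConf >= 2.0 are installed and the generated `config/` package is importable. The ConfigStore group/node names and the toy model/dataset are illustrative only; fields left as MISSING (`params`, `dataset`) are supplied at call time, and keyword arguments to `instantiate()` override values from the config.

    import torch
    from hydra.core.config_store import ConfigStore
    from hydra.utils import instantiate
    from omegaconf import OmegaConf

    from config.torch.optim.adam import AdamConf
    from config.torch.utils.data import DataLoaderConf

    # Registering the generated schemas lets Hydra validate user YAML against them.
    cs = ConfigStore.instance()
    cs.store(group="optimizer", name="adam", node=AdamConf)
    cs.store(group="dataloader", name="default", node=DataLoaderConf)

    model = torch.nn.Linear(10, 2)
    dataset = torch.utils.data.TensorDataset(torch.randn(8, 10))

    # The MISSING fields are passed as overrides when instantiating the targets.
    optimizer = instantiate(OmegaConf.structured(AdamConf(lr=0.001)), params=model.parameters())
    loader = instantiate(OmegaConf.structured(DataLoaderConf(batch_size=4)), dataset=dataset)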