From c5e29d4381d4b9739e6cb427adbca87fbb43a3ad Mon Sep 17 00:00:00 2001
From: Max Ryabinin
Date: Wed, 28 Jun 2023 17:36:17 +0300
Subject: [PATCH] Fix typing annotations for FSDP and DeepSpeed in TrainingArguments (#24549)

* Fix typing annotations for FSDP and DeepSpeed in TrainingArguments

* Change dict to Dict
---
 src/transformers/training_args.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/src/transformers/training_args.py b/src/transformers/training_args.py
index e8c2823f3793..ac875e0570cc 100644
--- a/src/transformers/training_args.py
+++ b/src/transformers/training_args.py
@@ -976,12 +976,12 @@ class TrainingArguments:
             )
         },
     )
-    fsdp_config: Optional[str] = field(
+    fsdp_config: Optional[Union[str, Dict]] = field(
         default=None,
         metadata={
             "help": (
-                "Config to be used with FSDP (Pytorch Fully Sharded Data Parallel). The value is either a"
-                "fsdp json config file (e.g., `fsdp_config.json`) or an already loaded json file as `dict`."
+                "Config to be used with FSDP (Pytorch Fully Sharded Data Parallel). The value is either a "
+                "fsdp json config file (e.g., `fsdp_config.json`) or an already loaded json file as `dict`."
             )
         },
     )
@@ -994,11 +994,11 @@ class TrainingArguments:
             )
         },
     )
-    deepspeed: Optional[str] = field(
+    deepspeed: Optional[Union[str, Dict]] = field(
         default=None,
         metadata={
             "help": (
-                "Enable deepspeed and pass the path to deepspeed json config file (e.g. ds_config.json) or an already"
+                "Enable deepspeed and pass the path to deepspeed json config file (e.g. `ds_config.json`) or an already"
                 " loaded json file as a dict"
             )
         },
     )
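
Note (not part of the patch): a minimal usage sketch of what the widened annotations allow, namely passing an already-loaded dict for `deepspeed` (and analogously for `fsdp_config`) instead of a path to a JSON config file. The config values below are illustrative placeholders, not recommendations, and running with a DeepSpeed config assumes DeepSpeed is installed; the path-based form continues to work unchanged.

    from transformers import TrainingArguments

    # An already-loaded DeepSpeed config as a plain dict. Before this patch the
    # argument was annotated as Optional[str], so only a file path would type-check.
    ds_config = {
        "train_micro_batch_size_per_gpu": "auto",
        "zero_optimization": {"stage": 2},
    }

    args = TrainingArguments(
        output_dir="out",
        deepspeed=ds_config,           # dict form, permitted by Optional[Union[str, Dict]]
        # deepspeed="ds_config.json",  # equivalent path-based form
    )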