Commit

hacky fix
shauray8 committed Jul 10, 2023
1 parent 760f89c commit 20d6b84
Showing 1 changed file with 4 additions and 4 deletions.
8 changes: 4 additions & 4 deletions src/transformers/training_args.py
@@ -944,8 +944,8 @@ class TrainingArguments:
             )
         },
     )
-    sharded_ddp: Optional[Union[str, bool, List[ShardedDDPOption]]] = field(
-        default="",
+    sharded_ddp: Optional[Union[List[ShardedDDPOption], str]] = field(
+        default='',
         metadata={
             "help": (
                 "Whether or not to use sharded DDP training (in distributed training only). The base option should be"
@@ -955,8 +955,8 @@ class TrainingArguments:
             ),
         },
     )
-    fsdp: Optional[Union[str, bool, List[FSDPOption]]] = field(
-        default="",
+    fsdp: Optional[Union[List[FSDPOption], str]] = field(
+        default='',
         metadata={
             "help": (
                 "Whether or not to use PyTorch Fully Sharded Data Parallel (FSDP) training (in distributed training"
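The commit message does not say why the annotations changed, but a plausible reading is that the reorder works around how TrainingArguments fields are turned into CLI flags: transformers parses this dataclass with HfArgumentParser, which appears to resolve Optional unions by member position, so the old Union[str, bool, List[...]] left an ambiguous three-way choice while Union[List[...], str] lets the field fall back to plain str parsing. A minimal sketch of that position sensitivity follows; ShardedDDPOption is stubbed out as a hypothetical two-member enum (the real one lives in transformers.trainer_utils), and OldArgs/NewArgs are illustrative names, not classes from the library.

from dataclasses import dataclass, field, fields
from enum import Enum
from typing import List, Optional, Union, get_args

# Hypothetical stand-in for transformers.trainer_utils.ShardedDDPOption.
class ShardedDDPOption(Enum):
    SIMPLE = "simple"
    ZERO_DP_2 = "zero_dp_2"

@dataclass
class OldArgs:
    # Pre-fix annotation: `bool` sits between `str` and the list type.
    sharded_ddp: Optional[Union[str, bool, List[ShardedDDPOption]]] = field(default="")

@dataclass
class NewArgs:
    # Post-fix annotation: `bool` dropped, `str` moved behind the list type.
    sharded_ddp: Optional[Union[List[ShardedDDPOption], str]] = field(default="")

# Union member order is preserved at runtime, so a parser that resolves a
# field by walking the members in order sees different candidates per version.
print(get_args(fields(OldArgs)[0].type))  # (str, bool, List[ShardedDDPOption], NoneType)
print(get_args(fields(NewArgs)[0].type))  # (List[ShardedDDPOption], str, NoneType)

Under that reading, dropping bool and moving the list type ahead of str changes only what the argument parser resolves the field to, not the runtime values the Trainer accepts, which fits the "hacky fix" framing.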
