Revert "Fix typing annotations for FSDP and DeepSpeed in TrainingArguments" (#24574)

Revert "Fix typing annotations for FSDP and DeepSpeed in TrainingArguments (#24549)"

This reverts commit c5e29d4381.
Sylvain Gugger authored 2023-06-29 08:14:43 -04:00 (committed by GitHub)
parent 4f1b31c2ee
commit 2dc5e1a120
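
Note: the help strings in the diff below still document that `fsdp_config` and `deepspeed` accept either a path to a JSON config file or an already-loaded `dict` at runtime; this revert only narrows the annotations back to `Optional[str]`. A minimal usage sketch of both call styles, assuming a local `fsdp_config.json` file exists and the relevant backends (FSDP via PyTorch, DeepSpeed/Accelerate) are installed:

from transformers import TrainingArguments

# Path form: matches the Optional[str] annotation restored by this revert.
# Assumes an fsdp_config.json file exists in the working directory.
args_from_file = TrainingArguments(
    output_dir="out",
    fsdp="full_shard",
    fsdp_config="fsdp_config.json",
)

# Dict form: the help text also allows passing an already-loaded config dict.
# Assumes the deepspeed package is installed; the config values are illustrative.
args_from_dict = TrainingArguments(
    output_dir="out",
    deepspeed={"zero_optimization": {"stage": 2}, "train_batch_size": "auto"},
)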

@@ -976,12 +976,12 @@ class TrainingArguments:
             )
         },
     )
-    fsdp_config: Optional[Union[str, Dict]] = field(
+    fsdp_config: Optional[str] = field(
         default=None,
         metadata={
             "help": (
-                "Config to be used with FSDP (Pytorch Fully Sharded Data Parallel). The value is either a"
-                "fsdp json config file (e.g., `fsdp_config.json`) or an already loaded json file as `dict`."
+                "Config to be used with FSDP (Pytorch Fully Sharded Data Parallel). The value is either a"
+                "fsdp json config file (e.g., `fsdp_config.json`) or an already loaded json file as `dict`."
             )
         },
     )
@@ -994,11 +994,11 @@ class TrainingArguments:
             )
         },
     )
-    deepspeed: Optional[Union[str, Dict]] = field(
+    deepspeed: Optional[str] = field(
         default=None,
         metadata={
             "help": (
-                "Enable deepspeed and pass the path to deepspeed json config file (e.g. `ds_config.json`) or an already"
+                "Enable deepspeed and pass the path to deepspeed json config file (e.g. ds_config.json) or an already"
                 " loaded json file as a dict"
             )
         },