Mirror of https://github.com/huggingface/transformers.git (synced 2025-07-31 10:12:23 +06:00).

Commit: fix typos in FSDP config parsing logic in TrainingArguments (#29189) — fixes FSDP config key lookups.
This commit is contained in:
parent
608fa5496c
commit
697f05bab3
@@ -1732,9 +1732,9 @@ class TrainingArguments:
                 os.environ[f"{prefix}TRANSFORMER_CLS_TO_WRAP"] = ",".join(
                     self.fsdp_config["transformer_layer_cls_to_wrap"]
                 )
-            prefetch_policy = self.fsdp_config.get("fsdp_backward_prefetch", "NO_PREFETCH")
+            prefetch_policy = self.fsdp_config.get("backward_prefetch", "NO_PREFETCH")
             os.environ[f"{prefix}BACKWARD_PREFETCH"] = prefetch_policy.upper()
-            os.environ[f"{prefix}FORWARD_PREFETCH"] = self.fsdp_config.get("forward_prefect", "false")
+            os.environ[f"{prefix}FORWARD_PREFETCH"] = self.fsdp_config.get("forward_prefetch", "false")
             os.environ[f"{prefix}SYNC_MODULE_STATES"] = self.fsdp_config.get("sync_module_states", "true")
             os.environ[f"{prefix}USE_ORIG_PARAMS"] = self.fsdp_config.get("use_orig_params", "true")
Loading…
Reference in New Issue
Block a user