
Commit a2ec21b

muellerzr authored and parambharat committed

Fix err with FSDP (huggingface#25991)

* Fix err
* Use version check
1 parent 0601cfc commit a2ec21b

1 file changed (+9 −8)

src/transformers/trainer.py

Lines changed: 9 additions & 8 deletions
@@ -3910,15 +3910,16 @@ def create_accelerator_and_postprocess(self):
             fsdp_plugin.limit_all_gathers = self.args.fsdp_config.get(
                 "limit_all_gathers", fsdp_plugin.limit_all_gathers
             )
-            fsdp_plugin.activation_checkpointing = self.args.fsdp_config.get(
-                "activation_checkpointing", fsdp_plugin.activation_checkpointing
-            )
-            if fsdp_plugin.activation_checkpointing and self.args.gradient_checkpointing:
-                raise ValueError(
-                    "The activation_checkpointing in FSDP config and the gradient_checkpointing in training arg "
-                    "can't be set to True simultaneously. Please use FSDP's activation_checkpointing logic "
-                    "when using FSDP."
+            if is_accelerate_available("0.23.0"):
+                fsdp_plugin.activation_checkpointing = self.args.fsdp_config.get(
+                    "activation_checkpointing", fsdp_plugin.activation_checkpointing
                 )
+                if fsdp_plugin.activation_checkpointing and self.args.gradient_checkpointing:
+                    raise ValueError(
+                        "The activation_checkpointing in FSDP config and the gradient_checkpointing in training arg "
+                        "can't be set to True simultaneously. Please use FSDP's activation_checkpointing logic "
+                        "when using FSDP."
+                    )

         if self.is_deepspeed_enabled:
             if getattr(self.args, "hf_deepspeed_config", None) is None:
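
For context, here is a minimal sketch of the version-gate pattern this commit introduces. It assumes, per the diff, that is_accelerate_available accepts a minimum-version string and that FSDPPlugin.activation_checkpointing only exists on sufficiently new accelerate releases (0.23.0 here); the helper below is a simplified stand-in for illustration, not the actual transformers.utils implementation.

# Simplified stand-in for transformers' version check (assumption: the real
# helper compares the installed accelerate version against a minimum).
import importlib.metadata

from packaging import version


def is_accelerate_available(min_version=None) -> bool:
    # Return False outright when accelerate is not installed at all.
    try:
        installed = importlib.metadata.version("accelerate")
    except importlib.metadata.PackageNotFoundError:
        return False
    # When a minimum version is given, require installed >= min_version.
    if min_version is not None:
        return version.parse(installed) >= version.parse(min_version)
    return True


# Mirroring the diff: only read/set fsdp_plugin.activation_checkpointing
# when accelerate is new enough to define that attribute.
if is_accelerate_available("0.23.0"):
    ...  # safe to configure FSDP activation checkpointing here

Gating on the version rather than wrapping the attribute access in try/except keeps the behavior explicit: on older accelerate installs the FSDP activation-checkpointing option is simply skipped instead of raising an AttributeError.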
