From 990c203790ee28a5a3e06bd68b91ae8caa523401 Mon Sep 17 00:00:00 2001
From: jingyanwangms <47403504+jingyanwangms@users.noreply.github.com>
Date: Fri, 23 Feb 2024 01:07:43 -0800
Subject: [PATCH] Add handle for empty fsdp_config[xla_fsdp_v2] (#1706)

---
 optimum/onnxruntime/training_args.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/optimum/onnxruntime/training_args.py b/optimum/onnxruntime/training_args.py
index a0cb7c8e983..7c3171855c1 100644
--- a/optimum/onnxruntime/training_args.py
+++ b/optimum/onnxruntime/training_args.py
@@ -397,6 +397,7 @@ def __post_init__(self):
         ):
             raise ValueError("`min_num_params` and `transformer_layer_cls_to_wrap` are mutually exclusive.")
         self.fsdp_config["xla"] = self.fsdp_config.get("xla", False)
+        self.fsdp_config["xla_fsdp_v2"] = self.fsdp_config.get("xla_fsdp_v2", False)
         self.fsdp_config["xla_fsdp_grad_ckpt"] = self.fsdp_config.get("xla_fsdp_grad_ckpt", False)
         if self.fsdp_config["xla"]:
             if len(self.fsdp) > 0:
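
Note (not part of the patch): a minimal sketch of the failure mode this default guards against, assuming some downstream code indexes fsdp_config["xla_fsdp_v2"] directly rather than via .get(); the dict name and values below are illustrative only.

    # Hypothetical user config that omits "xla_fsdp_v2".
    fsdp_config = {"xla": True}

    # Without the backfilled default, a direct lookup would raise KeyError.
    try:
        use_v2 = fsdp_config["xla_fsdp_v2"]
    except KeyError:
        use_v2 = None  # would be an unhandled crash in real code

    # With the patched __post_init__ behavior, the key is always present,
    # defaulting to False, so later direct lookups are safe.
    fsdp_config["xla_fsdp_v2"] = fsdp_config.get("xla_fsdp_v2", False)
    assert fsdp_config["xla_fsdp_v2"] is False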