diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index ced74b5988fcec..28d1f7b84b6c1c 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -1720,7 +1720,7 @@ def _inner_training_loop(
         if resume_from_checkpoint is not None:
             if self.is_deepspeed_enabled:
                 deepspeed_load_checkpoint(
-                    self.model_wrapped, resume_from_checkpoint, load_module_strict=not _is_peft_model(model)
+                    self.model_wrapped, resume_from_checkpoint, load_module_strict=not _is_peft_model(self.model)
                 )
             elif is_sagemaker_mp_enabled() or self.is_fsdp_enabled:
                 self._load_from_checkpoint(resume_from_checkpoint, self.model_wrapped)
@@ -2182,7 +2182,7 @@ def _load_best_model(self):
         model = self.model_wrapped if is_sagemaker_mp_enabled() else self.model
         if self.is_deepspeed_enabled:
             deepspeed_load_checkpoint(
-                self.model_wrapped, self.state.best_model_checkpoint, load_module_strict=not _is_peft_model(model),
+                self.model_wrapped, self.state.best_model_checkpoint, load_module_strict=not _is_peft_model(self.model),
             )
         elif self.is_fsdp_enabled:
             load_result = load_fsdp_model(
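
For context, a minimal sketch of why the fix matters, assuming _is_peft_model is
essentially an isinstance check against peft's model classes (as in
transformers.trainer). In both hunks the local name model can refer to a wrapped
model (the result of self._wrap_model in _inner_training_loop; self.model_wrapped
under SageMaker MP in _load_best_model), and a wrapper is never a PeftModel
instance, so the check reported "not PEFT" and load_module_strict came out True
for PEFT runs, defeating the relaxed loading that adapter checkpoints need.
self.model is the unwrapped model and passes the check. The classes below are
stand-ins for illustration only, not the real peft or deepspeed APIs:

class PeftModel:
    """Stand-in for peft.PeftModel."""

class EngineWrapper:
    """Stand-in for a DeepSpeed/DDP-style wrapper holding the real module."""
    def __init__(self, module):
        self.module = module

def _is_peft_model(model):
    # Mirrors the transformers helper: a plain isinstance check with no
    # unwrapping, so a wrapper around a PEFT model is not recognized.
    return isinstance(model, PeftModel)

peft_model = PeftModel()
wrapped = EngineWrapper(peft_model)

assert _is_peft_model(peft_model)   # self.model: PEFT correctly detected
assert not _is_peft_model(wrapped)  # wrapped model: check fails, the old bug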