Include safetensors as part of _load_best_model (#30553)
* Include safetensors

* Cleanup
muellerzr authored and Ita Zaporozhets committed May 14, 2024
1 parent 07ba9ab commit 2782292
Showing 1 changed file with 3 additions and 1 deletion.
src/transformers/trainer.py (3 additions, 1 deletion)
@@ -2611,7 +2611,9 @@ def _load_best_model(self):
                     load_result = model.load_state_dict(state_dict, False)
                 if not is_sagemaker_mp_enabled() and has_been_loaded:
                     self._issue_warnings_after_load(load_result)
-        elif os.path.exists(os.path.join(self.state.best_model_checkpoint, WEIGHTS_INDEX_NAME)):
+        elif os.path.exists(os.path.join(self.state.best_model_checkpoint, SAFE_WEIGHTS_INDEX_NAME)) or os.path.exists(
+            os.path.join(self.state.best_model_checkpoint, WEIGHTS_INDEX_NAME)
+        ):
             load_result = load_sharded_checkpoint(
                 model, self.state.best_model_checkpoint, strict=is_sagemaker_mp_enabled()
             )
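For context, here is a minimal sketch of the check this hunk introduces: the best-model directory is treated as a sharded checkpoint if it contains either the safetensors shard index or the PyTorch shard index. The helper name has_sharded_checkpoint is hypothetical and the index filenames are the usual Transformers defaults; the actual logic lives inline in Trainer._load_best_model exactly as shown in the diff above.

    # Illustrative sketch only; assumed helper name, usual Transformers index filenames.
    import os

    SAFE_WEIGHTS_INDEX_NAME = "model.safetensors.index.json"  # safetensors shard index
    WEIGHTS_INDEX_NAME = "pytorch_model.bin.index.json"       # PyTorch shard index

    def has_sharded_checkpoint(checkpoint_dir: str) -> bool:
        # True if the directory holds either index file, i.e. a sharded
        # checkpoint that load_sharded_checkpoint could reload.
        return any(
            os.path.exists(os.path.join(checkpoint_dir, name))
            for name in (SAFE_WEIGHTS_INDEX_NAME, WEIGHTS_INDEX_NAME)
        )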
