diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py
index 0b56488907fc17..a5dfd285bf9182 100755
--- a/src/transformers/trainer.py
+++ b/src/transformers/trainer.py
@@ -2386,7 +2386,7 @@ def _save_checkpoint(self, model, trial, metrics=None):
         self.args.distributed_state.wait_for_everyone()
         # Then go through the rewriting process starting on process 0
         if staging_output_dir != output_dir:
-            with self.args.main_process_first(desc="Renaming model checkpoint folder to true location"):
+            with self.args.main_process_first(desc="Renaming model checkpoint folder to true location", local=self.args.save_on_each_node):
                 if os.path.exists(staging_output_dir):
                     os.rename(staging_output_dir, output_dir)
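
For context on the change: `main_process_first` is a barrier-style context manager in which the designated "main" process runs the body first while the other processes wait, after which they follow. With a shared filesystem across nodes (`save_on_each_node=False`), only the global main process should perform the rename; with per-node checkpoint storage (`save_on_each_node=True`), the first process on each node must do it for that node's copy. Tying `local` to `save_on_each_node` selects the right "main" in both cases. Below is a minimal sketch of these semantics, assuming a `torch.distributed` process group launched with `torchrun`; the standalone `main_process_first` helper and the directory names are illustrative, not the Trainer's actual internals.

    import os
    from contextlib import contextmanager

    import torch.distributed as dist

    @contextmanager
    def main_process_first(local: bool = True):
        # torchrun sets LOCAL_RANK (rank within the node) and RANK (global rank).
        # local=True gates on the first process of each node; local=False on
        # global rank 0 only.
        rank = int(os.environ["LOCAL_RANK" if local else "RANK"])
        try:
            if rank != 0:
                dist.barrier()  # wait until the main process has run the body
            yield
        finally:
            if rank == 0:
                dist.barrier()  # main process is done: release the waiting ranks

    # Every process executes the body, main first. The os.path.exists guard
    # makes the rename a no-op once the main process has already moved the
    # staging folder, so the work effectively happens once per "main".
    with main_process_first(local=True):
        if os.path.exists("staging_dir"):
            os.rename("staging_dir", "final_dir")

Note the failure mode this guards against: if `local=True` were used on a shared filesystem, each node's local main process would race to rename the same folder; conversely, gating on global rank 0 alone would leave the staging folders on the other nodes untouched when each node keeps its own copy.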