Skip to content

Commit

Permalink
test 1
Browse files · Browse the repository at this point in the history
  • Loading branch information
Cemberk committed Oct 9, 2024
1 parent 1d2314b commit 4cb8d4c
Showing 1 changed file with 2 additions and 2 deletions.
4 changes: 2 additions & 2 deletions src/transformers/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -1773,8 +1773,8 @@ def _wrap_model(self, model, training=True, dataloader=None):
return smp.DistributedModel(model, backward_passes_per_step=self.args.gradient_accumulation_steps)

# train/eval could be run multiple-times - if already wrapped, don't re-wrap it again
if self.accelerator.unwrap_model(model) is not model:
#if unwrap_model(model) is not model:
#if self.accelerator.unwrap_model(model) is not model:
if unwrap_model(model) is not model:
if self.args.ort:
from torch_ort import ORTModule
if type(model) is not ORTModule:
Expand Down

0 comments on commit 4cb8d4c

Please sign in to comment.