Skip to content

Commit

Permalink
args.metric_for_best_model defaults to "loss".
Browse files Browse the repository at this point in the history
  • Loading branch information
seanswyi committed Oct 19, 2024
1 parent 239b077 commit dd7ab17
Showing 1 changed file with 5 additions and 8 deletions.
13 changes: 5 additions & 8 deletions src/transformers/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -757,6 +757,9 @@ def tokenizer(self, processing_class) -> None:
)
self.processing_class = processing_class

if not self.args.metric_for_best_model:
self.args.metric_for_best_model = "loss"

def _activate_neftune(self, model):
r"""
Activates the neftune as presented in this code: https://github.com/neelsjain/NEFTune and paper:
Expand Down Expand Up @@ -2991,10 +2994,7 @@ def _maybe_log_save_evaluate(self, tr_loss, grad_norm, model, trial, epoch, igno
new_best_metric = self._determine_best_metric(metrics=metrics, trial=trial)

if self.args.save_strategy == SaveStrategy.BEST:
if new_best_metric:
self.control.should_save = True
else:
self.control_should_save = False
self.control.should_save = new_best_metric

if self.control.should_save:
self._save_checkpoint(model, trial)
Expand Down Expand Up @@ -3098,10 +3098,7 @@ def _determine_best_metric(self, metrics, trial):
f"The available evaluation metrics are: {list(metrics.keys())}. Consider changing the `metric_for_best_model` via the TrainingArguments."
) from exc

if self.args.greater_is_better:
operator = np.greater
else:
operator = np.less
operator = np.greater if self.args.greater_is_better else np.less
else:
metric_value = metrics["eval_loss"]
operator = np.less
Expand Down

0 comments on commit dd7ab17

Please sign in to comment.