From 7bac7a867231e8be808792e28e752e4bdd7ec1ac Mon Sep 17 00:00:00 2001
From: Momo
Date: Thu, 30 May 2024 12:27:48 +0200
Subject: [PATCH] update to not(endswith(loss))

---
 src/transformers/training_args.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/src/transformers/training_args.py b/src/transformers/training_args.py
index a97139a07ba938..c1d69a05f85f73 100644
--- a/src/transformers/training_args.py
+++ b/src/transformers/training_args.py
@@ -464,8 +464,8 @@ class TrainingArguments:
             Use in conjunction with `load_best_model_at_end` and `metric_for_best_model` to specify if better models
             should have a greater metric or not. Will default to:
 
-            - `True` if `metric_for_best_model` is set to a value that isn't `"loss"` or `"eval_loss"`.
-            - `False` if `metric_for_best_model` is not set, or set to `"loss"` or `"eval_loss"`.
+            - `True` if `metric_for_best_model` is set to a value that doesn't end in `"loss"`.
+            - `False` if `metric_for_best_model` is not set, or set to a value that ends in `"loss"`.
         ignore_data_skip (`bool`, *optional*, defaults to `False`):
             When resuming training, whether or not to skip the epochs and batches to get the data loading at the same
             stage as in the previous training. If set to `True`, the training will begin faster (as that skipping step
@@ -1582,7 +1582,7 @@ def __post_init__(self):
         ) and self.metric_for_best_model is None:
             self.metric_for_best_model = "loss"
         if self.greater_is_better is None and self.metric_for_best_model is not None:
-            self.greater_is_better = self.metric_for_best_model not in ["loss", "eval_loss"]
+            self.greater_is_better = not(self.metric_for_best_model.endswith("loss"))
         if self.run_name is None:
            self.run_name = self.output_dir
         if self.framework == "pt" and is_torch_available():
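
A minimal sketch (not part of the patch) of the default this change produces; the metric names below are illustrative examples, not taken from the commit:

    def default_greater_is_better(metric_for_best_model: str) -> bool:
        # Mirrors the patched line: any metric whose name ends in "loss"
        # (e.g. "eval_loss", or a hypothetical "eval_validation_loss") is
        # treated as lower-is-better; everything else as higher-is-better.
        return not metric_for_best_model.endswith("loss")

    assert default_greater_is_better("eval_loss") is False
    assert default_greater_is_better("eval_validation_loss") is False  # now covered by endswith
    assert default_greater_is_better("eval_accuracy") is True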