From 0a4c2d50a279a6c9a077e2abedef0a83794ad016 Mon Sep 17 00:00:00 2001
From: GTimothee <39728445+GTimothee@users.noreply.github.com>
Date: Thu, 14 Nov 2024 18:58:50 +0100
Subject: [PATCH 1/7] Update trainer.py

---
 sentence_transformers/trainer.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/sentence_transformers/trainer.py b/sentence_transformers/trainer.py
index ef36d922f..509d5e2a1 100644
--- a/sentence_transformers/trainer.py
+++ b/sentence_transformers/trainer.py
@@ -15,6 +15,7 @@
 from transformers import __version__ as transformers_version
 from transformers.data.data_collator import DataCollator
 from transformers.integrations import WandbCallback
+from transformers.integrations.peft import PeftAdapterMixin
 from transformers.trainer import TRAINING_ARGS_NAME
 from transformers.trainer_utils import EvalLoopOutput
 
@@ -497,6 +498,9 @@ def _load_best_model(self) -> None:
         if not isinstance(self.model[0], Transformer):
             logger.info("Could not load best model, as the model is not a `transformers`-based model.")
             return
+        elif isinstance(self.model[0], PeftAdapterMixin):
+            logger.info("Could not load best model, as the model is a `PeftAdapterMixin`-based model. Please wait for an update of the transformers library to enable this feature.")
+            return
 
         try:
             if checkpoint := self.state.best_model_checkpoint:

From 718bb0662577ac97bf5b534b0cd27c4e324148cd Mon Sep 17 00:00:00 2001
From: GTimothee <39728445+GTimothee@users.noreply.github.com>
Date: Thu, 14 Nov 2024 19:04:22 +0100
Subject: [PATCH 2/7] Update trainer.py

---
 sentence_transformers/trainer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sentence_transformers/trainer.py b/sentence_transformers/trainer.py
index 509d5e2a1..b71d8f8bb 100644
--- a/sentence_transformers/trainer.py
+++ b/sentence_transformers/trainer.py
@@ -498,7 +498,7 @@ def _load_best_model(self) -> None:
         if not isinstance(self.model[0], Transformer):
             logger.info("Could not load best model, as the model is not a `transformers`-based model.")
             return
-        elif isinstance(self.model[0], PeftAdapterMixin):
+        elif isinstance(self.model[0].auto_model, PeftAdapterMixin):
             logger.info("Could not load best model, as the model is a `PeftAdapterMixin`-based model. Please wait for an update of the transformers library to enable this feature.")
             return
 

From 5716f954552c58b67dff1a3d7d0eb9e37c2c2c2b Mon Sep 17 00:00:00 2001
From: GTimothee <39728445+GTimothee@users.noreply.github.com>
Date: Thu, 14 Nov 2024 19:16:44 +0100
Subject: [PATCH 3/7] Update trainer.py

---
 sentence_transformers/trainer.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/sentence_transformers/trainer.py b/sentence_transformers/trainer.py
index b71d8f8bb..d8ba6710f 100644
--- a/sentence_transformers/trainer.py
+++ b/sentence_transformers/trainer.py
@@ -498,8 +498,8 @@ def _load_best_model(self) -> None:
         if not isinstance(self.model[0], Transformer):
             logger.info("Could not load best model, as the model is not a `transformers`-based model.")
             return
-        elif isinstance(self.model[0].auto_model, PeftAdapterMixin):
-            logger.info("Could not load best model, as the model is a `PeftAdapterMixin`-based model. Please wait for an update of the transformers library to enable this feature.")
+        elif len(self.model[0].auto_model.active_adapters()):
+            logger.info("Could not load best model, as the model has at least one adapter set. Please wait for an update of the transformers library to enable this feature.")
             return
 
         try:
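
Note on PATCH 1-3: the `isinstance` check turned out not to be discriminating, because in recent transformers releases every `PreTrainedModel` inherits `PeftAdapterMixin` whether or not an adapter is loaded, while `active_adapters()` raises `ValueError` on a model with no adapter set. The snippet below is only an illustrative sketch of that behaviour, not part of the patches; it assumes `peft` is installed and uses an arbitrary example checkpoint.

```python
from transformers import AutoModel
from transformers.integrations.peft import PeftAdapterMixin

# Any checkpoint works here; this one is only illustrative.
model = AutoModel.from_pretrained("sentence-transformers/all-MiniLM-L6-v2")

# True even though no PEFT adapter is attached, because PreTrainedModel
# itself inherits PeftAdapterMixin in recent transformers releases.
print(isinstance(model, PeftAdapterMixin))

# active_adapters() only succeeds once an adapter has been loaded/added;
# on a plain model it raises ValueError instead of returning an empty list.
try:
    print(model.active_adapters())
except ValueError:
    print("no adapter loaded")
```
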
From f22ad324e956e3c595929c92e5786677944a5d9a Mon Sep 17 00:00:00 2001
From: GTimothee <39728445+GTimothee@users.noreply.github.com>
Date: Thu, 14 Nov 2024 19:18:57 +0100
Subject: [PATCH 4/7] Update trainer.py

---
 sentence_transformers/trainer.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/sentence_transformers/trainer.py b/sentence_transformers/trainer.py
index d8ba6710f..ba14526e7 100644
--- a/sentence_transformers/trainer.py
+++ b/sentence_transformers/trainer.py
@@ -498,7 +498,10 @@ def _load_best_model(self) -> None:
         if not isinstance(self.model[0], Transformer):
             logger.info("Could not load best model, as the model is not a `transformers`-based model.")
             return
-        elif len(self.model[0].auto_model.active_adapters()):
+
+        try:
+            self.model[0].auto_model.active_adapters()
+        except ValueError:
             logger.info("Could not load best model, as the model has at least one adapter set. Please wait for an update of the transformers library to enable this feature.")
             return
 

From 8a2c36ff150515f6affb7923d549fa42ae937f60 Mon Sep 17 00:00:00 2001
From: GTimothee <39728445+GTimothee@users.noreply.github.com>
Date: Thu, 14 Nov 2024 19:19:53 +0100
Subject: [PATCH 5/7] Update trainer.py

---
 sentence_transformers/trainer.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/sentence_transformers/trainer.py b/sentence_transformers/trainer.py
index ba14526e7..8e74c552b 100644
--- a/sentence_transformers/trainer.py
+++ b/sentence_transformers/trainer.py
@@ -15,7 +15,6 @@
 from transformers import __version__ as transformers_version
 from transformers.data.data_collator import DataCollator
 from transformers.integrations import WandbCallback
-from transformers.integrations.peft import PeftAdapterMixin
 from transformers.trainer import TRAINING_ARGS_NAME
 from transformers.trainer_utils import EvalLoopOutput
 

From 0708cdcb8af458ab228537c4adc5deacfcaa675e Mon Sep 17 00:00:00 2001
From: GTimothee <39728445+GTimothee@users.noreply.github.com>
Date: Thu, 14 Nov 2024 19:24:05 +0100
Subject: [PATCH 6/7] Update trainer.py

---
 sentence_transformers/trainer.py | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/sentence_transformers/trainer.py b/sentence_transformers/trainer.py
index 8e74c552b..e4af4a44f 100644
--- a/sentence_transformers/trainer.py
+++ b/sentence_transformers/trainer.py
@@ -499,10 +499,11 @@ def _load_best_model(self) -> None:
             return
 
         try:
-            self.model[0].auto_model.active_adapters()
+            if len(self.model[0].auto_model.active_adapters()):
+                logger.info("Could not load best model, as the model has at least one adapter set. Please wait for an update of the transformers library to enable this feature.")
+                return
         except ValueError:
-            logger.info("Could not load best model, as the model has at least one adapter set. Please wait for an update of the transformers library to enable this feature.")
-            return
+            pass
 
         try:
             if checkpoint := self.state.best_model_checkpoint:
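
The shape that PATCH 4-6 converge on (a non-empty `active_adapters()` result means "an adapter is set, skip reloading the best checkpoint", while `ValueError` means "no adapter loaded, proceed") can be read as a small predicate. The helper below is a hypothetical distillation for illustration only; nothing in the patches adds it.

```python
def has_active_adapters(auto_model) -> bool:
    """Hypothetical helper mirroring the guard used in _load_best_model above."""
    try:
        # A non-empty list means at least one PEFT adapter is set on the model.
        return len(auto_model.active_adapters()) > 0
    except ValueError:
        # transformers' PeftAdapterMixin raises ValueError when no adapter has
        # been loaded, which here simply means there is nothing to skip.
        return False
```

With such a predicate, the guard in `_load_best_model` would read roughly `if has_active_adapters(self.model[0].auto_model): return` before attempting to reload the best checkpoint.
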
From 52d98ab18fe7df6db63943b8a9661400f96b9119 Mon Sep 17 00:00:00 2001
From: GTimothee <39728445+GTimothee@users.noreply.github.com>
Date: Thu, 14 Nov 2024 19:31:42 +0100
Subject: [PATCH 7/7] Update trainer.py

---
 sentence_transformers/trainer.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/sentence_transformers/trainer.py b/sentence_transformers/trainer.py
index e4af4a44f..b10083cb8 100644
--- a/sentence_transformers/trainer.py
+++ b/sentence_transformers/trainer.py
@@ -2,6 +2,7 @@
 
 import logging
 import os
+import warnings
 from collections import OrderedDict
 from contextlib import nullcontext
 from functools import partial
@@ -500,7 +501,13 @@ def _load_best_model(self) -> None:
 
         try:
             if len(self.model[0].auto_model.active_adapters()):
-                logger.info("Could not load best model, as the model has at least one adapter set. Please wait for an update of the transformers library to enable this feature.")
+                warn_msg = "Could not load best model, as the model has at least one adapter set. Please wait for an update of the transformers library to enable this feature."
+                warnings.warn(
+                    warn_msg,
+                    UserWarning,  # No need to import UserWarning; it's already available
+                    stacklevel=2
+                )
+                logger.info(warn_msg)
                 return
         except ValueError:
             pass
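
Because PATCH 7 emits the message via `warnings.warn(..., UserWarning)` in addition to `logger.info`, downstream code can manage it with the standard warnings filters. A minimal sketch, assuming the final message text above is kept as-is:

```python
import warnings

# Fail fast in CI if the best checkpoint was silently skipped because adapters are set:
warnings.filterwarnings(
    "error",
    message=r"Could not load best model, as the model has at least one adapter set",
    category=UserWarning,
)

# Or, once the behaviour is acknowledged, silence it in a noisy training loop:
warnings.filterwarnings(
    "ignore",
    message=r"Could not load best model, as the model has at least one adapter set",
    category=UserWarning,
)
```
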