From 5631d270766503c1ce4a2bb0e64bede66b319a15 Mon Sep 17 00:00:00 2001
From: Benedikt Fuchs
Date: Wed, 11 Oct 2023 20:33:19 +0200
Subject: [PATCH 1/3] fix wrong import

---
 flair/trainers/plugins/__init__.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/flair/trainers/plugins/__init__.py b/flair/trainers/plugins/__init__.py
index be02970a0..925b30cf8 100644
--- a/flair/trainers/plugins/__init__.py
+++ b/flair/trainers/plugins/__init__.py
@@ -1,5 +1,4 @@
 from .base import BasePlugin, Pluggable, TrainerPlugin, TrainingInterrupt
-from .functional.amp import AmpPlugin
 from .functional.anneal_on_plateau import AnnealingPlugin
 from .functional.checkpoints import CheckpointPlugin
 from .functional.linear_scheduler import LinearSchedulerPlugin
@@ -11,7 +10,6 @@
 from .metric_records import MetricName, MetricRecord
 
 __all__ = [
-    "AmpPlugin",
     "AnnealingPlugin",
     "CheckpointPlugin",
     "LinearSchedulerPlugin",

From df71f8501bd96cdea8d2efb8f3b5089c2a40dce8 Mon Sep 17 00:00:00 2001
From: Benedikt Fuchs
Date: Wed, 11 Oct 2023 21:10:06 +0200
Subject: [PATCH 2/3] fix typing for scheduler

---
 flair/trainers/plugins/functional/anneal_on_plateau.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/flair/trainers/plugins/functional/anneal_on_plateau.py b/flair/trainers/plugins/functional/anneal_on_plateau.py
index 0bdf1e6ea..62bf62da2 100644
--- a/flair/trainers/plugins/functional/anneal_on_plateau.py
+++ b/flair/trainers/plugins/functional/anneal_on_plateau.py
@@ -35,6 +35,7 @@ def __init__(
         self.anneal_factor = anneal_factor
         self.patience = patience
         self.initial_extra_patience = initial_extra_patience
+        self.scheduler: AnnealOnPlateau
 
     def store_learning_rate(self):
         optimizer = self.trainer.optimizer

From a290eb43883e42be40cdf3aef78e47ed9c1f369f Mon Sep 17 00:00:00 2001
From: Benedikt Fuchs
Date: Wed, 11 Oct 2023 22:06:25 +0200
Subject: [PATCH 3/3] fix anneal plugin

---
 .../plugins/functional/anneal_on_plateau.py |  2 --
 test_emb.py                                 | 27 +++++++++++++++++++
 2 files changed, 27 insertions(+), 2 deletions(-)
 create mode 100644 test_emb.py

diff --git a/flair/trainers/plugins/functional/anneal_on_plateau.py b/flair/trainers/plugins/functional/anneal_on_plateau.py
index 62bf62da2..e0bff9a19 100644
--- a/flair/trainers/plugins/functional/anneal_on_plateau.py
+++ b/flair/trainers/plugins/functional/anneal_on_plateau.py
@@ -118,6 +118,4 @@ def get_state(self) -> Dict[str, Any]:
             "patience": self.patience,
             "initial_extra_patience": self.initial_extra_patience,
             "anneal_with_restarts": self.anneal_with_restarts,
-            "bad_epochs": self.scheduler.num_bad_epochs,
-            "current_best": self.scheduler.best,
         }

diff --git a/test_emb.py b/test_emb.py
new file mode 100644
index 000000000..362db8562
--- /dev/null
+++ b/test_emb.py
@@ -0,0 +1,27 @@
+from flair.data import Sentence
+from flair.embeddings import TransformerWordEmbeddings
+
+phrase_0 = Sentence("a uui")
+embeddings_a = TransformerWordEmbeddings(
+    'roberta-base',
+    use_context=True,
+    use_context_separator=False,
+)
+ebd_a = embeddings_a.embed(phrase_0)
+
+phrase_1 = Sentence("a uui")
+embeddings_b = TransformerWordEmbeddings(
+    'roberta-base',
+    use_context=True,
+    use_context_separator=False,
+)
+ebd_b = embeddings_b.embed(phrase_1)
+ebd_b = [phrase_1]
+ebd_a = [phrase_0]
+
+print(
+    "token run 0:", ebd_a[-1][-1], "\n",
+    "embedding end run 0:", ebd_a[-1][-1].embedding.tolist()[-2:], "\n",
+    "token run 1: ", ebd_b[-1][-1], "\n",
+    "embedding end run 1:", ebd_b[-1][-1].embedding.tolist()[-2:]
+)