diff --git a/GenAIEval/evaluation/lm_evaluation_harness/lm_eval/models/huggingface.py b/GenAIEval/evaluation/lm_evaluation_harness/lm_eval/models/huggingface.py
index 76c39031..38f5d095 100644
--- a/GenAIEval/evaluation/lm_evaluation_harness/lm_eval/models/huggingface.py
+++ b/GenAIEval/evaluation/lm_evaluation_harness/lm_eval/models/huggingface.py
@@ -1159,10 +1159,11 @@ class GaudiHFModelAdapter(HFLM):
 
     def __init__(self, *args, **kwargs):
         if kwargs["device"] == "hpu":
             import habana_frameworks.torch.core as htcore
+            # Tweak generation so that it runs faster on Gaudi
             from optimum.habana.transformers.modeling_utils import adapt_transformers_to_gaudi
 
-        adapt_transformers_to_gaudi()
+            adapt_transformers_to_gaudi()
 
         super().__init__(*args, **kwargs)
 
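
For reference, below is a minimal sketch of how the patched __init__ reads once this hunk is applied. It assumes HFLM is already in scope in lm_eval/models/huggingface.py (it is the base class named in the hunk header), shows only this method, and reflects the reading of the -/+ pair as moving the adapt_transformers_to_gaudi() call inside the "hpu" branch.

# Sketch of GaudiHFModelAdapter.__init__ after the patch; HFLM is the base class
# already available in lm_eval/models/huggingface.py.
class GaudiHFModelAdapter(HFLM):
    def __init__(self, *args, **kwargs):
        if kwargs["device"] == "hpu":
            # Imported for its side effects on the Habana/HPU runtime; the name itself is unused.
            import habana_frameworks.torch.core as htcore

            # Tweak generation so that it runs faster on Gaudi
            from optimum.habana.transformers.modeling_utils import adapt_transformers_to_gaudi

            # The call sits next to the import it depends on, inside the "hpu" branch,
            # so non-HPU devices never touch a name that is only bound in this branch.
            adapt_transformers_to_gaudi()

        super().__init__(*args, **kwargs)

Callers select this path by constructing the adapter with device="hpu"; on any other device the branch is skipped and initialization falls through to HFLM unchanged.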