From b5d1b0d2072e1f0d182d6b08447cf0817732b653 Mon Sep 17 00:00:00 2001 From: root Date: Tue, 13 Aug 2024 19:20:22 +0000 Subject: [PATCH] changes for attribute testing --- llmfoundry/models/hf/hf_causal_lm.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/llmfoundry/models/hf/hf_causal_lm.py b/llmfoundry/models/hf/hf_causal_lm.py index b4f7db56c8..652903d82e 100644 --- a/llmfoundry/models/hf/hf_causal_lm.py +++ b/llmfoundry/models/hf/hf_causal_lm.py @@ -234,10 +234,13 @@ def build_inner_model( + 'Please `pip install llm-foundry[gpu]`.', ) - assert hasattr( - model_cls, - 'from_pretrained', - ), 'HF Model class is not supported, check arguments to function call!' + if not ( + hasattr(model_cls, 'from_pretrained') and + hasattr(model_cls, 'from_config') + ): + raise AttributeError( + f'{model_cls=} is missing `from_pretrained` or `from_config` support.', + ) # Hugging Face copies the modules into the # transformers modules cache. On particular systems, this operation seems to cause contention between