Better error message for bitsandbytes import (#27764)
* better error message

* fix logic

* fix log
SunMarc authored Dec 1, 2023
1 parent 7b6324e commit abd4cbd
Showing 1 changed file with 5 additions and 6 deletions.
11 changes: 5 additions & 6 deletions src/transformers/modeling_utils.py
@@ -1010,7 +1010,7 @@ def num_parameters(self, only_trainable: bool = False, exclude_embeddings: bool
             else:
                 raise ValueError(
                     "bitsandbytes is not installed but it seems that the model has been loaded in 4bit precision, something went wrong"
-                    " make sure to install bitsandbytes with `pip install bitsandbytes`."
+                    " make sure to install bitsandbytes with `pip install bitsandbytes`. You also need a GPU. "
                 )

             for param in total_parameters:
@@ -2746,11 +2746,13 @@ def from_pretrained(
             )

         if load_in_8bit or load_in_4bit:
+            if not torch.cuda.is_available():
+                raise RuntimeError("No GPU found. A GPU is needed for quantization.")
             if not (is_accelerate_available() and is_bitsandbytes_available()):
                 raise ImportError(
                     "Using `load_in_8bit=True` requires Accelerate: `pip install accelerate` and the latest version of"
-                    " bitsandbytes `pip install -i https://test.pypi.org/simple/ bitsandbytes` or pip install bitsandbytes` "
+                    " `pip install bitsandbytes`."
                 )

             if torch_dtype is None:
@@ -2764,10 +2766,7 @@ def from_pretrained(
                 torch_dtype = torch.float16

             if device_map is None:
-                if torch.cuda.is_available():
-                    device_map = {"": torch.cuda.current_device()}
-                else:
-                    raise RuntimeError("No GPU found. A GPU is needed for quantization.")
+                device_map = {"": torch.cuda.current_device()}
                 logger.info(
                     "The device_map was not initialized. "
                     "Setting device_map to {'':torch.cuda.current_device()}. "
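For illustration, here is a minimal sketch (not part of the commit) of how the reordered guards surface to a caller. It assumes transformers of this era (~v4.36), where `from_pretrained` still accepted `load_in_8bit` directly; `MODEL_ID` is a hypothetical placeholder, and any bitsandbytes-quantizable checkpoint would behave the same.

    # Minimal sketch, assuming a transformers version with this commit applied.
    from transformers import AutoModelForCausalLM

    MODEL_ID = "facebook/opt-350m"  # placeholder checkpoint, any quantizable model works

    try:
        model = AutoModelForCausalLM.from_pretrained(MODEL_ID, load_in_8bit=True)
    except RuntimeError as err:
        # On a CPU-only machine, the new early guard fires before any import check:
        # "No GPU found. A GPU is needed for quantization."
        print(f"RuntimeError: {err}")
    except ImportError as err:
        # With a GPU but without accelerate/bitsandbytes installed,
        # the (now clearer) install hint is raised instead.
        print(f"ImportError: {err}")

The net effect of the diff: the GPU check now runs once, up front, for both 8-bit and 4-bit loads, so the later `device_map` fallback no longer needs its own `torch.cuda.is_available()` branch.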
