[HFQuantizer] Remove check_packages_compatibility logic (#28789)
remove `check_packages_compatibility` logic
younesbelkada authored Jan 31, 2024
1 parent ae0c27a commit f9f1f2a
Showing 1 changed file with 0 additions and 22 deletions.
src/transformers/quantizers/base.py (0 additions, 22 deletions)
@@ -15,7 +15,6 @@
from typing import TYPE_CHECKING, Any, Dict, Optional, Union

from ..utils import is_torch_available
from ..utils.import_utils import _is_package_available
from ..utils.quantization_config import QuantizationConfigMixin


@@ -64,8 +63,6 @@ def __init__(self, quantization_config: QuantizationConfigMixin, **kwargs):
f"pass `pre_quantized=True` while knowing what you are doing."
)

self.check_packages_compatibility()

def update_torch_dtype(self, torch_dtype: "torch.dtype") -> "torch.dtype":
"""
Some quantization methods require to explicitly set the dtype of the model to a
@@ -152,25 +149,6 @@ def validate_environment(self, *args, **kwargs):
"""
return

def check_packages_compatibility(self):
"""
Check the compatibility of the quantizer with respect to the current environment. Loops over all packages
name under `self.required_packages` and checks if that package is available.
"""
if self.required_packages is not None:
non_available_packages = []
for package_name in self.required_packages:
is_package_available = _is_package_available(package_name)
if not is_package_available:
non_available_packages.append(package_name)

if len(non_available_packages) > 0:
raise ValueError(
f"The packages {self.required_packages} are required to use {self.__class__.__name__}"
f" the following packages are missing in your environment: {non_available_packages}, please make sure"
f" to install them in order to use the quantizer."
)

def preprocess_model(self, model: "PreTrainedModel", **kwargs):
"""
Setting model attributes and/or converting model before weights loading. At this point
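For reference, the deleted helper only verified that every name listed in `self.required_packages` could be imported, and raised a `ValueError` naming the missing packages otherwise. A minimal standalone sketch of that behavior (using `importlib.util.find_spec` in place of the library-internal `_is_package_available` helper, purely for illustration) could look like this:

# Illustrative sketch only, not part of the commit: reproduces what the removed
# helper did, as a standalone function using importlib instead of the
# transformers-internal _is_package_available.
import importlib.util
from typing import List, Optional

def check_required_packages(required_packages: Optional[List[str]]) -> None:
    """Raise a ValueError if any required package is missing from the environment."""
    if not required_packages:
        return
    missing = [name for name in required_packages if importlib.util.find_spec(name) is None]
    if missing:
        raise ValueError(
            f"The packages {required_packages} are required, but the following are "
            f"missing from your environment: {missing}. Please install them first."
        )

How each quantizer checks its own dependencies after this change is not shown here; the commit only removes the shared helper and the `__init__` call to it.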
