From 2807d386f7ff22b1fe5d799935536e0675d40e0a Mon Sep 17 00:00:00 2001 From: IlyasMoutawwakil Date: Fri, 26 Apr 2024 10:15:23 +0200 Subject: [PATCH 1/5] deprecated use_auth_token --- optimum/configuration_utils.py | 30 ++++++- optimum/exporters/onnx/__main__.py | 21 ++++- optimum/exporters/tasks.py | 58 +++++++++--- optimum/modeling_base.py | 82 +++++++++++------ optimum/onnxruntime/modeling_decoder.py | 31 ++++++- optimum/onnxruntime/modeling_diffusion.py | 27 +++++- optimum/onnxruntime/modeling_ort.py | 105 ++++++++++++++++++---- optimum/onnxruntime/modeling_seq2seq.py | 38 ++++++-- optimum/onnxruntime/quantization.py | 25 ++++-- optimum/pipelines/pipelines_base.py | 2 +- optimum/utils/file_utils.py | 33 +++++-- optimum/utils/testing_utils.py | 9 +- tests/onnxruntime/test_modeling.py | 20 ++--- tests/test_modeling_base.py | 2 +- 14 files changed, 379 insertions(+), 104 deletions(-) diff --git a/optimum/configuration_utils.py b/optimum/configuration_utils.py index 3216d4a94c3..ab5d6c057f8 100644 --- a/optimum/configuration_utils.py +++ b/optimum/configuration_utils.py @@ -18,9 +18,9 @@ import json import os import re +import warnings from typing import Any, Dict, List, Tuple, Union -from huggingface_hub import HfFolder from packaging import version from transformers import PretrainedConfig from transformers import __version__ as transformers_version_str @@ -93,7 +93,19 @@ def save_pretrained(self, save_directory: Union[str, os.PathLike], push_to_hub: repo_id = self._create_repo(repo_id, **kwargs) use_auth_token = kwargs.get("use_auth_token", None) - token = HfFolder.get_token() if use_auth_token is True else use_auth_token + token = kwargs.get("token", None) + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError( + "You cannot use both `use_auth_token` and `token` arguments at the same time." + ) + kwargs["token"] = use_auth_token + token = use_auth_token files_timestamps = self._get_files_timestamps(save_directory) @@ -197,6 +209,7 @@ def _get_config_dict( resume_download = kwargs.pop("resume_download", False) proxies = kwargs.pop("proxies", None) use_auth_token = kwargs.pop("use_auth_token", None) + token = kwargs.pop("token", None) local_files_only = kwargs.pop("local_files_only", False) revision = kwargs.pop("revision", None) trust_remote_code = kwargs.pop("trust_remote_code", None) @@ -205,6 +218,15 @@ def _get_config_dict( from_auto_class = kwargs.pop("_from_auto", False) commit_hash = kwargs.pop("_commit_hash", None) + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + if trust_remote_code is True: logger.warning( "The argument `trust_remote_code` is to be used with Auto classes. 
It has no effect here and is" @@ -255,7 +277,7 @@ def _get_config_dict( proxies=proxies, resume_download=resume_download, local_files_only=local_files_only, - use_auth_token=use_auth_token, + token=token, user_agent=user_agent, ) else: @@ -268,7 +290,7 @@ def _get_config_dict( proxies=proxies, resume_download=resume_download, local_files_only=local_files_only, - use_auth_token=use_auth_token, + token=token, user_agent=user_agent, revision=revision, subfolder=subfolder, diff --git a/optimum/exporters/onnx/__main__.py b/optimum/exporters/onnx/__main__.py index 585a779c2e5..1e36af06ade 100644 --- a/optimum/exporters/onnx/__main__.py +++ b/optimum/exporters/onnx/__main__.py @@ -15,6 +15,7 @@ """Entry point to the optimum.exporters.onnx command line.""" import argparse +import warnings from pathlib import Path from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE @@ -66,6 +67,7 @@ def main_export( force_download: bool = False, local_files_only: bool = False, use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, for_ort: bool = False, do_validation: bool = True, model_kwargs: Optional[Dict[str, Any]] = None, @@ -135,9 +137,11 @@ def main_export( cached versions if they exist. local_files_only (`Optional[bool]`, defaults to `False`): Whether or not to only look at local files (i.e., do not try to download the model). - use_auth_token (`Optional[str]`, defaults to `None`): + use_auth_token (`Optional[Union[bool,str]]`, defaults to `None`): + Deprecated. Please use the `token` argument instead. + token (`Optional[Union[bool,str]]`, defaults to `None`): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `huggingface_hub.constants.HF_TOKEN_PATH`). model_kwargs (`Optional[Dict[str, Any]]`, defaults to `None`): Experimental usage: keyword arguments to pass to the model during the export. This argument should be used along the `custom_onnx_configs` argument @@ -174,6 +178,15 @@ def main_export( ``` """ + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + if fp16: if dtype is not None: raise ValueError( @@ -250,7 +263,7 @@ def main_export( subfolder=subfolder, revision=revision, cache_dir=cache_dir, - use_auth_token=use_auth_token, + token=token, local_files_only=local_files_only, force_download=force_download, trust_remote_code=trust_remote_code, @@ -283,7 +296,7 @@ def main_export( subfolder=subfolder, revision=revision, cache_dir=cache_dir, - use_auth_token=use_auth_token, + token=token, local_files_only=local_files_only, force_download=force_download, trust_remote_code=trust_remote_code, diff --git a/optimum/exporters/tasks.py b/optimum/exporters/tasks.py index ca71dca92a9..7cbe374eb06 100644 --- a/optimum/exporters/tasks.py +++ b/optimum/exporters/tasks.py @@ -18,12 +18,14 @@ import inspect import itertools import os +import warnings from functools import partial from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Set, Tuple, Type, Union import huggingface_hub from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE +from huggingface_hub.utils import OfflineModeIsEnabled from packaging import version from requests.exceptions import ConnectionError as RequestsConnectionError from transformers import AutoConfig, PretrainedConfig, is_tf_available, is_torch_available @@ -1379,9 +1381,19 @@ def get_model_files( model_name_or_path: Union[str, Path], subfolder: str = "", cache_dir: str = HUGGINGFACE_HUB_CACHE, - use_auth_token: Optional[str] = None, + use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, ): + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + request_exception = None full_model_path = Path(model_name_or_path) / subfolder if full_model_path.is_dir(): @@ -1397,12 +1409,12 @@ def get_model_files( all_files = huggingface_hub.list_repo_files( model_name_or_path, repo_type="model", - token=use_auth_token, + token=token, revision=revision, ) if subfolder != "": all_files = [file[len(subfolder) + 1 :] for file in all_files if file.startswith(subfolder)] - except (RequestsConnectionError, huggingface_hub.utils._http.OfflineModeIsEnabled) as e: + except (RequestsConnectionError, OfflineModeIsEnabled) as e: request_exception = e object_id = model_name_or_path.replace("/", "--") full_model_path = Path(cache_dir, f"models--{object_id}") @@ -1576,7 +1588,7 @@ def _infer_task_from_model_name_or_path( ) try: model_info = huggingface_hub.model_info(model_name_or_path, revision=revision) - except (RequestsConnectionError, huggingface_hub.utils._http.OfflineModeIsEnabled): + except (RequestsConnectionError, OfflineModeIsEnabled): raise RuntimeError( f"Hugging Face Hub is not reachable and we cannot infer the task from a cached model. Make sure you are not offline, or otherwise please specify the `task` (or `--task` in command-line) argument ({', '.join(TasksManager.get_all_tasks())})." 
) @@ -1693,7 +1705,8 @@ def infer_library_from_model( revision: Optional[str] = None, cache_dir: str = HUGGINGFACE_HUB_CACHE, library_name: Optional[str] = None, - use_auth_token: Optional[str] = None, + use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, ): """ Infers the library from the model repo. @@ -1711,16 +1724,30 @@ def infer_library_from_model( Path to a directory in which a downloaded pretrained model weights have been cached if the standard cache should not be used. library_name (`Optional[str]`, *optional*): The library name of the model. Can be any of "transformers", "timm", "diffusers", "sentence_transformers". - use_auth_token (`Optional[str]`, defaults to `None`): - The token to use as HTTP bearer authorization for remote files. + use_auth_token (`Optional[Union[bool,str]]`, defaults to `None`): + Deprecated. Please use the `token` argument instead. + token (`Optional[Union[bool,str]]`, defaults to `None`): + The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated + when running `huggingface-cli login` (stored in `huggingface_hub.constants.HF_TOKEN_PATH`). + Returns: `str`: The library name automatically detected from the model repo. """ + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + if library_name is not None: return library_name all_files, _ = TasksManager.get_model_files( - model_name_or_path, subfolder, cache_dir, use_auth_token=use_auth_token + model_name_or_path, subfolder, cache_dir, token=token, revision=revision ) if "model_index.json" in all_files: @@ -1736,7 +1763,7 @@ def infer_library_from_model( "subfolder": subfolder, "revision": revision, "cache_dir": cache_dir, - "use_auth_token": use_auth_token, + "token": token, } config_dict, kwargs = PretrainedConfig.get_config_dict(model_name_or_path, **kwargs) model_config = PretrainedConfig.from_dict(config_dict, **kwargs) @@ -1912,12 +1939,23 @@ def get_model_from_task( elif library_name == "sentence_transformers": cache_folder = model_kwargs.pop("cache_folder", None) use_auth_token = model_kwargs.pop("use_auth_token", None) + token = model_kwargs.pop("token", None) trust_remote_code = model_kwargs.pop("trust_remote_code", False) + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + model = model_class( model_name_or_path, device=device, cache_folder=cache_folder, - use_auth_token=use_auth_token, + token=token, trust_remote_code=trust_remote_code, ) else: diff --git a/optimum/modeling_base.py b/optimum/modeling_base.py index 9523f5c5042..92fbc750dc3 100644 --- a/optimum/modeling_base.py +++ b/optimum/modeling_base.py @@ -17,11 +17,12 @@ import logging import os import subprocess +import warnings from abc import ABC, abstractmethod from pathlib import Path from typing import TYPE_CHECKING, Optional, Union -from huggingface_hub import HfApi, HfFolder +from huggingface_hub import create_repo, upload_file from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE from transformers import AutoConfig, PretrainedConfig, add_start_docstrings @@ -51,9 +52,11 @@ force_download (`bool`, defaults to `True`): Whether or not to force the (re-)download of the model weights and configuration files, overriding the cached versions if they exist. - use_auth_token (`Optional[str]`, defaults to `None`): + use_auth_token (`Optional[Union[bool,str]]`, defaults to `None`): + Deprecated. Please use the `token` argument instead. + token (`Optional[Union[bool,str]]`, defaults to `None`): The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated - when running `transformers-cli login` (stored in `~/.huggingface`). + when running `huggingface-cli login` (stored in `huggingface_hub.constants.HF_TOKEN_PATH`). cache_dir (`Optional[str]`, defaults to `None`): Path to a directory in which a downloaded pretrained model configuration should be cached if the standard cache should not be used. @@ -156,33 +159,33 @@ def push_to_hub( save_directory: str, repository_id: str, private: Optional[bool] = None, - use_auth_token: Union[bool, str] = True, + use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, ) -> str: - if isinstance(use_auth_token, str): - huggingface_token = use_auth_token - elif use_auth_token: - huggingface_token = HfFolder.get_token() - else: - raise ValueError("You need to proivde `use_auth_token` to be able to push to the hub") - api = HfApi() - - user = api.whoami(huggingface_token) - self.git_config_username_and_email(git_email=user["email"], git_user=user["fullname"]) - - api.create_repo( - token=huggingface_token, + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + + create_repo( + token=token, repo_id=repository_id, exist_ok=True, private=private, ) + for path, subdirs, files in os.walk(save_directory): for name in files: local_file_path = os.path.join(path, name) _, hub_file_path = os.path.split(local_file_path) # FIXME: when huggingface_hub fixes the return of upload_file try: - api.upload_file( - token=huggingface_token, + upload_file( + token=token, repo_id=f"{repository_id}", path_or_fileobj=os.path.join(os.getcwd(), local_file_path), path_in_repo=hub_file_path, @@ -223,17 +226,28 @@ def _load_config( revision: Optional[str] = None, cache_dir: str = HUGGINGFACE_HUB_CACHE, use_auth_token: Optional[Union[bool, str]] = False, + token: Optional[Union[bool, str]] = None, force_download: bool = False, subfolder: str = "", trust_remote_code: bool = False, ) -> PretrainedConfig: + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + try: config = AutoConfig.from_pretrained( pretrained_model_name_or_path=config_name_or_path, revision=revision, cache_dir=cache_dir, force_download=force_download, - use_auth_token=use_auth_token, + token=token, subfolder=subfolder, trust_remote_code=trust_remote_code, ) @@ -245,7 +259,7 @@ def _load_config( revision=revision, cache_dir=cache_dir, force_download=force_download, - use_auth_token=use_auth_token, + token=token, trust_remote_code=trust_remote_code, ) logger.info( @@ -261,6 +275,7 @@ def _from_pretrained( model_id: Union[str, Path], config: PretrainedConfig, use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -277,6 +292,7 @@ def _from_transformers( model_id: Union[str, Path], config: PretrainedConfig, use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -297,6 +313,7 @@ def _export( model_id: Union[str, Path], config: PretrainedConfig, use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -317,7 +334,8 @@ def from_pretrained( model_id: Union[str, Path], export: bool = False, force_download: bool = False, - use_auth_token: Optional[str] = None, + use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, cache_dir: str = HUGGINGFACE_HUB_CACHE, subfolder: str = "", config: Optional[PretrainedConfig] = None, @@ -330,6 +348,16 @@ def from_pretrained( Returns: `OptimizedModel`: The loaded optimized model. """ + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + if isinstance(model_id, Path): model_id = model_id.as_posix() @@ -347,9 +375,7 @@ def from_pretrained( ) model_id, revision = model_id.split("@") - library_name = TasksManager.infer_library_from_model( - model_id, subfolder, revision, cache_dir, use_auth_token=use_auth_token - ) + library_name = TasksManager.infer_library_from_model(model_id, subfolder, revision, cache_dir, token=token) if library_name == "timm": config = PretrainedConfig.from_pretrained(model_id, subfolder, revision) @@ -374,7 +400,7 @@ def from_pretrained( model_id, revision=revision, cache_dir=cache_dir, - use_auth_token=use_auth_token, + token=token, force_download=force_download, subfolder=subfolder, trust_remote_code=trust_remote_code, @@ -384,7 +410,7 @@ def from_pretrained( config, revision=revision, cache_dir=cache_dir, - use_auth_token=use_auth_token, + token=token, force_download=force_download, subfolder=subfolder, trust_remote_code=trust_remote_code, @@ -405,7 +431,7 @@ def from_pretrained( revision=revision, cache_dir=cache_dir, force_download=force_download, - use_auth_token=use_auth_token, + token=token, subfolder=subfolder, local_files_only=local_files_only, trust_remote_code=trust_remote_code, diff --git a/optimum/onnxruntime/modeling_decoder.py b/optimum/onnxruntime/modeling_decoder.py index 455236126b6..bde882234f4 100644 --- a/optimum/onnxruntime/modeling_decoder.py +++ b/optimum/onnxruntime/modeling_decoder.py @@ -14,6 +14,7 @@ """Classes handling causal-lm related architectures in ONNX Runtime.""" import logging +import warnings from pathlib import Path from tempfile import TemporaryDirectory from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union @@ -406,6 +407,7 @@ def _from_pretrained( model_id: Union[str, Path], config: "PretrainedConfig", use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -421,6 +423,16 @@ def _from_pretrained( model_save_dir: Optional[Union[str, Path, TemporaryDirectory]] = None, **kwargs, ) -> "ORTModelForCausalLM": + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + model_path = Path(model_id) # We do not implement the logic for use_cache=False, use_merged=True @@ -450,7 +462,7 @@ def _from_pretrained( [DECODER_MERGED_ONNX_FILE_PATTERN], argument_name=None, subfolder=subfolder, - use_auth_token=use_auth_token, + token=token, revision=revision, ) use_merged = True @@ -472,7 +484,7 @@ def _from_pretrained( [r"^((?!decoder).)*.onnx", pattern], argument_name=None, subfolder=subfolder, - use_auth_token=use_auth_token, + token=token, revision=revision, ) file_name = decoder_path.name @@ -494,7 +506,7 @@ def _from_pretrained( model_cache_path, preprocessors = cls._cached_file( model_path=model_path, - use_auth_token=use_auth_token, + token=token, revision=revision, force_download=force_download, cache_dir=cache_dir, @@ -576,6 +588,7 @@ def _from_transformers( model_id: str, config: "PretrainedConfig", use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: str = "main", force_download: bool = True, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -590,6 +603,16 @@ def _from_transformers( use_io_binding: Optional[bool] = None, task: Optional[str] = None, ) -> "ORTModelForCausalLM": + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + file_name = ONNX_WEIGHTS_NAME if use_merged: @@ -615,7 +638,7 @@ def _from_transformers( subfolder=subfolder, revision=revision, cache_dir=cache_dir, - use_auth_token=use_auth_token, + token=token, local_files_only=local_files_only, force_download=force_download, trust_remote_code=trust_remote_code, diff --git a/optimum/onnxruntime/modeling_diffusion.py b/optimum/onnxruntime/modeling_diffusion.py index 63360ce80a8..c5f96f16849 100644 --- a/optimum/onnxruntime/modeling_diffusion.py +++ b/optimum/onnxruntime/modeling_diffusion.py @@ -16,6 +16,7 @@ import logging import os import shutil +import warnings from abc import abstractmethod from pathlib import Path from tempfile import TemporaryDirectory @@ -272,6 +273,7 @@ def _from_pretrained( model_id: Union[str, Path], config: Dict[str, Any], use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, cache_dir: str = HUGGINGFACE_HUB_CACHE, vae_decoder_file_name: str = ONNX_WEIGHTS_NAME, @@ -287,6 +289,16 @@ def _from_pretrained( model_save_dir: Optional[Union[str, Path, TemporaryDirectory]] = None, **kwargs, ): + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + if provider == "TensorrtExecutionProvider": raise ValueError("The provider `'TensorrtExecutionProvider'` is not supported") @@ -314,7 +326,7 @@ def _from_pretrained( model_id, cache_dir=cache_dir, local_files_only=local_files_only, - use_auth_token=use_auth_token, + token=token, revision=revision, allow_patterns=allow_patterns, ignore_patterns=["*.msgpack", "*.safetensors", "*.bin", "*.xml"], @@ -376,6 +388,7 @@ def _from_transformers( model_id: str, config: Optional[str] = None, use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: str = "main", force_download: bool = True, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -388,6 +401,16 @@ def _from_transformers( use_io_binding: Optional[bool] = None, task: Optional[str] = None, ) -> "ORTStableDiffusionPipeline": + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + if task is None: task = cls._auto_model_to_task(cls.auto_model_class) @@ -403,7 +426,7 @@ def _from_transformers( subfolder=subfolder, revision=revision, cache_dir=cache_dir, - use_auth_token=use_auth_token, + token=token, local_files_only=local_files_only, force_download=force_download, trust_remote_code=trust_remote_code, diff --git a/optimum/onnxruntime/modeling_ort.py b/optimum/onnxruntime/modeling_ort.py index eb38a7fef12..125e770de57 100644 --- a/optimum/onnxruntime/modeling_ort.py +++ b/optimum/onnxruntime/modeling_ort.py @@ -16,13 +16,14 @@ import logging import re import shutil +import warnings from pathlib import Path from tempfile import TemporaryDirectory from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, Union import numpy as np import torch -from huggingface_hub import HfFolder, hf_hub_download +from huggingface_hub import hf_hub_download from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE from huggingface_hub.utils import EntryNotFoundError from transformers import ( @@ -410,9 +411,20 @@ def infer_onnx_filename( argument_name: str, subfolder: str = "", use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, fail_if_not_found: bool = True, ) -> str: + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + onnx_files = [] for pattern in patterns: onnx_files = find_files_matching_pattern( @@ -420,7 +432,7 @@ def infer_onnx_filename( pattern, glob_pattern="**/*.onnx", subfolder=subfolder, - use_auth_token=use_auth_token, + token=token, revision=revision, ) if onnx_files: @@ -448,6 +460,7 @@ def _from_pretrained( model_id: Union[str, Path], config: "PretrainedConfig", use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -461,6 +474,16 @@ def _from_pretrained( model_save_dir: Optional[Union[str, Path, TemporaryDirectory]] = None, **kwargs, ) -> "ORTModel": + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + model_path = Path(model_id) regular_onnx_filenames = ORTModel._generate_regular_names_for_filename(ONNX_WEIGHTS_NAME) @@ -468,13 +491,8 @@ def _from_pretrained( if model_path.is_dir(): onnx_files = list(model_path.glob("*.onnx")) else: - if isinstance(use_auth_token, bool): - token = HfFolder().get_token() - else: - token = use_auth_token - repo_files, _ = TasksManager.get_model_files( - model_id, revision=revision, cache_dir=cache_dir, use_auth_token=token + model_id, revision=revision, cache_dir=cache_dir, token=token ) repo_files = map(Path, repo_files) @@ -499,7 +517,7 @@ def _from_pretrained( model_cache_path, preprocessors = cls._cached_file( model_path=model_path, - use_auth_token=use_auth_token, + token=token, revision=revision, force_download=force_download, cache_dir=cache_dir, @@ -535,6 +553,7 @@ def _from_transformers( model_id: str, config: "PretrainedConfig", use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -548,13 +567,23 @@ def _from_transformers( task: Optional[str] = None, ) -> "ORTModel": """The method will be deprecated in future releases.""" + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + return cls._export( model_id=model_id, config=config, revision=revision, cache_dir=cache_dir, force_download=force_download, - use_auth_token=use_auth_token, + token=token, subfolder=subfolder, local_files_only=local_files_only, trust_remote_code=trust_remote_code, @@ -571,6 +600,7 @@ def _export( model_id: str, config: "PretrainedConfig", use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -583,6 +613,16 @@ def _export( use_io_binding: Optional[bool] = None, task: Optional[str] = None, ) -> "ORTModel": + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + if task is None: task = cls._auto_model_to_task(cls.auto_model_class) @@ -598,7 +638,7 @@ def _export( subfolder=subfolder, revision=revision, cache_dir=cache_dir, - use_auth_token=use_auth_token, + token=token, local_files_only=local_files_only, force_download=force_download, trust_remote_code=trust_remote_code, @@ -624,7 +664,8 @@ def from_pretrained( model_id: Union[str, Path], export: bool = False, force_download: bool = False, - use_auth_token: Optional[str] = None, + use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, cache_dir: str = HUGGINGFACE_HUB_CACHE, subfolder: str = "", config: Optional["PretrainedConfig"] = None, @@ -666,11 +707,21 @@ def from_pretrained( Returns: `ORTModel`: The loaded ORTModel model. """ + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + return super().from_pretrained( model_id, export=export, force_download=force_download, - use_auth_token=use_auth_token, + token=token, cache_dir=cache_dir, subfolder=subfolder, config=config, @@ -856,6 +907,7 @@ def raise_on_numpy_input_io_binding(self, use_torch: bool): def _cached_file( model_path: Union[Path, str], use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -863,6 +915,16 @@ def _cached_file( subfolder: str = "", local_files_only: bool = False, ): + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + model_path = Path(model_path) # locates a file in a local folder and repo, downloads and cache it if necessary. 
@@ -874,7 +936,7 @@ def _cached_file( repo_id=model_path.as_posix(), filename=file_name, subfolder=subfolder, - use_auth_token=use_auth_token, + token=token, revision=revision, cache_dir=cache_dir, force_download=force_download, @@ -886,7 +948,7 @@ def _cached_file( repo_id=model_path.as_posix(), subfolder=subfolder, filename=file_name + "_data", - use_auth_token=use_auth_token, + token=token, revision=revision, cache_dir=cache_dir, force_download=force_download, @@ -1021,6 +1083,7 @@ def _export( model_id: str, config: "PretrainedConfig", use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -1033,6 +1096,16 @@ def _export( use_io_binding: Optional[bool] = None, task: Optional[str] = None, ) -> "ORTModel": + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + if task is None: task = cls._auto_model_to_task(cls.auto_model_class) @@ -1049,7 +1122,7 @@ def _export( subfolder=subfolder, revision=revision, cache_dir=cache_dir, - use_auth_token=use_auth_token, + token=token, local_files_only=local_files_only, force_download=force_download, trust_remote_code=trust_remote_code, diff --git a/optimum/onnxruntime/modeling_seq2seq.py b/optimum/onnxruntime/modeling_seq2seq.py index 2da4b4c8c45..e5a26f8346c 100644 --- a/optimum/onnxruntime/modeling_seq2seq.py +++ b/optimum/onnxruntime/modeling_seq2seq.py @@ -777,6 +777,7 @@ def _from_pretrained( model_id: Union[str, Path], config: "PretrainedConfig", use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: Optional[str] = None, force_download: bool = False, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -794,6 +795,16 @@ def _from_pretrained( model_save_dir: Optional[Union[str, Path, TemporaryDirectory]] = None, **kwargs, ): + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + model_path = Path(model_id) # We do not implement the logic for use_cache=False, use_merged=True @@ -815,7 +826,7 @@ def _from_pretrained( [DECODER_MERGED_ONNX_FILE_PATTERN], argument_name=None, subfolder=subfolder, - use_auth_token=use_auth_token, + token=token, revision=revision, ) use_merged = True @@ -838,7 +849,7 @@ def _from_pretrained( [DECODER_ONNX_FILE_PATTERN], "decoder_file_name", subfolder=subfolder, - use_auth_token=use_auth_token, + token=token, revision=revision, ) else: @@ -866,7 +877,7 @@ def _from_pretrained( [DECODER_WITH_PAST_ONNX_FILE_PATTERN], "decoder_with_past_file_name", subfolder=subfolder, - use_auth_token=use_auth_token, + token=token, revision=revision, ) except FileNotFoundError as e: @@ -896,7 +907,7 @@ def _from_pretrained( [ENCODER_ONNX_FILE_PATTERN], "encoder_file_name", subfolder=subfolder, - use_auth_token=use_auth_token, + token=token, revision=revision, ) else: @@ -932,7 +943,7 @@ def _from_pretrained( repo_id=model_id, subfolder=subfolder, filename=filename, - use_auth_token=use_auth_token, + token=token, revision=revision, cache_dir=cache_dir, force_download=force_download, @@ -944,7 +955,7 @@ def _from_pretrained( repo_id=model_id, subfolder=subfolder, filename=filename + "_data", - use_auth_token=use_auth_token, + token=token, revision=revision, cache_dir=cache_dir, force_download=force_download, @@ -989,7 +1000,7 @@ def _from_pretrained( cache_dir=cache_dir, force_download=force_download, local_files_only=local_files_only, - use_auth_token=use_auth_token, + token=token, revision=revision, subfolder=subfolder, ) @@ -1022,6 +1033,7 @@ def _from_transformers( model_id: str, config: "PretrainedConfig", use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, revision: str = "main", force_download: bool = True, cache_dir: str = HUGGINGFACE_HUB_CACHE, @@ -1036,6 +1048,16 @@ def _from_transformers( use_io_binding: Optional[bool] = None, task: Optional[str] = None, ) -> "ORTModelForConditionalGeneration": + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.", + FutureWarning, + ) + if token is not None: + raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.") + token = use_auth_token + if use_cache is False and use_merged is True: raise ValueError( "The incompatible arguments use_cache=False, use_merged=True were passed to" @@ -1062,7 +1084,7 @@ def _from_transformers( subfolder=subfolder, revision=revision, cache_dir=cache_dir, - use_auth_token=use_auth_token, + token=token, local_files_only=local_files_only, force_download=force_download, trust_remote_code=trust_remote_code, diff --git a/optimum/onnxruntime/quantization.py b/optimum/onnxruntime/quantization.py index d56e301c3cf..a12321a132c 100644 --- a/optimum/onnxruntime/quantization.py +++ b/optimum/onnxruntime/quantization.py @@ -15,6 +15,7 @@ import logging import os +import warnings from collections import defaultdict from pathlib import Path from typing import TYPE_CHECKING, Callable, Dict, List, Optional, Tuple, Union @@ -441,7 +442,8 @@ def get_calibration_dataset( preprocess_function: Optional[Callable] = None, preprocess_batch: bool = True, seed: int = 2016, - use_auth_token: bool = False, + use_auth_token: Optional[Union[bool, str]] = None, + token: Optional[Union[bool, str]] = None, ) -> Dataset: """ Creates the calibration `datasets.Dataset` to use for the post-training static quantization calibration step. @@ -462,13 +464,26 @@ def get_calibration_dataset( Whether the `preprocess_function` should be batched. seed (`int`, defaults to 2016): The random seed to use when shuffling the calibration dataset. - use_auth_token (`bool`, defaults to `False`): - Whether to use the token generated when running `transformers-cli login` (necessary for some datasets - like ImageNet). + use_auth_token (`Optional[Union[bool,str]]`, defaults to `None`): + Deprecated. Please use the `token` argument instead. + token (`Optional[Union[bool,str]]`, defaults to `None`): + The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated + when running `huggingface-cli login` (stored in `huggingface_hub.constants.HF_TOKEN_PATH`). + Returns: The calibration `datasets.Dataset` to use for the post-training static quantization calibration step. """ + + if use_auth_token is not None: + warnings.warn( + "The `use_auth_token` argument is deprecated and will be removed soon. 
Please use the `token` argument instead.",
+                FutureWarning,
+            )
+            if token is not None:
+                raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
+            token = use_auth_token
+
         if dataset_name is None:
             raise ValueError(
                 "ORTQuantizer: Static quantization calibration step requires a dataset_name if no calib_dataset is "
@@ -479,7 +494,7 @@ def get_calibration_dataset(
             dataset_name,
             name=dataset_config_name,
             split=dataset_split,
-            use_auth_token=use_auth_token,
+            token=token,
         )
 
         if num_samples is not None:
diff --git a/optimum/pipelines/pipelines_base.py b/optimum/pipelines/pipelines_base.py
index e2046882bd6..32abbe805f2 100644
--- a/optimum/pipelines/pipelines_base.py
+++ b/optimum/pipelines/pipelines_base.py
@@ -246,7 +246,7 @@ def load_ort_pipeline(
             pattern,
             glob_pattern="**/*.onnx",
             subfolder=subfolder,
-            use_auth_token=token,
+            token=token,
             revision=revision,
         )
         export = len(onnx_files) == 0
diff --git a/optimum/utils/file_utils.py b/optimum/utils/file_utils.py
index 3afa5cea81e..16190709f83 100644
--- a/optimum/utils/file_utils.py
+++ b/optimum/utils/file_utils.py
@@ -15,10 +15,17 @@
 """Utility functions related to both local files and files on the Hugging Face Hub."""
 
 import re
+import warnings
 from pathlib import Path
 from typing import List, Optional, Union
 
-from huggingface_hub import HfApi, HfFolder, get_hf_file_metadata, hf_hub_url
+import huggingface_hub
+from huggingface_hub import get_hf_file_metadata, hf_hub_url
+
+from ..utils import logging
+
+
+logger = logging.get_logger(__name__)
 
 
 def validate_file_exists(
@@ -44,6 +51,7 @@ def find_files_matching_pattern(
     glob_pattern: str = "**/*",
     subfolder: str = "",
     use_auth_token: Optional[Union[bool, str]] = None,
+    token: Optional[Union[bool, str]] = None,
     revision: Optional[str] = None,
 ) -> List[Path]:
     """
@@ -59,7 +67,9 @@ def find_files_matching_pattern(
         subfolder (`str`, defaults to `""`):
             In case the model files are located inside a subfolder of the model directory / repo on the Hugging
             Face Hub, you can specify the subfolder name here.
-        use_auth_token (`Optional[bool, str]`, *optional*):
-            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
-            when running `transformers-cli login` (stored in `~/.huggingface`).
+        use_auth_token (`Optional[Union[bool,str]]`, defaults to `None`):
+            Deprecated. Please use the `token` argument instead.
+        token (`Optional[Union[bool,str]]`, defaults to `None`):
+            The token to use as HTTP bearer authorization for remote files. If `True`, will use the token generated
+            when running `huggingface-cli login` (stored in `huggingface_hub.constants.HF_TOKEN_PATH`).
         revision (`Optional[str]`, defaults to `None`):
@@ -68,6 +78,16 @@ def find_files_matching_pattern(
     Returns:
         `List[Path]`
     """
+
+    if use_auth_token is not None:
+        warnings.warn(
+            "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
+            FutureWarning,
+        )
+        if token is not None:
+            raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
+        token = use_auth_token
+
     model_path = Path(model_name_or_path) if isinstance(model_name_or_path, str) else model_name_or_path
     pattern = re.compile(f"{subfolder}/{pattern}" if subfolder != "" else pattern)
     if model_path.is_dir():
@@ -76,11 +96,7 @@ def find_files_matching_pattern(
         files = [p for p in files if re.search(pattern, str(p))]
     else:
         path = model_name_or_path
-        if isinstance(use_auth_token, bool):
-            token = HfFolder().get_token()
-        else:
-            token = use_auth_token
-        repo_files = map(Path, HfApi().list_repo_files(model_name_or_path, revision=revision, token=token))
+        repo_files = map(Path, huggingface_hub.list_repo_files(model_name_or_path, revision=revision, token=token))
         if subfolder != "":
             path = f"{path}/{subfolder}"
         files = [Path(p) for p in repo_files if re.match(pattern, str(p))]
diff --git a/optimum/utils/testing_utils.py b/optimum/utils/testing_utils.py
index f1c2f668e3c..972a8cd88d0 100644
--- a/optimum/utils/testing_utils.py
+++ b/optimum/utils/testing_utils.py
@@ -90,8 +90,9 @@ def require_hf_token(test_case):
     """
     Decorator marking a test that requires huggingface hub token.
     """
-    use_auth_token = os.environ.get("HF_AUTH_TOKEN", None)
-    if use_auth_token is None:
+    # Is HF_AUTH_TOKEN used instead of HF_TOKEN to avoid huggingface_hub picking it up?
+    hf_token = os.environ.get("HF_AUTH_TOKEN", None)
+    if hf_token is None:
         return unittest.skip("test requires hf token as `HF_AUTH_TOKEN` environment variable")(test_case)
     else:
         return test_case
@@ -101,9 +102,9 @@ def require_sigopt_token_and_project(test_case):
     """
     Decorator marking a test that requires sigopt API token.
     """
-    use_auth_token = os.environ.get("SIGOPT_API_TOKEN", None)
+    sigopt_api_token = os.environ.get("SIGOPT_API_TOKEN", None)
     has_sigopt_project = os.environ.get("SIGOPT_PROJECT", None)
-    if use_auth_token is None or has_sigopt_project is None:
+    if sigopt_api_token is None or has_sigopt_project is None:
         return unittest.skip("test requires an environment variable `SIGOPT_API_TOKEN` and `SIGOPT_PROJECT`")(
             test_case
         )
diff --git a/tests/onnxruntime/test_modeling.py b/tests/onnxruntime/test_modeling.py
index dd2bc858c41..15920111c96 100644
--- a/tests/onnxruntime/test_modeling.py
+++ b/tests/onnxruntime/test_modeling.py
@@ -942,7 +942,7 @@ def test_load_model_from_hub_private(self):
         # Read token of fxmartyclone (dummy user).
         token = "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"
-        model = ORTModelForCustomTasks.from_pretrained("fxmartyclone/tiny-onnx-private-2", use_auth_token=token)
+        model = ORTModelForCustomTasks.from_pretrained("fxmartyclone/tiny-onnx-private-2", token=token)
         self.assertIsInstance(model.model, onnxruntime.InferenceSession)
         self.assertIsInstance(model.config, PretrainedConfig)
@@ -1113,7 +1113,7 @@ def test_save_model_from_hub(self):
             model = ORTModel.from_pretrained(self.LOCAL_MODEL_PATH)
             model.save_pretrained(
                 tmpdirname,
-                use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+                token=os.environ.get("HF_AUTH_TOKEN", None),
                 push_to_hub=True,
                 repository_id=self.HUB_REPOSITORY,
                 private=True,
@@ -1126,7 +1126,7 @@ def test_push_ort_model_with_external_data_to_hub(self):
             model = ORTModelForSequenceClassification.from_pretrained(MODEL_NAMES["bert"], export=True)
             model.save_pretrained(
                 tmpdirname + "/onnx",
-                use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+                token=os.environ.get("HF_AUTH_TOKEN", None),
                 repository_id=MODEL_NAMES["bert"].split("/")[-1] + "-onnx",
                 private=True,
                 push_to_hub=True,
@@ -1136,7 +1136,7 @@ def test_push_ort_model_with_external_data_to_hub(self):
             model = ORTModelForSequenceClassification.from_pretrained(
                 MODEL_NAMES["bert"] + "-onnx",
                 export=False,
-                use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+                token=os.environ.get("HF_AUTH_TOKEN", None),
             )
             os.environ.pop("FORCE_ONNX_EXTERNAL_DATA")
@@ -1147,7 +1147,7 @@ def test_push_decoder_model_with_external_data_to_hub(self):
             model = ORTModelForCausalLM.from_pretrained(MODEL_NAMES["gpt2"], export=True)
             model.save_pretrained(
                 tmpdirname + "/onnx",
-                use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+                token=os.environ.get("HF_AUTH_TOKEN", None),
                 repository_id=MODEL_NAMES["gpt2"].split("/")[-1] + "-onnx",
                 private=True,
                 push_to_hub=True,
@@ -1157,7 +1157,7 @@ def test_push_decoder_model_with_external_data_to_hub(self):
             model = ORTModelForCausalLM.from_pretrained(
                 MODEL_NAMES["gpt2"] + "-onnx",
                 export=False,
-                use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+                token=os.environ.get("HF_AUTH_TOKEN", None),
             )
             os.environ.pop("FORCE_ONNX_EXTERNAL_DATA")
@@ -1168,7 +1168,7 @@ def test_push_seq2seq_model_with_external_data_to_hub(self):
             model = ORTModelForSeq2SeqLM.from_pretrained(MODEL_NAMES["mbart"], export=True)
             model.save_pretrained(
                 tmpdirname + "/onnx",
-                use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+                token=os.environ.get("HF_AUTH_TOKEN", None),
                 repository_id=MODEL_NAMES["mbart"].split("/")[-1] + "-onnx",
                 private=True,
                 push_to_hub=True,
@@ -1178,7 +1178,7 @@ def test_push_seq2seq_model_with_external_data_to_hub(self):
             model = ORTModelForSeq2SeqLM.from_pretrained(
                 MODEL_NAMES["mbart"] + "-onnx",
                 export=False,
-                use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+                token=os.environ.get("HF_AUTH_TOKEN", None),
             )
             os.environ.pop("FORCE_ONNX_EXTERNAL_DATA")
@@ -1189,7 +1189,7 @@ def test_push_stable_diffusion_model_with_external_data_to_hub(self):
             model = ORTStableDiffusionPipeline.from_pretrained(MODEL_NAMES["stable-diffusion"], export=True)
             model.save_pretrained(
                 tmpdirname + "/onnx",
-                use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+                token=os.environ.get("HF_AUTH_TOKEN", None),
                 repository_id=MODEL_NAMES["stable-diffusion"].split("/")[-1] + "-onnx",
                 private=True,
                 push_to_hub=True,
@@ -1199,7 +1199,7 @@ def test_push_stable_diffusion_model_with_external_data_to_hub(self):
             model = ORTStableDiffusionPipeline.from_pretrained(
                 MODEL_NAMES["stable-diffusion"] + "-onnx",
                 export=False,
-                use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+                token=os.environ.get("HF_AUTH_TOKEN", None),
             )
             os.environ.pop("FORCE_ONNX_EXTERNAL_DATA")
diff --git a/tests/test_modeling_base.py b/tests/test_modeling_base.py
index 4bee079fbb8..34e66927632 100644
--- a/tests/test_modeling_base.py
+++ b/tests/test_modeling_base.py
@@ -48,7 +48,7 @@ def test_push_to_hub(self):
 
         model.save_pretrained(
             tmpdirname,
-            use_auth_token=os.environ.get("HF_AUTH_TOKEN", None),
+            token=os.environ.get("HF_AUTH_TOKEN", None),
             push_to_hub=True,
             repository_id="unit_test_save_model",
         )

From e06b59e7b678127d8553f3ab507a255983de9272 Mon Sep 17 00:00:00 2001
From: IlyasMoutawwakil
Date: Fri, 26 Apr 2024 10:40:38 +0200
Subject: [PATCH 2/5] fix

---
 optimum/modeling_base.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/optimum/modeling_base.py b/optimum/modeling_base.py
index 92fbc750dc3..f18ff7596b9 100644
--- a/optimum/modeling_base.py
+++ b/optimum/modeling_base.py
@@ -225,7 +225,7 @@ def _load_config(
         config_name_or_path: Union[str, os.PathLike],
         revision: Optional[str] = None,
         cache_dir: str = HUGGINGFACE_HUB_CACHE,
-        use_auth_token: Optional[Union[bool, str]] = False,
+        use_auth_token: Optional[Union[bool, str]] = None,
         token: Optional[Union[bool, str]] = None,
         force_download: bool = False,
         subfolder: str = "",

From 5b66cae632af51ea611a2e30e6a7f553ba90a5d1 Mon Sep 17 00:00:00 2001
From: IlyasMoutawwakil
Date: Fri, 26 Apr 2024 10:43:11 +0200
Subject: [PATCH 3/5] style

---
 optimum/modeling_base.py                  | 1 -
 optimum/onnxruntime/modeling_decoder.py   | 2 --
 optimum/onnxruntime/modeling_diffusion.py | 2 --
 optimum/onnxruntime/modeling_ort.py       | 5 -----
 optimum/onnxruntime/modeling_seq2seq.py   | 2 --
 5 files changed, 12 deletions(-)

diff --git a/optimum/modeling_base.py b/optimum/modeling_base.py
index f18ff7596b9..f4bdaefae37 100644
--- a/optimum/modeling_base.py
+++ b/optimum/modeling_base.py
@@ -231,7 +231,6 @@ def _load_config(
         subfolder: str = "",
         trust_remote_code: bool = False,
     ) -> PretrainedConfig:
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
diff --git a/optimum/onnxruntime/modeling_decoder.py b/optimum/onnxruntime/modeling_decoder.py
index bde882234f4..6cecca3184c 100644
--- a/optimum/onnxruntime/modeling_decoder.py
+++ b/optimum/onnxruntime/modeling_decoder.py
@@ -423,7 +423,6 @@ def _from_pretrained(
         model_save_dir: Optional[Union[str, Path, TemporaryDirectory]] = None,
         **kwargs,
     ) -> "ORTModelForCausalLM":
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
@@ -602,7 +601,6 @@ def _from_transformers(
         use_io_binding: Optional[bool] = None,
         task: Optional[str] = None,
     ) -> "ORTModelForCausalLM":
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
diff --git a/optimum/onnxruntime/modeling_diffusion.py b/optimum/onnxruntime/modeling_diffusion.py
index c5f96f16849..f4e54752115 100644
--- a/optimum/onnxruntime/modeling_diffusion.py
+++ b/optimum/onnxruntime/modeling_diffusion.py
@@ -289,7 +289,6 @@ def _from_pretrained(
         model_save_dir: Optional[Union[str, Path, TemporaryDirectory]] = None,
         **kwargs,
     ):
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
                 FutureWarning,
             )
@@ -400,7 +399,6 @@ def _from_transformers(
         use_io_binding: Optional[bool] = None,
         task: Optional[str] = None,
     ) -> "ORTStableDiffusionPipeline":
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
diff --git a/optimum/onnxruntime/modeling_ort.py b/optimum/onnxruntime/modeling_ort.py
index 125e770de57..b6e8cdc11de 100644
--- a/optimum/onnxruntime/modeling_ort.py
+++ b/optimum/onnxruntime/modeling_ort.py
@@ -415,7 +415,6 @@ def infer_onnx_filename(
         revision: Optional[str] = None,
         fail_if_not_found: bool = True,
     ) -> str:
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
@@ -474,7 +473,6 @@ def _from_pretrained(
         model_save_dir: Optional[Union[str, Path, TemporaryDirectory]] = None,
         **kwargs,
     ) -> "ORTModel":
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
@@ -613,7 +611,6 @@ def _export(
         use_io_binding: Optional[bool] = None,
         task: Optional[str] = None,
     ) -> "ORTModel":
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
@@ -915,7 +912,6 @@ def _cached_file(
         subfolder: str = "",
         local_files_only: bool = False,
     ):
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
@@ -1096,7 +1092,6 @@ def _export(
         use_io_binding: Optional[bool] = None,
         task: Optional[str] = None,
     ) -> "ORTModel":
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
diff --git a/optimum/onnxruntime/modeling_seq2seq.py b/optimum/onnxruntime/modeling_seq2seq.py
index e5a26f8346c..89a0ae44d58 100644
--- a/optimum/onnxruntime/modeling_seq2seq.py
+++ b/optimum/onnxruntime/modeling_seq2seq.py
@@ -795,7 +795,6 @@ def _from_pretrained(
         model_save_dir: Optional[Union[str, Path, TemporaryDirectory]] = None,
         **kwargs,
     ):
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
@@ -1047,7 +1047,6 @@ def _from_transformers(
         use_io_binding: Optional[bool] = None,
         task: Optional[str] = None,
     ) -> "ORTModelForConditionalGeneration":
-
         if use_auth_token is not None:
             warnings.warn(
                 "The `use_auth_token` argument is deprecated and will be removed soon. Please use the `token` argument instead.",
                 FutureWarning,
             )

From aea1601de252b735c294c87afa3cfa7a38d06ebc Mon Sep 17 00:00:00 2001
From: IlyasMoutawwakil
Date: Tue, 25 Jun 2024 13:25:34 +0200
Subject: [PATCH 4/5] import offline error

---
 optimum/exporters/tasks.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/optimum/exporters/tasks.py b/optimum/exporters/tasks.py
index 7cbe374eb06..18426981863 100644
--- a/optimum/exporters/tasks.py
+++ b/optimum/exporters/tasks.py
@@ -25,7 +25,7 @@
 import huggingface_hub
 from huggingface_hub.constants import HUGGINGFACE_HUB_CACHE
-from huggingface_hub.utils import OfflineModeIsEnabled
+from huggingface_hub.errors import OfflineModeIsEnabled
 from packaging import version
 from requests.exceptions import ConnectionError as RequestsConnectionError
 from transformers import AutoConfig, PretrainedConfig, is_tf_available, is_torch_available

From 25702ff3427c5886a972e48ac276ce44ca83aba7 Mon Sep 17 00:00:00 2001
From: IlyasMoutawwakil
Date: Tue, 25 Jun 2024 14:17:45 +0200
Subject: [PATCH 5/5] style

---
 tests/onnxruntime/test_modeling.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/tests/onnxruntime/test_modeling.py b/tests/onnxruntime/test_modeling.py
index f09a190531f..e15a7e25d09 100644
--- a/tests/onnxruntime/test_modeling.py
+++ b/tests/onnxruntime/test_modeling.py
@@ -939,9 +939,7 @@ def test_load_model_from_hub_private(self):
         if token is None:
             self.skipTest("Test requires a token for fxmartyclone in the environment variable `HF_HUB_READ_TOKEN`.")
 
-        model = ORTModelForCustomTasks.from_pretrained(
-            "optimum-internal-testing/tiny-random-phi-private", token=token
-        )
+        model = ORTModelForCustomTasks.from_pretrained("optimum-internal-testing/tiny-random-phi-private", token=token)
         self.assertIsInstance(model.model, onnxruntime.InferenceSession)
         self.assertIsInstance(model.config, PretrainedConfig)
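
The five patches above repeat one shim in every entry point they touch: warn when `use_auth_token` is passed, reject the ambiguous case where both arguments are given, then forward the old value to `token`. For reference, a minimal self-contained sketch of that pattern follows. The helper name `resolve_token` is hypothetical and is not introduced by this series, which deliberately inlines the checks so each public signature stays self-documenting during the deprecation window.

import warnings
from typing import Optional, Union


def resolve_token(
    use_auth_token: Optional[Union[bool, str]] = None,
    token: Optional[Union[bool, str]] = None,
) -> Optional[Union[bool, str]]:
    # Mirror of the inlined checks in the patches: warn on the deprecated
    # argument, forbid passing both, then treat the old value as the new one.
    if use_auth_token is not None:
        warnings.warn(
            "The `use_auth_token` argument is deprecated and will be removed soon. "
            "Please use the `token` argument instead.",
            FutureWarning,
        )
        if token is not None:
            raise ValueError("You cannot use both `use_auth_token` and `token` arguments at the same time.")
        token = use_auth_token
    return token

Downstream callers migrate by renaming the keyword, e.g. `from_pretrained(model_id, token=...)` instead of `from_pretrained(model_id, use_auth_token=...)`; passing `token=True` still means reading the token stored by `huggingface-cli login`.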