From dd5cbe335a2f18612224cbebe15f393f6f539b3b Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 08:02:02 +0000 Subject: [PATCH 01/26] v1 tags --- src/transformers/modeling_utils.py | 42 +++++++++++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 05d74d65425216..631e23a067c310 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -1163,6 +1163,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix _no_split_modules = None _skip_keys_device_placement = None _keep_in_fp32_modules = None + _model_tags = None # a list of `re` patterns of `state_dict` keys that should be removed from the list of missing # keys we find (keys inside the model but not in the checkpoint) and avoid unnecessary warnings. @@ -1225,6 +1226,9 @@ def __init__(self, config: PretrainedConfig, *inputs, **kwargs): # when a different component (e.g. language_model) is used. self._keep_in_fp32_modules = copy.copy(self.__class__._keep_in_fp32_modules) + # Default the model tags with `"transformers"` + self._model_tags = ["transformers"] + def post_init(self): """ A method executed at the end of each Transformer model initialization, to execute code that needs the model's @@ -1239,6 +1243,24 @@ def _backward_compatibility_gradient_checkpointing(self): # Remove the attribute now that is has been consumed, so it's no saved in the config. delattr(self.config, "gradient_checkpointing") + def set_model_tags(self, tags: Union[List[str], str]) -> None: + r""" + Manually set the model tags with `tags` + + Args: + tags (`Union[List[str], str]`): + The desired tags to inject in the model + """ + if isinstance(tags, str): + tags = [tags] + + if self._model_tags is None: + self._model_tags = [] + + for tag in tags: + if tag not in self._model_tags: + self._model_tags.append(tag) + @classmethod def _from_config(cls, config, **kwargs): """ @@ -2401,12 +2423,30 @@ def save_pretrained( # Save the model for shard_file, shard in shards.items(): if safe_serialization: + # Retrieve model tags and convert it to a dict of strings + model_tags = self._model_tags + metadata = {"format": "pt"} + + if model_tags is not None: + model_tags = {i: model_tag for i, model_tag in enumerate(model_tags)} + # Convert as strings + metadata["model_tags"] = json.dumps(model_tags, indent=2, sort_keys=True) + # At some point we will need to deal better with save_function (used for TPU and other distributed # joyfulness), but for now this enough. - safe_save_file(shard, os.path.join(save_directory, shard_file), metadata={"format": "pt"}) + safe_save_file( + shard, + os.path.join(save_directory, shard_file), + metadata=metadata, + ) else: save_function(shard, os.path.join(save_directory, shard_file)) + if self._model_tags is not None: + logger.warning( + "Detected tags in the model but you are not using safe_serialization, they will be silently ignored. To properly save these tags you should use safe serialization." 
+ ) + if index is None: weights_file_name = SAFE_WEIGHTS_NAME if safe_serialization else WEIGHTS_NAME path_to_weights = os.path.join(save_directory, _add_variant(weights_file_name, variant)) From f78ed3186bbe0aec976585cf5d173adb4f1f0224 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 08:13:07 +0000 Subject: [PATCH 02/26] remove unneeded conversion --- src/transformers/modeling_utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 631e23a067c310..9df9b19db55116 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -2428,7 +2428,6 @@ def save_pretrained( metadata = {"format": "pt"} if model_tags is not None: - model_tags = {i: model_tag for i, model_tag in enumerate(model_tags)} # Convert as strings metadata["model_tags"] = json.dumps(model_tags, indent=2, sort_keys=True) From da002749bfb0f30467c50c6cbd3ae01b2c758421 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 09:35:20 +0000 Subject: [PATCH 03/26] v2 --- src/transformers/modeling_utils.py | 29 +++++++++++++++++--------- src/transformers/utils/hub.py | 33 ++++++++++++++++++++++++++++++ 2 files changed, 52 insertions(+), 10 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 9df9b19db55116..b03ea44b880f1d 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -87,7 +87,7 @@ replace_return_docstrings, strtobool, ) -from .utils.hub import convert_file_size_to_int, get_checkpoint_shard_files +from .utils.hub import convert_file_size_to_int, create_and_tag_model_card, get_checkpoint_shard_files from .utils.import_utils import ( ENV_VARS_TRUE_VALUES, is_sagemaker_mp_enabled, @@ -2423,20 +2423,12 @@ def save_pretrained( # Save the model for shard_file, shard in shards.items(): if safe_serialization: - # Retrieve model tags and convert it to a dict of strings - model_tags = self._model_tags - metadata = {"format": "pt"} - - if model_tags is not None: - # Convert as strings - metadata["model_tags"] = json.dumps(model_tags, indent=2, sort_keys=True) - # At some point we will need to deal better with save_function (used for TPU and other distributed # joyfulness), but for now this enough. safe_save_file( shard, os.path.join(save_directory, shard_file), - metadata=metadata, + metadata={"format": "pt"}, ) else: save_function(shard, os.path.join(save_directory, shard_file)) @@ -2464,6 +2456,13 @@ def save_pretrained( ) if push_to_hub: + # Eventually create an empty model card + model_card = create_and_tag_model_card(repo_id, self._model_tags) + + # Update model card if needed: + if model_card is not None: + model_card.save(os.path.join(save_directory, "README.md")) + self._upload_modified_files( save_directory, repo_id, @@ -2472,6 +2471,16 @@ def save_pretrained( token=token, ) + @wraps(PushToHubMixin.push_to_hub) + def push_to_hub(self, *args, **kwargs): + if "tags" not in kwargs: + kwargs["tags"] = self._model_tags + elif "tags" in kwargs and self._model_tags is not None: + logger.warning( + "You manually passed `tags` to `push_to_hub` method and the model has already some tags set, we will use the tags that you passed." + ) + return super().push_to_hub(*args, **kwargs) + def get_memory_footprint(self, return_buffers=True): r""" Get the memory footprint of a model. This will return the memory footprint of the current model in bytes. 
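To illustrate how the `push_to_hub` override added above and the `create_and_tag_model_card` helper introduced in the `hub.py` diff below fit together, here is a minimal usage sketch — not part of the patch itself. The checkpoint and repo id are placeholders, and it assumes an authenticated user.

```python
from transformers import AutoModel

model = AutoModel.from_pretrained("bert-base-cased")

# At this point in the series, `_model_tags` defaults to ["transformers"] and
# `set_model_tags` (PATCH 01) appends to it; a bare string is wrapped in a list.
model.set_model_tags("custom-bert")

# The `push_to_hub` wrapper above injects `_model_tags` into the `tags` kwarg,
# and `PushToHubMixin.push_to_hub` hands them to `create_and_tag_model_card`,
# which writes them into the README metadata of the pushed repo.
model.push_to_hub("my-user/my-custom-bert")  # placeholder repo id
```

When `tags` is passed explicitly at this stage, the wrapper keeps only the user-provided tags and emits a warning; PATCHES 11 and 24 later rework this to merge the two sets instead.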
diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index 83ef69b5f37213..cec0586b436dbb 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -33,6 +33,8 @@ from huggingface_hub import ( _CACHED_NO_EXIST, CommitOperationAdd, + ModelCard, + ModelCardData, constants, create_branch, create_commit, @@ -762,6 +764,7 @@ def push_to_hub( safe_serialization: bool = True, revision: str = None, commit_description: str = None, + tags: List[str] = None, **deprecated_kwargs, ) -> str: """ @@ -795,6 +798,8 @@ def push_to_hub( Branch to push the uploaded files to. commit_description (`str`, *optional*): The description of the commit that will be created + tags (`List[str]`, *optional*): + List of tags to push on the Hub. Examples: @@ -855,6 +860,9 @@ def push_to_hub( repo_id, private=private, token=token, repo_url=repo_url, organization=organization ) + # Create a new empty model card and eventually tag it + model_card = create_and_tag_model_card(repo_id, tags) + if use_temp_dir is None: use_temp_dir = not os.path.isdir(working_dir) @@ -864,6 +872,10 @@ def push_to_hub( # Save all files. self.save_pretrained(work_dir, max_shard_size=max_shard_size, safe_serialization=safe_serialization) + # Update model card if needed: + if model_card is not None: + model_card.save(os.path.join(work_dir, "README.md")) + return self._upload_modified_files( work_dir, repo_id, @@ -1081,6 +1093,27 @@ def extract_info_from_url(url): return {"repo": cache_repo, "revision": revision, "filename": filename} +def create_and_tag_model_card(repo_id, tags=None): + """ + Creates a dummy model card and tags it. + """ + model_card = None + + try: + # Check if the model card is present on the remote repo + model_card = ModelCard.load(repo_id) + except EntryNotFoundError: + # Otherwise create a simple model card from template + card_data = ModelCardData(language="en", tags=[]) + model_card = ModelCard.from_template(card_data) + + if model_card is not None and tags is not None: + for model_tag in tags: + model_card.data.tags.append(model_tag) + + return model_card + + def clean_files_for(file): """ Remove, if they exist, file, file.json and file.lock From 1180585a875ef32eee2b285cfdac1d03518bde27 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 09:45:08 +0000 Subject: [PATCH 04/26] rm unneeded warning --- src/transformers/modeling_utils.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index b03ea44b880f1d..859d95de7476fd 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -2433,11 +2433,6 @@ def save_pretrained( else: save_function(shard, os.path.join(save_directory, shard_file)) - if self._model_tags is not None: - logger.warning( - "Detected tags in the model but you are not using safe_serialization, they will be silently ignored. To properly save these tags you should use safe serialization." 
- ) - if index is None: weights_file_name = SAFE_WEIGHTS_NAME if safe_serialization else WEIGHTS_NAME path_to_weights = os.path.join(save_directory, _add_variant(weights_file_name, variant)) From 4b822559d1675f5bd23cd7c57f9e4af9fc200832 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 12:33:38 +0000 Subject: [PATCH 05/26] add more utility methods --- src/transformers/modeling_utils.py | 51 ++++++++++++++++++++++++++---- 1 file changed, 45 insertions(+), 6 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 859d95de7476fd..488165047fdb9a 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -1159,11 +1159,12 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix config_class = None base_model_prefix = "" main_input_name = "input_ids" + model_tags = None + _auto_class = None _no_split_modules = None _skip_keys_device_placement = None _keep_in_fp32_modules = None - _model_tags = None # a list of `re` patterns of `state_dict` keys that should be removed from the list of missing # keys we find (keys inside the model but not in the checkpoint) and avoid unnecessary warnings. @@ -1243,9 +1244,27 @@ def _backward_compatibility_gradient_checkpointing(self): # Remove the attribute now that is has been consumed, so it's no saved in the config. delattr(self.config, "gradient_checkpointing") + def add_model_tags(self, tags: Union[List[str], str]) -> None: + r""" + Add all tags in `tags` to `model_tags`. + + Args: + tags (`Union[List[str], str]`): + The desired tags to inject in the model + """ + if isinstance(tags, str): + tags = [tags] + + if self.model_tags is None: + self.model_tags = [] + + for tag in tags: + if tag not in self.model_tags: + self.model_tags.append(tag) + def set_model_tags(self, tags: Union[List[str], str]) -> None: r""" - Manually set the model tags with `tags` + Manually force-set the model tags with `tags` Args: tags (`Union[List[str], str]`): @@ -1254,12 +1273,32 @@ def set_model_tags(self, tags: Union[List[str], str]) -> None: if isinstance(tags, str): tags = [tags] - if self._model_tags is None: - self._model_tags = [] + self.model_tags = tags + + def reset_model_tags(self) -> None: + r""" + Manually reset the model tags with an empty list + """ + if self.model_tags is not None: + self.model_tags = [] + + def remove_model_tags(self, tags: Union[List[str], str]) -> None: + r""" + Manually remove all elements of `tags` in the model tags + + Args: + tags (`Union[List[str], str]`): + The desired tags to remove from the model + """ + if isinstance(tags, str): + tags = [tags] + + if self.model_tags is None: + return for tag in tags: - if tag not in self._model_tags: - self._model_tags.append(tag) + if tag in self.model_tags: + self.model_tags.remove(tag) @classmethod def _from_config(cls, config, **kwargs): From 4c7806eb52b706dd3fae9ba01cd1004e5e267ffb Mon Sep 17 00:00:00 2001 From: Younes Belkada <49240599+younesbelkada@users.noreply.github.com> Date: Tue, 9 Jan 2024 13:34:16 +0100 Subject: [PATCH 06/26] Update src/transformers/utils/hub.py Co-authored-by: amyeroberts <22614925+amyeroberts@users.noreply.github.com> --- src/transformers/utils/hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index cec0586b436dbb..96de6cf151f846 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -1093,7 +1093,7 @@ def extract_info_from_url(url): 
return {"repo": cache_repo, "revision": revision, "filename": filename} -def create_and_tag_model_card(repo_id, tags=None): +def create_and_tag_model_card(repo_id: str, tags: Optional[List[str]] = None): """ Creates a dummy model card and tags it. """ From 8b897964356fdb985859c407d9a0f027d6c78805 Mon Sep 17 00:00:00 2001 From: Younes Belkada <49240599+younesbelkada@users.noreply.github.com> Date: Tue, 9 Jan 2024 13:37:52 +0100 Subject: [PATCH 07/26] Update src/transformers/utils/hub.py Co-authored-by: Lucain --- src/transformers/utils/hub.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index 96de6cf151f846..030d209b8ce077 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -1109,7 +1109,8 @@ def create_and_tag_model_card(repo_id: str, tags: Optional[List[str]] = None): if model_card is not None and tags is not None: for model_tag in tags: - model_card.data.tags.append(model_tag) + if model_tag not in model_card.data.tags: + model_card.data.tags.append(model_tag) return model_card From e73dc7b5b7b37a60ae96f1c06ff933751ef69dd6 Mon Sep 17 00:00:00 2001 From: Younes Belkada <49240599+younesbelkada@users.noreply.github.com> Date: Tue, 9 Jan 2024 13:38:26 +0100 Subject: [PATCH 08/26] Update src/transformers/utils/hub.py Co-authored-by: Lucain --- src/transformers/utils/hub.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index 030d209b8ce077..9254233fdc339c 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -1097,8 +1097,6 @@ def create_and_tag_model_card(repo_id: str, tags: Optional[List[str]] = None): """ Creates a dummy model card and tags it. """ - model_card = None - try: # Check if the model card is present on the remote repo model_card = ModelCard.load(repo_id) From fbef2dec0d82970abeeb33476fee9f8296f9a11d Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 13:38:32 +0000 Subject: [PATCH 09/26] more enhancements --- src/transformers/modeling_utils.py | 9 +++------ src/transformers/trainer.py | 12 ++++++++++++ src/transformers/utils/hub.py | 5 +++-- 3 files changed, 18 insertions(+), 8 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 488165047fdb9a..dda1086e13405f 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -1227,9 +1227,6 @@ def __init__(self, config: PretrainedConfig, *inputs, **kwargs): # when a different component (e.g. language_model) is used. 
self._keep_in_fp32_modules = copy.copy(self.__class__._keep_in_fp32_modules) - # Default the model tags with `"transformers"` - self._model_tags = ["transformers"] - def post_init(self): """ A method executed at the end of each Transformer model initialization, to execute code that needs the model's @@ -2491,7 +2488,7 @@ def save_pretrained( if push_to_hub: # Eventually create an empty model card - model_card = create_and_tag_model_card(repo_id, self._model_tags) + model_card = create_and_tag_model_card(repo_id, self.model_tags) # Update model card if needed: if model_card is not None: @@ -2508,8 +2505,8 @@ def save_pretrained( @wraps(PushToHubMixin.push_to_hub) def push_to_hub(self, *args, **kwargs): if "tags" not in kwargs: - kwargs["tags"] = self._model_tags - elif "tags" in kwargs and self._model_tags is not None: + kwargs["tags"] = self.model_tags + elif "tags" in kwargs and self.model_tags is not None: logger.warning( "You manually passed `tags` to `push_to_hub` method and the model has already some tags set, we will use the tags that you passed." ) diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py index 2bac51fdf04910..7dccff758c2234 100755 --- a/src/transformers/trainer.py +++ b/src/transformers/trainer.py @@ -3684,6 +3684,18 @@ def push_to_hub(self, commit_message: Optional[str] = "End of training", blockin if not self.is_world_process_zero(): return + # Add additional tags in the case the model has already some tags and users pass + # "tags" argument to `push_to_hub` so that trainer automatically handles internal tags + # from all models since Trainer does not call `model.push_to_hub`. + if "tags" in kwargs and getattr(self.model, "model_tags", None) is not None: + # If it is a string, convert it to an array + if isinstance(kwargs["tags"], str): + kwargs["tags"] = [kwargs["tags"]] + + for model_tag in self.model.model_tags: + if model_tag not in kwargs["tags"]: + kwargs["tags"].append(model_tag) + self.create_model_card(model_name=model_name, **kwargs) # Wait for the current upload to be finished. diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index 9254233fdc339c..305577524830c6 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -1102,8 +1102,9 @@ def create_and_tag_model_card(repo_id: str, tags: Optional[List[str]] = None): model_card = ModelCard.load(repo_id) except EntryNotFoundError: # Otherwise create a simple model card from template - card_data = ModelCardData(language="en", tags=[]) - model_card = ModelCard.from_template(card_data) + model_description = "This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated." 
+ card_data = ModelCardData(tags=[] if tags is None else tags, library_name="transformers") + model_card = ModelCard.from_template(card_data, model_description=model_description) if model_card is not None and tags is not None: for model_tag in tags: From 0e4daada3a7282edbede8d86ce3552717aa9b9e7 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 13:40:19 +0000 Subject: [PATCH 10/26] oops --- src/transformers/utils/hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index 305577524830c6..0f07116d2f3d17 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -1106,7 +1106,7 @@ def create_and_tag_model_card(repo_id: str, tags: Optional[List[str]] = None): card_data = ModelCardData(tags=[] if tags is None else tags, library_name="transformers") model_card = ModelCard.from_template(card_data, model_description=model_description) - if model_card is not None and tags is not None: + if tags is not None: for model_tag in tags: if model_tag not in model_card.data.tags: model_card.data.tags.append(model_tag) From c19e7518e17804ff444ebee5cb2e4301e0d0586d Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 13:42:33 +0000 Subject: [PATCH 11/26] merge tags --- src/transformers/modeling_utils.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index dda1086e13405f..90c57bfa2dc106 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -2507,9 +2507,10 @@ def push_to_hub(self, *args, **kwargs): if "tags" not in kwargs: kwargs["tags"] = self.model_tags elif "tags" in kwargs and self.model_tags is not None: - logger.warning( - "You manually passed `tags` to `push_to_hub` method and the model has already some tags set, we will use the tags that you passed." 
- ) + for model_tag in self.model_tags: + # merge the tags together + if model_tag not in kwargs["tags"]: + kwargs["tags"].append(model_tag) return super().push_to_hub(*args, **kwargs) def get_memory_footprint(self, return_buffers=True): From eb933713bd5ba76758b8ad133324d3c3769c1b62 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 13:46:20 +0000 Subject: [PATCH 12/26] clean up --- src/transformers/modeling_utils.py | 3 +-- src/transformers/utils/hub.py | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 90c57bfa2dc106..9eff541c22e0f9 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -2491,8 +2491,7 @@ def save_pretrained( model_card = create_and_tag_model_card(repo_id, self.model_tags) # Update model card if needed: - if model_card is not None: - model_card.save(os.path.join(save_directory, "README.md")) + model_card.save(os.path.join(save_directory, "README.md")) self._upload_modified_files( save_directory, diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index 0f07116d2f3d17..f18471026bfed8 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -873,8 +873,7 @@ def push_to_hub( self.save_pretrained(work_dir, max_shard_size=max_shard_size, safe_serialization=safe_serialization) # Update model card if needed: - if model_card is not None: - model_card.save(os.path.join(work_dir, "README.md")) + model_card.save(os.path.join(work_dir, "README.md")) return self._upload_modified_files( work_dir, From 1fe93b36339121921efd4cdc45e529d01ded8e7a Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Tue, 9 Jan 2024 13:49:16 +0000 Subject: [PATCH 13/26] revert unneeded change --- src/transformers/modeling_utils.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 9eff541c22e0f9..ac4cea2da8d70d 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -2461,11 +2461,7 @@ def save_pretrained( if safe_serialization: # At some point we will need to deal better with save_function (used for TPU and other distributed # joyfulness), but for now this enough. - safe_save_file( - shard, - os.path.join(save_directory, shard_file), - metadata={"format": "pt"}, - ) + safe_save_file(shard, os.path.join(save_directory, shard_file), metadata={"format": "pt"}) else: save_function(shard, os.path.join(save_directory, shard_file)) From 6cfd6f58aab36674ebaf3e76b661321b17a2009e Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Wed, 10 Jan 2024 13:21:30 +0000 Subject: [PATCH 14/26] add extensive docs --- src/transformers/modeling_utils.py | 49 ++++++++---------------------- 1 file changed, 12 insertions(+), 37 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index f9bba21ae7b95c..761a1ed212aed1 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -1257,59 +1257,34 @@ def _backward_compatibility_gradient_checkpointing(self): def add_model_tags(self, tags: Union[List[str], str]) -> None: r""" - Add all tags in `tags` to `model_tags`. 
+ Add custom tags into the model that gets pushed in 🤗 Hub Args: tags (`Union[List[str], str]`): The desired tags to inject in the model - """ - if isinstance(tags, str): - tags = [tags] - - if self.model_tags is None: - self.model_tags = [] - - for tag in tags: - if tag not in self.model_tags: - self.model_tags.append(tag) - - def set_model_tags(self, tags: Union[List[str], str]) -> None: - r""" - Manually force-set the model tags with `tags` - Args: - tags (`Union[List[str], str]`): - The desired tags to inject in the model - """ - if isinstance(tags, str): - tags = [tags] + Examples: - self.model_tags = tags + ```python + from transformers import AutoModel - def reset_model_tags(self) -> None: - r""" - Manually reset the model tags with an empty list - """ - if self.model_tags is not None: - self.model_tags = [] + model = AutoModel.from_pretrained("bert-base-cased") - def remove_model_tags(self, tags: Union[List[str], str]) -> None: - r""" - Manually remove all elements of `tags` in the model tags + model.add_model_tags(["custom", "custom-bert"]) - Args: - tags (`Union[List[str], str]`): - The desired tags to remove from the model + # Push the model to your namespace with the name "my-custom-bert". + model.push_to_hub("my-custom-bert") + ``` """ if isinstance(tags, str): tags = [tags] if self.model_tags is None: - return + self.model_tags = [] for tag in tags: - if tag in self.model_tags: - self.model_tags.remove(tag) + if tag not in self.model_tags: + self.model_tags.append(tag) @classmethod def _from_config(cls, config, **kwargs): From 40a1d4b99b5c86f2f828b1cbdc93bae38ace4f3d Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Wed, 10 Jan 2024 13:22:14 +0000 Subject: [PATCH 15/26] more docs --- src/transformers/modeling_utils.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 761a1ed212aed1..73842131dcdbab 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -1257,7 +1257,8 @@ def _backward_compatibility_gradient_checkpointing(self): def add_model_tags(self, tags: Union[List[str], str]) -> None: r""" - Add custom tags into the model that gets pushed in 🤗 Hub + Add custom tags into the model that gets pushed in 🤗 Hub. Will + not overwrite existing tags in the model. Args: tags (`Union[List[str], str]`): From dc319411fb29a18e406bc980f8daf1a806370173 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Wed, 10 Jan 2024 13:38:06 +0000 Subject: [PATCH 16/26] more kwargs --- src/transformers/modeling_utils.py | 5 ++++- src/transformers/utils/hub.py | 27 +++++++++++++++++++++++---- 2 files changed, 27 insertions(+), 5 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 73842131dcdbab..4d01035ce10f2b 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -2247,6 +2247,7 @@ def save_pretrained( Additional key word arguments passed along to the [`~utils.PushToHubMixin.push_to_hub`] method. 
""" use_auth_token = kwargs.pop("use_auth_token", None) + ignore_metadata_errors = kwargs.pop("ignore_metadata_errors", False) if use_auth_token is not None: warnings.warn( @@ -2474,7 +2475,9 @@ def save_pretrained( if push_to_hub: # Eventually create an empty model card - model_card = create_and_tag_model_card(repo_id, self.model_tags) + model_card = create_and_tag_model_card( + repo_id, self.model_tags, token=token, ignore_metadata_errors=ignore_metadata_errors + ) # Update model card if needed: model_card.save(os.path.join(save_directory, "README.md")) diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index f18471026bfed8..658c95d3160a08 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -816,6 +816,7 @@ def push_to_hub( ``` """ use_auth_token = deprecated_kwargs.pop("use_auth_token", None) + ignore_metadata_errors = deprecated_kwargs.pop("ignore_metadata_errors", False) if use_auth_token is not None: warnings.warn( "The `use_auth_token` argument is deprecated and will be removed in v5 of Transformers. Please use `token` instead.", @@ -861,7 +862,9 @@ def push_to_hub( ) # Create a new empty model card and eventually tag it - model_card = create_and_tag_model_card(repo_id, tags) + model_card = create_and_tag_model_card( + repo_id, tags, token=token, ignore_metadata_errors=ignore_metadata_errors + ) if use_temp_dir is None: use_temp_dir = not os.path.isdir(working_dir) @@ -1092,13 +1095,29 @@ def extract_info_from_url(url): return {"repo": cache_repo, "revision": revision, "filename": filename} -def create_and_tag_model_card(repo_id: str, tags: Optional[List[str]] = None): +def create_and_tag_model_card( + repo_id: str, + tags: Optional[List[str]] = None, + token: Optional[str] = None, + ignore_metadata_errors: bool = False, +): """ - Creates a dummy model card and tags it. + Creates or loads an existing model card and tags it. + + Args: + repo_id (`str`): + The repo_id where to look for the model card. + tags (`List[str]`, *optional*): + The list of tags to add in the model card + token (`str`, *optional*): + Authentication token, obtained with `huggingface_hub.HfApi.login` method. Will default to the stored token. + ignore_metadata_errors (`str`): + If True, errors while parsing the metadata section will be ignored. Some information might be lost during + the process. Use it at your own risk. """ try: # Check if the model card is present on the remote repo - model_card = ModelCard.load(repo_id) + model_card = ModelCard.load(repo_id, token=token, ignore_metadata_errors=ignore_metadata_errors) except EntryNotFoundError: # Otherwise create a simple model card from template model_description = "This is the model card of a 🤗 transformers model that has been pushed on the Hub. This model card has been automatically generated." 
From acd676b2d54bd00af68d4f397db8b12c97f06002 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Wed, 10 Jan 2024 13:46:41 +0000 Subject: [PATCH 17/26] add test --- tests/test_modeling_utils.py | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/tests/test_modeling_utils.py b/tests/test_modeling_utils.py index 1f632882f02b38..3481d9d5bef90b 100755 --- a/tests/test_modeling_utils.py +++ b/tests/test_modeling_utils.py @@ -1515,6 +1515,28 @@ def test_push_to_hub_dynamic_model(self): new_model = AutoModel.from_config(config, trust_remote_code=True) self.assertEqual(new_model.__class__.__name__, "CustomModel") + def test_push_to_hub_with_tags(self): + from huggingface_hub import ModelCard + + new_tags = ["tag-1", "tag-2"] + + CustomConfig.register_for_auto_class() + CustomModel.register_for_auto_class() + + config = CustomConfig(hidden_size=32) + model = CustomModel(config) + + self.assertTrue(model.model_tags is None) + + model.add_model_tags(new_tags) + + self.assertTrue(model.model_tags == new_tags) + + model.push_to_hub("test-dynamic-model-with-tags", token=self._token) + + loaded_model_card = ModelCard.load("test-dynamic-model-with-tags") + self.assertEqual(loaded_model_card.data.tags, new_tags) + @require_torch class AttentionMaskTester(unittest.TestCase): From db3197daec6c758848530658d18dee282050c7cf Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Wed, 10 Jan 2024 13:47:26 +0000 Subject: [PATCH 18/26] oops --- tests/test_modeling_utils.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tests/test_modeling_utils.py b/tests/test_modeling_utils.py index 3481d9d5bef90b..55727c4ecf3eb2 100755 --- a/tests/test_modeling_utils.py +++ b/tests/test_modeling_utils.py @@ -1428,6 +1428,11 @@ def tearDownClass(cls): except HTTPError: pass + try: + delete_repo(token=cls._token, repo_id="test-dynamic-model-with-tags") + except HTTPError: + pass + @unittest.skip("This test is flaky") def test_push_to_hub(self): config = BertConfig( From f14cf93334ed205dc7467044ba60d05ebb8c49a1 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Wed, 10 Jan 2024 13:52:35 +0000 Subject: [PATCH 19/26] fix test --- tests/test_modeling_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_modeling_utils.py b/tests/test_modeling_utils.py index 55727c4ecf3eb2..7ff8b81cd7ebf8 100755 --- a/tests/test_modeling_utils.py +++ b/tests/test_modeling_utils.py @@ -1539,7 +1539,7 @@ def test_push_to_hub_with_tags(self): model.push_to_hub("test-dynamic-model-with-tags", token=self._token) - loaded_model_card = ModelCard.load("test-dynamic-model-with-tags") + loaded_model_card = ModelCard.load(f"{USER}/test-dynamic-model-with-tags") self.assertEqual(loaded_model_card.data.tags, new_tags) From 31117f4ed48b96a6d3c305f236b7ae8815c0a728 Mon Sep 17 00:00:00 2001 From: Younes Belkada <49240599+younesbelkada@users.noreply.github.com> Date: Wed, 10 Jan 2024 15:15:45 +0100 Subject: [PATCH 20/26] Update src/transformers/modeling_utils.py Co-authored-by: Omar Sanseviero --- src/transformers/modeling_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 4d01035ce10f2b..2d9de0b560cfff 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -1257,7 +1257,7 @@ def _backward_compatibility_gradient_checkpointing(self): def add_model_tags(self, tags: Union[List[str], str]) -> None: r""" - Add custom tags into the model that gets pushed in 🤗 Hub. 
Will + Add custom tags into the model that gets pushed to the Hub. Will not overwrite existing tags in the model. Args: From 36f2cb7b427c362649175c6fb9939643d2c4f6c7 Mon Sep 17 00:00:00 2001 From: Younes Belkada <49240599+younesbelkada@users.noreply.github.com> Date: Wed, 10 Jan 2024 15:24:10 +0100 Subject: [PATCH 21/26] Update src/transformers/utils/hub.py Co-authored-by: Lucain --- src/transformers/utils/hub.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/utils/hub.py b/src/transformers/utils/hub.py index 658c95d3160a08..6b427ed4df0af0 100644 --- a/src/transformers/utils/hub.py +++ b/src/transformers/utils/hub.py @@ -764,7 +764,7 @@ def push_to_hub( safe_serialization: bool = True, revision: str = None, commit_description: str = None, - tags: List[str] = None, + tags: Optional[List[str]] = None, **deprecated_kwargs, ) -> str: """ From 514f13b2f6a045df96798e22341f5727a4ef9378 Mon Sep 17 00:00:00 2001 From: Younes Belkada <49240599+younesbelkada@users.noreply.github.com> Date: Wed, 10 Jan 2024 15:25:21 +0100 Subject: [PATCH 22/26] Update src/transformers/modeling_utils.py --- src/transformers/modeling_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 2d9de0b560cfff..8e51e224479be5 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -1257,7 +1257,7 @@ def _backward_compatibility_gradient_checkpointing(self): def add_model_tags(self, tags: Union[List[str], str]) -> None: r""" - Add custom tags into the model that gets pushed to the Hub. Will + Add custom tags into the model that gets pushed to the Hugging Face Hub. Will not overwrite existing tags in the model. Args: From b3d59007d1cfac9c29897bc9be5f33b18c527ed9 Mon Sep 17 00:00:00 2001 From: Younes Belkada <49240599+younesbelkada@users.noreply.github.com> Date: Mon, 15 Jan 2024 09:15:42 +0100 Subject: [PATCH 23/26] Update src/transformers/trainer.py Co-authored-by: amyeroberts <22614925+amyeroberts@users.noreply.github.com> --- src/transformers/trainer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py index 0f4d66f549df20..cef6498d396da6 100755 --- a/src/transformers/trainer.py +++ b/src/transformers/trainer.py @@ -3697,7 +3697,7 @@ def push_to_hub(self, commit_message: Optional[str] = "End of training", blockin # "tags" argument to `push_to_hub` so that trainer automatically handles internal tags # from all models since Trainer does not call `model.push_to_hub`. 
if "tags" in kwargs and getattr(self.model, "model_tags", None) is not None: - # If it is a string, convert it to an array + # If it is a string, convert it to a list if isinstance(kwargs["tags"], str): kwargs["tags"] = [kwargs["tags"]] From 22d3412dd8dafa729c2d081e6d5041a1609a27ac Mon Sep 17 00:00:00 2001 From: Younes Belkada <49240599+younesbelkada@users.noreply.github.com> Date: Mon, 15 Jan 2024 11:29:23 +0100 Subject: [PATCH 24/26] Update src/transformers/modeling_utils.py Co-authored-by: amyeroberts <22614925+amyeroberts@users.noreply.github.com> --- src/transformers/modeling_utils.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index 8e51e224479be5..dd4107dbeb353d 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -2492,13 +2492,12 @@ def save_pretrained( @wraps(PushToHubMixin.push_to_hub) def push_to_hub(self, *args, **kwargs): - if "tags" not in kwargs: - kwargs["tags"] = self.model_tags - elif "tags" in kwargs and self.model_tags is not None: - for model_tag in self.model_tags: - # merge the tags together - if model_tag not in kwargs["tags"]: - kwargs["tags"].append(model_tag) + tags = self.model_tags if self.model_tags is not None else [] + for tag in kwargs.get("tags", []): + if tag not in tags: + tags.append(tag) + if tags: + kwargs["tags"] = tags return super().push_to_hub(*args, **kwargs) def get_memory_footprint(self, return_buffers=True): From 1e3fc1e306bbf953fe513246273fe2544f057288 Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Mon, 15 Jan 2024 10:44:38 +0000 Subject: [PATCH 25/26] add more conditions --- src/transformers/modeling_utils.py | 8 +++++++- src/transformers/trainer.py | 9 +++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/transformers/modeling_utils.py b/src/transformers/modeling_utils.py index d2bc7ef7513b51..76ff2db343843d 100644 --- a/src/transformers/modeling_utils.py +++ b/src/transformers/modeling_utils.py @@ -2492,9 +2492,15 @@ def save_pretrained( @wraps(PushToHubMixin.push_to_hub) def push_to_hub(self, *args, **kwargs): tags = self.model_tags if self.model_tags is not None else [] - for tag in kwargs.get("tags", []): + + tags_kwargs = kwargs.get("tags", []) + if isinstance(tags_kwargs, str): + tags_kwargs = [tags_kwargs] + + for tag in tags_kwargs: if tag not in tags: tags.append(tag) + if tags: kwargs["tags"] = tags return super().push_to_hub(*args, **kwargs) diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py index 615f57581347f7..6ef557dbeca5fb 100755 --- a/src/transformers/trainer.py +++ b/src/transformers/trainer.py @@ -3581,6 +3581,15 @@ def create_model_card( library_name = ModelCard.load(model_card_filepath).data.get("library_name") is_peft_library = library_name == "peft" + # Append existing tags in `tags` + existing_tags = ModelCard.load(model_card_filepath).data.tags + if tags is not None: + if isinstance(tags, str): + tags = [tags] + for tag in existing_tags: + if tag not in tags: + tags.append(tag) + training_summary = TrainingSummary.from_trainer( self, language=language, From 59738c67c273e7a86b13f6648446fff21367151d Mon Sep 17 00:00:00 2001 From: younesbelkada Date: Mon, 15 Jan 2024 10:47:46 +0000 Subject: [PATCH 26/26] more logic --- src/transformers/trainer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transformers/trainer.py b/src/transformers/trainer.py index 6ef557dbeca5fb..6850f4dca067ea 100755 --- 
a/src/transformers/trainer.py +++ b/src/transformers/trainer.py @@ -3583,7 +3583,7 @@ def create_model_card( # Append existing tags in `tags` existing_tags = ModelCard.load(model_card_filepath).data.tags - if tags is not None: + if tags is not None and existing_tags is not None: if isinstance(tags, str): tags = [tags] for tag in existing_tags:
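Taken together, the series leaves the tagging API in the state exercised by the test from PATCH 17. A minimal end-to-end sketch — not part of the patches; the checkpoint and repo id are placeholders, and it assumes an authenticated user:

```python
from huggingface_hub import ModelCard
from transformers import AutoModel

model = AutoModel.from_pretrained("bert-base-cased")

# `add_model_tags` merges new tags into `model_tags`, skipping duplicates,
# and never overwrites tags that are already set.
model.add_model_tags(["tag-1", "tag-2"])
assert model.model_tags == ["tag-1", "tag-2"]

# The `push_to_hub` override merges `model_tags` with the `tags` kwarg (a bare
# string is normalized to a list, per PATCH 25); `create_and_tag_model_card`
# then loads or creates the README and appends any tags not already present.
model.push_to_hub("my-user/test-model-with-tags", tags="extra-tag")

card = ModelCard.load("my-user/test-model-with-tags")
print(card.data.tags)  # ["tag-1", "tag-2", "extra-tag"]
```

The `Trainer.create_model_card` changes in PATCHES 25 and 26 apply the same merge in the other direction, folding tags already present on an existing README back into the `tags` argument so that `Trainer.push_to_hub` does not drop them.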