
ran tests
BernardZach committed Dec 12, 2024
1 parent cbfa985 commit 125197b
Showing 10 changed files with 127 additions and 100 deletions.
docs/source/en/index.md (2 changes: 1 addition & 1 deletion)
@@ -124,7 +124,7 @@ Flax), PyTorch, and/or TensorFlow.
| [DialoGPT](model_doc/dialogpt) ||||
| [DiNAT](model_doc/dinat) ||||
| [DINOv2](model_doc/dinov2) ||||
- | [Dinov2WithRegisters](model_doc/dinov2_with_registers) ||||
+ | [Dinov2WithRegisters](model_doc/dinov2_with_registers) ||||
| [DistilBERT](model_doc/distilbert) ||||
| [DiT](model_doc/dit) ||||
| [DonutSwin](model_doc/donut) ||||
docs/source/en/perf_infer_gpu_one.md (1 change: 1 addition & 0 deletions)
@@ -231,6 +231,7 @@ For now, Transformers supports SDPA inference and training for the following architectures:
* [Dbrx](https://huggingface.co/docs/transformers/model_doc/dbrx#transformers.DbrxModel)
* [DeiT](https://huggingface.co/docs/transformers/model_doc/deit#transformers.DeiTModel)
* [Dinov2](https://huggingface.co/docs/transformers/en/model_doc/dinov2)
+ * [Dinov2_with_registers](https://huggingface.co/docs/transformers/en/model_doc/dinov2)
* [DistilBert](https://huggingface.co/docs/transformers/model_doc/distilbert#transformers.DistilBertModel)
* [Dpr](https://huggingface.co/docs/transformers/model_doc/dpr#transformers.DprReader)
* [EncoderDecoder](https://huggingface.co/docs/transformers/model_doc/encoder_decoder#transformers.EncoderDecoderModel)
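Since the hunk above adds Dinov2WithRegisters to the SDPA support list, the model can be loaded with the `sdpa` attention implementation. A minimal sketch, assuming a hypothetical checkpoint id (check the Hub for the actually released weights):

```python
# Minimal SDPA inference sketch; the checkpoint id below is an assumption,
# not confirmed by this commit -- substitute a real Dinov2WithRegisters checkpoint.
import torch
from transformers import AutoModel

model = AutoModel.from_pretrained(
    "facebook/dinov2-with-registers-base",  # hypothetical checkpoint id
    torch_dtype=torch.float16,
    attn_implementation="sdpa",  # opt into scaled-dot-product attention
)
```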
src/transformers/commands/add_new_model_like.py (2 changes: 1 addition & 1 deletion)
@@ -766,7 +766,7 @@ def retrieve_info_for_model(model_type, frameworks: Optional[List[str]] = None):
image_processor_class = image_processor_classes[0] # we take the slow image processor class.
else:
image_processor_class = image_processor_classes

feature_extractor_class = auto_module.feature_extraction_auto.FEATURE_EXTRACTOR_MAPPING_NAMES.get(model_type, None)
processor_class = auto_module.processing_auto.PROCESSOR_MAPPING_NAMES.get(model_type, None)

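The snippet above keeps the slow image processor when the auto mapping supplies a pair of classes. A standalone sketch of that selection logic; the tuple contents are assumed names for illustration only:

```python
# Illustrative pair; auto mappings may hold a single class name or a
# (slow, fast) tuple -- the command keeps the slow variant when both exist.
image_processor_classes = ("Dinov2ImageProcessor", "Dinov2ImageProcessorFast")  # assumed names

if isinstance(image_processor_classes, tuple):
    image_processor_class = image_processor_classes[0]  # slow image processor class
else:
    image_processor_class = image_processor_classes

print(image_processor_class)  # -> Dinov2ImageProcessor
```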
src/transformers/models/auto/modeling_auto.py (2 changes: 1 addition & 1 deletion)
@@ -653,14 +653,14 @@
),
("dinat", "DinatForImageClassification"),
("dinov2", "Dinov2ForImageClassification"),
("dinov2_with_registers", "Dinov2WithRegistersForImageClassification"),
(
"efficientformer",
(
"EfficientFormerForImageClassification",
"EfficientFormerForImageClassificationWithTeacher",
),
),
("dinov2_with_registers", "Dinov2WithRegistersForImageClassification"),
("efficientnet", "EfficientNetForImageClassification"),
("focalnet", "FocalNetForImageClassification"),
("hiera", "HieraForImageClassification"),
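This hunk moves the `("dinov2_with_registers", "Dinov2WithRegistersForImageClassification")` entry into its alphabetical position in the mapping. With the entry registered, the auto class can resolve the model from a config alone. A minimal sketch; the label count is illustrative:

```python
# Resolve the image-classification head through the auto mapping; no weights
# are downloaded because the model is built from a fresh config.
from transformers import AutoModelForImageClassification, Dinov2WithRegistersConfig

config = Dinov2WithRegistersConfig(num_labels=10)  # illustrative label count
model = AutoModelForImageClassification.from_config(config)
print(type(model).__name__)  # -> Dinov2WithRegistersForImageClassification
```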
src/transformers/models/dinov2_with_registers/__init__.py (2 changes: 1 addition & 1 deletion)
@@ -54,4 +54,4 @@
else:
import sys

- sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
\ No newline at end of file
+ sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
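For context, the line changed here (the fix adds the missing newline at end of file) is the standard lazy-import tail used across transformers `__init__.py` files: the package's module object is swapped for a `_LazyModule` that defers submodule imports until first attribute access. A schematic sketch; the import structure shown is a plausible subset, not the file's full mapping:

```python
# Schematic only: replaces this package's module object with a _LazyModule so
# that e.g. Dinov2WithRegistersConfig is imported on first attribute access.
import sys
from transformers.utils import _LazyModule

_import_structure = {
    "configuration_dinov2_with_registers": ["Dinov2WithRegistersConfig"],  # plausible subset
}

sys.modules[__name__] = _LazyModule(__name__, globals()["__file__"], _import_structure, module_spec=__spec__)
```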
src/transformers/models/dinov2_with_registers/configuration_dinov2_with_registers.py
@@ -1,9 +1,9 @@
- # 🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨
- # This file was automatically generated from <path_to_diff_file.py>.
- # Do NOT edit this file manually as any edits will be overwritten by the generation of
- # the file from the diff. If any change should be done, please apply the change to the
- # diff.py file directly.
- # 🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨
+ # 🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨
+ # This file was automatically generated from src/transformers/models/dinov2_with_registers/modular_dinov2_with_registers.py.
+ # Do NOT edit this file manually as any edits will be overwritten by the generation of
+ # the file from the modular. If any change should be done, please apply the change to the
+ # modular_dinov2_with_registers.py file directly. One of our CI enforces this.
+ # 🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨🚨
# coding=utf-8
# Copyright 2024 Meta Inc. and the HuggingFace Inc. team. All rights reserved.
#
@@ -19,19 +19,10 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


- from transformers import PretrainedConfig

- from ...utils import (
-     logging,
- )
+ from ...configuration_utils import PretrainedConfig
from ...utils.backbone_utils import BackboneConfigMixin, get_aligned_output_features_output_indices


- logger = logging.get_logger(__name__)


class Dinov2WithRegistersConfig(BackboneConfigMixin, PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`Dinov2WithRegistersModel`]. It is used to instantiate an
@@ -169,4 +160,4 @@ def __init__(
out_features=out_features, out_indices=out_indices, stage_names=self.stage_names
)
self.apply_layernorm = apply_layernorm
- self.reshape_hidden_states = reshape_hidden_states
\ No newline at end of file
+ self.reshape_hidden_states = reshape_hidden_states
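The tail of `Dinov2WithRegistersConfig.__init__` above wires up the aligned backbone output features plus the `apply_layernorm` and `reshape_hidden_states` flags. A minimal usage sketch with default hyperparameters (the defaults are assumed to mirror DINOv2):

```python
# Build a randomly initialized model from a default config; the two flags
# printed are the ones assigned at the end of __init__ in the diff above.
from transformers import Dinov2WithRegistersConfig, Dinov2WithRegistersModel

config = Dinov2WithRegistersConfig()  # default DINOv2-style hyperparameters (assumed)
print(config.apply_layernorm)         # post-process backbone features with layernorm
print(config.reshape_hidden_states)   # return (batch, channels, h, w) feature maps
model = Dinov2WithRegistersModel(config)
```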
src/transformers/models/dinov2_with_registers/convert_dinov2_with_registers_to_hf.py
@@ -288,4 +288,4 @@ def convert_dinov2_with_registers_checkpoint(model_name, pytorch_dump_folder_path
)

args = parser.parse_args()
- convert_dinov2_with_registers_checkpoint(args.model_name, args.pytorch_dump_folder_path, args.push_to_hub)
\ No newline at end of file
+ convert_dinov2_with_registers_checkpoint(args.model_name, args.pytorch_dump_folder_path, args.push_to_hub)
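The conversion script ends with a small argparse driver. A self-contained sketch of that pattern; only the argument names are taken from the diff, while the default model name and the placeholder body are assumptions (the real function maps original checkpoint weights into the HF format):

```python
# Standalone sketch of the argparse driver shown above; everything not in the
# diff is illustrative.
import argparse

def convert_dinov2_with_registers_checkpoint(model_name, pytorch_dump_folder_path, push_to_hub):
    # placeholder body -- the real script converts original weights to HF format
    print(model_name, pytorch_dump_folder_path, push_to_hub)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--model_name", type=str, default="dinov2_vitb14_reg4")  # assumed default
    parser.add_argument("--pytorch_dump_folder_path", type=str, default=None)
    parser.add_argument("--push_to_hub", action="store_true")

    args = parser.parse_args()
    convert_dinov2_with_registers_checkpoint(args.model_name, args.pytorch_dump_folder_path, args.push_to_hub)
```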
