From b8aee2e918d7ba2d5e9e80162ae26b4806873307 Mon Sep 17 00:00:00 2001
From: David <37246112+conditionedstimulus@users.noreply.github.com>
Date: Wed, 15 May 2024 12:19:43 +0200
Subject: [PATCH] Remove unused modules from DETR-based models (#30823)

* removing classification heads from DETR models.

* quality fix
---
 .../modeling_conditional_detr.py              | 19 -------------------
 .../modeling_deformable_detr.py               | 19 -------------------
 src/transformers/models/deta/modeling_deta.py | 19 -------------------
 src/transformers/models/detr/modeling_detr.py | 18 ------------------
 .../modeling_table_transformer.py             | 19 -------------------
 5 files changed, 94 deletions(-)

diff --git a/src/transformers/models/conditional_detr/modeling_conditional_detr.py b/src/transformers/models/conditional_detr/modeling_conditional_detr.py
index 2eb0ea885cfa0c..eec95205244f1a 100644
--- a/src/transformers/models/conditional_detr/modeling_conditional_detr.py
+++ b/src/transformers/models/conditional_detr/modeling_conditional_detr.py
@@ -1091,25 +1091,6 @@ def forward(
         return outputs


-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead with Detr->ConditionalDetr
-class ConditionalDetrClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 # Copied from transformers.models.detr.modeling_detr.DetrMLPPredictionHead with DetrMLPPredictionHead->MLP
 class MLP(nn.Module):
     """
diff --git a/src/transformers/models/deformable_detr/modeling_deformable_detr.py b/src/transformers/models/deformable_detr/modeling_deformable_detr.py
index 61f0f011617a7d..bab552da35d424 100755
--- a/src/transformers/models/deformable_detr/modeling_deformable_detr.py
+++ b/src/transformers/models/deformable_detr/modeling_deformable_detr.py
@@ -1066,25 +1066,6 @@ def forward(
         return outputs


-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead
-class DeformableDetrClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class DeformableDetrPreTrainedModel(PreTrainedModel):
     config_class = DeformableDetrConfig
     base_model_prefix = "model"
diff --git a/src/transformers/models/deta/modeling_deta.py b/src/transformers/models/deta/modeling_deta.py
index 86ced383a0cc34..bd2faad271c661 100644
--- a/src/transformers/models/deta/modeling_deta.py
+++ b/src/transformers/models/deta/modeling_deta.py
@@ -1032,25 +1032,6 @@ def forward(
         return outputs


-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead
-class DetaClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class DetaPreTrainedModel(PreTrainedModel):
     config_class = DetaConfig
     base_model_prefix = "model"
diff --git a/src/transformers/models/detr/modeling_detr.py b/src/transformers/models/detr/modeling_detr.py
index ea8735da9f3d45..3ac3c13550af32 100644
--- a/src/transformers/models/detr/modeling_detr.py
+++ b/src/transformers/models/detr/modeling_detr.py
@@ -875,24 +875,6 @@ def forward(
         return outputs


-class DetrClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class DetrPreTrainedModel(PreTrainedModel):
     config_class = DetrConfig
     base_model_prefix = "model"
diff --git a/src/transformers/models/table_transformer/modeling_table_transformer.py b/src/transformers/models/table_transformer/modeling_table_transformer.py
index 05a548fd5ac92d..73d6a73398fbee 100644
--- a/src/transformers/models/table_transformer/modeling_table_transformer.py
+++ b/src/transformers/models/table_transformer/modeling_table_transformer.py
@@ -782,25 +782,6 @@ def forward(
         return outputs


-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead with Detr->TableTransformer
-class TableTransformerClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class TableTransformerPreTrainedModel(PreTrainedModel):
     config_class = TableTransformerConfig
     base_model_prefix = "model"
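
--
Note for downstream users: all five deleted heads are identical copies of
DetrClassificationHead (see the "Copied from" markers above), and none of them
is instantiated anywhere in these modeling files, so the patch changes no model
behavior. If external code imported one of these classes directly, the module
can be recovered verbatim from the deleted lines. A minimal standalone sketch
follows; the class name, the dimensions, and the usage at the bottom are
illustrative assumptions, only the layer structure comes from the removed code.

import torch
from torch import nn


class ClassificationHead(nn.Module):
    """Pooler-style head: dropout -> dense -> tanh -> dropout -> projection."""

    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
        super().__init__()
        self.dense = nn.Linear(input_dim, inner_dim)       # input_dim -> inner_dim
        self.dropout = nn.Dropout(p=pooler_dropout)        # one dropout module, applied twice
        self.out_proj = nn.Linear(inner_dim, num_classes)  # inner_dim -> class logits

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.dense(hidden_states)
        hidden_states = torch.tanh(hidden_states)
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.out_proj(hidden_states)
        return hidden_states


# Hypothetical usage: classify a pooled 256-dim decoder feature into 91 classes.
head = ClassificationHead(input_dim=256, inner_dim=256, num_classes=91, pooler_dropout=0.1)
logits = head(torch.randn(2, 256))  # shape: (2, 91)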