Remove unused module DETR based models (#30823)
* removing heads for classification from DETR models.

* quality fix
conditionedstimulus authored May 15, 2024
1 parent be3aa43 commit b8aee2e
Showing 5 changed files with 0 additions and 94 deletions.

19 changes: 0 additions & 19 deletions src/transformers/models/conditional_detr/modeling_conditional_detr.py
@@ -1091,25 +1091,6 @@ def forward(
         return outputs
 
 
-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead with Detr->ConditionalDetr
-class ConditionalDetrClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 # Copied from transformers.models.detr.modeling_detr.DetrMLPPredictionHead with DetrMLPPredictionHead->MLP
 class MLP(nn.Module):
     """
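
For reference, every deleted head is the same module: a BART-style pooled classification head that applies dropout, a tanh-activated dense layer, dropout again, and a final linear projection to class logits. None of the five modeling files ever instantiates its copy, which is why the commit removes all of them with zero additions. A minimal standalone sketch of the deleted module (illustrative names and sizes, not a transformers API):

import torch
from torch import nn


class ClassificationHead(nn.Module):
    """Standalone copy of the deleted DetrClassificationHead, for illustration only."""

    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
        super().__init__()
        self.dense = nn.Linear(input_dim, inner_dim)
        self.dropout = nn.Dropout(p=pooler_dropout)
        self.out_proj = nn.Linear(inner_dim, num_classes)

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.dense(hidden_states)  # (batch, input_dim) -> (batch, inner_dim)
        hidden_states = torch.tanh(hidden_states)
        hidden_states = self.dropout(hidden_states)
        return self.out_proj(hidden_states)  # (batch, inner_dim) -> (batch, num_classes)


# Shape check with hypothetical sizes: pooled features in, per-class logits out.
head = ClassificationHead(input_dim=256, inner_dim=256, num_classes=91, pooler_dropout=0.1)
logits = head(torch.randn(2, 256))
assert logits.shape == (2, 91)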

19 changes: 0 additions & 19 deletions src/transformers/models/deformable_detr/modeling_deformable_detr.py
@@ -1066,25 +1066,6 @@ def forward(
         return outputs
 
 
-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead
-class DeformableDetrClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class DeformableDetrPreTrainedModel(PreTrainedModel):
     config_class = DeformableDetrConfig
     base_model_prefix = "model"

19 changes: 0 additions & 19 deletions src/transformers/models/deta/modeling_deta.py
@@ -1032,25 +1032,6 @@ def forward(
         return outputs
 
 
-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead
-class DetaClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class DetaPreTrainedModel(PreTrainedModel):
     config_class = DetaConfig
     base_model_prefix = "model"

18 changes: 0 additions & 18 deletions src/transformers/models/detr/modeling_detr.py
@@ -875,24 +875,6 @@ def forward(
         return outputs
 
 
-class DetrClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class DetrPreTrainedModel(PreTrainedModel):
     config_class = DetrConfig
     base_model_prefix = "model"

19 changes: 0 additions & 19 deletions src/transformers/models/table_transformer/modeling_table_transformer.py
@@ -782,25 +782,6 @@ def forward(
         return outputs
 
 
-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead with Detr->TableTransformer
-class TableTransformerClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class TableTransformerPreTrainedModel(PreTrainedModel):
     config_class = TableTransformerConfig
     base_model_prefix = "model"
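
A removal like this is easy to double-check mechanically. A hypothetical dead-code scan (not part of the commit) over src/transformers should surface only the definition sites and their # Copied from markers, confirming the heads were never referenced anywhere else:

import pathlib
import re

# The five heads deleted by this commit.
NAMES = [
    "DetrClassificationHead",
    "ConditionalDetrClassificationHead",
    "DeformableDetrClassificationHead",
    "DetaClassificationHead",
    "TableTransformerClassificationHead",
]

# Print every occurrence with file and line number; run from the repo root.
for path in sorted(pathlib.Path("src/transformers").rglob("*.py")):
    text = path.read_text(encoding="utf-8")
    for name in NAMES:
        for match in re.finditer(rf"\b{re.escape(name)}\b", text):
            line_no = text.count("\n", 0, match.start()) + 1
            print(f"{path}:{line_no}: {name}")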
