Skip to content

Commit

Permalink
Repo consistency fix after #33339 (#33873)
Browse files · Browse the repository at this point in the history
* Repo consistency fix after #33339

* [run-slow] omdet_turbo
  • Loading branch information
amyeroberts authored Oct 1, 2024
1 parent 68a2b50 commit 1baa088
Showing 1 changed file with 0 additions and 23 deletions.
23 changes: 0 additions & 23 deletions src/transformers/models/omdet_turbo/modeling_omdet_turbo.py
Original file line number Diff line number Diff line change
Expand Up @@ -418,29 +418,6 @@ def __init__(self, config: OmDetTurboConfig, num_heads: int, n_points: int):

self.disable_custom_kernels = config.disable_custom_kernels

self._reset_parameters()

def _reset_parameters(self):
    """Initialize the deformable-attention projection layers.

    The sampling-offset bias is seeded with a fixed geometric pattern: one
    direction per head evenly spaced around the circle, normalized so the
    larger coordinate has magnitude 1, replicated over levels/points, with
    the k-th point scaled by (k + 1) so points fan outward. Offset and
    attention weights start at zero; value/output projections get
    Xavier-uniform weights with zero bias.
    """
    # Zero the offset weights so the initial offsets come purely from the bias.
    nn.init.constant_(self.sampling_offsets.weight.data, 0.0)
    dtype = torch.get_default_dtype()
    # One angle per head, evenly spaced over the full circle.
    angles = torch.arange(self.n_heads, dtype=torch.int64).to(dtype) * (2.0 * math.pi / self.n_heads)
    directions = torch.stack([angles.cos(), angles.sin()], -1)
    # Normalize each head direction by its largest absolute coordinate,
    # then replicate across feature levels and sampling points.
    directions = directions / directions.abs().max(-1, keepdim=True)[0]
    bias_init = directions.view(self.n_heads, 1, 1, 2).repeat(1, self.n_levels, self.n_points, 1)
    # Scale the k-th sampling point by (k + 1) so successive points fan outward.
    for point_idx in range(self.n_points):
        bias_init[:, :, point_idx, :] *= point_idx + 1
    with torch.no_grad():
        self.sampling_offsets.bias = nn.Parameter(bias_init.view(-1))
    nn.init.constant_(self.attention_weights.weight.data, 0.0)
    nn.init.constant_(self.attention_weights.bias.data, 0.0)
    nn.init.xavier_uniform_(self.value_proj.weight.data)
    nn.init.constant_(self.value_proj.bias.data, 0.0)
    nn.init.xavier_uniform_(self.output_proj.weight.data)
    nn.init.constant_(self.output_proj.bias.data, 0.0)

def with_pos_embed(self, tensor: torch.Tensor, position_embeddings: Optional[Tensor]):
    """Add positional embeddings to ``tensor``; pass it through unchanged when they are ``None``."""
    if position_embeddings is None:
        return tensor
    return tensor + position_embeddings

Expand Down

0 comments on commit 1baa088

Please sign in to comment.