shared_transf --> shared_transformer
Quentin-Anthony committed Sep 23, 2024
1 parent b9e86b0 commit 4b0fb52
Showing 1 changed file with 3 additions and 3 deletions.
src/transformers/models/zamba/modeling_zamba.py (3 additions, 3 deletions)
@@ -1002,9 +1002,9 @@ def forward(
 
 
 class HybridLayer(nn.Module):
-    def __init__(self, shared_transf: ZambaAttentionDecoderLayer, linear: nn.Linear, mamba: ZambaMambaDecoderLayer):
+    def __init__(self, shared_transformer: ZambaAttentionDecoderLayer, linear: nn.Linear, mamba: ZambaMambaDecoderLayer):
         super().__init__()
-        self.shared_transf = shared_transf
+        self.shared_transformer = shared_transformer
         self.linear = linear
         self.mamba = mamba
 
@@ -1040,7 +1040,7 @@ def forward(
             Indices depicting the position of the input sequence tokens in the sequence.
         """
 
-        layer_outputs = self.shared_transf(
+        layer_outputs = self.shared_transformer(
             hidden_states,
             original_hidden_states=original_hidden_states,
             layer_idx=layer_idx,
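For readers who want the rename in context, below is a minimal, self-contained sketch of a HybridLayer-style block after the change. The constructor signature and the self.shared_transformer(...) call mirror the hunks above; the stub layer classes, the toy tensor sizes, and the way the linear projection feeds the mamba block are illustrative assumptions, not the actual Zamba implementation.

import torch
import torch.nn as nn

# Hypothetical stand-ins for ZambaAttentionDecoderLayer / ZambaMambaDecoderLayer,
# reduced to pass-through modules so this sketch runs on its own.
class StubSharedTransformerLayer(nn.Module):
    def forward(self, hidden_states, original_hidden_states=None, layer_idx=None):
        # The real layer returns a tuple; keep that shape here.
        return (hidden_states,)

class StubMambaLayer(nn.Module):
    def forward(self, hidden_states, transformer_hidden_states=None):
        return (hidden_states,)

class HybridLayer(nn.Module):
    """Pairs a shared transformer block with a linear projection and a mamba block."""

    def __init__(self, shared_transformer: nn.Module, linear: nn.Linear, mamba: nn.Module):
        super().__init__()
        self.shared_transformer = shared_transformer  # renamed from `shared_transf`
        self.linear = linear
        self.mamba = mamba

    def forward(self, hidden_states, original_hidden_states=None, layer_idx=None):
        # Call signature of the shared transformer matches the second hunk above;
        # the routing of its output through the linear layer into mamba is an
        # assumption made for illustration.
        layer_outputs = self.shared_transformer(
            hidden_states,
            original_hidden_states=original_hidden_states,
            layer_idx=layer_idx,
        )
        projected = self.linear(layer_outputs[0])
        return self.mamba(hidden_states, transformer_hidden_states=projected)

# Toy usage: batch of 1, sequence length 4, hidden size 8.
hidden = torch.randn(1, 4, 8)
layer = HybridLayer(StubSharedTransformerLayer(), nn.Linear(8, 8), StubMambaLayer())
out = layer(hidden, original_hidden_states=hidden, layer_idx=0)
print(out[0].shape)  # torch.Size([1, 4, 8])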
