Commit: embedding fix
microhum committed Jun 5, 2024
1 parent 7e4a357 commit 830b664
Showing 2 changed files with 4 additions and 1 deletion.
models/transformers.py (4 additions, 1 deletion)
@@ -195,7 +195,10 @@ def __init__(self):
         self.decoder_norm = nn.LayerNorm(512)
         self.decoder_layers_parallel = clones(DecoderLayer(512, c(attn), c(attn), c(ff), dropout=0.0), 1)
         self.decoder_norm_parallel = nn.LayerNorm(512)
-        self.cls_embedding = nn.Embedding(52,512)
+        if opts.ref_nshot == 52:
+            self.cls_embedding = nn.Embedding(92,512)
+        else:
+            self.cls_embedding = nn.Embedding(52,512)
         self.cls_token = nn.Parameter(torch.zeros(1, 1, 512))
 
     def forward(self, x, memory, trg_char, src_mask=None, tgt_mask=None):
The second changed file is empty.
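For context on why the table size matters: torch.nn.Embedding(num_embeddings, dim) only accepts indices in [0, num_embeddings), so a class-embedding table with 52 rows raises an out-of-range error as soon as a class index of 52 or higher is looked up. A minimal PyTorch sketch, independent of this repository, illustrating the failure the commit guards against:

import torch
import torch.nn as nn

# An embedding table with 52 rows only covers class indices 0..51.
emb_small = nn.Embedding(52, 512)
emb_small(torch.tensor([0, 51]))     # fine: indices within [0, 52)
# emb_small(torch.tensor([60]))      # IndexError: index out of range in self

# Sizing the table to the larger class count makes higher indices valid.
emb_large = nn.Embedding(92, 512)
vec = emb_large(torch.tensor([60]))  # shape: (1, 512)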
