Set the Linear device equal to the main model device in SoftmaxLoss (#…
tomaarsen authored Dec 13, 2023
1 parent 6b524f8 commit 0ba8af3
Showing 1 changed file with 1 addition and 1 deletion.
2 changes: 1 addition & 1 deletion sentence_transformers/losses/SoftmaxLoss.py
@@ -55,7 +55,7 @@ def __init__(self,
         if concatenation_sent_multiplication:
             num_vectors_concatenated += 1
         logger.info("Softmax loss: #Vectors concatenated: {}".format(num_vectors_concatenated))
-        self.classifier = nn.Linear(num_vectors_concatenated * sentence_embedding_dimension, num_labels)
+        self.classifier = nn.Linear(num_vectors_concatenated * sentence_embedding_dimension, num_labels, device=model.device)
         self.loss_fct = loss_fct

     def forward(self, sentence_features: Iterable[Dict[str, Tensor]], labels: Tensor):
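The change constructs the classifier head directly on the main model's device instead of PyTorch's CPU default, presumably so the loss module does not need to be moved to the model's device separately. A minimal sketch of the effect, assuming sentence-transformers with this commit installed; the model name and label count are illustrative:

from sentence_transformers import SentenceTransformer, losses

# Illustrative model; its parameters live on model.device
# (e.g. "cuda:0" when a GPU is available, otherwise "cpu").
model = SentenceTransformer("all-MiniLM-L6-v2")

# With this commit, the nn.Linear classifier inside SoftmaxLoss is created
# directly on model.device rather than on the CPU default.
loss = losses.SoftmaxLoss(
    model=model,
    sentence_embedding_dimension=model.get_sentence_embedding_dimension(),
    num_labels=3,  # illustrative label count
)

print(loss.classifier.weight.device)  # expected to match model.device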
