Skip to content

Commit

Permalink
add minimal test for camembert and xlm_roberta, as their test classes do not…
Browse files Browse the repository at this point in the history
…inherit from ModelTesterMixin
  • Loading branch information
fxmarty committed Jun 24, 2024
1 parent 575fd79 commit 759508c
Show file tree
Hide file tree
Showing 2 changed files with 20 additions and 2 deletions.
11 changes: 10 additions & 1 deletion tests/models/camembert/test_modeling_camembert.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

from transformers import is_torch_available
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow, torch_device
from transformers.utils.import_utils import is_torch_sdpa_available


if is_torch_available():
Expand All @@ -31,7 +32,7 @@
class CamembertModelIntegrationTest(unittest.TestCase):
@slow
def test_output_embeds_base_model(self):
model = CamembertModel.from_pretrained("almanach/camembert-base")
model = CamembertModel.from_pretrained("almanach/camembert-base", attn_implementation="eager")
model.to(torch_device)

input_ids = torch.tensor(
Expand All @@ -54,3 +55,11 @@ def test_output_embeds_base_model(self):
# expected_slice = roberta.model.forward(input_ids)[0][:, :3, :3].detach()

self.assertTrue(torch.allclose(output[:, :3, :3], expected_slice, atol=1e-4))

if is_torch_sdpa_available():
model = CamembertModel.from_pretrained("almanach/camembert-base", attn_implementation="sdpa").to(
torch_device
)
with torch.no_grad():
output_sdpa = model(input_ids)["last_hidden_state"].detach()
self.assertTrue(torch.allclose(output, output_sdpa, atol=1e-3))
11 changes: 10 additions & 1 deletion tests/models/xlm_roberta/test_modeling_xlm_roberta.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

from transformers import is_torch_available
from transformers.testing_utils import require_sentencepiece, require_tokenizers, require_torch, slow
from transformers.utils.import_utils import is_torch_sdpa_available


if is_torch_available():
Expand All @@ -32,7 +33,7 @@
class XLMRobertaModelIntegrationTest(unittest.TestCase):
@slow
def test_xlm_roberta_base(self):
model = XLMRobertaModel.from_pretrained("FacebookAI/xlm-roberta-base")
model = XLMRobertaModel.from_pretrained("FacebookAI/xlm-roberta-base", attn_implementation="eager")
input_ids = torch.tensor([[0, 581, 10269, 83, 99942, 136, 60742, 23, 70, 80583, 18276, 2]])
# The dog is cute and lives in the garden house

Expand All @@ -49,6 +50,14 @@ def test_xlm_roberta_base(self):
# compare the actual values for a slice of last dim
self.assertTrue(torch.allclose(output[:, :, -1], expected_output_values_last_dim, atol=1e-3))

if is_torch_sdpa_available():
model = XLMRobertaModel.from_pretrained("FacebookAI/xlm-roberta-base", attn_implementation="sdpa")
with torch.no_grad():
output_sdpa = model(input_ids)["last_hidden_state"].detach()
self.assertEqual(output.shape, expected_output_shape)
# compare the actual values for a slice of last dim
self.assertTrue(torch.allclose(output, output_sdpa, atol=1e-3))

@slow
def test_xlm_roberta_large(self):
model = XLMRobertaModel.from_pretrained("FacebookAI/xlm-roberta-large")
Expand Down

0 comments on commit 759508c

Please sign in to comment.