Skip M4T test_retain_grad_hidden_states_attentions (#28060)
* skip test from SpeechInput

* refine description of skip
ylacombe authored Dec 15, 2023
1 parent d269c4b commit deb72cb
Showing 1 changed file with 5 additions and 3 deletions.
tests/models/seamless_m4t/test_modeling_seamless_m4t.py (8 changes: 5 additions & 3 deletions)
@@ -20,7 +20,7 @@
 import unittest
 
 from transformers import SeamlessM4TConfig, is_speech_available, is_torch_available
-from transformers.testing_utils import is_flaky, require_torch, slow, torch_device
+from transformers.testing_utils import require_torch, slow, torch_device
 from transformers.trainer_utils import set_seed
 from transformers.utils import cached_property
 
@@ -610,9 +610,11 @@ def test_attention_outputs(self):
                 [self.model_tester.num_attention_heads, encoder_seq_length, encoder_key_length],
             )
 
-    @is_flaky()
+    @unittest.skip(
+        reason="In training model, the first speech encoder layer is sometimes skipped. Training is not supported yet, so the test is ignored."
+    )
     def test_retain_grad_hidden_states_attentions(self):
-        super().test_retain_grad_hidden_states_attentions()
+        pass
 
 
 @require_torch

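For context, the flakiness the new skip reason describes comes from stochastic layer skipping in the speech encoder during training (commonly implemented as LayerDrop): on a given forward pass a layer may not run at all, so the hidden state whose gradient the test tries to retain may never be produced. The sketch below is a minimal, hypothetical illustration of that behaviour, not the SeamlessM4T implementation; the LayerDropEncoder class and its parameters are invented for this example.

    # Minimal sketch (not SeamlessM4T code) of LayerDrop-style layer skipping:
    # in training mode a layer may be dropped for a given forward pass, so a
    # test that expects gradients on every layer's output can intermittently fail.
    import torch
    from torch import nn


    class LayerDropEncoder(nn.Module):
        def __init__(self, num_layers: int = 4, hidden: int = 8, layerdrop: float = 0.5):
            super().__init__()
            self.layers = nn.ModuleList(nn.Linear(hidden, hidden) for _ in range(num_layers))
            self.layerdrop = layerdrop

        def forward(self, x):
            hidden_states = []
            for layer in self.layers:
                # In training mode, skip this layer with probability `layerdrop`,
                # so its output (and therefore its gradient) may simply not exist.
                if self.training and torch.rand(1).item() < self.layerdrop:
                    continue
                x = layer(x)
                hidden_states.append(x)
            return x, hidden_states


    encoder = LayerDropEncoder().train()
    out, states = encoder(torch.randn(2, 8))
    print(f"{len(states)} of {len(encoder.layers)} layers actually ran this pass")

Because the number of layers that run varies between passes in training mode, a gradient-retention check on a specific layer's output cannot be made deterministic, which is why the test is skipped rather than merely marked flaky.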