tag test as flaky
gante committed Jul 9, 2024
1 parent cd88500 commit 0f81de3
Showing 1 changed file with 2 additions and 0 deletions.
2 changes: 2 additions & 0 deletions tests/models/whisper/test_modeling_whisper.py
@@ -30,6 +30,7 @@
 import transformers
 from transformers import WhisperConfig
 from transformers.testing_utils import (
+    is_flaky,
     is_pt_flax_cross_test,
     require_flash_attn,
     require_torch,
@@ -1539,6 +1540,7 @@ def test_longform_generate_multi_batch(self):
     def test_longform_generate_multi_batch_cond_prev(self):
         self._check_longform_generate_multi_batch(condition_on_prev_tokens=True)

+    @is_flaky()  # TODO (joao, sanchit): fails ~9% of the times. Does the original test have the same issue?
     def test_custom_4d_attention_mask(self):
         config, input_dict = self.model_tester.prepare_config_and_inputs_for_common()
         model = WhisperForConditionalGeneration(config).to(device=torch_device, dtype=torch.float32)
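For context, a minimal sketch of what an `is_flaky`-style decorator typically does: it reruns a failing test a few times before reporting a failure, so a test that fails roughly 9% of the time per run almost never breaks CI. This is an illustration only; the actual implementation in `transformers.testing_utils` may differ in signature and behavior.

    # Illustrative sketch, not the transformers implementation.
    import functools
    import time


    def is_flaky(max_attempts: int = 5, wait_before_retry: float = None):
        """Retry the decorated test up to `max_attempts` times before letting it fail."""

        def decorator(test_func):
            @functools.wraps(test_func)
            def wrapper(*args, **kwargs):
                for attempt in range(max_attempts):
                    try:
                        return test_func(*args, **kwargs)
                    except Exception:
                        if attempt == max_attempts - 1:
                            raise  # out of retries: surface the real failure
                        if wait_before_retry is not None:
                            time.sleep(wait_before_retry)

            return wrapper

        return decorator

With 5 attempts and an independent ~9% failure rate per attempt, the chance of all attempts failing is about 0.09^5, i.e. on the order of one in 170,000 runs.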
