test_custom_4d_attention_mask skip with sliding window attn (#30833)
poedator authored May 23, 2024
1 parent 87a3518 commit 6739e1d
Showing 1 changed file with 2 additions and 0 deletions.
tests/test_modeling_common.py
@@ -4407,6 +4407,8 @@ def test_custom_4d_attention_mask(self):
     if not model_class._supports_static_cache:
         self.skipTest(f"{model_class.__name__} is not guaranteed to work with custom 4D attention masks")
     config, _ = self.model_tester.prepare_config_and_inputs_for_common()
+    if getattr(config, "sliding_window", 0) > 0:
+        self.skipTest(f"{model_class.__name__} with sliding window attention is not supported by this test")
     model = model_class(config).to(device=torch_device, dtype=torch.float32)
     (
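The added guard skips the custom 4D attention mask test for any model whose config enables sliding window attention, presumably because a sliding window changes the attention pattern that the test's hand-built mask assumes. A minimal sketch of the check in isolation, using a stand-in config object (SimpleNamespace is illustrative here; the real test reads the config returned by prepare_config_and_inputs_for_common):

    from types import SimpleNamespace

    def skips_for_sliding_window(config) -> bool:
        # Mirrors the guard added in this commit: configs that define a
        # positive `sliding_window` (e.g. Mistral-style models) trigger a
        # skip; configs lacking the attribute fall back to 0 and run.
        # Note: a config with sliding_window=None would raise TypeError on
        # the comparison, so the guard assumes an int or a missing attribute.
        return getattr(config, "sliding_window", 0) > 0

    assert skips_for_sliding_window(SimpleNamespace(sliding_window=4096))
    assert not skips_for_sliding_window(SimpleNamespace())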
