From 6739e1d261f80caec34b8c8ac7a030907a4f75a2 Mon Sep 17 00:00:00 2001
From: Poedator <24738311+poedator@users.noreply.github.com>
Date: Thu, 23 May 2024 16:22:10 +0300
Subject: [PATCH] test_custom_4d_attention_mask skip with sliding window attn
 (#30833)

---
 tests/test_modeling_common.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/tests/test_modeling_common.py b/tests/test_modeling_common.py
index 5480105054a909..20f5cf1ca2d713 100755
--- a/tests/test_modeling_common.py
+++ b/tests/test_modeling_common.py
@@ -4407,6 +4407,8 @@ def test_custom_4d_attention_mask(self):
             if not model_class._supports_static_cache:
                 self.skipTest(f"{model_class.__name__} is not guaranteed to work with custom 4D attention masks")
             config, _ = self.model_tester.prepare_config_and_inputs_for_common()
+            if getattr(config, "sliding_window", 0) > 0:
+                self.skipTest(f"{model_class.__name__} with sliding window attention is not supported by this test")
             model = model_class(config).to(device=torch_device, dtype=torch.float32)
             (