From 0abf5e8eaec361350888517f0d98501b2c5b2559 Mon Sep 17 00:00:00 2001 From: fxmarty <9808326+fxmarty@users.noreply.github.com> Date: Tue, 9 Jul 2024 08:07:46 +0200 Subject: [PATCH] FX symbolic_trace: do not test decoder_inputs_embeds (#31840) only test inputs_embeds, not decoder_inputs_embeds --- tests/test_modeling_common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_modeling_common.py b/tests/test_modeling_common.py index 7c3bc3dc9e91f0..299d99280b335b 100755 --- a/tests/test_modeling_common.py +++ b/tests/test_modeling_common.py @@ -1215,7 +1215,7 @@ def _create_and_check_torch_fx_tracing(self, config, inputs_dict, output_loss=Fa (past_mask, inputs_to_test[1]["attention_mask"]), dim=1 ) - if "inputs_embeds" in inspect.signature(model.forward).parameters: + if "inputs_embeds" in inspect.signature(model.forward).parameters and not model.config.is_encoder_decoder: inputs_to_test.append( { "inputs_embeds": torch.rand(