Commit

fix default sdpa
Cyrilvallez committed Dec 16, 2024
1 parent a28ad19 commit 9bd6c94
Showing 1 changed file with 4 additions and 4 deletions.
src/transformers/modeling_utils.py (8 changes: 4 additions & 4 deletions)
@@ -1531,10 +1531,10 @@ def _autoset_attn_implementation(
             config = cls._check_and_enable_flex_attn(config, hard_check_only=True)
         elif requested_attn_implementation in [None, "sdpa"] and not is_torch_xla_available():
            # use_flash_attention_2 takes priority over SDPA, hence SDPA treated in this elif.
-            # config = cls._check_and_enable_sdpa(
-            #     config,
-            #     hard_check_only=False if requested_attn_implementation is None else True,
-            # )
+            config = cls._check_and_enable_sdpa(
+                config,
+                hard_check_only=False if requested_attn_implementation is None else True,
+            )
 
             if (
                 torch.version.hip is not None
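For context, a minimal sketch of the code path this change restores. This is an illustrative usage example, not part of the commit; the model name is arbitrary, and it assumes a transformers version with the attn_implementation argument (4.36+) on a PyTorch build that supports SDPA:

    from transformers import AutoModelForCausalLM

    # No attn_implementation requested: _check_and_enable_sdpa runs with
    # hard_check_only=False, so SDPA is auto-selected where supported and
    # silently skipped where it is not.
    model = AutoModelForCausalLM.from_pretrained("gpt2")

    # SDPA requested explicitly: hard_check_only=True, so an unsupported
    # setup raises instead of silently falling back to eager attention.
    model = AutoModelForCausalLM.from_pretrained("gpt2", attn_implementation="sdpa")

    print(model.config._attn_implementation)  # "sdpa" on supported setups

The hard_check_only flag thus separates best-effort defaulting (requested_attn_implementation is None) from a hard failure on an explicit user request.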
