Add past_key_values to _skip_keys_device_placement for LLaVa
aismlv committed Dec 14, 2023
1 parent 3060899 commit 1be9f6c
Showing 2 changed files with 2 additions and 0 deletions.
1 change: 1 addition & 0 deletions src/transformers/models/llava/modeling_llava.py
@@ -130,6 +130,7 @@ class LlavaPreTrainedModel(PreTrainedModel):
     base_model_prefix = "model"
     supports_gradient_checkpointing = True
     _no_split_modules = ["LlavaVisionAttention"]
+    _skip_keys_device_placement = "past_key_values"
     _supports_flash_attn_2 = True
 
     def _init_weights(self, module):
1 change: 1 addition & 0 deletions src/transformers/models/vipllava/modeling_vipllava.py
@@ -137,6 +137,7 @@ class VipLlavaPreTrainedModel(PreTrainedModel):
     base_model_prefix = "model"
     supports_gradient_checkpointing = True
     _no_split_modules = ["VipLlavaVisionAttention"]
+    _skip_keys_device_placement = "past_key_values"
     _supports_flash_attn_2 = True
 
     def _init_weights(self, module):
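Context for the change: when a LLaVa or VipLLaVa checkpoint is loaded with a `device_map` that spreads layers across devices, the placement hooks normally move every forward-pass keyword argument to the device of the module about to consume it. The KV cache in `past_key_values` already holds per-layer tensors on their correct devices, so force-moving it is wasteful and can break cached generation; listing it in `_skip_keys_device_placement` tells the hooks to leave that kwarg alone. As a rough illustration only (a minimal sketch, not the transformers/accelerate implementation; the helper name `move_inputs_to_device` is made up for this example):

import torch

def move_inputs_to_device(kwargs, device, skip_keys=("past_key_values",)):
    """Move tensor kwargs to `device`, leaving keys listed in `skip_keys` untouched."""
    moved = {}
    for name, value in kwargs.items():
        if name in skip_keys or not isinstance(value, torch.Tensor):
            # e.g. past_key_values: already placed layer by layer, so keep it as-is
            moved[name] = value
        else:
            moved[name] = value.to(device)
    return moved

# Usage sketch: input_ids is moved to cuda:0, past_key_values passes through untouched.
# batch = {"input_ids": torch.ones(1, 8, dtype=torch.long), "past_key_values": cache}
# batch = move_inputs_to_device(batch, torch.device("cuda:0"))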
