Skip to content

Commit

Permalink
Fix daily tests for peft + fsdp (mosaicml#2920)
Browse files Browse the repository at this point in the history
  • Loading branch information
dakinggg authored Jan 30, 2024
1 parent 01eb20d commit f874956
Showing 1 changed file with 10 additions and 0 deletions.
10 changes: 10 additions & 0 deletions tests/models/test_hf_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -490,6 +490,16 @@ def get_lm_trainer(hf_model,
should_save_peft_only=should_save_peft_only,
)

# torch 2.0 FSDP refuses to wrap a module that mixes frozen and unfrozen
# parameters; torch 2.1+ with use_orig_params=True handles the mix fine.
# On old torch, work around this by explicitly flagging each LoRA "default"
# submodule that owns any state so FSDP wraps it on its own.
if version.parse(torch.__version__) < version.parse('2.1.0') and peft_config is not None:
    for module_name, submodule in model.named_modules():
        lowered = module_name.lower()
        # Only the LoRA adapter modules (named like "...lora_A.default...")
        # need their own wrap.
        if 'lora' not in lowered or 'default' not in lowered:
            continue
        owns_params = next(iter(submodule.parameters()), None) is not None
        owns_buffers = next(iter(submodule.buffers()), None) is not None
        if owns_params or owns_buffers:
            submodule._fsdp_wrap = True  # type: ignore

# Take the vocab size from the model config so generated token ids are
# always in range for the embedding table.
vocab_size = hf_model.config.vocab_size
# NOTE(review): presumably sizing a tiny synthetic dataset (short sequences,
# few samples) to keep the test fast — consumers are outside this view; confirm.
sequence_length = 4
size = 4
Expand Down

0 comments on commit f874956

Please sign in to comment.