Skip to content

Commit

Permalink
Move test_save_and_load_low_cpu_mem_usage to ModelTesterMixin
Browse files Browse the repository at this point in the history
  • Loading branch information
hackyon committed Feb 14, 2024
1 parent e1c153b commit e40f605
Show file tree
Hide file tree
Showing 2 changed files with 17 additions and 18 deletions.
18 changes: 0 additions & 18 deletions tests/models/bert/test_modeling_bert.py
Original file line number Diff line number Diff line change
Expand Up @@ -600,24 +600,6 @@ def test_model_from_pretrained(self):
model = BertModel.from_pretrained(model_name)
self.assertIsNotNone(model)

@slow
def test_save_and_load_low_cpu_mem_usage(self):
    """Round-trip every BERT model class through save_pretrained/from_pretrained(low_cpu_mem_usage=True).

    low_cpu_mem_usage=True initializes parameters on device=meta and materializes
    them only as checkpoint weights are loaded, so any parameter that fails to
    load (or to be tied) stays on meta — the final `.to(torch_device)` then
    raises, which is what this test guards against.
    """
    with tempfile.TemporaryDirectory() as tmpdirname:
        for model_class in self.all_model_classes:
            # inputs_dict is part of the helper's return contract but unused here;
            # only the config is needed to construct a model to serialize.
            config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
            model_to_save = model_class(config)

            model_to_save.save_pretrained(tmpdirname)

            model = model_class.from_pretrained(
                tmpdirname,
                low_cpu_mem_usage=True,
            )

            # The low_cpu_mem_usage=True causes the model params to be initialized with device=meta. If there are
            # any unloaded or untied parameters, then trying to move it to device=torch_device will throw an error.
            model.to(torch_device)

@slow
@require_torch_accelerator
def test_torchscript_device_change(self):
Expand Down
17 changes: 17 additions & 0 deletions tests/test_modeling_common.py
Original file line number Diff line number Diff line change
Expand Up @@ -435,6 +435,23 @@ class CopyClass(model_class):
max_diff = (model_slow_init.state_dict()[key] - model_fast_init.state_dict()[key]).sum().item()
self.assertLessEqual(max_diff, 1e-3, msg=f"{key} not identical")

def test_save_and_load_low_cpu_mem_usage(self):
    """Round-trip each model class through save_pretrained/from_pretrained(low_cpu_mem_usage=True).

    low_cpu_mem_usage=True initializes parameters on device=meta and materializes
    them only as checkpoint weights are loaded, so any parameter that fails to
    load (or to be tied) stays on meta — the final `.to(torch_device)` then
    raises, which is what this test guards against.
    """
    for model_class in self.all_model_classes:
        # Use a fresh directory per model class so checkpoint files from one
        # class can never leak into (or be picked up by) the next class's load.
        with tempfile.TemporaryDirectory() as tmpdirname:
            # Only the config is needed here; the inputs dict is unused.
            config, _ = self.model_tester.prepare_config_and_inputs_for_common()
            model_to_save = model_class(config)

            model_to_save.save_pretrained(tmpdirname)

            model = model_class.from_pretrained(
                tmpdirname,
                low_cpu_mem_usage=True,
            )

            # The low_cpu_mem_usage=True causes the model params to be initialized with device=meta. If there are
            # any unloaded or untied parameters, then trying to move it to device=torch_device will throw an error.
            model.to(torch_device)

def test_fast_init_context_manager(self):
# 1. Create a dummy class. Should have buffers as well? To make sure we test __init__
class MyClass(PreTrainedModel):
Expand Down

0 comments on commit e40f605

Please sign in to comment.