
Commit cb1d94a
Fix overriding of rope_scaling config (#644)
dakinggg authored Oct 3, 2023
1 parent cf015dd commit cb1d94a
Showing 2 changed files with 33 additions and 0 deletions.
llmfoundry/models/hf/hf_causal_lm.py (5 additions, 0 deletions)
@@ -109,6 +109,7 @@ def __init__(self, om_model_config: Union[DictConfig,
                 )

             attr = getattr(config, k)
+            # attempt to disallow typos in nested configs
             if isinstance(attr, Mapping):
                 extra_keys = [
                     _k for _k in v.keys() if _k not in attr.keys()
@@ -120,6 +121,10 @@ def __init__(self, om_model_config: Union[DictConfig,
                         f'Expected (a subset of) keys: {list(attr.keys())}.'
                     )
                 getattr(config, k).update(v)
+            # necessary case to allow for rope_scaling to be overridden in llama config
+            elif attr is None and isinstance(v, Mapping):
+                setattr(config, k, {})
+                getattr(config, k).update(v)
             else:
                 setattr(config, k, v)

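For context, here is a minimal standalone sketch of the merge behavior this hunk implements. It is an illustration under stated assumptions: apply_config_overrides and ToyConfig are names invented here and do not exist in hf_causal_lm.py, and the toy config only mimics LlamaConfig's rope_scaling default of None.

from collections.abc import Mapping


def apply_config_overrides(config, overrides):
    # Illustrative re-implementation of the override-merge loop above.
    for k, v in overrides.items():
        if not hasattr(config, k):
            raise ValueError(f'config has no attribute {k!r} to override.')

        attr = getattr(config, k)
        if isinstance(attr, Mapping):
            # Existing nested-dict case: only keys the config already has are
            # accepted, which catches typos in nested overrides.
            extra_keys = [_k for _k in v if _k not in attr]
            if extra_keys:
                raise ValueError(f'Unknown override keys for {k!r}: {extra_keys}')
            getattr(config, k).update(v)
        elif attr is None and isinstance(v, Mapping):
            # New case from this commit: the attribute defaults to None (as
            # rope_scaling does on LlamaConfig), so start from an empty dict
            # and merge the override into it.
            setattr(config, k, {})
            getattr(config, k).update(v)
        else:
            setattr(config, k, v)


class ToyConfig:
    # Toy stand-in for a HF config whose rope_scaling defaults to None.
    rope_scaling = None
    hidden_size = 4096


cfg = ToyConfig()
apply_config_overrides(cfg, {'rope_scaling': {'type': 'dynamic', 'factor': 0.5}})
assert cfg.rope_scaling == {'type': 'dynamic', 'factor': 0.5}

Without the new elif branch, an override for an attribute whose default is None falls through to the plain setattr, so the value may remain a raw OmegaConf mapping rather than a plain dict; the model.get_metadata() call in the new test below appears to guard against exactly that.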
tests/test_hf_config.py (28 additions, 0 deletions)
@@ -1,6 +1,7 @@
 # Copyright 2022 MosaicML LLM Foundry authors
 # SPDX-License-Identifier: Apache-2.0

+import os
 import tempfile
 from copy import deepcopy
 from pathlib import Path
@@ -139,3 +140,30 @@ def test_hf_config_override(
                 assert getattr(hf_model.config, k)[_k] == _v
         else:
             assert getattr(hf_model.config, k) == v
+
+
+@pytest.mark.skipif('HUGGING_FACE_HUB_TOKEN' not in os.environ,
+                    reason='CI does not have access to llama2')
+def test_rope_scaling_override():
+    model_cfg = {
+        'name': 'hf_causal_lm',
+        'pretrained_model_name_or_path': 'meta-llama/Llama-2-7b-hf',
+        'config_overrides': {
+            'num_hidden_layers': 2,
+            'hidden_size': 32,
+            'intermediate_size': 64,
+            'rope_scaling': {
+                'type': 'dynamic',
+                'factor': 0.5
+            }
+        },
+        'use_auth_token': True,
+        'pretrained': False,
+        'init_device': 'cpu',
+    }
+    model_cfg = om.create(model_cfg)
+
+    model = COMPOSER_MODEL_REGISTRY[model_cfg.name](model_cfg, tokenizer=None)
+    # This would error if the config isn't parsed into a proper dictionary
+    model.get_metadata()
+    assert model.config.rope_scaling == {'type': 'dynamic', 'factor': 0.5}