diff --git a/llmfoundry/utils/config_utils.py b/llmfoundry/utils/config_utils.py
index 8c9e516d36..6680154e87 100644
--- a/llmfoundry/utils/config_utils.py
+++ b/llmfoundry/utils/config_utils.py
@@ -162,5 +162,5 @@ def log_config(cfg: DictConfig) -> None:
             import mlflow
         except ImportError as e:
             raise e
-        # if mlflow.active_run():
-        #     mlflow.log_params(params=om.to_container(cfg, resolve=True))
+        if mlflow.active_run():
+            mlflow.log_params(params=om.to_container(cfg, resolve=True))
diff --git a/tests/test_hf_conversion_script.py b/tests/test_hf_conversion_script.py
index 6a569e88b6..81838655ff 100644
--- a/tests/test_hf_conversion_script.py
+++ b/tests/test_hf_conversion_script.py
@@ -189,7 +189,7 @@ def test_callback_inits_with_defaults():
 @pytest.mark.parametrize('log_to_mlflow', [True, False])
 def test_huggingface_conversion_callback(model: str, tmp_path: pathlib.Path,
                                          fsdp_state_dict_type: str,
-                                         log_to_mlflow: bool, monkeypatch):
+                                         log_to_mlflow: bool):
     delete_transformers_cache()
 
     dist.initialize_dist(get_device('gpu'))
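For context, the first hunk re-enables MLflow parameter logging in log_config. Below is a minimal, self-contained sketch of the re-enabled behavior, assuming om is the usual OmegaConf alias used in config_utils.py; the sample config values are illustrative and not taken from the repo.

# Sketch (not part of the diff): log the resolved config as MLflow params
# only when an MLflow run is active, mirroring the re-enabled lines above.
import mlflow
from omegaconf import OmegaConf as om

# Illustrative config; in llm-foundry this would be the training DictConfig.
cfg = om.create({'max_seq_len': 2048, 'model': {'name': 'mpt_causal_lm'}})

with mlflow.start_run():
    if mlflow.active_run():
        # Resolve interpolations so the logged params are plain Python values.
        mlflow.log_params(params=om.to_container(cfg, resolve=True))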