From c9c2455de2e65aaee372379e06379e11edd449b8 Mon Sep 17 00:00:00 2001
From: Eitan Turok
Date: Thu, 26 Sep 2024 20:42:51 +0000
Subject: [PATCH] Add experimental_function decorator to build_tp_strategy

Mark build_tp_strategy as experimental so callers get a warning that the
tp_strategy interface may change in future versions, and filter that
warning in the existing GPU test.
---
 llmfoundry/utils/builders.py           | 2 ++
 tests/models/utils/test_tp_strategy.py | 7 +++++++
 2 files changed, 9 insertions(+)

diff --git a/llmfoundry/utils/builders.py b/llmfoundry/utils/builders.py
index 631d25bc60..dcffbf6caa 100644
--- a/llmfoundry/utils/builders.py
+++ b/llmfoundry/utils/builders.py
@@ -38,6 +38,7 @@
 )
 from llmfoundry.utils.config_utils import to_dict_container, to_list_container
 from llmfoundry.utils.registry_utils import construct_from_registry
+from llmfoundry.utils.warnings import experimental_function
 
 log = logging.getLogger(__name__)
 
@@ -705,6 +706,7 @@ def _validate_cfg(icl_cfg: dict[str, Any]):
     return evaluators, logger_keys
 
 
+@experimental_function('tp_strategy')
 def build_tp_strategy(
     name: str,
     model: ComposerModel,
diff --git a/tests/models/utils/test_tp_strategy.py b/tests/models/utils/test_tp_strategy.py
index 073a8ff782..19bad8abfd 100644
--- a/tests/models/utils/test_tp_strategy.py
+++ b/tests/models/utils/test_tp_strategy.py
@@ -21,6 +21,9 @@
 
 
 @pytest.mark.gpu
+@pytest.mark.filterwarnings(
+    'ignore:tp_strategy is experimental and may change with future versions.'
+)
 def test_ffn_tp_strategy_layer_plan():
     # Actual layer plan from tp_strategy=fnn
     tp_config = {
@@ -128,3 +131,7 @@ def test_no_tp_with_moes():
         match='Tensor Parallelism is not currently supported for MoE models.',
     ):
         process_init_device(model_cfg, fsdp_cfg, tp_cfg)
+
+
+# if __name__ == '__main__':
+#     test_ffn_tp_strategy_layer_plan()
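
--
Note: llmfoundry.utils.warnings.experimental_function already exists in the
repo; this patch only applies it to build_tp_strategy. For reviewers
unfamiliar with the decorator, below is a minimal sketch of the pattern it
implements. The sketch is an approximation inferred from the warning text
filtered in the test above; the class name ExperimentalWarning and the exact
warnings.warn call are assumptions, not the verbatim llm-foundry code:

    import warnings
    from functools import wraps
    from typing import Any, Callable

    class ExperimentalWarning(Warning):
        """Issued when an experimental feature is used."""

    def experimental_function(feature_name: str) -> Callable:
        """Decorator factory: warn that ``feature_name`` is experimental."""

        def decorator(func: Callable) -> Callable:

            @wraps(func)
            def wrapper(*args: Any, **kwargs: Any) -> Any:
                # Emit the warning on every call to the decorated function.
                warnings.warn(
                    f'{feature_name} is experimental and may change '
                    'with future versions.',
                    category=ExperimentalWarning,
                )
                return func(*args, **kwargs)

            return wrapper

        return decorator

With this applied, each call to build_tp_strategy emits
"tp_strategy is experimental and may change with future versions.", and the
pytest.mark.filterwarnings marker added above suppresses exactly that message
so the test output stays clean.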