
Commit

add experimental_function decorator to tp_strategy
eitanturok committed Sep 26, 2024
1 parent 4e4b6b9 commit c9c2455
Showing 2 changed files with 9 additions and 0 deletions.
llmfoundry/utils/builders.py (2 additions, 0 deletions)
@@ -38,6 +38,7 @@
 )
 from llmfoundry.utils.config_utils import to_dict_container, to_list_container
 from llmfoundry.utils.registry_utils import construct_from_registry
+from llmfoundry.utils.warnings import experimental_function

 log = logging.getLogger(__name__)

@@ -705,6 +706,7 @@ def _validate_cfg(icl_cfg: dict[str, Any]):
     return evaluators, logger_keys


+@experimental_function('tp_strategy')
 def build_tp_strategy(
     name: str,
     model: ComposerModel,
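
For context, experimental_function (imported from llmfoundry/utils/warnings.py) flags a feature as unstable at call time. Below is a minimal sketch of how such a decorator is typically built, assuming it emits a custom ExperimentalWarning whose message matches the string filtered in the test diff that follows; the exact llm-foundry implementation may differ.

import functools
import warnings
from typing import Any, Callable


class ExperimentalWarning(Warning):
    """Emitted when an experimental feature is used."""


def experimental_function(feature_name: str) -> Callable:
    """Mark a function as experimental, warning callers on every invocation."""

    def decorator(func: Callable) -> Callable:

        @functools.wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> Any:
            # Message format is inferred from the filterwarnings string in
            # the test below; treat it as an assumption, not the exact source.
            warnings.warn(
                f'{feature_name} is experimental and may change with future versions.',
                ExperimentalWarning,
            )
            return func(*args, **kwargs)

        return wrapper

    return decorator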
tests/models/utils/test_tp_strategy.py (7 additions, 0 deletions)
@@ -21,6 +21,9 @@


 @pytest.mark.gpu
+@pytest.mark.filterwarnings(
+    'ignore:tp_strategy is experimental and may change with future versions.'
+)
 def test_ffn_tp_strategy_layer_plan():
     # Actual layer plan from tp_strategy=fnn
     tp_config = {
@@ -128,3 +131,7 @@ def test_no_tp_with_moes():
         match='Tensor Parallelism is not currently supported for MoE models.',
     ):
         process_init_device(model_cfg, fsdp_cfg, tp_cfg)
+
+
+# if __name__ == '__main__':
+#     test_ffn_tp_strategy_layer_plan()
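
The new filterwarnings mark suppresses the warning that the decorated build_tp_strategy now emits, so the test still passes when pytest escalates warnings to errors. A hedged, self-contained illustration of the same mechanism (the text after 'ignore:' is a regex matched against the start of the warning message; the test name here is made up for illustration):

import warnings

import pytest


@pytest.mark.filterwarnings(
    'ignore:tp_strategy is experimental and may change with future versions.',
)
def test_warning_is_suppressed():
    # Without the mark above, running pytest with '-W error' (or a
    # 'filterwarnings = error' project setting) would turn this warning
    # into a test failure.
    warnings.warn(
        'tp_strategy is experimental and may change with future versions.',
    )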
