Skip to content

Commit

Permalink
rename
Browse files Browse the repository at this point in the history
  • Loading branch information
eitanturok committed Sep 26, 2024
1 parent c9a8078 commit e6ab929
Show file tree
Hide file tree
Showing 3 changed files with 5 additions and 9 deletions.
6 changes: 3 additions & 3 deletions llmfoundry/tp/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,10 @@
# SPDX-License-Identifier: Apache-2.0

from llmfoundry.registry import tp_strategies
from llmfoundry.tp.tp_strategies import ffn_tp_strategies
from llmfoundry.tp.ffn_tp_strategy import ffn

tp_strategies.register('ffn', func=ffn_tp_strategies)
tp_strategies.register('ffn', func=ffn)

__all__ = [
'ffn_tp_strategies',
'ffn',
]
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from torch.distributed.tensor.parallel.style import ParallelStyle


def ffn_tp_strategies(model: ComposerModel) -> dict[str, ParallelStyle]:
def ffn(model: ComposerModel) -> dict[str, ParallelStyle]:
TP_LAYERS = {'ffn', 'ffn.up_proj', 'ffn.down_proj'}

# Validate that all TP_LAYERS are in model
Expand Down
6 changes: 1 addition & 5 deletions tests/models/utils/test_tp_strategies.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
@pytest.mark.filterwarnings(
'ignore:tp_strategies is experimental and may change with future versions.',
)
def test_ffn_tp_strategies_layer_plan():
def test_ffn_tp_strategy():
    # Create layer plan from ffn tp_strategy
tp_config = {
'strategy': 'ffn',
Expand Down Expand Up @@ -133,7 +133,3 @@ def test_no_tp_with_moes():
match='Tensor Parallelism is not currently supported for MoE models.',
):
process_init_device(model_cfg, fsdp_cfg, tp_cfg)


# if __name__ == '__main__':
# test_ffn_tp_strategies_layer_plan()

0 comments on commit e6ab929

Please sign in to comment.