
Commit

better function names
eitanturok committed Sep 26, 2024
1 parent e6ab929 commit 6caeea9
Showing 2 changed files with 4 additions and 4 deletions.
6 changes: 3 additions & 3 deletions llmfoundry/tp/__init__.py
@@ -2,10 +2,10 @@
 # SPDX-License-Identifier: Apache-2.0
 
 from llmfoundry.registry import tp_strategies
-from llmfoundry.tp.ffn_tp_strategy import ffn
+from llmfoundry.tp.ffn_tp_strategy import ffn_tp_strategy
 
-tp_strategies.register('ffn', func=ffn)
+tp_strategies.register('ffn', func=ffn_tp_strategy)
 
 __all__ = [
-    'ffn',
+    'ffn_tp_strategy',
 ]
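
For reference only, a minimal sketch (not part of this commit) of how the registered name resolves back to the renamed function; the `.get` accessor is assumed from the catalogue-style registry llmfoundry uses and is not shown in this diff:

from llmfoundry.registry import tp_strategies
from llmfoundry.tp import ffn_tp_strategy

# The strategy stays registered under the short name 'ffn'; only the function was renamed.
assert tp_strategies.get('ffn') is ffn_tp_strategy  # .get() is an assumed accessor
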
2 changes: 1 addition & 1 deletion llmfoundry/tp/ffn_tp_strategy.py
@@ -11,7 +11,7 @@
 from torch.distributed.tensor.parallel.style import ParallelStyle
 
 
-def ffn(model: ComposerModel) -> dict[str, ParallelStyle]:
+def ffn_tp_strategy(model: ComposerModel) -> dict[str, ParallelStyle]:
     TP_LAYERS = {'ffn', 'ffn.up_proj', 'ffn.down_proj'}
 
     # Validate that all TP_LAYERS are in model
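
As a hedged illustration (the body of ffn_tp_strategy is collapsed above, and this mapping is not taken from the commit), the dict[str, ParallelStyle] such a strategy returns could pair the sub-module names in TP_LAYERS with standard torch parallel styles, e.g. column-wise sharding for the up projection and row-wise for the down projection:

from torch.distributed.tensor.parallel import ColwiseParallel, RowwiseParallel

# Illustrative layer plan only; the actual mapping chosen by ffn_tp_strategy
# is not visible in this diff.
example_layer_plan = {
    'ffn.up_proj': ColwiseParallel(),    # shard the up projection across columns
    'ffn.down_proj': RowwiseParallel(),  # shard the down projection across rows
}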
