Commit
lint fix
dpower4 committed Dec 19, 2024
1 parent 025256e commit 0273915
Showing 3 changed files with 15 additions and 11 deletions.
8 changes: 6 additions & 2 deletions superbench/benchmarks/model_benchmarks/__init__.py
@@ -9,6 +9,10 @@
 from superbench.benchmarks.model_benchmarks.pytorch_cnn import PytorchCNN
 from superbench.benchmarks.model_benchmarks.pytorch_lstm import PytorchLSTM
 from superbench.benchmarks.model_benchmarks.megatron_gpt3 import MegatronGPT
+from superbench.benchmarks.model_benchmarks.pytorch_llama import PytorchLlama
+from superbench.benchmarks.model_benchmarks.pytorch_mixtral import PytorchMixtral

-__all__ = ['ModelBenchmark', 'PytorchBERT', 'PytorchGPT2', 'PytorchCNN', 'PytorchLSTM', 'MegatronGPT',
-           'PytorchLlama', 'PytorchMixtral']
+__all__ = [
+    'ModelBenchmark', 'PytorchBERT', 'PytorchGPT2', 'PytorchCNN', 'PytorchLSTM', 'MegatronGPT', 'PytorchLlama',
+    'PytorchMixtral'
+]
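For context, here is how these re-exports are typically consumed; a minimal sketch, assuming only that the import path above is exposed at package level via __all__ (the surrounding script is illustrative, not part of this commit):

# Illustrative only: the class added to __all__ above becomes importable
# directly from the subpackage.
from superbench.benchmarks.model_benchmarks import PytorchMixtral

print(PytorchMixtral.__name__)  # 'PytorchMixtral'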
14 changes: 8 additions & 6 deletions superbench/benchmarks/model_benchmarks/pytorch_mixtral.py
@@ -255,14 +255,16 @@ def _inference_step(self, precision):

 # Register Mixtral benchmark with 8x7b parameters.
 BenchmarkRegistry.register_benchmark(
-    'pytorch-mixtral-8x7b', PytorchMixtral, parameters='--hidden_size=4096 --num_hidden_layers=32 \
-    --num_attention_heads=32 --intermediate_size=14336 --num_key_value_heads=8 \
-    --max_position_embeddings=32768 --router_aux_loss_coef=0.02'
+    'pytorch-mixtral-8x7b',
+    PytorchMixtral,
+    parameters='--hidden_size=4096 --num_hidden_layers=32 --num_attention_heads=32 --intermediate_size=14336 \
+    --num_key_value_heads=8 --max_position_embeddings=32768 --router_aux_loss_coef=0.02'
 )

 # Register Mixtral benchmark with 8x22b parameters.
 BenchmarkRegistry.register_benchmark(
-    'pytorch-mixtral-8x22b', PytorchMixtral, parameters='--hidden_size=6144 --num_hidden_layers=56 \
-    --num_attention_heads=48 --intermediate_size=16384 --num_key_value_heads=8 \
-    --max_position_embeddings=65536 --router_aux_loss_coef=0.001'
+    'pytorch-mixtral-8x22b',
+    PytorchMixtral,
+    parameters='--hidden_size=6144 --num_hidden_layers=56 --num_attention_heads=48 --intermediate_size=16384 \
+    --num_key_value_heads=8 --max_position_embeddings=65536 --router_aux_loss_coef=0.001'
 )
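To show what the registrations above buy you, here is roughly how a registered benchmark is resolved and launched elsewhere in SuperBench; a minimal sketch assuming the usual BenchmarkRegistry.create_benchmark_context / launch_benchmark API, with illustrative parameter overrides, not code from this commit:

from superbench.benchmarks import BenchmarkRegistry, Framework, Platform

# Assumption: framework=Framework.PYTORCH maps the short name 'mixtral-8x7b'
# to the 'pytorch-mixtral-8x7b' registration above.
context = BenchmarkRegistry.create_benchmark_context(
    'mixtral-8x7b',
    platform=Platform.CUDA,
    parameters='--batch_size=1 --num_steps=8',  # illustrative overrides
    framework=Framework.PYTORCH
)
benchmark = BenchmarkRegistry.launch_benchmark(context)
print(benchmark.return_code)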
4 changes: 1 addition & 3 deletions tests/benchmarks/model_benchmarks/test_pytorch_mixtral.py
@@ -53,9 +53,7 @@ def test_pytorch_mixtral_8x7b():
     assert (benchmark.run_count == 1)
     assert (benchmark.return_code == ReturnCode.SUCCESS)

-    for metric in [
-        'fp8_e4m3_inference_step_time', 'fp8_e4m3_inference_throughput'
-    ]:
+    for metric in ['fp8_e4m3_inference_step_time', 'fp8_e4m3_inference_throughput']:
         assert (len(benchmark.raw_data[metric]) == benchmark.run_count)
         assert (len(benchmark.raw_data[metric][0]) == benchmark._args.num_steps)
         assert (len(benchmark.result[metric]) == benchmark.run_count)
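To make the shape contract in those assertions concrete, a tiny self-contained sketch with hypothetical values (one run, three steps); the numbers are invented for illustration:

# Hypothetical data mirroring the structure the test checks.
run_count, num_steps = 1, 3
raw_data = {'fp8_e4m3_inference_step_time': [[1.2, 1.1, 1.3]]}  # run_count lists of num_steps samples
result = {'fp8_e4m3_inference_step_time': [1.2]}                # one summarized value per run

assert len(raw_data['fp8_e4m3_inference_step_time']) == run_count
assert len(raw_data['fp8_e4m3_inference_step_time'][0]) == num_steps
assert len(result['fp8_e4m3_inference_step_time']) == run_count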
