
Commit

remove train unit test due to memory constraints
dpower4 committed Dec 19, 2024
1 parent 5f5a75d commit 025256e
Showing 1 changed file with 3 additions and 3 deletions:
tests/benchmarks/model_benchmarks/test_pytorch_mixtral.py
@@ -15,8 +15,8 @@ def test_pytorch_mixtral_8x7b():
     context = BenchmarkRegistry.create_benchmark_context(
         'mixtral-8x7b',
         platform=Platform.CUDA,
-        parameters='--batch_size 1 --seq_len 32 --num_warmup 1 --num_steps 2 --precision float16 \
-            --model_action train inference',
+        parameters='--batch_size 1 --seq_len 32 --num_warmup 1 --num_steps 2 --precision fp8_e4m3 \
+            --model_action inference',
         framework=Framework.PYTORCH
     )

@@ -54,7 +54,7 @@ def test_pytorch_mixtral_8x7b():
     assert (benchmark.return_code == ReturnCode.SUCCESS)
 
     for metric in [
-        'fp16_train_step_time', 'fp16_train_throughput', 'fp16_inference_step_time', 'fp16_inference_throughput'
+        'fp8_e4m3_inference_step_time', 'fp8_e4m3_inference_throughput'
     ]:
         assert (len(benchmark.raw_data[metric]) == benchmark.run_count)
         assert (len(benchmark.raw_data[metric][0]) == benchmark._args.num_steps)
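
For reference, a minimal sketch of what the updated test roughly looks like once both hunks are applied. The import line, the launch_benchmark call, and the surrounding assertions are reconstructed from the visible context and the usual SuperBench model-benchmark test pattern, not taken verbatim from the rest of the file; any decorators or extra checks elsewhere in the file are omitted.

    # Sketch of tests/benchmarks/model_benchmarks/test_pytorch_mixtral.py after this commit
    # (imports and assertions outside the shown hunks are assumptions).
    from superbench.benchmarks import BenchmarkRegistry, Platform, Framework, ReturnCode


    def test_pytorch_mixtral_8x7b():
        """Test mixtral-8x7b inference benchmark (train step removed due to memory constraints)."""
        context = BenchmarkRegistry.create_benchmark_context(
            'mixtral-8x7b',
            platform=Platform.CUDA,
            parameters='--batch_size 1 --seq_len 32 --num_warmup 1 --num_steps 2 --precision fp8_e4m3 \
                --model_action inference',
            framework=Framework.PYTORCH
        )

        # Launch the registered benchmark and make sure it ran to completion.
        benchmark = BenchmarkRegistry.launch_benchmark(context)
        assert (benchmark is not None)
        assert (benchmark.return_code == ReturnCode.SUCCESS)

        # With '--model_action inference' only, just the fp8_e4m3 inference metrics remain.
        for metric in [
            'fp8_e4m3_inference_step_time', 'fp8_e4m3_inference_throughput'
        ]:
            assert (len(benchmark.raw_data[metric]) == benchmark.run_count)
            assert (len(benchmark.raw_data[metric][0]) == benchmark._args.num_steps)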
