diff --git a/morpheus/stages/inference/triton_inference_stage.py b/morpheus/stages/inference/triton_inference_stage.py
index e5901363f9..e6c5c0fbb7 100644
--- a/morpheus/stages/inference/triton_inference_stage.py
+++ b/morpheus/stages/inference/triton_inference_stage.py
@@ -781,3 +781,13 @@ def _get_cpp_inference_node(self, builder: mrc.Builder) -> mrc.SegmentObject:
                                           self._needs_logits,
                                           self._input_mapping,
                                           self._output_mapping)
+
+    def _build_single(self, builder: mrc.Builder, input_node: mrc.SegmentObject) -> mrc.SegmentObject:
+        node = super()._build_single(builder, input_node)
+
+        # ensure that the C++ impl only uses a single progress engine
+        if (self._build_cpp_node()):
+            node.launch_options.pe_count = 1
+            node.launch_options.engines_per_pe = 1
+
+        return node
diff --git a/scripts/validation/val-run-all.sh b/scripts/validation/val-run-all.sh
index 905ee7f7e5..c85711cdbf 100755
--- a/scripts/validation/val-run-all.sh
+++ b/scripts/validation/val-run-all.sh
@@ -31,7 +31,10 @@ ensure_triton_running
 export USE_CPP=0
 
 ${SCRIPT_DIR}/abp/val-abp-all.sh
-${SCRIPT_DIR}/hammah/val-hammah-all.sh
+
+# Disabled per #1641
+# ${SCRIPT_DIR}/hammah/val-hammah-all.sh
+
 ${SCRIPT_DIR}/phishing/val-phishing-all.sh
 ${SCRIPT_DIR}/sid/val-sid-all.sh
 
@@ -39,6 +42,9 @@ ${SCRIPT_DIR}/sid/val-sid-all.sh
 export USE_CPP=1
 
 ${SCRIPT_DIR}/abp/val-abp-all.sh
-${SCRIPT_DIR}/hammah/val-hammah-all.sh
+
+# Disabled per #1641
+# ${SCRIPT_DIR}/hammah/val-hammah-all.sh
+
 ${SCRIPT_DIR}/phishing/val-phishing-all.sh
 ${SCRIPT_DIR}/sid/val-sid-all.sh
diff --git a/scripts/validation/val-run-pipeline.sh b/scripts/validation/val-run-pipeline.sh
index ee8b00075c..65641a1370 100755
--- a/scripts/validation/val-run-pipeline.sh
+++ b/scripts/validation/val-run-pipeline.sh
@@ -37,7 +37,7 @@ function run_pipeline_sid_minibert(){
     VAL_FILE=$4
     VAL_OUTPUT=$5
 
-    morpheus --log_level=DEBUG run --num_threads=1 --pipeline_batch_size=1024 --model_max_batch_size=32 --use_cpp=${USE_CPP} \
+    morpheus --log_level=DEBUG run --num_threads=$(nproc) --pipeline_batch_size=1024 --model_max_batch_size=32 --use_cpp=${USE_CPP} \
         pipeline-nlp --model_seq_length=256 \
         from-file --filename=${INPUT_FILE} \
         deserialize \
@@ -58,7 +58,7 @@ function run_pipeline_sid_bert(){
     VAL_FILE=$4
     VAL_OUTPUT=$5
 
-    morpheus --log_level=DEBUG run --num_threads=1 --pipeline_batch_size=1024 --model_max_batch_size=32 --use_cpp=${USE_CPP} \
+    morpheus --log_level=DEBUG run --num_threads=$(nproc) --pipeline_batch_size=1024 --model_max_batch_size=32 --use_cpp=${USE_CPP} \
         pipeline-nlp --model_seq_length=256 \
         from-file --filename=${INPUT_FILE} \
         deserialize \
@@ -79,7 +79,7 @@ function run_pipeline_abp_nvsmi(){
    VAL_FILE=$4
    VAL_OUTPUT=$5
 
-    morpheus --log_level=DEBUG run --num_threads=1 --pipeline_batch_size=1024 --model_max_batch_size=1024 --use_cpp=${USE_CPP} \
+    morpheus --log_level=DEBUG run --num_threads=$(nproc) --pipeline_batch_size=1024 --model_max_batch_size=1024 --use_cpp=${USE_CPP} \
         pipeline-fil --columns_file=${MORPHEUS_ROOT}/morpheus/data/columns_fil.txt \
         from-file --filename=${INPUT_FILE} \
         deserialize \
@@ -100,7 +100,7 @@ function run_pipeline_phishing_email(){
    VAL_FILE=$4
    VAL_OUTPUT=$5
 
-    morpheus --log_level=DEBUG run --num_threads=1 --pipeline_batch_size=1024 --model_max_batch_size=32 --use_cpp=${USE_CPP} \
+    morpheus --log_level=DEBUG run --num_threads=$(nproc) --pipeline_batch_size=1024 --model_max_batch_size=32 --use_cpp=${USE_CPP} \
         pipeline-nlp --model_seq_length=128 --labels_file=${MORPHEUS_ROOT}/morpheus/data/labels_phishing.txt \
         from-file --filename=${INPUT_FILE} \
         deserialize \
@@ -121,7 +121,7 @@ function run_pipeline_hammah_user123(){
    VAL_FILE=$4
    VAL_OUTPUT=$5
 
-    morpheus --log_level=DEBUG run --num_threads=1 --pipeline_batch_size=1024 --model_max_batch_size=1024 --use_cpp=${USE_CPP} \
+    morpheus --log_level=DEBUG run --num_threads=$(nproc) --pipeline_batch_size=1024 --model_max_batch_size=1024 --use_cpp=${USE_CPP} \
         pipeline-ae --columns_file="${MORPHEUS_ROOT}/morpheus/data/columns_ae_cloudtrail.txt" --userid_filter="user123" --userid_column_name="userIdentitysessionContextsessionIssueruserName" --timestamp_column_name="event_dt" \
         from-cloudtrail --input_glob="${MORPHEUS_ROOT}/models/datasets/validation-data/dfp-cloudtrail-*-input.csv" \
         train-ae --train_data_glob="${MORPHEUS_ROOT}/models/datasets/training-data/dfp-cloudtrail-*.csv" --source_stage_class=morpheus.stages.input.cloud_trail_source_stage.CloudTrailSourceStage --seed 42 \
@@ -143,7 +143,7 @@ function run_pipeline_hammah_role-g(){
    VAL_FILE=$4
    VAL_OUTPUT=$5
 
-    morpheus --log_level=DEBUG run --num_threads=1 --pipeline_batch_size=1024 --model_max_batch_size=1024 --use_cpp=${USE_CPP} \
+    morpheus --log_level=DEBUG run --num_threads=$(nproc) --pipeline_batch_size=1024 --model_max_batch_size=1024 --use_cpp=${USE_CPP} \
         pipeline-ae --columns_file="${MORPHEUS_ROOT}/morpheus/data/columns_ae_cloudtrail.txt" --userid_filter="role-g" --userid_column_name="userIdentitysessionContextsessionIssueruserName" --timestamp_column_name="event_dt" \
         from-cloudtrail --input_glob="${MORPHEUS_ROOT}/models/datasets/validation-data/dfp-cloudtrail-*-input.csv" \
         train-ae --train_data_glob="${MORPHEUS_ROOT}/models/datasets/training-data/dfp-cloudtrail-*.csv" --source_stage_class=morpheus.stages.input.cloud_trail_source_stage.CloudTrailSourceStage --seed 42 \