From f11c96fe05ad355040aca81fc9c1cee14d99ff13 Mon Sep 17 00:00:00 2001
From: Anatoliy Talamanov
Date: Tue, 24 Dec 2024 10:35:21 +0000
Subject: [PATCH] Update test_llm_pipeline_static.py

---
 tests/python_tests/test_llm_pipeline_static.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/tests/python_tests/test_llm_pipeline_static.py b/tests/python_tests/test_llm_pipeline_static.py
index 7f81a7bed1..e11c07d5da 100644
--- a/tests/python_tests/test_llm_pipeline_static.py
+++ b/tests/python_tests/test_llm_pipeline_static.py
@@ -144,10 +144,10 @@ def test_chat_generation():
     ]

     model_descr = get_chat_models_list()[0]
-    _, model_path, _, _ = read_model((model_descr[0], model_descr[1] / '_test_chat'), add_special_tokens=False)
+    model_info = read_model((model_descr[0], model_descr[1] / '_test_chat'), add_special_tokens=False)

-    chat_history_stateful = generate_chat_history(model_path, "CPU", { }, questions)
-    chat_history_static = generate_chat_history(model_path, "NPU", common_config, questions)
+    chat_history_stateful = generate_chat_history(model_info[1], "CPU", { }, questions)
+    chat_history_static = generate_chat_history(model_info[1], "NPU", common_config, questions)

     print('npu chat: \n{chat_history_static}\n')
     print('cpu chat: \n{chat_history_stateful}')
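
For context, the change keeps the whole tuple returned by read_model and indexes into it rather than unpacking only the path. A minimal sketch of the equivalence, assuming (as the original unpacking implies, and not confirmed beyond this diff) that read_model returns a 4-tuple whose second element is the converted model path:

    # before: discard everything except the path
    _, model_path, _, _ = read_model(model_and_path, add_special_tokens=False)

    # after: keep the full tuple and index the path where it is needed
    model_info = read_model(model_and_path, add_special_tokens=False)
    assert model_info[1] == model_path  # index 1 is assumed to hold the model path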