From 91a08710dfaea7ed7135b9367502242f78c18347 Mon Sep 17 00:00:00 2001 From: TolyaTalamanov Date: Mon, 15 Jul 2024 14:16:21 +0000 Subject: [PATCH] Extend chat_sample to accept device string --- samples/cpp/chat_sample/chat_sample.cpp | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/samples/cpp/chat_sample/chat_sample.cpp b/samples/cpp/chat_sample/chat_sample.cpp index d9d9c2b2de..3fafb0b75b 100644 --- a/samples/cpp/chat_sample/chat_sample.cpp +++ b/samples/cpp/chat_sample/chat_sample.cpp @@ -4,15 +4,15 @@ #include "openvino/genai/llm_pipeline.hpp" int main(int argc, char* argv[]) try { - if (2 != argc) { - throw std::runtime_error(std::string{"Usage: "} + argv[0] + " <MODEL_DIR>"); + if (2 != argc && 3 != argc) { + throw std::runtime_error(std::string{"Usage: "} + argv[0] + " <MODEL_DIR> <DEVICE>"); } std::string prompt; std::string model_path = argv[1]; + std::string device = argc == 3 ? argv[2] : "CPU"; + + ov::genai::LLMPipeline pipe(model_path, device); - std::string device = "CPU"; // GPU can be used as well - ov::genai::LLMPipeline pipe(model_path, "CPU"); - ov::genai::GenerationConfig config; config.max_new_tokens = 100; std::function<bool(std::string)> streamer = [](std::string word) {