Skip to content

Commit

Permalink
Extend chat_sample to accept device string
Browse files Browse the repository at this point in the history
  • Loading branch information
TolyaTalamanov committed Jul 15, 2024
1 parent 361aa05 commit 91a0871
Showing 1 changed file with 5 additions and 5 deletions.
10 changes: 5 additions & 5 deletions samples/cpp/chat_sample/chat_sample.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4,15 +4,15 @@
#include "openvino/genai/llm_pipeline.hpp"

// Entry point of the chat sample (function-try-block: the catch handler is
// below the visible excerpt).  NOTE(review): this span is a *diff* — removed
// (pre-change) and added (post-change) lines are interleaved with no +/-
// markers; comments below label which is which.
int main(int argc, char* argv[]) try {
// --- removed (old version): accepted only <MODEL_DIR>, no device argument ---
if (2 != argc) {
throw std::runtime_error(std::string{"Usage: "} + argv[0] + " <MODEL_DIR>");
// --- added (new version): accepts optional <DEVICE> as argv[2] ---
// BUG(review): `2 != argc || 3 != argc` is ALWAYS true — argc cannot equal
// both 2 and 3, so one of the two inequalities always holds and the sample
// unconditionally throws the usage error.  The intended check is
// `2 != argc && 3 != argc` (i.e. reject only when argc is neither 2 nor 3).
if (2 != argc || 3 != argc) {
throw std::runtime_error(std::string{"Usage: "} + argv[0] + " <MODEL_DIR> <DEVICE>");
}
std::string prompt;
std::string model_path = argv[1];
// added: device comes from argv[2] when given, defaulting to "CPU".
std::string device = argc == 3 ? argv[2] : "CPU";

// added: pipeline now honors the user-selected device string.
ov::genai::LLMPipeline pipe(model_path, device);

// --- removed (old version): device was hard-coded to "CPU" and the local
// `device` variable below was never actually passed to the pipeline ---
std::string device = "CPU"; // GPU can be used as well
ov::genai::LLMPipeline pipe(model_path, "CPU");

ov::genai::GenerationConfig config;
config.max_new_tokens = 100;
// Streamer callback definition continues past this excerpt ("Expand Down").
std::function<bool(std::string)> streamer = [](std::string word) {
Expand Down

0 comments on commit 91a0871

Please sign in to comment.