From 998d208e149daa1a9aebe90614d0f488fac13b3f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sigbj=C3=B8rn=20Skj=C3=A6ret?=
Date: Thu, 30 May 2024 16:33:06 +0200
Subject: [PATCH] More checks before assuming FIM tokens for Llama arch

---
 llama.cpp | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama.cpp b/llama.cpp
index 2025e45582b49..f0f1555f1cf2f 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -4330,7 +4330,7 @@ static void llm_load_vocab(
             [](unsigned char c){ return std::tolower(c); });

         if (gen_name.find("code") != std::string::npos) {
-            if (model.arch == LLM_ARCH_LLAMA) {
+            if (model.arch == LLM_ARCH_LLAMA && gen_name.find("llama") != std::string::npos) {
                 vocab.special_prefix_id = 32007;
                 vocab.special_suffix_id = 32008;
                 vocab.special_middle_id = 32009;
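
Note: the following is a minimal, self-contained sketch (not part of the patch or of llama.cpp itself) of the guard this one-line change introduces: the hard-coded CodeLlama FIM token IDs (32007-32009) are only assumed when the model uses the LLAMA architecture *and* its general.name metadata also contains "llama". The function name is_codellama_fim_vocab and the trimmed llm_arch enum are hypothetical, for illustration only.

// Standalone sketch of the extra check added by this patch (hypothetical helper,
// not the llm_load_vocab implementation).
#include <algorithm>
#include <cctype>
#include <iostream>
#include <string>

enum llm_arch { LLM_ARCH_LLAMA, LLM_ARCH_GEMMA, LLM_ARCH_OTHER };

static bool is_codellama_fim_vocab(llm_arch arch, std::string gen_name) {
    // lowercase the general.name metadata, as llm_load_vocab does
    std::transform(gen_name.begin(), gen_name.end(), gen_name.begin(),
        [](unsigned char c){ return std::tolower(c); });

    // both substrings must be present before assuming the CodeLlama FIM IDs
    return gen_name.find("code")  != std::string::npos &&
           arch == LLM_ARCH_LLAMA &&
           gen_name.find("llama") != std::string::npos;
}

int main() {
    // a non-Llama code model converted with the LLAMA arch no longer matches
    std::cout << is_codellama_fim_vocab(LLM_ARCH_LLAMA, "DeepSeek-Coder") << "\n"; // 0
    // CodeLlama still matches
    std::cout << is_codellama_fim_vocab(LLM_ARCH_LLAMA, "CodeLlama-7B")   << "\n"; // 1
}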