
llama : make general.name optional (#6709)

Georgi Gerganov, 1 year ago
Parent commit: 532c1737a1
1 changed file with 3 additions and 1 deletion:
  1. llama.cpp (+3, -1)

llama.cpp  +3 -1

@@ -4136,9 +4136,11 @@ static void llm_load_vocab(
             // CodeGemma (LLM_ARCH_GEMMA). This can potentially be removed once
             // new versions of these models have been published.
             std::string gen_name;
-            ml.get_key(LLM_KV_GENERAL_NAME, gen_name);
+            ml.get_key(LLM_KV_GENERAL_NAME, gen_name, false);
+
             std::transform(gen_name.begin(), gen_name.end(), gen_name.begin(),
                 [](unsigned char c){ return std::tolower(c); });
+
             if (gen_name.find("code") != std::string::npos) {
                 if (model.arch == LLM_ARCH_LLAMA) {
                     vocab.special_prefix_id = 32007;
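
Note on the change: the third argument to `ml.get_key` is a "required" flag, so passing `false` means a GGUF file without a `general.name` metadata key no longer aborts loading; `gen_name` simply stays empty and the CodeLlama/CodeGemma heuristic above is skipped. Below is a minimal sketch of that optional-key pattern, assuming a missing optional key leaves the output untouched; `fake_loader`/`get_key` here are simplified stand-ins, not the actual llama.cpp loader API.

```cpp
#include <algorithm>
#include <cctype>
#include <iostream>
#include <map>
#include <stdexcept>
#include <string>

// Simplified stand-in for the metadata lookup used in llm_load_vocab.
struct fake_loader {
    std::map<std::string, std::string> kv;

    // required = true: a missing key is a hard error.
    // required = false: leave `out` unchanged and report failure via the return value.
    bool get_key(const std::string & key, std::string & out, bool required = true) const {
        auto it = kv.find(key);
        if (it == kv.end()) {
            if (required) {
                throw std::runtime_error("key not found in model file: " + key);
            }
            return false; // optional key: caller keeps its default value
        }
        out = it->second;
        return true;
    }
};

int main() {
    fake_loader ml;          // no "general.name" entry at all
    std::string gen_name;

    // Before the patch (required defaulting to true) this would throw and abort loading.
    ml.get_key("general.name", gen_name, false);

    // gen_name is still empty, so lowercasing and the "code" check are harmless no-ops.
    std::transform(gen_name.begin(), gen_name.end(), gen_name.begin(),
        [](unsigned char c){ return std::tolower(c); });

    std::cout << (gen_name.find("code") != std::string::npos ? "code model" : "no code hint") << '\n';
    return 0;
}
```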