
llama : use llm_build_granite for minicpm (#13911)

zhangkaihuo 7 months ago
Parent
Commit
2c90da4c7e
1 changed file with 1 addition and 1 deletion
  1. +1 -1 src/llama-model.cpp

+ 1 - 1
src/llama-model.cpp

@@ -13260,7 +13260,6 @@ llm_graph_result_ptr llama_model::build_graph(
 
     switch (arch) {
         case LLM_ARCH_LLAMA:
-        case LLM_ARCH_MINICPM:
             {
                 llm = std::make_unique<llm_build_llama>(*this, params, gf);
             } break;
@@ -13501,6 +13500,7 @@ llm_graph_result_ptr llama_model::build_graph(
             } break;
         case LLM_ARCH_GRANITE:
         case LLM_ARCH_GRANITE_MOE:
+        case LLM_ARCH_MINICPM:
             {
                 llm = std::make_unique<llm_build_granite>(*this, params, gf);
             } break;
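
For context, the sketch below is a minimal, self-contained illustration of the dispatch this diff changes: after the commit, LLM_ARCH_MINICPM falls through to the Granite graph builder instead of the Llama one. Only the LLM_ARCH_* values and the llm_build_llama / llm_build_granite class names come from the diff above; the placeholder types and the make_builder helper are hypothetical simplifications, not llama.cpp's actual build_graph signature or constructor arguments.

// Minimal sketch (not llama.cpp's actual code): everything except the
// LLM_ARCH_* values and the two builder class names is a hypothetical stand-in.
#include <memory>
#include <stdexcept>

enum llm_arch {
    LLM_ARCH_LLAMA,
    LLM_ARCH_GRANITE,
    LLM_ARCH_GRANITE_MOE,
    LLM_ARCH_MINICPM,
};

struct llm_builder {                       // hypothetical base class
    virtual ~llm_builder() = default;
};
struct llm_build_llama   : llm_builder {};
struct llm_build_granite : llm_builder {};

// Dispatch mirroring the switch in build_graph(): MiniCPM now shares the
// Granite graph builder.
std::unique_ptr<llm_builder> make_builder(llm_arch arch) {
    switch (arch) {
        case LLM_ARCH_LLAMA:
            return std::make_unique<llm_build_llama>();
        case LLM_ARCH_GRANITE:
        case LLM_ARCH_GRANITE_MOE:
        case LLM_ARCH_MINICPM:             // moved here by this commit
            return std::make_unique<llm_build_granite>();
    }
    throw std::runtime_error("unknown architecture");
}

int main() {
    auto llm = make_builder(LLM_ARCH_MINICPM);   // now an llm_build_granite
    return llm ? 0 : 1;
}

Presumably the point of the change is that MiniCPM can reuse the Granite builder's graph construction rather than being special-cased inside the Llama path, but the commit message itself only states the rerouting.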