@@ -1369,7 +1369,7 @@ void llama_model::load_hparams(llama_model_loader & ml) {
                     // that have no expert_gating_func model parameter set
                     hparams.expert_gating_func = LLAMA_EXPERT_GATING_FUNC_TYPE_SOFTMAX;
                 }
-                ml.get_key(LLM_KV_ROPE_SCALING_YARN_LOG_MUL, hparams.rope_yarn_log_mul);
+                ml.get_key(LLM_KV_ROPE_SCALING_YARN_LOG_MUL, hparams.rope_yarn_log_mul, false);
 
                 switch (hparams.n_layer) {
                     case 27: type = LLM_TYPE_16B; break;
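
The only functional change in the hunk is the third argument to ml.get_key. A minimal sketch of the call site, assuming llama_model_loader's usual get_key(kid, result, required = true) overload, shows why passing false matters:

    // With required == false, a GGUF that lacks rope_scaling.yarn_log_multiplier
    // is tolerated: get_key() simply returns false and hparams.rope_yarn_log_mul
    // keeps its default value, instead of load_hparams() throwing and the model
    // being rejected at load time.
    ml.get_key(LLM_KV_ROPE_SCALING_YARN_LOG_MUL, hparams.rope_yarn_log_mul, /*required =*/ false);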