|
|
@@ -35,6 +35,7 @@ static const std::map<std::string, llm_chat_template> LLM_CHAT_TEMPLATES = {
|
|
|
{ "mistral-v3-tekken", LLM_CHAT_TEMPLATE_MISTRAL_V3_TEKKEN },
|
|
|
{ "mistral-v7", LLM_CHAT_TEMPLATE_MISTRAL_V7 },
|
|
|
{ "phi3", LLM_CHAT_TEMPLATE_PHI_3 },
|
|
|
+ { "phi4", LLM_CHAT_TEMPLATE_PHI_4 },
|
|
|
{ "falcon3", LLM_CHAT_TEMPLATE_FALCON_3 },
|
|
|
{ "zephyr", LLM_CHAT_TEMPLATE_ZEPHYR },
|
|
|
{ "monarch", LLM_CHAT_TEMPLATE_MONARCH },
|
|
|
@@ -73,7 +74,9 @@ llm_chat_template llm_chat_detect_template(const std::string & tmpl) {
|
|
|
return tmpl.find(haystack) != std::string::npos;
|
|
|
};
|
|
|
if (tmpl_contains("<|im_start|>")) {
|
|
|
- return LLM_CHAT_TEMPLATE_CHATML;
|
|
|
+ return tmpl_contains("<|im_sep|>")
|
|
|
+ ? LLM_CHAT_TEMPLATE_PHI_4
|
|
|
+ : LLM_CHAT_TEMPLATE_CHATML;
|
|
|
} else if (tmpl.find("mistral") == 0 || tmpl_contains("[INST]")) {
|
|
|
if (tmpl_contains("[SYSTEM_PROMPT]")) {
|
|
|
return LLM_CHAT_TEMPLATE_MISTRAL_V7;
|
|
|
@@ -269,6 +272,14 @@ int32_t llm_chat_apply_template(
|
|
|
if (add_ass) {
|
|
|
ss << "<|assistant|>\n";
|
|
|
}
|
|
|
+ } else if (tmpl == LLM_CHAT_TEMPLATE_PHI_4) {
|
|
|
+ // Phi-4 variant of ChatML: role and content are joined by <|im_sep|> (no newlines)
|
|
|
+ for (auto message : chat) {
|
|
|
+ ss << "<|im_start|>" << message->role << "<|im_sep|>" << message->content << "<|im_end|>";
|
|
|
+ }
|
|
|
+ if (add_ass) {
|
|
|
+ ss << "<|im_start|>assistant<|im_sep|>";
|
|
|
+ }
|
|
|
} else if (tmpl == LLM_CHAT_TEMPLATE_FALCON_3) {
|
|
|
// Falcon 3
|
|
|
for (auto message : chat) {
|