@@ -1695,6 +1695,7 @@ enum llm_chat_template {
16951695 LLM_CHAT_TEMPLATE_BITNET,
16961696 LLM_CHAT_TEMPLATE_DOTS1,
16971697 LLM_CHAT_TEMPLATE_HUNYUAN_MOE,
1698+ LLM_CHAT_TEMPLATE_KIMI_K2,
16981699 LLM_CHAT_TEMPLATE_UNKNOWN,
16991700};
17001701
@@ -1733,6 +1734,7 @@ static const std::map<std::string, llm_chat_template> LLM_CHAT_TEMPLATES = {
17331734 { "megrez", LLM_CHAT_TEMPLATE_MEGREZ },
17341735 { "llama4", LLM_CHAT_TEMPLATE_LLAMA4 },
17351736 { "hunyuan-moe", LLM_CHAT_TEMPLATE_HUNYUAN_MOE },
1737+ { "kimi-k2", LLM_CHAT_TEMPLATE_KIMI_K2 },
17361738 { "bitnet", LLM_CHAT_TEMPLATE_BITNET },
17371739};
17381740
@@ -23255,6 +23257,8 @@ static llm_chat_template llama_chat_detect_template(const std::string & tmpl) {
2325523257 return LLM_CHAT_TEMPLATE_DOTS1;
2325623258 } else if (tmpl_contains("<|startoftext|>") && tmpl_contains("<|extra_4|>")) {
2325723259 return LLM_CHAT_TEMPLATE_HUNYUAN_MOE;
23260+ } else if (tmpl_contains("<|im_middle|>") && tmpl_contains("<|im_end|>")) {
23261+ return LLM_CHAT_TEMPLATE_KIMI_K2;
2325823262 }
2325923263 return LLM_CHAT_TEMPLATE_UNKNOWN;
2326023264}
@@ -23700,6 +23704,18 @@ static int32_t llama_chat_apply_template_internal(
2370023704 ss << "<|startoftext|>" << message->content << "<|extra_0|>";
2370123705 }
2370223706 }
23707+ } else if (tmpl == LLM_CHAT_TEMPLATE_KIMI_K2) {
23708+ // moonshotai/Kimi-K2-Instruct: each turn is "<|im_{role}|>{role}<|im_middle|>{content}<|im_end|>"
23709+ for (auto message : chat) {
23710+ std::string role(message->role);
23711+ if (role == "system") {
23712+ ss << "<|im_system|>system<|im_middle|>" << message->content << "<|im_end|>";
23713+ } else if (role == "user") { // BUG FIX: user messages take the user header (was swapped with assistant)
23714+ ss << "<|im_user|>user<|im_middle|>" << message->content << "<|im_end|>";
23715+ } else { // assistant (and any other role) takes the assistant header
23716+ ss << "<|im_assistant|>assistant<|im_middle|>" << message->content << "<|im_end|>";
23717+ }
23718+ }
2370323719 } else {
2370423720 // template not supported
2370523721 return -1;
0 commit comments