test-chat.cpp

// Tests chat handling, including grammar generation and parsing for tool calling, for various templates.
//
// Also acts as a CLI to generate a Markdown summary of the formats of Jinja templates,
// e.g. given Minja (http://github.com/google/minja) checked out in the parent dir:
//
//   cmake -B build && cmake --build build --parallel && ./build/bin/test-chat ../minja/build/tests/*.jinja 2>/dev/null
//
#include <algorithm>
#include <fstream>
#include <iostream>
#include <json.hpp>
#include <memory>
#include <regex>
#include <stdexcept>
#include <string>
#include <vector>
#include "chat.h"
#include "../src/unicode.h"
#include "../src/llama-grammar.h"
using json = nlohmann::ordered_json;
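// Pretty-printers used to show expected vs. actual values when assert_equals fails.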
  16. static std::ostream & operator<<(std::ostream & os, const common_chat_msg_diff & diff) {
  17. // os << "reasoning_content_delta: " << diff.reasoning_content_delta << '\n';
  18. os << "{ content_delta: " << diff.content_delta << "; ";
  19. if (diff.tool_call_index != std::string::npos) {
  20. os << "tool_call_index: " << diff.tool_call_index << "; ";
  21. os << "tool_call_delta.name: " << diff.tool_call_delta.name << "; ";
  22. os << "tool_call_delta.id: " << diff.tool_call_delta.id << "; ";
  23. os << "tool_call_delta.arguments: " << diff.tool_call_delta.arguments << "; ";
  24. }
  25. os << "}";
  26. return os;
  27. }
  28. // operator<< for vector<common_chat_msg_diff>:
  29. static std::ostream & operator<<(std::ostream & os, const std::vector<common_chat_msg_diff> & diffs) {
  30. os << "[\n";
  31. for (const auto & diff : diffs) {
  32. os << " " << diff << ",\n";
  33. }
  34. os << "]";
  35. return os;
  36. }
  37. static std::ostream & operator<<(std::ostream & os, const common_chat_msg & msg) {
  38. os << "{ role: " << msg.role << "; ";
  39. os << "content: " << msg.content << "; ";
  40. os << "content_parts: [\n";
  41. for (const auto & part : msg.content_parts) {
  42. os << " { type: " << part.type << "; text: " << part.text << " },\n";
  43. }
  44. os << "]; ";
  45. os << "reasoning_content: " << msg.reasoning_content << "; ";
  46. os << "tool_calls: [\n";
  47. for (const auto & tool_call : msg.tool_calls) {
  48. os << " { name: " << tool_call.name << "; arguments: " << tool_call.arguments << "; id: " << tool_call.id << " },\n";
  49. }
  50. os << "]";
  51. os << "}";
  52. return os;
  53. }
  54. template <class T> static bool equals(const T & expected, const T & actual) {
  55. return expected == actual;
  56. }
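// Re-serializes each tool call's arguments as canonical JSON so formatting-only differences don't fail the comparison.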
  57. static common_chat_msg normalize(const common_chat_msg & msg) {
  58. common_chat_msg normalized = msg;
  59. for (auto & tool_call : normalized.tool_calls) {
  60. try {
  61. tool_call.arguments = json::parse(tool_call.arguments).dump();
  62. } catch (const std::exception &) {
  63. // Do nothing
  64. }
  65. }
  66. return normalized;
  67. }
  68. template <>
  69. bool equals(const common_chat_msg & expected, const common_chat_msg & actual) {
  70. return normalize(expected) == normalize(actual);
  71. }
  72. template <class T> static void assert_equals(const T & expected, const T & actual) {
  73. if (!equals(expected, actual)) {
  74. std::cerr << "Expected: " << expected << std::endl;
  75. std::cerr << "Actual: " << actual << std::endl;
  76. std::cerr << std::flush;
  77. throw std::runtime_error("Test failed");
  78. }
  79. }
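// Reads a whole file into a string, also trying the parent directory if the path isn't found.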
  80. static std::string read_file(const std::string & path) {
  81. std::cerr << "# Reading: " << path << '\n' << std::flush;
  82. std::ifstream fs(path, std::ios_base::binary);
  83. if (!fs.is_open()) {
  84. fs = std::ifstream("../" + path, std::ios_base::binary);
  85. if (!fs.is_open()) {
  86. throw std::runtime_error("Failed to open file: " + path);
  87. }
  88. }
  89. fs.seekg(0, std::ios_base::end);
  90. auto size = fs.tellg();
  91. fs.seekg(0);
  92. std::string out;
  93. out.resize(static_cast<size_t>(size));
  94. fs.read(out.data(), static_cast<std::streamsize>(size));
  95. return out;
  96. }
  97. static common_chat_templates_ptr read_templates(const std::string & path) {
  98. return common_chat_templates_ptr(common_chat_templates_init(/* model= */ nullptr, read_file(path)));
  99. }
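// Builds a llama_grammar from a GBNF grammar string.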
  100. static std::unique_ptr<llama_grammar> build_grammar(const std::string & grammar_str) {
  101. return std::unique_ptr<llama_grammar>(
  102. llama_grammar_init_impl(nullptr, grammar_str.c_str(), "root", false, nullptr, 0, nullptr, 0));
  103. }
  104. // TODO: extract to common helper (copied from test-grammar-integration.cpp)
  105. static bool match_string(const std::string & input, llama_grammar * grammar) {
  106. const auto cpts = unicode_cpts_from_utf8(input);
  107. auto & stacks_cur = llama_grammar_get_stacks(grammar);
  108. for (const auto & cpt : cpts) {
  109. llama_grammar_accept(grammar, cpt);
  110. if (stacks_cur.empty()) {
  111. // no stacks means that the grammar failed to match at this point
  112. return false;
  113. }
  114. }
  115. if (std::any_of(stacks_cur.begin(), stacks_cur.end(), [](const auto & stack) { return stack.empty(); })) {
  116. // An empty stack means that the grammar has been completed
  117. return true;
  118. }
  119. return false;
  120. }
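// Re-serializes a JSON string into canonical form; returns the input unchanged if it fails to parse.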
  121. static std::string renormalize_json(const std::string & json_str) {
  122. try {
  123. auto json_obj = json::parse(json_str);
  124. return json_obj.dump();
  125. } catch (const std::exception & e) {
  126. std::cerr << "Failed to parse JSON: " << e.what() << '\n';
  127. return json_str;
  128. }
  129. }
  130. static void assert_msg_equals(const common_chat_msg & expected, const common_chat_msg & actual) {
  131. assert_equals(expected.role, actual.role);
  132. assert_equals(expected.content, actual.content);
  133. assert_equals(expected.content_parts.size(), actual.content_parts.size());
  134. for (size_t i = 0; i < expected.content_parts.size(); i++) {
  135. const auto & expected_part = expected.content_parts[i];
  136. const auto & actual_part = actual.content_parts[i];
  137. assert_equals(expected_part.type, actual_part.type);
  138. assert_equals(expected_part.text, actual_part.text);
  139. }
  140. assert_equals(expected.reasoning_content, actual.reasoning_content);
  141. assert_equals(expected.tool_calls.size(), actual.tool_calls.size());
  142. for (size_t i = 0; i < expected.tool_calls.size(); i++) {
  143. const auto & expected_tool_call = expected.tool_calls[i];
  144. const auto & actual_tool_call = actual.tool_calls[i];
  145. assert_equals(expected_tool_call.name, actual_tool_call.name);
  146. assert_equals(renormalize_json(expected_tool_call.arguments), renormalize_json(actual_tool_call.arguments));
  147. assert_equals(expected_tool_call.id, actual_tool_call.id);
  148. }
  149. }
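// Tool definitions shared by the tests below.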
  150. common_chat_tool special_function_tool {
  151. /* .name = */ "special_function",
  152. /* .description = */ "I'm special",
  153. /* .parameters = */ R"({
  154. "type": "object",
  155. "properties": {
  156. "arg1": {
  157. "type": "integer",
  158. "description": "The arg."
  159. }
  160. },
  161. "required": ["arg1"]
  162. })",
  163. };
  164. common_chat_tool python_tool {
  165. /* .name = */ "python",
  166. /* .description = */ "an ipython interpreter",
  167. /* .parameters = */ R"({
  168. "type": "object",
  169. "properties": {
  170. "code": {
  171. "type": "string",
  172. "description": "Python code to execute."
  173. }
  174. },
  175. "required": ["code"]
  176. })",
  177. };
  178. common_chat_tool code_interpreter_tool {
  179. /* .name = */ "code_interpreter",
  180. /* .description = */ "an ipython interpreter",
  181. /* .parameters = */ R"({
  182. "type": "object",
  183. "properties": {
  184. "code": {
  185. "type": "string",
  186. "description": "Python code to execute."
  187. }
  188. },
  189. "required": ["code"]
  190. })",
  191. };
  192. std::vector<common_chat_tool> tools { special_function_tool, python_tool };
  193. std::vector<common_chat_tool> llama_3_1_tools { special_function_tool, code_interpreter_tool };
  194. struct delta_data {
  195. std::string delta;
  196. common_chat_params params;
  197. };
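// Applies the template to the user message alone (with a generation prompt), then with the assistant test message
// appended, and returns the suffix that the assistant message adds to the prompt, with any end tokens stripped.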
  198. static delta_data init_delta(const struct common_chat_templates * tmpls, const std::vector<std::string> & end_tokens,
  199. const common_chat_msg & user_message,
  200. const common_chat_msg & delta_message,
  201. const std::vector<common_chat_tool> & tools,
  202. const common_chat_tool_choice & tool_choice) {
  203. common_chat_templates_inputs inputs;
  204. inputs.parallel_tool_calls = true;
  205. inputs.messages.push_back(user_message);
  206. inputs.tools = tools;
  207. inputs.tool_choice = tool_choice;
  208. auto params_prefix = common_chat_templates_apply(tmpls, inputs);
  209. inputs.messages.push_back(delta_message);
  210. inputs.add_generation_prompt = false;
  211. auto params_full = common_chat_templates_apply(tmpls, inputs);
  212. std::string prefix = params_prefix.prompt;
  213. std::string full = params_full.prompt;
  214. if (full == prefix) {
  215. throw std::runtime_error("Full message is the same as the prefix");
  216. }
  217. size_t common_prefix_length = 0;
  218. for (size_t i = 0; i < prefix.size() && i < full.size(); ++i) {
  219. if (prefix[i] != full[i]) {
  220. break;
  221. }
  222. if (prefix[i] == '<') {
// DeepSeek R1's template (as of 20250209) adds a trailing <think> if add_generation_prompt,
// but it removes thinking tags for past messages.
// The prefix and full strings diverge at <think> vs. <|tool▁calls▁begin|>, so we avoid consuming the leading <.
  226. continue;
  227. }
  228. common_prefix_length = i + 1;
  229. }
  230. auto delta = full.substr(common_prefix_length);
  231. // Strip end tokens
  232. for (const auto & end_token : end_tokens) {
  233. // rfind to find the last occurrence
  234. auto pos = delta.rfind(end_token);
  235. if (pos != std::string::npos) {
  236. delta = delta.substr(0, pos);
  237. break;
  238. }
  239. }
  240. return { delta, params_full };
  241. }
/*
  Applies the template to one user message with add_generation_prompt=true, then to the user + test messages
  with add_generation_prompt=false, takes the diff of the two prompts, strips any end tokens, parses the
  result with the detected format, checks that the parsed message equals test_message, and validates the
  delta against the generated grammar (and its lazy triggers) when one is produced.
*/
  247. static void test_templates(const struct common_chat_templates * tmpls, const std::vector<std::string> & end_tokens,
  248. const common_chat_msg & test_message,
  249. const std::vector<common_chat_tool> & tools = {},
  250. const std::string & expected_delta = "",
  251. bool expect_grammar_triggered = true,
  252. bool test_grammar_if_triggered = true,
  253. common_reasoning_format reasoning_format = COMMON_REASONING_FORMAT_NONE) {
  254. common_chat_msg user_message;
  255. user_message.role = "user";
  256. user_message.content = "Hello, world!";
  257. for (const auto & tool_choice : std::vector<common_chat_tool_choice> {COMMON_CHAT_TOOL_CHOICE_AUTO, COMMON_CHAT_TOOL_CHOICE_REQUIRED}) {
  258. auto data = init_delta(tmpls, end_tokens, user_message, test_message, tools, tool_choice);
  259. if (!expected_delta.empty()) {
  260. assert_equals(expected_delta, data.delta);
  261. }
  262. if (expect_grammar_triggered) {
  263. common_chat_syntax syntax;
  264. syntax.format = data.params.format;
  265. syntax.reasoning_format = reasoning_format;
  266. const auto msg = common_chat_parse(data.delta, /* is_partial= */ false, syntax);
  267. assert_msg_equals(test_message, msg);
  268. }
  269. if (!test_message.tool_calls.empty()) {
  270. GGML_ASSERT(!data.params.grammar.empty());
  271. }
  272. if (!data.params.grammar.empty()) {
  273. auto grammar = build_grammar(data.params.grammar);
  274. if (!grammar) {
  275. throw std::runtime_error("Failed to build grammar");
  276. }
  277. auto earliest_trigger_pos = std::string::npos;
  278. auto constrained = data.delta;
  279. for (const auto & trigger : data.params.grammar_triggers) {
  280. size_t pos = std::string::npos;
  281. std::smatch match;
  282. switch (trigger.type) {
  283. case COMMON_GRAMMAR_TRIGGER_TYPE_WORD:
  284. {
  285. const auto & word = trigger.value;
  286. pos = constrained.find(word);
  287. break;
  288. }
  289. case COMMON_GRAMMAR_TRIGGER_TYPE_PATTERN:
  290. {
  291. const auto & pattern = trigger.value;
  292. if (std::regex_search(constrained, match, std::regex(pattern))) {
  293. pos = match.position(1);
  294. }
  295. break;
  296. }
  297. case COMMON_GRAMMAR_TRIGGER_TYPE_PATTERN_FULL:
  298. {
  299. const auto & pattern = trigger.value;
  300. if (std::regex_match(constrained, match, std::regex(pattern))) {
  301. auto mpos = std::string::npos;
  302. for (size_t i = 1; i < match.size(); ++i) {
  303. if (match[i].length() > 0) {
  304. mpos = match.position(i);
  305. break;
  306. }
  307. }
  308. if (mpos == std::string::npos) {
  309. mpos = match.position(0);
  310. }
  311. pos = mpos;
  312. }
  313. break;
  314. }
  315. default:
  316. throw std::runtime_error("Unknown trigger type");
  317. }
  318. if (pos == std::string::npos) {
  319. continue;
  320. }
  321. if (earliest_trigger_pos == std::string::npos || pos < earliest_trigger_pos) {
  322. earliest_trigger_pos = pos;
  323. }
  324. }
  325. auto grammar_triggered = false;
  326. if (earliest_trigger_pos != std::string::npos) {
  327. constrained = constrained.substr(earliest_trigger_pos);
  328. grammar_triggered = true;
  329. }
  330. if (data.params.grammar_lazy) {
  331. assert_equals(expect_grammar_triggered, grammar_triggered);
  332. }
  333. if (grammar_triggered && test_grammar_if_triggered && !match_string(constrained, grammar.get())) {
  334. throw std::runtime_error("Failed to match delta against grammar:\n\n" + data.delta +
  335. "\n\nConstrained: " + constrained +
  336. "\n\nGrammar: " + data.params.grammar);
  337. }
  338. }
  339. }
  340. }
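// Message fixtures shared by the template tests below.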
  341. const common_chat_msg message_user {
  342. "user",
  343. "Hey there!",
  344. /* .content_parts = */ {},
  345. /* .tool_calls = */ {},
  346. /* .reasoning_content = */ "",
  347. /* .tool_name = */ "",
  348. /* .tool_call_id = */ "",
  349. };
  350. const common_chat_msg message_user_parts {
  351. "user",
  352. /* .content = */ "",
  353. /* .content_parts = */ {
  354. { "text", "Hey" },
  355. { "text", "there" },
  356. },
  357. /* .tool_calls = */ {},
  358. /* .reasoning_content = */ "",
  359. /* .tool_name = */ "",
  360. /* .tool_call_id = */ "",
  361. };
  362. static common_chat_msg simple_assist_msg(const std::string & content, const std::string & reasoning_content = "", const std::string & tool_name = "", const std::string & arguments = "", const std::string & id = "") {
  363. common_chat_msg msg;
  364. msg.role = "assistant";
  365. msg.content = content;
  366. msg.reasoning_content = reasoning_content;
  367. if (!tool_name.empty()) {
  368. msg.tool_calls.push_back({ tool_name, arguments, id });
  369. }
  370. return msg;
  371. }
  372. const common_chat_msg message_assist = simple_assist_msg("Hello, world!\nWhat's up?");
  373. const common_chat_msg message_assist_empty = simple_assist_msg("");
  374. const common_chat_msg message_assist_thoughts_unparsed_deepseek = simple_assist_msg("<think>I'm\nthinking</think>Hello, world!\nWhat's up?");
  375. const common_chat_msg message_assist_thoughts_unparsed_r7b = simple_assist_msg("<|START_THINKING|>I'm\nthinking<|END_THINKING|>Hello, world!\nWhat's up?");
  376. const common_chat_msg message_assist_thoughts = simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking");
  377. const common_chat_msg message_assist_thoughts_unopened_unparsed = simple_assist_msg("I'm\nthinking</think>Hello, world!\nWhat's up?");
  378. const common_chat_msg message_assist_thoughts_no_content = simple_assist_msg("", "I'm\nthinking");
  379. const common_chat_msg message_assist_call = simple_assist_msg("", "", "special_function", "{\"arg1\": 1}");
  380. const common_chat_msg message_assist_call_content = simple_assist_msg("Hello, world!\nWhat's up?", "", "special_function", "{\"arg1\":1}");
  381. const common_chat_msg message_assist_call_empty_args = simple_assist_msg("", "", "special_function");
  382. const common_chat_msg message_assist_call_cutoff_args = simple_assist_msg("", "", "special_function", "{\"arg");
  383. const common_chat_msg message_assist_call_thoughts = simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\":1}");
  384. const common_chat_msg message_assist_call_thoughts_unparsed = simple_assist_msg("<think>I'm\nthinking</think>\n\n", "", "special_function", "{\"arg1\": 1}");
  385. const common_chat_msg message_assist_call_id = simple_assist_msg("", "", "special_function", "{\"arg1\":1}", /* .id = */ "123456789");
  386. const common_chat_msg message_assist_call_idx = simple_assist_msg("", "", "special_function", "{\"arg1\":1}", /* .id = */ "0");
  387. const common_chat_msg message_assist_thoughts_call_idx = simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\": 1}", /* id = */ "0");
  388. const common_chat_msg message_assist_call_python = simple_assist_msg("", "", "python", "{\"code\":\"print('hey')\"}");
  389. const common_chat_msg message_assist_call_python_lines = simple_assist_msg("", "", "python", "{\"code\":\"# This is a program:\\nprint('hey')\"}");
  390. const common_chat_msg message_assist_call_python_lines_unclosed = simple_assist_msg("", "", "python", "{\"code\":\"# This is a program:\\nprint('hey')");
  391. const common_chat_msg message_assist_call_code_interpreter = simple_assist_msg("", "", "code_interpreter", "{\"code\":\"print('hey')\"}");
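// Round-trips messages through the OpenAI-compatible JSON representation and checks the exact JSON produced.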
  392. static void test_msgs_oaicompat_json_conversion() {
  393. printf("[%s]\n", __func__);
  394. std::vector<common_chat_msg> msgs{
  395. message_user,
  396. message_user_parts,
  397. message_assist_call,
  398. message_assist_call_thoughts,
  399. message_assist_call_thoughts_unparsed,
  400. message_assist_call_id,
  401. message_assist_call_idx,
  402. message_assist_call_python,
  403. message_assist_call_code_interpreter,
  404. };
  405. for (const auto & msg : msgs) {
  406. auto oai_json = common_chat_msgs_to_json_oaicompat<json>({msg});
  407. auto msgs2 = common_chat_msgs_parse_oaicompat(oai_json);
  408. assert_equals((size_t) 1, msgs2.size());
  409. auto msg2 = msgs2[0];
  410. assert_msg_equals(msg, msg2);
  411. }
  412. assert_equals(
  413. std::string(
  414. "[\n"
  415. " {\n"
  416. " \"role\": \"user\",\n"
  417. " \"content\": [\n"
  418. " {\n"
  419. " \"type\": \"text\",\n"
  420. " \"text\": \"Hey\"\n"
  421. " },\n"
  422. " {\n"
  423. " \"type\": \"text\",\n"
  424. " \"text\": \"there\"\n"
  425. " }\n"
  426. " ]\n"
  427. " }\n"
  428. "]"
  429. ),
  430. common_chat_msgs_to_json_oaicompat<json>({message_user_parts}).dump(2));
  431. assert_equals(
  432. std::string(
  433. "[\n"
  434. " {\n"
  435. " \"role\": \"assistant\",\n"
  436. " \"content\": null,\n"
  437. " \"tool_calls\": [\n"
  438. " {\n"
  439. " \"type\": \"function\",\n"
  440. " \"function\": {\n"
  441. " \"name\": \"python\",\n"
  442. " \"arguments\": \"{\\\"code\\\":\\\"print('hey')\\\"}\"\n"
  443. " }\n"
  444. " }\n"
  445. " ]\n"
  446. " }\n"
  447. "]"
  448. ),
  449. common_chat_msgs_to_json_oaicompat<json>({message_assist_call_python}).dump(2));
  450. auto res = common_chat_msgs_parse_oaicompat(json::parse("[{\"role\": \"assistant\", \"tool_calls\": []}]"));
  451. assert_equals<size_t>(1, res.size());
  452. assert_equals<std::string>(res[0].role, "assistant");
  453. assert_equals(true, res[0].content.empty());
  454. assert_equals(true, res[0].tool_calls.empty());
  455. try {
  456. common_chat_msgs_parse_oaicompat(json::parse("[{\"role\": \"assistant\"}]"));
  457. throw std::runtime_error("Expected exception");
  458. } catch (const std::exception & e) {
  459. if (std::string(e.what()).find("'content'") == std::string::npos) {
  460. throw std::runtime_error("Expected exception about missing 'content'");
  461. }
  462. }
  463. }
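// Round-trips tool definitions through the OpenAI-compatible JSON representation.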
  464. static void test_tools_oaicompat_json_conversion() {
  465. printf("[%s]\n", __func__);
  466. std::vector<common_chat_tool> tools{
  467. special_function_tool,
  468. python_tool,
  469. code_interpreter_tool,
  470. };
  471. for (const auto & tool : tools) {
  472. auto oai_json = common_chat_tools_to_json_oaicompat<json>({tool});
  473. auto tools2 = common_chat_tools_parse_oaicompat(oai_json);
  474. assert_equals((size_t) 1, tools2.size());
  475. auto tool2 = tools2[0];
  476. assert_equals(tool.name, tool2.name);
  477. assert_equals(tool.description, tool2.description);
  478. assert_equals(json::parse(tool.parameters).dump(2), json::parse(tool2.parameters).dump(2));
  479. }
  480. assert_equals(
  481. std::string(
  482. "[\n"
  483. " {\n"
  484. " \"type\": \"function\",\n"
  485. " \"function\": {\n"
  486. " \"name\": \"special_function\",\n"
  487. " \"description\": \"I'm special\",\n"
  488. " \"parameters\": {\n"
  489. " \"type\": \"object\",\n"
  490. " \"properties\": {\n"
  491. " \"arg1\": {\n"
  492. " \"type\": \"integer\",\n"
  493. " \"description\": \"The arg.\"\n"
  494. " }\n"
  495. " },\n"
  496. " \"required\": [\n"
  497. " \"arg1\"\n"
  498. " ]\n"
  499. " }\n"
  500. " }\n"
  501. " }\n"
  502. "]"
  503. ),
  504. common_chat_tools_to_json_oaicompat<json>({special_function_tool}).dump(2));
  505. }
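// Checks format detection, output parsing and grammar generation for the chat templates covered below.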
  506. static void test_template_output_parsers() {
  507. printf("[%s]\n", __func__);
  508. common_chat_templates_inputs inputs_no_tools;
  509. inputs_no_tools.messages = {message_user};
  510. common_chat_templates_inputs inputs_tools;
  511. inputs_tools.messages = {message_user};
  512. inputs_tools.tools = {special_function_tool};
  513. common_chat_templates_inputs inputs_tools_builtin;
  514. inputs_tools_builtin.messages = {message_user};
  515. inputs_tools_builtin.tools = {python_tool};
  516. {
  517. // Not supported yet
  518. auto tmpls = read_templates("models/templates/CohereForAI-c4ai-command-r-plus-tool_use.jinja");
  519. assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  520. assert_equals(COMMON_CHAT_FORMAT_GENERIC, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  521. }
  522. {
  523. auto tmpls = read_templates("models/templates/CohereForAI-c4ai-command-r7b-12-2024-tool_use.jinja");
  524. std::vector<std::string> end_tokens{ "<|END_OF_TURN_TOKEN|>" };
  525. for (const auto & inputs : { inputs_no_tools, inputs_tools }) {
  526. auto params = common_chat_templates_apply(tmpls.get(), inputs);
  527. assert_equals(COMMON_CHAT_FORMAT_COMMAND_R7B, params.format);
  528. assert_equals(false, params.thinking_forced_open);
  529. }
  530. assert_msg_equals(message_assist,
  531. common_chat_parse(
  532. "Hello, world!\nWhat's up?",
  533. /* is_partial= */ false,
  534. {COMMON_CHAT_FORMAT_COMMAND_R7B}));
  535. assert_msg_equals(message_assist,
  536. common_chat_parse(
  537. "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
  538. /* is_partial= */ false,
  539. {COMMON_CHAT_FORMAT_COMMAND_R7B}));
  540. assert_msg_equals(message_assist_thoughts,
  541. common_chat_parse(
  542. "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
  543. "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
  544. /* is_partial= */ false,
  545. {
  546. /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
  547. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  548. /* .reasoning_in_content = */ false,
  549. /* .thinking_forced_open = */ false,
  550. }));
  551. assert_msg_equals(message_assist_thoughts_unparsed_deepseek,
  552. common_chat_parse(
  553. "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
  554. "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
  555. /* is_partial= */ false,
  556. {
  557. /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
  558. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  559. /* .reasoning_in_content = */ true,
  560. /* .thinking_forced_open = */ false,
  561. }));
  562. assert_msg_equals(message_assist_thoughts_unparsed_r7b,
  563. common_chat_parse(
  564. "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
  565. "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
  566. /* is_partial= */ false,
  567. {COMMON_CHAT_FORMAT_COMMAND_R7B}));
  568. assert_msg_equals(message_assist_thoughts,
  569. common_chat_parse(
  570. "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
  571. "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
  572. /* is_partial= */ false,
  573. {
  574. /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
  575. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  576. /* .reasoning_in_content = */ false,
  577. /* .thinking_forced_open = */ false,
  578. }));
  579. assert_msg_equals(message_assist_thoughts_call_idx,
  580. common_chat_parse(
  581. "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
  582. "<|START_ACTION|>[\n"
  583. " {\"tool_call_id\": \"0\", \"tool_name\": \"special_function\", \"parameters\": {\"arg1\": 1}}\n"
  584. "]<|END_ACTION|>",
  585. /* is_partial= */ false,
  586. {
  587. /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
  588. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  589. /* .reasoning_in_content = */ false,
  590. /* .thinking_forced_open = */ false,
  591. }));
  592. assert_msg_equals(message_assist_thoughts_no_content,
  593. common_chat_parse(
  594. "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
  595. "<|START_ACTION|>[\n"
  596. " {\"tool_call_id\": \"0\", \"tool_name\": \"special",
  597. /* is_partial= */ true,
  598. {
  599. /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
  600. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  601. /* .reasoning_in_content = */ false,
  602. /* .thinking_forced_open = */ false,
  603. }));
  604. test_templates(tmpls.get(), end_tokens, message_assist_call_idx, tools,
  605. "<|START_THINKING|><|END_THINKING|>"
  606. "<|START_ACTION|>[\n"
  607. " {\"tool_call_id\": \"0\", \"tool_name\": \"special_function\", \"parameters\": {\"arg1\": 1}}\n"
  608. "]<|END_ACTION|>",
  609. /* expect_grammar_triggered= */ true,
  610. /* test_grammar_if_triggered= */ true,
  611. COMMON_REASONING_FORMAT_DEEPSEEK);
  612. test_templates(tmpls.get(), end_tokens, message_assist, tools,
  613. "<|START_RESPONSE|>Hello, world!\n"
  614. "What's up?<|END_RESPONSE|>",
  615. /* expect_grammar_triggered= */ false);
  616. }
  617. {
  618. auto tmpls = read_templates("models/templates/google-gemma-2-2b-it.jinja");
  619. std::vector<std::string> end_tokens{ "<end_of_turn>" };
  620. assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  621. assert_equals(COMMON_CHAT_FORMAT_GENERIC, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  622. assert_equals(COMMON_CHAT_FORMAT_GENERIC,
  623. common_chat_templates_apply(
  624. read_templates("models/templates/microsoft-Phi-3.5-mini-instruct.jinja").get(),
  625. inputs_tools)
  626. .format);
// The generic tool call format doesn't generate / parse content-only messages symmetrically.
  628. assert_equals(
  629. message_assist_empty,
  630. common_chat_parse(
  631. "{ \"tool_call\" : { \"name\" : \"t",
  632. /* is_partial= */ true,
  633. {COMMON_CHAT_FORMAT_GENERIC}));
  634. assert_equals(
  635. simple_assist_msg("", "", "puppeteer_screenshot", "{\"name\":\"servethehome_homepage\","),
  636. common_chat_parse(
  637. R"({"tool_call": {"name": "puppeteer_screenshot", "arguments": {"name": "servethehome_homepage",)",
  638. /* is_partial= */ true,
  639. {COMMON_CHAT_FORMAT_GENERIC}));
  640. assert_equals(
  641. message_assist_call_empty_args,
  642. common_chat_parse(
  643. "{ \"tool_call\" : { \"name\" : \"special_function\"",
  644. /* is_partial= */ true,
  645. {COMMON_CHAT_FORMAT_GENERIC}));
  646. assert_equals(
  647. message_assist_call_cutoff_args,
  648. common_chat_parse(
  649. "{ \"tool_call\" : { \"name\" : \"special_function\", \"arguments\" : { \"arg",
  650. /* is_partial= */ true,
  651. {COMMON_CHAT_FORMAT_GENERIC}));
  652. assert_msg_equals(message_assist,
  653. common_chat_parse(
  654. "{\n"
  655. " \"response\": \"Hello, world!\\nWhat's up?\"\n"
  656. "}",
  657. /* is_partial= */ false,
  658. {COMMON_CHAT_FORMAT_GENERIC}));
  659. test_templates(tmpls.get(), end_tokens, message_assist_call_id, tools,
  660. "{\n"
  661. " \"tool_calls\": [\n"
  662. " {\n"
  663. " \"name\": \"special_function\",\n"
  664. " \"arguments\": {\n"
  665. " \"arg1\": 1\n"
  666. " },\n"
  667. " \"id\": \"123456789\"\n"
  668. " }\n"
  669. " ]\n"
  670. "}");
  671. }
  672. {
  673. auto tmpls = read_templates("models/templates/mistralai-Mistral-Nemo-Instruct-2407.jinja");
  674. std::vector<std::string> end_tokens{ "</s>" };
  675. assert_equals(COMMON_CHAT_FORMAT_MISTRAL_NEMO, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  676. test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
  677. test_templates(
  678. tmpls.get(), end_tokens, message_assist_call_id, tools,
  679. "[TOOL_CALLS][{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}, \"id\": \"123456789\"}]");
  680. }
  681. {
  682. auto tmpls = read_templates("models/templates/Qwen-QwQ-32B.jinja");
  683. std::vector<std::string> end_tokens{ "<|im_end|>" };
  684. assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  685. assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  686. }
  687. {
  688. auto tmpls = read_templates("models/templates/NousResearch-Hermes-2-Pro-Llama-3-8B-tool_use.jinja");
  689. std::vector<std::string> end_tokens{ "<|im_end|>" };
  690. assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  691. assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  692. assert_equals(
  693. COMMON_CHAT_FORMAT_HERMES_2_PRO,
  694. common_chat_templates_apply(
  695. read_templates("models/templates/NousResearch-Hermes-3-Llama-3.1-8B-tool_use.jinja").get(),
  696. inputs_tools)
  697. .format);
  698. assert_equals(
  699. COMMON_CHAT_FORMAT_HERMES_2_PRO,
  700. common_chat_templates_apply(
  701. read_templates("models/templates/Qwen-Qwen2.5-7B-Instruct.jinja").get(),
  702. inputs_tools)
  703. .format);
  704. // Test parsing
  705. assert_msg_equals(
  706. simple_assist_msg("", "", "python", ""),
  707. common_chat_parse(
  708. "```json\n"
  709. "<function_call> { \"name\" : \"python\"",
  710. /* is_partial= */ true,
  711. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  712. assert_msg_equals(
  713. simple_assist_msg("Let's call something\n"),
  714. common_chat_parse(
  715. "Let's call something\n"
  716. "<tool_call>{\"name\"",
  717. /* is_partial= */ true,
  718. {
  719. /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
  720. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  721. /* .reasoning_in_content = */ false,
  722. /* .thinking_forced_open = */ false,
  723. }));
  724. assert_msg_equals(
  725. simple_assist_msg(""),
  726. common_chat_parse(
  727. "Let's call something\n"
  728. "<tool_call>{\"name",
  729. /* is_partial= */ true,
  730. {
  731. /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
  732. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  733. /* .reasoning_in_content = */ false,
  734. /* .thinking_forced_open = */ false,
  735. }));
  736. assert_msg_equals(message_assist_call_thoughts,
  737. common_chat_parse(
  738. // QwQ-32B's template adds a trailing <think> if add_generation_prompt
  739. "I'm\nthinking</think>\n"
  740. "<tool_call>{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}</tool_call>",
  741. /* is_partial= */ false,
  742. {
  743. /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
  744. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  745. /* .reasoning_in_content = */ false,
  746. /* .thinking_forced_open = */ true,
  747. }));
  748. assert_msg_equals(
  749. message_assist_call,
  750. common_chat_parse(
  751. "<tool_call>\n"
  752. "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  753. "</tool_call>",
  754. /* is_partial= */ false,
  755. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  756. assert_msg_equals(message_assist_call_content,
  757. common_chat_parse(
  758. "Hello, world!\nWhat's up?<tool_call>\n"
  759. "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  760. "</tool_call>",
  761. /* is_partial= */ false,
  762. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  763. assert_msg_equals(
  764. message_assist_call,
  765. common_chat_parse(
  766. "<function=special_function>{\"arg1\": 1}</function>",
  767. /* is_partial= */ false,
  768. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  769. assert_msg_equals(
  770. message_assist_call,
  771. common_chat_parse(
  772. "<function name=\"special_function\">\n"
  773. "{\"arg1\": 1}\n"
  774. "</function>",
  775. /* is_partial= */ false,
  776. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  777. assert_msg_equals(
  778. message_assist_call,
  779. common_chat_parse(
  780. "<tool>\n"
  781. " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  782. "</tool>",
  783. /* is_partial= */ false,
  784. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  785. assert_msg_equals(
  786. message_assist_call,
  787. common_chat_parse(
  788. "<tools>\n"
  789. " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  790. "</tools>",
  791. /* is_partial= */ false,
  792. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  793. assert_msg_equals(
  794. message_assist_call,
  795. common_chat_parse(
  796. "<response>\n"
  797. " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  798. "</response>",
  799. /* is_partial= */ false,
  800. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  801. assert_msg_equals(
  802. message_assist_call,
  803. common_chat_parse(
  804. "```xml\n"
  805. "<response>\n"
  806. " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  807. "</response>\n"
  808. "```",
  809. /* is_partial= */ false,
  810. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  811. assert_msg_equals(
  812. message_assist_call,
  813. common_chat_parse(
  814. "```xml\n"
  815. " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  816. "```",
  817. /* is_partial= */ false,
  818. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  819. assert_msg_equals(
  820. message_assist_call,
  821. common_chat_parse(
  822. "```\n"
  823. " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  824. "```",
  825. /* is_partial= */ false,
  826. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  827. assert_msg_equals(
  828. message_assist_call,
  829. common_chat_parse(
  830. "```\n"
  831. "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  832. "```",
  833. /* is_partial= */ false,
  834. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  835. assert_msg_equals(
  836. message_assist_call,
  837. common_chat_parse(
  838. "```json\n"
  839. " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  840. "```",
  841. /* is_partial= */ false,
  842. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  843. assert_msg_equals(
  844. message_assist_call,
  845. common_chat_parse(
  846. "```json\n"
  847. "\n"
  848. " <function_call> {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}} \n"
  849. " </function_call> \n"
  850. "``` ",
  851. /* is_partial= */ false,
  852. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  853. assert_msg_equals(
  854. message_assist_call,
  855. common_chat_parse(
  856. "<json>\n"
  857. " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  858. "</json>",
  859. /* is_partial= */ false,
  860. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  861. assert_msg_equals(
  862. message_assist_call,
  863. common_chat_parse(
  864. "<xml>\n"
  865. " {\n"
  866. " \"name\": \"special_function\", \"arguments\": {\"arg1\": 1}\n"
  867. " }\n"
  868. "</xml>",
  869. /* is_partial= */ false,
  870. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  871. assert_msg_equals(
  872. message_assist_call,
  873. common_chat_parse(
  874. "<JSON>\n"
  875. " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  876. "</JSON>",
  877. /* is_partial= */ false,
  878. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  879. assert_msg_equals(
  880. message_assist_call,
  881. common_chat_parse(
  882. "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}",
  883. /* is_partial= */ false,
  884. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  885. assert_msg_equals(
  886. message_assist_call,
  887. common_chat_parse(
  888. "{\n \"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}",
  889. /* is_partial= */ false,
  890. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  891. assert_msg_equals(
  892. simple_assist_msg(
  893. "This is not a tool call:",
  894. "",
  895. "special_function",
  896. "{\"arg1\": 1}"),
  897. common_chat_parse(
  898. "This is not a tool call:\n"
  899. "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}",
  900. /* is_partial= */ false,
  901. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  902. assert_msg_equals(message_assist,
  903. common_chat_parse(
  904. "Hello, world!\nWhat's up?",
  905. /* is_partial= */ false,
  906. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  907. assert_msg_equals(message_assist_thoughts_unparsed_deepseek,
  908. common_chat_parse(
  909. "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
  910. /* is_partial= */ false,
  911. {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
  912. // assert_msg_equals(message_assist_thoughts_unparsed_deepseek,
  913. // common_chat_parse(
  914. // "I'm\nthinking</think>Hello, world!\nWhat's up?",
  915. // COMMON_CHAT_FORMAT_HERMES_2_PRO));
  916. assert_msg_equals(message_assist_thoughts,
  917. common_chat_parse(
  918. "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
  919. /* is_partial= */ false,
  920. {
  921. /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
  922. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  923. /* .reasoning_in_content = */ false,
  924. /* .thinking_forced_open = */ false,
  925. }));
  926. assert_msg_equals(message_assist_thoughts_unopened_unparsed,
  927. common_chat_parse(
  928. "I'm\nthinking</think>Hello, world!\nWhat's up?",
  929. /* is_partial= */ false,
  930. {
  931. /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
  932. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  933. /* .reasoning_in_content = */ false,
  934. /* .thinking_forced_open = */ false,
  935. }));
  936. assert_msg_equals(message_assist_thoughts,
  937. common_chat_parse(
  938. "I'm\nthinking</think>Hello, world!\nWhat's up?",
  939. /* is_partial= */ false,
  940. {
  941. /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
  942. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  943. /* .reasoning_in_content = */ false,
  944. /* .thinking_forced_open = */ true,
  945. }));
  946. test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
  947. test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
  948. "<tool_call>\n"
  949. "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
  950. "</tool_call>");
  951. test_templates(tmpls.get(), end_tokens, message_assist_call_python_lines, tools,
  952. "<tool_call>\n"
  953. "{\"name\": \"python\", \"arguments\": {\"code\":\"# This is a program:\\nprint('hey')\"}}\n"
  954. "</tool_call>");
  955. }
  956. {
  957. auto tmpls = read_templates("models/templates/meta-llama-Llama-3.1-8B-Instruct.jinja");
  958. std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
  959. assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  960. assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  961. assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS,
  962. common_chat_templates_apply(tmpls.get(), inputs_tools_builtin).format);
  963. assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS,
  964. common_chat_templates_apply(
  965. read_templates("models/templates/meta-llama-Llama-3.3-70B-Instruct.jinja").get(),
  966. inputs_tools_builtin)
  967. .format);
  968. assert_equals(
  969. message_assist_call,
  970. common_chat_parse(
  971. "{\"name\": \"special_function\", \"parameters\": {\"arg1\": 1}}",
  972. /* is_partial= */ false,
  973. {COMMON_CHAT_FORMAT_LLAMA_3_X}));
  974. // test_templates(tmpls.get(), end_tokens, message_assist, tools, R"(?)", /* expect_grammar_triggered= */ false);
  975. test_templates(tmpls.get(), end_tokens, message_assist_call_code_interpreter, llama_3_1_tools,
  976. "<|python_tag|>code_interpreter.call(code=\"print('hey')\")");
  977. test_templates(tmpls.get(), end_tokens, message_assist_call_python, tools,
  978. "<|python_tag|>python.call(code=\"print('hey')\")");
  979. test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
  980. "{\"name\": \"special_function\", \"parameters\": {\"arg1\": 1}}");
  981. }
  982. {
  983. auto tmpls = read_templates("models/templates/meta-llama-Llama-3.2-3B-Instruct.jinja");
  984. std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
  985. assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  986. assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  987. test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
  988. test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
  989. "{\"name\": \"special_function\", \"parameters\": {\"arg1\": 1}}");
  990. }
  991. {
  992. auto tmpls = read_templates("models/templates/meetkai-functionary-medium-v3.1.jinja");
  993. std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
  994. assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY,
  995. common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  996. assert_equals(COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1,
  997. common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  998. assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY,
  999. common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  1000. for (auto is_partial : { false, true }) {
  1001. assert_equals(
  1002. message_assist_call,
  1003. common_chat_parse(
  1004. "<function=special_function>{\"arg1\": 1}</function>",
  1005. is_partial,
  1006. {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1}));
  1007. }
  1008. test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
  1009. test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
  1010. "<function=special_function>{\"arg1\": 1}</function>");
  1011. }
  1012. {
  1013. auto tmpls = read_templates("models/templates/meetkai-functionary-medium-v3.2.jinja");
  1014. std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
  1015. assert_equals(COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  1016. assert_equals(COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  1017. assert_msg_equals(
  1018. simple_assist_msg(
  1019. "Hello, world!\nnono\nWhat's up?",
  1020. "",
  1021. "special_function",
  1022. "{\"arg1\": 1}"),
  1023. common_chat_parse(
  1024. "all\n"
  1025. "Hello, world!\n"
  1026. "nono\n"
  1027. "What's up?>>>special_function\n"
  1028. "{\"arg1\": 1}\n",
  1029. /* is_partial= */ false,
  1030. {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
  1031. assert_msg_equals(message_assist_call_python_lines,
  1032. common_chat_parse(
  1033. "python\n"
  1034. "# This is a program:\n"
  1035. "print('hey')",
  1036. /* is_partial= */ false,
  1037. {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
  1038. assert_msg_equals(message_assist_call_python_lines_unclosed,
  1039. common_chat_parse(
  1040. "python\n"
  1041. "# This is a program:\n"
  1042. "print('hey')",
  1043. /* is_partial= */ true,
  1044. {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
  1045. assert_msg_equals(message_assist_call,
  1046. common_chat_parse(
  1047. "special_function\n"
  1048. "{\"arg1\": 1} \n ",
  1049. /* is_partial= */ false,
  1050. {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
  1051. assert_msg_equals(message_assist,
  1052. common_chat_parse(
  1053. "all\n"
  1054. "Hello, world!\nWhat's up?",
  1055. /* is_partial= */ false,
  1056. {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
  1057. test_templates(tmpls.get(), end_tokens, message_assist, {},
  1058. "all\n"
  1059. "Hello, world!\n"
  1060. "What's up?",
  1061. /* expect_grammar_triggered= */ false);
  1062. test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
  1063. "special_function\n"
  1064. "{\"arg1\": 1}");
  1065. }
  1066. {
  1067. auto tmpls = read_templates("models/templates/fireworks-ai-llama-3-firefunction-v2.jinja");
  1068. std::vector<std::string> end_tokens{ "<|eot_id|>" };
  1069. assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  1070. assert_equals(COMMON_CHAT_FORMAT_FIREFUNCTION_V2, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  1071. test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
  1072. test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
  1073. " functools[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}]");
  1074. }
  1075. {
  1076. // Original DeepSeek R1 template. Leaves <|tool▁calls▁begin|> and others unclosed. Our logic fixes the prompt.
  1077. auto tmpls = read_templates("models/templates/deepseek-ai-DeepSeek-R1-Distill-Llama-8B.jinja");
  1078. std::vector<std::string> end_tokens{ "<|end▁of▁sentence|>" };
  1079. for (const auto & inputs : { inputs_no_tools, inputs_tools }) {
  1080. auto params = common_chat_templates_apply(tmpls.get(), inputs);
  1081. assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, params.format);
  1082. assert_equals(true, params.thinking_forced_open);
  1083. }
  1084. test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
  1085. test_templates(tmpls.get(), end_tokens, message_assist_thoughts, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
  1086. assert_msg_equals(
  1087. simple_assist_msg("Hello, world!\nWhat's up?", "<think>I'm\nthinking"),
  1088. common_chat_parse(
  1089. "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
  1090. /* is_partial= */ false,
  1091. {
  1092. COMMON_CHAT_FORMAT_DEEPSEEK_R1,
  1093. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  1094. /* .reasoning_in_content = */ false,
  1095. /* .thinking_forced_open = */ true,
  1096. }));
  1097. assert_msg_equals(
  1098. simple_assist_msg("", "I need to remember the correct syntax. It starts with <|tool▁calls▁begin|> and ends with"),
  1099. common_chat_parse(
  1100. "I need to remember the correct syntax. It starts with <|tool▁calls▁begin|> and ends with",
  1101. /* is_partial= */ true,
  1102. {
  1103. COMMON_CHAT_FORMAT_DEEPSEEK_R1,
  1104. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  1105. /* .reasoning_in_content = */ false,
  1106. /* .thinking_forced_open = */ true,
  1107. }));
  1108. assert_msg_equals(message_assist_thoughts,
  1109. common_chat_parse(
  1110. "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
  1111. /* is_partial= */ false,
  1112. {
  1113. /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
  1114. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  1115. /* .reasoning_in_content = */ false,
  1116. /* .thinking_forced_open = */ false,
  1117. }));
  1118. assert_msg_equals(message_assist_thoughts_unopened_unparsed,
  1119. common_chat_parse(
  1120. "I'm\nthinking</think>Hello, world!\nWhat's up?",
  1121. /* is_partial= */ false,
  1122. {
  1123. /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
  1124. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  1125. /* .reasoning_in_content = */ false,
  1126. /* .thinking_forced_open = */ false,
  1127. }));
  1128. assert_msg_equals(message_assist_thoughts,
  1129. common_chat_parse(
  1130. "I'm\nthinking</think>Hello, world!\nWhat's up?",
  1131. /* is_partial= */ false,
  1132. {
  1133. /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
  1134. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  1135. /* .reasoning_in_content = */ false,
  1136. /* .thinking_forced_open = */ true,
  1137. }));
  1138. assert_msg_equals(message_assist_thoughts,
// Latest template update (as of 20250209) adds a trailing <think>\n if add_generation_prompt is true.
  1140. common_chat_parse(
  1141. "I'm\nthinking</think>Hello, world!\nWhat's up?",
  1142. /* is_partial= */ false,
  1143. {
  1144. /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
  1145. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  1146. /* .reasoning_in_content = */ false,
  1147. /* .thinking_forced_open = */ true,
  1148. }));
  1149. // test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
  1150. // "<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>special_function\n"
  1151. // "```json\n"
  1152. // "{\"arg1\": 1}\n"
  1153. // // Look what's not here: <|tool▁calls▁end|> (also missing the <|end▁of▁sentence|>, but that is removed lazily by the test's delta logic)
  1154. // "```<|tool▁call▁end|>",
  1155. // /* expect_grammar_triggered= */ true,
  1156. // /* test_grammar_if_triggered= */ false);
  1157. }
  1158. {
  1159. // Replacement DeepSeek R1 template. Makes the Distill Qwen 7B/32B models happy to call tools and all.
  1160. auto tmpls = read_templates("models/templates/llama-cpp-deepseek-r1.jinja");
  1161. std::vector<std::string> end_tokens{ "<|end▁of▁sentence|>" };
  1162. assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
  1163. assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
  1164. test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
  1165. test_templates(tmpls.get(), end_tokens, message_assist_thoughts, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
  1166. assert_msg_equals(message_assist_thoughts_unparsed_deepseek,
  1167. common_chat_parse(
  1168. "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
  1169. /* is_partial= */ false,
  1170. {COMMON_CHAT_FORMAT_DEEPSEEK_R1}));
  1171. assert_msg_equals(message_assist_thoughts,
  1172. common_chat_parse(
  1173. "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
  1174. /* is_partial= */ false,
  1175. {
  1176. /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
  1177. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  1178. /* .reasoning_in_content = */ false,
  1179. /* .thinking_forced_open = */ false,
  1180. }));
  1181. assert_msg_equals(message_assist_thoughts,
  1182. common_chat_parse(
  1183. "I'm\nthinking</think>Hello, world!\nWhat's up?",
  1184. /* is_partial= */ false,
  1185. {
  1186. /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
  1187. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  1188. /* .reasoning_in_content = */ false,
  1189. /* .thinking_forced_open = */ true,
  1190. }));
  1191. assert_msg_equals(message_assist_call_thoughts_unparsed,
  1192. common_chat_parse(
  1193. "<think>I'm\nthinking</think>\n\n"
  1194. "<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>special_function\n"
  1195. "```json\n"
  1196. "{\"arg1\": 1}\n"
  1197. "```<|tool▁call▁end|><|tool▁calls▁end|>",
  1198. /* is_partial= */ false,
  1199. {COMMON_CHAT_FORMAT_DEEPSEEK_R1}));
  1200. assert_msg_equals(message_assist_call,
  1201. common_chat_parse(
  1202. "<|tool▁calls|>function<|tool▁sep|>special_function\n"
  1203. "```json\n"
  1204. "{\"arg1\": 1}\n"
  1205. "```<|tool▁call▁end|><|tool▁calls▁end|>",
  1206. /* is_partial= */ false,
  1207. {COMMON_CHAT_FORMAT_DEEPSEEK_R1}));
  1208. assert_msg_equals(message_assist_call_thoughts,
  1209. common_chat_parse(
  1210. "<think>I'm\nthinking</think>\n\n"
  1211. "<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>special_function\n"
  1212. "```json\n"
  1213. "{\"arg1\": 1}\n"
  1214. "```<|tool▁call▁end|><|tool▁calls▁end|>",
  1215. /* is_partial= */ false,
  1216. {
  1217. /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
  1218. /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
  1219. /* .reasoning_in_content = */ false,
  1220. /* .thinking_forced_open = */ false,
  1221. }));
  1222. test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
  1223. "<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>special_function\n"
  1224. "```json\n"
  1225. "{\"arg1\": 1}\n"
  1226. "```<|tool▁call▁end|><|tool▁calls▁end|>");
  1227. }
  1228. }
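// Checks common_chat_msg_diff::compute_diffs on content and tool call deltas.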
  1229. static void test_msg_diffs_compute() {
  1230. printf("[%s]\n", __func__);
  1231. {
  1232. common_chat_msg msg1;
  1233. common_chat_msg msg2;
  1234. msg2.content = "Hello, world!";
  1235. common_chat_msg_diff diff;
  1236. diff.content_delta = "Hello, world!";
  1237. assert_equals(
  1238. {diff},
  1239. common_chat_msg_diff::compute_diffs(msg1, msg2));
  1240. }
  1241. {
  1242. common_chat_msg msg1;
  1243. msg1.content = "Hello,";
  1244. common_chat_msg msg2;
  1245. msg2.content = "Hello, world!";
  1246. common_chat_msg_diff diff;
  1247. diff.content_delta = " world!";
  1248. assert_equals(
  1249. {diff},
  1250. common_chat_msg_diff::compute_diffs(msg1, msg2));
  1251. }
  1252. {
  1253. common_chat_msg msg0;
  1254. common_chat_msg msg1;
  1255. msg1.tool_calls = { { "special_function", "{\"ar", /* .id = */ "123" } };
  1256. common_chat_msg msg2;
  1257. msg2.tool_calls = { { "special_function", "{\"arg1\": 1}", /* .id = */ "123" } };
  1258. common_chat_msg_diff diff01;
  1259. diff01.tool_call_index = 0;
  1260. diff01.tool_call_delta.name = "special_function";
  1261. diff01.tool_call_delta.id = "123";
  1262. diff01.tool_call_delta.arguments = "{\"ar";
  1263. assert_equals(
  1264. {diff01},
  1265. common_chat_msg_diff::compute_diffs(msg0, msg1));
  1266. common_chat_msg_diff diff12;
  1267. diff12.tool_call_index = 0;
  1268. // Note: neither id nor name change here.
  1269. diff12.tool_call_delta.arguments = "g1\": 1}";
  1270. assert_equals(
  1271. {diff12},
  1272. common_chat_msg_diff::compute_diffs(msg1, msg2));
  1273. }
  1274. {
  1275. common_chat_msg msg0;
  1276. common_chat_msg msg2;
  1277. msg2.tool_calls = {
  1278. { "f1", "{\"arg1\": 1}", /* .id = */ "123" },
  1279. { "f2", "{\"arg2\": 2}", /* .id = */ "222" },
  1280. };
  1281. common_chat_msg_diff diff1;
  1282. diff1.tool_call_index = 0;
  1283. diff1.tool_call_delta.name = "f1";
  1284. diff1.tool_call_delta.id = "123";
  1285. diff1.tool_call_delta.arguments = "{\"arg1\": 1}";
  1286. common_chat_msg_diff diff2;
  1287. diff2.tool_call_index = 1;
  1288. diff2.tool_call_delta.name = "f2";
  1289. diff2.tool_call_delta.id = "222";
  1290. diff2.tool_call_delta.arguments = "{\"arg2\": 2}";
  1291. assert_equals(
  1292. {diff1, diff2},
  1293. common_chat_msg_diff::compute_diffs(msg0, msg2));
  1294. }
  1295. }
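// With .jinja files as arguments (non-Windows builds), prints a Markdown table of template formats; otherwise runs the tests.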
  1296. int main(int argc, char ** argv) {
  1297. // try {
  1298. #ifndef _WIN32
  1299. if (argc > 1) {
  1300. common_chat_templates_inputs inputs;
  1301. common_chat_msg msg;
  1302. msg.role = "user";
  1303. msg.content = "Hey";
  1304. inputs.messages = {msg};
  1305. inputs.tools = { special_function_tool };
  1306. std::cout << "| Template | Format |\n";
  1307. std::cout << "|----------|--------|\n";
  1308. for (int i = 1; i < argc; i++) {
  1309. try {
  1310. std::string path = argv[i];
  1311. if (path.rfind(".jinja") != path.size() - 6) {
  1312. std::cerr << "Skipping non-jinja file: " << path << '\n';
  1313. continue;
  1314. }
  1315. auto tmpls = read_templates(path);
  1316. auto parts = string_split(path, "/");
  1317. auto name = parts[parts.size() - 1];
  1318. auto format = common_chat_format_name(common_chat_templates_apply(tmpls.get(), inputs).format);
  1319. std::cout << "| " << name << " | " << format << " |\n";
  1320. } catch (const std::exception & e) {
  1321. std::cerr << "Failed to process " << argv[i] << ": " << e.what() << '\n';
  1322. }
  1323. }
  1324. } else
  1325. #endif
  1326. {
  1327. test_msg_diffs_compute();
  1328. test_msgs_oaicompat_json_conversion();
  1329. test_tools_oaicompat_json_conversion();
  1330. test_template_output_parsers();
  1331. std::cout << "\n[chat] All tests passed!" << '\n';
  1332. }
  1333. return 0;
  1334. // } catch (const std::exception & e) {
  1335. // std::cerr << "Error: " << e.what() << '\n';
  1336. // return 1;
  1337. // }
  1338. }