// chat.cpp
#include "chat.hpp"
#include "chat-template.hpp"
#include "json-schema-to-grammar.h"
#include "log.h"
#include "minja.hpp"

#include <algorithm>
  6. std::string common_chat_format_name(common_chat_format format) {
  7. switch (format) {
  8. case COMMON_CHAT_FORMAT_CONTENT_ONLY: return "Content-only";
  9. case COMMON_CHAT_FORMAT_GENERIC: return "Generic";
  10. case COMMON_CHAT_FORMAT_MISTRAL_NEMO: return "Mistral Nemo";
  11. case COMMON_CHAT_FORMAT_LLAMA_3_X: return "Llama 3.x";
  12. case COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS: return "Llama 3.x with builtin tools";
  13. case COMMON_CHAT_FORMAT_DEEPSEEK_R1: return "DeepSeek R1";
  14. case COMMON_CHAT_FORMAT_FIREFUNCTION_V2: return "FireFunction v2";
  15. case COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2: return "Functionary v3.2";
  16. case COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1: return "Functionary v3.1 Llama 3.1";
  17. case COMMON_CHAT_FORMAT_HERMES_2_PRO: return "Hermes 2 Pro";
  18. default:
  19. throw std::runtime_error("Unknown chat format");
  20. }
  21. }
// Shared grammar-generation options used by every chat-format handler below.
const common_grammar_options grammar_options {
    /* .dotall = */ false,
    /* .compact_spaces = */ false,
    // /* .compact_spaces = */ true,
};
  27. static bool parse_json(std::string::const_iterator & it, const std::string::const_iterator & end, json & out) {
  28. // // https://json.nlohmann.me/features/parsing/sax_interface/
  29. struct json_error_locator : public nlohmann::json_sax<json> {
  30. std::size_t position;
  31. bool found_error;
  32. json_error_locator() : position(0), found_error(false) {}
  33. bool parse_error(std::size_t position, const std::string &, const json::exception &) override {
  34. this->position = position - 1;
  35. this->found_error = true;
  36. return false;
  37. }
  38. bool null() override { return true; }
  39. bool boolean(bool) override { return true; }
  40. bool number_integer(number_integer_t) override { return true; }
  41. bool number_unsigned(number_unsigned_t) override { return true; }
  42. bool number_float(number_float_t, const string_t &) override { return true; }
  43. bool string(string_t &) override { return true; }
  44. bool binary(binary_t &) override { return true; }
  45. bool start_object(std::size_t) override { return true; }
  46. bool key(string_t &) override { return true; }
  47. bool end_object() override { return true; }
  48. bool start_array(std::size_t) override { return true; }
  49. bool end_array() override { return true; }
  50. };
  51. json_error_locator err_loc;
  52. json::sax_parse(it, end, &err_loc);
  53. std::string::const_iterator temptative_end;
  54. if (err_loc.found_error) {
  55. temptative_end = it + err_loc.position;
  56. } else {
  57. temptative_end = end;
  58. }
  59. std::string json_sub {it, temptative_end};
  60. try {
  61. out = json::parse(json_sub);
  62. it = temptative_end;
  63. return true;
  64. } catch (const std::exception &) {
  65. return false;
  66. }
  67. }
/**
 * Takes a prefix regex that must have 1 group to capture the function name, a closing suffix, and expects json parameters in between.
 * Aggregates the prefix, suffix and in-between text into the content.
 * If `trigger_opt` is provided and never matches, the whole input is returned
 * as plain assistant content with no tool calls.
 * Throws std::runtime_error when a function header is found but its JSON
 * arguments or closing pattern are malformed.
 */
static common_chat_msg parse_json_tool_calls(
    const std::string& input,
    const std::optional<std::regex> & trigger_opt,
    const std::regex & function_regex,
    const std::regex & close_regex) {
    std::smatch match;
    common_chat_msg result;
    result.role = "assistant";
    auto end = input.end();
    auto it = input.begin();
    if (trigger_opt) {
        // No trigger anywhere: treat the entire input as content.
        if (!std::regex_search(it, end, match, *trigger_opt)) {
            result.content = input;
            return result;
        }
        // Text before the trigger is content; parsing resumes right after it.
        result.content = match.prefix().str();
        it = match.suffix().first;
    }
    while (it != end) {
        std::sregex_iterator rend;
        std::sregex_iterator rit(it, end, function_regex);
        if (rit == rend) {
            // No further function headers: the remainder is plain content.
            fprintf(stderr, "No more tool calls found\n");
            result.content += std::string(it, end);
            break;
        }
        auto name = rit->str(1);
        // Text between the previous position and this header is kept as content.
        result.content += std::string(it, rit->prefix().second);
        it = rit->suffix().first;
        json arguments;
        // The JSON arguments immediately follow the matched function header.
        if (!parse_json(it, end, arguments)) {
            throw std::runtime_error("Failed to parse json tool call arguments");
        }
        if (!std::regex_search(it, end, match, close_regex)) {
            throw std::runtime_error("Malformed input, missing closing pattern");
        }
        it = match.suffix().first;
        result.tool_calls.push_back({name, arguments.is_string() ? arguments.get<std::string>() : arguments.dump(), /* id= */ ""});
    }
    return result;
}
  113. static common_chat_msg parse_prefixed_json_tool_call_array(const std::string& input, const std::string & prefix, size_t rstrip_prefix = 0) {
  114. auto content_end = input.find(prefix);
  115. size_t tc_start = std::string::npos;
  116. common_chat_msg result;
  117. result.role = "assistant";
  118. const auto process_tool_calls = [&](const json & tool_calls) {
  119. for (const auto & tool_call : tool_calls) {
  120. const auto & arguments = tool_call["arguments"];
  121. result.tool_calls.push_back({
  122. tool_call["name"],
  123. arguments.is_string() ? arguments.get<std::string>() : arguments.dump(),
  124. tool_call.contains("id") ? tool_call["id"] : "",
  125. });
  126. }
  127. };
  128. if (content_end == std::string::npos) {
  129. result.content = input;
  130. } else {
  131. tc_start = content_end + prefix.size() - rstrip_prefix;
  132. result.content = input.substr(0, content_end);
  133. auto tool_calls = json::parse(input.substr(tc_start));
  134. process_tool_calls(tool_calls);
  135. }
  136. return result;
  137. }
  138. static void foreach_function(const json & tools, const std::function<void(const json &)> & fn) {
  139. for (const auto & tool : tools) {
  140. if (!tool.contains("type") || tool["type"] != "function" || !tool.contains("function")) {
  141. LOG_INF("Skipping tool without function: %s", tool.dump(2).c_str());
  142. continue;
  143. }
  144. fn(tool);
  145. }
  146. }
// Template-agnostic ("generic") tool-call support: constrains the model output
// to a JSON object carrying either `tool_calls` (parallel) / `tool_call`
// (single), or a free-form `response` when tool use is not required.
static common_chat_params common_chat_params_init_generic(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;
    // One schema per tool: {"name": <const>, "arguments": <tool parameters>}.
    auto tool_call_schemas = json::array();
    foreach_function(inputs.tools, [&](const json & tool) {
        const auto & function = tool["function"];
        auto tool_schema = json {
            {"type", "object"},
            {"properties", {
                {"name", {
                    {"type", "string"},
                    {"const", function["name"]},
                }},
                {"arguments", function["parameters"]},
            }},
            {"required", json::array({"name", "arguments"})},
        };
        if (function.contains("description")) {
            tool_schema["description"] = function["description"];
        }
        if (inputs.parallel_tool_calls) {
            // With parallel calls each call also carries an id (>= 4 chars).
            tool_schema["properties"]["id"] = {
                {"type", "string"},
                {"minLength", 4},
            };
            tool_schema["required"].push_back("id");
        }
        tool_call_schemas.emplace_back(tool_schema);
    });
    // Wrapper object: an array under "tool_calls" when parallel calls are
    // allowed, otherwise a single object under "tool_call".
    const auto tool_call =
        inputs.parallel_tool_calls
            ? json {
                {"type", "object"},
                {"properties", {
                    {"tool_calls", {
                        {"type", "array"},
                        {"items", tool_call_schemas.size() == 1 ? tool_call_schemas[0] : json {
                            {"anyOf", tool_call_schemas},
                        }},
                        {"minItems", 1},
                    }},
                }},
                {"required", json::array({"tool_calls"})},
            }
            : json {
                {"type", "object"},
                {"properties", {
                    {"tool_call", tool_call_schemas.size() == 1 ? tool_call_schemas[0] : json {
                        {"anyOf", tool_call_schemas},
                    }},
                }},
                {"required", json::array({"tool_call"})},
            };
    // Unless tool use is required, also allow a {"response": ...} alternative,
    // constrained by inputs.json_schema when one was provided.
    const auto schema =
        inputs.tool_choice != "required"
            ? json {
                {"anyOf", json::array({
                    tool_call,
                    {
                        {"type", "object"},
                        {"properties", {
                            {"response", inputs.json_schema.is_null()
                                ? json {{"type", "string"}}
                                : inputs.json_schema
                            },
                        }},
                        {"required", json::array({"response"})},
                    },
                })}
            }
            : tool_call;
    // The whole output must follow the schema, so the grammar is not lazy.
    data.grammar_lazy = false;
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        builder.add_schema("root", schema);
    }, grammar_options);
    // Steer the model toward the expected JSON envelope via a system message.
    auto tweaked_messages = common_chat_template::add_system(
        inputs.messages,
        "Respond in JSON format, either with `tool_call` (a request to call tools) or with `response` reply to the user's request");
    data.prompt = tmpl.apply(tweaked_messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_GENERIC;
    return data;
}
  228. static common_chat_msg common_chat_parse_generic(const std::string & input) {
  229. json data = json::parse(input);
  230. common_chat_msg result;
  231. result.role = "assistant";
  232. if (data.contains("tool_calls")) {
  233. for (const auto & tool_call : data["tool_calls"]) {
  234. result.tool_calls.push_back({
  235. tool_call["name"],
  236. tool_call["arguments"].dump(),
  237. tool_call.contains("id") ? tool_call["id"] : "",
  238. });
  239. }
  240. } else if (data.contains("tool_call")) {
  241. result.tool_calls.push_back({
  242. data["tool_call"]["name"],
  243. data["tool_call"]["arguments"].dump(),
  244. /* id= */ "",
  245. });
  246. } else if (data.contains("response")) {
  247. const auto & response = data["response"];
  248. result.content = response.is_string() ? response.get<std::string>() : response.dump(2);
  249. }
  250. return result;
  251. }
// Mistral Nemo: tool calls are emitted as "[TOOL_CALLS]" followed by a JSON
// array of {"name", "arguments", "id"} objects.
static common_chat_params common_chat_params_init_mistral_nemo(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;
    // Grammar is lazy (trigger-activated) unless tool use is mandatory.
    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        auto schemas = json::array();
        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            schemas.push_back({
                {"type", "object"},
                {"properties", {
                    // Important note: the model is probably trained to take a JSON stringified arguments value.
                    // It's hard to constrain that for now (while reusing the JSON schema conversion), so we're just expecting a plain object.
                    {"name", {
                        {"type", "string"},
                        {"const", function["name"]},
                    }},
                    {"arguments", function["parameters"]},
                    {"id", {
                        {"type", "string"},
                        // Nemo's template expects a 9-character alphanumeric ID.
                        {"pattern", "^[a-zA-Z0-9]{9}$"},
                    }},
                }},
                {"required", json::array({"name", "arguments", "id"})},
            });
        });
        auto schema = json {
            {"type", "array"},
            {"items", schemas.size() == 1 ? schemas[0] : json {{"anyOf", schemas}}},
            {"minItems", 1},
        };
        if (!inputs.parallel_tool_calls) {
            // Without parallel calls, the array may hold at most one call.
            schema["maxItems"] = 1;
        }
        builder.add_rule("root", "\"[TOOL_CALLS]\" " + builder.add_schema("tool_calls", schema));
    }, grammar_options);
    // The grammar activates when the output starts with this prefix.
    data.grammar_triggers.push_back({"[TOOL_CALLS]", /* .at_start = */ true});
    data.prompt = tmpl.apply(inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_MISTRAL_NEMO;
    return data;
}
// Mistral Nemo output: content followed by "[TOOL_CALLS]" and a JSON array.
static common_chat_msg common_chat_parse_mistral_nemo(const std::string & input) {
    return parse_prefixed_json_tool_call_array(input, "[TOOL_CALLS]");
}
  296. static void expect_tool_parameters(const std::string & name, const json & parameters, const std::vector<std::string> & expected_properties) {
  297. if (!parameters.is_object() || !parameters.contains("type") || parameters["type"] != "object" || !parameters.contains("properties") || !parameters.contains("required")) {
  298. throw std::runtime_error("Parameters of tool " + name + " must be an object w/ required properties");
  299. }
  300. const auto & parameters_properties = parameters.at("properties");
  301. const auto & parameters_required = parameters.at("required");
  302. for (const auto & prop : expected_properties) {
  303. if (!parameters_properties.contains(prop)) {
  304. throw std::runtime_error("Parameters of tool " + name + " is missing property: " + prop);
  305. }
  306. if (std::find(parameters_required.begin(), parameters_required.end(), json(prop)) == parameters_required.end()) {
  307. throw std::runtime_error("Parameters of tool " + name + " must have property marked as required: " + prop);
  308. }
  309. }
  310. if (parameters_properties.size() != expected_properties.size()) {
  311. throw std::runtime_error("Parameters of tool " + name + " must only have these properties:" + string_join(expected_properties, ", "));
  312. }
  313. }
// Llama 3.x tool calls as JSON objects ({"name": ..., "parameters": ...}),
// optionally also allowing Meta's builtin-tool syntax
// "<|python_tag|>tool.call(arg=value)" when allow_python_tag_builtin_tools is set.
static common_chat_params common_chat_params_init_llama_3_1_tool_calls(const common_chat_template & tmpl, const struct common_chat_inputs & inputs, bool allow_python_tag_builtin_tools) {
    auto builtin_tools = json::array();
    common_chat_params data;
    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        std::vector<std::string> tool_rules;
        // Emits a "<|python_tag|>name.call(k=v)" rule and returns true only for
        // tools matching Meta's known builtin signatures; false otherwise.
        auto handle_builtin_tool = [&](const std::string & name, const json & parameters) {
            if (name == "wolfram_alpha") {
                // https://github.com/meta-llama/llama-stack/blob/main/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py
                expect_tool_parameters(name, parameters, {"query"});
            } else if (name == "web_search" || name == "brave_search") {
                // https://github.com/meta-llama/llama-stack/blob/main/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py
                expect_tool_parameters(name, parameters, {"query"});
            } else if (name == "python" || name == "code_interpreter") {
                // https://github.com/meta-llama/llama-stack/blob/main/llama_stack/providers/inline/tool_runtime/code_interpreter/code_interpreter.py
                expect_tool_parameters(name, parameters, {"code"});
            } else {
                return false;
            }
            // Build "key=" <value-schema> fragments for the call's arguments.
            std::vector<std::string> kvs;
            for (const auto & [key, value] : parameters.at("properties").items()) {
                kvs.push_back("\"" + key + "=\" " + builder.add_schema(name + "-args-" + key, value));
            }
            tool_rules.push_back(
                builder.add_rule(
                    name + "-call",
                    "\"<|python_tag|>" + name + ".call(\" " + string_join(kvs, " \", \" ") + " \")\""));
            builtin_tools.push_back(name);
            return true;
        };
        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            std::string name = function["name"];
            auto parameters = function["parameters"];
            builder.resolve_refs(parameters);
            // https://github.com/meta-llama/llama-stack/tree/main/llama_stack/providers/remote/tool_runtime
            if (allow_python_tag_builtin_tools) {
                handle_builtin_tool(name, parameters);
            }
            // Every tool also gets the plain JSON call syntax (with an optional
            // leading "type": "function" member).
            tool_rules.push_back(
                builder.add_rule(
                    name + "-call",
                    "\"{\" ( \"\\\"type\\\": \\\"function\\\", \" | space ) "
                    "\"\\\"name\\\": \\\"" + name + "\\\", \\\"parameters\\\": \" " +
                    builder.add_schema(name + "-args", parameters) +
                    " \"}\""));
            data.grammar_triggers.push_back({"{\"name\": \"" + name + "\"", /* .at_start = */ true});
        });
        data.grammar_triggers.push_back({"{\"name\":", /* .at_start = */ true});
        data.grammar_triggers.push_back({"{\"type\": \"function\"", /* .at_start = */ true});
        if (!builtin_tools.empty()) {
            data.grammar_triggers.push_back({"<|python_tag|>", /* .at_start = */ false});
        }
        builder.add_rule("root", string_join(tool_rules, " | "));
    }, grammar_options);
    // Stop generation at <|eom_id|> in addition to the default stop tokens.
    data.additional_stops.push_back("<|eom_id|>");
    data.prompt = tmpl.apply(inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt, {
        {"tools_in_user_message", false},
        {"builtin_tools", builtin_tools.empty() ? json() : builtin_tools},
    });
    data.format = allow_python_tag_builtin_tools && !builtin_tools.empty()
        ? COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS
        : COMMON_CHAT_FORMAT_LLAMA_3_X;
    return data;
}
// Parses Llama 3.x output: either a builtin-tool call
// "<|python_tag|>name.call(arg=value)" (when with_builtin_tools is set), or
// JSON tool-call objects handled by parse_json_tool_calls.
static common_chat_msg common_chat_parse_llama_3_1(const std::string & input, bool with_builtin_tools = false) {
    // TODO: tighten & simplify the parser, don't accept leading text context.
    static std::regex function_regex("\\{[\\s\\n\\r]*(?:\"type\"[\\s\\n\\r]*:[\\s\\n\\r]*\"function\"[\\s\\n\\r]*,[\\s\\n\\r]*|[\\s\\n\\r]*)\"name\"[\\s\\n\\r]*:[\\s\\n\\r]*\"([^\"]+)\"[\\s\\n\\r]*,[\\s\\n\\r]*\"parameters\": ");
    static std::regex close_regex("\\}");
    static std::regex builtin_call_regex("<\\|python_tag\\|>([^.(]+)\\.call\\((.*)\\)");
    if (with_builtin_tools) {
        std::smatch match;
        if (std::regex_match(input, match, builtin_call_regex)) {
            auto name = match[1].str();
            auto raw_args = match[2].str();
            // TODO: if/when builtin tools start accepting more than 1 argument, use parse_json for real parsing.
            // Splits "key=value" at the first '='; the value must be valid JSON.
            auto it_eq = raw_args.find('=');
            auto arg_name = raw_args.substr(0, it_eq);
            auto arg_value_str = raw_args.substr(it_eq + 1);
            auto arg_value = json::parse(arg_value_str);
            return {
                /* .role = */ "assistant",
                /* .content = */ match.prefix().str(),
                /* .tool_calls = */ {
                    {
                        /* .name = */ match[1],
                        /* .arguments = */ (json {
                            {arg_name, arg_value},
                        }).dump(),
                        /* .id = */ "",
                    },
                },
            };
        }
    }
    return parse_json_tool_calls(input, std::nullopt, function_regex, close_regex);
}
// DeepSeek R1: tool calls are wrapped in a <|tool▁calls▁begin|> block, each
// call formatted as
// "<|tool▁call▁begin|>function<|tool▁sep|>NAME\n```json\nARGS```<|tool▁call▁end|>".
static common_chat_params common_chat_params_init_deepseek_r1(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;
    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        std::vector<std::string> tool_rules;
        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            std::string name = function["name"];
            auto parameters = function["parameters"];
            auto args_rule = builder.add_schema(name + "-args", parameters);
            tool_rules.push_back(builder.add_rule(name + "-call",
                "\"<|tool▁call▁begin|>function<|tool▁sep|>" + name + "\\n```json\\n\" " + args_rule + " \"```<|tool▁call▁end|>\""));
        });
        data.grammar_triggers.push_back({"<|tool▁calls▁begin|>", /* .at_start = */ false});
        // A single call, or a repeatable group when parallel calls are enabled.
        builder.add_rule("root", "\"<|tool▁calls▁begin|>\" (" + string_join(tool_rules, " | ") + ")" + (inputs.parallel_tool_calls ? "*" : "") + " space");
    }, grammar_options);
    data.prompt = tmpl.apply(inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_DEEPSEEK_R1;
    return data;
}
  431. static common_chat_msg common_chat_parse_deepseek_r1(const std::string & input) {
  432. static std::regex trigger_regex("<|tool▁calls▁begin|>");
  433. static std::regex function_regex("<|tool▁call▁begin|>function<|tool▁sep|>([^\n]+)\n```json\n");
  434. static std::regex close_regex("```<|tool▁call▁end|>");
  435. return parse_json_tool_calls(input, trigger_regex, function_regex, close_regex);
  436. }
// FireFunction v2: tool calls are emitted as " functools[{...}, ...]".
static common_chat_params common_chat_params_init_firefunction_v2(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    // NOTE(review): debug trace left in; consider the LOG_* facilities instead.
    fprintf(stderr, "%s\n", __func__);
    common_chat_params data;
    // This template takes the tool list as a pre-serialized "functions" string
    // rather than through the usual tools argument.
    data.prompt = tmpl.apply(inputs.messages, /* tools= */ nullptr, inputs.add_generation_prompt, {
        {"datetime", "Jan 29 2025 13:00:00 GMT"},
        {"functions", json(inputs.tools.empty() ? "" : inputs.tools.dump(2))},
    }, /* adjust_inputs= */ false);
    if (!inputs.tools.is_null() && !inputs.tools.empty()) {
        data.grammar_lazy = inputs.tool_choice != "required";
        data.grammar = build_grammar([&](const common_grammar_builder & builder) {
            auto schemas = json::array();
            foreach_function(inputs.tools, [&](const json & tool) {
                const auto & function = tool["function"];
                schemas.push_back({
                    {"type", "object"},
                    {"properties", {
                        {"name", {
                            {"type", "string"},
                            {"const", function["name"]},
                        }},
                        {"arguments", function["parameters"]},
                    }},
                    // NOTE(review): "id" is listed as required but is not
                    // declared under "properties" — confirm whether intended.
                    {"required", json::array({"name", "arguments", "id"})},
                });
            });
            auto schema = json {
                {"type", "array"},
                {"items", schemas.size() == 1 ? schemas[0] : json {{"anyOf", schemas}}},
                {"minItems", 1},
            };
            if (!inputs.parallel_tool_calls) {
                schema["maxItems"] = 1;
            }
            builder.add_rule("root", "\" functools\"? " + builder.add_schema("tool_calls", schema));
        }, grammar_options);
        data.grammar_triggers.push_back({" functools[", /* .at_start = */ false});
        data.format = COMMON_CHAT_FORMAT_FIREFUNCTION_V2;
    } else {
        // No tools: plain content only.
        data.format = COMMON_CHAT_FORMAT_CONTENT_ONLY;
    }
    return data;
}
// FireFunction v2 output: content followed by " functools[...]".
// rstrip_prefix=1 keeps the trailing "[" so the remainder parses as a JSON array.
static common_chat_msg common_chat_parse_firefunction_v2(const std::string & input) {
    return parse_prefixed_json_tool_call_array(input, " functools[", /* rstrip_prefix= */ 1);
}
static common_chat_params common_chat_params_init_functionary_v3_2(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    // >>>all\nlet's call functions>>>fn1\n{"arg1": 1...}\n>>>fn2\n{"arg1": 1...}...
    // Using ">>>f1\n", ">>>f2\n"... as trigger words for the grammar
    common_chat_params data;
    data.prompt = tmpl.apply(inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2;
    if (!inputs.tools.is_null() && !inputs.tools.empty()) {
        data.grammar_lazy = inputs.tool_choice != "required";
        data.grammar = build_grammar([&](const common_grammar_builder & builder) {
            std::vector<std::string> first_tool_rules;
            std::vector<std::string> subsequent_tool_rules;
            foreach_function(inputs.tools, [&](const json & tool) {
                const auto & function = tool["function"];
                std::string name = function["name"];
                auto parameters = function["parameters"];
                auto args_rule = builder.add_schema(name + "-args", parameters);
                // The first call has no ">>>" prefix; subsequent ones do.
                first_tool_rules.push_back(builder.add_rule(name + "-call", "\"" + name + "\\n\" " + args_rule));
                subsequent_tool_rules.push_back(builder.add_rule(name + "-call2", "\">>>" + name + "\\n\" " + args_rule));
                data.grammar_triggers.push_back({name, /* .at_start = */ true});
                data.grammar_triggers.push_back({">>>" + name, /* .at_start = */ false});
            });
            auto first_rule = first_tool_rules.empty() ? "" : builder.add_rule("first_tool_call", string_join(first_tool_rules, " | ")) + " space";
            if (inputs.parallel_tool_calls) {
                auto subsequent_rule = builder.add_rule("subsequent_tool_call", string_join(subsequent_tool_rules, " | ")) + " space";
                builder.add_rule("root", first_rule + " (" + subsequent_rule + ")*");
            } else {
                builder.add_rule("root", first_rule);
            }
        }, grammar_options);
    }
    return data;
}
  514. static bool consume(std::string::const_iterator & it, const std::string::const_iterator & end, const std::string & expected) {
  515. auto expected_it = expected.begin();
  516. auto tmp_it = it;
  517. while (tmp_it != end && expected_it != expected.end() && *tmp_it == *expected_it) {
  518. ++tmp_it;
  519. ++expected_it;
  520. }
  521. if (expected_it == expected.end()) {
  522. it = tmp_it;
  523. return true;
  524. }
  525. return false;
  526. }
// Parses Functionary v3.2 output: an optional "all\n<content>" section,
// followed by ">>>fn\n{json args}" call sections.
static common_chat_msg common_chat_parse_functionary_v3_2(const std::string & input) {
    static std::regex function_regex(R"((?:>>>)?(\w+)\n)");
    static std::regex close_regex(R"($|(?=>>>))");
    std::string content;
    auto it = input.begin();
    const auto end = input.end();
    // A leading "all\n" marks free-form content, up to the next function header.
    if (consume(it, end, "all\n")) {
        std::smatch match;
        if (std::regex_search(it, end, match, function_regex)) {
            auto fun_it = match.prefix().second;
            content = std::string(it, fun_it);
            it = fun_it;
        } else {
            // No function call after "all\n": everything is plain content.
            common_chat_msg res;
            res.role = "assistant";
            res.content = std::string(it, end);
            return res;
        }
    }
    // TODO: tighten & simplify.
    // NOTE: the content captured above replaces whatever content
    // parse_json_tool_calls aggregated from the remainder.
    auto res = parse_json_tool_calls(std::string(it, end), std::nullopt, function_regex, close_regex);
    res.content = content;
    return res;
}
  551. static common_chat_params common_chat_params_init_functionary_v3_1_llama_3_1(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
  552. // https://github.com/MeetKai/functionary/blob/main/tests/prompt_test_v3-llama3.1.txt
  553. common_chat_params data;
  554. json tools = inputs.tools.is_null() ? inputs.tools : json::array();
  555. std::string python_code_argument_name;
  556. auto has_raw_python = false;
  557. data.grammar_lazy = inputs.tool_choice != "required";
  558. data.grammar = build_grammar([&](const common_grammar_builder & builder) {
  559. std::vector<std::string> tool_rules;
  560. foreach_function(inputs.tools, [&](const json & tool) {
  561. const auto & function = tool["function"];
  562. const auto & parameters = function["parameters"];
  563. std::string name = function["name"];
  564. if (name == "python" || name == "ipython") {
  565. if (!parameters.contains("type")) {
  566. throw std::runtime_error("Missing type in python tool");
  567. }
  568. has_raw_python = true;
  569. auto type = parameters.at("type");
  570. if (type == "object") {
  571. auto properties = parameters.at("properties");
  572. for (auto it = properties.begin(); it != properties.end(); ++it) {
  573. if (it.value().at("type") == "string") {
  574. if (!python_code_argument_name.empty()) {
  575. throw std::runtime_error("Multiple string arguments found in python tool");
  576. }
  577. python_code_argument_name = it.key();
  578. }
  579. }
  580. if (python_code_argument_name.empty()) {
  581. throw std::runtime_error("No string argument found in python tool");
  582. }
  583. } else if (type != "string") {
  584. throw std::runtime_error("Invalid type in python tool: " + type.dump());
  585. }
  586. }
  587. tool_rules.push_back(builder.add_rule(name + "-call", "\"<function=" + name + ">\" " + builder.add_schema(name + "-args", parameters) + " \"</function>\" space"));
  588. });
  589. if (has_raw_python) {
  590. tool_rules.push_back(builder.add_rule("python-call", "\"<|python_tag|>\" .*"));
  591. data.grammar_triggers.push_back({"<|python_tag|>", /* .at_start = */ false});
  592. }
  593. auto tool_call = builder.add_rule("tool_call", string_join(tool_rules, " | ")) + " space";
  594. builder.add_rule("root", inputs.parallel_tool_calls ? "(" + tool_call + ")+" : tool_call);
  595. data.grammar_triggers.push_back({"<function=", /* .at_start = */ false});
  596. }, grammar_options);
  597. data.prompt = tmpl.apply(inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
  598. // TODO: if (has_raw_python)
  599. data.format = COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1;
  600. return data;
  601. }
// Parses Functionary v3.1 output: either raw python code after <|python_tag|>,
// or "<function=NAME>{json args}</function>" tool calls.
static common_chat_msg common_chat_parse_functionary_v3_1_llama_3_1(const std::string & input) {
    // This version of Functionary still supports the llama 3.1 tool call format for the python tool.
    static std::regex python_tag_regex(R"(<\|python_tag\|>([\s\S\n]*)$)");
    std::smatch match;
    if (std::regex_search(input, match, python_tag_regex)) {
        // Everything after the tag becomes the "code" argument of a python call.
        auto code = match[1].str();
        return {
            /* .role = */ "assistant",
            /* .content = */ match.prefix().str(),
            /* .tool_calls = */ {
                {
                    /* .name = */ "python",
                    /* .arguments = */ (json {{"code", code}}).dump(),
                    /* .id = */ "",
                },
            }
        };
    }
    static std::regex function_regex(R"(<function=(\w+)>)");
    static std::regex close_regex(R"(</function>)");
    // TODO: tighten & simplify.
    return parse_json_tool_calls(input, std::nullopt, function_regex, close_regex);
}
// Hermes 2 Pro: tool calls as <tool_call>{"name": ..., "arguments": ...}</tool_call> blocks.
static common_chat_params common_chat_params_init_hermes_2_pro(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;
    // (content)?(<tool_call>{"name": "foo", "arguments": {"a": 1}}</tool_call>)*
    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        std::vector<std::string> tool_rules;
        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            std::string name = function["name"];
            auto parameters = function["parameters"];
            builder.resolve_refs(parameters);
            tool_rules.push_back(builder.add_schema(name + "-call", {
                {"type", "object"},
                {"properties", json {
                    {"name", json {{"const", name}}},
                    {"arguments", parameters},
                }},
                {"required", json::array({"name", "arguments"})},
            }));
        });
        auto tool_call = "\"<tool_call>\" space " + builder.add_rule("tool_call", string_join(tool_rules, " | ")) + " \"</tool_call>\" space";
        builder.add_rule("root", inputs.parallel_tool_calls ? "(" + tool_call + ")+" : tool_call);
        data.grammar_triggers.push_back({"<tool_call>", /* .at_start = */ false});
        // Not really a trigger but need to print this special token to get a successful parse.
        data.grammar_triggers.push_back({"</tool_call>", /* .at_start = */ false});
    }, grammar_options);
    data.prompt = tmpl.apply(inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_HERMES_2_PRO;
    return data;
}
  655. static common_chat_msg common_chat_parse_hermes_2_pro(const std::string & input) {
  656. try {
  657. std::regex start_pattern(R"([\n\s]*<tool_call>)");
  658. std::regex middle_pattern(R"([\n\s]*</tool_call>[\n\s]*<tool_call>)");
  659. std::regex end_pattern(R"([\n\s]*</tool_call>[\n\s]*$)");
  660. auto end = input.end();
  661. std::sregex_iterator rend;
  662. std::sregex_iterator rit(input.begin(), end, start_pattern);
  663. if (rit == rend) {
  664. return {
  665. /* .role = */ "assistant",
  666. /* .content = */ input,
  667. /* .tool_calls = */ {},
  668. };
  669. }
  670. common_chat_msg result;
  671. result.role = "assistant";
  672. result.content = rit->prefix();
  673. auto it = rit->suffix().first;
  674. while (it != end) {
  675. json call;
  676. if (!parse_json(it, end, call)) {
  677. throw std::runtime_error("Failed to parse json tool call");
  678. }
  679. const auto & arguments = call["arguments"];
  680. result.tool_calls.push_back({
  681. call["name"],
  682. arguments.dump(),
  683. // arguments.is_string() ? arguments.get<std::string>() : arguments.dump(),
  684. /* id= */ "",
  685. });
  686. rit = {it, end, middle_pattern};
  687. if (rit != rend) {
  688. it = rit->suffix().first;
  689. } else {
  690. rit = {it, end, end_pattern};
  691. if (rit == rend) {
  692. throw std::runtime_error("Malformed input, missing </tool_call>");
  693. }
  694. break;
  695. }
  696. }
  697. return result;
  698. } catch (const std::exception & e) {
  699. return {
  700. /* .role = */ "assistant",
  701. /* .content = */ input,
  702. /* .tool_calls = */ {},
  703. };
  704. }
  705. }
  706. static common_chat_params common_chat_params_init_without_tools(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
  707. common_chat_params data;
  708. data.prompt = tmpl.apply(inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
  709. data.format = COMMON_CHAT_FORMAT_CONTENT_ONLY;
  710. data.grammar_lazy = false;
  711. if (!inputs.json_schema.is_null()) {
  712. if (!inputs.grammar.empty()) {
  713. throw std::runtime_error("Either \"json_schema\" or \"grammar\" can be specified, but not both");
  714. }
  715. data.grammar = json_schema_to_grammar(inputs.json_schema);
  716. } else {
  717. data.grammar = inputs.grammar.empty();
  718. }
  719. return data;
  720. }
  721. common_chat_params common_chat_params_init(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
  722. auto has_tools = !inputs.tools.is_null() && inputs.tool_choice != "none";
  723. LOG_DBG("[%s] has_tools=%s\n", __func__, has_tools ? "true" : "false");
  724. if (has_tools && !inputs.grammar.empty()) {
  725. throw std::runtime_error("Cannot specify grammar with tools");
  726. }
  727. const auto & src = tmpl.source();
  728. if (src.find(">>>all") != std::string::npos) {
  729. // Functionary prepends "all\n" to plain content outputs, so we use the parser no matter when
  730. return common_chat_params_init_functionary_v3_2(tmpl, inputs);
  731. }
  732. if (src.find(" functools[") != std::string::npos) {
  733. // Firefunction v2 requires datetime and functions in the context, even w/o tools.
  734. return common_chat_params_init_firefunction_v2(tmpl, inputs);
  735. }
  736. if (!has_tools) {
  737. return common_chat_params_init_without_tools(tmpl, inputs);
  738. }
  739. if (src.find("<tool_call>") != std::string::npos) {
  740. return common_chat_params_init_hermes_2_pro(tmpl, inputs);
  741. }
  742. if (src.find("<|start_header_id|>") != std::string::npos
  743. && src.find("<function=") != std::string::npos) {
  744. return common_chat_params_init_functionary_v3_1_llama_3_1(tmpl, inputs);
  745. }
  746. if (src.find("<|start_header_id|>ipython<|end_header_id|>") != std::string::npos) {
  747. auto allow_python_tag_builtin_tools = src.find("<|python_tag|>") != std::string::npos;
  748. return common_chat_params_init_llama_3_1_tool_calls(tmpl, inputs, allow_python_tag_builtin_tools);
  749. }
  750. if (src.find("<|tool▁calls▁begin|>") != std::string::npos) {
  751. return common_chat_params_init_deepseek_r1(tmpl, inputs);
  752. }
  753. if (src.find("[TOOL_CALLS]") != std::string::npos) {
  754. return common_chat_params_init_mistral_nemo(tmpl, inputs);
  755. }
  756. return common_chat_params_init_generic(tmpl, inputs);
  757. }
  758. static common_chat_msg common_chat_parse_content_only(const std::string & input) {
  759. return {
  760. /* .role = */ "assistant",
  761. /* .content = */ input,
  762. /* .tool_calls = */ {},
  763. };
  764. }
  765. common_chat_msg common_chat_parse(const std::string & input, common_chat_format format) {
  766. switch (format) {
  767. case COMMON_CHAT_FORMAT_CONTENT_ONLY:
  768. return common_chat_parse_content_only(input);
  769. case COMMON_CHAT_FORMAT_GENERIC:
  770. return common_chat_parse_generic(input);
  771. case COMMON_CHAT_FORMAT_MISTRAL_NEMO:
  772. return common_chat_parse_mistral_nemo(input);
  773. case COMMON_CHAT_FORMAT_LLAMA_3_X:
  774. return common_chat_parse_llama_3_1(input);
  775. case COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS:
  776. return common_chat_parse_llama_3_1(input, /* with_builtin_tools= */ true);
  777. case COMMON_CHAT_FORMAT_DEEPSEEK_R1:
  778. return common_chat_parse_deepseek_r1(input);
  779. case COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2:
  780. return common_chat_parse_functionary_v3_2(input);
  781. case COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1:
  782. return common_chat_parse_functionary_v3_1_llama_3_1(input);
  783. case COMMON_CHAT_FORMAT_HERMES_2_PRO:
  784. return common_chat_parse_hermes_2_pro(input);
  785. case COMMON_CHAT_FORMAT_FIREFUNCTION_V2:
  786. return common_chat_parse_firefunction_v2(input);
  787. default:
  788. throw std::runtime_error("Unsupported format: " + common_chat_format_name(format));
  789. }
  790. }