// chat.cpp
#include "chat.hpp"
#include "chat-template.hpp"
#include "json-schema-to-grammar.h"
#include "log.h"
#include "minja.hpp"

std::string common_chat_format_name(common_chat_format format) {
    switch (format) {
        case COMMON_CHAT_FORMAT_CONTENT_ONLY: return "Content-only";
        case COMMON_CHAT_FORMAT_GENERIC: return "Generic";
        case COMMON_CHAT_FORMAT_MISTRAL_NEMO: return "Mistral Nemo";
        case COMMON_CHAT_FORMAT_LLAMA_3_X: return "Llama 3.x";
        case COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS: return "Llama 3.x with builtin tools";
        case COMMON_CHAT_FORMAT_DEEPSEEK_R1: return "DeepSeek R1";
        case COMMON_CHAT_FORMAT_FIREFUNCTION_V2: return "FireFunction v2";
        case COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2: return "Functionary v3.2";
        case COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1: return "Functionary v3.1 Llama 3.1";
        case COMMON_CHAT_FORMAT_HERMES_2_PRO: return "Hermes 2 Pro";
        case COMMON_CHAT_FORMAT_COMMAND_R7B: return "Command R7B";
        default:
            throw std::runtime_error("Unknown chat format");
    }
}

const common_grammar_options grammar_options {
    /* .dotall = */ false,
    /* .compact_spaces = */ false,
    // /* .compact_spaces = */ true,
};

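// Incrementally parses a JSON value starting at `it`. On success, stores the value in `out` and
// advances `it` past it; on failure, returns false and leaves `it` untouched. A SAX pass is used
// first to locate where parsing stops, so trailing non-JSON text after the value is tolerated.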
static bool parse_json(std::string::const_iterator & it, const std::string::const_iterator & end, json & out) {
    // https://json.nlohmann.me/features/parsing/sax_interface/
    struct json_error_locator : public nlohmann::json_sax<json> {
        std::size_t position;
        bool found_error;

        json_error_locator() : position(0), found_error(false) {}

        bool parse_error(std::size_t position, const std::string &, const json::exception &) override {
            this->position = position - 1;
            this->found_error = true;
            return false;
        }
        bool null() override { return true; }
        bool boolean(bool) override { return true; }
        bool number_integer(number_integer_t) override { return true; }
        bool number_unsigned(number_unsigned_t) override { return true; }
        bool number_float(number_float_t, const string_t &) override { return true; }
        bool string(string_t &) override { return true; }
        bool binary(binary_t &) override { return true; }
        bool start_object(std::size_t) override { return true; }
        bool key(string_t &) override { return true; }
        bool end_object() override { return true; }
        bool start_array(std::size_t) override { return true; }
        bool end_array() override { return true; }
    };
    json_error_locator err_loc;
    json::sax_parse(it, end, &err_loc);

    std::string::const_iterator temptative_end;
    if (err_loc.found_error) {
        temptative_end = it + err_loc.position;
    } else {
        temptative_end = end;
    }
    std::string json_sub {it, temptative_end};
    try {
        out = json::parse(json_sub);
        it = temptative_end;
        return true;
    } catch (const std::exception &) {
        return false;
    }
}

/**
 * Takes a prefix regex that must have 1 group to capture the function name, a closing suffix, and expects json parameters in between.
 * Aggregates the prefix, suffix and in-between text into the content.
 */
static common_chat_msg parse_json_tool_calls(
    const std::string& input,
    const std::optional<std::regex> & trigger_opt,
    const std::regex & function_regex,
    const std::regex & close_regex) {
    std::smatch match;

    common_chat_msg result;
    result.role = "assistant";

    auto end = input.end();
    auto it = input.begin();

    if (trigger_opt) {
        if (!std::regex_search(it, end, match, *trigger_opt)) {
            result.content = input;
            return result;
        }
        result.content = match.prefix().str();
        it = match.suffix().first;
    }

    while (it != end) {
        std::sregex_iterator rend;
        std::sregex_iterator rit(it, end, function_regex);
        if (rit == rend) {
            fprintf(stderr, "No more tool calls found\n");
            result.content += std::string(it, end);
            break;
        }
        auto name = rit->str(1);
        result.content += std::string(it, rit->prefix().second);
        it = rit->suffix().first;

        json arguments;
        if (!parse_json(it, end, arguments)) {
            throw std::runtime_error("Failed to parse json tool call arguments");
        }
        if (!std::regex_search(it, end, match, close_regex)) {
            throw std::runtime_error("Malformed input, missing closing pattern");
        }
        it = match.suffix().first;
        result.tool_calls.push_back({name, arguments.is_string() ? arguments.get<std::string>() : arguments.dump(), /* id= */ ""});
    }
    return result;
}

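// Splits the input at `prefix`: everything before it becomes the message content, everything after
// it is parsed as a JSON array of {"name", "arguments", "id"?} tool calls. `rstrip_prefix` keeps that
// many trailing characters of the prefix as part of the JSON (e.g. a leading '[').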
static common_chat_msg parse_prefixed_json_tool_call_array(const std::string& input, const std::string & prefix, size_t rstrip_prefix = 0) {
    auto content_end = input.find(prefix);
    size_t tc_start = std::string::npos;

    common_chat_msg result;
    result.role = "assistant";
    const auto process_tool_calls = [&](const json & tool_calls) {
        for (const auto & tool_call : tool_calls) {
            const auto & arguments = tool_call["arguments"];
            result.tool_calls.push_back({
                tool_call["name"],
                arguments.is_string() ? arguments.get<std::string>() : arguments.dump(),
                tool_call.contains("id") ? tool_call["id"] : "",
            });
        }
    };
    if (content_end == std::string::npos) {
        result.content = input;
    } else {
        tc_start = content_end + prefix.size() - rstrip_prefix;
        result.content = input.substr(0, content_end);
        auto tool_calls = json::parse(input.substr(tc_start));
        process_tool_calls(tool_calls);
    }
    return result;
}

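// Iterates over a JSON array of tools and invokes `fn` for each entry of type "function",
// skipping (and logging) anything else.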
static void foreach_function(const json & tools, const std::function<void(const json &)> & fn) {
    for (const auto & tool : tools) {
        if (!tool.contains("type") || tool["type"] != "function" || !tool.contains("function")) {
            LOG_INF("Skipping tool without function: %s", tool.dump(2).c_str());
            continue;
        }
        fn(tool);
    }
}

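// Renders the chat template over the given messages/tools/extra context, with BOS/EOS insertion
// disabled in the template itself.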
static std::string apply(
    const common_chat_template & tmpl,
    const nlohmann::ordered_json & messages,
    const nlohmann::ordered_json & tools,
    bool add_generation_prompt,
    const nlohmann::ordered_json & extra_context = nlohmann::ordered_json())
{
    minja::chat_template_inputs tmpl_inputs;
    tmpl_inputs.messages = messages;
    tmpl_inputs.tools = tools;
    tmpl_inputs.add_generation_prompt = add_generation_prompt;
    tmpl_inputs.extra_context = extra_context;
    // TODO: add flag to control date/time, if only for testing purposes.
    // tmpl_inputs.now = std::chrono::system_clock::now();

    minja::chat_template_options tmpl_opts;
    tmpl_opts.use_bos_token = false;
    tmpl_opts.use_eos_token = false;

    return tmpl.apply(tmpl_inputs, tmpl_opts);
}

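// Generic fallback: constrains the output to a JSON object containing either a `tool_call`
// (or `tool_calls` when parallel calls are enabled) or a free-form `response`, and prepends a
// system message instructing the model to reply in that JSON format.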
static common_chat_params common_chat_params_init_generic(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;

    auto tool_call_schemas = json::array();
    foreach_function(inputs.tools, [&](const json & tool) {
        const auto & function = tool["function"];
        auto tool_schema = json {
            {"type", "object"},
            {"properties", {
                {"name", {
                    {"type", "string"},
                    {"const", function["name"]},
                }},
                {"arguments", function["parameters"]},
            }},
            {"required", json::array({"name", "arguments"})},
        };
        if (function.contains("description")) {
            tool_schema["description"] = function["description"];
        }
        if (inputs.parallel_tool_calls) {
            tool_schema["properties"]["id"] = {
                {"type", "string"},
                {"minLength", 4},
            };
            tool_schema["required"].push_back("id");
        }
        tool_call_schemas.emplace_back(tool_schema);
    });
    const auto tool_call =
        inputs.parallel_tool_calls
            ? json {
                {"type", "object"},
                {"properties", {
                    {"tool_calls", {
                        {"type", "array"},
                        {"items", tool_call_schemas.size() == 1 ? tool_call_schemas[0] : json {
                            {"anyOf", tool_call_schemas},
                        }},
                        {"minItems", 1},
                    }},
                }},
                {"required", json::array({"tool_calls"})},
            }
            : json {
                {"type", "object"},
                {"properties", {
                    {"tool_call", tool_call_schemas.size() == 1 ? tool_call_schemas[0] : json {
                        {"anyOf", tool_call_schemas},
                    }},
                }},
                {"required", json::array({"tool_call"})},
            };
    const auto schema =
        inputs.tool_choice != "required"
            ? json {
                {"anyOf", json::array({
                    tool_call,
                    {
                        {"type", "object"},
                        {"properties", {
                            {"response", inputs.json_schema.is_null()
                                ? json {{"type", "string"}}
                                : inputs.json_schema
                            },
                        }},
                        {"required", json::array({"response"})},
                    },
                })}
            }
            : tool_call;

    data.grammar_lazy = false;
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        builder.add_schema("root", schema);
    }, grammar_options);

    auto tweaked_messages = common_chat_template::add_system(
        inputs.messages,
        "Respond in JSON format, either with `tool_call` (a request to call tools) or with `response` reply to the user's request");

    data.prompt = apply(tmpl, tweaked_messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_GENERIC;
    return data;
}

static common_chat_msg common_chat_parse_generic(const std::string & input) {
    json data = json::parse(input);
    common_chat_msg result;
    result.role = "assistant";
    if (data.contains("tool_calls")) {
        for (const auto & tool_call : data["tool_calls"]) {
            result.tool_calls.push_back({
                tool_call["name"],
                tool_call["arguments"].dump(),
                tool_call.contains("id") ? tool_call["id"] : "",
            });
        }
    } else if (data.contains("tool_call")) {
        result.tool_calls.push_back({
            data["tool_call"]["name"],
            data["tool_call"]["arguments"].dump(),
            /* id= */ "",
        });
    } else if (data.contains("response")) {
        const auto & response = data["response"];
        result.content = response.is_string() ? response.get<std::string>() : response.dump(2);
    }
    return result;
}

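// Mistral Nemo: tool calls are emitted as a [TOOL_CALLS] prefix followed by a JSON array of
// {"name", "arguments", "id"} objects, with a 9-character alphanumeric id.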
static common_chat_params common_chat_params_init_mistral_nemo(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;
    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        auto schemas = json::array();
        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            schemas.push_back({
                {"type", "object"},
                {"properties", {
                    // Important note: the model is probably trained to take a JSON stringified arguments value.
                    // It's hard to constrain that for now (while reusing the JSON schema conversion), so we're just expecting a plain object.
                    {"name", {
                        {"type", "string"},
                        {"const", function["name"]},
                    }},
                    {"arguments", function["parameters"]},
                    {"id", {
                        {"type", "string"},
                        // Nemo's template expects a 9-character alphanumeric ID.
                        {"pattern", "^[a-zA-Z0-9]{9}$"},
                    }},
                }},
                {"required", json::array({"name", "arguments", "id"})},
            });
        });
        auto schema = json {
            {"type", "array"},
            {"items", schemas.size() == 1 ? schemas[0] : json {{"anyOf", schemas}}},
            {"minItems", 1},
        };
        if (!inputs.parallel_tool_calls) {
            schema["maxItems"] = 1;
        }
        builder.add_rule("root", "\"[TOOL_CALLS]\" " + builder.add_schema("tool_calls", schema));
    }, grammar_options);
    data.grammar_triggers.push_back({"[TOOL_CALLS]", /* .at_start = */ true});
    data.prompt = apply(tmpl, inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_MISTRAL_NEMO;
    return data;
}

static common_chat_msg common_chat_parse_mistral_nemo(const std::string & input) {
    return parse_prefixed_json_tool_call_array(input, "[TOOL_CALLS]");
}

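// Command R7B: tool calls are wrapped in <|START_ACTION|>...<|END_ACTION|> as a JSON array of
// {"tool_call_id", "tool_name", "parameters"}; plain responses and "thinking" use their own
// <|START_RESPONSE|>/<|START_THINKING|> markers.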
static common_chat_params common_chat_params_init_command_r7b(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;
    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        auto schemas = json::array();
        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            schemas.push_back({
                {"type", "object"},
                {"properties", {
                    {"tool_call_id", {
                        {"type", "string"},
                        // Command-R's template expects an integer string.
                        {"pattern", "^[0-9]{1,10}$"},
                    }},
                    {"tool_name", {
                        {"type", "string"},
                        {"const", function["name"]},
                    }},
                    {"parameters", function["parameters"]},
                }},
                {"required", json::array({"tool_call_id", "tool_name", "parameters"})},
            });
        });
        auto schema = json {
            {"type", "array"},
            {"items", schemas.size() == 1 ? schemas[0] : json {{"anyOf", schemas}}},
            {"minItems", 1},
        };
        if (!inputs.parallel_tool_calls) {
            schema["maxItems"] = 1;
        }
        builder.add_rule("root", "\"<|START_ACTION|>\" " + builder.add_schema("tool_calls", schema) + " \"<|END_ACTION|>\"");
    }, grammar_options);
    data.grammar_triggers.push_back({"<|START_ACTION|>", /* .at_start = */ false});
    data.preserved_tokens = {
        "<|START_RESPONSE|>",
        "<|END_RESPONSE|>",
        "<|START_THINKING|>",
        "<|END_THINKING|>",
        "<|END_ACTION|>",
    };
    data.prompt = apply(tmpl, inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_COMMAND_R7B;
    return data;
}

static common_chat_msg common_chat_parse_command_r7b(const std::string & input) {
    static std::regex response_regex("<\\|START_RESPONSE\\|>([\\s\\S\\n\\r]*?)<\\|END_RESPONSE\\|>");
    static std::regex thought_action_regex("<\\|START_THINKING\\|>([\\s\\S\\n\\r]*?)<\\|END_THINKING\\|><\\|START_ACTION\\|>([\\s\\S\\n\\r]*?)<\\|END_ACTION\\|>");
    std::smatch match;

    common_chat_msg result;
    result.role = "assistant";
    if (std::regex_match(input, match, response_regex)) {
        result.content = match[1].str();
    } else if (std::regex_match(input, match, thought_action_regex)) {
        result.tool_plan = match[1].str();
        auto actions_str = match[2].str();
        auto actions = json::parse(actions_str);
        for (const auto & action : actions) {
            result.tool_calls.push_back({
                /* .name = */ action["tool_name"],
                /* .arguments = */ action["parameters"].dump(),
                /* .id = */ action["tool_call_id"],
            });
        }
    } else {
        LOG_ERR("Failed to parse command_r output");
        result.content = input;
    }
    return result;
}

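// Validates that a builtin tool's parameters schema is an object whose properties exactly match
// `expected_properties` and that all of them are marked as required; throws otherwise.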
static void expect_tool_parameters(const std::string & name, const json & parameters, const std::vector<std::string> & expected_properties) {
    if (!parameters.is_object() || !parameters.contains("type") || parameters["type"] != "object" || !parameters.contains("properties") || !parameters.contains("required")) {
        throw std::runtime_error("Parameters of tool " + name + " must be an object w/ required properties");
    }
    const auto & parameters_properties = parameters.at("properties");
    const auto & parameters_required = parameters.at("required");
    for (const auto & prop : expected_properties) {
        if (!parameters_properties.contains(prop)) {
            throw std::runtime_error("Parameters of tool " + name + " is missing property: " + prop);
        }
        if (std::find(parameters_required.begin(), parameters_required.end(), json(prop)) == parameters_required.end()) {
            throw std::runtime_error("Parameters of tool " + name + " must have property marked as required: " + prop);
        }
    }
    if (parameters_properties.size() != expected_properties.size()) {
        throw std::runtime_error("Parameters of tool " + name + " must only have these properties: " + string_join(expected_properties, ", "));
    }
}

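// Llama 3.x: tool calls are JSON objects like {"name": ..., "parameters": {...}}; when the template
// supports Meta's builtin tools (wolfram_alpha, brave_search/web_search, python/code_interpreter),
// those may instead be emitted as <|python_tag|>name.call(key=value, ...).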
static common_chat_params common_chat_params_init_llama_3_1_tool_calls(const common_chat_template & tmpl, const struct common_chat_inputs & inputs, bool allow_python_tag_builtin_tools) {
    auto builtin_tools = json::array();
    common_chat_params data;
    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        std::vector<std::string> tool_rules;

        auto handle_builtin_tool = [&](const std::string & name, const json & parameters) {
            if (name == "wolfram_alpha") {
                // https://github.com/meta-llama/llama-stack/blob/main/llama_stack/providers/remote/tool_runtime/wolfram_alpha/wolfram_alpha.py
                expect_tool_parameters(name, parameters, {"query"});
            } else if (name == "web_search" || name == "brave_search") {
                // https://github.com/meta-llama/llama-stack/blob/main/llama_stack/providers/remote/tool_runtime/brave_search/brave_search.py
                expect_tool_parameters(name, parameters, {"query"});
            } else if (name == "python" || name == "code_interpreter") {
                // https://github.com/meta-llama/llama-stack/blob/main/llama_stack/providers/inline/tool_runtime/code_interpreter/code_interpreter.py
                expect_tool_parameters(name, parameters, {"code"});
            } else {
                return false;
            }

            std::vector<std::string> kvs;
            for (const auto & [key, value] : parameters.at("properties").items()) {
                kvs.push_back("\"" + key + "=\" " + builder.add_schema(name + "-args-" + key, value));
            }

            tool_rules.push_back(
                builder.add_rule(
                    name + "-call",
                    "\"<|python_tag|>" + name + ".call(\" " + string_join(kvs, " \", \" ") + " \")\""));
            builtin_tools.push_back(name);

            return true;
        };

        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            std::string name = function["name"];
            auto parameters = function["parameters"];
            builder.resolve_refs(parameters);

            // https://github.com/meta-llama/llama-stack/tree/main/llama_stack/providers/remote/tool_runtime
            if (allow_python_tag_builtin_tools) {
                handle_builtin_tool(name, parameters);
            }
            tool_rules.push_back(
                builder.add_rule(
                    name + "-call",
                    "\"{\" space "
                    "( \"\\\"type\\\":\" space \"\\\"function\\\",\" space )? "
                    "\"\\\"name\\\": \\\"" + name + "\\\", \\\"parameters\\\": \" " +
                    builder.add_schema(name + "-args", parameters) +
                    " \"}\""));
            data.grammar_triggers.push_back({"{\"name\": \"" + name + "\"", /* .at_start = */ true});
        });
        data.grammar_triggers.push_back({"{\"name\":", /* .at_start = */ true});
        data.grammar_triggers.push_back({"{\n  \"name\":", /* .at_start = */ true});
        data.grammar_triggers.push_back({"{\n    \"name\":", /* .at_start = */ true});
        data.grammar_triggers.push_back({"{\"type\": \"function\"", /* .at_start = */ true});
        data.grammar_triggers.push_back({"{\n  \"type\": \"function\"", /* .at_start = */ true});
        data.grammar_triggers.push_back({"{\n    \"type\": \"function\"", /* .at_start = */ true});
        if (!builtin_tools.empty()) {
            data.grammar_triggers.push_back({"<|python_tag|>", /* .at_start = */ false});
        }
        builder.add_rule("root", string_join(tool_rules, " | "));
    }, grammar_options);
    data.additional_stops.push_back("<|eom_id|>");
    data.prompt = apply(tmpl, inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt, {
        {"tools_in_user_message", false},
        {"builtin_tools", builtin_tools.empty() ? json() : builtin_tools},
    });
    data.format = allow_python_tag_builtin_tools && !builtin_tools.empty()
        ? COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS
        : COMMON_CHAT_FORMAT_LLAMA_3_X;
    return data;
}

static common_chat_msg common_chat_parse_llama_3_1(const std::string & input, bool with_builtin_tools = false) {
    // TODO: tighten & simplify the parser, don't accept leading text context.
    static std::regex function_regex("\\{[\\s\\n\\r]*(?:\"type\"[\\s\\n\\r]*:[\\s\\n\\r]*\"function\"[\\s\\n\\r]*,[\\s\\n\\r]*|[\\s\\n\\r]*)\"name\"[\\s\\n\\r]*:[\\s\\n\\r]*\"([^\"]+)\"[\\s\\n\\r]*,[\\s\\n\\r]*\"parameters\": ");
    static std::regex close_regex("\\}");
    static std::regex builtin_call_regex("<\\|python_tag\\|>([^.(]+)\\.call\\((.*)\\)");

    if (with_builtin_tools) {
        std::smatch match;
        if (std::regex_match(input, match, builtin_call_regex)) {
            auto name = match[1].str();
            auto raw_args = match[2].str();

            // TODO: if/when builtin tools start accepting more than 1 argument, use parse_json for real parsing.
            auto it_eq = raw_args.find('=');
            auto arg_name = raw_args.substr(0, it_eq);
            auto arg_value_str = raw_args.substr(it_eq + 1);
            auto arg_value = json::parse(arg_value_str);

            return {
                /* .role = */ "assistant",
                /* .content = */ match.prefix().str(),
                /* .tool_calls = */ {
                    {
                        /* .name = */ match[1],
                        /* .arguments = */ (json {
                            {arg_name, arg_value},
                        }).dump(),
                        /* .id = */ "",
                    },
                },
            };
        }
    }
    return parse_json_tool_calls(input, std::nullopt, function_regex, close_regex);
}

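// DeepSeek R1: tool calls are wrapped in a <|tool▁calls▁begin|> block; each call is
// `<|tool▁call▁begin|>function<|tool▁sep|>{name}` followed by a fenced ```json arguments block
// and a closing <|tool▁call▁end|>.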
static common_chat_params common_chat_params_init_deepseek_r1(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;
    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        std::vector<std::string> tool_rules;
        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            std::string name = function["name"];
            auto parameters = function["parameters"];
            auto args_rule = builder.add_schema(name + "-args", parameters);
            tool_rules.push_back(builder.add_rule(name + "-call",
                "\"<|tool▁call▁begin|>function<|tool▁sep|>" + name + "\\n```json\\n\" " + args_rule + " \"```<|tool▁call▁end|>\""));
        });
        data.grammar_triggers.push_back({"<|tool▁calls▁begin|>", /* .at_start = */ false});
        data.preserved_tokens = {
            "<|tool▁sep|>",
            "<|tool▁call▁end|>",
        };
        builder.add_rule("root", "\"<|tool▁calls▁begin|>\" (" + string_join(tool_rules, " | ") + ")" + (inputs.parallel_tool_calls ? "*" : "") + " space");
    }, grammar_options);
    auto prompt = apply(tmpl, inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.prompt = prompt;
    data.format = COMMON_CHAT_FORMAT_DEEPSEEK_R1;
    return data;
}

static common_chat_msg common_chat_parse_deepseek_r1(const std::string & input) {
    // Note: the '|' characters in the markers must be escaped, otherwise std::regex treats them as alternation.
    static std::regex trigger_regex("<\\|tool▁calls▁begin\\|>");
    static std::regex function_regex("<\\|tool▁call▁begin\\|>function<\\|tool▁sep\\|>([^\n]+)\n```json\n");
    static std::regex close_regex("```<\\|tool▁call▁end\\|>");
    return parse_json_tool_calls(input, trigger_regex, function_regex, close_regex);
}

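// FireFunction v2: the template needs `datetime` and a stringified `functions` list in its context
// even when no tools are used; tool calls are emitted as a ` functools[...]` JSON array.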
static common_chat_params common_chat_params_init_firefunction_v2(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    fprintf(stderr, "%s\n", __func__);
    common_chat_params data;
    data.prompt = apply(tmpl, inputs.messages, /* tools= */ nullptr, inputs.add_generation_prompt, {
        {"datetime", "Jan 29 2025 13:00:00 GMT"},
        {"functions", json(inputs.tools.empty() ? "" : inputs.tools.dump(2))},
    });
    if (!inputs.tools.is_null() && !inputs.tools.empty()) {
        data.grammar_lazy = inputs.tool_choice != "required";
        data.grammar = build_grammar([&](const common_grammar_builder & builder) {
            auto schemas = json::array();
            foreach_function(inputs.tools, [&](const json & tool) {
                const auto & function = tool["function"];
                schemas.push_back({
                    {"type", "object"},
                    {"properties", {
                        {"name", {
                            {"type", "string"},
                            {"const", function["name"]},
                        }},
                        {"arguments", function["parameters"]},
                    }},
                    {"required", json::array({"name", "arguments", "id"})},
                });
            });
            auto schema = json {
                {"type", "array"},
                {"items", schemas.size() == 1 ? schemas[0] : json {{"anyOf", schemas}}},
                {"minItems", 1},
            };
            if (!inputs.parallel_tool_calls) {
                schema["maxItems"] = 1;
            }
            builder.add_rule("root", "\" functools\"? " + builder.add_schema("tool_calls", schema));
        }, grammar_options);
        data.grammar_triggers.push_back({" functools[", /* .at_start = */ false});
        data.format = COMMON_CHAT_FORMAT_FIREFUNCTION_V2;
    } else {
        data.format = COMMON_CHAT_FORMAT_CONTENT_ONLY;
    }
    return data;
}

static common_chat_msg common_chat_parse_firefunction_v2(const std::string & input) {
    return parse_prefixed_json_tool_call_array(input, " functools[", /* rstrip_prefix= */ 1);
}

static common_chat_params common_chat_params_init_functionary_v3_2(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    // >>>all\nlet's call functions>>>fn1\n{"arg1": 1...}\n>>>fn2\n{"arg1": 1...}...
    // Using ">>>f1\n", ">>>f2\n"... as trigger words for the grammar
    common_chat_params data;
    data.prompt = apply(tmpl, inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2;
    if (!inputs.tools.is_null() && !inputs.tools.empty()) {
        data.grammar_lazy = inputs.tool_choice != "required";
        data.grammar = build_grammar([&](const common_grammar_builder & builder) {
            std::vector<std::string> first_tool_rules;
            std::vector<std::string> subsequent_tool_rules;
            foreach_function(inputs.tools, [&](const json & tool) {
                const auto & function = tool["function"];
                std::string name = function["name"];
                auto parameters = function["parameters"];
                auto args_rule = builder.add_schema(name + "-args", parameters);
                first_tool_rules.push_back(builder.add_rule(name + "-call", "\"" + name + "\\n\" " + args_rule));
                subsequent_tool_rules.push_back(builder.add_rule(name + "-call2", "\">>>" + name + "\\n\" " + args_rule));
                data.grammar_triggers.push_back({name, /* .at_start = */ true});
                data.grammar_triggers.push_back({">>>" + name, /* .at_start = */ false});
            });
            auto first_rule = first_tool_rules.empty() ? "" : builder.add_rule("first_tool_call", string_join(first_tool_rules, " | ")) + " space";
            if (inputs.parallel_tool_calls) {
                auto subsequent_rule = builder.add_rule("subsequent_tool_call", string_join(subsequent_tool_rules, " | ")) + " space";
                builder.add_rule("root", first_rule + " (" + subsequent_rule + ")*");
            } else {
                builder.add_rule("root", first_rule);
            }
        }, grammar_options);
    }
    return data;
}

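// Consumes the literal `expected` if it matches the input at `it`, advancing the iterator past it;
// returns whether the literal was found.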
static bool consume(std::string::const_iterator & it, const std::string::const_iterator & end, const std::string & expected) {
    auto expected_it = expected.begin();
    auto tmp_it = it;
    while (tmp_it != end && expected_it != expected.end() && *tmp_it == *expected_it) {
        ++tmp_it;
        ++expected_it;
    }
    if (expected_it == expected.end()) {
        it = tmp_it;
        return true;
    }
    return false;
}

static common_chat_msg common_chat_parse_functionary_v3_2(const std::string & input) {
    static std::regex function_regex(R"((?:>>>)?(\w+)\n)");
    static std::regex close_regex(R"($|(?=>>>))");

    std::string content;
    auto it = input.begin();
    const auto end = input.end();

    if (consume(it, end, "all\n")) {
        std::smatch match;
        if (std::regex_search(it, end, match, function_regex)) {
            auto fun_it = match.prefix().second;
            content = std::string(it, fun_it);
            it = fun_it;
        } else {
            common_chat_msg res;
            res.role = "assistant";
            res.content = std::string(it, end);
            return res;
        }
    }
    // TODO: tighten & simplify.
    try {
        auto res = parse_json_tool_calls(std::string(it, end), std::nullopt, function_regex, close_regex);
        res.content = content + res.content;
        return res;
    } catch (const std::exception & e) {
        LOG_ERR("Failed to parse functionary v3.2 input: %s\n", e.what());
        common_chat_msg res;
        res.role = "assistant";
        res.content = input;
        return res;
    }
}

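// Functionary v3.1 (Llama 3.1 template): tool calls use <function=name>{...}</function>, with a
// special raw <|python_tag|> form for a python/ipython tool.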
static common_chat_params common_chat_params_init_functionary_v3_1_llama_3_1(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    // https://github.com/MeetKai/functionary/blob/main/tests/prompt_test_v3-llama3.1.txt
    common_chat_params data;
    json tools = inputs.tools.is_null() ? inputs.tools : json::array();
    std::string python_code_argument_name;
    auto has_raw_python = false;

    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        std::vector<std::string> tool_rules;
        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            const auto & parameters = function["parameters"];
            std::string name = function["name"];
            if (name == "python" || name == "ipython") {
                if (!parameters.contains("type")) {
                    throw std::runtime_error("Missing type in python tool");
                }
                has_raw_python = true;
                auto type = parameters.at("type");
                if (type == "object") {
                    auto properties = parameters.at("properties");
                    for (auto it = properties.begin(); it != properties.end(); ++it) {
                        if (it.value().at("type") == "string") {
                            if (!python_code_argument_name.empty()) {
                                throw std::runtime_error("Multiple string arguments found in python tool");
                            }
                            python_code_argument_name = it.key();
                        }
                    }
                    if (python_code_argument_name.empty()) {
                        throw std::runtime_error("No string argument found in python tool");
                    }
                } else if (type != "string") {
                    throw std::runtime_error("Invalid type in python tool: " + type.dump());
                }
            }
            tool_rules.push_back(builder.add_rule(name + "-call", "\"<function=" + name + ">\" " + builder.add_schema(name + "-args", parameters) + " \"</function>\" space"));
        });
        if (has_raw_python) {
            tool_rules.push_back(builder.add_rule("python-call", "\"<|python_tag|>\" .*"));
            data.grammar_triggers.push_back({"<|python_tag|>", /* .at_start = */ false});
        }
        auto tool_call = builder.add_rule("tool_call", string_join(tool_rules, " | ")) + " space";
        builder.add_rule("root", inputs.parallel_tool_calls ? "(" + tool_call + ")+" : tool_call);
        data.grammar_triggers.push_back({"<function=", /* .at_start = */ false});
    }, grammar_options);

    data.prompt = apply(tmpl, inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    // TODO: if (has_raw_python)
    data.format = COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1;
    return data;
}

static common_chat_msg common_chat_parse_functionary_v3_1_llama_3_1(const std::string & input) {
    // This version of Functionary still supports the llama 3.1 tool call format for the python tool.
    static std::regex python_tag_regex(R"(<\|python_tag\|>([\s\S\n]*)$)");
    std::smatch match;
    if (std::regex_search(input, match, python_tag_regex)) {
        auto code = match[1].str();
        return {
            /* .role = */ "assistant",
            /* .content = */ match.prefix().str(),
            /* .tool_calls = */ {
                {
                    /* .name = */ "python",
                    /* .arguments = */ (json {{"code", code}}).dump(),
                    /* .id = */ "",
                },
            }
        };
    }
    static std::regex function_regex(R"(<function=(\w+)>)");
    static std::regex close_regex(R"(</function>)");
    // TODO: tighten & simplify.
    return parse_json_tool_calls(input, std::nullopt, function_regex, close_regex);
}

static common_chat_params common_chat_params_init_hermes_2_pro(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;
    // (content)?(<tool_call>{"name": "foo", "arguments": {"a": 1}}</tool_call>)*
    data.grammar_lazy = inputs.tool_choice != "required";
    data.grammar = build_grammar([&](const common_grammar_builder & builder) {
        std::vector<std::string> tool_rules;
        foreach_function(inputs.tools, [&](const json & tool) {
            const auto & function = tool["function"];
            std::string name = function["name"];
            auto parameters = function["parameters"];
            builder.resolve_refs(parameters);
            tool_rules.push_back(builder.add_schema(name + "-call", {
                {"type", "object"},
                {"properties", json {
                    {"name", json {{"const", name}}},
                    {"arguments", parameters},
                }},
                {"required", json::array({"name", "arguments"})},
            }));
        });
        auto tool_call = "\"<tool_call>\" space " + builder.add_rule("tool_call", string_join(tool_rules, " | ")) + " \"</tool_call>\" space";
        builder.add_rule("root", inputs.parallel_tool_calls ? "(" + tool_call + ")+" : tool_call);
        data.grammar_triggers.push_back({"<tool_call>", /* .at_start = */ false});
        data.preserved_tokens = { "</tool_call>" };
    }, grammar_options);

    data.prompt = apply(tmpl, inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_HERMES_2_PRO;
    return data;
}

static common_chat_msg common_chat_parse_hermes_2_pro(const std::string & input) {
    try {
        std::regex start_pattern(R"([\n\s]*<tool_call>)");
        std::regex middle_pattern(R"([\n\s]*</tool_call>[\n\s]*<tool_call>)");
        std::regex end_pattern(R"([\n\s]*</tool_call>[\n\s]*$)");

        auto end = input.end();
        std::sregex_iterator rend;
        std::sregex_iterator rit(input.begin(), end, start_pattern);
        if (rit == rend) {
            return {
                /* .role = */ "assistant",
                /* .content = */ input,
                /* .tool_calls = */ {},
            };
        }

        common_chat_msg result;
        result.role = "assistant";
        result.content = rit->prefix();

        auto it = rit->suffix().first;
        while (it != end) {
            json call;
            if (!parse_json(it, end, call)) {
                throw std::runtime_error("Failed to parse json tool call");
            }
            const auto & arguments = call["arguments"];
            result.tool_calls.push_back({
                call["name"],
                arguments.dump(),
                // arguments.is_string() ? arguments.get<std::string>() : arguments.dump(),
                /* id= */ "",
            });
            rit = {it, end, middle_pattern};
            if (rit != rend) {
                it = rit->suffix().first;
            } else {
                rit = {it, end, end_pattern};
                if (rit == rend) {
                    throw std::runtime_error("Malformed input, missing </tool_call>");
                }
                break;
            }
        }
        return result;
    } catch (const std::exception & e) {
        return {
            /* .role = */ "assistant",
            /* .content = */ input,
            /* .tool_calls = */ {},
        };
    }
}

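// No tools: plain templated prompt, optionally constrained by a user-provided JSON schema or grammar.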
static common_chat_params common_chat_params_init_without_tools(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    common_chat_params data;
    data.prompt = apply(tmpl, inputs.messages, inputs.tools.empty() ? json() : inputs.tools, inputs.add_generation_prompt);
    data.format = COMMON_CHAT_FORMAT_CONTENT_ONLY;
    data.grammar_lazy = false;
    if (!inputs.json_schema.is_null()) {
        if (!inputs.grammar.empty()) {
            throw std::runtime_error("Either \"json_schema\" or \"grammar\" can be specified, but not both");
        }
        data.grammar = json_schema_to_grammar(inputs.json_schema);
    } else {
        data.grammar = inputs.grammar;
    }
    return data;
}

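// Selects the chat handler by sniffing distinctive markers in the template source, then delegates
// to the matching *_init function (falling back to the generic JSON handler).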
common_chat_params common_chat_params_init(const common_chat_template & tmpl, const struct common_chat_inputs & inputs) {
    auto has_tools = !inputs.tools.is_null() && inputs.tool_choice != "none";
    LOG_DBG("[%s] has_tools=%s\n", __func__, has_tools ? "true" : "false");

    if (has_tools && !inputs.grammar.empty()) {
        throw std::runtime_error("Cannot specify grammar with tools");
    }

    const auto & src = tmpl.source();
    if (src.find(">>>all") != std::string::npos) {
        // Functionary prepends "all\n" to plain content outputs, so we use the parser no matter what.
        return common_chat_params_init_functionary_v3_2(tmpl, inputs);
    }
    if (src.find(" functools[") != std::string::npos) {
        // Firefunction v2 requires datetime and functions in the context, even w/o tools.
        return common_chat_params_init_firefunction_v2(tmpl, inputs);
    }
    if (!has_tools) {
        return common_chat_params_init_without_tools(tmpl, inputs);
    }
    if (src.find("<tool_call>") != std::string::npos) {
        return common_chat_params_init_hermes_2_pro(tmpl, inputs);
    }
    if (src.find("<|start_header_id|>") != std::string::npos
        && src.find("<function=") != std::string::npos) {
        return common_chat_params_init_functionary_v3_1_llama_3_1(tmpl, inputs);
    }
    if (src.find("<|start_header_id|>ipython<|end_header_id|>") != std::string::npos) {
        auto allow_python_tag_builtin_tools = src.find("<|python_tag|>") != std::string::npos;
        return common_chat_params_init_llama_3_1_tool_calls(tmpl, inputs, allow_python_tag_builtin_tools);
    }
    if (src.find("<|tool▁calls▁begin|>") != std::string::npos) {
        return common_chat_params_init_deepseek_r1(tmpl, inputs);
    }
    if (src.find("[TOOL_CALLS]") != std::string::npos) {
        return common_chat_params_init_mistral_nemo(tmpl, inputs);
    }
    if (src.find("<|END_THINKING|><|START_ACTION|>") != std::string::npos) {
        return common_chat_params_init_command_r7b(tmpl, inputs);
    }
    return common_chat_params_init_generic(tmpl, inputs);
}

static common_chat_msg common_chat_parse_content_only(const std::string & input) {
    return {
        /* .role = */ "assistant",
        /* .content = */ input,
        /* .tool_calls = */ {},
    };
}

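// Dispatches parsing of the model output to the parser matching the format chosen at prompt time.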
common_chat_msg common_chat_parse(const std::string & input, common_chat_format format) {
    switch (format) {
        case COMMON_CHAT_FORMAT_CONTENT_ONLY:
            return common_chat_parse_content_only(input);
        case COMMON_CHAT_FORMAT_GENERIC:
            return common_chat_parse_generic(input);
        case COMMON_CHAT_FORMAT_MISTRAL_NEMO:
            return common_chat_parse_mistral_nemo(input);
        case COMMON_CHAT_FORMAT_LLAMA_3_X:
            return common_chat_parse_llama_3_1(input);
        case COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS:
            return common_chat_parse_llama_3_1(input, /* with_builtin_tools= */ true);
        case COMMON_CHAT_FORMAT_DEEPSEEK_R1:
            return common_chat_parse_deepseek_r1(input);
        case COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2:
            return common_chat_parse_functionary_v3_2(input);
        case COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1:
            return common_chat_parse_functionary_v3_1_llama_3_1(input);
        case COMMON_CHAT_FORMAT_HERMES_2_PRO:
            return common_chat_parse_hermes_2_pro(input);
        case COMMON_CHAT_FORMAT_FIREFUNCTION_V2:
            return common_chat_parse_firefunction_v2(input);
        case COMMON_CHAT_FORMAT_COMMAND_R7B:
            return common_chat_parse_command_r7b(input);
        default:
            throw std::runtime_error("Unsupported format: " + common_chat_format_name(format));
    }
}