| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702
70370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761
27712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768176917701771177217731774177517761
77717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762
27722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387238823892390239123922393239423952396239723982399240024012402240324042405240624072408240924102411241224132414241524162417241824192420242124222423242424252426242724282429243024312432243324342435243624372438243924402441244224432444244524462447244824492450245124522453245424552456245724582459246024612462246324642465246624672468246924702471247224732474247524762477247824792480248124822483248424852486248724882489249024912492249324942495249624972498249925002501250225032504250525062507250825092510251125122513251425152516251725182519252025212522252325242525252625272528252925302531253225332534253525362537253825392540254125422543254425452546254725482549255025512552255325542555255625572558255925602561256225632564256525662567256825692570257125722573257425752576257725782579258025812582258325842585258625872588258925902591259225932594259525962597259825992600260126022603260426052606260726082609261026112612261326142615261626172618261926202621262226232624262526262627262826292630263126322633263426352636263726382639264026412642264326442645264626472648264926502651265226532654265526562657265826592660266126622663266426652666266726682669267026712672267326742675267626772678267926802681268226832684268526862687268826892690269126922693269426952696269726982699270027012702270327042705270627072708270927102711271227132714271527162717271827192720272127222723272427252726272727282729273027312732273327342735273627372738273927402741274227432744274527462747274827492750275127522753275427552756275727582759276027612762276327642765276627672768276927702771277227732774277527762
77727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058305930603061306230633064306530663067306830693070307130723073307430753076307730783079308030813082308330843085308630873088308930903091309230933094309530963097309830993100310131023103310431053106310731083109311031113112311331143115311631173118311931203121312231233124312531263127312831293130313131323133313431353136313731383139314031413142314331443145314631473148314931503151315231533154315531563157315831593160316131623163316431653166316731683169317031713172317331743175317631773178317931803181318231833184318531863187318831893190319131923193319431953196319731983199320032013202320332043205320632073208320932103211321232133214321532163217321832193220322132223223322432253226322732283229323032313232323332343235323632373238323932403241324232433244324532463247324832493250325132523253325432553256325732583259326032613262326332643265326632673268326932703271327232733274327532763
27732783279328032813282328332843285328632873288328932903291329232933294329532963297329832993300330133023303330433053306330733083309331033113312331333143315331633173318331933203321332233233324332533263327332833293330333133323333333433353336333733383339334033413342334333443345334633473348334933503351335233533354335533563357335833593360336133623363336433653366336733683369337033713372337333743375337633773378337933803381338233833384338533863387338833893390339133923393339433953396339733983399340034013402340334043405340634073408340934103411341234133414341534163417341834193420342134223423342434253426342734283429343034313432343334343435343634373438343934403441344234433444344534463447344834493450345134523453345434553456345734583459346034613462346334643465346634673468346934703471347234733474347534763477347834793480348134823483348434853486348734883489349034913492349334943495349634973498349935003501350235033504350535063507350835093510351135123513351435153516351735183519352035213522352335243525352635273528352935303531353235333534353535363537353835393540354135423543354435453546354735483549355035513552355335543555355635573558355935603561356235633564356535663567356835693570357135723573357435753576357735783579358035813582358335843585358635873588358935903591359235933594359535963597359835993600360136023603360436053606360736083609361036113612361336143615361636173618361936203621362236233624362536263627362836293630363136323633363436353636363736383639364036413642364336443645364636473648364936503651365236533654365536563657365836593660366136623663366436653666366736683669367036713672367336743675367636773678367936803681368236833684368536863687368836893690369136923693369436953696369736983699370037013702370337043705370637073708370937103711371237133714371537163717371837193720372137223723372437253726372737283729373037313732373337343735373637373738373937403741374237433744374537463747374837493750375137523753375437553756375737583759376037613762376337643765376637673768376937703771377237733774377537763
77737783779378037813782378337843785378637873788378937903791379237933794379537963797379837993800380138023803380438053806380738083809381038113812381338143815381638173818381938203821382238233824382538263827382838293830383138323833383438353836383738383839384038413842384338443845384638473848384938503851385238533854385538563857385838593860386138623863386438653866386738683869387038713872387338743875387638773878387938803881 |
- // Tests chat handling, including grammar generation and parsing for tool calling, for various templates.
- //
- // Also acts as a CLI to generate a Markdown summary of the formats of Jinja templates,
- // e.g. given Minja (http://github.com/google/minja) checked out in parent dir:
- //
- // cmake -B build && cmake --build build --parallel && ./build/bin/test-chat ../minja/build/tests/*.jinja 2>/dev/null
- //
- #include "chat.h"
- #include "log.h"
- #include "../src/unicode.h"
- #include "../src/llama-grammar.h"
- #include <nlohmann/json.hpp>
- #include <fstream>
- #include <iostream>
- #include <functional>
- #include <string>
- using json = nlohmann::ordered_json;
- static std::ostream & operator<<(std::ostream & os, const common_chat_msg_diff & diff) {
- os << "{ content_delta: " << diff.content_delta << "; ";
- os << "reasoning_content_delta: " << diff.reasoning_content_delta << "; ";
- if (diff.tool_call_index != std::string::npos) {
- os << "tool_call_index: " << diff.tool_call_index << "; ";
- os << "tool_call_delta.name: " << diff.tool_call_delta.name << "; ";
- os << "tool_call_delta.id: " << diff.tool_call_delta.id << "; ";
- os << "tool_call_delta.arguments: " << diff.tool_call_delta.arguments << "; ";
- }
- os << "}";
- return os;
- }
- // operator<< for vector<common_chat_msg_diff>:
- static std::ostream & operator<<(std::ostream & os, const std::vector<common_chat_msg_diff> & diffs) {
- os << "[\n";
- for (const auto & diff : diffs) {
- os << " " << diff << ",\n";
- }
- os << "]";
- return os;
- }
- static std::ostream & operator<<(std::ostream & os, const common_chat_msg & msg) {
- os << "{ role: " << msg.role << "; ";
- os << "content: " << msg.content << "; ";
- os << "content_parts: [\n";
- for (const auto & part : msg.content_parts) {
- os << " { type: " << part.type << "; text: " << part.text << " },\n";
- }
- os << "]; ";
- os << "reasoning_content: " << msg.reasoning_content << "; ";
- os << "tool_calls: [\n";
- for (const auto & tool_call : msg.tool_calls) {
- os << " { name: " << tool_call.name << "; arguments: " << tool_call.arguments << "; id: " << tool_call.id << " },\n";
- }
- os << "]";
- os << "}";
- return os;
- }
// Generic equality used by assert_equals(); specialized below for common_chat_msg.
template <class T> static bool equals(const T & expected, const T & actual) {
    const bool same = (expected == actual);
    return same;
}
- static common_chat_msg normalize(const common_chat_msg & msg) {
- common_chat_msg normalized = msg;
- for (auto & tool_call : normalized.tool_calls) {
- try {
- tool_call.arguments = json::parse(tool_call.arguments).dump();
- } catch (const std::exception &) {
- // Do nothing
- }
- }
- return normalized;
- }
// Specialization for chat messages: compare after normalizing tool-call
// argument JSON, so argument formatting differences do not fail the test.
template <>
bool equals(const common_chat_msg & expected, const common_chat_msg & actual) {
    return normalize(expected) == normalize(actual);
}
// Logs both values to stderr and throws when expected != actual.
template <class T> static void assert_equals(const T & expected, const T & actual) {
    if (equals(expected, actual)) {
        return;
    }
    std::cerr << "Expected: " << expected << std::endl;
    std::cerr << "Actual: " << actual << std::endl;
    std::cerr << std::flush;
    throw std::runtime_error("Test failed");
}
// Reads a whole file into a string (binary mode, no newline translation).
// Falls back to "../<path>" so the test binary can be run from a build subdirectory.
// Throws std::runtime_error if the file cannot be opened, sized, or fully read.
static std::string read_file(const std::string & path) {
    std::cerr << "# Reading: " << path << '\n' << std::flush;
    std::ifstream fs(path, std::ios_base::binary);
    if (!fs.is_open()) {
        fs = std::ifstream("../" + path, std::ios_base::binary);
        if (!fs.is_open()) {
            throw std::runtime_error("Failed to open file: " + path);
        }
    }
    fs.seekg(0, std::ios_base::end);
    auto size = fs.tellg();
    // tellg() returns -1 on failure; casting that to size_t would request a huge allocation.
    if (size < 0) {
        throw std::runtime_error("Failed to determine size of file: " + path);
    }
    fs.seekg(0);
    std::string out;
    out.resize(static_cast<size_t>(size));
    fs.read(out.data(), static_cast<std::streamsize>(size));
    // A short read would leave a zero-filled tail; fail loudly instead.
    if (fs.gcount() != static_cast<std::streamsize>(size)) {
        throw std::runtime_error("Failed to read file: " + path);
    }
    return out;
}
// Loads a Jinja chat template file from disk and wraps it in an owning pointer.
static common_chat_templates_ptr read_templates(const std::string & path) {
    return common_chat_templates_ptr(common_chat_templates_init(/* model= */ nullptr, read_file(path)));
}
// Compiles a GBNF grammar string (starting at rule "root") into a llama_grammar.
// May yield an empty pointer on failure — callers check before using it.
static std::unique_ptr<llama_grammar> build_grammar(const std::string & grammar_str) {
    return std::unique_ptr<llama_grammar>(
        llama_grammar_init_impl(nullptr, grammar_str.c_str(), "root", false, nullptr, 0, nullptr, 0));
}
// TODO: extract to common helper (copied from test-grammar-integration.cpp)
// Feeds `input` codepoint-by-codepoint into the grammar and reports whether the
// grammar both accepted every codepoint and reached a completed state at the end.
static bool match_string(const std::string & input, llama_grammar * grammar) {
    const auto cpts = unicode_cpts_from_utf8(input);
    // NOTE(review): reference into the grammar — presumably mutated by llama_grammar_accept below.
    auto & stacks_cur = llama_grammar_get_stacks(grammar);
    for (const auto & cpt : cpts) {
        llama_grammar_accept(grammar, cpt);
        if (stacks_cur.empty()) {
            // no stacks means that the grammar failed to match at this point
            return false;
        }
    }
    if (std::any_of(stacks_cur.begin(), stacks_cur.end(), [](const auto & stack) { return stack.empty(); })) {
        // An empty stack means that the grammar has been completed
        return true;
    }
    return false;
}
- static std::string renormalize_json(const std::string & json_str) {
- try {
- auto json_obj = json::parse(json_str);
- return json_obj.dump();
- } catch (const std::exception & e) {
- std::cerr << "Failed to parse JSON: " << e.what() << '\n';
- return json_str;
- }
- }
// Field-by-field assertion that two parsed chat messages are equal.
// When ignore_whitespace_differences is set, the textual fields (content,
// content-part text, reasoning) are compared after stripping surrounding
// whitespace; tool-call arguments are always compared as renormalized JSON.
static void assert_msg_equals(const common_chat_msg & expected, const common_chat_msg & actual, bool ignore_whitespace_differences = false) {
    assert_equals(expected.role, actual.role);
    if (ignore_whitespace_differences) {
        assert_equals(string_strip(expected.content), string_strip(actual.content));
    } else {
        assert_equals(expected.content, actual.content);
    }
    assert_equals(expected.content_parts.size(), actual.content_parts.size());
    for (size_t i = 0; i < expected.content_parts.size(); i++) {
        const auto & expected_part = expected.content_parts[i];
        const auto & actual_part = actual.content_parts[i];
        assert_equals(expected_part.type, actual_part.type);
        if (ignore_whitespace_differences) {
            assert_equals(string_strip(expected_part.text), string_strip(actual_part.text));
        } else {
            assert_equals(expected_part.text, actual_part.text);
        }
    }
    if (ignore_whitespace_differences) {
        assert_equals(string_strip(expected.reasoning_content), string_strip(actual.reasoning_content));
    } else {
        assert_equals(expected.reasoning_content, actual.reasoning_content);
    }
    assert_equals(expected.tool_calls.size(), actual.tool_calls.size());
    for (size_t i = 0; i < expected.tool_calls.size(); i++) {
        const auto & expected_tool_call = expected.tool_calls[i];
        const auto & actual_tool_call = actual.tool_calls[i];
        assert_equals(expected_tool_call.name, actual_tool_call.name);
        // Compare arguments as canonical JSON so formatting differences don't fail the test.
        assert_equals(renormalize_json(expected_tool_call.arguments), renormalize_json(actual_tool_call.arguments));
        assert_equals(expected_tool_call.id, actual_tool_call.id);
    }
}
// Tool with a single required integer parameter.
common_chat_tool special_function_tool {
    /* .name = */ "special_function",
    /* .description = */ "I'm special",
    /* .parameters = */ R"({
        "type": "object",
        "properties": {
            "arg1": {
                "type": "integer",
                "description": "The arg."
            }
        },
        "required": ["arg1"]
    })",
};
// Tool with one required and one optional integer parameter.
common_chat_tool special_function_tool_with_optional_param {
    /* .name = */ "special_function_with_opt",
    /* .description = */ "I'm special but have optional stuff",
    /* .parameters = */ R"({
        "type": "object",
        "properties": {
            "arg1": {
                "type": "integer",
                "description": "The arg."
            },
            "arg2": {
                "type": "integer",
                "description": "The optional arg."
            }
        },
        "required": ["arg1"]
    })",
};
// Code-execution tool named "python" (some templates special-case this name).
common_chat_tool python_tool {
    /* .name = */ "python",
    /* .description = */ "an ipython interpreter",
    /* .parameters = */ R"({
        "type": "object",
        "properties": {
            "code": {
                "type": "string",
                "description": "Python code to execute."
            }
        },
        "required": ["code"]
    })",
};
// Same schema as python_tool but under the "code_interpreter" name.
common_chat_tool code_interpreter_tool {
    /* .name = */ "code_interpreter",
    /* .description = */ "an ipython interpreter",
    /* .parameters = */ R"({
        "type": "object",
        "properties": {
            "code": {
                "type": "string",
                "description": "Python code to execute."
            }
        },
        "required": ["code"]
    })",
};
// Default tool set used by most tests, and the Llama-3.1-specific set.
std::vector<common_chat_tool> tools { special_function_tool, special_function_tool_with_optional_param, python_tool };
std::vector<common_chat_tool> llama_3_1_tools { special_function_tool, code_interpreter_tool };
// Result of init_delta(): the rendered-prompt suffix contributed by appending a
// message, plus the chat params (format, grammar, triggers) needed to parse it.
struct delta_data {
    std::string delta;
    common_chat_params params;
};
- static common_chat_msg simple_assist_msg(const std::string & content, const std::string & reasoning_content = "", const std::string & tool_name = "", const std::string & arguments = "", const std::string & id = "") {
- common_chat_msg msg;
- msg.role = "assistant";
- msg.content = content;
- msg.reasoning_content = reasoning_content;
- if (!tool_name.empty()) {
- msg.tool_calls.push_back({ tool_name, arguments, id });
- }
- return msg;
- }
// Renders the chat template twice — once with just the user message and
// add_generation_prompt=true, then again with delta_message appended and
// add_generation_prompt=false — and returns the suffix the second render adds,
// with any end token stripped off the tail. Throws if appending the message
// produced no change at all.
static delta_data init_delta(const struct common_chat_templates * tmpls, const std::vector<std::string> & end_tokens,
                             const common_chat_msg & user_message,
                             const common_chat_msg & delta_message,
                             const std::vector<common_chat_tool> & tools,
                             const common_chat_tool_choice & tool_choice) {
    common_chat_templates_inputs inputs;
    inputs.parallel_tool_calls = true;
    inputs.messages.push_back(user_message);
    inputs.tools = tools;
    inputs.tool_choice = tool_choice;
    auto params_prefix = common_chat_templates_apply(tmpls, inputs);
    inputs.messages.push_back(delta_message);
    inputs.add_generation_prompt = false;
    auto params_full = common_chat_templates_apply(tmpls, inputs);
    std::string prefix = params_prefix.prompt;
    std::string full = params_full.prompt;
    if (full == prefix) {
        throw std::runtime_error("Full message is the same as the prefix");
    }
    // Compute the length of the common prefix, deliberately not consuming a
    // matching '<' (see the DeepSeek note below).
    size_t common_prefix_length = 0;
    for (size_t i = 0; i < prefix.size() && i < full.size(); ++i) {
        if (prefix[i] != full[i]) {
            break;
        }
        if (prefix[i] == '<') {
            // DeepSeek R1's template (as of 20250209) adds a trailing <think> if add_generation_prompt,
            // but it removes thinking tags for past messages.
            // The prefix and full strings diverge at <think> vs. <|tool▁calls▁begin|>, we avoid consuming the leading <.
            continue;
        }
        common_prefix_length = i + 1;
    }
    auto delta = full.substr(common_prefix_length);
    // Strip end tokens
    for (const auto & end_token : end_tokens) {
        // rfind to find the last occurrence
        auto pos = delta.rfind(end_token);
        if (pos != std::string::npos) {
            delta = delta.substr(0, pos);
            break;
        }
    }
    return { delta, params_full };
}
- /*
- Applies the template to 1 user message w/ add_generation_prompt=true, then w/ the test message w/ add_generation_prompt=false,
- gets the diff, removes any end tokens and parses the result w/ the grammar, checking that
- the parsed message is the same as the test_message
- */
// See the comment block above for the overall flow. Parameters:
//   end_tokens                 - candidate end-of-message tokens stripped from the delta
//   expected_delta             - if non-empty, the exact (or whitespace-stripped) expected render
//   expect_grammar_triggered   - whether the lazy grammar is expected to fire on this delta
//   test_grammar_if_triggered  - whether to also match the delta against the grammar
//   reasoning_format           - reasoning syntax used when re-parsing the delta
static void test_templates(const struct common_chat_templates * tmpls, const std::vector<std::string> & end_tokens,
                           const common_chat_msg & test_message,
                           const std::vector<common_chat_tool> & tools = {},
                           const std::string & expected_delta = "",
                           bool expect_grammar_triggered = true,
                           bool test_grammar_if_triggered = true,
                           common_reasoning_format reasoning_format = COMMON_REASONING_FORMAT_NONE,
                           bool ignore_whitespace_differences = false
) {
    common_chat_msg user_message;
    user_message.role = "user";
    user_message.content = "Hello, world!";
    // Run the whole check under both tool-choice modes.
    for (const auto & tool_choice : std::vector<common_chat_tool_choice> {COMMON_CHAT_TOOL_CHOICE_AUTO, COMMON_CHAT_TOOL_CHOICE_REQUIRED}) {
        auto data = init_delta(tmpls, end_tokens, user_message, test_message, tools, tool_choice);
        if (!expected_delta.empty()) {
            if (ignore_whitespace_differences) {
                assert_equals(string_strip(expected_delta), string_strip(data.delta));
            } else {
                assert_equals(expected_delta, data.delta);
            }
        }
        if (expect_grammar_triggered) {
            // Parse the rendered delta back and check it round-trips to the test message.
            common_chat_syntax syntax;
            syntax.format = data.params.format;
            syntax.reasoning_format = reasoning_format;
            const auto msg = common_chat_parse(data.delta, /* is_partial= */ false, syntax);
            assert_msg_equals(test_message, msg, ignore_whitespace_differences);
        }
        // Any message with tool calls must come with a grammar.
        if (!test_message.tool_calls.empty()) {
            GGML_ASSERT(!data.params.grammar.empty());
        }
        if (!data.params.grammar.empty()) {
            auto grammar = build_grammar(data.params.grammar);
            if (!grammar) {
                throw std::runtime_error("Failed to build grammar");
            }
            // Find the earliest position at which any grammar trigger fires in the delta.
            auto earliest_trigger_pos = std::string::npos;
            auto constrained = data.delta;
            for (const auto & trigger : data.params.grammar_triggers) {
                size_t pos = std::string::npos;
                std::smatch match;
                switch (trigger.type) {
                    case COMMON_GRAMMAR_TRIGGER_TYPE_WORD:
                    {
                        const auto & word = trigger.value;
                        pos = constrained.find(word);
                        break;
                    }
                    case COMMON_GRAMMAR_TRIGGER_TYPE_PATTERN:
                    {
                        const auto & pattern = trigger.value;
                        if (std::regex_search(constrained, match, std::regex(pattern))) {
                            pos = match.position(1);
                        }
                        break;
                    }
                    case COMMON_GRAMMAR_TRIGGER_TYPE_PATTERN_FULL:
                    {
                        const auto & pattern = trigger.value;
                        if (std::regex_match(constrained, match, std::regex(pattern))) {
                            // Use the first non-empty capture group, falling back to the whole match.
                            auto mpos = std::string::npos;
                            for (size_t i = 1; i < match.size(); ++i) {
                                if (match[i].length() > 0) {
                                    mpos = match.position(i);
                                    break;
                                }
                            }
                            if (mpos == std::string::npos) {
                                mpos = match.position(0);
                            }
                            pos = mpos;
                        }
                        break;
                    }
                    default:
                        throw std::runtime_error("Unknown trigger type");
                }
                if (pos == std::string::npos) {
                    continue;
                }
                if (earliest_trigger_pos == std::string::npos || pos < earliest_trigger_pos) {
                    earliest_trigger_pos = pos;
                }
            }
            auto grammar_triggered = false;
            if (earliest_trigger_pos != std::string::npos) {
                // A lazy grammar only constrains output from the trigger position onwards.
                constrained = constrained.substr(earliest_trigger_pos);
                grammar_triggered = true;
            }
            if (data.params.grammar_lazy) {
                assert_equals(expect_grammar_triggered, grammar_triggered);
            }
            if (grammar_triggered && test_grammar_if_triggered && !match_string(constrained, grammar.get())) {
                throw std::runtime_error("Failed to match delta against grammar:\n\n" + data.delta +
                    "\n\nConstrained: " + constrained +
                    "\n\nGrammar: " + data.params.grammar);
            }
        }
    }
}
/**
 * Tests that streaming=true is consistent with streaming=false for the given partial parser.
 * Also tests that partial (truncated) messages do not cause problems.
 */
template <typename T>
static void test_parser_with_streaming(const common_chat_msg & expected, const std::string & raw_message, T parse_msg) {
    // Returns the longest prefix length of `s` that does not end in the middle
    // of a multi-byte UTF-8 sequence.
    constexpr auto utf8_truncate_safe_len = [](const std::string_view s) -> size_t {
        auto len = s.size();
        if (len == 0) return 0;
        auto i = len;
        // Scan backwards over at most 3 continuation bytes looking for a lead byte.
        for (size_t back = 0; back < 4 && i > 0; ++back) {
            --i;
            unsigned char c = s[i];
            if ((c & 0x80) == 0) {
                return len;  // ASCII byte: the tail is complete
            } else if ((c & 0xC0) == 0xC0) {
                // Lead byte: derive the sequence length from its high bits.
                size_t expected_len = 0;
                if ((c & 0xE0) == 0xC0) expected_len = 2;
                else if ((c & 0xF0) == 0xE0) expected_len = 3;
                else if ((c & 0xF8) == 0xF0) expected_len = 4;
                else return i;  // invalid lead byte: cut before it
                if (len - i >= expected_len) {
                    return len;  // sequence is complete
                } else {
                    return i;    // sequence is truncated: cut before the lead byte
                }
            }
            // else: continuation byte (10xxxxxx), keep scanning backwards
        }
        return len - std::min(len, size_t(3));
    };
    constexpr auto utf8_truncate_safe_view = [utf8_truncate_safe_len](const std::string_view s) {
        return s.substr(0, utf8_truncate_safe_len(s));
    };
    auto merged = simple_assist_msg("");
    auto last_msg = parse_msg("");
    // Feed the raw message one byte at a time (clamped to a UTF-8 boundary),
    // re-parse each growing prefix, and accumulate the streaming diffs into `merged`.
    for (size_t i = 1; i <= raw_message.size(); ++i) {
        auto curr_msg = parse_msg(std::string(utf8_truncate_safe_view(std::string_view(raw_message).substr(0, i))));
        if (curr_msg == simple_assist_msg("")) continue;
        LOG_INF("Streaming msg: %s\n", common_chat_msgs_to_json_oaicompat<json>({curr_msg}).dump().c_str());
        for (auto diff: common_chat_msg_diff::compute_diffs(last_msg, curr_msg)) {
            LOG_INF("Streaming diff: %s\n", common_chat_msg_diff_to_json_oaicompat<json>(diff).dump().c_str());
            if (!diff.reasoning_content_delta.empty()) {
                merged.reasoning_content += diff.reasoning_content_delta;
            }
            if (!diff.content_delta.empty()) {
                merged.content += diff.content_delta;
            }
            if (diff.tool_call_index != std::string::npos) {
                // A diff carrying a name starts a new tool call; argument deltas
                // append to the most recently started call.
                if (!diff.tool_call_delta.name.empty()) {
                    merged.tool_calls.push_back({diff.tool_call_delta.name, "", ""});
                }
                if (!diff.tool_call_delta.arguments.empty()) {
                    GGML_ASSERT(!merged.tool_calls.empty());
                    merged.tool_calls.back().arguments += diff.tool_call_delta.arguments;
                }
            }
            LOG_INF("Streaming merged: %s\n", common_chat_msgs_to_json_oaicompat<json>({merged}).dump().c_str());
        }
        // The merged diffs must reconstruct exactly what a fresh parse of the prefix produces.
        assert_msg_equals(curr_msg, merged, true);
        last_msg = curr_msg;
    }
    // Both the one-shot parse and the fully merged stream must equal `expected`.
    assert_msg_equals(expected, parse_msg(raw_message), true);
    assert_msg_equals(expected, merged, true);
}
// A plain user message with its text in `content`.
const common_chat_msg message_user {
    "user",
    "Hey there!",
    /* .content_parts = */ {},
    /* .tool_calls = */ {},
    /* .reasoning_content = */ "",
    /* .tool_name = */ "",
    /* .tool_call_id = */ "",
};
// A user message whose text is split across two content parts instead of `content`.
const common_chat_msg message_user_parts {
    "user",
    /* .content = */ "",
    /* .content_parts = */ {
        { "text", "Hey" },
        { "text", "there" },
    },
    /* .tool_calls = */ {},
    /* .reasoning_content = */ "",
    /* .tool_name = */ "",
    /* .tool_call_id = */ "",
};
// Assistant-message fixtures shared by the per-template tests.
// *_unparsed_* variants keep the thinking markup inline in `content`; the plain
// *_thoughts variants carry it in `reasoning_content`. *_call_* variants add a
// special_function tool call, optionally with content, reasoning, or an id.
const common_chat_msg message_assist = simple_assist_msg("Hello, world!\nWhat's up?");
const common_chat_msg message_assist_empty = simple_assist_msg("");
const common_chat_msg message_assist_thoughts_unparsed_deepseek = simple_assist_msg("<think>I'm\nthinking</think>Hello, world!\nWhat's up?");
const common_chat_msg message_assist_thoughts_unparsed_md = simple_assist_msg("<think>I'm\nthinking</think>Hello, world!\nWhat's up?\n```json\n{}```");
const common_chat_msg message_assist_thoughts_unparsed_md_partial = simple_assist_msg("<think>I'm\nthinking</think>Hello, world!\nWhat's up?\n```json\n{}");
const common_chat_msg message_assist_thoughts_unparsed_r7b = simple_assist_msg("<|START_THINKING|>I'm\nthinking<|END_THINKING|>Hello, world!\nWhat's up?");
const common_chat_msg message_assist_thoughts_unparsed_magistral = simple_assist_msg("[THINK]raisonnement[/THINK]Réponse");
const common_chat_msg message_assist_thoughts = simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking");
const common_chat_msg message_assist_thoughts_unopened_unparsed = simple_assist_msg("I'm\nthinking</think>Hello, world!\nWhat's up?");
const common_chat_msg message_assist_thoughts_no_content = simple_assist_msg("", "I'm\nthinking");
const common_chat_msg message_assist_call = simple_assist_msg("", "", "special_function", "{\"arg1\": 1}");
const common_chat_msg message_assist_call_noopt = simple_assist_msg("", "", "special_function_with_opt", "{\"arg1\": 1}");
const common_chat_msg message_assist_call_withopt = simple_assist_msg("", "", "special_function_with_opt", "{\"arg1\": 1, \"arg2\": 2}");
const common_chat_msg message_assist_call_content = simple_assist_msg("Hello, world!\nWhat's up?", "", "special_function", "{\"arg1\":1}");
const common_chat_msg message_assist_call_empty_args = simple_assist_msg("", "", "special_function");
const common_chat_msg message_assist_call_cutoff_args = simple_assist_msg("", "", "special_function", "{\"arg");
const common_chat_msg message_assist_call_thoughts = simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\":1}");
const common_chat_msg message_assist_call_thoughts_unparsed = simple_assist_msg("<think>I'm\nthinking</think>\n\n", "", "special_function", "{\"arg1\": 1}");
const common_chat_msg message_assist_call_thoughts_content = simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking", "special_function", "{\"arg1\": 1}");
const common_chat_msg message_assist_call_id = simple_assist_msg("", "", "special_function", "{\"arg1\":1}", /* .id = */ "123456789");
const common_chat_msg message_assist_call_idx = simple_assist_msg("", "", "special_function", "{\"arg1\":1}", /* .id = */ "0");
const common_chat_msg message_assist_thoughts_call_idx = simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\": 1}", /* id = */ "0");
- const common_chat_msg message_assist_call_python = simple_assist_msg("", "", "python", "{\"code\":\"print('hey')\"}");
- const common_chat_msg message_assist_call_python_lines = simple_assist_msg("", "", "python", "{\"code\":\"# This is a program:\\nprint('hey')\"}");
- const common_chat_msg message_assist_call_python_lines_unclosed = simple_assist_msg("", "", "python", "{\"code\":\"# This is a program:\\nprint('hey')");
- const common_chat_msg message_assist_call_code_interpreter = simple_assist_msg("", "", "code_interpreter", "{\"code\":\"print('hey')\"}");
// Use for PEG parser implementations.
// One test case for test_peg_parser(): the template inputs used to build the
// parser, the raw model output to parse, and the expected parsed message.
struct peg_test_case {
    common_chat_templates_inputs params; // inputs applied to the chat template
    common_chat_msg expect;              // expected fully-parsed message
    std::string input;                   // raw model output fed to the parser
};
- struct make_peg_parser {
- common_chat_params params_;
- common_peg_arena arena_;
- make_peg_parser(common_chat_templates * tmpls, const common_chat_templates_inputs & inputs) {
- params_ = common_chat_templates_apply(tmpls, inputs);
- arena_.load(params_.parser);
- }
- common_chat_msg parse(const std::string & msg, bool is_partial) {
- return common_chat_peg_parse(arena_, msg, is_partial, /* syntax = */ {params_.format});
- }
- };
- static void test_peg_parser(common_chat_templates * tmpls, const std::function<void(peg_test_case &)> & init) {
- peg_test_case tc;
- init(tc);
- if (tc.params.messages.empty()) {
- tc.params.messages = {message_user};
- }
- if (tc.expect.role.empty()) {
- tc.expect.role = "assistant";
- }
- auto parser = make_peg_parser(tmpls, tc.params);
- common_chat_msg msg_accum;
- common_chat_msg msg_prev;
- msg_accum.role = msg_prev.role = "assistant";
- for (size_t i = 1; i <= tc.input.size(); ++i) {
- auto is_partial = i < tc.input.size();
- common_chat_msg msg_current = parser.parse(tc.input.substr(0, i), is_partial);
- for (const auto & diff : common_chat_msg_diff::compute_diffs(msg_prev, msg_current)) {
- if (!diff.reasoning_content_delta.empty()) {
- msg_accum.reasoning_content += diff.reasoning_content_delta;
- }
- if (!diff.content_delta.empty()) {
- msg_accum.content += diff.content_delta;
- }
- if (diff.tool_call_index != std::string::npos) {
- if (!diff.tool_call_delta.name.empty()) {
- msg_accum.tool_calls.push_back({diff.tool_call_delta.name, "", ""});
- }
- if (!diff.tool_call_delta.arguments.empty()) {
- msg_accum.tool_calls.back().arguments += diff.tool_call_delta.arguments;
- }
- }
- }
- assert_msg_equals(msg_current, msg_accum, true);
- msg_prev = msg_current;
- }
- assert_msg_equals(tc.expect, parser.parse(tc.input, false), true);
- assert_msg_equals(tc.expect, msg_accum, true);
- }
// Round-trips representative chat messages through the OpenAI-compatible JSON
// representation and back, checking the conversion is lossless; then pins the
// exact JSON emitted for content-parts and tool-call messages, and checks
// error handling for messages that lack required fields.
static void test_msgs_oaicompat_json_conversion() {
    printf("[%s]\n", __func__);
    std::vector<common_chat_msg> msgs{
        message_user,
        message_user_parts,
        message_assist_call,
        message_assist_call_thoughts,
        message_assist_call_thoughts_unparsed,
        message_assist_call_thoughts_content,
        message_assist_call_id,
        message_assist_call_idx,
        message_assist_call_python,
        message_assist_call_code_interpreter,
    };
    // msg -> JSON -> msg must be the identity for every fixture above.
    for (const auto & msg : msgs) {
        auto oai_json = common_chat_msgs_to_json_oaicompat<json>({msg});
        auto msgs2 = common_chat_msgs_parse_oaicompat(oai_json);
        assert_equals((size_t) 1, msgs2.size());
        auto msg2 = msgs2[0];
        assert_msg_equals(msg, msg2);
    }
    // Content parts serialize as an array of {type, text} objects.
    assert_equals(
        std::string(
            "[\n"
            " {\n"
            " \"role\": \"user\",\n"
            " \"content\": [\n"
            " {\n"
            " \"type\": \"text\",\n"
            " \"text\": \"Hey\"\n"
            " },\n"
            " {\n"
            " \"type\": \"text\",\n"
            " \"text\": \"there\"\n"
            " }\n"
            " ]\n"
            " }\n"
            "]"
        ),
        common_chat_msgs_to_json_oaicompat<json>({message_user_parts}).dump(2));
    // Tool calls serialize with null content and an OAI "function" wrapper.
    assert_equals(
        std::string(
            "[\n"
            " {\n"
            " \"role\": \"assistant\",\n"
            " \"content\": null,\n"
            " \"tool_calls\": [\n"
            " {\n"
            " \"type\": \"function\",\n"
            " \"function\": {\n"
            " \"name\": \"python\",\n"
            " \"arguments\": \"{\\\"code\\\":\\\"print('hey')\\\"}\"\n"
            " }\n"
            " }\n"
            " ]\n"
            " }\n"
            "]"
        ),
        common_chat_msgs_to_json_oaicompat<json>({message_assist_call_python}).dump(2));
    // An empty tool_calls array is accepted and yields an empty assistant message.
    auto res = common_chat_msgs_parse_oaicompat(json::parse("[{\"role\": \"assistant\", \"tool_calls\": []}]"));
    assert_equals<size_t>(1, res.size());
    assert_equals<std::string>(res[0].role, "assistant");
    assert_equals(true, res[0].content.empty());
    assert_equals(true, res[0].tool_calls.empty());
    // A message with neither content nor tool calls must be rejected, and the
    // error message must mention the missing 'content' field.
    try {
        common_chat_msgs_parse_oaicompat(json::parse("[{\"role\": \"assistant\"}]"));
        throw std::runtime_error("Expected exception");
    } catch (const std::exception & e) {
        if (std::string(e.what()).find("'content'") == std::string::npos) {
            throw std::runtime_error("Expected exception about missing 'content'");
        }
    }
}
// Round-trips tool definitions through the OpenAI-compatible JSON
// representation and back, checking name/description/parameters survive, then
// pins the exact JSON emitted for a representative tool.
static void test_tools_oaicompat_json_conversion() {
    printf("[%s]\n", __func__);
    std::vector<common_chat_tool> tools{
        special_function_tool,
        python_tool,
        code_interpreter_tool,
    };
    // tool -> JSON -> tool must preserve every field.
    for (const auto & tool : tools) {
        auto oai_json = common_chat_tools_to_json_oaicompat<json>({tool});
        auto tools2 = common_chat_tools_parse_oaicompat(oai_json);
        assert_equals((size_t) 1, tools2.size());
        auto tool2 = tools2[0];
        assert_equals(tool.name, tool2.name);
        assert_equals(tool.description, tool2.description);
        // Compare parameter schemas structurally (normalized dump), not textually.
        assert_equals(json::parse(tool.parameters).dump(2), json::parse(tool2.parameters).dump(2));
    }
    // Pin the exact serialized form of a tool definition (OAI "function" wrapper).
    assert_equals(
        std::string(
            "[\n"
            " {\n"
            " \"type\": \"function\",\n"
            " \"function\": {\n"
            " \"name\": \"special_function\",\n"
            " \"description\": \"I'm special\",\n"
            " \"parameters\": {\n"
            " \"type\": \"object\",\n"
            " \"properties\": {\n"
            " \"arg1\": {\n"
            " \"type\": \"integer\",\n"
            " \"description\": \"The arg.\"\n"
            " }\n"
            " },\n"
            " \"required\": [\n"
            " \"arg1\"\n"
            " ]\n"
            " }\n"
            " }\n"
            " }\n"
            "]"
        ),
        common_chat_tools_to_json_oaicompat<json>({special_function_tool}).dump(2));
}
- static void test_template_output_parsers() {
- printf("[%s]\n", __func__);
- common_chat_templates_inputs inputs_no_tools;
- inputs_no_tools.messages = {message_user};
- common_chat_templates_inputs inputs_tools;
- inputs_tools.messages = {message_user};
- inputs_tools.tools = {special_function_tool};
- common_chat_templates_inputs inputs_tools_builtin;
- inputs_tools_builtin.messages = {message_user};
- inputs_tools_builtin.tools = {python_tool};
- {
- // Not supported yet
- auto tmpls = read_templates("models/templates/CohereForAI-c4ai-command-r-plus-tool_use.jinja");
- assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_GENERIC, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- }
- {
- auto tmpls = read_templates("models/templates/CohereForAI-c4ai-command-r7b-12-2024-tool_use.jinja");
- std::vector<std::string> end_tokens{ "<|END_OF_TURN_TOKEN|>" };
- for (const auto & inputs : { inputs_no_tools, inputs_tools }) {
- auto params = common_chat_templates_apply(tmpls.get(), inputs);
- assert_equals(COMMON_CHAT_FORMAT_COMMAND_R7B, params.format);
- assert_equals(false, params.thinking_forced_open);
- }
- assert_msg_equals(message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_COMMAND_R7B}));
- assert_msg_equals(message_assist,
- common_chat_parse(
- "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_COMMAND_R7B}));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
- "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts_unparsed_deepseek,
- common_chat_parse(
- "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
- "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ true,
- /* .thinking_forced_open = */ false,
- }));
- assert_msg_equals(message_assist_thoughts_unparsed_r7b,
- common_chat_parse(
- "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
- "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_COMMAND_R7B}));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
- "<|START_RESPONSE|>Hello, world!\nWhat's up?<|END_RESPONSE|>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts_call_idx,
- common_chat_parse(
- "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
- "<|START_ACTION|>[\n"
- " {\"tool_call_id\": \"0\", \"tool_name\": \"special_function\", \"parameters\": {\"arg1\": 1}}\n"
- "]<|END_ACTION|>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts_no_content,
- common_chat_parse(
- "<|START_THINKING|>I'm\nthinking<|END_THINKING|>"
- "<|START_ACTION|>[\n"
- " {\"tool_call_id\": \"0\", \"tool_name\": \"special",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_COMMAND_R7B,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- test_templates(tmpls.get(), end_tokens, message_assist_call_idx, tools,
- "<|START_THINKING|><|END_THINKING|>"
- "<|START_ACTION|>[\n"
- " {\"tool_call_id\": \"0\", \"tool_name\": \"special_function\", \"parameters\": {\"arg1\": 1}}\n"
- "]<|END_ACTION|>",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ true,
- COMMON_REASONING_FORMAT_DEEPSEEK);
- test_templates(tmpls.get(), end_tokens, message_assist, tools,
- "<|START_RESPONSE|>Hello, world!\n"
- "What's up?<|END_RESPONSE|>",
- /* expect_grammar_triggered= */ false);
- }
- {
- auto tmpls = read_templates("models/templates/google-gemma-2-2b-it.jinja");
- std::vector<std::string> end_tokens{ "<end_of_turn>" };
- assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_GENERIC, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_GENERIC,
- common_chat_templates_apply(
- read_templates("models/templates/microsoft-Phi-3.5-mini-instruct.jinja").get(),
- inputs_tools)
- .format);
- // Generic tool calls doesn't generate / parse content-only messages symmetrically.
- assert_equals(
- simple_assist_msg("{ \"tool_call\" : { \"name\" : \"t"),
- common_chat_parse(
- "{ \"tool_call\" : { \"name\" : \"t",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GENERIC,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- /* .parse_tool_calls = */ false,
- }));
- assert_equals(
- message_assist_empty,
- common_chat_parse(
- "{ \"tool_call\" : { \"name\" : \"t",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_GENERIC}));
- assert_equals(
- simple_assist_msg("", "", "puppeteer_screenshot", "{\"name\":\"servethehome_homepage\","),
- common_chat_parse(
- R"({"tool_call": {"name": "puppeteer_screenshot", "arguments": {"name": "servethehome_homepage",)",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_GENERIC}));
- assert_equals(
- message_assist_call_empty_args,
- common_chat_parse(
- "{ \"tool_call\" : { \"name\" : \"special_function\"",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_GENERIC}));
- assert_equals(
- message_assist_call_cutoff_args,
- common_chat_parse(
- "{ \"tool_call\" : { \"name\" : \"special_function\", \"arguments\" : { \"arg",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_GENERIC}));
- assert_msg_equals(message_assist,
- common_chat_parse(
- "{\n"
- " \"response\": \"Hello, world!\\nWhat's up?\"\n"
- "}",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_GENERIC}));
- test_templates(tmpls.get(), end_tokens, message_assist_call_id, tools,
- "{\n"
- " \"tool_calls\": [\n"
- " {\n"
- " \"name\": \"special_function\",\n"
- " \"arguments\": {\n"
- " \"arg1\": 1\n"
- " },\n"
- " \"id\": \"123456789\"\n"
- " }\n"
- " ]\n"
- "}");
- }
- {
- auto tmpls = read_templates("models/templates/mistralai-Mistral-Nemo-Instruct-2407.jinja");
- std::vector<std::string> end_tokens{ "</s>" };
- assert_equals(COMMON_CHAT_FORMAT_MISTRAL_NEMO, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- test_templates(
- tmpls.get(), end_tokens, message_assist_call_id, tools,
- "[TOOL_CALLS][{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}, \"id\": \"123456789\"}]");
- }
- {
- assert_msg_equals(
- simple_assist_msg("Réponse", "raisonnement"),
- common_chat_parse(
- message_assist_thoughts_unparsed_magistral.content,
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_MAGISTRAL,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- }
- {
- auto tmpls = read_templates("models/templates/Qwen-QwQ-32B.jinja");
- std::vector<std::string> end_tokens{ "<|im_end|>" };
- assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- }
- {
- auto tmpls = read_templates("models/templates/NousResearch-Hermes-2-Pro-Llama-3-8B-tool_use.jinja");
- std::vector<std::string> end_tokens{ "<|im_end|>" };
- assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_HERMES_2_PRO, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- assert_equals(
- COMMON_CHAT_FORMAT_HERMES_2_PRO,
- common_chat_templates_apply(
- read_templates("models/templates/NousResearch-Hermes-3-Llama-3.1-8B-tool_use.jinja").get(),
- inputs_tools)
- .format);
- assert_equals(
- COMMON_CHAT_FORMAT_HERMES_2_PRO,
- common_chat_templates_apply(
- read_templates("models/templates/Qwen-Qwen2.5-7B-Instruct.jinja").get(),
- inputs_tools)
- .format);
- // Test parsing
- assert_msg_equals(
- simple_assist_msg("", "", "python", ""),
- common_chat_parse(
- "```json\n"
- "<function_call> { \"name\" : \"python\"",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- simple_assist_msg("Let's call something\n"),
- common_chat_parse(
- "Let's call something\n"
- "<tool_call>{\"name\"",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(
- simple_assist_msg("Let's call something\n"),
- common_chat_parse(
- "Let's call something\n"
- "<tool_call>{\"name",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_call_thoughts,
- common_chat_parse(
- // QwQ-32B's template adds a trailing <think> if add_generation_prompt
- "I'm\nthinking</think>\n"
- "<tool_call>{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}</tool_call>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- }));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<tool_call>\n"
- "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</tool_call>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(message_assist_call_content,
- common_chat_parse(
- "Hello, world!\nWhat's up?<tool_call>\n"
- "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</tool_call>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<function=special_function>{\"arg1\": 1}</function>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<function name=\"special_function\">\n"
- "{\"arg1\": 1}\n"
- "</function>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<tool>\n"
- " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</tool>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<tools>\n"
- " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</tools>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<response>\n"
- " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</response>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "```xml\n"
- "<response>\n"
- " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</response>\n"
- "```",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "```xml\n"
- " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "```",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "```\n"
- " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "```",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "```\n"
- "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "```",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "```json\n"
- " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "```",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "```json\n"
- "\n"
- " <function_call> {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}} \n"
- " </function_call> \n"
- "``` ",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<json>\n"
- " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</json>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<xml>\n"
- " {\n"
- " \"name\": \"special_function\", \"arguments\": {\"arg1\": 1}\n"
- " }\n"
- "</xml>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<JSON>\n"
- " {\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</JSON>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "{\n \"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- // Test multiple tool calls
- common_chat_msg message_assist_multiple_calls;
- message_assist_multiple_calls.role = "assistant";
- message_assist_multiple_calls.content = "";
- message_assist_multiple_calls.tool_calls.push_back({"special_function", "{\"arg1\": 1}", ""});
- message_assist_multiple_calls.tool_calls.push_back({"python", "{\"code\":\"print('hello')\"}", ""});
- assert_msg_equals(
- message_assist_multiple_calls,
- common_chat_parse(
- "<tool_call>\n"
- "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</tool_call>\n"
- "<tool_call>\n"
- "{\"name\": \"python\", \"arguments\": {\"code\":\"print('hello')\"}}\n"
- "</tool_call>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- message_assist_multiple_calls,
- common_chat_parse(
- "<function=special_function>{\"arg1\": 1}</function>\n"
- "<function=python>{\"code\":\"print('hello')\"}</function>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(
- simple_assist_msg(
- "This is not a tool call:",
- "",
- "special_function",
- "{\"arg1\": 1}"),
- common_chat_parse(
- "This is not a tool call:\n"
- "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- assert_msg_equals(message_assist_thoughts_unparsed_deepseek,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_HERMES_2_PRO}));
- // assert_msg_equals(message_assist_thoughts_unparsed_deepseek,
- // common_chat_parse(
- // "I'm\nthinking</think>Hello, world!\nWhat's up?",
- // COMMON_CHAT_FORMAT_HERMES_2_PRO));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts_unparsed_md,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?\n```json\n{}```",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ true,
- /* .thinking_forced_open = */ false,
- /* .parse_tool_calls = */ false,
- }));
- assert_msg_equals(message_assist_thoughts_unparsed_md_partial,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?\n```json\n{}```",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ true,
- /* .thinking_forced_open = */ false,
- }));
- assert_msg_equals(message_assist_thoughts_unopened_unparsed,
- common_chat_parse(
- "I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- }));
- test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "<tool_call>\n"
- "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</tool_call>");
- // Test multiple tool calls with template
- common_chat_msg message_assist_multiple_calls_template;
- message_assist_multiple_calls_template.role = "assistant";
- message_assist_multiple_calls_template.content = "";
- message_assist_multiple_calls_template.tool_calls.push_back({"special_function", "{\"arg1\": 1}", ""});
- message_assist_multiple_calls_template.tool_calls.push_back({"python", "{\"code\":\"print('test')\"}", ""});
- test_templates(tmpls.get(), end_tokens, message_assist_multiple_calls_template, tools,
- "<tool_call>\n"
- "{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}\n"
- "</tool_call>\n"
- "<tool_call>\n"
- "{\"name\": \"python\", \"arguments\": {\"code\":\"print('test')\"}}\n"
- "</tool_call>");
- test_templates(tmpls.get(), end_tokens, message_assist_call_python_lines, tools,
- "<tool_call>\n"
- "{\"name\": \"python\", \"arguments\": {\"code\":\"# This is a program:\\nprint('hey')\"}}\n"
- "</tool_call>");
- assert_msg_equals(
- simple_assist_msg("", /* reasoning_content= */ "<tool_call>nah uhg</tool_call>"),
- common_chat_parse(
- "<think><tool_call>nah uhg</tool_call>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_HERMES_2_PRO,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- }
- {
- auto tmpls = read_templates("models/templates/meta-llama-Llama-3.1-8B-Instruct.jinja");
- std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
- assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS,
- common_chat_templates_apply(tmpls.get(), inputs_tools_builtin).format);
- assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X_WITH_BUILTIN_TOOLS,
- common_chat_templates_apply(
- read_templates("models/templates/meta-llama-Llama-3.3-70B-Instruct.jinja").get(),
- inputs_tools_builtin)
- .format);
- assert_equals(
- message_assist_call,
- common_chat_parse(
- "{\"name\": \"special_function\", \"parameters\": {\"arg1\": 1}}",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_LLAMA_3_X}));
- // test_templates(tmpls.get(), end_tokens, message_assist, tools, R"(?)", /* expect_grammar_triggered= */ false);
- test_templates(tmpls.get(), end_tokens, message_assist_call_code_interpreter, llama_3_1_tools,
- "<|python_tag|>code_interpreter.call(code=\"print('hey')\")");
- test_templates(tmpls.get(), end_tokens, message_assist_call_python, tools,
- "<|python_tag|>python.call(code=\"print('hey')\")");
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "{\"name\": \"special_function\", \"parameters\": {\"arg1\": 1}}");
- }
- {
- auto tmpls = read_templates("models/templates/meta-llama-Llama-3.2-3B-Instruct.jinja");
- std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
- assert_equals(COMMON_CHAT_FORMAT_LLAMA_3_X, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "{\"name\": \"special_function\", \"parameters\": {\"arg1\": 1}}");
- }
- {
- auto tmpls = read_templates("models/templates/meetkai-functionary-medium-v3.1.jinja");
- std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
- assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY,
- common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1,
- common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY,
- common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- for (auto is_partial : { false, true }) {
- assert_equals(
- message_assist_call,
- common_chat_parse(
- "<function=special_function>{\"arg1\": 1}</function>",
- is_partial,
- {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1}));
- }
- assert_equals(
- message_assist_call,
- common_chat_parse(
- "<function=special_function>{\"arg1\": 1}<",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_1_LLAMA_3_1}));
- test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "<function=special_function>{\"arg1\": 1}</function>");
- }
- {
- auto tmpls = read_templates("models/templates/meetkai-functionary-medium-v3.2.jinja");
- std::vector<std::string> end_tokens{ "<|eom_id|>", "<|eot_id|>" };
- assert_equals(COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- assert_msg_equals(
- simple_assist_msg(
- "Hello, world!\nnono\nWhat's up?",
- "",
- "special_function",
- "{\"arg1\": 1}"),
- common_chat_parse(
- "all\n"
- "Hello, world!\n"
- "nono\n"
- "What's up?>>>special_function\n"
- "{\"arg1\": 1}\n",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
- assert_msg_equals(message_assist_call_python_lines,
- common_chat_parse(
- "python\n"
- "# This is a program:\n"
- "print('hey')",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
- assert_msg_equals(message_assist_call_python_lines_unclosed,
- common_chat_parse(
- "python\n"
- "# This is a program:\n"
- "print('hey')",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
- assert_msg_equals(message_assist_call,
- common_chat_parse(
- "special_function\n"
- "{\"arg1\": 1} \n ",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
- assert_msg_equals(message_assist,
- common_chat_parse(
- "all\n"
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_FUNCTIONARY_V3_2}));
- test_templates(tmpls.get(), end_tokens, message_assist, {},
- "all\n"
- "Hello, world!\n"
- "What's up?",
- /* expect_grammar_triggered= */ false);
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "special_function\n"
- "{\"arg1\": 1}");
- }
- {
- auto tmpls = read_templates("models/templates/fireworks-ai-llama-3-firefunction-v2.jinja");
- std::vector<std::string> end_tokens{ "<|eot_id|>" };
- assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_FIREFUNCTION_V2, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- " functools[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}]");
- }
- {
- // Original DeepSeek R1 template. Leaves <|tool▁calls▁begin|> and others unclosed. Our logic fixes the prompt.
- auto tmpls = read_templates("models/templates/deepseek-ai-DeepSeek-R1-Distill-Llama-8B.jinja");
- std::vector<std::string> end_tokens{ "<|end▁of▁sentence|>" };
- for (const auto & inputs : { inputs_no_tools, inputs_tools }) {
- auto params = common_chat_templates_apply(tmpls.get(), inputs);
- assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, params.format);
- assert_equals(true, params.thinking_forced_open);
- }
- test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- test_templates(tmpls.get(), end_tokens, message_assist_thoughts, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- assert_msg_equals(
- simple_assist_msg("Hello, world!\nWhat's up?", "<think>I'm\nthinking"),
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_R1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- }));
- assert_msg_equals(
- simple_assist_msg("", "I need to remember the correct syntax. It starts with <|tool▁calls▁begin|> and ends with"),
- common_chat_parse(
- "I need to remember the correct syntax. It starts with <|tool▁calls▁begin|> and ends with",
- /* is_partial= */ true,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_R1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- }));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts_unopened_unparsed,
- common_chat_parse(
- "I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- }));
- assert_msg_equals(message_assist_thoughts,
- // Latest template update (as of 20250209) adds a trailing <think>\n if add_generation_prompt is true.
- common_chat_parse(
- "I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- }));
- // test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- // "<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>special_function\n"
- // "```json\n"
- // "{\"arg1\": 1}\n"
- // // Look what's not here: <|tool▁calls▁end|> (also missing the <|end▁of▁sentence|>, but that is removed lazily by the test's delta logic)
- // "```<|tool▁call▁end|>",
- // /* expect_grammar_triggered= */ true,
- // /* test_grammar_if_triggered= */ false);
- }
- {
- // Replacement DeepSeek R1 template. Makes the Distill Qwen 7B/32B models happy to call tools and all.
- auto tmpls = read_templates("models/templates/llama-cpp-deepseek-r1.jinja");
- std::vector<std::string> end_tokens{ "<|end▁of▁sentence|>" };
- assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_R1, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- test_templates(tmpls.get(), end_tokens, message_assist_thoughts, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- assert_msg_equals(message_assist_thoughts_unparsed_deepseek,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_DEEPSEEK_R1}));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- }));
- assert_msg_equals(message_assist_call_thoughts_unparsed,
- common_chat_parse(
- "<think>I'm\nthinking</think>\n\n"
- "<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>special_function\n"
- "```json\n"
- "{\"arg1\": 1}\n"
- "```<|tool▁call▁end|><|tool▁calls▁end|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_DEEPSEEK_R1}));
- assert_msg_equals(message_assist_call,
- common_chat_parse(
- "<|tool▁calls|>function<|tool▁sep|>special_function\n"
- "```json\n"
- "{\"arg1\": 1}\n"
- "```<|tool▁call▁end|><|tool▁calls▁end|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_DEEPSEEK_R1}));
- assert_msg_equals(message_assist_call_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think>\n\n"
- "<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>special_function\n"
- "```json\n"
- "{\"arg1\": 1}\n"
- "```<|tool▁call▁end|><|tool▁calls▁end|>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_DEEPSEEK_R1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "<|tool▁calls▁begin|><|tool▁call▁begin|>function<|tool▁sep|>special_function\n"
- "```json\n"
- "{\"arg1\": 1}\n"
- "```<|tool▁call▁end|><|tool▁calls▁end|>");
- }
- {
- auto tmpls = read_templates("models/templates/ibm-granite-granite-3.3-2B-Instruct.jinja");
- std::vector<std::string> end_tokens{ "<|end_of_text|>" };
- assert_equals(COMMON_CHAT_FORMAT_GRANITE, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_GRANITE, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- // Test parsing regular content
- assert_msg_equals(message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_GRANITE}));
- assert_msg_equals(
- message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_GRANITE}));
- // Test parsing content with thinking
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GRANITE,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts_unparsed_deepseek,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_GRANITE}));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think><response>Hello, world!\nWhat's up?",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GRANITE,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think><response>Hello, world!\nWhat's up?</response>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GRANITE,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(simple_assist_msg("<think>I'm\nthinking</think><response>Hello, world!\nWhat's up?</response>"),
- common_chat_parse(
- "<think>I'm\nthinking</think><response>Hello, world!\nWhat's up?</response>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_GRANITE}));
- assert_msg_equals(message_assist_empty,
- common_chat_parse(
- "<think",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GRANITE,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(message_assist_empty,
- common_chat_parse(
- "<think",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_GRANITE}));
- assert_msg_equals(message_assist_thoughts_no_content,
- common_chat_parse(
- "<think>I'm\nthinking",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GRANITE,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- assert_msg_equals(
- message_assist_empty,
- common_chat_parse(
- "<think>I'm\nthinking</think><response",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_GRANITE}));
- // Test parsing tool calls
- assert_msg_equals(message_assist_call,
- common_chat_parse(
- "<|tool_call|>[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}]",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_GRANITE}));
- assert_msg_equals(
- message_assist_call_empty_args,
- common_chat_parse(
- "<|tool_call|>[{\"name\": \"special_function\"",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_GRANITE}));
- assert_msg_equals(
- message_assist_call_cutoff_args,
- common_chat_parse(
- "<|tool_call|>[{\"name\": \"special_function\", \"arguments\": {\"arg",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_GRANITE}));
- assert_msg_equals(
- message_assist_call_cutoff_args,
- common_chat_parse(
- "<|tool_call|>[{\"name\": \"special_function\", \"arguments\": {\"arg",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GRANITE,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test parsing tool calls with thinking
- assert_msg_equals(
- message_assist_call_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think><|tool_call|>[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}, {",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GRANITE,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test template generation for regular content
- test_templates(tmpls.get(), end_tokens, message_assist, tools,
- "Hello, world!\nWhat's up?",
- /* expect_grammar_triggered= */ false);
- // Test template generation for tool calls
- test_templates(tmpls.get(), end_tokens, message_assist_call_id, tools,
- "{\n"
- " \"tool_calls\": [\n"
- " {\n"
- " \"name\": \"special_function\",\n"
- " \"arguments\": {\n"
- " \"arg1\": 1\n"
- " },\n"
- " \"id\": \"123456789\"\n"
- " }\n"
- " ]\n"
- "}",
- /* expect_grammar_triggered= */ false
- );
- }
- {
- auto tmpls = read_templates("models/templates/openai-gpt-oss-120b.jinja");
- std::vector<std::string> end_tokens{ "<|return|>", "<|call|>" };
- assert_equals(COMMON_CHAT_FORMAT_GPT_OSS, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_GPT_OSS, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- assert_msg_equals(simple_assist_msg("", "I'm\nthink"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthink",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("", "I'm\nthinking"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>final<|message|>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>commentary to=functions.special_function <|constrain|>json<|message|>{\"arg1",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>commentary to=functions.special_function<|message|>{\"arg1",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\": 1}"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>commentary to=functions.special_function <|constrain|>json<|message|>{\"arg1\": 1}",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\": 1}"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>analysis to=functions.special_function <|constrain|>json<|message|>{\"arg1\": 1}",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>commentary<|message|>Hello, world!\nWhat's up?",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking", "special_function", "{\"arg1\": 1}"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>commentary<|message|>Hello, world!\nWhat's up?<|end|>"
- "<|start|>assistant<|channel|>commentary to=functions.special_function <|constrain|>json<|message|>{\"arg1\": 1}",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- // Test parse_tool_calls == false
- assert_msg_equals(
- simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>final<|message|>Hello, world!\nWhat's up?",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ false,
- /* .parse_tool_calls = */ false,
- }));
- assert_msg_equals(
- simple_assist_msg("", "I'm\nthinking"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>commentary to=functions.special_function<|message|>{\"arg1",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ false,
- /* .parse_tool_calls = */ false,
- }));
- assert_msg_equals(
- simple_assist_msg("", "I'm\nthinking"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>commentary to=functions.special_function <|constrain|>json<|message|>{\"arg1\": 1}",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ false,
- /* .parse_tool_calls = */ false,
- }));
- // Test reasoning formats
- assert_msg_equals(
- simple_assist_msg(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>Hello, world!\nWhat's up?"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>final<|message|>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_NONE,
- }));
- assert_msg_equals(
- simple_assist_msg(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>Hello, world!\nWhat's up?"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant<|channel|>final<|message|>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- /* .reasoning_in_content = */ true,
- }));
- // Test tool calling in role header
- assert_msg_equals(simple_assist_msg("", "", "special_function", "{\"arg1\": 1}"),
- common_chat_parse(
- " to=functions.special_function<|channel|>commentary <|constrain|>json<|message|>{\"arg1\": 1}",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("", "", "special_function", "{\"arg1\": 1}"),
- common_chat_parse(
- " to=functions.special_function<|channel|>analysis <|constrain|>json<|message|>{\"arg1\": 1}",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- assert_msg_equals(simple_assist_msg("", "I'm\nthinking", "special_function", "{\"arg1\": 1}"),
- common_chat_parse(
- "<|channel|>analysis<|message|>I'm\nthinking<|end|>"
- "<|start|>assistant to=functions.special_function<|channel|>analysis <|constrain|>json<|message|>{\"arg1\": 1}",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GPT_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_AUTO,
- }));
- }
- {
- // Seed-OSS format tests
- auto tmpls = read_templates("models/templates/ByteDance-Seed-OSS.jinja");
- std::vector<std::string> end_tokens{ "<seed:eos>" };
- assert_equals(COMMON_CHAT_FORMAT_SEED_OSS, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_SEED_OSS, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- test_templates(tmpls.get(), end_tokens, message_assist, tools, "Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- // Test simple reasoning content
- assert_msg_equals(
- simple_assist_msg("Hello, world!", "I'm thinking about the answer"),
- common_chat_parse(
- "<seed:think>I'm thinking about the answer</seed:think>Hello, world!",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_SEED_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test budget reflection tags
- common_chat_msg msg_budget_reflect;
- msg_budget_reflect.role = "assistant";
- msg_budget_reflect.content = "<seed:cot_budget_reflect>Token usage: 45/1000\nI should continue thinking to find the best solution.</seed:cot_budget_reflect>I need to calculate this step by step.";
- msg_budget_reflect.reasoning_content = "Token usage: 45/1000\nI should continue thinking to find the best solution.";
- assert_msg_equals(
- msg_budget_reflect,
- common_chat_parse(
- "<seed:think>Token usage: 45/1000\nI should continue thinking to find the best solution.</seed:think>"
- "<seed:cot_budget_reflect>Token usage: 45/1000\nI should continue thinking to find the best solution.</seed:cot_budget_reflect>"
- "I need to calculate this step by step.",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_SEED_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test tool calls with Seed-OSS format
- common_chat_msg msg_tool_call;
- msg_tool_call.role = "assistant";
- msg_tool_call.tool_calls.push_back({"calculate_sum", "{\"numbers\": [1, 2, 3]}", ""});
- assert_msg_equals(
- msg_tool_call,
- common_chat_parse(
- "<seed:tool_call>\n"
- "<function=calculate_sum>\n"
- "<parameter=numbers>[1, 2, 3]</parameter>\n"
- "</function>\n"
- "</seed:tool_call>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_SEED_OSS}));
- // Test reasoning + tool call combination
- common_chat_msg msg_reasoning_tool;
- msg_reasoning_tool.role = "assistant";
- msg_reasoning_tool.content = "";
- msg_reasoning_tool.reasoning_content = "I need to calculate the sum of these numbers";
- msg_reasoning_tool.tool_calls.push_back({"calculate_sum", "{\"numbers\": [1, 2, 3]}", ""});
- assert_msg_equals(
- msg_reasoning_tool,
- common_chat_parse(
- "<seed:think>I need to calculate the sum of these numbers</seed:think>"
- "<seed:tool_call>\n"
- "<function=calculate_sum>\n"
- "<parameter=numbers>[1, 2, 3]</parameter>\n"
- "</function>\n"
- "</seed:tool_call>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_SEED_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test deltas: the number of tool calls in partial parses should never decrease
- std::string tool_msg = "<seed:tool_call>\n"
- "<function=fun>\n"
- "<parameter=smth>[1, 2, 3]</parameter>\n"
- "</function>";
- std::size_t previousToolCalls = 0;
- for (std::size_t i = std::string("<seed:tool_call>").length(); i < tool_msg.length() - 1; i++) {
- auto partial = tool_msg.substr(0, i);
- auto partial_res = common_chat_parse(partial, true, { COMMON_CHAT_FORMAT_SEED_OSS, COMMON_REASONING_FORMAT_DEEPSEEK });
- if (partial_res.tool_calls.size() < previousToolCalls) {
- throw std::runtime_error("Tool call size decreased on partial: " + partial + " from " + std::to_string(previousToolCalls) + " to " + std::to_string(partial_res.tool_calls.size()));
- }
- previousToolCalls = partial_res.tool_calls.size();
- }
- // Test multiple parameters in tool call
- common_chat_msg msg_multi_param;
- msg_multi_param.role = "assistant";
- msg_multi_param.tool_calls.push_back({"process_data", "{\"input\": \"test\", \"format\": \"json\"}", ""});
- assert_msg_equals(
- msg_multi_param,
- common_chat_parse(
- "<seed:tool_call>\n"
- "<function=process_data>\n"
- "<parameter=input>test</parameter>\n"
- "<parameter=format>json</parameter>\n"
- "</function>\n"
- "</seed:tool_call>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_SEED_OSS}));
- // Test partial parsing for incomplete tool call - don't actually add the call until parsing parameters is done
- assert_msg_equals(
- simple_assist_msg("", "", "calculate_sum", "{\"numbers\":"),
- common_chat_parse(
- "<seed:tool_call>\n"
- "<function=calculate_sum>\n"
- "<parameter=numbers>[1,\n",
- /* is_partial= */ true,
- {COMMON_CHAT_FORMAT_SEED_OSS}));
- // Test incomplete reasoning tag
- assert_msg_equals(
- simple_assist_msg("", "I was thinking"),
- common_chat_parse(
- "<seed:think>I was thinking",
- /* is_partial= */ true,
- {
- /* .format = */ COMMON_CHAT_FORMAT_SEED_OSS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test content without reasoning
- assert_msg_equals(
- simple_assist_msg("This is a simple response without reasoning."),
- common_chat_parse(
- "This is a simple response without reasoning.",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_SEED_OSS}));
- }
- {
- auto tmpls = read_templates("models/templates/NVIDIA-Nemotron-Nano-v2.jinja");
- std::vector<std::string> end_tokens{ "<SPECIAL_12>" };
- assert_equals(COMMON_CHAT_FORMAT_NEMOTRON_V2, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_NEMOTRON_V2, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- // Test parsing regular content
- assert_msg_equals(message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_NEMOTRON_V2}));
- // Test parsing content with thinking
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_NEMOTRON_V2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test parsing tool calls
- assert_msg_equals(message_assist_call,
- common_chat_parse(
- "<TOOLCALL>[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}]</TOOLCALL>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_NEMOTRON_V2}));
- // Test parsing tool calls with thinking
- assert_msg_equals(message_assist_call_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think><TOOLCALL>[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}]</TOOLCALL>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_NEMOTRON_V2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }));
- // Test tool calls with extra content
- assert_msg_equals(message_assist_call_content,
- common_chat_parse(
- "<TOOLCALL>[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}]</TOOLCALL>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_NEMOTRON_V2}
- ));
- // Test tool calls with extra content AND thinking
- assert_msg_equals(message_assist_call_thoughts_content,
- common_chat_parse(
- "<think>I'm\nthinking</think><TOOLCALL>[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}]</TOOLCALL>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_NEMOTRON_V2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }));
- // Test template generation for regular content
- test_templates(tmpls.get(), end_tokens, message_assist, tools,
- "Hello, world!\nWhat's up?\n",
- /* expect_grammar_triggered= */ false);
- // Test template generation for tool calls
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "<TOOLCALL>[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}]</TOOLCALL>",
- /* expect_grammar_triggered= */ true
- );
- }
- {
- auto tmpls = read_templates("models/templates/deepseek-ai-DeepSeek-V3.1.jinja");
- std::vector<std::string> end_tokens{ "<|end▁of▁sentence|>" };
- for (const auto & inputs : { inputs_no_tools, inputs_tools }) {
- auto params = common_chat_templates_apply(tmpls.get(), inputs);
- assert_equals(COMMON_CHAT_FORMAT_DEEPSEEK_V3_1, params.format);
- assert_equals(true, params.thinking_forced_open);
- }
- test_templates(tmpls.get(), end_tokens, message_assist, tools, "</think>Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- test_templates(tmpls.get(), end_tokens, message_assist_thoughts, tools, "</think>Hello, world!\nWhat's up?", /* expect_grammar_triggered= */ false);
- assert_msg_equals(
- simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking"),
- common_chat_parse(
- "I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_V3_1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- }));
- // variant: thinking forced open, reasoning_format none
- assert_msg_equals(
- simple_assist_msg("REASONING</think>ok", ""),
- common_chat_parse(
- "REASONING</think>ok",
- /* is_partial= */ false,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_V3_1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_NONE,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- /* .parse_tool_calls = */ true,
- }));
- // variant: happy path for when it works as the model card says it should
- assert_msg_equals(
- simple_assist_msg("", "", "get_time", "{\"city\":\"Tokyo\"}"),
- common_chat_parse(
- "<|tool▁calls▁begin|><|tool▁call▁begin|>get_time<|tool▁sep|>{\"city\": \"Tokyo\"}<|tool▁call▁end|><|tool▁calls▁end|>",
- /* is_partial= */ false,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_V3_1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ false,
- /* .parse_tool_calls = */ true,
- }));
- // variant: simple + thinking open
- assert_msg_equals(
- simple_assist_msg("", "REASONING", "get_time", "{\"city\":\"Tokyo\"}"),
- common_chat_parse(
- "REASONING</think><|tool▁calls▁begin|><|tool▁call▁begin|>get_time<|tool▁sep|>{\"city\": \"Tokyo\"}<|tool▁call▁end|><|tool▁calls▁end|>",
- /* is_partial= */ false,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_V3_1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- /* .parse_tool_calls = */ true,
- }));
- // variant: simple + multiple tool calls
- common_chat_msg message_assist_multiple_calls;
- message_assist_multiple_calls.role = "assistant";
- message_assist_multiple_calls.content = "CONTENT";
- message_assist_multiple_calls.tool_calls.push_back({"get_time", "{\"city\":\"Paris\"}", ""});
- message_assist_multiple_calls.tool_calls.push_back({"get_weather", "{\"city\":\"Paris\"}", ""});
- assert_msg_equals(
- message_assist_multiple_calls,
- common_chat_parse(
- "CONTENT<|tool▁calls▁begin|><|tool▁call▁begin|>get_time<|tool▁sep|>{\"city\": \"Paris\"}<|tool▁call▁end|><|tool▁call▁begin|>get_weather<|tool▁sep|>{\"city\": \"Paris\"}<|tool▁call▁end|><|tool▁calls▁end|>",
- /* is_partial= */ false,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_V3_1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ false,
- /* .parse_tool_calls = */ true,
- }));
- // variant: thinking forced open + tool call in reasoning content
- assert_msg_equals(
- simple_assist_msg("", "REASONING<|tool▁calls▁begin|><|tool▁call▁begin|>get_time2<|tool▁sep|>{\"city\": \"Tokyo2\"}<|tool▁call▁end|><|tool▁calls▁end|>REASONING", "get_time", "{\"city\":\"Tokyo\"}"),
- common_chat_parse(
- "REASONING<|tool▁calls▁begin|><|tool▁call▁begin|>get_time2<|tool▁sep|>{\"city\": \"Tokyo2\"}<|tool▁call▁end|><|tool▁calls▁end|>REASONING</think><|tool▁calls▁begin|><|tool▁call▁begin|>get_time<|tool▁sep|>{\"city\": \"Tokyo\"}<|tool▁call▁end|><|tool▁calls▁end|>",
- /* is_partial= */ false,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_V3_1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- /* .parse_tool_calls = */ true,
- }));
- // variant: thinking forced open + tool call in reasoning content + no closing think + not partial
- // This is a bit of a fine tuning issue on the model's part IMO. It really should not be attempting
- // to make tool calls in reasoning content according to the model card, but it does sometimes, so
- // add the reasoning content as regular content and parse the tool calls.
- assert_msg_equals(
- simple_assist_msg("REASONING", "", "get_time", "{\"city\":\"Tokyo\"}"),
- common_chat_parse(
- "REASONING<|tool▁calls▁begin|><|tool▁call▁begin|>get_time<|tool▁sep|>{\"city\": \"Tokyo\"}<|tool▁call▁end|><|tool▁calls▁end|>",
- /* is_partial= */ false,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_V3_1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- /* .parse_tool_calls = */ true,
- }));
- // variant: thinking forced open + tool call in reasoning content + no closing think + partial
- assert_msg_equals(
- simple_assist_msg("", "REASONING<|tool▁calls▁begin|><|tool▁call▁begin|>get_time<|tool▁sep|>{\"city\": \"Tokyo\"}<|tool▁call▁end|><|tool▁calls▁end|>", "", ""),
- common_chat_parse(
- "REASONING<|tool▁calls▁begin|><|tool▁call▁begin|>get_time<|tool▁sep|>{\"city\": \"Tokyo\"}<|tool▁call▁end|><|tool▁calls▁end|>",
- /* is_partial= */ true,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_V3_1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ true,
- /* .parse_tool_calls = */ true,
- }));
- // variant: thinking not forced open + missing reasoning + no tool calls
- assert_msg_equals(
- simple_assist_msg("CONTENT", ""),
- common_chat_parse(
- "CONTENT",
- /* is_partial= */ false,
- {
- COMMON_CHAT_FORMAT_DEEPSEEK_V3_1,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* .reasoning_in_content = */ false,
- /* .thinking_forced_open = */ false,
- /* .parse_tool_calls = */ true,
- }));
- }
- {
- auto tmpls = read_templates("models/templates/Apertus-8B-Instruct.jinja");
- std::vector<std::string> end_tokens{ "<|assistant_end|>" };
- assert_equals(COMMON_CHAT_FORMAT_APERTUS, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_APERTUS, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- // Test parsing regular content
- assert_msg_equals(message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_APERTUS}));
- // Test parsing content with thinking
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<|inner_prefix|>I'm\nthinking<|inner_suffix|>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_APERTUS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test parsing tool calls
- assert_msg_equals(message_assist_call,
- common_chat_parse(
- "<|tools_prefix|>[{\"special_function\": {\"arg1\": 1}}]<|tools_suffix|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_APERTUS}));
- // Test parsing tool calls with thinking
- assert_msg_equals(message_assist_call_thoughts,
- common_chat_parse(
- "<|inner_prefix|>I'm\nthinking<|inner_suffix|><|tools_prefix|>[{\"special_function\": {\"arg1\": 1}}]<|tools_suffix|>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_APERTUS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }));
- // Test tool calls with extra content
- assert_msg_equals(message_assist_call_content,
- common_chat_parse(
- "<|tools_prefix|>[{\"special_function\": {\"arg1\": 1}}]<|tools_suffix|>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_APERTUS}
- ));
- // Test tool calls with extra content AND thinking
- assert_msg_equals(message_assist_call_thoughts_content,
- common_chat_parse(
- "<|inner_prefix|>I'm\nthinking<|inner_suffix|><|tools_prefix|>[{\"special_function\": {\"arg1\": 1}}]<|tools_suffix|>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_APERTUS,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }));
- // Test template generation for regular content
- test_templates(tmpls.get(), end_tokens, message_assist, tools,
- "Hello, world!\nWhat's up?",
- /* expect_grammar_triggered= */ false);
- // Test template generation for tool calls
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "<|tools_prefix|>[{\"special_function\": {\"arg1\": 1}}]<|tools_suffix|>",
- /* expect_grammar_triggered= */ true
- );
- assert_equals(true, common_chat_templates_support_enable_thinking(tmpls.get()));
- }
- {
- // LFM2 format tests
- auto tmpls = read_templates("models/templates/llama-cpp-lfm2.jinja");
- std::vector<std::string> end_tokens{ "<|im_end|>" };
- auto inputs_tools_forced_json_schema = std::invoke([&]() -> common_chat_templates_inputs {
- common_chat_templates_inputs inputs;
- inputs.messages = {
- std::invoke([&]() -> common_chat_msg {
- common_chat_msg msg;
- msg.role = "system";
- msg.content = "force json schema.\n";
- return msg;
- }),
- message_user,
- };
- inputs.tools = {special_function_tool};
- return inputs;
- });
- {
- auto params = common_chat_templates_apply(tmpls.get(), inputs_no_tools);
- assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, params.format);
- assert_equals(false, params.grammar_lazy);
- assert_equals(std::string(R"(<|im_start|>user
- Hey there!<|im_end|>
- <|im_start|>assistant
- )"), params.prompt);
- }
- {
- auto params = common_chat_templates_apply(tmpls.get(), inputs_tools);
- assert_equals(COMMON_CHAT_FORMAT_CONTENT_ONLY, params.format);
- assert_equals(false, params.grammar_lazy);
- assert_equals(std::string(R"(<|im_start|>system
- List of tools: <|tool_list_start|>[{"type": "function", "function": {"name": "special_function", "description": "I'm special", "parameters": {"type": "object", "properties": {"arg1": {"type": "integer", "description": "The arg."}}, "required": ["arg1"]}}}]<|tool_list_end|><|im_end|>
- <|im_start|>user
- Hey there!<|im_end|>
- <|im_start|>assistant
- )"), params.prompt);
- assert_equals(true, params.grammar.empty());
- }
- {
- auto params = common_chat_templates_apply(tmpls.get(), inputs_tools_forced_json_schema);
- assert_equals(COMMON_CHAT_FORMAT_LFM2_WITH_JSON_TOOLS, params.format);
- assert_equals(true, params.grammar_lazy);
- assert_equals(std::string(R"(<|im_start|>system
- List of tools: <|tool_list_start|>[{"type": "function", "function": {"name": "special_function", "description": "I'm special", "parameters": {"type": "object", "properties": {"arg1": {"type": "integer", "description": "The arg."}}, "required": ["arg1"]}}}]<|tool_list_end|><|im_end|>
- <|im_start|>user
- Hey there!<|im_end|>
- <|im_start|>assistant
- )"), params.prompt);
- assert_equals(false, params.grammar.empty());
- }
- // Test parsing regular content
- assert_msg_equals(message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_LFM2_WITH_JSON_TOOLS}));
- // Test single tool call with JSON format
- common_chat_msg msg_single_tool_call;
- msg_single_tool_call.role = "assistant";
- msg_single_tool_call.tool_calls.push_back({"special_function", "{\"arg1\":1}", ""});
- assert_msg_equals(
- msg_single_tool_call,
- common_chat_parse(
- "<|tool_call_start|>[{\"name\": \"special_function\", \"arguments\": {\"arg1\": 1}}]<|tool_call_end|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_LFM2_WITH_JSON_TOOLS}));
- // Test tool call with string argument
- common_chat_msg msg_tool_call_string;
- msg_tool_call_string.role = "assistant";
- msg_tool_call_string.tool_calls.push_back({"get_weather", "{\"location\":\"Paris\"}", ""});
- assert_msg_equals(
- msg_tool_call_string,
- common_chat_parse(
- "<|tool_call_start|>[{\"name\": \"get_weather\", \"arguments\": {\"location\": \"Paris\"}}]<|tool_call_end|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_LFM2_WITH_JSON_TOOLS}));
- // Test tool call with multiple arguments
- common_chat_msg msg_multi_args;
- msg_multi_args.role = "assistant";
- msg_multi_args.tool_calls.push_back({"calculate", "{\"x\":10,\"y\":20,\"operation\":\"add\"}", ""});
- assert_msg_equals(
- msg_multi_args,
- common_chat_parse(
- "<|tool_call_start|>[{\"name\": \"calculate\", \"arguments\": {\"x\": 10, \"y\": 20, \"operation\": \"add\"}}]<|tool_call_end|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_LFM2_WITH_JSON_TOOLS}));
- // Test multiple tool calls in single array
- common_chat_msg msg_multiple_tools;
- msg_multiple_tools.role = "assistant";
- msg_multiple_tools.tool_calls.push_back({"get_weather", "{\"location\":\"Paris\"}", ""});
- msg_multiple_tools.tool_calls.push_back({"get_time", "{\"timezone\":\"UTC\"}", ""});
- assert_msg_equals(
- msg_multiple_tools,
- common_chat_parse(
- "<|tool_call_start|>[{\"name\": \"get_weather\", \"arguments\": {\"location\": \"Paris\"}}, {\"name\": \"get_time\", \"arguments\": {\"timezone\": \"UTC\"}}]<|tool_call_end|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_LFM2_WITH_JSON_TOOLS}));
- // Test tool call with content before
- common_chat_msg msg_content_before_tool;
- msg_content_before_tool.role = "assistant";
- msg_content_before_tool.content = "Let me check the weather for you.";
- msg_content_before_tool.tool_calls.push_back({"get_weather", "{\"location\":\"Paris\"}", ""});
- assert_msg_equals(
- msg_content_before_tool,
- common_chat_parse(
- "Let me check the weather for you.<|tool_call_start|>[{\"name\": \"get_weather\", \"arguments\": {\"location\": \"Paris\"}}]<|tool_call_end|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_LFM2_WITH_JSON_TOOLS}));
- // Test tool call with content after
- common_chat_msg msg_content_after_tool;
- msg_content_after_tool.role = "assistant";
- msg_content_after_tool.content = "Here's the result.";
- msg_content_after_tool.tool_calls.push_back({"get_weather", "{\"location\":\"Paris\"}", ""});
- assert_msg_equals(
- msg_content_after_tool,
- common_chat_parse(
- "<|tool_call_start|>[{\"name\": \"get_weather\", \"arguments\": {\"location\": \"Paris\"}}]<|tool_call_end|>Here's the result.",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_LFM2_WITH_JSON_TOOLS}));
- // Test tool call with newlines (common in LLM output)
- common_chat_msg msg_tool_call_newlines;
- msg_tool_call_newlines.role = "assistant";
- msg_tool_call_newlines.tool_calls.push_back({"get_current_time", "{\"location\":\"Paris\"}", ""});
- assert_msg_equals(
- msg_tool_call_newlines,
- common_chat_parse(
- "<|tool_call_start|>[{\n \"name\": \"get_current_time\",\n \"arguments\": {\n \"location\": \"Paris\"\n }\n}]<|tool_call_end|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_LFM2_WITH_JSON_TOOLS}));
- // Note: LFM2 uses JSON format for tool calls: [{"name": "...", "arguments": {...}}]
- // Unlike other formats, LFM2 template does not render tool calls in conversation history,
- // so we don't use test_templates() for tool call generation. Instead, the parsing tests
- // above verify edge cases and format variations for the tool call output format.
- }
- {
- auto tmpls = read_templates("models/templates/MiniMax-M2.jinja");
- std::vector<std::string> end_tokens{ "[e~[" };
- assert_equals(COMMON_CHAT_FORMAT_MINIMAX_M2, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_MINIMAX_M2, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- // Test parsing regular content
- assert_msg_equals(message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_MINIMAX_M2}));
- // Test parsing content with thinking
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_MINIMAX_M2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test parsing tool calls
- assert_msg_equals(message_assist_call,
- common_chat_parse(
- "<minimax:tool_call><invoke name=\"special_function\"><parameter name=\"arg1\">1</parameter></invoke></minimax:tool_call>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_MINIMAX_M2}));
- // Test parsing tool calls with thinking
- assert_msg_equals(message_assist_call_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think><minimax:tool_call><invoke name=\"special_function\"><parameter name=\"arg1\">1</parameter></invoke></minimax:tool_call>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_MINIMAX_M2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }));
- // Test tool calls with extra content
- assert_msg_equals(message_assist_call_content,
- common_chat_parse(
- "<minimax:tool_call><invoke name=\"special_function\"><parameter name=\"arg1\">1</parameter></invoke></minimax:tool_call>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_MINIMAX_M2}
- ));
- // Test tool calls with extra content AND thinking
- assert_msg_equals(message_assist_call_thoughts_content,
- common_chat_parse(
- "<think>I'm\nthinking</think><minimax:tool_call><invoke name=\"special_function\"><parameter name=\"arg1\">1</parameter></invoke></minimax:tool_call>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_MINIMAX_M2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }));
- // Test streaming
- test_parser_with_streaming(message_assist_call_thoughts_content,
- "<think>I'm\nthinking\n</think>Hello, world!\nWhat's up?\n<minimax:tool_call><invoke name=\"special_function\"><parameter name=\"arg1\">1</parameter></invoke></minimax:tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_MINIMAX_M2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(message_assist_call_thoughts_unparsed,
- "<think>I'm\nthinking</think>\n\n<minimax:tool_call><invoke name=\"special_function\"><parameter name=\"arg1\">1</parameter></invoke></minimax:tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_MINIMAX_M2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_NONE
- }); });
- test_parser_with_streaming(message_assist_call_thoughts_content,
- "<think>I'm\nthinking\n</think>\n\nHello, world!\nWhat's up?\n\n<minimax:tool_call>\n<invoke name=\"special_function\">\n<parameter name=\"arg1\">1</parameter>\n</invoke>\n</minimax:tool_call>\n",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_MINIMAX_M2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(message_assist_call_withopt,
- "<minimax:tool_call>\n<invoke name=\"special_function_with_opt\">\n<parameter name=\"arg1\">1</parameter>\n<parameter name=\"arg2\">2</parameter>\n</invoke>\n</minimax:tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_MINIMAX_M2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_NONE
- }); });
- // Test template generation for regular content
- test_templates(tmpls.get(), end_tokens, message_assist, tools,
- "Hello, world!\nWhat's up?",
- /* expect_grammar_triggered= */ false);
- // Test template generation for tool calls
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "<minimax:tool_call>\n<invoke name=\"special_function\">\n<parameter name=\"arg1\">1</parameter>\n</invoke>\n</minimax:tool_call>",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ true,
- /* common_reasoning_format= */ COMMON_REASONING_FORMAT_NONE,
- /* ignore_whitespace_differences= */ true
- );
- // Test template generation for tools with optional parameters
- test_templates(tmpls.get(), end_tokens, message_assist_call_noopt, tools,
- "<minimax:tool_call>\n<invoke name=\"special_function_with_opt\">\n<parameter name=\"arg1\">1</parameter>\n</invoke>\n</minimax:tool_call>",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ true,
- /* common_reasoning_format= */ COMMON_REASONING_FORMAT_NONE,
- /* ignore_whitespace_differences= */ true
- );
- test_templates(tmpls.get(), end_tokens, message_assist_call_withopt, tools,
- "<minimax:tool_call>\n<invoke name=\"special_function_with_opt\">\n<parameter name=\"arg1\">1</parameter>\n<parameter name=\"arg2\">2</parameter>\n</invoke>\n</minimax:tool_call>",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ true,
- /* common_reasoning_format= */ COMMON_REASONING_FORMAT_NONE,
- /* ignore_whitespace_differences= */ true
- );
- }
- {
- auto tmpls = read_templates("models/templates/GLM-4.6.jinja");
- std::vector<std::string> end_tokens{ "<|assistant|>", "<|observation|>" };
- assert_equals(COMMON_CHAT_FORMAT_GLM_4_5, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_GLM_4_5, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- // Test parsing regular content
- assert_msg_equals(message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_GLM_4_5}));
- // Test parsing content with thinking
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "\n<think>I'm\nthinking</think>\nHello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GLM_4_5,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }), true);
- // Test parsing tool calls
- assert_msg_equals(message_assist_call,
- common_chat_parse(
- "\n<tool_call>special_function\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_GLM_4_5}), true);
- // Test parsing tool calls with thinking
- assert_msg_equals(message_assist_call_thoughts,
- common_chat_parse(
- "\n<think>I'm\nthinking</think>\n<tool_call>special_function\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GLM_4_5,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }), true);
- // Test tool calls with extra content
- assert_msg_equals(message_assist_call_content,
- common_chat_parse(
- "\n<tool_call>special_function\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_GLM_4_5}
- ), true);
- // Test tool calls with extra content AND thinking
- assert_msg_equals(message_assist_call_thoughts_content,
- common_chat_parse(
- "\n<think>I'm\nthinking</think>Hello, world!\nWhat's up?\n<tool_call>special_function\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_GLM_4_5,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }), true);
- // Test streaming
- test_parser_with_streaming(message_assist_call_thoughts_content,
- "\n<think>I'm\nthinking</think>Hello, world!\nWhat's up?\n<tool_call>special_function\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_GLM_4_5,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(message_assist_call_thoughts_unparsed,
- "\n<think>I'm\nthinking</think>\n\n<tool_call>special_function\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_GLM_4_5,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_NONE
- }); });
- test_parser_with_streaming(message_assist_call_withopt,
- "\n<think></think>\n<tool_call>special_function_with_opt\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n<arg_key>arg2</arg_key>\n<arg_value>2</arg_value>\n</tool_call>\n",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_GLM_4_5,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(
- simple_assist_msg("", "", "complex_function", "{\"name\":\"John Doe\",\"age\":30,\"active\":true,\"score\":95.5}"),
- "<tool_call>complex_function\n"
- "<arg_key>name</arg_key>\n"
- "<arg_value>John Doe</arg_value>\n"
- "<arg_key>age</arg_key>\n"
- "<arg_value>30</arg_value>\n"
- "<arg_key>active</arg_key>\n"
- "<arg_value>true</arg_value>\n"
- "<arg_key>score</arg_key>\n"
- "<arg_value>95.5</arg_value>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_GLM_4_5}); });
- test_parser_with_streaming(
- simple_assist_msg("", "", "web_search", "{\"query\":\"\\\"From Zero\\\" Linkin Park album tracklist complete songs\",\"limit\":3,\"type\":\"text\"}"),
- "<tool_call>web_search\n"
- "<arg_key>query</arg_key>\n"
- "<arg_value>\"From Zero\" Linkin Park album tracklist complete songs</arg_value>\n"
- "<arg_key>limit</arg_key>\n"
- "<arg_value>3</arg_value>\n"
- "<arg_key>type</arg_key>\n"
- "<arg_value>text</arg_value>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_GLM_4_5}); });
- // Test interleaved thinking
- test_parser_with_streaming(simple_assist_msg("Hello, world!\n\nWhat's up?", "I'm\nthinkingThinking2", "special_function", "{\"arg1\": 1}"),
- "\n<think>I'm\nthinking</think>Hello, world!\n<think>Thinking2</think>What's up?\n<tool_call>special_function\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_GLM_4_5,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(simple_assist_msg("\n<think>I'm\nthinking</think>Hello, world!\n<think>Thinking2</think>What's up?", "", "special_function", "{\"arg1\": 1}"),
- "\n<think>I'm\nthinking</think>Hello, world!\n<think>Thinking2</think>What's up?\n<tool_call>special_function\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_GLM_4_5,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_NONE
- }); });
- // Test template generation for regular content
- test_templates(tmpls.get(), end_tokens, message_assist, tools,
- "\n<think></think>\nHello, world!\nWhat's up?",
- /* expect_grammar_triggered= */ false);
- // Test template generation for tool calls
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "\n<think></think>\n<tool_call>special_function\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>\n",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ false,
- /* common_reasoning_format= */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* ignore_whitespace_differences= */ true
- );
- // Test template generation for tools with optional parameters
- test_templates(tmpls.get(), end_tokens, message_assist_call_noopt, tools,
- "\n<think></think>\n<tool_call>special_function_with_opt\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n</tool_call>\n",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ false,
- /* common_reasoning_format= */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* ignore_whitespace_differences= */ true
- );
- test_templates(tmpls.get(), end_tokens, message_assist_call_withopt, tools,
- "\n<think></think>\n<tool_call>special_function_with_opt\n<arg_key>arg1</arg_key>\n<arg_value>1</arg_value>\n<arg_key>arg2</arg_key>\n<arg_value>2</arg_value>\n</tool_call>\n",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ false,
- /* common_reasoning_format= */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* ignore_whitespace_differences= */ true
- );
- }
- {
- auto tmpls = read_templates("models/templates/Kimi-K2-Thinking.jinja");
- std::vector<std::string> end_tokens{ "<|im_end|>" };
- assert_equals(COMMON_CHAT_FORMAT_KIMI_K2, common_chat_templates_apply(tmpls.get(), inputs_no_tools).format);
- assert_equals(COMMON_CHAT_FORMAT_KIMI_K2, common_chat_templates_apply(tmpls.get(), inputs_tools).format);
- // Test parsing regular content
- assert_msg_equals(message_assist,
- common_chat_parse(
- "Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_KIMI_K2}));
- // Test parsing content with thinking
- assert_msg_equals(message_assist_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK,
- }));
- // Test parsing tool calls
- assert_msg_equals(message_assist_call,
- common_chat_parse(
- "<|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": 1}<|tool_call_end|><|tool_calls_section_end|>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_KIMI_K2}));
- // Test parsing tool calls with thinking
- assert_msg_equals(message_assist_call_thoughts,
- common_chat_parse(
- "<think>I'm\nthinking</think><|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": 1}<|tool_call_end|><|tool_calls_section_end|>",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }));
- // Test tool calls with extra content
- assert_msg_equals(message_assist_call_content,
- common_chat_parse(
- "<|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": 1}<|tool_call_end|><|tool_calls_section_end|>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_KIMI_K2}
- ));
- // Test tool calls with extra content AND thinking
- assert_msg_equals(message_assist_call_thoughts_content,
- common_chat_parse(
- "<think>I'm\nthinking</think><|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": 1}<|tool_call_end|><|tool_calls_section_end|>Hello, world!\nWhat's up?",
- /* is_partial= */ false,
- {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }));
- // Test streaming
- test_parser_with_streaming(message_assist_call_thoughts_content,
- "<think>I'm\nthinking\n</think>Hello, world!\nWhat's up?\n<|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": 1}<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(message_assist_call_thoughts_unparsed,
- "<think>I'm\nthinking</think>\n\n<|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": 1}<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_NONE
- }); });
- test_parser_with_streaming(message_assist_call_thoughts_content,
- "<think>I'm\nthinking\n</think>\n\nHello, world!\nWhat's up?\n\n<|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": 1}<|tool_call_end|><|tool_calls_section_end|>\n",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(message_assist_call_withopt,
- "<|tool_calls_section_begin|><|tool_call_begin|>functions.special_function_with_opt:0<|tool_call_argument_begin|>{\"arg1\": 1, \"arg2\": 2}<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_NONE
- }); });
- test_parser_with_streaming(simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking", "special_function", "{\"arg1\": \"123456\"}"),
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?\n<|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": \"123456\"}<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking", "special_function", "{\"arg1\": [1, 2, \"345\", 6]}"),
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?\n<|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": [1, 2, \"345\", 6]}<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(simple_assist_msg("Hello, world!\nWhat's up?", "I'm\nthinking", "special_function", "{\"arg1\": {\"12\": 34, \"5\": [67, 8], \"9\": \"10\"}}"),
- "<think>I'm\nthinking</think>Hello, world!\nWhat's up?\n<|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": {\"12\": 34, \"5\": [67, 8], \"9\": \"10\"}}<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- /* .format = */ COMMON_CHAT_FORMAT_KIMI_K2,
- /* .reasoning_format = */ COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(
- simple_assist_msg("", "", "complex_function", "{\"name\":\"John Doe\",\"age\":30,\"active\":true,\"score\":95.5}"),
- "<|tool_calls_section_begin|><|tool_call_begin|>functions.complex_function:0<|tool_call_argument_begin|>"
- "{\"name\": \"John Doe\", \"age\": 30, \"active\": true, \"score\": 95.5}"
- "<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_KIMI_K2}); });
- test_parser_with_streaming(
- simple_assist_msg("", "", "web_search", "{\"query\":\"\\\"From Zero\\\" Linkin Park album tracklist complete songs\",\"limit\":3,\"type\":\"text\"}"),
- "<|tool_calls_section_begin|><|tool_call_begin|>functions.web_search:0<|tool_call_argument_begin|>"
- "{\"query\":\"\\\"From Zero\\\" Linkin Park album tracklist complete songs\",\"limit\":3,\"type\":\"text\"}"
- "<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_KIMI_K2}); });
- test_parser_with_streaming(
- simple_assist_msg("", "", "read_file", "{\"args\": [{\"path\": \"src/providers/ThemeProvider.tsx\"}, {\"path\": \"src/components/Header.tsx\"}, {\"path\": \"src/components/ThemeToggle.tsx\"}, {\"path\": \"src/app/globals.css\"}, {\"path\": \"src/app/layout.tsx\"}]}"),
- "<|tool_calls_section_begin|><|tool_call_begin|>functions.read_file:0<|tool_call_argument_begin|>"
- "{\"args\": [{\"path\": \"src/providers/ThemeProvider.tsx\"}, {\"path\": \"src/components/Header.tsx\"}, {\"path\": \"src/components/ThemeToggle.tsx\"}, {\"path\": \"src/app/globals.css\"}, {\"path\": \"src/app/layout.tsx\"}]}"
- "<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_KIMI_K2}); });
- test_parser_with_streaming(
- simple_assist_msg(
- "Let me start by examining the relevant files to understand the current implementation.", "",
- "read_file",
- "{\"files\": [{\"path\": \"src/app/Partners.tsx\", \"line_ranges\": [\"1-100\"]}]}"),
- "Let me start by examining the relevant files to understand the current implementation."
- "<|tool_calls_section_begin|><|tool_call_begin|>functions.read_file:0<|tool_call_argument_begin|>"
- "{\"files\":[{\"path\":\"src/app/Partners.tsx\",\"line_ranges\":[\"1-100\"]}]}"
- "<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_KIMI_K2}); });
- auto multi_tool_msg = simple_assist_msg("Let me call multiple tools.", "I'm thinking.");
- multi_tool_msg.tool_calls.push_back({ "read_file", "{\"files\": [{\"path\": \"src/app/Partners.tsx\", \"line_ranges\": [\"1-100\"]}]}", "" });
- multi_tool_msg.tool_calls.push_back({ "web_search", "{\"query\":\"\\\"From Zero\\\" Linkin Park album tracklist complete songs\",\"limit\":3,\"type\":\"text\"}", "" });
- multi_tool_msg.tool_calls.push_back({ "complex_function", "{\"name\": \"John Doe\", \"age\": 30, \"active\": true, \"score\": 95.5}", "" });
- multi_tool_msg.tool_calls.push_back({ "emoji_function", "{\"message\":\"Hello! 👋 🌟 🚀 Testing emojis: 😀😃😄😁 and symbols: ∑∏∆∇\"}", "" });
- test_parser_with_streaming(multi_tool_msg,
- "<think>I'm thinking.</think>Let me call multiple tools."
- "<|tool_calls_section_begin|>"
- "<|tool_call_begin|>functions.read_file:0<|tool_call_argument_begin|>"
- "{\"files\":[{\"path\":\"src/app/Partners.tsx\",\"line_ranges\":[\"1-100\"]}]}"
- "<|tool_call_end|>"
- "<|tool_call_begin|>functions.web_search:1<|tool_call_argument_begin|>"
- "{\"query\":\"\\\"From Zero\\\" Linkin Park album tracklist complete songs\",\"limit\":3,\"type\":\"text\"}"
- "<|tool_call_end|>"
- "<|tool_call_begin|>functions.complex_function:2<|tool_call_argument_begin|>"
- "{\"name\": \"John Doe\", \"age\": 30, \"active\": true, \"score\": 95.5}"
- "<|tool_call_end|>"
- "<|tool_call_begin|>functions.emoji_function:3<|tool_call_argument_begin|>"
- "{\"message\":\"Hello! 👋 🌟 🚀 Testing emojis: 😀😃😄😁 and symbols: ∑∏∆∇\"}"
- "<|tool_call_end|>"
- "<|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- COMMON_CHAT_FORMAT_KIMI_K2,
- COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(
- simple_assist_msg("", "I'm thinking", "complex_function_in_think", "{\"name\":\"John Doe\",\"age\":30,\"active\":true,\"score\":95.5}"),
- "<think>I'm thinking<|tool_calls_section_begin|><|tool_call_begin|>functions.complex_function_in_think:0<|tool_call_argument_begin|>"
- "{\"name\": \"John Doe\", \"age\": 30, \"active\": true, \"score\": 95.5}"
- "<|tool_call_end|><|tool_calls_section_end|>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- COMMON_CHAT_FORMAT_KIMI_K2,
- COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- test_parser_with_streaming(
- simple_assist_msg("Hello", "I'm thinkingI'm still thinking", "complex_function_in_think", "{\"name\":\"John Doe\",\"age\":30,\"active\":true,\"score\":95.5}"),
- "<think>I'm thinking<|tool_calls_section_begin|><|tool_call_begin|>functions.complex_function_in_think:0<|tool_call_argument_begin|>"
- "{\"name\": \"John Doe\", \"age\": 30, \"active\": true, \"score\": 95.5}"
- "<|tool_call_end|><|tool_calls_section_end|>I'm still thinking</think>Hello",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {
- COMMON_CHAT_FORMAT_KIMI_K2,
- COMMON_REASONING_FORMAT_DEEPSEEK
- }); });
- // Test template rendering
- common_chat_templates_inputs conversation_with_tools = inputs_tools;
- conversation_with_tools.messages.push_back(simple_assist_msg("Let's do it", "Think first", "complex_function", "{\"name\":\"John Doe\",\"age\":30,\"active\":true,\"score\":95.5}"));
- conversation_with_tools.messages.push_back({
- "tool",
- "Tool response 1",
- /* .content_parts = */ {},
- /* .tool_calls = */ {},
- /* .reasoning_content = */ "",
- /* .tool_name = */ "complex_function",
- /* .tool_call_id = */ "",
- });
- conversation_with_tools.messages.push_back(simple_assist_msg("Continue", "Think next", "web_search", "{\"query\":\"\\\"From Zero\\\" Linkin Park album tracklist complete songs\",\"limit\":3,\"type\":\"text\"}"));
- conversation_with_tools.messages.push_back({
- "tool",
- "Tool response 2",
- /* .content_parts = */ {},
- /* .tool_calls = */ {},
- /* .reasoning_content = */ "",
- /* .tool_name = */ "web_search",
- /* .tool_call_id = */ "",
- });
- conversation_with_tools.messages.push_back(simple_assist_msg("CC", "Think last", "read_file", "{\"args\": [{\"path\": \"src/providers/ThemeProvider.tsx\"}, {\"path\": \"src/components/Header.tsx\"}, {\"path\": \"src/components/ThemeToggle.tsx\"}, {\"path\": \"src/app/globals.css\"}, {\"path\": \"src/app/layout.tsx\"}]}"));
- conversation_with_tools.messages.push_back({
- "tool",
- "Tool response 3",
- /* .content_parts = */ {},
- /* .tool_calls = */ {},
- /* .reasoning_content = */ "",
- /* .tool_name = */ "read_file",
- /* .tool_call_id = */ "",
- });
- assert_equals(common_chat_templates_apply(tmpls.get(), conversation_with_tools).prompt, std::string("<|im_system|>tool_declare<|im_middle|>[{\"type\": \"function\", \"function\": {\"name\": \"special_function\", \"description\": \"I'm special\", \"parameters\": {\"type\": \"object\", \"properties\": {\"arg1\": {\"type\": \"integer\", \"description\": \"The arg.\"}}, \"required\": [\"arg1\"]}}}]<|im_end|><|im_system|>system<|im_middle|>You are Kimi, an AI assistant created by Moonshot AI.<|im_end|><|im_user|>user<|im_middle|>Hey there!<|im_end|><|im_assistant|>assistant<|im_middle|><think>Think first</think>Let's do it<|tool_calls_section_begin|><|tool_call_begin|>functions.complex_function:0<|tool_call_argument_begin|>{\"name\":\"John Doe\",\"age\":30,\"active\":true,\"score\":95.5}<|tool_call_end|><|tool_calls_section_end|><|im_end|><|im_system|>complex_function<|im_middle|>## Return of functions.complex_function:0\nTool response 1<|im_end|><|im_assistant|>assistant<|im_middle|><think>Think next</think>Continue<|tool_calls_section_begin|><|tool_call_begin|>functions.web_search:1<|tool_call_argument_begin|>{\"query\":\"\\\"From Zero\\\" Linkin Park album tracklist complete songs\",\"limit\":3,\"type\":\"text\"}<|tool_call_end|><|tool_calls_section_end|><|im_end|><|im_system|>web_search<|im_middle|>## Return of functions.web_search:1\nTool response 2<|im_end|><|im_assistant|>assistant<|im_middle|><think>Think last</think>CC<|tool_calls_section_begin|><|tool_call_begin|>functions.read_file:2<|tool_call_argument_begin|>{\"args\": [{\"path\": \"src/providers/ThemeProvider.tsx\"}, {\"path\": \"src/components/Header.tsx\"}, {\"path\": \"src/components/ThemeToggle.tsx\"}, {\"path\": \"src/app/globals.css\"}, {\"path\": \"src/app/layout.tsx\"}]}<|tool_call_end|><|tool_calls_section_end|><|im_end|><|im_system|>read_file<|im_middle|>## Return of functions.read_file:2\nTool response 3<|im_end|><|im_assistant|>assistant<|im_middle|>"));
- // Test template generation for regular content
- test_templates(tmpls.get(), end_tokens, message_assist, tools,
- "<think></think>Hello, world!\nWhat's up?",
- /* expect_grammar_triggered= */ false);
- // Test template generation for tool calls
- test_templates(tmpls.get(), end_tokens, message_assist_call, tools,
- "<think></think><|tool_calls_section_begin|><|tool_call_begin|>functions.special_function:0<|tool_call_argument_begin|>{\"arg1\": 1}<|tool_call_end|><|tool_calls_section_end|>",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ true,
- /* common_reasoning_format= */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* ignore_whitespace_differences= */ true
- );
- // Test template generation for tools with optional parameters
- test_templates(tmpls.get(), end_tokens, message_assist_call_noopt, tools,
- "<think></think><|tool_calls_section_begin|><|tool_call_begin|>functions.special_function_with_opt:0<|tool_call_argument_begin|>{\"arg1\": 1}<|tool_call_end|><|tool_calls_section_end|>",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ true,
- /* common_reasoning_format= */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* ignore_whitespace_differences= */ true
- );
- test_templates(tmpls.get(), end_tokens, message_assist_call_withopt, tools,
- "<think></think><|tool_calls_section_begin|><|tool_call_begin|>functions.special_function_with_opt:0<|tool_call_argument_begin|>{\"arg1\": 1, \"arg2\": 2}<|tool_call_end|><|tool_calls_section_end|>",
- /* expect_grammar_triggered= */ true,
- /* test_grammar_if_triggered= */ true,
- /* common_reasoning_format= */ COMMON_REASONING_FORMAT_DEEPSEEK,
- /* ignore_whitespace_differences= */ true
- );
- }
- // Test Qwen3-Coder XML format
- {
- // Basic XML tool call parsing
- assert_msg_equals(
- message_assist_call,
- common_chat_parse(
- "<tool_call>\n"
- " <function=special_function>\n"
- " <parameter=arg1>\n"
- " 1\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- /* is_partial= */ false,
- {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}));
- // Multiple parameters with different types
- common_chat_msg expected_multi_param;
- expected_multi_param.role = "assistant";
- expected_multi_param.tool_calls = {
- { "complex_function", "{\"name\":\"John Doe\",\"age\":30,\"active\":true,\"score\":95.5}", "" }
- };
- test_parser_with_streaming(expected_multi_param,
- "<tool_call>\n"
- " <function=complex_function>\n"
- " <parameter=name>\n"
- " John Doe\n"
- " </parameter>\n"
- " <parameter=age>\n"
- " 30\n"
- " </parameter>\n"
- " <parameter=active>\n"
- " true\n"
- " </parameter>\n"
- " <parameter=score>\n"
- " 95.5\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Special characters and Unicode
- common_chat_msg expected_special_chars;
- expected_special_chars.role = "assistant";
- expected_special_chars.tool_calls = {
- { "unicode_function", "{\"message\":\"Hello 世界! 🌍 Special chars: @#$%^&*()\"}", "" }
- };
- test_parser_with_streaming(expected_special_chars,
- "<tool_call>\n"
- " <function=unicode_function>\n"
- " <parameter=message>\n"
- " Hello 世界! 🌍 Special chars: @#$%^&*()\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Multiline content with newlines and indentation
- common_chat_msg expected_multiline;
- expected_multiline.role = "assistant";
- expected_multiline.tool_calls = {
- { "code_function", "{\"code\":\"def hello():\\n print(\\\"Hello, World!\\\")\\n return True\"}", "" }
- };
- test_parser_with_streaming(expected_multiline,
- "<tool_call>\n"
- " <function=code_function>\n"
- " <parameter=code>\n"
- "def hello():\n"
- " print(\"Hello, World!\")\n"
- " return True\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // JSON object as parameter value
- common_chat_msg expected_json_param;
- expected_json_param.role = "assistant";
- expected_json_param.tool_calls = {
- { "json_function", "{\"config\":{\"host\":\"localhost\",\"port\":8080,\"ssl\":false}}", "" }
- };
- test_parser_with_streaming(
- expected_json_param,
- "<tool_call>\n"
- " <function=json_function>\n"
- " <parameter=config>\n"
- " {\"host\": \"localhost\", \"port\": 8080, \"ssl\": false}\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Array as parameter value
- common_chat_msg expected_array_param;
- expected_array_param.role = "assistant";
- expected_array_param.tool_calls = {
- { "array_function", "{\"items\":[\"apple\",\"banana\",\"cherry\"]}", "" }
- };
- test_parser_with_streaming(
- expected_array_param,
- "<tool_call>\n"
- " <function=array_function>\n"
- " <parameter=items>\n"
- " [\"apple\", \"banana\", \"cherry\"]\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Empty parameter
- common_chat_msg expected_empty_param;
- expected_empty_param.role = "assistant";
- expected_empty_param.tool_calls = {
- { "empty_function", "{\"empty_param\":\"\"}", "" }
- };
- test_parser_with_streaming(
- expected_empty_param,
- "<tool_call>\n"
- " <function=empty_function>\n"
- " <parameter=empty_param>\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Boolean values (true/false)
- common_chat_msg expected_boolean;
- expected_boolean.role = "assistant";
- expected_boolean.tool_calls = {
- { "boolean_function", "{\"enabled\":true,\"debug\":false}", "" }
- };
- test_parser_with_streaming(
- expected_boolean,
- "<tool_call>\n"
- " <function=boolean_function>\n"
- " <parameter=enabled>\n"
- " true\n"
- " </parameter>\n"
- " <parameter=debug>\n"
- " false\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Null value
- common_chat_msg expected_null;
- expected_null.role = "assistant";
- expected_null.tool_calls = {
- { "null_function", "{\"optional_param\":null}", "" }
- };
- test_parser_with_streaming(
- expected_null,
- "<tool_call>\n"
- " <function=null_function>\n"
- " <parameter=optional_param>\n"
- " null\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Negative numbers and scientific notation
- common_chat_msg expected_numbers;
- expected_numbers.role = "assistant";
- expected_numbers.tool_calls = {
- { "math_function", "{\"negative\":-42,\"decimal\":-3.14,\"scientific\":1.23e-4}", "" }
- };
- test_parser_with_streaming(
- expected_numbers,
- "<tool_call>\n"
- " <function=math_function>\n"
- " <parameter=negative>\n"
- " -42\n"
- " </parameter>\n"
- " <parameter=decimal>\n"
- " -3.14\n"
- " </parameter>\n"
- " <parameter=scientific>\n"
- " 1.23e-4\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // XML-like content in parameters (should be escaped)
- common_chat_msg expected_xml_content;
- expected_xml_content.role = "assistant";
- expected_xml_content.tool_calls = {
- { "xml_function", "{\"xml_content\":\"<root><item>value</item></root>\"}", "" }
- };
- test_parser_with_streaming(
- expected_xml_content,
- "<tool_call>\n"
- " <function=xml_function>\n"
- " <parameter=xml_content>\n"
- " <root><item>value</item></root>\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Quotes and escape characters
- common_chat_msg expected_quotes;
- expected_quotes.role = "assistant";
- expected_quotes.tool_calls = {
- { "quote_function", "{\"message\":\"She said \\\"Hello!\\\" and left.\"}", "" }
- };
- test_parser_with_streaming(
- expected_quotes,
- "<tool_call>\n"
- " <function=quote_function>\n"
- " <parameter=message>\n"
- " She said \"Hello!\" and left.\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Long parameter value (simplified)
- std::string long_text = "This is a long text parameter that should test the parser's ability to handle larger amounts of text data.";
- common_chat_msg expected_long_text;
- expected_long_text.role = "assistant";
- expected_long_text.tool_calls = {
- { "long_function", "{\"long_text\":\"" + long_text + "\"}", "" }
- };
- test_parser_with_streaming(
- expected_long_text,
- "<tool_call>\n"
- " <function=long_function>\n"
- " <parameter=long_text>\n"
- " " + long_text + "\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Mixed content with text before and after tool call
- common_chat_msg expected_mixed_content;
- expected_mixed_content.role = "assistant";
- expected_mixed_content.content = "I'll help you search for products. ";
- expected_mixed_content.tool_calls = {
- { "search_function", "{\"query\":\"laptops\"}", "" }
- };
- test_parser_with_streaming(
- expected_mixed_content,
- "I'll help you search for products. <tool_call>\n"
- " <function=search_function>\n"
- " <parameter=query>\n"
- " laptops\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Compact format (no extra whitespace)
- common_chat_msg expected_compact;
- expected_compact.role = "assistant";
- expected_compact.tool_calls = {
- { "compact_function", "{\"param\":\"value\"}", "" }
- };
- test_parser_with_streaming(
- expected_compact,
- "<tool_call><function=compact_function><parameter=param>value</parameter></function></tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Function name with underscores and numbers
- common_chat_msg expected_complex_name;
- expected_complex_name.role = "assistant";
- expected_complex_name.tool_calls = {
- { "get_user_data_v2", "{\"user_id\":12345}", "" }
- };
- test_parser_with_streaming(
- expected_complex_name,
- "<tool_call>\n"
- " <function=get_user_data_v2>\n"
- " <parameter=user_id>\n"
- " 12345\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Parameter names with underscores and numbers
- common_chat_msg expected_complex_params;
- expected_complex_params.role = "assistant";
- expected_complex_params.tool_calls = {
- { "test_function", "{\"param_1\":\"value1\",\"param_2_name\":\"value2\",\"param3\":123}", "" }
- };
- test_parser_with_streaming(
- expected_complex_params,
- "<tool_call>\n"
- " <function=test_function>\n"
- " <parameter=param_1>\n"
- " value1\n"
- " </parameter>\n"
- " <parameter=param_2_name>\n"
- " value2\n"
- " </parameter>\n"
- " <parameter=param3>\n"
- " 123\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Very deeply nested XML content in parameter
- common_chat_msg expected_deep_xml;
- expected_deep_xml.role = "assistant";
- expected_deep_xml.tool_calls = {
- { "xml_parser", "{\"xml\":\"<root><level1><level2><level3>deep content</level3></level2></level1></root>\"}", "" }
- };
- test_parser_with_streaming(
- expected_deep_xml,
- "<tool_call>\n"
- " <function=xml_parser>\n"
- " <parameter=xml>\n"
- " <root><level1><level2><level3>deep content</level3></level2></level1></root>\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Parameter with only whitespace
- common_chat_msg expected_whitespace_param;
- expected_whitespace_param.role = "assistant";
- expected_whitespace_param.tool_calls = {
- { "whitespace_function", "{\"spaces\":\"\"}", "" }
- };
- test_parser_with_streaming(
- expected_whitespace_param,
- "<tool_call>\n"
- " <function=whitespace_function>\n"
- " <parameter=spaces>\n"
- " \n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Parameter with tabs and mixed whitespace
- common_chat_msg expected_mixed_whitespace;
- expected_mixed_whitespace.role = "assistant";
- expected_mixed_whitespace.tool_calls = {
- { "tab_function", "{\"content\":\"line1\\n\\tindented line\\n spaces\"}", "" }
- };
- test_parser_with_streaming(
- expected_mixed_whitespace,
- "<tool_call>\n"
- " <function=tab_function>\n"
- " <parameter=content>\n"
- "line1\n"
- "\tindented line\n"
- " spaces\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Control characters and special Unicode
- common_chat_msg expected_control_chars;
- expected_control_chars.role = "assistant";
- expected_control_chars.tool_calls = {
- { "control_function", "{\"text\":\"Line1\\nLine2\\tTabbed\\rCarriage return\"}", "" }
- };
- test_parser_with_streaming(
- expected_control_chars,
- "<tool_call>\n"
- " <function=control_function>\n"
- " <parameter=text>\n"
- "Line1\nLine2\tTabbed\rCarriage return\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Emoji and extended Unicode characters
- common_chat_msg expected_emoji;
- expected_emoji.role = "assistant";
- expected_emoji.tool_calls = {
- { "emoji_function", "{\"message\":\"Hello! 👋 🌟 🚀 Testing emojis: 😀😃😄😁 and symbols: ∑∏∆∇\"}", "" }
- };
- test_parser_with_streaming(
- expected_emoji,
- "<tool_call>\n"
- " <function=emoji_function>\n"
- " <parameter=message>\n"
- " Hello! 👋 🌟 🚀 Testing emojis: 😀😃😄😁 and symbols: ∑∏∆∇\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Mathematical expressions and formulas
- common_chat_msg expected_math;
- expected_math.role = "assistant";
- expected_math.tool_calls = {
- { "math_function", "{\"formula\":\"E = mc² and ∫f(x)dx = F(x) + C\"}", "" }
- };
- test_parser_with_streaming(
- expected_math,
- "<tool_call>\n"
- " <function=math_function>\n"
- " <parameter=formula>\n"
- " E = mc² and ∫f(x)dx = F(x) + C\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // SQL injection-like content (should be safely escaped)
- common_chat_msg expected_sql;
- expected_sql.role = "assistant";
- expected_sql.tool_calls = {
- { "sql_function", "{\"query\":\"SELECT * FROM users WHERE id = 1; DROP TABLE users; --\"}", "" }
- };
- test_parser_with_streaming(
- expected_sql,
- "<tool_call>\n"
- " <function=sql_function>\n"
- " <parameter=query>\n"
- " SELECT * FROM users WHERE id = 1; DROP TABLE users; --\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // HTML/XML injection content
- common_chat_msg expected_html;
- expected_html.role = "assistant";
- expected_html.tool_calls = {
- { "html_function", "{\"content\":\"<script>alert('xss')</script><img src=x onerror=alert(1)>\"}", "" }
- };
- test_parser_with_streaming(
- expected_html,
- "<tool_call>\n"
- " <function=html_function>\n"
- " <parameter=content>\n"
- " <script>alert('xss')</script><img src=x onerror=alert(1)>\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Binary-like content (base64)
- common_chat_msg expected_binary;
- expected_binary.role = "assistant";
- expected_binary.tool_calls = {
- { "binary_function", "{\"data\":\"SGVsbG8gV29ybGQhIFRoaXMgaXMgYmFzZTY0IGVuY29kZWQgdGV4dC4=\"}", "" }
- };
- test_parser_with_streaming(
- expected_binary,
- "<tool_call>\n"
- " <function=binary_function>\n"
- " <parameter=data>\n"
- " SGVsbG8gV29ybGQhIFRoaXMgaXMgYmFzZTY0IGVuY29kZWQgdGV4dC4=\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- // Very large numbers (should be parsed as scientific notation)
- common_chat_msg expected_large_numbers;
- expected_large_numbers.role = "assistant";
- expected_large_numbers.tool_calls = {
- { "number_function", "{\"big_int\":1e+60}", "" } // Large number becomes scientific notation
- };
- test_parser_with_streaming(
- expected_large_numbers,
- "<tool_call>\n"
- " <function=number_function>\n"
- " <parameter=big_int>\n"
- " 999999999999999999999999999999999999999999999999999999999999\n"
- " </parameter>\n"
- " </function>\n"
- "</tool_call>",
- [&](const std::string &msg) { return common_chat_parse(msg, /* is_partial= */ true, {COMMON_CHAT_FORMAT_QWEN3_CODER_XML}); });
- }
- {
- // Qwen3-Coder template
- auto tmpls = read_templates("models/templates/Qwen3-Coder.jinja");
- common_chat_templates_inputs inputs;
- inputs.messages = { message_user };
- common_chat_tool qwen_union_tool {
- /* .name = */ "qwen_union",
- /* .description = */ "Test tool for union/anyOf handling",
- /* .parameters = */ R"({
- "type": "object",
- "properties": {
- "priority": { "type": ["number", "null"] },
- "maybe_text": { "anyOf": [ { "type": "string" } ] },
- "config": { "anyOf": [ { "type": "object" }, { "type": "null" } ] }
- },
- "required": []
- })",
- };
- inputs.tools = { qwen_union_tool };
- auto params = common_chat_templates_apply(tmpls.get(), inputs);
- assert_equals(COMMON_CHAT_FORMAT_QWEN3_CODER_XML, params.format);
- assert_equals(false, params.grammar.empty());
- // Grammar should compile successfully
- auto grammar = build_grammar(params.grammar);
- GGML_ASSERT(grammar && "Failed to build Qwen3-Coder grammar with union types");
- }
- }
- static void test_template_output_peg_parsers() {
- printf("[%s]\n", __func__);
- // JSON schemas
- const char * invoice_schema = R"({
- "type": "object",
- "properties": {
- "amount": {"type": "number"},
- "date": {"type": "string"}
- }
- })";
- {
- // Ministral-3-14B-Reasoning-2512
- auto tmpls = read_templates("models/templates/mistralai-Ministral-3-14B-Reasoning-2512.jinja");
- // Test basic message
- test_peg_parser(tmpls.get(), [&](auto & t) {
- t.input = "Hello, world!\nWhat's up?";
- t.expect = message_assist;
- });
- // Test basic message and reasoning with reasoning_format = none
- test_peg_parser(tmpls.get(), [&](auto & t) {
- t.input = "[THINK]I'm\nthinking[/THINK]Hello, world!\nWhat's up?";
- t.expect.content = "[THINK]I'm\nthinking[/THINK]Hello, world!\nWhat's up?";
- });
- // Test basic message and reasoning with reasoning_format = auto
- test_peg_parser(tmpls.get(), [&](auto & t) {
- t.input = "[THINK]I'm\nthinking[/THINK]Hello, world!\nWhat's up?";
- t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
- t.expect = message_assist_thoughts;
- });
- // Test tool call
- test_peg_parser(tmpls.get(), [&](auto & t) {
- t.input = R"([TOOL_CALLS]special_function[ARGS]{"arg1":1})";
- t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
- t.params.tools = {special_function_tool};
- t.expect = message_assist_call;
- });
- // Test tool call with reasoning
- test_peg_parser(tmpls.get(), [&](auto & t) {
- t.input = "[THINK]I'm\nthinking[/THINK]"
- R"([TOOL_CALLS]special_function[ARGS]{"arg1":1})";
- t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
- t.params.tools = {special_function_tool};
- t.expect = message_assist_call_thoughts;
- });
- // Test parallel tool calls
- test_peg_parser(tmpls.get(), [&](auto & t) {
- t.input = R"([TOOL_CALLS]special_function[ARGS]{"arg1": 1})"
- R"([TOOL_CALLS]special_function_with_opt[ARGS]{"arg1": 1, "arg2": 2})";
- t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
- t.params.parallel_tool_calls = true;
- t.params.tools = {special_function_tool, special_function_tool_with_optional_param};
- t.expect.tool_calls = {{
- /* .name = */ "special_function",
- /* .arguments = */ R"({"arg1": 1})",
- /* .id = */ {},
- }, {
- /* .name = */ "special_function_with_opt",
- /* .arguments = */ R"({"arg1": 1, "arg2": 2})",
- /* .id = */ {},
- }};
- });
- // Test response format
- test_peg_parser(tmpls.get(), [&](auto & t) {
- t.input = "[THINK]I need to output the invoice details in JSON[/THINK]"
- "```json\n"
- R"({"amount": 123.45, "date": "2025-12-03"})"
- "\n```";
- t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
- t.params.json_schema = invoice_schema;
- t.expect.reasoning_content = "I need to output the invoice details in JSON";
- t.expect.content =R"({"amount": 123.45, "date": "2025-12-03"})";
- });
- }
- {
- // NVIDIA Nemotron-3 Nano
- auto tmpls = read_templates("models/templates/NVIDIA-Nemotron-3-Nano-30B-A3B-BF16.jinja");
- // Test basic message
- test_peg_parser(tmpls.get(), [&](auto & t) {
- t.input = "Hello, world!\nWhat's up?";
- t.expect = message_assist;
- });
- // Test basic message and reasoning with reasoning_format = none
- test_peg_parser(tmpls.get(), [&](auto & t) {
- t.input = "I'm\nthinking\n</think>\nHello, world!\nWhat's up?";
- t.expect.content = "I'm\nthinking\n</think>\nHello, world!\nWhat's up?";
- });
- // Test basic message and reasoning with reasoning_format = auto
        // Reasoning split: text before the stray "</think>" closer should land
        // in reasoning_content, the remainder in content (presumably the
        // template opens <think> implicitly — TODO confirm against template).
        test_peg_parser(tmpls.get(), [&](auto & t) {
            t.input = "I'm\nthinking\n</think>\nHello, world!\nWhat's up?";
            t.params.enable_thinking = true;
            t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
            t.expect = message_assist_thoughts;
        });
        // Test tool call: a single XML-style <tool_call> block invoking
        // special_function with one integer argument.
        test_peg_parser(tmpls.get(), [&](auto & t) {
            t.input =
                "<tool_call>\n"
                "<function=special_function>\n"
                "<parameter=arg1>\n"
                "1\n"
                "</parameter>\n"
                "</function>\n"
                "</tool_call>";
            t.params.enable_thinking = false;
            t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
            t.params.tools = {special_function_tool};
            t.expect = message_assist_call;
        });
        // Test tool call with reasoning: reasoning text precedes the
        // <tool_call> block; both parts must be extracted.
        test_peg_parser(tmpls.get(), [&](auto & t) {
            t.input =
                "I'm\nthinking\n</think>\n"
                "<tool_call>\n"
                "<function=special_function>\n"
                "<parameter=arg1>\n"
                "1\n"
                "</parameter>\n"
                "</function>\n"
                "</tool_call>";
            t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
            t.params.tools = {special_function_tool};
            t.expect = message_assist_call_thoughts;
        });
        // Test parallel tool calls: two back-to-back <tool_call> blocks must
        // produce two tool_calls entries, in input order.
        test_peg_parser(tmpls.get(), [&](auto & t) {
            t.input =
                "<tool_call>\n"
                "<function=special_function>\n"
                "<parameter=arg1>\n"
                "1\n"
                "</parameter>\n"
                "</function>\n"
                "</tool_call>\n"
                "<tool_call>\n"
                "<function=special_function_with_opt>\n"
                "<parameter=arg1>\n"
                "1\n"
                "</parameter>\n"
                "<parameter=arg2>\n"
                "2\n"
                "</parameter>\n"
                "</function>\n"
                "</tool_call>";
            t.params.enable_thinking = false;
            t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
            t.params.parallel_tool_calls = true;
            t.params.tools = {special_function_tool, special_function_tool_with_optional_param};
            t.expect.tool_calls = {{
                /* .name = */ "special_function",
                /* .arguments = */ R"({"arg1": 1})",
                /* .id = */ {},
            }, {
                /* .name = */ "special_function_with_opt",
                /* .arguments = */ R"({"arg1": 1, "arg2": 2})",
                /* .id = */ {},
            }};
        });
        // Test tool call with string parameter: a multi-line code body must be
        // JSON-escaped into the "code" argument (note the expected value has
        // no trailing newline — the one before </parameter> is stripped).
        test_peg_parser(tmpls.get(), [&](auto & t) {
            t.input =
                "<tool_call>\n"
                "<function=python>\n"
                "<parameter=code>\n"
                "def hello():\n"
                " print(\"Hello, world!\")\n"
                "\n"
                "hello()\n"
                "</parameter>\n"
                "</function>\n"
                "</tool_call>";
            t.params.enable_thinking = false;
            t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
            t.params.tools = {python_tool};
            t.expect.tool_calls = {{
                /* .name = */ "python",
                /* .arguments = */ "{\"code\": \"def hello():\\n print(\\\"Hello, world!\\\")\\n\\nhello()\"}",
                /* .id = */ {},
            }};
        });
        // Test tool call with string parameter and no closing </parameter> tag:
        // the parser should still recover the full code argument when the
        // parameter is terminated directly by </function>.
        test_peg_parser(tmpls.get(), [&](auto & t) {
            t.input =
                "<tool_call>\n"
                "<function=python>\n"
                "<parameter=code>\n"
                "def hello():\n"
                " print(\"Hello, world!\")\n"
                "\n"
                "hello()\n"
                "</function>\n"
                "</tool_call>";
            t.params.enable_thinking = false;
            t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
            t.params.tools = {python_tool};
            t.expect.tool_calls = {{
                /* .name = */ "python",
                /* .arguments = */ "{\"code\": \"def hello():\\n print(\\\"Hello, world!\\\")\\n\\nhello()\"}",
                /* .id = */ {},
            }};
        });
        // Test response format: with a json_schema constraint, the text after
        // the reasoning section is treated as the raw JSON payload.
        test_peg_parser(tmpls.get(), [&](auto & t) {
            t.input =
                "I need to output the invoice details in JSON\n"
                "</think>\n"
                R"({"amount": 123.45, "date": "2025-12-03"})";
            t.params.reasoning_format = COMMON_REASONING_FORMAT_AUTO;
            t.params.json_schema = invoice_schema;
            t.expect.reasoning_content = "I need to output the invoice details in JSON";
            t.expect.content = R"({"amount": 123.45, "date": "2025-12-03"})";
        });
- }
- }
- static void test_msg_diffs_compute() {
- printf("[%s]\n", __func__);
- {
- common_chat_msg msg1;
- common_chat_msg msg2;
- msg2.content = "Hello, world!";
- common_chat_msg_diff diff;
- diff.content_delta = "Hello, world!";
- assert_equals(
- {diff},
- common_chat_msg_diff::compute_diffs(msg1, msg2));
- }
- {
- common_chat_msg msg1;
- msg1.content = "Hello,";
- common_chat_msg msg2;
- msg2.content = "Hello, world!";
- common_chat_msg_diff diff;
- diff.content_delta = " world!";
- assert_equals(
- {diff},
- common_chat_msg_diff::compute_diffs(msg1, msg2));
- }
- {
- common_chat_msg msg0;
- common_chat_msg msg1;
- msg1.tool_calls = { { "special_function", "{\"ar", /* .id = */ "123" } };
- common_chat_msg msg2;
- msg2.tool_calls = { { "special_function", "{\"arg1\": 1}", /* .id = */ "123" } };
- common_chat_msg_diff diff01;
- diff01.tool_call_index = 0;
- diff01.tool_call_delta.name = "special_function";
- diff01.tool_call_delta.id = "123";
- diff01.tool_call_delta.arguments = "{\"ar";
- assert_equals(
- {diff01},
- common_chat_msg_diff::compute_diffs(msg0, msg1));
- common_chat_msg_diff diff12;
- diff12.tool_call_index = 0;
- // Note: neither id nor name change here.
- diff12.tool_call_delta.arguments = "g1\": 1}";
- assert_equals(
- {diff12},
- common_chat_msg_diff::compute_diffs(msg1, msg2));
- }
- {
- common_chat_msg msg0;
- common_chat_msg msg2;
- msg2.tool_calls = {
- { "f1", "{\"arg1\": 1}", /* .id = */ "123" },
- { "f2", "{\"arg2\": 2}", /* .id = */ "222" },
- };
- common_chat_msg_diff diff1;
- diff1.tool_call_index = 0;
- diff1.tool_call_delta.name = "f1";
- diff1.tool_call_delta.id = "123";
- diff1.tool_call_delta.arguments = "{\"arg1\": 1}";
- common_chat_msg_diff diff2;
- diff2.tool_call_index = 1;
- diff2.tool_call_delta.name = "f2";
- diff2.tool_call_delta.id = "222";
- diff2.tool_call_delta.arguments = "{\"arg2\": 2}";
- assert_equals(
- {diff1, diff2},
- common_chat_msg_diff::compute_diffs(msg0, msg2));
- }
- }
- int main(int argc, char ** argv) {
- common_log_set_verbosity_thold(999);
- // try {
- #ifndef _WIN32
- if (argc > 1) {
- common_chat_templates_inputs inputs;
- common_chat_msg msg;
- msg.role = "user";
- msg.content = "Hey";
- inputs.messages = {msg};
- inputs.tools = { special_function_tool };
- std::cout << "| Template | Format |\n";
- std::cout << "|----------|--------|\n";
- for (int i = 1; i < argc; i++) {
- try {
- std::string path = argv[i];
- if (path.rfind(".jinja") != path.size() - 6) {
- std::cerr << "Skipping non-jinja file: " << path << '\n';
- continue;
- }
- auto tmpls = read_templates(path);
- auto parts = string_split(path, "/");
- auto name = parts[parts.size() - 1];
- auto format = common_chat_format_name(common_chat_templates_apply(tmpls.get(), inputs).format);
- std::cout << "| " << name << " | " << format << " |\n";
- } catch (const std::exception & e) {
- std::cerr << "Failed to process " << argv[i] << ": " << e.what() << '\n';
- }
- }
- } else
- #endif
- {
- test_msg_diffs_compute();
- test_msgs_oaicompat_json_conversion();
- test_tools_oaicompat_json_conversion();
- test_template_output_parsers();
- test_template_output_peg_parsers();
- std::cout << "\n[chat] All tests passed!" << '\n';
- }
- return 0;
- // } catch (const std::exception & e) {
- // std::cerr << "Error: " << e.what() << '\n';
- // return 1;
- // }
- }
|