| 123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702
70370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262126312641265126612671268126912701271127212731274127512761
27712781279128012811282128312841285128612871288128912901291129212931294129512961297129812991300130113021303130413051306130713081309131013111312131313141315131613171318131913201321132213231324132513261327132813291330133113321333133413351336133713381339134013411342134313441345134613471348134913501351135213531354135513561357135813591360136113621363136413651366136713681369137013711372137313741375137613771378137913801381138213831384138513861387138813891390139113921393139413951396139713981399140014011402140314041405140614071408140914101411141214131414141514161417141814191420142114221423142414251426142714281429143014311432143314341435143614371438143914401441144214431444144514461447144814491450145114521453145414551456145714581459146014611462146314641465146614671468146914701471147214731474147514761477147814791480148114821483148414851486148714881489149014911492149314941495149614971498149915001501150215031504150515061507150815091510151115121513151415151516151715181519152015211522152315241525152615271528152915301531153215331534153515361537153815391540154115421543154415451546154715481549155015511552155315541555155615571558155915601561156215631564156515661567156815691570157115721573157415751576157715781579158015811582158315841585158615871588158915901591159215931594159515961597159815991600160116021603160416051606160716081609161016111612161316141615161616171618161916201621162216231624162516261627162816291630163116321633163416351636163716381639164016411642164316441645164616471648164916501651165216531654165516561657165816591660166116621663166416651666166716681669167016711672167316741675167616771678167916801681168216831684168516861687168816891690169116921693169416951696169716981699170017011702170317041705170617071708170917101711171217131714171517161717171817191720172117221723172417251726172717281729173017311732173317341735173617371738173917401741174217431744174517461747174817491750175117521753175417551756175717581759176017611762176317641765176617671768176917701771177217731774177517761
77717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762
2772278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557 |
- #include "llama-arch.h"
- #include "llama-impl.h"
- #include <map>
- #include <set>
// Canonical architecture identifier for each llm_arch enum value.
// These strings are the values written to / read from the "general.architecture"
// GGUF key, i.e. they are part of the on-disk model format: renaming an entry
// breaks loading of existing model files, so treat them as frozen.
static const std::map<llm_arch, const char *> LLM_ARCH_NAMES = {
    { LLM_ARCH_CLIP, "clip" }, // dummy, only used by llama-quantize
    { LLM_ARCH_LLAMA, "llama" },
    { LLM_ARCH_LLAMA4, "llama4" },
    { LLM_ARCH_DECI, "deci" },
    { LLM_ARCH_FALCON, "falcon" },
    { LLM_ARCH_GROK, "grok" },
    { LLM_ARCH_GPT2, "gpt2" },
    { LLM_ARCH_GPTJ, "gptj" },
    { LLM_ARCH_GPTNEOX, "gptneox" },
    { LLM_ARCH_MPT, "mpt" },
    { LLM_ARCH_BAICHUAN, "baichuan" },
    { LLM_ARCH_STARCODER, "starcoder" },
    { LLM_ARCH_REFACT, "refact" },
    { LLM_ARCH_BERT, "bert" },
    { LLM_ARCH_MODERN_BERT, "modern-bert" },
    { LLM_ARCH_NOMIC_BERT, "nomic-bert" },
    { LLM_ARCH_NOMIC_BERT_MOE, "nomic-bert-moe" },
    { LLM_ARCH_NEO_BERT, "neo-bert" },
    { LLM_ARCH_JINA_BERT_V2, "jina-bert-v2" },
    { LLM_ARCH_JINA_BERT_V3, "jina-bert-v3" },
    { LLM_ARCH_BLOOM, "bloom" },
    { LLM_ARCH_STABLELM, "stablelm" },
    { LLM_ARCH_QWEN, "qwen" },
    { LLM_ARCH_QWEN2, "qwen2" },
    { LLM_ARCH_QWEN2MOE, "qwen2moe" },
    { LLM_ARCH_QWEN2VL, "qwen2vl" },
    { LLM_ARCH_QWEN3, "qwen3" },
    { LLM_ARCH_QWEN3MOE, "qwen3moe" },
    { LLM_ARCH_QWEN3NEXT, "qwen3next" },
    { LLM_ARCH_QWEN3VL, "qwen3vl" },
    { LLM_ARCH_QWEN3VLMOE, "qwen3vlmoe" },
    { LLM_ARCH_PHI2, "phi2" },
    { LLM_ARCH_PHI3, "phi3" },
    { LLM_ARCH_PHIMOE, "phimoe" },
    { LLM_ARCH_PLAMO, "plamo" },
    { LLM_ARCH_PLAMO2, "plamo2" },
    { LLM_ARCH_PLAMO3, "plamo3" },
    { LLM_ARCH_CODESHELL, "codeshell" },
    { LLM_ARCH_ORION, "orion" },
    { LLM_ARCH_INTERNLM2, "internlm2" },
    { LLM_ARCH_MINICPM, "minicpm" },
    { LLM_ARCH_MINICPM3, "minicpm3" },
    { LLM_ARCH_GEMMA, "gemma" },
    { LLM_ARCH_GEMMA2, "gemma2" },
    { LLM_ARCH_GEMMA3, "gemma3" },
    { LLM_ARCH_GEMMA3N, "gemma3n" },
    { LLM_ARCH_GEMMA_EMBEDDING, "gemma-embedding" },
    { LLM_ARCH_STARCODER2, "starcoder2" },
    { LLM_ARCH_MAMBA, "mamba" },
    { LLM_ARCH_MAMBA2, "mamba2" },
    { LLM_ARCH_JAMBA, "jamba" },
    { LLM_ARCH_FALCON_H1, "falcon-h1" },
    { LLM_ARCH_XVERSE, "xverse" },
    { LLM_ARCH_COMMAND_R, "command-r" },
    { LLM_ARCH_COHERE2, "cohere2" },
    { LLM_ARCH_DBRX, "dbrx" },
    { LLM_ARCH_OLMO, "olmo" },
    { LLM_ARCH_OLMO2, "olmo2" },
    { LLM_ARCH_OLMOE, "olmoe" },
    { LLM_ARCH_OPENELM, "openelm" },
    { LLM_ARCH_ARCTIC, "arctic" },
    { LLM_ARCH_DEEPSEEK, "deepseek" },
    { LLM_ARCH_DEEPSEEK2, "deepseek2" },
    { LLM_ARCH_CHATGLM, "chatglm" },
    { LLM_ARCH_GLM4, "glm4" },
    { LLM_ARCH_GLM4_MOE, "glm4moe" },
    { LLM_ARCH_BITNET, "bitnet" },
    { LLM_ARCH_T5, "t5" },
    { LLM_ARCH_T5ENCODER, "t5encoder" },
    { LLM_ARCH_JAIS, "jais" },
    { LLM_ARCH_NEMOTRON, "nemotron" },
    // NOTE(review): the two nemotron_h entries use '_' where most other
    // multi-word names use '-'; presumably the persisted format — do not "normalize".
    { LLM_ARCH_NEMOTRON_H, "nemotron_h" },
    { LLM_ARCH_NEMOTRON_H_MOE, "nemotron_h_moe" },
    { LLM_ARCH_EXAONE, "exaone" },
    { LLM_ARCH_EXAONE4, "exaone4" },
    { LLM_ARCH_RWKV6, "rwkv6" },
    { LLM_ARCH_RWKV6QWEN2, "rwkv6qwen2" },
    { LLM_ARCH_RWKV7, "rwkv7" },
    { LLM_ARCH_ARWKV7, "arwkv7" },
    { LLM_ARCH_GRANITE, "granite" },
    { LLM_ARCH_GRANITE_MOE, "granitemoe" },
    { LLM_ARCH_GRANITE_HYBRID, "granitehybrid" },
    { LLM_ARCH_CHAMELEON, "chameleon" },
    { LLM_ARCH_WAVTOKENIZER_DEC, "wavtokenizer-dec" },
    { LLM_ARCH_PLM, "plm" },
    { LLM_ARCH_BAILINGMOE, "bailingmoe" },
    { LLM_ARCH_BAILINGMOE2, "bailingmoe2" },
    { LLM_ARCH_DOTS1, "dots1" },
    { LLM_ARCH_ARCEE, "arcee" },
    { LLM_ARCH_AFMOE, "afmoe" },
    { LLM_ARCH_ERNIE4_5, "ernie4_5" },
    { LLM_ARCH_ERNIE4_5_MOE, "ernie4_5-moe" },
    { LLM_ARCH_HUNYUAN_MOE, "hunyuan-moe" },
    { LLM_ARCH_HUNYUAN_DENSE, "hunyuan-dense" },
    { LLM_ARCH_SMOLLM3, "smollm3" },
    // enum name and string intentionally differ here (model ships as "gpt-oss")
    { LLM_ARCH_OPENAI_MOE, "gpt-oss" },
    { LLM_ARCH_LFM2, "lfm2" },
    { LLM_ARCH_LFM2MOE, "lfm2moe" },
    { LLM_ARCH_DREAM, "dream" },
    { LLM_ARCH_SMALLTHINKER, "smallthinker" },
    { LLM_ARCH_LLADA, "llada" },
    { LLM_ARCH_LLADA_MOE, "llada-moe" },
    { LLM_ARCH_SEED_OSS, "seed_oss" },
    { LLM_ARCH_GROVEMOE, "grovemoe" },
    { LLM_ARCH_APERTUS, "apertus" },
    { LLM_ARCH_MINIMAX_M2, "minimax-m2" },
    { LLM_ARCH_COGVLM, "cogvlm" },
    { LLM_ARCH_RND1, "rnd1" },
    { LLM_ARCH_PANGU_EMBED, "pangu-embedded" },
    { LLM_ARCH_MISTRAL3, "mistral3" },
    { LLM_ARCH_MIMO2, "mimo2" },
    { LLM_ARCH_LLAMA_EMBED, "llama-embed" },
    { LLM_ARCH_MAINCODER, "maincoder" },
    // fallback entry for unrecognized architectures
    { LLM_ARCH_UNKNOWN, "(unknown)" },
};
// GGUF metadata key string for each llm_kv enum value.
// Keys containing "%s" are per-architecture: the placeholder is substituted
// with the architecture name from LLM_ARCH_NAMES (presumably by a format
// helper elsewhere in this file — confirm at the call site). Keys without a
// placeholder ("general.*", "tokenizer.*", "split.*", "adapter.*") are global.
// These strings are part of the on-disk GGUF format: do not rename them.
static const std::map<llm_kv, const char *> LLM_KV_NAMES = {
    { LLM_KV_GENERAL_TYPE, "general.type" },
    { LLM_KV_GENERAL_ARCHITECTURE, "general.architecture" },
    { LLM_KV_GENERAL_QUANTIZATION_VERSION, "general.quantization_version" },
    { LLM_KV_GENERAL_ALIGNMENT, "general.alignment" },
    { LLM_KV_GENERAL_FILE_TYPE, "general.file_type" },
    // sampling defaults that can be embedded in the model file
    { LLM_KV_GENERAL_SAMPLING_SEQUENCE, "general.sampling.sequence" },
    { LLM_KV_GENERAL_SAMPLING_TOP_K, "general.sampling.top_k" },
    { LLM_KV_GENERAL_SAMPLING_TOP_P, "general.sampling.top_p" },
    { LLM_KV_GENERAL_SAMPLING_MIN_P, "general.sampling.min_p" },
    { LLM_KV_GENERAL_SAMPLING_XTC_PROBABILITY, "general.sampling.xtc_probability" },
    { LLM_KV_GENERAL_SAMPLING_XTC_THRESHOLD, "general.sampling.xtc_threshold" },
    { LLM_KV_GENERAL_SAMPLING_TEMP, "general.sampling.temp" },
    { LLM_KV_GENERAL_SAMPLING_PENALTY_LAST_N, "general.sampling.penalty_last_n" },
    { LLM_KV_GENERAL_SAMPLING_PENALTY_REPEAT, "general.sampling.penalty_repeat" },
    { LLM_KV_GENERAL_SAMPLING_MIROSTAT, "general.sampling.mirostat" },
    { LLM_KV_GENERAL_SAMPLING_MIROSTAT_TAU, "general.sampling.mirostat_tau" },
    { LLM_KV_GENERAL_SAMPLING_MIROSTAT_ETA, "general.sampling.mirostat_eta" },
    // provenance / descriptive metadata
    { LLM_KV_GENERAL_NAME, "general.name" },
    { LLM_KV_GENERAL_AUTHOR, "general.author" },
    { LLM_KV_GENERAL_VERSION, "general.version" },
    { LLM_KV_GENERAL_URL, "general.url" },
    { LLM_KV_GENERAL_DESCRIPTION, "general.description" },
    { LLM_KV_GENERAL_LICENSE, "general.license" },
    { LLM_KV_GENERAL_SOURCE_URL, "general.source.url" },
    { LLM_KV_GENERAL_SOURCE_HF_REPO, "general.source.huggingface.repository" },
    // per-architecture hyperparameters ("%s" = architecture name)
    { LLM_KV_VOCAB_SIZE, "%s.vocab_size" },
    { LLM_KV_CONTEXT_LENGTH, "%s.context_length" },
    { LLM_KV_EMBEDDING_LENGTH, "%s.embedding_length" },
    { LLM_KV_EMBEDDING_LENGTH_OUT, "%s.embedding_length_out" },
    { LLM_KV_FEATURES_LENGTH, "%s.features_length" },
    { LLM_KV_BLOCK_COUNT, "%s.block_count" },
    { LLM_KV_LEADING_DENSE_BLOCK_COUNT, "%s.leading_dense_block_count" },
    { LLM_KV_FEED_FORWARD_LENGTH, "%s.feed_forward_length" },
    { LLM_KV_EXPERT_FEED_FORWARD_LENGTH, "%s.expert_feed_forward_length" },
    { LLM_KV_EXPERT_SHARED_FEED_FORWARD_LENGTH, "%s.expert_shared_feed_forward_length" },
    { LLM_KV_EXPERT_CHUNK_FEED_FORWARD_LENGTH, "%s.expert_chunk_feed_forward_length" },
    { LLM_KV_USE_PARALLEL_RESIDUAL, "%s.use_parallel_residual" },
    { LLM_KV_TENSOR_DATA_LAYOUT, "%s.tensor_data_layout" },
    // mixture-of-experts configuration
    { LLM_KV_EXPERT_COUNT, "%s.expert_count" },
    { LLM_KV_EXPERT_USED_COUNT, "%s.expert_used_count" },
    { LLM_KV_EXPERT_SHARED_COUNT, "%s.expert_shared_count" },
    { LLM_KV_EXPERT_GROUP_COUNT, "%s.expert_group_count" },
    { LLM_KV_EXPERT_GROUP_USED_COUNT, "%s.expert_group_used_count" },
    { LLM_KV_EXPERT_WEIGHTS_SCALE, "%s.expert_weights_scale" },
    { LLM_KV_EXPERT_WEIGHTS_NORM, "%s.expert_weights_norm" },
    { LLM_KV_EXPERT_GATING_FUNC, "%s.expert_gating_func" },
    { LLM_KV_EXPERT_GROUP_SCALE, "%s.expert_group_scale" },
    { LLM_KV_EXPERTS_PER_GROUP, "%s.experts_per_group" },
    { LLM_KV_MOE_EVERY_N_LAYERS, "%s.moe_every_n_layers" },
    { LLM_KV_NEXTN_PREDICT_LAYERS, "%s.nextn_predict_layers" },
    { LLM_KV_NUM_DEEPSTACK_LAYERS, "%s.n_deepstack_layers" },
    { LLM_KV_POOLING_TYPE, "%s.pooling_type" },
    { LLM_KV_LOGIT_SCALE, "%s.logit_scale" },
    { LLM_KV_DECODER_START_TOKEN_ID, "%s.decoder_start_token_id" },
    { LLM_KV_DECODER_BLOCK_COUNT, "%s.decoder_block_count" },
    { LLM_KV_ATTN_LOGIT_SOFTCAPPING, "%s.attn_logit_softcapping" },
    { LLM_KV_ROUTER_LOGIT_SOFTCAPPING, "%s.router_logit_softcapping" },
    { LLM_KV_FINAL_LOGIT_SOFTCAPPING, "%s.final_logit_softcapping" },
    { LLM_KV_SWIN_NORM, "%s.swin_norm" },
    { LLM_KV_RESCALE_EVERY_N_LAYERS, "%s.rescale_every_n_layers" },
    { LLM_KV_TIME_MIX_EXTRA_DIM, "%s.time_mix_extra_dim" },
    { LLM_KV_TIME_DECAY_EXTRA_DIM, "%s.time_decay_extra_dim" },
    { LLM_KV_RESIDUAL_SCALE, "%s.residual_scale" },
    { LLM_KV_EMBEDDING_SCALE, "%s.embedding_scale" },
    { LLM_KV_TOKEN_SHIFT_COUNT, "%s.token_shift_count" },
    { LLM_KV_INTERLEAVE_MOE_LAYER_STEP, "%s.interleave_moe_layer_step" },
    // attention hyperparameters
    { LLM_KV_ATTENTION_HEAD_COUNT, "%s.attention.head_count" },
    { LLM_KV_ATTENTION_HEAD_COUNT_KV, "%s.attention.head_count_kv" },
    { LLM_KV_ATTENTION_MAX_ALIBI_BIAS, "%s.attention.max_alibi_bias" },
    { LLM_KV_ATTENTION_CLAMP_KQV, "%s.attention.clamp_kqv" },
    { LLM_KV_ATTENTION_KEY_LENGTH, "%s.attention.key_length" },
    { LLM_KV_ATTENTION_VALUE_LENGTH, "%s.attention.value_length" },
    { LLM_KV_ATTENTION_LAYERNORM_EPS, "%s.attention.layer_norm_epsilon" },
    { LLM_KV_ATTENTION_LAYERNORM_RMS_EPS, "%s.attention.layer_norm_rms_epsilon" },
    { LLM_KV_ATTENTION_GROUPNORM_EPS, "%s.attention.group_norm_epsilon" },
    { LLM_KV_ATTENTION_GROUPNORM_GROUPS, "%s.attention.group_norm_groups" },
    { LLM_KV_ATTENTION_CAUSAL, "%s.attention.causal" },
    { LLM_KV_ATTENTION_Q_LORA_RANK, "%s.attention.q_lora_rank" },
    { LLM_KV_ATTENTION_KV_LORA_RANK, "%s.attention.kv_lora_rank" },
    { LLM_KV_ATTENTION_DECAY_LORA_RANK, "%s.attention.decay_lora_rank" },
    { LLM_KV_ATTENTION_ICLR_LORA_RANK, "%s.attention.iclr_lora_rank" },
    { LLM_KV_ATTENTION_VALUE_RESIDUAL_MIX_LORA_RANK, "%s.attention.value_residual_mix_lora_rank" },
    { LLM_KV_ATTENTION_GATE_LORA_RANK, "%s.attention.gate_lora_rank" },
    { LLM_KV_ATTENTION_RELATIVE_BUCKETS_COUNT, "%s.attention.relative_buckets_count" },
    { LLM_KV_ATTENTION_SLIDING_WINDOW, "%s.attention.sliding_window" },
    { LLM_KV_ATTENTION_SLIDING_WINDOW_PATTERN, "%s.attention.sliding_window_pattern" },
    { LLM_KV_ATTENTION_SCALE, "%s.attention.scale" },
    { LLM_KV_ATTENTION_OUTPUT_SCALE, "%s.attention.output_scale" },
    { LLM_KV_ATTENTION_TEMPERATURE_LENGTH, "%s.attention.temperature_length" },
    { LLM_KV_ATTENTION_TEMPERATURE_SCALE, "%s.attention.temperature_scale" },
    { LLM_KV_ATTENTION_KEY_LENGTH_MLA, "%s.attention.key_length_mla" },
    { LLM_KV_ATTENTION_VALUE_LENGTH_MLA, "%s.attention.value_length_mla" },
    // RoPE configuration
    { LLM_KV_ROPE_DIMENSION_COUNT, "%s.rope.dimension_count" },
    { LLM_KV_ROPE_DIMENSION_SECTIONS, "%s.rope.dimension_sections" },
    { LLM_KV_ROPE_FREQ_BASE, "%s.rope.freq_base" },
    { LLM_KV_ROPE_FREQ_BASE_SWA, "%s.rope.freq_base_swa" },
    { LLM_KV_ROPE_SCALE_LINEAR, "%s.rope.scale_linear" },
    { LLM_KV_ROPE_SCALING_TYPE, "%s.rope.scaling.type" },
    { LLM_KV_ROPE_SCALING_FACTOR, "%s.rope.scaling.factor" },
    { LLM_KV_ROPE_SCALING_ATTN_FACTOR, "%s.rope.scaling.attn_factor" },
    { LLM_KV_ROPE_SCALING_ORIG_CTX_LEN, "%s.rope.scaling.original_context_length" },
    { LLM_KV_ROPE_SCALING_FINETUNED, "%s.rope.scaling.finetuned" },
    { LLM_KV_ROPE_SCALING_YARN_LOG_MUL, "%s.rope.scaling.yarn_log_multiplier" },
    { LLM_KV_ROPE_SCALING_YARN_EXT_FACTOR, "%s.rope.scaling.yarn_ext_factor" },
    { LLM_KV_ROPE_SCALING_YARN_ATTN_FACTOR, "%s.rope.scaling.yarn_attn_factor" },
    { LLM_KV_ROPE_SCALING_YARN_BETA_FAST, "%s.rope.scaling.yarn_beta_fast" },
    { LLM_KV_ROPE_SCALING_YARN_BETA_SLOW, "%s.rope.scaling.yarn_beta_slow" },
    // multi-file (split) model metadata — global, not per-architecture
    { LLM_KV_SPLIT_NO, "split.no" },
    { LLM_KV_SPLIT_COUNT, "split.count" },
    { LLM_KV_SPLIT_TENSORS_COUNT, "split.tensors.count" },
    // state-space model (Mamba-style) hyperparameters
    { LLM_KV_SSM_CONV_KERNEL, "%s.ssm.conv_kernel" },
    { LLM_KV_SSM_INNER_SIZE, "%s.ssm.inner_size" },
    { LLM_KV_SSM_STATE_SIZE, "%s.ssm.state_size" },
    { LLM_KV_SSM_TIME_STEP_RANK, "%s.ssm.time_step_rank" },
    { LLM_KV_SSM_GROUP_COUNT, "%s.ssm.group_count" },
    { LLM_KV_SSM_DT_B_C_RMS, "%s.ssm.dt_b_c_rms" },
    { LLM_KV_WKV_HEAD_SIZE, "%s.wkv.head_size" },
    { LLM_KV_POSNET_EMBEDDING_LENGTH, "%s.posnet.embedding_length" },
    { LLM_KV_POSNET_BLOCK_COUNT, "%s.posnet.block_count" },
    { LLM_KV_CONVNEXT_EMBEDDING_LENGTH, "%s.convnext.embedding_length" },
    { LLM_KV_CONVNEXT_BLOCK_COUNT, "%s.convnext.block_count" },
    { LLM_KV_CLASSIFIER_OUTPUT_LABELS, "%s.classifier.output_labels" },
    { LLM_KV_SHORTCONV_L_CACHE, "%s.shortconv.l_cache" },
    // sentence-transformers dense modules feature dims
    { LLM_KV_DENSE_2_FEAT_IN, "%s.dense_2_feat_in" },
    { LLM_KV_DENSE_2_FEAT_OUT, "%s.dense_2_feat_out" },
    { LLM_KV_DENSE_3_FEAT_IN, "%s.dense_3_feat_in" },
    { LLM_KV_DENSE_3_FEAT_OUT, "%s.dense_3_feat_out" },
    // tokenizer metadata — global keys
    { LLM_KV_TOKENIZER_MODEL, "tokenizer.ggml.model" },
    { LLM_KV_TOKENIZER_PRE, "tokenizer.ggml.pre" },
    { LLM_KV_TOKENIZER_LIST, "tokenizer.ggml.tokens" },
    { LLM_KV_TOKENIZER_TOKEN_TYPE, "tokenizer.ggml.token_type" },
    { LLM_KV_TOKENIZER_TOKEN_TYPE_COUNT, "tokenizer.ggml.token_type_count" },
    { LLM_KV_TOKENIZER_SCORES, "tokenizer.ggml.scores" },
    { LLM_KV_TOKENIZER_MERGES, "tokenizer.ggml.merges" },
    { LLM_KV_TOKENIZER_BOS_ID, "tokenizer.ggml.bos_token_id" },
    { LLM_KV_TOKENIZER_EOS_ID, "tokenizer.ggml.eos_token_id" },
    { LLM_KV_TOKENIZER_EOT_ID, "tokenizer.ggml.eot_token_id" },
    { LLM_KV_TOKENIZER_EOM_ID, "tokenizer.ggml.eom_token_id" },
    { LLM_KV_TOKENIZER_UNK_ID, "tokenizer.ggml.unknown_token_id" },
    // NOTE(review): "seperator" is misspelled but appears to be the established
    // on-disk key — correcting it would break existing models; verify upstream
    // before ever changing this string.
    { LLM_KV_TOKENIZER_SEP_ID, "tokenizer.ggml.seperator_token_id" },
    { LLM_KV_TOKENIZER_PAD_ID, "tokenizer.ggml.padding_token_id" },
    { LLM_KV_TOKENIZER_CLS_ID, "tokenizer.ggml.cls_token_id" },
    { LLM_KV_TOKENIZER_MASK_ID, "tokenizer.ggml.mask_token_id" },
    { LLM_KV_TOKENIZER_ADD_BOS, "tokenizer.ggml.add_bos_token" },
    { LLM_KV_TOKENIZER_ADD_EOS, "tokenizer.ggml.add_eos_token" },
    { LLM_KV_TOKENIZER_ADD_SEP, "tokenizer.ggml.add_sep_token" },
    { LLM_KV_TOKENIZER_ADD_PREFIX, "tokenizer.ggml.add_space_prefix" },
    { LLM_KV_TOKENIZER_REMOVE_EXTRA_WS, "tokenizer.ggml.remove_extra_whitespaces" },
    { LLM_KV_TOKENIZER_PRECOMPILED_CHARSMAP, "tokenizer.ggml.precompiled_charsmap" },
    { LLM_KV_TOKENIZER_HF_JSON, "tokenizer.huggingface.json" },
    { LLM_KV_TOKENIZER_RWKV, "tokenizer.rwkv.world" },
    { LLM_KV_TOKENIZER_CHAT_TEMPLATE, "tokenizer.chat_template" },
    // fill-in-the-middle special token ids
    { LLM_KV_TOKENIZER_FIM_PRE_ID, "tokenizer.ggml.fim_pre_token_id" },
    { LLM_KV_TOKENIZER_FIM_SUF_ID, "tokenizer.ggml.fim_suf_token_id" },
    { LLM_KV_TOKENIZER_FIM_MID_ID, "tokenizer.ggml.fim_mid_token_id" },
    { LLM_KV_TOKENIZER_FIM_PAD_ID, "tokenizer.ggml.fim_pad_token_id" },
    { LLM_KV_TOKENIZER_FIM_REP_ID, "tokenizer.ggml.fim_rep_token_id" },
    { LLM_KV_TOKENIZER_FIM_SEP_ID, "tokenizer.ggml.fim_sep_token_id" },
    // LoRA / adapter metadata
    { LLM_KV_ADAPTER_TYPE, "adapter.type" },
    { LLM_KV_ADAPTER_LORA_ALPHA, "adapter.lora.alpha" },
    { LLM_KV_ADAPTER_LORA_TASK_NAME, "adapter.lora.task_name" },
    { LLM_KV_ADAPTER_LORA_PROMPT_PREFIX, "adapter.lora.prompt_prefix" },
    { LLM_KV_ADAPTER_ALORA_INVOCATION_TOKENS, "adapter.alora.invocation_tokens" },
    // xIELU activation parameters
    { LLM_KV_XIELU_ALPHA_N, "xielu.alpha_n" },
    { LLM_KV_XIELU_ALPHA_P, "xielu.alpha_p" },
    { LLM_KV_XIELU_BETA, "xielu.beta" },
    { LLM_KV_XIELU_EPS, "xielu.eps" },
    // deprecated
    { LLM_KV_TOKENIZER_PREFIX_ID, "tokenizer.ggml.prefix_token_id" },
    { LLM_KV_TOKENIZER_SUFFIX_ID, "tokenizer.ggml.suffix_token_id" },
    { LLM_KV_TOKENIZER_MIDDLE_ID, "tokenizer.ggml.middle_token_id" },
};
- static const std::map<llm_tensor, const char *> LLM_TENSOR_NAMES = {
- { LLM_TENSOR_TOKEN_EMBD, "token_embd" },
- { LLM_TENSOR_OUTPUT_NORM, "output_norm" },
- { LLM_TENSOR_OUTPUT_NORM_LFM2, "token_embd_norm" }, // fix for wrong tensor name
- { LLM_TENSOR_OUTPUT, "output" },
- { LLM_TENSOR_ROPE_FREQS, "rope_freqs" },
- { LLM_TENSOR_ATTN_NORM, "blk.%d.attn_norm" },
- { LLM_TENSOR_ATTN_Q, "blk.%d.attn_q" },
- { LLM_TENSOR_ATTN_K, "blk.%d.attn_k" },
- { LLM_TENSOR_ATTN_V, "blk.%d.attn_v" },
- { LLM_TENSOR_ATTN_OUT, "blk.%d.attn_output" },
- { LLM_TENSOR_ATTN_ROT_EMBD, "blk.%d.attn_rot_embd" },
- { LLM_TENSOR_FFN_GATE_INP, "blk.%d.ffn_gate_inp" },
- { LLM_TENSOR_FFN_NORM, "blk.%d.ffn_norm" },
- { LLM_TENSOR_FFN_GATE, "blk.%d.ffn_gate" },
- { LLM_TENSOR_FFN_DOWN, "blk.%d.ffn_down" },
- { LLM_TENSOR_FFN_UP, "blk.%d.ffn_up" },
- { LLM_TENSOR_FFN_GATE_EXP, "blk.%d.ffn_gate.%d" },
- { LLM_TENSOR_FFN_DOWN_EXP, "blk.%d.ffn_down.%d" },
- { LLM_TENSOR_FFN_UP_EXP, "blk.%d.ffn_up.%d" },
- { LLM_TENSOR_FFN_GATE_EXPS, "blk.%d.ffn_gate_exps" },
- { LLM_TENSOR_FFN_DOWN_EXPS, "blk.%d.ffn_down_exps" },
- { LLM_TENSOR_FFN_UP_EXPS, "blk.%d.ffn_up_exps" },
- { LLM_TENSOR_ATTN_POST_NORM, "blk.%d.post_attention_norm" },
- { LLM_TENSOR_ATTN_Q_NORM, "blk.%d.attn_q_norm" },
- { LLM_TENSOR_ATTN_K_NORM, "blk.%d.attn_k_norm" },
- { LLM_TENSOR_ATTN_GATE, "blk.%d.attn_gate" },
- { LLM_TENSOR_FFN_POST_NORM, "blk.%d.post_ffw_norm" },
- { LLM_TENSOR_FFN_GATE_SHEXP, "blk.%d.ffn_gate_shexp" },
- { LLM_TENSOR_FFN_UP_SHEXP, "blk.%d.ffn_up_shexp" },
- { LLM_TENSOR_FFN_DOWN_SHEXP, "blk.%d.ffn_down_shexp" },
- { LLM_TENSOR_FFN_EXP_PROBS_B, "blk.%d.exp_probs_b" },
- { LLM_TENSOR_ATTN_NORM_2, "blk.%d.attn_norm_2" },
- { LLM_TENSOR_ATTN_QKV, "blk.%d.attn_qkv" },
- { LLM_TENSOR_LAYER_OUT_NORM, "blk.%d.layer_output_norm" },
- { LLM_TENSOR_ATTN_OUT_NORM, "blk.%d.attn_output_norm" },
- { LLM_TENSOR_POS_EMBD, "position_embd" },
- { LLM_TENSOR_FFN_ACT, "blk.%d.ffn.act" },
- { LLM_TENSOR_TOKEN_EMBD_NORM, "token_embd_norm" },
- { LLM_TENSOR_TOKEN_TYPES, "token_types" },
- { LLM_TENSOR_CLS, "cls" },
- { LLM_TENSOR_CLS_OUT, "cls.output" },
- { LLM_TENSOR_ENC_OUTPUT_NORM, "enc.output_norm" },
- { LLM_TENSOR_FFN_GATE_INP_SHEXP, "blk.%d.ffn_gate_inp_shexp" },
- { LLM_TENSOR_SSM_A_NOSCAN, "blk.%d.ssm_a" },
- { LLM_TENSOR_SSM_CONV1D, "blk.%d.ssm_conv1d" },
- { LLM_TENSOR_SSM_DT, "blk.%d.ssm_dt" },
- { LLM_TENSOR_SSM_BETA_ALPHA, "blk.%d.ssm_ba" },
- { LLM_TENSOR_SSM_IN, "blk.%d.ssm_in" },
- { LLM_TENSOR_SSM_NORM, "blk.%d.ssm_norm" },
- { LLM_TENSOR_SSM_OUT, "blk.%d.ssm_out" },
- { LLM_TENSOR_ROPE_FACTORS_LONG, "rope_factors_long" },
- { LLM_TENSOR_ROPE_FACTORS_SHORT, "rope_factors_short" },
- { LLM_TENSOR_SSM_X, "blk.%d.ssm_x" },
- { LLM_TENSOR_SSM_A, "blk.%d.ssm_a" },
- { LLM_TENSOR_SSM_D, "blk.%d.ssm_d" },
- { LLM_TENSOR_SSM_DT_NORM, "blk.%d.ssm_dt_norm" },
- { LLM_TENSOR_SSM_B_NORM, "blk.%d.ssm_b_norm" },
- { LLM_TENSOR_SSM_C_NORM, "blk.%d.ssm_c_norm" },
- { LLM_TENSOR_ATTN_Q_A_NORM, "blk.%d.attn_q_a_norm" },
- { LLM_TENSOR_ATTN_KV_A_NORM, "blk.%d.attn_kv_a_norm" },
- { LLM_TENSOR_ATTN_Q_A, "blk.%d.attn_q_a" },
- { LLM_TENSOR_ATTN_Q_B, "blk.%d.attn_q_b" },
- { LLM_TENSOR_ATTN_KV_A_MQA, "blk.%d.attn_kv_a_mqa" },
- { LLM_TENSOR_ATTN_KV_B, "blk.%d.attn_kv_b" },
- { LLM_TENSOR_PER_LAYER_TOKEN_EMBD, "per_layer_token_embd" },
- { LLM_TENSOR_PER_LAYER_MODEL_PROJ, "per_layer_model_proj" },
- { LLM_TENSOR_PER_LAYER_PROJ_NORM, "per_layer_proj_norm" },
- { LLM_TENSOR_ALTUP_UNEMBD_PROJ, "altup_unembd_proj" },
- { LLM_TENSOR_ALTUP_PROJ, "altup_proj" },
- { LLM_TENSOR_PER_LAYER_INP_GATE, "blk.%d.inp_gate" },
- { LLM_TENSOR_PER_LAYER_PROJ, "blk.%d.proj" },
- { LLM_TENSOR_PER_LAYER_POST_NORM, "blk.%d.post_norm" },
- { LLM_TENSOR_ALTUP_CORRECT_COEF, "blk.%d.altup_correct_coef" },
- { LLM_TENSOR_ALTUP_CORRECT_SCALE, "blk.%d.altup_correct_scale" },
- { LLM_TENSOR_ALTUP_PREDICT_COEF, "blk.%d.altup_predict_coef" },
- { LLM_TENSOR_ALTUP_ROUTER, "blk.%d.altup_router" },
- { LLM_TENSOR_ALTUP_ROUTER_NORM, "blk.%d.altup_router_norm" },
- { LLM_TENSOR_LAUREL_L, "blk.%d.laurel_l" },
- { LLM_TENSOR_LAUREL_R, "blk.%d.laurel_r" },
- { LLM_TENSOR_LAUREL_POST_NORM, "blk.%d.laurel_post_norm" },
- { LLM_TENSOR_DENSE_2_OUT, "dense_2" },
- { LLM_TENSOR_DENSE_3_OUT, "dense_3" },
- { LLM_TENSOR_FFN_NORM_EXPS, "blk.%d.ffn_norm_exps" },
- { LLM_TENSOR_ATTN_K_B, "blk.%d.attn_k_b" },
- { LLM_TENSOR_ATTN_V_B, "blk.%d.attn_v_b" },
- { LLM_TENSOR_NEXTN_EH_PROJ, "blk.%d.nextn.eh_proj" },
- { LLM_TENSOR_NEXTN_EMBED_TOKENS, "blk.%d.nextn.embed_tokens" },
- { LLM_TENSOR_NEXTN_ENORM, "blk.%d.nextn.enorm" },
- { LLM_TENSOR_NEXTN_HNORM, "blk.%d.nextn.hnorm" },
- { LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD, "blk.%d.nextn.shared_head_head" },
- { LLM_TENSOR_NEXTN_SHARED_HEAD_NORM, "blk.%d.nextn.shared_head_norm" },
- { LLM_TENSOR_ATTN_SUB_NORM, "blk.%d.attn_sub_norm" },
- { LLM_TENSOR_FFN_SUB_NORM, "blk.%d.ffn_sub_norm" },
- { LLM_TENSOR_DEC_OUTPUT_NORM, "dec.output_norm" },
- { LLM_TENSOR_DEC_ATTN_NORM, "dec.blk.%d.attn_norm" },
- { LLM_TENSOR_DEC_ATTN_Q, "dec.blk.%d.attn_q" },
- { LLM_TENSOR_DEC_ATTN_K, "dec.blk.%d.attn_k" },
- { LLM_TENSOR_DEC_ATTN_V, "dec.blk.%d.attn_v" },
- { LLM_TENSOR_DEC_ATTN_OUT, "dec.blk.%d.attn_o" },
- { LLM_TENSOR_DEC_ATTN_REL_B, "dec.blk.%d.attn_rel_b" },
- { LLM_TENSOR_DEC_CROSS_ATTN_NORM, "dec.blk.%d.cross_attn_norm" },
- { LLM_TENSOR_DEC_CROSS_ATTN_Q, "dec.blk.%d.cross_attn_q" },
- { LLM_TENSOR_DEC_CROSS_ATTN_K, "dec.blk.%d.cross_attn_k" },
- { LLM_TENSOR_DEC_CROSS_ATTN_V, "dec.blk.%d.cross_attn_v" },
- { LLM_TENSOR_DEC_CROSS_ATTN_OUT, "dec.blk.%d.cross_attn_o" },
- { LLM_TENSOR_DEC_CROSS_ATTN_REL_B, "dec.blk.%d.cross_attn_rel_b" },
- { LLM_TENSOR_DEC_FFN_NORM, "dec.blk.%d.ffn_norm" },
- { LLM_TENSOR_DEC_FFN_GATE, "dec.blk.%d.ffn_gate" },
- { LLM_TENSOR_DEC_FFN_DOWN, "dec.blk.%d.ffn_down" },
- { LLM_TENSOR_DEC_FFN_UP, "dec.blk.%d.ffn_up" },
- { LLM_TENSOR_ENC_ATTN_NORM, "enc.blk.%d.attn_norm" },
- { LLM_TENSOR_ENC_ATTN_Q, "enc.blk.%d.attn_q" },
- { LLM_TENSOR_ENC_ATTN_K, "enc.blk.%d.attn_k" },
- { LLM_TENSOR_ENC_ATTN_V, "enc.blk.%d.attn_v" },
- { LLM_TENSOR_ENC_ATTN_OUT, "enc.blk.%d.attn_o" },
- { LLM_TENSOR_ENC_ATTN_REL_B, "enc.blk.%d.attn_rel_b" },
- { LLM_TENSOR_ENC_FFN_NORM, "enc.blk.%d.ffn_norm" },
- { LLM_TENSOR_ENC_FFN_GATE, "enc.blk.%d.ffn_gate" },
- { LLM_TENSOR_ENC_FFN_DOWN, "enc.blk.%d.ffn_down" },
- { LLM_TENSOR_ENC_FFN_UP, "enc.blk.%d.ffn_up" },
- { LLM_TENSOR_TIME_MIX_W1, "blk.%d.time_mix_w1" },
- { LLM_TENSOR_TIME_MIX_W2, "blk.%d.time_mix_w2" },
- { LLM_TENSOR_TIME_MIX_LERP_X, "blk.%d.time_mix_lerp_x" },
- { LLM_TENSOR_TIME_MIX_LERP_W, "blk.%d.time_mix_lerp_w" },
- { LLM_TENSOR_TIME_MIX_LERP_K, "blk.%d.time_mix_lerp_k" },
- { LLM_TENSOR_TIME_MIX_LERP_V, "blk.%d.time_mix_lerp_v" },
- { LLM_TENSOR_TIME_MIX_LERP_R, "blk.%d.time_mix_lerp_r" },
- { LLM_TENSOR_TIME_MIX_LERP_G, "blk.%d.time_mix_lerp_g" },
- { LLM_TENSOR_TIME_MIX_LERP_FUSED, "blk.%d.time_mix_lerp_fused" },
- { LLM_TENSOR_TIME_MIX_FIRST, "blk.%d.time_mix_first" },
- { LLM_TENSOR_TIME_MIX_DECAY, "blk.%d.time_mix_decay" },
- { LLM_TENSOR_TIME_MIX_DECAY_W1, "blk.%d.time_mix_decay_w1" },
- { LLM_TENSOR_TIME_MIX_DECAY_W2, "blk.%d.time_mix_decay_w2" },
- { LLM_TENSOR_TIME_MIX_KEY, "blk.%d.time_mix_key" },
- { LLM_TENSOR_TIME_MIX_VALUE, "blk.%d.time_mix_value" },
- { LLM_TENSOR_TIME_MIX_RECEPTANCE, "blk.%d.time_mix_receptance" },
- { LLM_TENSOR_TIME_MIX_GATE, "blk.%d.time_mix_gate" },
- { LLM_TENSOR_TIME_MIX_LN, "blk.%d.time_mix_ln" },
- { LLM_TENSOR_TIME_MIX_OUTPUT, "blk.%d.time_mix_output" },
- { LLM_TENSOR_CHANNEL_MIX_LERP_K, "blk.%d.channel_mix_lerp_k" },
- { LLM_TENSOR_CHANNEL_MIX_LERP_R, "blk.%d.channel_mix_lerp_r" },
- { LLM_TENSOR_CHANNEL_MIX_KEY, "blk.%d.channel_mix_key" },
- { LLM_TENSOR_CHANNEL_MIX_VALUE, "blk.%d.channel_mix_value" },
- { LLM_TENSOR_CHANNEL_MIX_RECEPTANCE, "blk.%d.channel_mix_receptance" },
- { LLM_TENSOR_TIME_MIX_W0, "blk.%d.time_mix_w0" },
- { LLM_TENSOR_TIME_MIX_A0, "blk.%d.time_mix_a0" },
- { LLM_TENSOR_TIME_MIX_A1, "blk.%d.time_mix_a1" },
- { LLM_TENSOR_TIME_MIX_A2, "blk.%d.time_mix_a2" },
- { LLM_TENSOR_TIME_MIX_V0, "blk.%d.time_mix_v0" },
- { LLM_TENSOR_TIME_MIX_V1, "blk.%d.time_mix_v1" },
- { LLM_TENSOR_TIME_MIX_V2, "blk.%d.time_mix_v2" },
- { LLM_TENSOR_TIME_MIX_G1, "blk.%d.time_mix_g1" },
- { LLM_TENSOR_TIME_MIX_G2, "blk.%d.time_mix_g2" },
- { LLM_TENSOR_TIME_MIX_K_K, "blk.%d.time_mix_k_k" },
- { LLM_TENSOR_TIME_MIX_K_A, "blk.%d.time_mix_k_a" },
- { LLM_TENSOR_TIME_MIX_R_K, "blk.%d.time_mix_r_k" },
- { LLM_TENSOR_CONV1D, "conv1d" },
- { LLM_TENSOR_CONVNEXT_DW, "convnext.%d.dw" },
- { LLM_TENSOR_CONVNEXT_NORM, "convnext.%d.norm" },
- { LLM_TENSOR_CONVNEXT_PW1, "convnext.%d.pw1" },
- { LLM_TENSOR_CONVNEXT_PW2, "convnext.%d.pw2" },
- { LLM_TENSOR_CONVNEXT_GAMMA, "convnext.%d.gamma" },
- { LLM_TENSOR_POS_NET_CONV1, "posnet.%d.conv1" },
- { LLM_TENSOR_POS_NET_CONV2, "posnet.%d.conv2" },
- { LLM_TENSOR_POS_NET_NORM, "posnet.%d.norm" },
- { LLM_TENSOR_POS_NET_NORM1, "posnet.%d.norm1" },
- { LLM_TENSOR_POS_NET_NORM2, "posnet.%d.norm2" },
- { LLM_TENSOR_POS_NET_ATTN_NORM, "posnet.%d.attn_norm" },
- { LLM_TENSOR_POS_NET_ATTN_Q, "posnet.%d.attn_q" },
- { LLM_TENSOR_POS_NET_ATTN_K, "posnet.%d.attn_k" },
- { LLM_TENSOR_POS_NET_ATTN_V, "posnet.%d.attn_v" },
- { LLM_TENSOR_POS_NET_ATTN_OUT, "posnet.%d.attn_output" },
- { LLM_TENSOR_ATTN_SINKS, "blk.%d.attn_sinks" },
- { LLM_TENSOR_SHORTCONV_CONV, "blk.%d.shortconv.conv" },
- { LLM_TENSOR_SHORTCONV_INPROJ, "blk.%d.shortconv.in_proj" },
- { LLM_TENSOR_SHORTCONV_OUTPROJ, "blk.%d.shortconv.out_proj" },
- { LLM_TENSOR_FFN_GATE_CHEXPS, "blk.%d.ffn_gate_chexps" },
- { LLM_TENSOR_FFN_DOWN_CHEXPS, "blk.%d.ffn_down_chexps" },
- { LLM_TENSOR_FFN_UP_CHEXPS, "blk.%d.ffn_up_chexps" },
- { LLM_TENSOR_VISEXP_ATTN_QKV, "blk.%d.vis_attn_qkv" },
- { LLM_TENSOR_VISEXP_ATTN_OUT, "blk.%d.vis_attn_output" },
- { LLM_TENSOR_VISEXP_FFN_GATE, "blk.%d.vis_gate" },
- { LLM_TENSOR_VISEXP_FFN_DOWN, "blk.%d.vis_down" },
- { LLM_TENSOR_VISEXP_FFN_UP, "blk.%d.vis_up" },
- };
- static std::set<llm_tensor> llm_get_tensor_names(llm_arch arch) {
- switch (arch) {
- case LLM_ARCH_CLIP:
- return {};
- case LLM_ARCH_LLAMA:
- case LLM_ARCH_DECI:
- case LLM_ARCH_MISTRAL3:
- case LLM_ARCH_LLAMA_EMBED:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_EXP,
- LLM_TENSOR_FFN_DOWN_EXP,
- LLM_TENSOR_FFN_UP_EXP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_ARCEE:
- case LLM_ARCH_STARCODER2:
- case LLM_ARCH_NEMOTRON:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_AFMOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_GATE,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_POST_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- };
- case LLM_ARCH_LLAMA4:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_EXP,
- LLM_TENSOR_FFN_DOWN_EXP,
- LLM_TENSOR_FFN_UP_EXP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- };
- case LLM_ARCH_BAICHUAN:
- case LLM_ARCH_ORION:
- case LLM_ARCH_XVERSE:
- case LLM_ARCH_EXAONE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_FALCON:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_NORM_2,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_GROK:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_EXP,
- LLM_TENSOR_FFN_DOWN_EXP,
- LLM_TENSOR_FFN_UP_EXP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_POST_NORM,
- LLM_TENSOR_LAYER_OUT_NORM,
- LLM_TENSOR_ATTN_OUT_NORM,
- };
- case LLM_ARCH_GPT2:
- case LLM_ARCH_STARCODER:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_POS_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_DOWN,
- };
- case LLM_ARCH_GPTNEOX:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_MPT:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_ACT,
- LLM_TENSOR_POS_EMBD,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- };
- case LLM_ARCH_REFACT:
- case LLM_ARCH_QWEN2:
- case LLM_ARCH_QWEN2VL:
- case LLM_ARCH_INTERNLM2:
- case LLM_ARCH_GRANITE:
- case LLM_ARCH_ERNIE4_5:
- case LLM_ARCH_SMOLLM3:
- case LLM_ARCH_DREAM:
- case LLM_ARCH_LLADA:
- case LLM_ARCH_PANGU_EMBED:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_BERT:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_TOKEN_TYPES,
- LLM_TENSOR_POS_EMBD,
- LLM_TENSOR_ATTN_OUT_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_LAYER_OUT_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_CLS,
- LLM_TENSOR_CLS_OUT,
- };
- case LLM_ARCH_NOMIC_BERT:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_TOKEN_TYPES,
- LLM_TENSOR_ATTN_OUT_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_LAYER_OUT_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_NOMIC_BERT_MOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_TOKEN_TYPES,
- LLM_TENSOR_ATTN_OUT_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_LAYER_OUT_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_NEO_BERT:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_ENC_OUTPUT_NORM,
- LLM_TENSOR_CLS,
- LLM_TENSOR_CLS_OUT,
- };
- case LLM_ARCH_MODERN_BERT:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_CLS,
- LLM_TENSOR_CLS_OUT,
- };
- case LLM_ARCH_JINA_BERT_V2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_TOKEN_TYPES,
- LLM_TENSOR_ATTN_NORM_2,
- LLM_TENSOR_ATTN_OUT_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_LAYER_OUT_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_CLS,
- };
- case LLM_ARCH_JINA_BERT_V3:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_TOKEN_TYPES,
- LLM_TENSOR_ATTN_OUT_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_LAYER_OUT_NORM,
- };
- case LLM_ARCH_BLOOM:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_DOWN,
- };
- case LLM_ARCH_STABLELM:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- };
- case LLM_ARCH_QWEN:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_QWEN2MOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_INP_SHEXP,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- };
- case LLM_ARCH_QWEN3:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_CLS_OUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_QWEN3MOE:
- case LLM_ARCH_QWEN3VLMOE:
- case LLM_ARCH_OLMOE:
- case LLM_ARCH_LLADA_MOE:
- case LLM_ARCH_RND1:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_QWEN3NEXT:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_INP_SHEXP,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- LLM_TENSOR_SSM_A_NOSCAN,
- LLM_TENSOR_SSM_CONV1D,
- LLM_TENSOR_SSM_DT,
- LLM_TENSOR_SSM_BETA_ALPHA,
- LLM_TENSOR_SSM_IN,
- LLM_TENSOR_SSM_NORM,
- LLM_TENSOR_SSM_OUT,
- };
- case LLM_ARCH_QWEN3VL:
- case LLM_ARCH_CHAMELEON:
- case LLM_ARCH_HUNYUAN_DENSE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_PHI2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_PHI3:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FACTORS_LONG,
- LLM_TENSOR_ROPE_FACTORS_SHORT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_PHIMOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FACTORS_LONG,
- LLM_TENSOR_ROPE_FACTORS_SHORT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_PLAMO:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_PLAMO2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_SSM_IN,
- LLM_TENSOR_SSM_CONV1D,
- LLM_TENSOR_SSM_X,
- LLM_TENSOR_SSM_DT,
- LLM_TENSOR_SSM_A,
- LLM_TENSOR_SSM_D,
- LLM_TENSOR_SSM_OUT,
- LLM_TENSOR_SSM_DT_NORM,
- LLM_TENSOR_SSM_B_NORM,
- LLM_TENSOR_SSM_C_NORM,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_FFN_POST_NORM,
- };
- case LLM_ARCH_PLAMO3:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_POST_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_CODESHELL:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_MINICPM:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ROPE_FACTORS_LONG,
- LLM_TENSOR_ROPE_FACTORS_SHORT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_EXP,
- LLM_TENSOR_FFN_DOWN_EXP,
- LLM_TENSOR_FFN_UP_EXP,
- };
- case LLM_ARCH_MINICPM3:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FACTORS_LONG,
- LLM_TENSOR_ROPE_FACTORS_SHORT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q_A_NORM,
- LLM_TENSOR_ATTN_KV_A_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_A,
- LLM_TENSOR_ATTN_Q_B,
- LLM_TENSOR_ATTN_KV_A_MQA,
- LLM_TENSOR_ATTN_KV_B,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_DOWN,
- };
- case LLM_ARCH_GEMMA:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_GEMMA2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_POST_NORM,
- };
- case LLM_ARCH_GEMMA3:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_POST_NORM,
- };
- case LLM_ARCH_GEMMA3N:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_POST_NORM,
- LLM_TENSOR_PER_LAYER_TOKEN_EMBD,
- LLM_TENSOR_PER_LAYER_MODEL_PROJ,
- LLM_TENSOR_PER_LAYER_PROJ_NORM,
- LLM_TENSOR_ALTUP_UNEMBD_PROJ,
- LLM_TENSOR_ALTUP_PROJ,
- LLM_TENSOR_PER_LAYER_INP_GATE,
- LLM_TENSOR_PER_LAYER_PROJ,
- LLM_TENSOR_PER_LAYER_POST_NORM,
- LLM_TENSOR_ALTUP_CORRECT_COEF,
- LLM_TENSOR_ALTUP_CORRECT_SCALE,
- LLM_TENSOR_ALTUP_PREDICT_COEF,
- LLM_TENSOR_ALTUP_ROUTER,
- LLM_TENSOR_ALTUP_ROUTER_NORM,
- LLM_TENSOR_LAUREL_L,
- LLM_TENSOR_LAUREL_R,
- LLM_TENSOR_LAUREL_POST_NORM,
- };
- case LLM_ARCH_GEMMA_EMBEDDING:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_DENSE_2_OUT,
- LLM_TENSOR_DENSE_3_OUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_POST_NORM,
- };
- case LLM_ARCH_MAMBA:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_SSM_IN,
- LLM_TENSOR_SSM_CONV1D,
- LLM_TENSOR_SSM_X,
- LLM_TENSOR_SSM_DT,
- LLM_TENSOR_SSM_A,
- LLM_TENSOR_SSM_D,
- LLM_TENSOR_SSM_OUT,
- };
- case LLM_ARCH_MAMBA2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_SSM_IN,
- LLM_TENSOR_SSM_CONV1D,
- LLM_TENSOR_SSM_DT,
- LLM_TENSOR_SSM_A,
- LLM_TENSOR_SSM_D,
- LLM_TENSOR_SSM_NORM,
- LLM_TENSOR_SSM_OUT,
- };
- case LLM_ARCH_JAMBA:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_SSM_IN,
- LLM_TENSOR_SSM_CONV1D,
- LLM_TENSOR_SSM_X,
- LLM_TENSOR_SSM_DT,
- LLM_TENSOR_SSM_DT_NORM,
- LLM_TENSOR_SSM_A,
- LLM_TENSOR_SSM_B_NORM,
- LLM_TENSOR_SSM_C_NORM,
- LLM_TENSOR_SSM_D,
- LLM_TENSOR_SSM_OUT,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_FALCON_H1:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_SSM_IN,
- LLM_TENSOR_SSM_CONV1D,
- LLM_TENSOR_SSM_DT,
- LLM_TENSOR_SSM_A,
- LLM_TENSOR_SSM_D,
- LLM_TENSOR_SSM_NORM,
- LLM_TENSOR_SSM_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_COMMAND_R:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- };
- case LLM_ARCH_COHERE2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_DBRX:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_OUT_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_OLMO:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_OLMO2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_FFN_POST_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_OPENELM:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_ARCTIC:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_NORM_EXPS,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_DEEPSEEK:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_ROT_EMBD,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_INP_SHEXP,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- };
- case LLM_ARCH_DEEPSEEK2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q_A_NORM,
- LLM_TENSOR_ATTN_KV_A_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_A,
- LLM_TENSOR_ATTN_Q_B,
- LLM_TENSOR_ATTN_KV_A_MQA,
- LLM_TENSOR_ATTN_KV_B,
- LLM_TENSOR_ATTN_K_B,
- LLM_TENSOR_ATTN_V_B,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_INP_SHEXP,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- };
- case LLM_ARCH_PLM:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_KV_A_MQA,
- LLM_TENSOR_ATTN_KV_A_NORM,
- LLM_TENSOR_ATTN_KV_B,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_CHATGLM:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_DOWN,
- };
- case LLM_ARCH_GLM4:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_FFN_POST_NORM,
- };
- case LLM_ARCH_GLM4_MOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- LLM_TENSOR_NEXTN_EH_PROJ,
- LLM_TENSOR_NEXTN_EMBED_TOKENS,
- LLM_TENSOR_NEXTN_ENORM,
- LLM_TENSOR_NEXTN_HNORM,
- LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
- LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
- };
- case LLM_ARCH_BITNET:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_SUB_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_SUB_NORM,
- };
- case LLM_ARCH_T5:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_DEC_OUTPUT_NORM,
- LLM_TENSOR_DEC_ATTN_NORM,
- LLM_TENSOR_DEC_ATTN_Q,
- LLM_TENSOR_DEC_ATTN_K,
- LLM_TENSOR_DEC_ATTN_V,
- LLM_TENSOR_DEC_ATTN_OUT,
- LLM_TENSOR_DEC_ATTN_REL_B,
- LLM_TENSOR_DEC_CROSS_ATTN_NORM,
- LLM_TENSOR_DEC_CROSS_ATTN_Q,
- LLM_TENSOR_DEC_CROSS_ATTN_K,
- LLM_TENSOR_DEC_CROSS_ATTN_V,
- LLM_TENSOR_DEC_CROSS_ATTN_OUT,
- LLM_TENSOR_DEC_CROSS_ATTN_REL_B,
- LLM_TENSOR_DEC_FFN_NORM,
- LLM_TENSOR_DEC_FFN_GATE,
- LLM_TENSOR_DEC_FFN_DOWN,
- LLM_TENSOR_DEC_FFN_UP,
- LLM_TENSOR_ENC_OUTPUT_NORM,
- LLM_TENSOR_ENC_ATTN_NORM,
- LLM_TENSOR_ENC_ATTN_Q,
- LLM_TENSOR_ENC_ATTN_K,
- LLM_TENSOR_ENC_ATTN_V,
- LLM_TENSOR_ENC_ATTN_OUT,
- LLM_TENSOR_ENC_ATTN_REL_B,
- LLM_TENSOR_ENC_FFN_NORM,
- LLM_TENSOR_ENC_FFN_GATE,
- LLM_TENSOR_ENC_FFN_DOWN,
- LLM_TENSOR_ENC_FFN_UP,
- };
- case LLM_ARCH_T5ENCODER:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ENC_OUTPUT_NORM,
- LLM_TENSOR_ENC_ATTN_NORM,
- LLM_TENSOR_ENC_ATTN_Q,
- LLM_TENSOR_ENC_ATTN_K,
- LLM_TENSOR_ENC_ATTN_V,
- LLM_TENSOR_ENC_ATTN_OUT,
- LLM_TENSOR_ENC_ATTN_REL_B,
- LLM_TENSOR_ENC_FFN_NORM,
- LLM_TENSOR_ENC_FFN_GATE,
- LLM_TENSOR_ENC_FFN_DOWN,
- LLM_TENSOR_ENC_FFN_UP,
- };
- case LLM_ARCH_JAIS:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- };
- case LLM_ARCH_NEMOTRON_H:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_SSM_IN,
- LLM_TENSOR_SSM_CONV1D,
- LLM_TENSOR_SSM_DT,
- LLM_TENSOR_SSM_A,
- LLM_TENSOR_SSM_D,
- LLM_TENSOR_SSM_NORM,
- LLM_TENSOR_SSM_OUT,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_NEMOTRON_H_MOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- // mamba(2) ssm layers
- LLM_TENSOR_SSM_IN,
- LLM_TENSOR_SSM_CONV1D,
- LLM_TENSOR_SSM_DT,
- LLM_TENSOR_SSM_A,
- LLM_TENSOR_SSM_D,
- LLM_TENSOR_SSM_NORM,
- LLM_TENSOR_SSM_OUT,
- // attention layers
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- // dense FFN
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- // MoE FFN (for MoE layers)
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- // MoE shared expert layer
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- };
- case LLM_ARCH_EXAONE4:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_POST_NORM,
- };
- case LLM_ARCH_RWKV6:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_NORM_2,
- LLM_TENSOR_TIME_MIX_W1,
- LLM_TENSOR_TIME_MIX_W2,
- LLM_TENSOR_TIME_MIX_LERP_X,
- LLM_TENSOR_TIME_MIX_LERP_W,
- LLM_TENSOR_TIME_MIX_LERP_K,
- LLM_TENSOR_TIME_MIX_LERP_V,
- LLM_TENSOR_TIME_MIX_LERP_R,
- LLM_TENSOR_TIME_MIX_LERP_G,
- LLM_TENSOR_TIME_MIX_LERP_FUSED,
- LLM_TENSOR_TIME_MIX_FIRST,
- LLM_TENSOR_TIME_MIX_DECAY,
- LLM_TENSOR_TIME_MIX_DECAY_W1,
- LLM_TENSOR_TIME_MIX_DECAY_W2,
- LLM_TENSOR_TIME_MIX_KEY,
- LLM_TENSOR_TIME_MIX_VALUE,
- LLM_TENSOR_TIME_MIX_RECEPTANCE,
- LLM_TENSOR_TIME_MIX_GATE,
- LLM_TENSOR_TIME_MIX_LN,
- LLM_TENSOR_TIME_MIX_OUTPUT,
- LLM_TENSOR_CHANNEL_MIX_LERP_K,
- LLM_TENSOR_CHANNEL_MIX_LERP_R,
- LLM_TENSOR_CHANNEL_MIX_KEY,
- LLM_TENSOR_CHANNEL_MIX_VALUE,
- LLM_TENSOR_CHANNEL_MIX_RECEPTANCE,
- };
- case LLM_ARCH_RWKV6QWEN2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_TIME_MIX_W1,
- LLM_TENSOR_TIME_MIX_W2,
- LLM_TENSOR_TIME_MIX_LERP_X,
- LLM_TENSOR_TIME_MIX_LERP_FUSED,
- LLM_TENSOR_TIME_MIX_FIRST,
- LLM_TENSOR_TIME_MIX_DECAY,
- LLM_TENSOR_TIME_MIX_DECAY_W1,
- LLM_TENSOR_TIME_MIX_DECAY_W2,
- LLM_TENSOR_TIME_MIX_KEY,
- LLM_TENSOR_TIME_MIX_VALUE,
- LLM_TENSOR_TIME_MIX_RECEPTANCE,
- LLM_TENSOR_TIME_MIX_GATE,
- LLM_TENSOR_TIME_MIX_OUTPUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_RWKV7:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_NORM_2,
- LLM_TENSOR_TIME_MIX_W0,
- LLM_TENSOR_TIME_MIX_W1,
- LLM_TENSOR_TIME_MIX_W2,
- LLM_TENSOR_TIME_MIX_A0,
- LLM_TENSOR_TIME_MIX_A1,
- LLM_TENSOR_TIME_MIX_A2,
- LLM_TENSOR_TIME_MIX_V0,
- LLM_TENSOR_TIME_MIX_V1,
- LLM_TENSOR_TIME_MIX_V2,
- LLM_TENSOR_TIME_MIX_G1,
- LLM_TENSOR_TIME_MIX_G2,
- LLM_TENSOR_TIME_MIX_K_K,
- LLM_TENSOR_TIME_MIX_K_A,
- LLM_TENSOR_TIME_MIX_R_K,
- LLM_TENSOR_TIME_MIX_LERP_FUSED,
- LLM_TENSOR_TIME_MIX_KEY,
- LLM_TENSOR_TIME_MIX_VALUE,
- LLM_TENSOR_TIME_MIX_RECEPTANCE,
- LLM_TENSOR_TIME_MIX_LN,
- LLM_TENSOR_TIME_MIX_OUTPUT,
- LLM_TENSOR_CHANNEL_MIX_LERP_K,
- LLM_TENSOR_CHANNEL_MIX_KEY,
- LLM_TENSOR_CHANNEL_MIX_VALUE,
- };
- case LLM_ARCH_ARWKV7:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_TIME_MIX_W0,
- LLM_TENSOR_TIME_MIX_W1,
- LLM_TENSOR_TIME_MIX_W2,
- LLM_TENSOR_TIME_MIX_A0,
- LLM_TENSOR_TIME_MIX_A1,
- LLM_TENSOR_TIME_MIX_A2,
- LLM_TENSOR_TIME_MIX_V0,
- LLM_TENSOR_TIME_MIX_V1,
- LLM_TENSOR_TIME_MIX_V2,
- LLM_TENSOR_TIME_MIX_G1,
- LLM_TENSOR_TIME_MIX_G2,
- LLM_TENSOR_TIME_MIX_K_K,
- LLM_TENSOR_TIME_MIX_K_A,
- LLM_TENSOR_TIME_MIX_R_K,
- LLM_TENSOR_TIME_MIX_LERP_FUSED,
- LLM_TENSOR_TIME_MIX_KEY,
- LLM_TENSOR_TIME_MIX_VALUE,
- LLM_TENSOR_TIME_MIX_RECEPTANCE,
- LLM_TENSOR_TIME_MIX_LN,
- LLM_TENSOR_TIME_MIX_OUTPUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_GRANITE_MOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- };
- case LLM_ARCH_GRANITE_HYBRID:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_SSM_IN,
- LLM_TENSOR_SSM_CONV1D,
- LLM_TENSOR_SSM_DT,
- LLM_TENSOR_SSM_A,
- LLM_TENSOR_SSM_D,
- LLM_TENSOR_SSM_NORM,
- LLM_TENSOR_SSM_OUT,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- };
- case LLM_ARCH_WAVTOKENIZER_DEC:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_TOKEN_EMBD_NORM,
- LLM_TENSOR_CONV1D,
- LLM_TENSOR_CONVNEXT_DW,
- LLM_TENSOR_CONVNEXT_NORM,
- LLM_TENSOR_CONVNEXT_PW1,
- LLM_TENSOR_CONVNEXT_PW2,
- LLM_TENSOR_CONVNEXT_GAMMA,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_POS_NET_CONV1,
- LLM_TENSOR_POS_NET_CONV2,
- LLM_TENSOR_POS_NET_NORM,
- LLM_TENSOR_POS_NET_NORM1,
- LLM_TENSOR_POS_NET_NORM2,
- LLM_TENSOR_POS_NET_ATTN_NORM,
- LLM_TENSOR_POS_NET_ATTN_Q,
- LLM_TENSOR_POS_NET_ATTN_K,
- LLM_TENSOR_POS_NET_ATTN_V,
- LLM_TENSOR_POS_NET_ATTN_OUT,
- };
- case LLM_ARCH_BAILINGMOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_INP_SHEXP,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- };
- case LLM_ARCH_BAILINGMOE2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- LLM_TENSOR_NEXTN_EH_PROJ,
- LLM_TENSOR_NEXTN_EMBED_TOKENS,
- LLM_TENSOR_NEXTN_ENORM,
- LLM_TENSOR_NEXTN_HNORM,
- LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,
- LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,
- LLM_TENSOR_LAYER_OUT_NORM,
- };
- case LLM_ARCH_DOTS1:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_INP_SHEXP,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- };
- case LLM_ARCH_ERNIE4_5_MOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- };
- case LLM_ARCH_HUNYUAN_MOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_SHEXP,
- LLM_TENSOR_FFN_DOWN_SHEXP,
- LLM_TENSOR_FFN_UP_SHEXP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_OPENAI_MOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_SINKS,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_LFM2:
- return {
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_SHORTCONV_CONV,
- LLM_TENSOR_SHORTCONV_INPROJ,
- LLM_TENSOR_SHORTCONV_OUTPROJ,
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM_LFM2,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_DENSE_2_OUT,
- };
- case LLM_ARCH_LFM2MOE:
- return {
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_SHORTCONV_CONV,
- LLM_TENSOR_SHORTCONV_INPROJ,
- LLM_TENSOR_SHORTCONV_OUTPROJ,
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM_LFM2,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- };
- case LLM_ARCH_SMALLTHINKER:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- };
- case LLM_ARCH_APERTUS:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ROPE_FREQS,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_SEED_OSS:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_POST_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- case LLM_ARCH_GROVEMOE:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_GATE_CHEXPS,
- LLM_TENSOR_FFN_DOWN_CHEXPS,
- LLM_TENSOR_FFN_UP_CHEXPS,
- };
- case LLM_ARCH_MINIMAX_M2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- };
- case LLM_ARCH_COGVLM:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_QKV,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_VISEXP_ATTN_QKV,
- LLM_TENSOR_VISEXP_ATTN_OUT,
- LLM_TENSOR_VISEXP_FFN_GATE,
- LLM_TENSOR_VISEXP_FFN_DOWN,
- LLM_TENSOR_VISEXP_FFN_UP,
- };
- case LLM_ARCH_MIMO2:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_SINKS,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- LLM_TENSOR_FFN_GATE_INP,
- LLM_TENSOR_FFN_GATE_EXPS,
- LLM_TENSOR_FFN_DOWN_EXPS,
- LLM_TENSOR_FFN_UP_EXPS,
- LLM_TENSOR_FFN_EXP_PROBS_B,
- };
- case LLM_ARCH_GPTJ:
- case LLM_ARCH_UNKNOWN:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- };
- case LLM_ARCH_MAINCODER:
- return {
- LLM_TENSOR_TOKEN_EMBD,
- LLM_TENSOR_OUTPUT_NORM,
- LLM_TENSOR_OUTPUT,
- LLM_TENSOR_ATTN_NORM,
- LLM_TENSOR_ATTN_Q,
- LLM_TENSOR_ATTN_Q_NORM,
- LLM_TENSOR_ATTN_K,
- LLM_TENSOR_ATTN_K_NORM,
- LLM_TENSOR_ATTN_V,
- LLM_TENSOR_ATTN_OUT,
- LLM_TENSOR_FFN_NORM,
- LLM_TENSOR_FFN_GATE,
- LLM_TENSOR_FFN_DOWN,
- LLM_TENSOR_FFN_UP,
- };
- default:
- GGML_ABORT("unknown architecture for tensor mapping");
- }
- }
// declare information about the model weight tensors:
// - the layer in which the tensor is going to be used. this is needed in order to assign the correct buffer type for the weight
// - the operator which is going to use the weight. this is needed to determine if the respective backend supports the operator
//
// for example, input layers are usually assigned to CPU/host buffer types
//
// a mismatch between the declared information and the actual layer/op in which the tensor is used can lead to sub-optimal
// assignment of the buffer types and extra overhead during computation
// example: https://github.com/ggml-org/llama.cpp/pull/17548
//
static const std::map<llm_tensor, llm_tensor_info> LLM_TENSOR_INFOS = {
    // input-layer tensors
    {LLM_TENSOR_TOKEN_EMBD,                 {LLM_TENSOR_LAYER_INPUT,     GGML_OP_GET_ROWS}},
    {LLM_TENSOR_POS_EMBD,                   {LLM_TENSOR_LAYER_INPUT,     GGML_OP_GET_ROWS}},
    {LLM_TENSOR_TOKEN_TYPES,                {LLM_TENSOR_LAYER_INPUT,     GGML_OP_GET_ROWS}},
    {LLM_TENSOR_TOKEN_EMBD_NORM,            {LLM_TENSOR_LAYER_INPUT,     GGML_OP_MUL}},
    // output-layer tensors
    {LLM_TENSOR_OUTPUT,                     {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}},
    {LLM_TENSOR_CLS,                        {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}},
    {LLM_TENSOR_CLS_OUT,                    {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DENSE_2_OUT,                {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}}, // Dense layer output
    {LLM_TENSOR_DENSE_3_OUT,                {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}}, // Dense layer output
    {LLM_TENSOR_OUTPUT_NORM,                {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL}},
    {LLM_TENSOR_OUTPUT_NORM_LFM2,           {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL}},
    {LLM_TENSOR_DEC_OUTPUT_NORM,            {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL}},
    {LLM_TENSOR_ENC_OUTPUT_NORM,            {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL}},
    // repeating (per-layer) tensors: rope factors
    {LLM_TENSOR_ROPE_FREQS,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ROPE}},
    {LLM_TENSOR_ROPE_FACTORS_LONG,          {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ROPE}},
    {LLM_TENSOR_ROPE_FACTORS_SHORT,         {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ROPE}},
    // repeating tensors: attention / FFN matmul weights
    {LLM_TENSOR_ATTN_Q,                     {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_K,                     {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_V,                     {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_QKV,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_OUT,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_GATE,                  {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_FFN_GATE,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_FFN_DOWN,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_FFN_UP,                     {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_FFN_DOWN_SHEXP,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_FFN_GATE_SHEXP,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_FFN_UP_SHEXP,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_Q_A,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_Q_B,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_KV_A_MQA,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_KV_B,                  {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_K_B,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_V_B,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ATTN_SINKS,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SCALE}},
    {LLM_TENSOR_DEC_ATTN_Q,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_ATTN_K,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_ATTN_V,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_ATTN_OUT,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_CROSS_ATTN_Q,           {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_CROSS_ATTN_K,           {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_CROSS_ATTN_V,           {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_CROSS_ATTN_OUT,         {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_FFN_GATE,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_FFN_DOWN,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_DEC_FFN_UP,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ENC_ATTN_Q,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ENC_ATTN_K,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ENC_ATTN_V,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ENC_ATTN_OUT,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ENC_FFN_GATE,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ENC_FFN_DOWN,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ENC_FFN_UP,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_FFN_GATE_INP_SHEXP,         {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_FFN_GATE_INP,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    // repeating tensors: SSM (mamba-style) weights
    {LLM_TENSOR_SSM_IN,                     {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_SSM_X,                      {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_SSM_DT,                     {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_SSM_OUT,                    {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_SSM_BETA_ALPHA,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    // repeating tensors: RWKV time-mix / channel-mix weights
    {LLM_TENSOR_TIME_MIX_W1,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_W2,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_A1,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_A2,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_V1,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_V2,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_G1,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_G2,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_DECAY_W1,          {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_DECAY_W2,          {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_KEY,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_VALUE,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_RECEPTANCE,        {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_GATE,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_TIME_MIX_OUTPUT,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_CHANNEL_MIX_KEY,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_CHANNEL_MIX_RECEPTANCE,     {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_CHANNEL_MIX_VALUE,          {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_FFN_ACT,                    {LLM_TENSOR_LAYER_REPEATING, GGML_OP_DIV}},
    {LLM_TENSOR_SSM_CONV1D,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_CONV}},
    {LLM_TENSOR_SSM_A,                      {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_SCAN}},
    {LLM_TENSOR_SSM_A_NOSCAN,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}}, // a version of SSM_A used for MUL instead of SSM_SCAN
    {LLM_TENSOR_SSM_DT_NORM,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_SSM_B_NORM,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_SSM_C_NORM,                 {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_SSM_D,                      {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_SSM_NORM,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_TIME_MIX_LERP_X,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_TIME_MIX_LN,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_CHANNEL_MIX_LERP_K,         {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_CHANNEL_MIX_LERP_R,         {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_TIME_MIX_K_K,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_TIME_MIX_K_A,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_TIME_MIX_R_K,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_TIME_MIX_LERP_W,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_LERP_K,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_LERP_V,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_LERP_R,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_LERP_G,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_LERP_FUSED,        {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_DECAY,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_W0,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_A0,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_V0,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    {LLM_TENSOR_TIME_MIX_FIRST,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_RWKV_WKV6}},
    // repeating tensors: normalization weights (applied via element-wise MUL)
    {LLM_TENSOR_ATTN_NORM,                  {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ATTN_NORM_2,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ATTN_OUT_NORM,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ATTN_POST_NORM,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_FFN_NORM,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_FFN_POST_NORM,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_FFN_NORM_EXPS,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ATTN_Q_NORM,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ATTN_K_NORM,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_LAYER_OUT_NORM,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ATTN_Q_A_NORM,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ATTN_KV_A_NORM,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ATTN_SUB_NORM,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_FFN_SUB_NORM,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_DEC_ATTN_NORM,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_DEC_CROSS_ATTN_NORM,        {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_DEC_FFN_NORM,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ENC_ATTN_NORM,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ENC_FFN_NORM,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_DEC_ATTN_REL_B,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_GET_ROWS}},
    {LLM_TENSOR_ENC_ATTN_REL_B,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_GET_ROWS}},
    // repeating tensors: MoE expert weights (indirect matmul)
    {LLM_TENSOR_FFN_DOWN_EXPS,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
    {LLM_TENSOR_FFN_GATE_EXPS,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
    {LLM_TENSOR_FFN_UP_EXPS,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
    {LLM_TENSOR_FFN_DOWN_CHEXPS,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
    {LLM_TENSOR_FFN_GATE_CHEXPS,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
    {LLM_TENSOR_FFN_UP_CHEXPS,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT_ID}},
    {LLM_TENSOR_FFN_EXP_PROBS_B,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_ADD}},
    // altup / laurel (gemma 3n)
    {LLM_TENSOR_PER_LAYER_TOKEN_EMBD,       {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_GET_ROWS}},
    {LLM_TENSOR_PER_LAYER_MODEL_PROJ,       {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}},
    {LLM_TENSOR_PER_LAYER_PROJ_NORM,        {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL}},
    {LLM_TENSOR_ALTUP_PROJ,                 {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ALTUP_UNEMBD_PROJ,          {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}},
    {LLM_TENSOR_PER_LAYER_INP_GATE,         {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_PER_LAYER_PROJ,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_PER_LAYER_POST_NORM,        {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ALTUP_CORRECT_COEF,         {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ALTUP_CORRECT_SCALE,        {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_ALTUP_PREDICT_COEF,         {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ALTUP_ROUTER,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_ALTUP_ROUTER_NORM,          {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_LAUREL_L,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_LAUREL_R,                   {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_LAUREL_POST_NORM,           {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    // this tensor is loaded for T5, but never used
    {LLM_TENSOR_DEC_CROSS_ATTN_REL_B,       {LLM_TENSOR_LAYER_REPEATING, GGML_OP_NONE}},
    // wavtokenizer: convolution / posnet / convnext tensors
    {LLM_TENSOR_CONV1D,                     {LLM_TENSOR_LAYER_INPUT,     GGML_OP_IM2COL}},
    {LLM_TENSOR_POS_NET_NORM,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_POS_NET_NORM1,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_POS_NET_NORM2,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_POS_NET_CONV1,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_IM2COL}},
    {LLM_TENSOR_POS_NET_CONV2,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_IM2COL}},
    {LLM_TENSOR_POS_NET_ATTN_NORM,          {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_POS_NET_ATTN_Q,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_POS_NET_ATTN_K,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_POS_NET_ATTN_V,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_POS_NET_ATTN_OUT,           {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_CONVNEXT_DW,                {LLM_TENSOR_LAYER_REPEATING, GGML_OP_IM2COL}},
    {LLM_TENSOR_CONVNEXT_NORM,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    {LLM_TENSOR_CONVNEXT_PW1,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_CONVNEXT_PW2,               {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_CONVNEXT_GAMMA,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL}},
    // lfm2 shortconv tensors
    {LLM_TENSOR_SHORTCONV_CONV,             {LLM_TENSOR_LAYER_REPEATING, GGML_OP_SSM_CONV}},
    {LLM_TENSOR_SHORTCONV_INPROJ,           {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_SHORTCONV_OUTPROJ,          {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    // cogvlm vision-expert tensors
    {LLM_TENSOR_VISEXP_ATTN_QKV,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_VISEXP_ATTN_OUT,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_VISEXP_FFN_GATE,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_VISEXP_FFN_DOWN,            {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    {LLM_TENSOR_VISEXP_FFN_UP,              {LLM_TENSOR_LAYER_REPEATING, GGML_OP_MUL_MAT}},
    // NextN/MTP tensors are currently ignored (reserved for future MTP support)
    // These tensors only exist in the last layer(s) and are treated as output tensors
    {LLM_TENSOR_NEXTN_EH_PROJ,              {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}},
    {LLM_TENSOR_NEXTN_EMBED_TOKENS,         {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_GET_ROWS}},
    {LLM_TENSOR_NEXTN_ENORM,                {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_GET_ROWS}},
    {LLM_TENSOR_NEXTN_HNORM,                {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL}},
    {LLM_TENSOR_NEXTN_SHARED_HEAD_HEAD,     {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL_MAT}},
    {LLM_TENSOR_NEXTN_SHARED_HEAD_NORM,     {LLM_TENSOR_LAYER_OUTPUT,    GGML_OP_MUL}},
};
// bind an architecture (and optional key suffix) for metadata-key formatting
LLM_KV::LLM_KV(llm_arch arch, const char * suffix) : arch(arch), suffix(suffix) {}
- std::string LLM_KV::operator()(llm_kv kv) const {
- std::string name = ::format(LLM_KV_NAMES.at(kv), LLM_ARCH_NAMES.at(arch));
- if (suffix != nullptr) {
- name += ".";
- name += suffix;
- }
- return name;
- }
// capture the naming context for one tensor reference; the set of tensors
// declared for this architecture is resolved once at construction time
LLM_TN_IMPL::LLM_TN_IMPL(llm_arch arch, llm_tensor tensor, const char * suffix, int bid, int xid)
    : arch(arch), tensor(tensor), suffix(suffix), bid(bid), xid(xid),
      model_tensors(llm_get_tensor_names(arch)) {}
- std::string LLM_TN_IMPL::str() const {
- if (LLM_TENSOR_NAMES.find(tensor) == LLM_TENSOR_NAMES.end()) {
- GGML_ABORT("unknown tensor name for tensor id %d", static_cast<int>(tensor));
- }
- if (model_tensors.find(tensor) == model_tensors.end()) {
- return LLM_TENSOR_NAMES.at(tensor);
- }
- std::string name = ::format(LLM_TENSOR_NAMES.at(tensor), bid, xid);
- if (suffix != nullptr) {
- name += ".";
- name += suffix;
- }
- return name;
- }
- const char * llm_arch_name(llm_arch arch) {
- auto it = LLM_ARCH_NAMES.find(arch);
- if (it == LLM_ARCH_NAMES.end()) {
- return "unknown";
- }
- return it->second;
- }
- llm_arch llm_arch_from_string(const std::string & name) {
- for (const auto & kv : LLM_ARCH_NAMES) { // NOLINT
- if (kv.second == name) {
- return kv.first;
- }
- }
- return LLM_ARCH_UNKNOWN;
- }
// layer/op metadata for a tensor id; throws std::out_of_range if the tensor
// has no entry in LLM_TENSOR_INFOS
const llm_tensor_info & llm_tensor_info_for(llm_tensor tensor) {
    return LLM_TENSOR_INFOS.at(tensor);
}
- bool llm_arch_is_recurrent(const llm_arch & arch) {
- switch (arch) {
- case LLM_ARCH_MAMBA:
- case LLM_ARCH_MAMBA2:
- case LLM_ARCH_RWKV6:
- case LLM_ARCH_RWKV6QWEN2:
- case LLM_ARCH_RWKV7:
- case LLM_ARCH_ARWKV7:
- return true;
- default:
- return false;
- }
- }
- bool llm_arch_is_hybrid(const llm_arch & arch) {
- switch (arch) {
- case LLM_ARCH_JAMBA:
- case LLM_ARCH_FALCON_H1:
- case LLM_ARCH_PLAMO2:
- case LLM_ARCH_GRANITE_HYBRID:
- case LLM_ARCH_LFM2:
- case LLM_ARCH_LFM2MOE:
- case LLM_ARCH_NEMOTRON_H:
- case LLM_ARCH_NEMOTRON_H_MOE:
- case LLM_ARCH_QWEN3NEXT:
- return true;
- default:
- return false;
- }
- }
- bool llm_arch_is_diffusion(const llm_arch & arch) {
- switch (arch) {
- case LLM_ARCH_DREAM:
- case LLM_ARCH_LLADA:
- case LLM_ARCH_LLADA_MOE:
- case LLM_ARCH_RND1:
- return true;
- default:
- return false;
- }
- }
|