
from __future__ import annotations

from enum import Enum, IntEnum, auto
from typing import Any

#
# constants
#

GGUF_MAGIC = 0x46554747  # "GGUF"
GGUF_VERSION = 3
GGUF_DEFAULT_ALIGNMENT = 32
GGML_QUANT_VERSION = 2  # GGML_QNT_VERSION from ggml.h
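
# Illustrative sanity check (added here for clarity, not part of the original file):
# GGUF readers detect the format by comparing the first four bytes of a file against
# GGUF_MAGIC, which is the little-endian encoding of the ASCII string "GGUF".
assert GGUF_MAGIC.to_bytes(4, "little") == b"GGUF"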

#
# metadata keys
#


class Keys:
    class General:
        TYPE = "general.type"
        ARCHITECTURE = "general.architecture"
        QUANTIZATION_VERSION = "general.quantization_version"
        ALIGNMENT = "general.alignment"
        FILE_TYPE = "general.file_type"

        # Authorship Metadata
        NAME = "general.name"
        AUTHOR = "general.author"
        VERSION = "general.version"
        ORGANIZATION = "general.organization"
        FINETUNE = "general.finetune"
        BASENAME = "general.basename"
        DESCRIPTION = "general.description"
        QUANTIZED_BY = "general.quantized_by"
        SIZE_LABEL = "general.size_label"

        # Licensing details
        LICENSE = "general.license"
        LICENSE_NAME = "general.license.name"
        LICENSE_LINK = "general.license.link"

        # Typically represents the converted GGUF repo (unless native)
        URL = "general.url"  # Model Website/Paper
        DOI = "general.doi"
        UUID = "general.uuid"
        REPO_URL = "general.repo_url"  # Model Source Repository (git/svn/etc...)

        # Model Source during conversion
        SOURCE_URL = "general.source.url"  # Model Website/Paper
        SOURCE_DOI = "general.source.doi"
        SOURCE_UUID = "general.source.uuid"
        SOURCE_REPO_URL = "general.source.repo_url"  # Model Source Repository (git/svn/etc...)
        # Base Model Source. There can be more than one source if it's a merged
        # model, as with 'Mistral-7B-Merge-14-v0.1'. This helps trace the lineage
        # of models as they are fine-tuned or merged over time.
        BASE_MODEL_COUNT = "general.base_model.count"
        BASE_MODEL_NAME = "general.base_model.{id}.name"
        BASE_MODEL_AUTHOR = "general.base_model.{id}.author"
        BASE_MODEL_VERSION = "general.base_model.{id}.version"
        BASE_MODEL_ORGANIZATION = "general.base_model.{id}.organization"
        BASE_MODEL_URL = "general.base_model.{id}.url"  # Model Website/Paper
        BASE_MODEL_DOI = "general.base_model.{id}.doi"
        BASE_MODEL_UUID = "general.base_model.{id}.uuid"
        BASE_MODEL_REPO_URL = "general.base_model.{id}.repo_url"  # Model Source Repository (git/svn/etc...)

        # Array based KV stores
        TAGS = "general.tags"
        LANGUAGES = "general.languages"
        DATASETS = "general.datasets"

    class LLM:
        VOCAB_SIZE = "{arch}.vocab_size"
        CONTEXT_LENGTH = "{arch}.context_length"
        EMBEDDING_LENGTH = "{arch}.embedding_length"
        BLOCK_COUNT = "{arch}.block_count"
        LEADING_DENSE_BLOCK_COUNT = "{arch}.leading_dense_block_count"
        FEED_FORWARD_LENGTH = "{arch}.feed_forward_length"
        EXPERT_FEED_FORWARD_LENGTH = "{arch}.expert_feed_forward_length"
        EXPERT_SHARED_FEED_FORWARD_LENGTH = "{arch}.expert_shared_feed_forward_length"
        USE_PARALLEL_RESIDUAL = "{arch}.use_parallel_residual"
        TENSOR_DATA_LAYOUT = "{arch}.tensor_data_layout"
        EXPERT_COUNT = "{arch}.expert_count"
        EXPERT_USED_COUNT = "{arch}.expert_used_count"
        EXPERT_SHARED_COUNT = "{arch}.expert_shared_count"
        EXPERT_WEIGHTS_SCALE = "{arch}.expert_weights_scale"
        POOLING_TYPE = "{arch}.pooling_type"
        LOGIT_SCALE = "{arch}.logit_scale"
        DECODER_START_TOKEN_ID = "{arch}.decoder_start_token_id"
        ATTN_LOGIT_SOFTCAPPING = "{arch}.attn_logit_softcapping"
        FINAL_LOGIT_SOFTCAPPING = "{arch}.final_logit_softcapping"
        RESCALE_EVERY_N_LAYERS = "{arch}.rescale_every_n_layers"
        TIME_MIX_EXTRA_DIM = "{arch}.time_mix_extra_dim"
        TIME_DECAY_EXTRA_DIM = "{arch}.time_decay_extra_dim"
        RESIDUAL_SCALE = "{arch}.residual_scale"
        EMBEDDING_SCALE = "{arch}.embedding_scale"

    class Attention:
        HEAD_COUNT = "{arch}.attention.head_count"
        HEAD_COUNT_KV = "{arch}.attention.head_count_kv"
        MAX_ALIBI_BIAS = "{arch}.attention.max_alibi_bias"
        CLAMP_KQV = "{arch}.attention.clamp_kqv"
        KEY_LENGTH = "{arch}.attention.key_length"
        VALUE_LENGTH = "{arch}.attention.value_length"
        LAYERNORM_EPS = "{arch}.attention.layer_norm_epsilon"
        LAYERNORM_RMS_EPS = "{arch}.attention.layer_norm_rms_epsilon"
        CAUSAL = "{arch}.attention.causal"
        Q_LORA_RANK = "{arch}.attention.q_lora_rank"
        KV_LORA_RANK = "{arch}.attention.kv_lora_rank"
        REL_BUCKETS_COUNT = "{arch}.attention.relative_buckets_count"
        SLIDING_WINDOW = "{arch}.attention.sliding_window"
        SCALE = "{arch}.attention.scale"

    class Rope:
        DIMENSION_COUNT = "{arch}.rope.dimension_count"
        FREQ_BASE = "{arch}.rope.freq_base"
        SCALING_TYPE = "{arch}.rope.scaling.type"
        SCALING_FACTOR = "{arch}.rope.scaling.factor"
        SCALING_ATTN_FACTOR = "{arch}.rope.scaling.attn_factor"
        SCALING_ORIG_CTX_LEN = "{arch}.rope.scaling.original_context_length"
        SCALING_FINETUNED = "{arch}.rope.scaling.finetuned"
        SCALING_YARN_LOG_MUL = "{arch}.rope.scaling.yarn_log_multiplier"

    class Split:
        LLM_KV_SPLIT_NO = "split.no"
        LLM_KV_SPLIT_COUNT = "split.count"
        LLM_KV_SPLIT_TENSORS_COUNT = "split.tensors.count"

    class SSM:
        CONV_KERNEL = "{arch}.ssm.conv_kernel"
        INNER_SIZE = "{arch}.ssm.inner_size"
        STATE_SIZE = "{arch}.ssm.state_size"
        TIME_STEP_RANK = "{arch}.ssm.time_step_rank"
        DT_B_C_RMS = "{arch}.ssm.dt_b_c_rms"

    class WKV:
        HEAD_SIZE = "{arch}.wkv.head_size"

    class Tokenizer:
        MODEL = "tokenizer.ggml.model"
        PRE = "tokenizer.ggml.pre"
        LIST = "tokenizer.ggml.tokens"
        TOKEN_TYPE = "tokenizer.ggml.token_type"
        TOKEN_TYPE_COUNT = "tokenizer.ggml.token_type_count"  # for BERT-style token types
        SCORES = "tokenizer.ggml.scores"
        MERGES = "tokenizer.ggml.merges"
        BOS_ID = "tokenizer.ggml.bos_token_id"
        EOS_ID = "tokenizer.ggml.eos_token_id"
        UNK_ID = "tokenizer.ggml.unknown_token_id"
        SEP_ID = "tokenizer.ggml.seperator_token_id"
        PAD_ID = "tokenizer.ggml.padding_token_id"
        CLS_ID = "tokenizer.ggml.cls_token_id"
        MASK_ID = "tokenizer.ggml.mask_token_id"
        ADD_BOS = "tokenizer.ggml.add_bos_token"
        ADD_EOS = "tokenizer.ggml.add_eos_token"
        ADD_PREFIX = "tokenizer.ggml.add_space_prefix"
        REMOVE_EXTRA_WS = "tokenizer.ggml.remove_extra_whitespaces"
        PRECOMPILED_CHARSMAP = "tokenizer.ggml.precompiled_charsmap"
        HF_JSON = "tokenizer.huggingface.json"
        RWKV = "tokenizer.rwkv.world"
        CHAT_TEMPLATE = "tokenizer.chat_template"
        CHAT_TEMPLATE_N = "tokenizer.chat_template.{name}"
        CHAT_TEMPLATES = "tokenizer.chat_templates"
        # FIM/Infill special tokens constants
        PREFIX_ID = "tokenizer.ggml.prefix_token_id"
        SUFFIX_ID = "tokenizer.ggml.suffix_token_id"
        MIDDLE_ID = "tokenizer.ggml.middle_token_id"
        EOT_ID = "tokenizer.ggml.eot_token_id"
        EOM_ID = "tokenizer.ggml.eom_token_id"

    class Adapter:
        TYPE = "adapter.type"
        LORA_ALPHA = "adapter.lora.alpha"
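
# Illustrative helper (an assumption added for demonstration, not part of the original
# gguf API): most per-model keys above are str.format templates with an "{arch}"
# placeholder, and the base_model keys use "{id}", so a concrete metadata key is
# produced like this:
def _example_format_llm_key(template: str, arch: str) -> str:
    # e.g. _example_format_llm_key(Keys.LLM.CONTEXT_LENGTH, "llama") -> "llama.context_length"
    #      Keys.General.BASE_MODEL_NAME.format(id=0)                 -> "general.base_model.0.name"
    return template.format(arch=arch)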

#
# recommended mapping of model tensor names for storage in gguf
#


class GGUFType:
    MODEL = "model"
    ADAPTER = "adapter"


class MODEL_ARCH(IntEnum):
    LLAMA = auto()
    FALCON = auto()
    BAICHUAN = auto()
    GROK = auto()
    GPT2 = auto()
    GPTJ = auto()
    GPTNEOX = auto()
    MPT = auto()
    STARCODER = auto()
    REFACT = auto()
    BERT = auto()
    NOMIC_BERT = auto()
    JINA_BERT_V2 = auto()
    BLOOM = auto()
    STABLELM = auto()
    QWEN = auto()
    QWEN2 = auto()
    QWEN2MOE = auto()
    PHI2 = auto()
    PHI3 = auto()
    PLAMO = auto()
    CODESHELL = auto()
    ORION = auto()
    INTERNLM2 = auto()
    MINICPM = auto()
    MINICPM3 = auto()
    GEMMA = auto()
    GEMMA2 = auto()
    STARCODER2 = auto()
    RWKV6 = auto()
    MAMBA = auto()
    XVERSE = auto()
    COMMAND_R = auto()
    DBRX = auto()
    OLMO = auto()
    OLMOE = auto()
    OPENELM = auto()
    ARCTIC = auto()
    DEEPSEEK2 = auto()
    CHATGLM = auto()
    BITNET = auto()
    T5 = auto()
    T5ENCODER = auto()
    JAIS = auto()
    NEMOTRON = auto()
    EXAONE = auto()
    GRANITE = auto()


class MODEL_TENSOR(IntEnum):
    TOKEN_EMBD = auto()
    TOKEN_EMBD_NORM = auto()
    TOKEN_TYPES = auto()
    POS_EMBD = auto()
    OUTPUT = auto()
    OUTPUT_NORM = auto()
    ROPE_FREQS = auto()
    ROPE_FACTORS_LONG = auto()
    ROPE_FACTORS_SHORT = auto()
    ATTN_Q = auto()
    ATTN_K = auto()
    ATTN_V = auto()
    ATTN_QKV = auto()
    ATTN_OUT = auto()
    ATTN_NORM = auto()
    ATTN_NORM_2 = auto()
    ATTN_OUT_NORM = auto()
    ATTN_POST_NORM = auto()
    ATTN_ROT_EMBD = auto()
    FFN_GATE_INP = auto()
    FFN_GATE_INP_SHEXP = auto()
    FFN_NORM = auto()
    FFN_PRE_NORM = auto()
    FFN_POST_NORM = auto()
    FFN_GATE = auto()
    FFN_DOWN = auto()
    FFN_UP = auto()
    FFN_ACT = auto()
    FFN_NORM_EXP = auto()
    FFN_GATE_EXP = auto()
    FFN_DOWN_EXP = auto()
    FFN_UP_EXP = auto()
    FFN_GATE_SHEXP = auto()
    FFN_DOWN_SHEXP = auto()
    FFN_UP_SHEXP = auto()
    ATTN_Q_NORM = auto()
    ATTN_K_NORM = auto()
    LAYER_OUT_NORM = auto()
    SSM_IN = auto()
    SSM_CONV1D = auto()
    SSM_X = auto()
    SSM_DT = auto()
    SSM_A = auto()
    SSM_D = auto()
    SSM_OUT = auto()
    TIME_MIX_W1 = auto()
    TIME_MIX_W2 = auto()
    TIME_MIX_LERP_X = auto()
    TIME_MIX_LERP_K = auto()
    TIME_MIX_LERP_V = auto()
    TIME_MIX_LERP_R = auto()
    TIME_MIX_LERP_G = auto()
    TIME_MIX_LERP_W = auto()
    TIME_MIX_FIRST = auto()
    TIME_MIX_DECAY = auto()
    TIME_MIX_DECAY_W1 = auto()
    TIME_MIX_DECAY_W2 = auto()
    TIME_MIX_KEY = auto()
    TIME_MIX_VALUE = auto()
    TIME_MIX_RECEPTANCE = auto()
    TIME_MIX_GATE = auto()
    TIME_MIX_LN = auto()
    TIME_MIX_OUTPUT = auto()
    CHANNEL_MIX_LERP_K = auto()
    CHANNEL_MIX_LERP_R = auto()
    CHANNEL_MIX_KEY = auto()
    CHANNEL_MIX_RECEPTANCE = auto()
    CHANNEL_MIX_VALUE = auto()
    ATTN_Q_A = auto()
    ATTN_Q_B = auto()
    ATTN_KV_A_MQA = auto()
    ATTN_KV_B = auto()
    ATTN_Q_A_NORM = auto()
    ATTN_KV_A_NORM = auto()
    FFN_SUB_NORM = auto()
    ATTN_SUB_NORM = auto()
    DEC_ATTN_NORM = auto()
    DEC_ATTN_Q = auto()
    DEC_ATTN_K = auto()
    DEC_ATTN_V = auto()
    DEC_ATTN_OUT = auto()
    DEC_ATTN_REL_B = auto()
    DEC_CROSS_ATTN_NORM = auto()
    DEC_CROSS_ATTN_Q = auto()
    DEC_CROSS_ATTN_K = auto()
    DEC_CROSS_ATTN_V = auto()
    DEC_CROSS_ATTN_OUT = auto()
    DEC_CROSS_ATTN_REL_B = auto()
    DEC_FFN_NORM = auto()
    DEC_FFN_GATE = auto()
    DEC_FFN_DOWN = auto()
    DEC_FFN_UP = auto()
    DEC_OUTPUT_NORM = auto()
    ENC_ATTN_NORM = auto()
    ENC_ATTN_Q = auto()
    ENC_ATTN_K = auto()
    ENC_ATTN_V = auto()
    ENC_ATTN_OUT = auto()
    ENC_ATTN_REL_B = auto()
    ENC_FFN_NORM = auto()
    ENC_FFN_GATE = auto()
    ENC_FFN_DOWN = auto()
    ENC_FFN_UP = auto()
    ENC_OUTPUT_NORM = auto()


MODEL_ARCH_NAMES: dict[MODEL_ARCH, str] = {
    MODEL_ARCH.LLAMA: "llama",
    MODEL_ARCH.FALCON: "falcon",
    MODEL_ARCH.BAICHUAN: "baichuan",
    MODEL_ARCH.GROK: "grok",
    MODEL_ARCH.GPT2: "gpt2",
    MODEL_ARCH.GPTJ: "gptj",
    MODEL_ARCH.GPTNEOX: "gptneox",
    MODEL_ARCH.MPT: "mpt",
    MODEL_ARCH.STARCODER: "starcoder",
    MODEL_ARCH.REFACT: "refact",
    MODEL_ARCH.BERT: "bert",
    MODEL_ARCH.NOMIC_BERT: "nomic-bert",
    MODEL_ARCH.JINA_BERT_V2: "jina-bert-v2",
    MODEL_ARCH.BLOOM: "bloom",
    MODEL_ARCH.STABLELM: "stablelm",
    MODEL_ARCH.QWEN: "qwen",
    MODEL_ARCH.QWEN2: "qwen2",
    MODEL_ARCH.QWEN2MOE: "qwen2moe",
    MODEL_ARCH.PHI2: "phi2",
    MODEL_ARCH.PHI3: "phi3",
    MODEL_ARCH.PLAMO: "plamo",
    MODEL_ARCH.CODESHELL: "codeshell",
    MODEL_ARCH.ORION: "orion",
    MODEL_ARCH.INTERNLM2: "internlm2",
    MODEL_ARCH.MINICPM: "minicpm",
    MODEL_ARCH.MINICPM3: "minicpm3",
    MODEL_ARCH.GEMMA: "gemma",
    MODEL_ARCH.GEMMA2: "gemma2",
    MODEL_ARCH.STARCODER2: "starcoder2",
    MODEL_ARCH.RWKV6: "rwkv6",
    MODEL_ARCH.MAMBA: "mamba",
    MODEL_ARCH.XVERSE: "xverse",
    MODEL_ARCH.COMMAND_R: "command-r",
    MODEL_ARCH.DBRX: "dbrx",
    MODEL_ARCH.OLMO: "olmo",
    MODEL_ARCH.OLMOE: "olmoe",
    MODEL_ARCH.OPENELM: "openelm",
    MODEL_ARCH.ARCTIC: "arctic",
    MODEL_ARCH.DEEPSEEK2: "deepseek2",
    MODEL_ARCH.CHATGLM: "chatglm",
    MODEL_ARCH.BITNET: "bitnet",
    MODEL_ARCH.T5: "t5",
    MODEL_ARCH.T5ENCODER: "t5encoder",
    MODEL_ARCH.JAIS: "jais",
    MODEL_ARCH.NEMOTRON: "nemotron",
    MODEL_ARCH.EXAONE: "exaone",
    MODEL_ARCH.GRANITE: "granite",
}
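
# Illustrative reverse lookup (a hypothetical helper, not in the original file): the
# mapping above is one-to-one, so the enum member for a serialized architecture string
# can be recovered by inverting it.
_ARCH_BY_NAME: dict[str, MODEL_ARCH] = {name: arch for arch, name in MODEL_ARCH_NAMES.items()}
# e.g. _ARCH_BY_NAME["llama"] is MODEL_ARCH.LLAMA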

TENSOR_NAMES: dict[MODEL_TENSOR, str] = {
    MODEL_TENSOR.TOKEN_EMBD: "token_embd",
    MODEL_TENSOR.TOKEN_EMBD_NORM: "token_embd_norm",
    MODEL_TENSOR.TOKEN_TYPES: "token_types",
    MODEL_TENSOR.POS_EMBD: "position_embd",
    MODEL_TENSOR.OUTPUT_NORM: "output_norm",
    MODEL_TENSOR.OUTPUT: "output",
    MODEL_TENSOR.ROPE_FREQS: "rope_freqs",
    MODEL_TENSOR.ROPE_FACTORS_LONG: "rope_factors_long",
    MODEL_TENSOR.ROPE_FACTORS_SHORT: "rope_factors_short",
    MODEL_TENSOR.ATTN_NORM: "blk.{bid}.attn_norm",
    MODEL_TENSOR.ATTN_NORM_2: "blk.{bid}.attn_norm_2",
    MODEL_TENSOR.ATTN_QKV: "blk.{bid}.attn_qkv",
    MODEL_TENSOR.ATTN_Q: "blk.{bid}.attn_q",
    MODEL_TENSOR.ATTN_K: "blk.{bid}.attn_k",
    MODEL_TENSOR.ATTN_V: "blk.{bid}.attn_v",
    MODEL_TENSOR.ATTN_OUT: "blk.{bid}.attn_output",
    MODEL_TENSOR.ATTN_ROT_EMBD: "blk.{bid}.attn_rot_embd",
    MODEL_TENSOR.ATTN_Q_NORM: "blk.{bid}.attn_q_norm",
    MODEL_TENSOR.ATTN_K_NORM: "blk.{bid}.attn_k_norm",
    MODEL_TENSOR.ATTN_OUT_NORM: "blk.{bid}.attn_output_norm",
    MODEL_TENSOR.ATTN_POST_NORM: "blk.{bid}.post_attention_norm",
    MODEL_TENSOR.FFN_GATE_INP: "blk.{bid}.ffn_gate_inp",
    MODEL_TENSOR.FFN_GATE_INP_SHEXP: "blk.{bid}.ffn_gate_inp_shexp",
    MODEL_TENSOR.FFN_NORM: "blk.{bid}.ffn_norm",
    MODEL_TENSOR.FFN_PRE_NORM: "blk.{bid}.ffn_norm",
    MODEL_TENSOR.FFN_POST_NORM: "blk.{bid}.post_ffw_norm",
    MODEL_TENSOR.FFN_GATE: "blk.{bid}.ffn_gate",
    MODEL_TENSOR.FFN_DOWN: "blk.{bid}.ffn_down",
    MODEL_TENSOR.FFN_UP: "blk.{bid}.ffn_up",
    MODEL_TENSOR.FFN_GATE_SHEXP: "blk.{bid}.ffn_gate_shexp",
    MODEL_TENSOR.FFN_DOWN_SHEXP: "blk.{bid}.ffn_down_shexp",
    MODEL_TENSOR.FFN_UP_SHEXP: "blk.{bid}.ffn_up_shexp",
    MODEL_TENSOR.FFN_ACT: "blk.{bid}.ffn",
    MODEL_TENSOR.FFN_NORM_EXP: "blk.{bid}.ffn_norm_exps",
    MODEL_TENSOR.FFN_GATE_EXP: "blk.{bid}.ffn_gate_exps",
    MODEL_TENSOR.FFN_DOWN_EXP: "blk.{bid}.ffn_down_exps",
    MODEL_TENSOR.FFN_UP_EXP: "blk.{bid}.ffn_up_exps",
    MODEL_TENSOR.LAYER_OUT_NORM: "blk.{bid}.layer_output_norm",
    MODEL_TENSOR.SSM_IN: "blk.{bid}.ssm_in",
    MODEL_TENSOR.SSM_CONV1D: "blk.{bid}.ssm_conv1d",
    MODEL_TENSOR.SSM_X: "blk.{bid}.ssm_x",
    MODEL_TENSOR.SSM_DT: "blk.{bid}.ssm_dt",
    MODEL_TENSOR.SSM_A: "blk.{bid}.ssm_a",
    MODEL_TENSOR.SSM_D: "blk.{bid}.ssm_d",
    MODEL_TENSOR.SSM_OUT: "blk.{bid}.ssm_out",
    MODEL_TENSOR.TIME_MIX_W1: "blk.{bid}.time_mix_w1",
    MODEL_TENSOR.TIME_MIX_W2: "blk.{bid}.time_mix_w2",
    MODEL_TENSOR.TIME_MIX_LERP_X: "blk.{bid}.time_mix_lerp_x",
    MODEL_TENSOR.TIME_MIX_LERP_K: "blk.{bid}.time_mix_lerp_k",
    MODEL_TENSOR.TIME_MIX_LERP_V: "blk.{bid}.time_mix_lerp_v",
    MODEL_TENSOR.TIME_MIX_LERP_R: "blk.{bid}.time_mix_lerp_r",
    MODEL_TENSOR.TIME_MIX_LERP_G: "blk.{bid}.time_mix_lerp_g",
    MODEL_TENSOR.TIME_MIX_LERP_W: "blk.{bid}.time_mix_lerp_w",
    MODEL_TENSOR.TIME_MIX_FIRST: "blk.{bid}.time_mix_first",
    MODEL_TENSOR.TIME_MIX_DECAY: "blk.{bid}.time_mix_decay",
    MODEL_TENSOR.TIME_MIX_DECAY_W1: "blk.{bid}.time_mix_decay_w1",
    MODEL_TENSOR.TIME_MIX_DECAY_W2: "blk.{bid}.time_mix_decay_w2",
    MODEL_TENSOR.TIME_MIX_KEY: "blk.{bid}.time_mix_key",
    MODEL_TENSOR.TIME_MIX_VALUE: "blk.{bid}.time_mix_value",
    MODEL_TENSOR.TIME_MIX_RECEPTANCE: "blk.{bid}.time_mix_receptance",
    MODEL_TENSOR.TIME_MIX_GATE: "blk.{bid}.time_mix_gate",
    MODEL_TENSOR.TIME_MIX_LN: "blk.{bid}.time_mix_ln",
    MODEL_TENSOR.TIME_MIX_OUTPUT: "blk.{bid}.time_mix_output",
    MODEL_TENSOR.CHANNEL_MIX_LERP_K: "blk.{bid}.channel_mix_lerp_k",
    MODEL_TENSOR.CHANNEL_MIX_LERP_R: "blk.{bid}.channel_mix_lerp_r",
    MODEL_TENSOR.CHANNEL_MIX_KEY: "blk.{bid}.channel_mix_key",
    MODEL_TENSOR.CHANNEL_MIX_RECEPTANCE: "blk.{bid}.channel_mix_receptance",
    MODEL_TENSOR.CHANNEL_MIX_VALUE: "blk.{bid}.channel_mix_value",
    MODEL_TENSOR.ATTN_Q_A: "blk.{bid}.attn_q_a",
    MODEL_TENSOR.ATTN_Q_B: "blk.{bid}.attn_q_b",
    MODEL_TENSOR.ATTN_KV_A_MQA: "blk.{bid}.attn_kv_a_mqa",
    MODEL_TENSOR.ATTN_KV_B: "blk.{bid}.attn_kv_b",
    MODEL_TENSOR.ATTN_Q_A_NORM: "blk.{bid}.attn_q_a_norm",
    MODEL_TENSOR.ATTN_KV_A_NORM: "blk.{bid}.attn_kv_a_norm",
    MODEL_TENSOR.ATTN_SUB_NORM: "blk.{bid}.attn_sub_norm",
    MODEL_TENSOR.FFN_SUB_NORM: "blk.{bid}.ffn_sub_norm",
    MODEL_TENSOR.DEC_ATTN_NORM: "dec.blk.{bid}.attn_norm",
    MODEL_TENSOR.DEC_ATTN_Q: "dec.blk.{bid}.attn_q",
    MODEL_TENSOR.DEC_ATTN_K: "dec.blk.{bid}.attn_k",
    MODEL_TENSOR.DEC_ATTN_V: "dec.blk.{bid}.attn_v",
    MODEL_TENSOR.DEC_ATTN_OUT: "dec.blk.{bid}.attn_o",
    MODEL_TENSOR.DEC_ATTN_REL_B: "dec.blk.{bid}.attn_rel_b",
    MODEL_TENSOR.DEC_CROSS_ATTN_NORM: "dec.blk.{bid}.cross_attn_norm",
    MODEL_TENSOR.DEC_CROSS_ATTN_Q: "dec.blk.{bid}.cross_attn_q",
    MODEL_TENSOR.DEC_CROSS_ATTN_K: "dec.blk.{bid}.cross_attn_k",
    MODEL_TENSOR.DEC_CROSS_ATTN_V: "dec.blk.{bid}.cross_attn_v",
    MODEL_TENSOR.DEC_CROSS_ATTN_OUT: "dec.blk.{bid}.cross_attn_o",
    MODEL_TENSOR.DEC_CROSS_ATTN_REL_B: "dec.blk.{bid}.cross_attn_rel_b",
    MODEL_TENSOR.DEC_FFN_NORM: "dec.blk.{bid}.ffn_norm",
    MODEL_TENSOR.DEC_FFN_GATE: "dec.blk.{bid}.ffn_gate",
    MODEL_TENSOR.DEC_FFN_DOWN: "dec.blk.{bid}.ffn_down",
    MODEL_TENSOR.DEC_FFN_UP: "dec.blk.{bid}.ffn_up",
    MODEL_TENSOR.DEC_OUTPUT_NORM: "dec.output_norm",
    MODEL_TENSOR.ENC_ATTN_NORM: "enc.blk.{bid}.attn_norm",
    MODEL_TENSOR.ENC_ATTN_Q: "enc.blk.{bid}.attn_q",
    MODEL_TENSOR.ENC_ATTN_K: "enc.blk.{bid}.attn_k",
    MODEL_TENSOR.ENC_ATTN_V: "enc.blk.{bid}.attn_v",
    MODEL_TENSOR.ENC_ATTN_OUT: "enc.blk.{bid}.attn_o",
    MODEL_TENSOR.ENC_ATTN_REL_B: "enc.blk.{bid}.attn_rel_b",
    MODEL_TENSOR.ENC_FFN_NORM: "enc.blk.{bid}.ffn_norm",
    MODEL_TENSOR.ENC_FFN_GATE: "enc.blk.{bid}.ffn_gate",
    MODEL_TENSOR.ENC_FFN_DOWN: "enc.blk.{bid}.ffn_down",
    MODEL_TENSOR.ENC_FFN_UP: "enc.blk.{bid}.ffn_up",
    MODEL_TENSOR.ENC_OUTPUT_NORM: "enc.output_norm",
}
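
# Illustrative usage (a hypothetical helper, not part of the original gguf API): the
# per-block entries above carry a "{bid}" placeholder for the block index, so the name
# actually stored in a GGUF file is obtained with str.format.
def _example_tensor_name(tensor: MODEL_TENSOR, bid: int) -> str:
    # e.g. _example_tensor_name(MODEL_TENSOR.ATTN_Q, 3) -> "blk.3.attn_q"
    return TENSOR_NAMES[tensor].format(bid=bid)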

MODEL_TENSORS: dict[MODEL_ARCH, list[MODEL_TENSOR]] = {
    MODEL_ARCH.LLAMA: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_GATE_INP,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.FFN_GATE_EXP,
        MODEL_TENSOR.FFN_DOWN_EXP,
        MODEL_TENSOR.FFN_UP_EXP,
    ],
    MODEL_ARCH.GROK: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.ATTN_OUT_NORM,
        MODEL_TENSOR.FFN_GATE_INP,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.FFN_GATE_EXP,
        MODEL_TENSOR.FFN_DOWN_EXP,
        MODEL_TENSOR.FFN_UP_EXP,
        MODEL_TENSOR.LAYER_OUT_NORM,
    ],
    MODEL_ARCH.GPTNEOX: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.FALCON: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_NORM_2,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.BAICHUAN: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.STARCODER: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.POS_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.BERT: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.TOKEN_EMBD_NORM,
        MODEL_TENSOR.TOKEN_TYPES,
        MODEL_TENSOR.POS_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.ATTN_OUT_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.LAYER_OUT_NORM,
    ],
    MODEL_ARCH.NOMIC_BERT: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.TOKEN_EMBD_NORM,
        MODEL_TENSOR.TOKEN_TYPES,
        MODEL_TENSOR.POS_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.ATTN_OUT_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.LAYER_OUT_NORM,
    ],
    MODEL_ARCH.JINA_BERT_V2: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.TOKEN_EMBD_NORM,
        MODEL_TENSOR.TOKEN_TYPES,
        MODEL_TENSOR.ATTN_NORM_2,
        MODEL_TENSOR.ATTN_OUT_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_Q_NORM,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_K_NORM,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.LAYER_OUT_NORM,
    ],
    MODEL_ARCH.MPT: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.FFN_ACT,
        MODEL_TENSOR.ATTN_Q_NORM,
        MODEL_TENSOR.ATTN_K_NORM,
        MODEL_TENSOR.POS_EMBD,
    ],
    MODEL_ARCH.GPTJ: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.REFACT: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.BLOOM: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.TOKEN_EMBD_NORM,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.STABLELM: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.ATTN_Q_NORM,
        MODEL_TENSOR.ATTN_K_NORM,
    ],
    MODEL_ARCH.QWEN: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.QWEN2: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.QWEN2MOE: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE_INP,
        MODEL_TENSOR.FFN_GATE_EXP,
        MODEL_TENSOR.FFN_DOWN_EXP,
        MODEL_TENSOR.FFN_UP_EXP,
        MODEL_TENSOR.FFN_GATE_INP_SHEXP,
        MODEL_TENSOR.FFN_GATE_SHEXP,
        MODEL_TENSOR.FFN_DOWN_SHEXP,
        MODEL_TENSOR.FFN_UP_SHEXP,
    ],
    MODEL_ARCH.PLAMO: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.GPT2: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.POS_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.PHI2: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.PHI3: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.CODESHELL: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.POS_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.ORION: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.INTERNLM2: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.MINICPM: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_GATE_INP,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.FFN_GATE_EXP,
        MODEL_TENSOR.FFN_DOWN_EXP,
        MODEL_TENSOR.FFN_UP_EXP,
    ],
    MODEL_ARCH.MINICPM3: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q_A,
        MODEL_TENSOR.ATTN_Q_B,
        MODEL_TENSOR.ATTN_KV_A_MQA,
        MODEL_TENSOR.ATTN_KV_B,
        MODEL_TENSOR.ATTN_Q_A_NORM,
        MODEL_TENSOR.ATTN_KV_A_NORM,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.GEMMA: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.FFN_NORM,
    ],
    MODEL_ARCH.GEMMA2: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_POST_NORM,
        MODEL_TENSOR.FFN_PRE_NORM,
        MODEL_TENSOR.FFN_POST_NORM,
    ],
    MODEL_ARCH.STARCODER2: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.RWKV6: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.TOKEN_EMBD_NORM,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_NORM_2,
        MODEL_TENSOR.TIME_MIX_W1,
        MODEL_TENSOR.TIME_MIX_W2,
        MODEL_TENSOR.TIME_MIX_LERP_X,
        MODEL_TENSOR.TIME_MIX_LERP_K,
        MODEL_TENSOR.TIME_MIX_LERP_V,
        MODEL_TENSOR.TIME_MIX_LERP_R,
        MODEL_TENSOR.TIME_MIX_LERP_G,
        MODEL_TENSOR.TIME_MIX_LERP_W,
        MODEL_TENSOR.TIME_MIX_FIRST,
        MODEL_TENSOR.TIME_MIX_DECAY,
        MODEL_TENSOR.TIME_MIX_DECAY_W1,
        MODEL_TENSOR.TIME_MIX_DECAY_W2,
        MODEL_TENSOR.TIME_MIX_KEY,
        MODEL_TENSOR.TIME_MIX_VALUE,
        MODEL_TENSOR.TIME_MIX_RECEPTANCE,
        MODEL_TENSOR.TIME_MIX_GATE,
        MODEL_TENSOR.TIME_MIX_LN,
        MODEL_TENSOR.TIME_MIX_OUTPUT,
        MODEL_TENSOR.CHANNEL_MIX_LERP_K,
        MODEL_TENSOR.CHANNEL_MIX_LERP_R,
        MODEL_TENSOR.CHANNEL_MIX_KEY,
        MODEL_TENSOR.CHANNEL_MIX_RECEPTANCE,
        MODEL_TENSOR.CHANNEL_MIX_VALUE,
    ],
    MODEL_ARCH.MAMBA: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.SSM_IN,
        MODEL_TENSOR.SSM_CONV1D,
        MODEL_TENSOR.SSM_X,
        MODEL_TENSOR.SSM_DT,
        MODEL_TENSOR.SSM_A,
        MODEL_TENSOR.SSM_D,
        MODEL_TENSOR.SSM_OUT,
    ],
    MODEL_ARCH.XVERSE: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.COMMAND_R: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.ATTN_K_NORM,
        MODEL_TENSOR.ATTN_Q_NORM,
    ],
    MODEL_ARCH.DBRX: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_OUT_NORM,
        MODEL_TENSOR.FFN_GATE_INP,
        MODEL_TENSOR.FFN_GATE_EXP,
        MODEL_TENSOR.FFN_DOWN_EXP,
        MODEL_TENSOR.FFN_UP_EXP,
    ],
    MODEL_ARCH.OLMO: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.OLMOE: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q_NORM,
        MODEL_TENSOR.ATTN_K_NORM,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE_INP,
        MODEL_TENSOR.FFN_GATE_EXP,
        MODEL_TENSOR.FFN_UP_EXP,
        MODEL_TENSOR.FFN_DOWN_EXP,
    ],
    MODEL_ARCH.OPENELM: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_Q_NORM,
        MODEL_TENSOR.ATTN_K_NORM,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.ARCTIC: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_GATE_INP,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.FFN_NORM_EXP,
        MODEL_TENSOR.FFN_GATE_EXP,
        MODEL_TENSOR.FFN_DOWN_EXP,
        MODEL_TENSOR.FFN_UP_EXP,
    ],
    MODEL_ARCH.DEEPSEEK2: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_Q_A,
        MODEL_TENSOR.ATTN_Q_B,
        MODEL_TENSOR.ATTN_KV_A_MQA,
        MODEL_TENSOR.ATTN_KV_B,
        MODEL_TENSOR.ATTN_Q_A_NORM,
        MODEL_TENSOR.ATTN_KV_A_NORM,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_GATE_INP,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.FFN_GATE_EXP,
        MODEL_TENSOR.FFN_DOWN_EXP,
        MODEL_TENSOR.FFN_UP_EXP,
        MODEL_TENSOR.FFN_GATE_SHEXP,
        MODEL_TENSOR.FFN_DOWN_SHEXP,
        MODEL_TENSOR.FFN_UP_SHEXP,
    ],
    MODEL_ARCH.CHATGLM: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.BITNET: [
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
        MODEL_TENSOR.ATTN_SUB_NORM,
        MODEL_TENSOR.FFN_SUB_NORM,
    ],
    MODEL_ARCH.T5: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.DEC_ATTN_NORM,
        MODEL_TENSOR.DEC_ATTN_Q,
        MODEL_TENSOR.DEC_ATTN_K,
        MODEL_TENSOR.DEC_ATTN_V,
        MODEL_TENSOR.DEC_ATTN_OUT,
        MODEL_TENSOR.DEC_ATTN_REL_B,
        MODEL_TENSOR.DEC_CROSS_ATTN_NORM,
        MODEL_TENSOR.DEC_CROSS_ATTN_Q,
        MODEL_TENSOR.DEC_CROSS_ATTN_K,
        MODEL_TENSOR.DEC_CROSS_ATTN_V,
        MODEL_TENSOR.DEC_CROSS_ATTN_OUT,
        MODEL_TENSOR.DEC_CROSS_ATTN_REL_B,
        MODEL_TENSOR.DEC_FFN_NORM,
        MODEL_TENSOR.DEC_FFN_GATE,
        MODEL_TENSOR.DEC_FFN_DOWN,
        MODEL_TENSOR.DEC_FFN_UP,
        MODEL_TENSOR.DEC_OUTPUT_NORM,
        MODEL_TENSOR.ENC_ATTN_NORM,
        MODEL_TENSOR.ENC_ATTN_Q,
        MODEL_TENSOR.ENC_ATTN_K,
        MODEL_TENSOR.ENC_ATTN_V,
        MODEL_TENSOR.ENC_ATTN_OUT,
        MODEL_TENSOR.ENC_ATTN_REL_B,
        MODEL_TENSOR.ENC_FFN_NORM,
        MODEL_TENSOR.ENC_FFN_GATE,
        MODEL_TENSOR.ENC_FFN_DOWN,
        MODEL_TENSOR.ENC_FFN_UP,
        MODEL_TENSOR.ENC_OUTPUT_NORM,
    ],
    MODEL_ARCH.T5ENCODER: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ENC_ATTN_NORM,
        MODEL_TENSOR.ENC_ATTN_Q,
        MODEL_TENSOR.ENC_ATTN_K,
        MODEL_TENSOR.ENC_ATTN_V,
        MODEL_TENSOR.ENC_ATTN_OUT,
        MODEL_TENSOR.ENC_ATTN_REL_B,
        MODEL_TENSOR.ENC_FFN_NORM,
        MODEL_TENSOR.ENC_FFN_GATE,
        MODEL_TENSOR.ENC_FFN_DOWN,
        MODEL_TENSOR.ENC_FFN_UP,
        MODEL_TENSOR.ENC_OUTPUT_NORM,
    ],
    MODEL_ARCH.JAIS: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_QKV,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.NEMOTRON: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.EXAONE: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.OUTPUT,
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.ATTN_ROT_EMBD,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    MODEL_ARCH.GRANITE: [
        MODEL_TENSOR.TOKEN_EMBD,
        MODEL_TENSOR.OUTPUT_NORM,
        MODEL_TENSOR.ATTN_NORM,
        MODEL_TENSOR.ATTN_Q,
        MODEL_TENSOR.ATTN_K,
        MODEL_TENSOR.ATTN_V,
        MODEL_TENSOR.ATTN_OUT,
        MODEL_TENSOR.FFN_NORM,
        MODEL_TENSOR.FFN_GATE,
        MODEL_TENSOR.FFN_DOWN,
        MODEL_TENSOR.FFN_UP,
    ],
    # TODO
}
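
# Illustrative usage (a hypothetical helper, not part of the original gguf API):
# combining MODEL_TENSORS with TENSOR_NAMES yields the name templates expected for one
# architecture, which is roughly how converters enumerate the tensors to write.
def _example_arch_tensor_templates(arch: MODEL_ARCH) -> list[str]:
    # e.g. for MODEL_ARCH.GPTNEOX this includes "token_embd" and "blk.{bid}.attn_qkv"
    return [TENSOR_NAMES[t] for t in MODEL_TENSORS[arch]]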

# tensors that will not be serialized
MODEL_TENSOR_SKIP: dict[MODEL_ARCH, list[MODEL_TENSOR]] = {
    MODEL_ARCH.LLAMA: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.BAICHUAN: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.QWEN: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.CODESHELL: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.ORION: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.STARCODER2: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.XVERSE: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.DEEPSEEK2: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
    MODEL_ARCH.CHATGLM: [
        MODEL_TENSOR.ROPE_FREQS,
    ],
    MODEL_ARCH.NEMOTRON: [
        MODEL_TENSOR.ROPE_FREQS,
        MODEL_TENSOR.ATTN_ROT_EMBD,
    ],
}
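
# Illustrative check (a hypothetical helper, assuming the semantics stated in the
# comment above MODEL_TENSOR_SKIP): a tensor is written for an architecture unless it
# appears in that architecture's skip list.
def _example_should_serialize(arch: MODEL_ARCH, tensor: MODEL_TENSOR) -> bool:
    # e.g. _example_should_serialize(MODEL_ARCH.LLAMA, MODEL_TENSOR.ROPE_FREQS) -> False
    return tensor not in MODEL_TENSOR_SKIP.get(arch, [])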

#
# types
#


class TokenType(IntEnum):
    NORMAL = 1
    UNKNOWN = 2
    CONTROL = 3
    USER_DEFINED = 4
    UNUSED = 5
    BYTE = 6


class RopeScalingType(Enum):
    NONE = 'none'
    LINEAR = 'linear'
    YARN = 'yarn'


class PoolingType(IntEnum):
    NONE = 0
    MEAN = 1
    CLS = 2


class GGMLQuantizationType(IntEnum):
    F32 = 0
    F16 = 1
    Q4_0 = 2
    Q4_1 = 3
    Q5_0 = 6
    Q5_1 = 7
    Q8_0 = 8
    Q8_1 = 9
    Q2_K = 10
    Q3_K = 11
    Q4_K = 12
    Q5_K = 13
    Q6_K = 14
    Q8_K = 15
    IQ2_XXS = 16
    IQ2_XS = 17
    IQ3_XXS = 18
    IQ1_S = 19
    IQ4_NL = 20
    IQ3_S = 21
    IQ2_S = 22
    IQ4_XS = 23
    I8 = 24
    I16 = 25
    I32 = 26
    I64 = 27
    F64 = 28
    IQ1_M = 29
    BF16 = 30
    Q4_0_4_4 = 31
    Q4_0_4_8 = 32
    Q4_0_8_8 = 33
    TQ1_0 = 34
    TQ2_0 = 35


# TODO: add GGMLFileType from ggml_ftype in ggml.h


# from llama_ftype in llama.h
# ALL VALUES SHOULD BE THE SAME HERE AS THEY ARE OVER THERE.
class LlamaFileType(IntEnum):
    ALL_F32 = 0
    MOSTLY_F16 = 1  # except 1d tensors
    MOSTLY_Q4_0 = 2  # except 1d tensors
    MOSTLY_Q4_1 = 3  # except 1d tensors
    # MOSTLY_Q4_1_SOME_F16 = 4  # tok_embeddings.weight and output.weight are F16
    # MOSTLY_Q4_2 = 5  # support has been removed
    # MOSTLY_Q4_3 = 6  # support has been removed
    MOSTLY_Q8_0 = 7  # except 1d tensors
    MOSTLY_Q5_0 = 8  # except 1d tensors
    MOSTLY_Q5_1 = 9  # except 1d tensors
    MOSTLY_Q2_K = 10  # except 1d tensors
    MOSTLY_Q3_K_S = 11  # except 1d tensors
    MOSTLY_Q3_K_M = 12  # except 1d tensors
    MOSTLY_Q3_K_L = 13  # except 1d tensors
    MOSTLY_Q4_K_S = 14  # except 1d tensors
    MOSTLY_Q4_K_M = 15  # except 1d tensors
    MOSTLY_Q5_K_S = 16  # except 1d tensors
    MOSTLY_Q5_K_M = 17  # except 1d tensors
    MOSTLY_Q6_K = 18  # except 1d tensors
    MOSTLY_IQ2_XXS = 19  # except 1d tensors
    MOSTLY_IQ2_XS = 20  # except 1d tensors
    MOSTLY_Q2_K_S = 21  # except 1d tensors
    MOSTLY_IQ3_XS = 22  # except 1d tensors
    MOSTLY_IQ3_XXS = 23  # except 1d tensors
    MOSTLY_IQ1_S = 24  # except 1d tensors
    MOSTLY_IQ4_NL = 25  # except 1d tensors
    MOSTLY_IQ3_S = 26  # except 1d tensors
    MOSTLY_IQ3_M = 27  # except 1d tensors
    MOSTLY_IQ2_S = 28  # except 1d tensors
    MOSTLY_IQ2_M = 29  # except 1d tensors
    MOSTLY_IQ4_XS = 30  # except 1d tensors
    MOSTLY_IQ1_M = 31  # except 1d tensors
    MOSTLY_BF16 = 32  # except 1d tensors
    MOSTLY_Q4_0_4_4 = 33  # except 1d tensors
    MOSTLY_Q4_0_4_8 = 34  # except 1d tensors
    MOSTLY_Q4_0_8_8 = 35  # except 1d tensors
    MOSTLY_TQ1_0 = 36  # except 1d tensors
    MOSTLY_TQ2_0 = 37  # except 1d tensors

    GUESSED = 1024  # not specified in the model file


class GGUFEndian(IntEnum):
    LITTLE = 0
    BIG = 1


class GGUFValueType(IntEnum):
    UINT8 = 0
    INT8 = 1
    UINT16 = 2
    INT16 = 3
    UINT32 = 4
    INT32 = 5
    FLOAT32 = 6
    BOOL = 7
    STRING = 8
    ARRAY = 9
    UINT64 = 10
    INT64 = 11
    FLOAT64 = 12

    @staticmethod
    def get_type(val: Any) -> GGUFValueType:
        if isinstance(val, (str, bytes, bytearray)):
            return GGUFValueType.STRING
        elif isinstance(val, list):
            return GGUFValueType.ARRAY
        elif isinstance(val, float):
            return GGUFValueType.FLOAT32
        elif isinstance(val, bool):
            return GGUFValueType.BOOL
        elif isinstance(val, int):
            return GGUFValueType.INT32
        # TODO: need help with 64-bit types in Python
        else:
            raise ValueError(f"Unknown type: {type(val)}")
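
# Illustrative usage (a sketch added here, not from the original file): get_type() infers
# the GGUF value type for a plain Python value; as the TODO above notes, Python ints map
# to INT32 by default rather than a 64-bit type.
assert GGUFValueType.get_type("llama") is GGUFValueType.STRING
assert GGUFValueType.get_type(True) is GGUFValueType.BOOL
assert GGUFValueType.get_type(7) is GGUFValueType.INT32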

# Items here are (block size, type size)
QK_K = 256
GGML_QUANT_SIZES: dict[GGMLQuantizationType, tuple[int, int]] = {
    GGMLQuantizationType.F32: (1, 4),
    GGMLQuantizationType.F16: (1, 2),
    GGMLQuantizationType.Q4_0: (32, 2 + 16),
    GGMLQuantizationType.Q4_1: (32, 2 + 2 + 16),
    GGMLQuantizationType.Q5_0: (32, 2 + 4 + 16),
    GGMLQuantizationType.Q5_1: (32, 2 + 2 + 4 + 16),
    GGMLQuantizationType.Q8_0: (32, 2 + 32),
    GGMLQuantizationType.Q8_1: (32, 4 + 4 + 32),
    GGMLQuantizationType.Q2_K: (256, 2 + 2 + QK_K // 16 + QK_K // 4),
    GGMLQuantizationType.Q3_K: (256, 2 + QK_K // 4 + QK_K // 8 + 12),
    GGMLQuantizationType.Q4_K: (256, 2 + 2 + QK_K // 2 + 12),
    GGMLQuantizationType.Q5_K: (256, 2 + 2 + QK_K // 2 + QK_K // 8 + 12),
    GGMLQuantizationType.Q6_K: (256, 2 + QK_K // 2 + QK_K // 4 + QK_K // 16),
    GGMLQuantizationType.Q8_K: (256, 4 + QK_K + QK_K // 8),
    GGMLQuantizationType.IQ2_XXS: (256, 2 + QK_K // 4),
    GGMLQuantizationType.IQ2_XS: (256, 2 + QK_K // 4 + QK_K // 32),
    GGMLQuantizationType.IQ3_XXS: (256, 2 + QK_K // 4 + QK_K // 8),
    GGMLQuantizationType.IQ1_S: (256, 2 + QK_K // 8 + QK_K // 16),
    GGMLQuantizationType.IQ4_NL: (32, 2 + 16),
    GGMLQuantizationType.IQ3_S: (256, 2 + QK_K // 4 + QK_K // 8 + QK_K // 32 + 4),
    GGMLQuantizationType.IQ2_S: (256, 2 + QK_K // 4 + QK_K // 16),
    GGMLQuantizationType.IQ4_XS: (256, 2 + 2 + QK_K // 2 + QK_K // 64),
    GGMLQuantizationType.I8: (1, 1),
    GGMLQuantizationType.I16: (1, 2),
    GGMLQuantizationType.I32: (1, 4),
    GGMLQuantizationType.I64: (1, 8),
    GGMLQuantizationType.F64: (1, 8),
    GGMLQuantizationType.IQ1_M: (256, QK_K // 8 + QK_K // 16 + QK_K // 32),
    GGMLQuantizationType.BF16: (1, 2),
    GGMLQuantizationType.Q4_0_4_4: (32, 2 + 16),
    GGMLQuantizationType.Q4_0_4_8: (32, 2 + 16),
    GGMLQuantizationType.Q4_0_8_8: (32, 2 + 16),
    GGMLQuantizationType.TQ1_0: (256, 2 + 4 * 13),
    GGMLQuantizationType.TQ2_0: (256, 2 + 64),
}
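
# Illustrative helper (an assumption added for demonstration, not part of the original
# gguf API): given the (block size, type size) pairs above, the byte size of a quantized
# tensor whose element count is a multiple of the block size follows directly.
def _example_tensor_nbytes(qtype: GGMLQuantizationType, n_elements: int) -> int:
    block_size, type_size = GGML_QUANT_SIZES[qtype]
    assert n_elements % block_size == 0, "element count must be a multiple of the block size"
    return (n_elements // block_size) * type_size
# e.g. _example_tensor_nbytes(GGMLQuantizationType.Q4_0, 4096) -> 2304 (128 blocks of 18 bytes)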

# Aliases for backward compatibility.

# general
KEY_GENERAL_ARCHITECTURE = Keys.General.ARCHITECTURE
KEY_GENERAL_QUANTIZATION_VERSION = Keys.General.QUANTIZATION_VERSION
KEY_GENERAL_ALIGNMENT = Keys.General.ALIGNMENT
KEY_GENERAL_NAME = Keys.General.NAME
KEY_GENERAL_AUTHOR = Keys.General.AUTHOR
KEY_GENERAL_URL = Keys.General.URL
KEY_GENERAL_DESCRIPTION = Keys.General.DESCRIPTION
KEY_GENERAL_LICENSE = Keys.General.LICENSE
KEY_GENERAL_SOURCE_URL = Keys.General.SOURCE_URL
KEY_GENERAL_FILE_TYPE = Keys.General.FILE_TYPE

# LLM
KEY_VOCAB_SIZE = Keys.LLM.VOCAB_SIZE
KEY_CONTEXT_LENGTH = Keys.LLM.CONTEXT_LENGTH
KEY_EMBEDDING_LENGTH = Keys.LLM.EMBEDDING_LENGTH
KEY_BLOCK_COUNT = Keys.LLM.BLOCK_COUNT
KEY_FEED_FORWARD_LENGTH = Keys.LLM.FEED_FORWARD_LENGTH
KEY_USE_PARALLEL_RESIDUAL = Keys.LLM.USE_PARALLEL_RESIDUAL
KEY_TENSOR_DATA_LAYOUT = Keys.LLM.TENSOR_DATA_LAYOUT

# attention
KEY_ATTENTION_HEAD_COUNT = Keys.Attention.HEAD_COUNT
KEY_ATTENTION_HEAD_COUNT_KV = Keys.Attention.HEAD_COUNT_KV
KEY_ATTENTION_MAX_ALIBI_BIAS = Keys.Attention.MAX_ALIBI_BIAS
KEY_ATTENTION_CLAMP_KQV = Keys.Attention.CLAMP_KQV
KEY_ATTENTION_LAYERNORM_EPS = Keys.Attention.LAYERNORM_EPS
KEY_ATTENTION_LAYERNORM_RMS_EPS = Keys.Attention.LAYERNORM_RMS_EPS

# RoPE
KEY_ROPE_DIMENSION_COUNT = Keys.Rope.DIMENSION_COUNT
KEY_ROPE_FREQ_BASE = Keys.Rope.FREQ_BASE
KEY_ROPE_SCALING_TYPE = Keys.Rope.SCALING_TYPE
KEY_ROPE_SCALING_FACTOR = Keys.Rope.SCALING_FACTOR
KEY_ROPE_SCALING_ORIG_CTX_LEN = Keys.Rope.SCALING_ORIG_CTX_LEN
KEY_ROPE_SCALING_FINETUNED = Keys.Rope.SCALING_FINETUNED

# SSM
KEY_SSM_CONV_KERNEL = Keys.SSM.CONV_KERNEL
KEY_SSM_INNER_SIZE = Keys.SSM.INNER_SIZE
KEY_SSM_STATE_SIZE = Keys.SSM.STATE_SIZE
KEY_SSM_TIME_STEP_RANK = Keys.SSM.TIME_STEP_RANK
KEY_SSM_DT_B_C_RMS = Keys.SSM.DT_B_C_RMS

# tokenization
KEY_TOKENIZER_MODEL = Keys.Tokenizer.MODEL
KEY_TOKENIZER_PRE = Keys.Tokenizer.PRE
KEY_TOKENIZER_LIST = Keys.Tokenizer.LIST
KEY_TOKENIZER_TOKEN_TYPE = Keys.Tokenizer.TOKEN_TYPE
KEY_TOKENIZER_SCORES = Keys.Tokenizer.SCORES
KEY_TOKENIZER_MERGES = Keys.Tokenizer.MERGES
KEY_TOKENIZER_BOS_ID = Keys.Tokenizer.BOS_ID
KEY_TOKENIZER_EOS_ID = Keys.Tokenizer.EOS_ID
KEY_TOKENIZER_UNK_ID = Keys.Tokenizer.UNK_ID
KEY_TOKENIZER_SEP_ID = Keys.Tokenizer.SEP_ID
KEY_TOKENIZER_PAD_ID = Keys.Tokenizer.PAD_ID
KEY_TOKENIZER_CLS_ID = Keys.Tokenizer.CLS_ID
KEY_TOKENIZER_MASK_ID = Keys.Tokenizer.MASK_ID
KEY_TOKENIZER_HF_JSON = Keys.Tokenizer.HF_JSON
KEY_TOKENIZER_RWKV = Keys.Tokenizer.RWKV
KEY_TOKENIZER_PRIFIX_ID = Keys.Tokenizer.PREFIX_ID
KEY_TOKENIZER_SUFFIX_ID = Keys.Tokenizer.SUFFIX_ID
KEY_TOKENIZER_MIDDLE_ID = Keys.Tokenizer.MIDDLE_ID
KEY_TOKENIZER_EOT_ID = Keys.Tokenizer.EOT_ID
KEY_TOKENIZER_EOM_ID = Keys.Tokenizer.EOM_ID