
model : add text-only support for Kimi-VL (and find special tokens in text_config) (#15051)

* basic kimi-vl textmodel conversion

* check config["text_config"] for special tokens
Gabriel Larson 5 months ago
Parent
Commit 83bc2f288c
2 changed files with 13 additions and 1 deletion
  1. convert_hf_to_gguf.py  +8 -0
  2. gguf-py/gguf/vocab.py  +5 -1

+ 8 - 0
convert_hf_to_gguf.py

@@ -6059,6 +6059,7 @@ class DeepseekModel(TextModel):
 
 @ModelBase.register("DeepseekV2ForCausalLM")
 @ModelBase.register("DeepseekV3ForCausalLM")
+@ModelBase.register("KimiVLForConditionalGeneration")
 class DeepseekV2Model(TextModel):
     model_arch = gguf.MODEL_ARCH.DEEPSEEK2
 
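The first hunk only adds a registration line: Kimi-VL's HF architecture string is mapped to the existing DeepSeek-V2 text-model converter, so its text weights go through the DEEPSEEK2 conversion path. Below is a rough, self-contained sketch of the decorator-registry pattern this relies on; the class layout, the _registry dict, and the from_architecture helper are assumptions for illustration, not the converter's actual internals.

# Illustrative sketch of a decorator-based architecture registry.
# Names like _registry and from_architecture are assumptions, not the
# real internals of ModelBase in convert_hf_to_gguf.py.
from __future__ import annotations


class ModelBase:
    _registry: dict[str, type[ModelBase]] = {}

    @classmethod
    def register(cls, *names: str):
        # decorator: map each architecture name to the decorated converter class
        def wrapper(model_cls: type[ModelBase]) -> type[ModelBase]:
            for name in names:
                cls._registry[name] = model_cls
            return model_cls
        return wrapper

    @classmethod
    def from_architecture(cls, arch: str) -> type[ModelBase]:
        # look up the converter class registered for an HF architecture string
        return cls._registry[arch]


class TextModel(ModelBase):
    pass


@ModelBase.register("DeepseekV2ForCausalLM")
@ModelBase.register("DeepseekV3ForCausalLM")
@ModelBase.register("KimiVLForConditionalGeneration")
class DeepseekV2Model(TextModel):
    pass


# all three architecture names resolve to the same converter class
assert ModelBase.from_architecture("KimiVLForConditionalGeneration") is DeepseekV2Model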
@@ -6161,6 +6162,13 @@ class DeepseekV2Model(TextModel):
     _experts: list[dict[str, Tensor]] | None = None
 
     def modify_tensors(self, data_torch: Tensor, name: str, bid: int | None) -> Iterable[tuple[str, Tensor]]:
+        # skip vision tensors and remove "language_model." for Kimi-VL
+        if "vision_tower" in name or "multi_modal_projector" in name:
+            return []
+
+        if name.startswith("language_model."):
+            name = name.replace("language_model.", "")
+
         # rename e_score_correction_bias tensors
         if name.endswith("e_score_correction_bias"):
             name = name.replace("e_score_correction_bias", "e_score_correction.bias")
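Taken on their own, the added lines do two things for Kimi-VL checkpoints: tensors belonging to the vision tower and the multimodal projector are dropped, and the language_model. prefix is stripped so the remaining names match what the DeepSeek-V2 tensor mapping already expects. A standalone sketch of that behaviour; the example tensor names are hypothetical:

# Standalone sketch of the name handling added above; the tensor names
# below are hypothetical examples of a Kimi-VL checkpoint layout.
def filter_and_rename(name: str) -> list[str]:
    # skip vision tensors and remove "language_model." for Kimi-VL
    if "vision_tower" in name or "multi_modal_projector" in name:
        return []
    if name.startswith("language_model."):
        name = name.replace("language_model.", "")
    return [name]


examples = [
    "vision_tower.blocks.0.attn.qkv.weight",                    # dropped
    "multi_modal_projector.linear_1.weight",                    # dropped
    "language_model.model.layers.0.self_attn.q_proj.weight",    # prefix stripped
    "language_model.lm_head.weight",                            # prefix stripped
]
for n in examples:
    print(f"{n} -> {filter_and_rename(n)}")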

+ 5 - 1
gguf-py/gguf/vocab.py

@@ -312,7 +312,11 @@ class SpecialVocab:
         with open(config_file, encoding = 'utf-8') as f:
             config = json.load(f)
         for typ in self.special_token_types:
-            self._set_special_token(typ, config.get(f'{typ}_token_id'))
+            token_id = config.get(f'{typ}_token_id')
+            # If not found at root, check in text_config (for multimodal models like Kimi-VL)
+            if token_id is None and 'text_config' in config:
+                token_id = config['text_config'].get(f'{typ}_token_id')
+            self._set_special_token(typ, token_id)
         return True
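The effect of the vocab.py change, shown on a toy config: a multimodal config.json like Kimi-VL's keeps the token IDs under text_config rather than at the root, so the fallback lookup now finds them. The token types, config contents, and IDs below are made up for the example.

# Toy demonstration of the special-token lookup with the text_config fallback.
# The config dict and token IDs are invented for illustration.
special_token_types = ("bos", "eos", "pad")

config = {
    "model_type": "kimi_vl",
    "text_config": {
        "bos_token_id": 1,
        "eos_token_id": 2,
    },
}

for typ in special_token_types:
    token_id = config.get(f"{typ}_token_id")
    # not at the root -> fall back to text_config, as in the patch above
    if token_id is None and "text_config" in config:
        token_id = config["text_config"].get(f"{typ}_token_id")
    print(typ, "->", token_id)
# prints: bos -> 1, eos -> 2, pad -> None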