#!/usr/bin/env python3
from __future__ import annotations

import logging
import argparse
import os
import sys
import json
from pathlib import Path

from tqdm import tqdm
from typing import Any, Sequence, NamedTuple

# Necessary to load the local gguf package
if "NO_LOCAL_GGUF" not in os.environ and (Path(__file__).parent.parent.parent.parent / 'gguf-py').exists():
    sys.path.insert(0, str(Path(__file__).parent.parent.parent))

import gguf

logger = logging.getLogger("gguf-new-metadata")
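
# This script makes a copy of a GGUF model file while replacing, adding or
# removing metadata key-value pairs; tensor data is passed through unchanged.
# Example invocation (file names and the token value are illustrative only):
#   python gguf_new_metadata.py input.gguf output.gguf \
#       --general-name "My Model" --special-token eos "<|im_end|>"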


class MetadataDetails(NamedTuple):
    type: gguf.GGUFValueType
    value: Any
    description: str = ''
    sub_type: gguf.GGUFValueType | None = None


def get_field_data(reader: gguf.GGUFReader, key: str) -> Any:
    field = reader.get_field(key)

    return field.contents() if field else None


def find_token(token_list: Sequence[str], token: str) -> Sequence[int]:
    # Return the IDs of every occurrence of `token` in the token list
    token_ids = [index for index, value in enumerate(token_list) if value == token]

    if len(token_ids) == 0:
        raise LookupError(f'Unable to find "{token}" in token list!')

    return token_ids
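
# Example: with token_list == ['<unk>', '<s>', '</s>', '</s>'],
# find_token(token_list, '</s>') returns [2, 3].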


def copy_with_new_metadata(reader: gguf.GGUFReader, writer: gguf.GGUFWriter, new_metadata: dict[str, MetadataDetails], remove_metadata: Sequence[str]) -> None:
    for field in reader.fields.values():
        # Suppress virtual fields and fields written by GGUFWriter
        if field.name == gguf.Keys.General.ARCHITECTURE or field.name.startswith('GGUF.'):
            logger.debug(f'Suppressing {field.name}')
            continue

        # Skip old chat templates if we have new ones
        if field.name.startswith(gguf.Keys.Tokenizer.CHAT_TEMPLATE) and gguf.Keys.Tokenizer.CHAT_TEMPLATE in new_metadata:
            logger.debug(f'Skipping {field.name}')
            continue

        if field.name in remove_metadata:
            logger.debug(f'Removing {field.name}')
            continue

        val_type = field.types[0]
        sub_type = field.types[-1] if val_type == gguf.GGUFValueType.ARRAY else None

        old_val = MetadataDetails(val_type, field.contents(), sub_type=sub_type)
        val = new_metadata.get(field.name, old_val)

        if field.name in new_metadata:
            logger.debug(f'Modifying {field.name}: "{old_val.value}" -> "{val.value}" {val.description}')
            del new_metadata[field.name]
        elif val.value is not None:
            logger.debug(f'Copying {field.name}')

        if val.value is not None:
            writer.add_key_value(field.name, val.value, val.type, sub_type=sub_type if val.sub_type is None else val.sub_type)

    if gguf.Keys.Tokenizer.CHAT_TEMPLATE in new_metadata:
        logger.debug('Adding chat template(s)')
        writer.add_chat_template(new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE].value)
        del new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE]

    # Any remaining new keys were not present in the input file, add them fresh
    for key, val in new_metadata.items():
        logger.debug(f'Adding {key}: "{val.value}" {val.description}')
        writer.add_key_value(key, val.value, val.type)

    total_bytes = 0

    for tensor in reader.tensors:
        total_bytes += tensor.n_bytes
        writer.add_tensor_info(tensor.name, tensor.data.shape, tensor.data.dtype, tensor.data.nbytes, tensor.tensor_type)

    bar = tqdm(desc="Writing", total=total_bytes, unit="byte", unit_scale=True)

    # A GGUF file is written in order: header, KV metadata, tensor info, tensor data
    writer.write_header_to_file()
    writer.write_kv_data_to_file()
    writer.write_ti_data_to_file()

    for tensor in reader.tensors:
        writer.write_tensor_data(tensor.data, tensor_endianess=reader.endianess)
        bar.update(tensor.n_bytes)

    writer.close()


def main() -> None:
    tokenizer_metadata = (getattr(gguf.Keys.Tokenizer, n) for n in gguf.Keys.Tokenizer.__dict__.keys() if not n.startswith('_'))
    token_names = dict((n.split('.')[-1][:-len('_token_id')], n) for n in tokenizer_metadata if n.endswith('_token_id'))
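    # token_names maps short names to their full metadata keys, typically e.g.
    # {'bos': 'tokenizer.ggml.bos_token_id', 'eos': 'tokenizer.ggml.eos_token_id', ...}
    # (the exact set depends on the gguf package version in use)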

    parser = argparse.ArgumentParser(description="Make a copy of a GGUF file with new metadata")
    parser.add_argument("input", type=Path, help="GGUF format model input filename")
    parser.add_argument("output", type=Path, help="GGUF format model output filename")
    parser.add_argument("--general-name", type=str, help="The model's general.name", metavar='"name"')
    parser.add_argument("--general-description", type=str, help="The model's general.description", metavar='"Description ..."')
    parser.add_argument("--chat-template", type=str, help="Chat template string (or JSON string containing templates)", metavar='"{% ... %} ..."')
    parser.add_argument("--chat-template-config", type=Path, help="Config file containing chat template(s)", metavar='tokenizer_config.json')
    parser.add_argument("--chat-template-file", type=Path, help="Jinja file containing chat template", metavar='chat_template.jinja')
    parser.add_argument("--pre-tokenizer", type=str, help="The model's tokenizer.ggml.pre", metavar='"pre tokenizer"')
    parser.add_argument("--remove-metadata", action="append", type=str, help="Remove metadata (by key name) from output model", metavar='general.url')
    parser.add_argument("--special-token", action="append", type=str, help="Special token by value", nargs=2, metavar=(' | '.join(token_names.keys()), '"<token>"'))
    parser.add_argument("--special-token-by-id", action="append", type=str, help="Special token by id", nargs=2, metavar=(' | '.join(token_names.keys()), '0'))
    parser.add_argument("--force", action="store_true", help="Bypass warnings without confirmation")
    parser.add_argument("--verbose", action="store_true", help="Increase output verbosity")

    args = parser.parse_args(None if len(sys.argv) > 2 else ["--help"])

    logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO)

    new_metadata = {}
    remove_metadata = args.remove_metadata or []

    if args.general_name:
        new_metadata[gguf.Keys.General.NAME] = MetadataDetails(gguf.GGUFValueType.STRING, args.general_name)

    if args.general_description:
        new_metadata[gguf.Keys.General.DESCRIPTION] = MetadataDetails(gguf.GGUFValueType.STRING, args.general_description)

    if args.chat_template:
        new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE] = MetadataDetails(gguf.GGUFValueType.STRING, json.loads(args.chat_template) if args.chat_template.startswith('[') else args.chat_template)

    if args.chat_template_config:
        with open(args.chat_template_config, 'r', encoding='utf-8') as fp:
            config = json.load(fp)
            template = config.get('chat_template')
            if template:
                new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE] = MetadataDetails(gguf.GGUFValueType.STRING, template)

    if args.chat_template_file:
        with open(args.chat_template_file, 'r', encoding='utf-8') as fp:
            template = fp.read()
            new_metadata[gguf.Keys.Tokenizer.CHAT_TEMPLATE] = MetadataDetails(gguf.GGUFValueType.STRING, template)

    if args.pre_tokenizer:
        new_metadata[gguf.Keys.Tokenizer.PRE] = MetadataDetails(gguf.GGUFValueType.STRING, args.pre_tokenizer)

    if remove_metadata:
        logger.warning('*** Warning *** Warning *** Warning ***')
        logger.warning('* Most metadata is required for a fully functional GGUF file,')
        logger.warning('* removing crucial metadata may result in a corrupt output file!')
        if not args.force:
            logger.warning('* Enter exactly YES if you are positive you want to proceed:')
            response = input('YES, I am sure> ')
            if response != 'YES':
                logger.info("You didn't enter YES. Okay then, see ya!")
                sys.exit(0)

    logger.info(f'* Loading: {args.input}')
    reader = gguf.GGUFReader(args.input, 'r')

    arch = get_field_data(reader, gguf.Keys.General.ARCHITECTURE)
    token_list = get_field_data(reader, gguf.Keys.Tokenizer.LIST) or []

    for name, token in args.special_token or []:
        if name not in token_names:
            logger.warning(f'Unknown special token "{name}", ignoring...')
        else:
            ids = find_token(token_list, token)
            new_metadata[token_names[name]] = MetadataDetails(gguf.GGUFValueType.UINT32, ids[0], f'= {token}')

            if len(ids) > 1:
                logger.warning(f'Multiple "{token}" tokens found, choosing ID {ids[0]}, use --special-token-by-id if you want another:')
                logger.warning(', '.join(str(i) for i in ids))

    for name, id_string in args.special_token_by_id or []:
        if name not in token_names:
            logger.warning(f'Unknown special token "{name}", ignoring...')
        elif not id_string.isdecimal():
            raise LookupError(f'Token ID "{id_string}" is not a valid ID!')
        else:
            id_int = int(id_string)

            if 0 <= id_int < len(token_list):
                new_metadata[token_names[name]] = MetadataDetails(gguf.GGUFValueType.UINT32, id_int, f'= {token_list[id_int]}')
            else:
                raise LookupError(f'Token ID {id_int} is not within token list!')

    if os.path.isfile(args.output) and not args.force:
        logger.warning('*** Warning *** Warning *** Warning ***')
        logger.warning(f'* The "{args.output}" GGUF file already exists, it will be overwritten!')
        logger.warning('* Enter exactly YES if you are positive you want to proceed:')
        response = input('YES, I am sure> ')
        if response != 'YES':
            logger.info("You didn't enter YES. Okay then, see ya!")
            sys.exit(0)

    logger.info(f'* Writing: {args.output}')
    writer = gguf.GGUFWriter(args.output, arch=arch, endianess=reader.endianess)

    alignment = get_field_data(reader, gguf.Keys.General.ALIGNMENT)
    if alignment is not None:
        logger.debug(f'Setting custom alignment: {alignment}')
        writer.data_alignment = alignment

    copy_with_new_metadata(reader, writer, new_metadata, remove_metadata)


if __name__ == '__main__':
    main()