llama_add_compile_flags()
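
# Registers a ctest for an already-built executable target.
# Optional args:
# - NAME: name of the test (defaults to the target name)
# - LABEL: label for the test (defaults to main)
# - ARGS: arguments to pass to the test executable
# - WORKING_DIRECTORY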
function(llama_test target)
    include(CMakeParseArguments)

    set(options)
    set(oneValueArgs NAME LABEL WORKING_DIRECTORY)
    set(multiValueArgs ARGS)

    cmake_parse_arguments(LLAMA_TEST "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})

    if (NOT DEFINED LLAMA_TEST_LABEL)
        set(LLAMA_TEST_LABEL "main")
    endif()

    if (NOT DEFINED LLAMA_TEST_WORKING_DIRECTORY)
        set(LLAMA_TEST_WORKING_DIRECTORY .)
    endif()

    if (DEFINED LLAMA_TEST_NAME)
        set(TEST_NAME ${LLAMA_TEST_NAME})
    else()
        set(TEST_NAME ${target})
    endif()

    set(TEST_TARGET ${target})

    add_test(
        NAME ${TEST_NAME}
        WORKING_DIRECTORY ${LLAMA_TEST_WORKING_DIRECTORY}
        COMMAND $<TARGET_FILE:${TEST_TARGET}>
        ${LLAMA_TEST_ARGS})

    set_property(TEST ${TEST_NAME} PROPERTY LABELS ${LLAMA_TEST_LABEL})
endfunction()
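
# Example invocation (mirrors the tokenizer tests registered below):
#   llama_test(test-tokenizer-0 NAME test-tokenizer-0-llama-spm ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama-spm.gguf)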

# Builds and runs a test source file.
# Optional args:
# - NAME: name of the executable & test target (defaults to the source file name without extension)
# - LABEL: label for the test (defaults to main)
# - ARGS: arguments to pass to the test executable
# - WORKING_DIRECTORY
function(llama_target_and_test source)
    include(CMakeParseArguments)

    set(options)
    set(oneValueArgs NAME LABEL WORKING_DIRECTORY)
    set(multiValueArgs ARGS)

    cmake_parse_arguments(LLAMA_TEST "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})

    if (NOT DEFINED LLAMA_TEST_LABEL)
        set(LLAMA_TEST_LABEL "main")
    endif()

    if (NOT DEFINED LLAMA_TEST_WORKING_DIRECTORY)
        set(LLAMA_TEST_WORKING_DIRECTORY .)
    endif()

    if (DEFINED LLAMA_TEST_NAME)
        set(TEST_TARGET ${LLAMA_TEST_NAME})
    else()
        get_filename_component(TEST_TARGET ${source} NAME_WE)
    endif()

    add_executable(${TEST_TARGET} ${source} get-model.cpp)
    install(TARGETS ${TEST_TARGET} RUNTIME)
    target_link_libraries(${TEST_TARGET} PRIVATE common)

    add_test(
        NAME ${TEST_TARGET}
        WORKING_DIRECTORY ${LLAMA_TEST_WORKING_DIRECTORY}
        COMMAND $<TARGET_FILE:${TEST_TARGET}>
        ${LLAMA_TEST_ARGS})

    set_property(TEST ${TEST_TARGET} PROPERTY LABELS ${LLAMA_TEST_LABEL})
endfunction()
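
# Example invocations (mirrors the tests registered below):
#   llama_target_and_test(test-sampling.cpp)
#   llama_target_and_test(test-model-load-cancel.cpp LABEL "model")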

# build test-tokenizer-0 target once and add many tests
add_executable(test-tokenizer-0 test-tokenizer-0.cpp)
target_link_libraries(test-tokenizer-0 PRIVATE common)
install(TARGETS test-tokenizer-0 RUNTIME)

llama_test(test-tokenizer-0 NAME test-tokenizer-0-bert-bge ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-bert-bge.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-command-r ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-command-r.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-deepseek-coder ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-deepseek-coder.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-deepseek-llm ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-deepseek-llm.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-falcon ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-falcon.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-gpt-2 ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-gpt-2.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-llama-bpe ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama-bpe.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-llama-spm ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama-spm.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-mpt ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-mpt.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-phi-3 ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-phi-3.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-qwen2 ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-qwen2.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-refact ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-refact.gguf)
llama_test(test-tokenizer-0 NAME test-tokenizer-0-starcoder ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-starcoder.gguf)

if (LLAMA_LLGUIDANCE)
    llama_target_and_test(test-grammar-llguidance.cpp ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama-bpe.gguf)
endif ()

if (NOT WIN32)
    # these tests are disabled on Windows because they use internal functions not exported with LLAMA_API
    llama_target_and_test(test-sampling.cpp)
    llama_target_and_test(test-grammar-parser.cpp)
    llama_target_and_test(test-grammar-integration.cpp)
    llama_target_and_test(test-llama-grammar.cpp)
    llama_target_and_test(test-chat.cpp)

    # TODO: disabled on loongarch64 because the ggml-ci node lacks Python 3.8
    if (NOT ${CMAKE_SYSTEM_PROCESSOR} MATCHES "loongarch64")
        llama_target_and_test(test-json-schema-to-grammar.cpp WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/..)
        target_include_directories(test-json-schema-to-grammar PRIVATE ${CMAKE_CURRENT_SOURCE_DIR}/../examples/server)
    endif()

    # build test-tokenizer-1-bpe target once and add many tests
    add_executable(test-tokenizer-1-bpe test-tokenizer-1-bpe.cpp)
    target_link_libraries(test-tokenizer-1-bpe PRIVATE common)
    install(TARGETS test-tokenizer-1-bpe RUNTIME)

    # TODO: disabled due to slowness
    #llama_test(test-tokenizer-1-bpe NAME test-tokenizer-1-aquila ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-aquila.gguf)
    #llama_test(test-tokenizer-1-bpe NAME test-tokenizer-1-falcon ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-falcon.gguf)
    #llama_test(test-tokenizer-1-bpe NAME test-tokenizer-1-gpt-2 ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-gpt-2.gguf)
    #llama_test(test-tokenizer-1-bpe NAME test-tokenizer-1-gpt-neox ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-gpt-neox.gguf)
    #llama_test(test-tokenizer-1-bpe NAME test-tokenizer-1-llama-bpe ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama-bpe.gguf --ignore-merges)
    #llama_test(test-tokenizer-1-bpe NAME test-tokenizer-1-mpt ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-mpt.gguf)
    #llama_test(test-tokenizer-1-bpe NAME test-tokenizer-1-refact ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-refact.gguf)
    #llama_test(test-tokenizer-1-bpe NAME test-tokenizer-1-starcoder ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-starcoder.gguf)

    # build test-tokenizer-1-spm target once and add many tests
    add_executable(test-tokenizer-1-spm test-tokenizer-1-spm.cpp)
    target_link_libraries(test-tokenizer-1-spm PRIVATE common)
    install(TARGETS test-tokenizer-1-spm RUNTIME)

    llama_test(test-tokenizer-1-spm NAME test-tokenizer-1-llama-spm ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-llama-spm.gguf)
    #llama_test(test-tokenizer-1-spm NAME test-tokenizer-1-baichuan ARGS ${CMAKE_CURRENT_SOURCE_DIR}/../models/ggml-vocab-baichuan.gguf)

    # llama_target_and_test(test-double-float.cpp) # SLOW
endif()

llama_target_and_test(test-log.cpp)
llama_target_and_test(test-arg-parser.cpp)
llama_target_and_test(test-chat-template.cpp)

# llama_target_and_test(test-opt.cpp) # SLOW
llama_target_and_test(test-gguf.cpp)
llama_target_and_test(test-backend-ops.cpp)

llama_target_and_test(test-model-load-cancel.cpp LABEL "model")
llama_target_and_test(test-autorelease.cpp LABEL "model")

if (NOT GGML_BACKEND_DL)
    # these tests use the backends directly and cannot be built with dynamic loading
    llama_target_and_test(test-barrier.cpp)
    llama_target_and_test(test-quantize-fns.cpp)
    llama_target_and_test(test-quantize-perf.cpp)
    llama_target_and_test(test-rope.cpp)
endif()

# dummy executable - not installed
get_filename_component(TEST_TARGET test-c.c NAME_WE)
add_executable(${TEST_TARGET} test-c.c)
target_link_libraries(${TEST_TARGET} PRIVATE llama)