CMakeLists.txt

set(TARGET llama-server)

include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR})

if (MINGW)
    # fix: https://github.com/ggml-org/llama.cpp/actions/runs/9651004652/job/26617901362?pr=8006
    add_compile_definitions(_WIN32_WINNT=${GGML_WIN_VER})
endif()

if (NOT LLAMA_HTTPLIB)
    message(FATAL_ERROR "LLAMA_HTTPLIB is OFF, cannot build llama-server. Hint: to skip building server, set -DLLAMA_BUILD_SERVER=OFF")
endif()
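
# Server sources; the generated asset headers are appended to this list below.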
set(TARGET_SRCS
    server.cpp
    utils.hpp
)

set(PUBLIC_ASSETS
    index.html.gz
    loading.html
)
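
# Convert each asset under public/ into a generated C++ header in the build
# directory (via scripts/xxd.cmake) and add it to the target sources, so the
# web assets are compiled directly into the llama-server binary.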
foreach(asset ${PUBLIC_ASSETS})
    set(input "${CMAKE_CURRENT_SOURCE_DIR}/public/${asset}")
    set(output "${CMAKE_CURRENT_BINARY_DIR}/${asset}.hpp")
    list(APPEND TARGET_SRCS ${output})
    add_custom_command(
        DEPENDS "${input}"
        OUTPUT "${output}"
        COMMAND "${CMAKE_COMMAND}" "-DINPUT=${input}" "-DOUTPUT=${output}" -P "${PROJECT_SOURCE_DIR}/scripts/xxd.cmake"
    )
    set_source_files_properties(${output} PROPERTIES GENERATED TRUE)
endforeach()

add_executable(${TARGET} ${TARGET_SRCS})
install(TARGETS ${TARGET} RUNTIME)

target_include_directories(${TARGET} PRIVATE ../mtmd)
target_include_directories(${TARGET} PRIVATE ${CMAKE_SOURCE_DIR})
target_link_libraries(${TARGET} PRIVATE common mtmd cpp-httplib ${CMAKE_THREAD_LIBS_INIT})
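
# ws2_32 provides the Winsock API needed for socket support on Windows.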
if (WIN32)
    target_link_libraries(${TARGET} PRIVATE ws2_32)
endif()

target_compile_features(${TARGET} PRIVATE cxx_std_17)
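
# Example usage (illustrative; assumes this directory is included from the
# top-level llama.cpp CMakeLists.txt and the default build directory "build"):
#   cmake -B build
#   cmake --build build --target llama-server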