# CMakeLists.txt — build configuration for the llama-server example binary
  1. set(TARGET llama-server)
  2. include_directories(${CMAKE_CURRENT_SOURCE_DIR} ${CMAKE_CURRENT_BINARY_DIR})
  3. if (MINGW)
  4. # fix: https://github.com/ggml-org/llama.cpp/actions/runs/9651004652/job/26617901362?pr=8006
  5. add_compile_definitions(_WIN32_WINNT=${GGML_WIN_VER})
  6. endif()
  7. if (NOT LLAMA_HTTPLIB)
  8. message(FATAL_ERROR "LLAMA_HTTPLIB is OFF, cannot build llama-server. Hint: to skip building server, set -DLLAMA_BUILD_SERVER=OFF")
  9. endif()
  10. set(TARGET_SRCS
  11. server.cpp
  12. utils.hpp
  13. server-http.cpp
  14. server-http.h
  15. )
  16. set(PUBLIC_ASSETS
  17. index.html.gz
  18. loading.html
  19. )
  20. foreach(asset ${PUBLIC_ASSETS})
  21. set(input "${CMAKE_CURRENT_SOURCE_DIR}/public/${asset}")
  22. set(output "${CMAKE_CURRENT_BINARY_DIR}/${asset}.hpp")
  23. list(APPEND TARGET_SRCS ${output})
  24. add_custom_command(
  25. DEPENDS "${input}"
  26. OUTPUT "${output}"
  27. COMMAND "${CMAKE_COMMAND}" "-DINPUT=${input}" "-DOUTPUT=${output}" -P "${PROJECT_SOURCE_DIR}/scripts/xxd.cmake"
  28. )
  29. set_source_files_properties(${output} PROPERTIES GENERATED TRUE)
  30. endforeach()
  31. add_executable(${TARGET} ${TARGET_SRCS})
  32. install(TARGETS ${TARGET} RUNTIME)
  33. target_include_directories(${TARGET} PRIVATE ../mtmd)
  34. target_include_directories(${TARGET} PRIVATE ${CMAKE_SOURCE_DIR})
  35. target_link_libraries(${TARGET} PRIVATE common mtmd cpp-httplib ${CMAKE_THREAD_LIBS_INIT})
  36. if (WIN32)
  37. TARGET_LINK_LIBRARIES(${TARGET} PRIVATE ws2_32)
  38. endif()
  39. target_compile_features(${TARGET} PRIVATE cxx_std_17)