# Build the llama-server executable: HTTP server sources plus generated
# headers that embed the web UI assets, linked against common/mtmd/httplib.
set(TARGET llama-server)

if (MINGW)
    # fix: https://github.com/ggml-org/llama.cpp/actions/runs/9651004652/job/26617901362?pr=8006
    set(LLAMA_SERVER_MINGW_WINNT_DEF _WIN32_WINNT=${GGML_WIN_VER})
endif()

if (NOT LLAMA_HTTPLIB)
    message(FATAL_ERROR "LLAMA_HTTPLIB is OFF, cannot build llama-server. Hint: to skip building server, set -DLLAMA_BUILD_SERVER=OFF")
endif()

set(TARGET_SRCS
    server.cpp
    server-http.cpp
    server-http.h
    server-task.cpp
    server-task.h
    server-queue.cpp
    server-queue.h
    server-common.cpp
    server-common.h
    server-context.cpp
    server-context.h
)

# Static web assets served by the binary; each one is converted into a
# generated .hpp (byte-array) in the binary dir so the server is self-contained.
set(PUBLIC_ASSETS
    index.html.gz
    loading.html
)

foreach(asset ${PUBLIC_ASSETS})
    set(input  "${CMAKE_CURRENT_SOURCE_DIR}/public/${asset}")
    set(output "${CMAKE_CURRENT_BINARY_DIR}/${asset}.hpp")
    list(APPEND TARGET_SRCS ${output})
    add_custom_command(
        DEPENDS "${input}"
        OUTPUT  "${output}"
        COMMAND "${CMAKE_COMMAND}" "-DINPUT=${input}" "-DOUTPUT=${output}" -P "${PROJECT_SOURCE_DIR}/scripts/xxd.cmake"
        VERBATIM  # platform-independent argument escaping
    )
    set_source_files_properties(${output} PROPERTIES GENERATED TRUE)
endforeach()

add_executable(${TARGET} ${TARGET_SRCS})
install(TARGETS ${TARGET} RUNTIME)

if (MINGW)
    # target-scoped replacement for the former directory-scoped add_compile_definitions
    target_compile_definitions(${TARGET} PRIVATE ${LLAMA_SERVER_MINGW_WINNT_DEF})
endif()

# Target-scoped include dirs (replaces directory-scoped include_directories;
# this file defines only one target, so the effect is identical).
# ${CMAKE_CURRENT_BINARY_DIR} is needed for the generated asset headers.
# NOTE(review): ${CMAKE_SOURCE_DIR} assumes this project is the top-level
# build; consider PROJECT_SOURCE_DIR if llama.cpp is ever embedded.
target_include_directories(${TARGET} PRIVATE
    ${CMAKE_CURRENT_SOURCE_DIR}
    ${CMAKE_CURRENT_BINARY_DIR}
    ../mtmd
    ${CMAKE_SOURCE_DIR}
)

target_link_libraries(${TARGET} PRIVATE common mtmd cpp-httplib ${CMAKE_THREAD_LIBS_INIT})
if (WIN32)
    # Winsock is required by httplib on Windows
    target_link_libraries(${TARGET} PRIVATE ws2_32)
endif()

target_compile_features(${TARGET} PRIVATE cxx_std_17)