{
  lib,
  glibc,
  config,
  stdenv,
  runCommand,
  cmake,
  ninja,
  pkg-config,
  git,
  mpi,
  blas,
  cudaPackages,
  autoAddDriverRunpath,
  darwin,
  rocmPackages,
  vulkan-headers,
  vulkan-loader,
  curl,
  shaderc,

  useBlas ?
    builtins.all (x: !x) [
      useCuda
      useMetalKit
      useRocm
      useVulkan
    ]
    && blas.meta.available,
  useCuda ? config.cudaSupport,
  useMetalKit ? stdenv.isAarch64 && stdenv.isDarwin,

  # Increases the runtime closure size by ~700M
  useMpi ? false,
  useRocm ? config.rocmSupport,
  rocmGpuTargets ? builtins.concatStringsSep ";" rocmPackages.clr.gpuTargets,
  enableCurl ? true,
  useVulkan ? false,
  useRpc ? false,
  llamaVersion ? "0.0.0", # Arbitrary version, substituted by the flake

  # It's necessary to consistently use backendStdenv when building with CUDA support,
  # otherwise we get libstdc++ errors downstream.
  effectiveStdenv ? if useCuda then cudaPackages.backendStdenv else stdenv,
  enableStatic ? effectiveStdenv.hostPlatform.isStatic,
  precompileMetalShaders ? false,
}:
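
# These feature flags are intended to be set from the call site. A minimal
# sketch (assuming this file is imported with callPackage from a Nixpkgs
# instance; the attribute names on the left are illustrative, not defined here):
#
#   llama-cpp-vulkan = pkgs.callPackage ./package.nix { useVulkan = true; };
#   llama-cpp-cuda   = pkgs.callPackage ./package.nix { useCuda = true; };
#
# An already-instantiated package can likewise be tweaked with
# `llama-cpp.override { useRocm = true; }`.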

let
  inherit (lib)
    cmakeBool
    cmakeFeature
    optionalAttrs
    optionals
    strings
    ;

  stdenv = throw "Use effectiveStdenv instead";

  suffices =
    lib.optionals useBlas [ "BLAS" ]
    ++ lib.optionals useCuda [ "CUDA" ]
    ++ lib.optionals useMetalKit [ "MetalKit" ]
    ++ lib.optionals useMpi [ "MPI" ]
    ++ lib.optionals useRocm [ "ROCm" ]
    ++ lib.optionals useVulkan [ "Vulkan" ];

  pnameSuffix =
    strings.optionalString (suffices != [ ])
      "-${strings.concatMapStringsSep "-" strings.toLower suffices}";
  descriptionSuffix = strings.optionalString (
    suffices != [ ]
  ) ", accelerated with ${strings.concatStringsSep ", " suffices}";

  xcrunHost = runCommand "xcrunHost" { } ''
    mkdir -p $out/bin
    ln -s /usr/bin/xcrun $out/bin
  '';
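  # xcrunHost above merely exposes the host's /usr/bin/xcrun; it is only added
  # to nativeBuildInputs for the impure precompileMetalShaders build, see the
  # `__noChroot` note further down.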

  # apple_sdk is supposed to choose sane defaults, no need to handle isAarch64
  # separately
  darwinBuildInputs =
    with darwin.apple_sdk.frameworks;
    [
      Accelerate
      CoreVideo
      CoreGraphics
    ]
    ++ optionals useMetalKit [ MetalKit ];

  cudaBuildInputs = with cudaPackages; [
    cuda_cudart
    cuda_cccl # <nv/target>
    libcublas
  ];

  rocmBuildInputs = with rocmPackages; [
    clr
    hipblas
    rocblas
  ];

  vulkanBuildInputs = [
    vulkan-headers
    vulkan-loader
    shaderc
  ];
in

effectiveStdenv.mkDerivation (finalAttrs: {
  pname = "llama-cpp${pnameSuffix}";
  version = llamaVersion;

  # Note: none of the files discarded here are visible in the sandbox or
  # affect the output hash. This also means they can be modified without
  # triggering a rebuild.
  src = lib.cleanSourceWith {
    filter =
      name: type:
      let
        noneOf = builtins.all (x: !x);
        baseName = baseNameOf name;
      in
      noneOf [
        (lib.hasSuffix ".nix" name) # Ignore *.nix files when computing outPaths
        (lib.hasSuffix ".md" name) # Ignore *.md changes when computing outPaths
        (lib.hasPrefix "." baseName) # Skip hidden files and directories
        (baseName == "flake.lock")
      ];
    src = lib.cleanSource ../../.;
  };

  postPatch = ''
  '';

  # With PR#6015 https://github.com/ggml-org/llama.cpp/pull/6015,
  # `default.metallib` may be compiled with the Metal compiler from Xcode,
  # and we need to escape the sandbox on macOS to access the Metal compiler.
  # `xcrun` is used to find the path of the Metal compiler, which is variable
  # and not on $PATH;
  # see https://github.com/ggml-org/llama.cpp/pull/6118 for discussion.
  __noChroot = effectiveStdenv.isDarwin && useMetalKit && precompileMetalShaders;

  nativeBuildInputs =
    [
      cmake
      ninja
      pkg-config
      git
    ]
    ++ optionals useCuda [
      cudaPackages.cuda_nvcc
      autoAddDriverRunpath
    ]
    ++ optionals (effectiveStdenv.hostPlatform.isGnu && enableStatic) [ glibc.static ]
    ++ optionals (effectiveStdenv.isDarwin && useMetalKit && precompileMetalShaders) [ xcrunHost ];

  buildInputs =
    optionals effectiveStdenv.isDarwin darwinBuildInputs
    ++ optionals useCuda cudaBuildInputs
    ++ optionals useMpi [ mpi ]
    ++ optionals useRocm rocmBuildInputs
    ++ optionals useBlas [ blas ]
    ++ optionals useVulkan vulkanBuildInputs
    ++ optionals enableCurl [ curl ];
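
  # cmakeBool and cmakeFeature (from lib) render typed -D flags for the
  # configure phase; roughly, `cmakeBool "GGML_CUDA" useCuda` becomes
  # "-DGGML_CUDA:BOOL=TRUE" (or FALSE), and
  # `cmakeFeature "CMAKE_HIP_ARCHITECTURES" rocmGpuTargets` becomes
  # "-DCMAKE_HIP_ARCHITECTURES:STRING=<targets>".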
  cmakeFlags =
    [
      (cmakeBool "LLAMA_BUILD_SERVER" true)
      (cmakeBool "BUILD_SHARED_LIBS" (!enableStatic))
      (cmakeBool "CMAKE_SKIP_BUILD_RPATH" true)
      (cmakeBool "LLAMA_CURL" enableCurl)
      (cmakeBool "GGML_NATIVE" false)
      (cmakeBool "GGML_BLAS" useBlas)
      (cmakeBool "GGML_CUDA" useCuda)
      (cmakeBool "GGML_HIP" useRocm)
      (cmakeBool "GGML_METAL" useMetalKit)
      (cmakeBool "GGML_VULKAN" useVulkan)
      (cmakeBool "GGML_STATIC" enableStatic)
      (cmakeBool "GGML_RPC" useRpc)
    ]
    ++ optionals useCuda [
      (
        with cudaPackages.flags;
        cmakeFeature "CMAKE_CUDA_ARCHITECTURES" (
          builtins.concatStringsSep ";" (map dropDot cudaCapabilities)
        )
      )
    ]
    ++ optionals useRocm [
      (cmakeFeature "CMAKE_HIP_COMPILER" "${rocmPackages.llvm.clang}/bin/clang")
      (cmakeFeature "CMAKE_HIP_ARCHITECTURES" rocmGpuTargets)
    ]
    ++ optionals useMetalKit [
      (lib.cmakeFeature "CMAKE_C_FLAGS" "-D__ARM_FEATURE_DOTPROD=1")
      (cmakeBool "GGML_METAL_EMBED_LIBRARY" (!precompileMetalShaders))
    ];

  # Environment variables needed for ROCm
  env = optionalAttrs useRocm {
    ROCM_PATH = "${rocmPackages.clr}";
    HIP_DEVICE_LIB_PATH = "${rocmPackages.rocm-device-libs}/amdgcn/bitcode";
  };
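  # The variables above point the HIP toolchain at the Nix store copies of CLR
  # and the device bitcode libraries rather than the conventional /opt/rocm
  # location it would otherwise look for.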

  # TODO(SomeoneSerge): It's better to add proper install targets at the CMake level,
  # if they haven't been added yet.
  postInstall = ''
    mkdir -p $out/include
    cp $src/include/llama.h $out/include/
  '';

  meta = {
    # Configurations we don't want even the CI to evaluate. Results in the
    # "unsupported platform" messages. This is mostly a no-op, because
    # cudaPackages would've refused to evaluate anyway.
    badPlatforms = optionals useCuda lib.platforms.darwin;

    # Configurations that are known to result in build failures. Can be
    # overridden by importing Nixpkgs with `allowBroken = true`.
    broken = (useMetalKit && !effectiveStdenv.isDarwin);

    description = "Inference of LLaMA model in pure C/C++${descriptionSuffix}";
    homepage = "https://github.com/ggml-org/llama.cpp/";
    license = lib.licenses.mit;

    # Accommodates `nix run` and `lib.getExe`
    mainProgram = "llama-cli";

    # These people might respond, on a best-effort basis, if you ping them
    # in case of Nix-specific regressions or for reviewing Nix-specific PRs.
    # Consider adding yourself to this list if you want to ensure this flake
    # stays maintained and you're willing to invest your time. Do not add
    # other people without their consent. Consider removing people after
    # they've been unreachable for long periods of time.
    # Note that lib.maintainers is defined in Nixpkgs, but you may just add
    # an attrset following the same format as in
    # https://github.com/NixOS/nixpkgs/blob/f36a80e54da29775c78d7eff0e628c2b4e34d1d7/maintainers/maintainer-list.nix
    maintainers = with lib.maintainers; [
      philiptaron
      SomeoneSerge
    ];

    # Extend `badPlatforms` instead
    platforms = lib.platforms.all;
  };
})