  1. # The flake interface to llama.cpp's Nix expressions. The flake is used as a
  2. # more discoverable entry-point, as well as a way to pin the dependencies and
  3. # expose default outputs, including the outputs built by the CI.
  4. # For more serious applications involving some kind of customization you may
  5. # want to consider consuming the overlay, or instantiating `llamaPackages`
  6. # directly:
  7. #
  8. # ```nix
  9. # pkgs.callPackage ${llama-cpp-root}/.devops/nix/scope.nix { }`
  10. # ```
  11. # Cf. https://jade.fyi/blog/flakes-arent-real/ for a more detailed exposition
  12. # of the relation between Nix and the Nix Flakes.
  13. {
  14. description = "Port of Facebook's LLaMA model in C/C++";
  15. inputs = {
  16. nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
  17. flake-parts.url = "github:hercules-ci/flake-parts";
  18. };
  19. # There's an optional binary cache available. The details are below, but they're commented out.
  20. #
  21. # Why? The terrible experience of being prompted to accept them on every single Nix command run.
  22. # Plus, there are warnings shown about not being a trusted user on a default Nix install
  23. # if you *do* say yes to the prompts.
  24. #
  25. # This experience makes having `nixConfig` in a flake a persistent UX problem.
  26. #
  27. # To make use of the binary cache, please add the relevant settings to your `nix.conf`.
  28. # It's located at `/etc/nix/nix.conf` on non-NixOS systems. On NixOS, adjust the `nix.settings`
  29. # option in your NixOS configuration to add `extra-substituters` and `extra-trusted-public-keys`,
  30. # as shown below.
  31. #
  32. # ```
  33. # nixConfig = {
  34. # extra-substituters = [
  35. # # Populated by the CI in ggerganov/llama.cpp
  36. # "https://llama-cpp.cachix.org"
  37. #
  38. # # A development cache for nixpkgs imported with `config.cudaSupport = true`.
  39. # # Populated by https://hercules-ci.com/github/SomeoneSerge/nixpkgs-cuda-ci.
  40. # # This lets one skip building e.g. the CUDA-enabled openmpi.
  41. # # TODO: Replace once nix-community obtains an official one.
  42. # "https://cuda-maintainers.cachix.org"
  43. # ];
  44. #
  45. # # Verify these are the same keys as published on
  46. # # - https://app.cachix.org/cache/llama-cpp
  47. # # - https://app.cachix.org/cache/cuda-maintainers
  48. # extra-trusted-public-keys = [
  49. # "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc="
  50. # "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E="
  51. # ];
  52. # };
  53. # ```
  54. # For inspection, use `nix flake show github:ggerganov/llama.cpp` or the nix repl:
  55. #
  56. # ```bash
  57. # ❯ nix repl
  58. # nix-repl> :lf github:ggerganov/llama.cpp
  59. # Added 13 variables.
  60. # nix-repl> outputs.apps.x86_64-linux.quantize
  61. # { program = "/nix/store/00000000000000000000000000000000-llama.cpp/bin/quantize"; type = "app"; }
  62. # ```
  63. outputs =
  64. { self, flake-parts, ... }@inputs:
  65. let
  66. # We could include the git revisions in the package names but those would
  67. # needlessly trigger rebuilds:
  68. # llamaVersion = self.dirtyShortRev or self.shortRev;
  69. # Nix already uses cryptographic hashes for versioning, so we'll just fix
  70. # the fake semver for now:
  71. llamaVersion = "0.0.0";
  72. in
  73. flake-parts.lib.mkFlake { inherit inputs; }
  74. {
  75. imports = [
  76. .devops/nix/nixpkgs-instances.nix
  77. .devops/nix/apps.nix
  78. .devops/nix/devshells.nix
  79. .devops/nix/jetson-support.nix
  80. ];
  81. # An overlay can be used to have a more granular control over llama-cpp's
  82. # dependencies and configuration, than that offered by the `.override`
  83. # mechanism. Cf. https://nixos.org/manual/nixpkgs/stable/#chap-overlays.
  84. #
  85. # E.g. in a flake:
  86. # ```
  87. # { nixpkgs, llama-cpp, ... }:
  88. # let pkgs = import nixpkgs {
  89. # overlays = [ (llama-cpp.overlays.default) ];
  90. # system = "aarch64-linux";
  91. # config.allowUnfree = true;
  92. # config.cudaSupport = true;
  93. # config.cudaCapabilities = [ "7.2" ];
  94. # config.cudaEnableForwardCompat = false;
  95. # }; in {
  96. # packages.aarch64-linux.llamaJetsonXavier = pkgs.llamaPackages.llama-cpp;
  97. # }
  98. # ```
  99. #
  100. # Cf. https://nixos.org/manual/nix/unstable/command-ref/new-cli/nix3-flake.html?highlight=flake#flake-format
  101. flake.overlays.default = (
  102. final: prev: {
  103. llamaPackages = final.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
  104. inherit (final.llamaPackages) llama-cpp;
  105. }
  106. );
  107. systems = [
  108. "aarch64-darwin"
  109. "aarch64-linux"
  110. "x86_64-darwin" # x86_64-darwin isn't tested (and likely isn't relevant)
  111. "x86_64-linux"
  112. ];
  113. perSystem =
  114. {
  115. config,
  116. lib,
  117. system,
  118. pkgs,
  119. pkgsCuda,
  120. pkgsRocm,
  121. ...
  122. }:
  123. {
  124. # For standardised reproducible formatting with `nix fmt`
  125. formatter = pkgs.nixfmt-rfc-style;
  126. # Unlike `.#packages`, legacyPackages may contain values of
  127. # arbitrary types (including nested attrsets) and may even throw
  128. # exceptions. This attribute isn't recursed into by `nix flake
  129. # show` either.
  130. #
  131. # You can add arbitrary scripts to `.devops/nix/scope.nix` and
  132. # access them as `nix build .#llamaPackages.${scriptName}` using
  133. # the same path you would with an overlay.
  134. legacyPackages = {
  135. llamaPackages = pkgs.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
  136. llamaPackagesCuda = pkgsCuda.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
  137. llamaPackagesRocm = pkgsRocm.callPackage .devops/nix/scope.nix { inherit llamaVersion; };
  138. };
  139. # We don't use the overlay here so as to avoid making too many instances of nixpkgs,
  140. # cf. https://zimbatm.com/notes/1000-instances-of-nixpkgs
  141. packages =
  142. {
  143. default = config.legacyPackages.llamaPackages.llama-cpp;
  144. vulkan = config.packages.default.override { useVulkan = true; };
  145. }
  146. // lib.optionalAttrs pkgs.stdenv.isLinux {
  147. opencl = config.packages.default.override { useOpenCL = true; };
  148. cuda = config.legacyPackages.llamaPackagesCuda.llama-cpp;
  149. mpi-cpu = config.packages.default.override { useMpi = true; };
  150. mpi-cuda = config.packages.default.override { useMpi = true; };
  151. }
  152. // lib.optionalAttrs (system == "x86_64-linux") {
  153. rocm = config.legacyPackages.llamaPackagesRocm.llama-cpp;
  154. };
  155. # Packages exposed in `.#checks` will be built by the CI and by
  156. # `nix flake check`. Currently we expose all packages, but we could
  157. # make more granular choices
  158. checks = config.packages;
  159. };
  160. };
  161. }