@@ -6,6 +6,29 @@
     flake-parts.url = "github:hercules-ci/flake-parts";
   };
 
+  # Optional binary cache
+  nixConfig = {
+    extra-substituters = [
+      # Populated by the CI in ggerganov/llama.cpp
+      "https://llama-cpp.cachix.org"
+
+      # A development cache for nixpkgs imported with `config.cudaSupport = true`.
+      # Populated by https://hercules-ci.com/github/SomeoneSerge/nixpkgs-cuda-ci.
+      # This lets one skip building e.g. the CUDA-enabled openmpi.
+      # TODO: Replace once nix-community obtains an official one.
+      "https://cuda-maintainers.cachix.org"
+    ];
+
+    # Verify these are the same keys as published on
+    # - https://app.cachix.org/cache/llama-cpp
+    # - https://app.cachix.org/cache/cuda-maintainers
+    extra-trusted-public-keys = [
+      "llama-cpp.cachix.org-1:H75X+w83wUKTIPSO1KWy9ADUrzThyGs8P5tmAbkWhQc="
+      "cuda-maintainers.cachix.org-1:0dq3bujKpuEPMCX6U4WylrUDZ9JyUG0VpVZa7CNfq5E="
+    ];
+  };
+
+
   # For inspection, use `nix flake show github:ggerganov/llama.cpp` or the nix repl:
   #
   # ```bash
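
Usage note: a flake's `nixConfig` is not applied silently; Nix asks the user to confirm settings such as `extra-substituters`, and whether the caches are actually consulted can also depend on the local `trusted-users` / `trusted-substituters` configuration. A minimal sketch of how a consumer might opt in, assuming a flakes-enabled Nix and, for the second route, an installed `cachix` CLI:

```bash
# Accept the flake-provided nixConfig (substituters and trusted keys) for this
# invocation instead of answering the interactive prompt.
nix build github:ggerganov/llama.cpp --accept-flake-config

# Alternative sketch: register the cache in the local Nix configuration
# instead of relying on the flake's nixConfig (requires the cachix CLI).
cachix use llama-cpp
```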