flake.nix
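# Nix flake for llama.cpp.
#
# Outputs (per system, via flake-utils):
#   packages.default  - llama.cpp built with CMake; `nix build` produces the
#                       `llama` binary and a wrapped convert-pth-to-ggml script,
#                       and `nix run` executes `llama` (meta.mainProgram).
#   devShells.default - `nix develop` shell with cmake and a Python 3.10
#                       environment (torch, numpy, sentencepiece).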
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    flake-utils.url = "github:numtide/flake-utils";
  };
  outputs = { self, nixpkgs, flake-utils }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = import nixpkgs {
          inherit system;
        };
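        # Python 3.10 environment used by the convert-pth-to-ggml wrapper script.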
        llama-python = pkgs.python310.withPackages (ps: with ps; [
          torch
          numpy
          sentencepiece
        ]);
      in
      {
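        # Build llama.cpp with CMake; the main binary is installed as `llama`.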
        packages.default = pkgs.stdenv.mkDerivation {
          name = "llama.cpp";
          src = ./.;
          nativeBuildInputs = with pkgs; [ cmake ];
          buildInputs = with pkgs; lib.optionals stdenv.isDarwin [
            darwin.apple_sdk.frameworks.Accelerate
          ];
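          # Define __ARM_FEATURE_DOTPROD so the ARM dot-product code path is
          # compiled on Apple Silicon.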
          cmakeFlags = with pkgs; lib.optionals (system == "aarch64-darwin") [
            "-DCMAKE_C_FLAGS=-D__ARM_FEATURE_DOTPROD=1"
          ];
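          # Install the binaries, rename main to llama, and wrap
          # convert-pth-to-ggml.py with the Python environment above.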
          installPhase = ''
            mkdir -p $out/bin
            mv bin/* $out/bin/
            mv $out/bin/main $out/bin/llama

            echo "#!${llama-python}/bin/python" > $out/bin/convert-pth-to-ggml
            cat ${./convert-pth-to-ggml.py} >> $out/bin/convert-pth-to-ggml
            chmod +x $out/bin/convert-pth-to-ggml
          '';
          meta.mainProgram = "llama";
        };
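        # Development shell with cmake and the Python environment
        # (plus Accelerate on macOS).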
        devShells.default = pkgs.mkShell {
          packages = with pkgs; [
            cmake
            llama-python
          ] ++ lib.optionals stdenv.isDarwin [
            darwin.apple_sdk.frameworks.Accelerate
          ];
        };
      }
    );
}