| author    | Rowan Hart <rowanbhart@gmail.com>                | 2023-06-24 04:07:08 -0700 |
| committer | GitHub <noreply@github.com>                      | 2023-06-24 14:07:08 +0300 |
| commit    | fdd18609113862dc6eb34dfc44a093d54c59ff1f (patch) |                           |
| tree      | dacf92994df572970eb02537597a681e358eeaa6         |                           |
| parent    | c943d823c14cef33092205ca3944de6fdf7abf99 (diff)  |                           |
flake : fix ggml-metal.metal path and run nixfmt (#1974)
-rw-r--r-- | flake.nix | 50 |
1 file changed, 26 insertions(+), 24 deletions(-)
diff --git a/flake.nix b/flake.nix
--- a/flake.nix
+++ b/flake.nix
@@ -9,27 +9,33 @@
         inherit (pkgs.stdenv) isAarch64 isDarwin;
         inherit (pkgs.lib) optionals;
         isM1 = isAarch64 && isDarwin;
-        osSpecific =
-          if isM1 then with pkgs.darwin.apple_sdk_11_0.frameworks; [ Accelerate MetalKit MetalPerformanceShaders MetalPerformanceShadersGraph ]
-          else if isDarwin then with pkgs.darwin.apple_sdk.frameworks; [ Accelerate CoreGraphics CoreVideo ]
-          else [ ];
-        pkgs = import nixpkgs {
-          inherit system;
-        };
-        llama-python = pkgs.python310.withPackages (ps: with ps; [
-          numpy
-          sentencepiece
-        ]);
-      in
-      {
+        osSpecific = if isM1 then
+          with pkgs.darwin.apple_sdk_11_0.frameworks; [
+            Accelerate
+            MetalKit
+            MetalPerformanceShaders
+            MetalPerformanceShadersGraph
+          ]
+        else if isDarwin then
+          with pkgs.darwin.apple_sdk.frameworks; [
+            Accelerate
+            CoreGraphics
+            CoreVideo
+          ]
+        else
+          [ ];
+        pkgs = import nixpkgs { inherit system; };
+        llama-python =
+          pkgs.python310.withPackages (ps: with ps; [ numpy sentencepiece ]);
+      in {
         packages.default = pkgs.stdenv.mkDerivation {
           name = "llama.cpp";
           src = ./.;
-          postPatch =
-            if isM1 then ''
-              substituteInPlace ./ggml-metal.m \
-                --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/ggml-metal.metal\";"
-            '' else "";
+          postPatch = if isM1 then ''
+            substituteInPlace ./ggml-metal.m \
+              --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";"
+          '' else
+            "";
           nativeBuildInputs = with pkgs; [ cmake ];
           buildInputs = osSpecific;
           cmakeFlags = [ "-DLLAMA_BUILD_SERVER=ON" ] ++ (optionals isM1 [
@@ -62,11 +68,7 @@
         };
         apps.default = self.apps.${system}.llama;
         devShells.default = pkgs.mkShell {
-          packages = with pkgs; [
-            cmake
-            llama-python
-          ] ++ osSpecific;
+          packages = with pkgs; [ cmake llama-python ] ++ osSpecific;
         };
-      }
-    );
+      });
 }
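
Most of the hunk above is pure nixfmt reformatting; the substantive change is the substituted path, which now reads `$out/bin/ggml-metal.metal` rather than `$out/ggml-metal.metal`, presumably because the flake installs the Metal shader alongside the binaries. For clarity, here is how the affected attribute reads after this commit, taken directly from the `+` lines above and shown in isolation (not a complete flake):

```nix
# On Apple Silicon (isM1), patch ggml-metal.m so it loads the shader from the
# Nix store path it is actually installed to, instead of the app bundle.
postPatch = if isM1 then ''
  substituteInPlace ./ggml-metal.m \
    --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";"
'' else
  "";
```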