author | Ben Sima <ben@bsima.me> | 2024-04-02 00:08:24 -0400
---|---|---
committer | Ben Sima <ben@bsima.me> | 2024-04-02 00:11:33 -0400
commit | d7383136dc434a35f7c7efa65343dd0bafea5256 (patch) |
tree | 44fabbb11dde4122854a5d18ccd138dfb4c83a53 |
parent | 6040b2ceae7e6f4a0d41052663bf7826d1e52b4a (diff) |
Add llama-cpp from nixos-23.11
Removes my custom llama-cpp build and instead pulls in the upstream build from
nixos-23.11.
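
For reference, a minimal sketch of what "pulling in the upstream build" amounts to in Nix, assuming a plain `fetchTarball` pin of the nixos-23.11 branch; the repository pins its nixpkgs revisions elsewhere (e.g. via Biz/Bild/Sources.json), so the URL and wiring below are illustrative assumptions, not this commit's actual code:

```nix
# Minimal sketch, not the repo's actual wiring: pin the nixos-23.11 branch of
# nixpkgs and take llama-cpp (plus openblas, per Biz/Llamacpp.py) from it,
# instead of building the local derivation this commit deletes.
let
  nixos-23_11 = import (builtins.fetchTarball
    "https://github.com/NixOS/nixpkgs/archive/nixos-23.11.tar.gz") { };
in {
  llama-cpp = nixos-23_11.llama-cpp;
  openblas = nixos-23_11.openblas;
}
```

The two attributes correspond to the `# : run nixos-23_11.llama-cpp` and `# : run nixos-23_11.openblas` markers added in Biz/Llamacpp.py below.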
-rw-r--r-- | Biz/Bild/Deps.nix | 2
-rw-r--r-- | Biz/Bild/Deps/llama-cpp.nix | 32
-rw-r--r-- | Biz/Bild/Sources.json | 12
-rw-r--r-- | Biz/Llamacpp.py | 6
4 files changed, 6 insertions, 46 deletions
```diff
diff --git a/Biz/Bild/Deps.nix b/Biz/Bild/Deps.nix
index bc4b638..9a18c90 100644
--- a/Biz/Bild/Deps.nix
+++ b/Biz/Bild/Deps.nix
@@ -83,7 +83,5 @@ in rec {
     ];
   };
 
-  llama-cpp = super.callPackage ./Deps/llama-cpp.nix { };
-
   nostr-rs-relay = super.callPackage ./Deps/nostr-rs-relay.nix { };
 }
diff --git a/Biz/Bild/Deps/llama-cpp.nix b/Biz/Bild/Deps/llama-cpp.nix
deleted file mode 100644
index 2e2aae7..0000000
--- a/Biz/Bild/Deps/llama-cpp.nix
+++ /dev/null
@@ -1,32 +0,0 @@
-{ stdenv, sources, python3, cmake, pkgconfig, openmpi, cudaPackages }:
-let llama-python = python3.withPackages (ps: with ps; [ numpy sentencepiece ]);
-in stdenv.mkDerivation {
-  name = "llama.cpp";
-  version = sources.llama-cpp.rev;
-
-  src = sources.llama-cpp;
-
-  postPatch = ''
-    substituteInPlace ./ggml-metal.m \
-      --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";"
-    substituteInPlace ./*.py --replace '/usr/bin/env python' '${llama-python}/bin/python'
-  '';
-
-  nativeBuildInputs = [ cmake pkgconfig ];
-  buildInputs = [ openmpi cudaPackages.cudatoolkit ];
-
-  cmakeFlags = [
-    "-DLLAMA_BUILD_SERVER=ON"
-    "-DLLAMA_MPI=ON"
-    "-DBUILD_SHARED_LIBS=ON"
-    "-DCMAKE_SKIP_BUILD_RPATH=ON"
-    "-DLLAMA_CUBLAS=ON"
-  ];
-
-  postInstall = ''
-    mv $out/bin/main $out/bin/llama
-    mv $out/bin/server $out/bin/llama-server
-  '';
-
-  meta.mainProgram = "llama";
-}
diff --git a/Biz/Bild/Sources.json b/Biz/Bild/Sources.json
index 7cdb418..52112b5 100644
--- a/Biz/Bild/Sources.json
+++ b/Biz/Bild/Sources.json
@@ -76,18 +76,6 @@
         "url": "https://github.com/MegaIng/interegular/archive/v0.2.1.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
-    "llama-cpp": {
-        "branch": "master",
-        "description": "Port of Facebook's LLaMA model in C/C++",
-        "homepage": null,
-        "owner": "ggerganov",
-        "repo": "llama.cpp",
-        "rev": "e59fcb2bc129881f4a269fee748fb38bce0a64de",
-        "sha256": "18171pv8ymgkvv2q3y8f6l64sm9dmpa0w7yqipzhdxx2n9m1x6ln",
-        "type": "tarball",
-        "url": "https://github.com/ggerganov/llama.cpp/archive/e59fcb2bc129881f4a269fee748fb38bce0a64de.tar.gz",
-        "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
-    },
     "niv": {
         "branch": "master",
         "description": "Easy dependency management for Nix projects",
diff --git a/Biz/Llamacpp.py b/Biz/Llamacpp.py
new file mode 100644
index 0000000..e75de5b
--- /dev/null
+++ b/Biz/Llamacpp.py
@@ -0,0 +1,6 @@
+"""
+Llamacpp
+"""
+
+# : run nixos-23_11.llama-cpp
+# : run nixos-23_11.openblas
```
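
The deleted derivation hard-coded `-DLLAMA_MPI=ON` and `-DLLAMA_CUBLAS=ON`, while the `# : run nixos-23_11.openblas` marker suggests the upstream 23.11 build is now used with its BLAS backend. If CUDA support is still wanted, the upstream package can usually be overridden; a hedged sketch, where the `cudaSupport` argument name is an assumption about the nixpkgs 23.11 expression rather than anything this commit relies on:

```nix
# Sketch only: ask the upstream llama-cpp package for a CUDA-enabled build.
# The `cudaSupport` argument name is assumed; check the llama-cpp expression
# in the pinned nixpkgs revision before depending on it.
let
  nixos-23_11 = import (builtins.fetchTarball
    "https://github.com/NixOS/nixpkgs/archive/nixos-23.11.tar.gz") { };
in nixos-23_11.llama-cpp.override { cudaSupport = true; }
```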