Diffstat (limited to 'Biz/Bild/Deps')
-rw-r--r-- Biz/Bild/Deps/llama-cpp.nix | 32 --------------------------------
1 file changed, 32 deletions(-)
diff --git a/Biz/Bild/Deps/llama-cpp.nix b/Biz/Bild/Deps/llama-cpp.nix
deleted file mode 100644
index 2e2aae7..0000000
--- a/Biz/Bild/Deps/llama-cpp.nix
+++ /dev/null
@@ -1,32 +0,0 @@
-{ stdenv, sources, python3, cmake, pkgconfig, openmpi, cudaPackages }:
-let llama-python = python3.withPackages (ps: with ps; [ numpy sentencepiece ]);
-in stdenv.mkDerivation {
- name = "llama.cpp";
- version = sources.llama-cpp.rev;
-
- src = sources.llama-cpp;
-
- postPatch = ''
- substituteInPlace ./ggml-metal.m \
- --replace '[bundle pathForResource:@"ggml-metal" ofType:@"metal"];' "@\"$out/bin/ggml-metal.metal\";"
- substituteInPlace ./*.py --replace '/usr/bin/env python' '${llama-python}/bin/python'
- '';
-
- nativeBuildInputs = [ cmake pkgconfig ];
- buildInputs = [ openmpi cudaPackages.cudatoolkit ];
-
- cmakeFlags = [
- "-DLLAMA_BUILD_SERVER=ON"
- "-DLLAMA_MPI=ON"
- "-DBUILD_SHARED_LIBS=ON"
- "-DCMAKE_SKIP_BUILD_RPATH=ON"
- "-DLLAMA_CUBLAS=ON"
- ];
-
- postInstall = ''
- mv $out/bin/main $out/bin/llama
- mv $out/bin/server $out/bin/llama-server
- '';
-
- meta.mainProgram = "llama";
-}
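
Note (not part of the commit): a derivation like the one deleted above is typically instantiated elsewhere in the tree via callPackage, with `sources` coming from a niv-style pinning file. The sketch below is a hypothetical consumer; the ./nix/sources.nix path and the attribute name llama-cpp are assumptions, not taken from this repository.

# Hypothetical sketch of how the deleted derivation might have been consumed.
# `sources` is assumed to be a niv-managed attrset providing sources.llama-cpp.
{ pkgs ? import <nixpkgs> { } }:
let
  sources = import ./nix/sources.nix;   # assumed pin file, not confirmed by this diff
in {
  llama-cpp = pkgs.callPackage ./Biz/Bild/Deps/llama-cpp.nix {
    inherit sources;                    # remaining arguments (cmake, openmpi, cudaPackages, ...)
  };                                    # are filled in from pkgs by callPackage
}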