author    Ben Sima <ben@bsima.me>  2024-05-14 11:18:58 -0400
committer Ben Sima <ben@bsima.me>  2024-05-20 22:15:50 -0400
commit    20985f8985d810092a84f31a705144b9318235dd (patch)
tree      f3a8cb4c71dc77f23598b6e377cb1ed81afefca4 /Biz
parent    2d33aa547ff6a516c90ca2b47b13e2add200583a (diff)
Test that llama-cpp is buildable
This small Llamacpp.py file is simply intended to test that llama.cpp can build. This was previously not working, I guess, because the build system doesn't verify that the final executable has its dependencies set properly in $PATH. Not sure if it *should* do that verification or not.

Anyway, I rewrote this to actually test whether it could call `llama`, and it could not, because the Python builder needed the rundeps in its propagatedBuildInputs. That alone makes `llama` available to the final artifact, but the test still failed. This is because the wrapPythonPrograms function from nixpkgs (which adds stuff to PATH) is called in postFixup, which happens after installPhase, but checkPhase happens before installPhase. So I was testing a program that didn't have PATH set yet. Moving the test to installCheck fixed this, because installCheck runs after the postFixup phase.

I opted to keep the lint/typecheck stuff in the checkPhase because they don't need any external dependencies, and having those fail earlier is probably better. Maybe it doesn't make a huge difference time-wise, but keeping them separate makes the intention clearer: in checkPhase you are checking the code itself, while in installCheck you are exercising the installation environment as well.
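For reference, a minimal sketch of the phase ordering this relies on. This illustrates stock nixpkgs stdenv behaviour, not the actual Builder.nix; the doInstallCheck/installCheckPhase names below are the standard stdenv attributes and are assumed here, as is the placeholder executable:

    # sketch only: stdenv runs phases roughly as
    #   unpack -> patch -> configure -> build -> check -> install -> fixup -> installCheck
    stdenv.mkDerivation {
      # ...
      checkPhase = ''
        # runs before installPhase, so postFixup wrappers (and the PATH they set) don't exist yet
        ruff check .
      '';
      doInstallCheck = true;
      installCheckPhase = ''
        # runs after fixupPhase/postFixup, so wrapped programs see their runtime deps on $PATH
        some-wrapped-program --version  # hypothetical executable
      '';
    }

That ordering is why the `llama` check only passes once it moves out of checkPhase.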
Diffstat (limited to 'Biz')
-rw-r--r--  Biz/Bild.nix          |  1
-rw-r--r--  Biz/Bild/Builder.nix  | 19
-rw-r--r--  Biz/Llamacpp.py       | 37
3 files changed, 50 insertions(+), 7 deletions(-)
diff --git a/Biz/Bild.nix b/Biz/Bild.nix
index 894ebe6..dabebd4 100644
--- a/Biz/Bild.nix
+++ b/Biz/Bild.nix
@@ -82,6 +82,7 @@ let
inherit bat bc cmark ctags deadnix fd figlet fzf git git-branchless
gitlint guile hlint indent jq lolcat mypy nixfmt ormolu pkg-config
ripgrep rustc tree wemux;
+ llama-cpp = nixpkgs.nixos-unstable-small.llama-cpp;
llm = nixpkgs.nixos-unstable-small.llm.withPlugins
[ nixpkgs.nixos-unstable-small.python3.pkgs.llm-ollama ];
ollama = nixpkgs.nixos-unstable-small.ollama;
diff --git a/Biz/Bild/Builder.nix b/Biz/Bild/Builder.nix
index cf4d1e0..df5aeba 100644
--- a/Biz/Bild/Builder.nix
+++ b/Biz/Bild/Builder.nix
@@ -7,6 +7,15 @@
with bild;
let
analysis = builtins.fromJSON analysisJSON;
+
+ # common bash functions for the builder
+ commonBash = builtins.toFile "common.bash" ''
+ # Check that a command succeeds, fail and log if not.
+ function check {
+ $@ || { echo "fail: $name: $3"; exit 1; }
+ }
+ '';
+
build = _: target:
let
name = target.out;
@@ -107,13 +116,12 @@ let
python = python.buildPythonApplication rec {
inherit name src CODEROOT;
- propagatedBuildInputs = langdeps_ ++ sysdeps_;
+ nativeBuildInputs = [ makeWrapper ];
+ propagatedBuildInputs = langdeps_ ++ sysdeps_ ++ rundeps_;
buildInputs = sysdeps_;
nativeCheckInputs = [ pkgs.ruff python.packages.mypy ];
checkPhase = ''
- check() {
- $@ || { echo "fail: $name: $3"; exit 1; }
- }
+ . ${commonBash}
cp ${../../pyproject.toml} ./pyproject.toml
check ruff format --exclude 'setup.py' --check .
check ruff check --exclude 'setup.py' --exclude '__init__.py' .
@@ -123,6 +131,9 @@ let
--no-error-summary \
--exclude 'setup\.py$' \
.
+ '';
+ installCheck = ''
+ . ${commonBash}
check python -m ${mainModule} test
'';
preBuild = ''
diff --git a/Biz/Llamacpp.py b/Biz/Llamacpp.py
index cd47e1e..9a2ff86 100644
--- a/Biz/Llamacpp.py
+++ b/Biz/Llamacpp.py
@@ -1,4 +1,35 @@
-"""Llamacpp."""
+"""
+Test that llama.cpp can build and exec in the omni repo.
-# : run nixos-23_11.llama-cpp
-# : run nixos-23_11.openblas
+Note that this does not test if llama-cpp can actually execute any models. I
+(currently) use ollama for running and managing models, but I'd like to make
+sure llama-cpp still works in case I need/want to switch at some point.
+"""
+
+# : out llamacpp-test
+# : run llama-cpp
+
+import os
+import sys
+import unittest
+
+
+class TestLlamaCpp(unittest.TestCase):
+ """Test that llama.cpp is available."""
+
+ def test_in_path(self) -> None:
+ """Test that llama.cpp is in $PATH."""
+ self.assertTrue("llama-cpp" in os.environ.get("PATH", ""))
+
+
+def main() -> None:
+ """Entrypoint."""
+ if sys.argv[1] == "test":
+ sys.argv.pop()
+ unittest.main()
+ else:
+ sys.exit(0)
+
+
+if __name__ == "__main__":
+ main()
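A usage note (assuming mainModule in Builder.nix resolves to Biz.Llamacpp for this target, which is not shown in this diff): the installCheck above amounts to running `python -m Biz.Llamacpp test` in the install-check environment, and test_in_path only passes there because the llama-cpp store path (whose name contains "llama-cpp") has been added to $PATH by the postFixup wrapper.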