-rw-r--r--   Biz/Bild.nix          |  1
-rw-r--r--   Biz/Bild/Builder.nix  | 19
-rw-r--r--   Biz/Llamacpp.py       | 37
-rw-r--r--   pyproject.toml        |  1
4 files changed, 51 insertions, 7 deletions
diff --git a/Biz/Bild.nix b/Biz/Bild.nix
index 894ebe6..dabebd4 100644
--- a/Biz/Bild.nix
+++ b/Biz/Bild.nix
@@ -82,6 +82,7 @@ let
   inherit bat bc cmark ctags deadnix fd figlet fzf git git-branchless gitlint
     guile hlint indent jq lolcat mypy nixfmt ormolu pkg-config ripgrep rustc
     tree wemux;
+  llama-cpp = nixpkgs.nixos-unstable-small.llama-cpp;
   llm = nixpkgs.nixos-unstable-small.llm.withPlugins
     [ nixpkgs.nixos-unstable-small.python3.pkgs.llm-ollama ];
   ollama = nixpkgs.nixos-unstable-small.ollama;
diff --git a/Biz/Bild/Builder.nix b/Biz/Bild/Builder.nix
index cf4d1e0..df5aeba 100644
--- a/Biz/Bild/Builder.nix
+++ b/Biz/Bild/Builder.nix
@@ -7,6 +7,15 @@ with bild;
 let
   analysis = builtins.fromJSON analysisJSON;
+
+  # common bash functions for the builder
+  commonBash = builtins.toFile "common.bash" ''
+    # Check that a command succeeds, fail and log if not.
+    function check {
+      $@ || { echo "fail: $name: $3"; exit 1; }
+    }
+  '';
+
   build = _: target:
     let
       name = target.out;
@@ -107,13 +116,12 @@ let
       python = python.buildPythonApplication rec {
         inherit name src CODEROOT;
-        propagatedBuildInputs = langdeps_ ++ sysdeps_;
+        nativeBuildInputs = [ makeWrapper ];
+        propagatedBuildInputs = langdeps_ ++ sysdeps_ ++ rundeps_;
         buildInputs = sysdeps_;
         nativeCheckInputs = [ pkgs.ruff python.packages.mypy ];
         checkPhase = ''
-          check() {
-            $@ || { echo "fail: $name: $3"; exit 1; }
-          }
+          . ${commonBash}
           cp ${../../pyproject.toml} ./pyproject.toml
           check ruff format --exclude 'setup.py' --check .
           check ruff check --exclude 'setup.py' --exclude '__init__.py' .
@@ -123,6 +131,9 @@ let
             --no-error-summary \
             --exclude 'setup\.py$' \
             .
+        '';
+        installCheck = ''
+          . ${commonBash}
           check python -m ${mainModule} test
         '';
         preBuild = ''
diff --git a/Biz/Llamacpp.py b/Biz/Llamacpp.py
index cd47e1e..9a2ff86 100644
--- a/Biz/Llamacpp.py
+++ b/Biz/Llamacpp.py
@@ -1,4 +1,35 @@
-"""Llamacpp."""
+"""
+Test that llama.cpp can build and exec in the omni repo.
 
-# : run nixos-23_11.llama-cpp
-# : run nixos-23_11.openblas
+Note that this does not test if llama-cpp can actually execute any models. I
+(currently) use ollama for running and managing models, but I'd like to make
+sure llama-cpp still works in case I need/want to switch at some point.
+"""
+
+# : out llamacpp-test
+# : run llama-cpp
+
+import os
+import sys
+import unittest
+
+
+class TestLlamaCpp(unittest.TestCase):
+    """Test that llama.cpp is available."""
+
+    def test_in_path(self) -> None:
+        """Test that llama.cpp is in $PATH."""
+        self.assertTrue("llama-cpp" in os.environ.get("PATH", ""))
+
+
+def main() -> None:
+    """Entrypoint."""
+    if sys.argv[1] == "test":
+        sys.argv.pop()
+        unittest.main()
+    else:
+        sys.exit(0)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/pyproject.toml b/pyproject.toml
index e8266ae..62eebf4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,6 +27,7 @@ ignore = [
   "E203", # whitespace-before-punctuation, doesn't work with ruff format
   "INP001", # implicit-namespace-package
   "N999", # invalid-module-name
+  "PT009", # pytest-unittest-assertion, conflicts with assert (S101)
   "S310", # suspicious-url-open-usage, doesn't work in 0.1.5
   "S404", # suspicious-subprocess-import, not stable
   "S603", # subprocess-without-shell-equals-true, false positives
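For reference, a minimal sketch of how the check helper from common.bash behaves once sourced into a build phase. The name variable is normally supplied by the derivation environment, so it is set by hand here, and the target name and commands are only illustrative:

    # Sketch only: in real builds, `name` comes from the derivation env.
    name="example-target"

    function check {
      $@ || { echo "fail: $name: $3"; exit 1; }
    }

    check true any args      # command succeeds, nothing happens
    check mypy --strict .    # on failure, prints "fail: example-target: ." and exits 1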