#!/usr/bin/env run.sh
"""Test that llama.cpp can build and exec in the omni repo.

Note that this does not test whether llama-cpp can actually execute any
models. I (currently) use ollama for running and managing models, but I'd
like to make sure llama-cpp still works in case I need/want to switch at
some point.
"""
# : out llamacpp-test
# : run llama-cpp

import os
import sys
import unittest


class TestLlamaCpp(unittest.TestCase):
    """Test that llama.cpp is available."""

    def test_in_path(self) -> None:
        """Test that llama.cpp is in $PATH."""
        self.assertIn("llama-cpp", os.environ.get("PATH", ""))


def test() -> None:
    """Run this module's test suite."""
    suite = unittest.TestSuite()
    suite.addTests(
        unittest.defaultTestLoader.loadTestsFromTestCase(TestLlamaCpp),
    )
    unittest.TextTestRunner().run(suite)


def main() -> None:
    """Entrypoint."""
    # Guard against a missing argument so a bare invocation exits 0 instead
    # of raising an IndexError.
    if len(sys.argv) > 1 and sys.argv[1] == "test":
        test()
    else:
        sys.exit(0)


if __name__ == "__main__":
    main()
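
# Usage sketch: how this script is expected to be invoked. This assumes the
# `run.sh` shebang wrapper and the `# : out llamacpp-test` directive produce a
# `llamacpp-test` entry point that forwards its arguments to main(); the exact
# wiring depends on the omni repo's tooling and is an assumption here.
#
#     llamacpp-test test    # run the unittest suite
#     llamacpp-test         # any other invocation exits 0 without testing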