blob: b8975f238500d1f0e3a1265840f8b6ba2132f612 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
|
#!/usr/bin/env run.sh
"""
Test that llama.cpp can build and exec in the omni repo.
Note that this does not test if llama-cpp can actually execute any models. I
(currently) use ollama for running and managing models, but I'd like to make
sure llama-cpp still works in case I need/want to switch at some point.
"""
# : out llamacpp-test
# : run llama-cpp
import os
import sys
import unittest
class TestLlamaCpp(unittest.TestCase):
"""Test that llama.cpp is available."""
def test_in_path(self) -> None:
"""Test that llama.cpp is in $PATH."""
self.assertIn("llama-cpp", os.environ.get("PATH", ""))
def test() -> None:
    """Run this module's test suite.

    Exits the process with status 1 when any test fails so that callers
    (shell scripts, CI) can detect a failing suite; previously the
    runner's result object was silently discarded and a failing run
    still exited 0.
    """
    suite = unittest.TestSuite()
    suite.addTests(
        unittest.defaultTestLoader.loadTestsFromTestCase(TestLlamaCpp),
    )
    result = unittest.TextTestRunner().run(suite)
    # Propagate failure into the exit code instead of ignoring it.
    if not result.wasSuccessful():
        sys.exit(1)
def main() -> None:
    """Entrypoint.

    Runs the test suite when invoked as ``<prog> test``; exits 0 for any
    other — or missing — argument. The previous version indexed
    ``sys.argv[1]`` unconditionally and crashed with IndexError when the
    script was run with no arguments.
    """
    if len(sys.argv) > 1 and sys.argv[1] == "test":
        test()
    else:
        sys.exit(0)
# Script entrypoint guard: dispatch to main() only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|