blob: 9a2ff8648dd4e3944b23d84a63cf2751b089e800 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
|
"""
Test that llama.cpp can build and exec in the omni repo.
Note that this does not test if llama-cpp can actually execute any models. I
(currently) use ollama for running and managing models, but I'd like to make
sure llama-cpp still works in case I need/want to switch at some point.
"""
# : out llamacpp-test
# : run llama-cpp
import os
import sys
import unittest
class TestLlamaCpp(unittest.TestCase):
"""Test that llama.cpp is available."""
def test_in_path(self) -> None:
"""Test that llama.cpp is in $PATH."""
self.assertTrue("llama-cpp" in os.environ.get("PATH", ""))
def main() -> None:
    """Entrypoint.

    Runs the unittest suite when invoked as ``prog test``; exits
    successfully (status 0) on any other invocation, including when
    no arguments are given at all.
    """
    # Length guard: the original read sys.argv[1] unconditionally,
    # which raised IndexError when the script was run with no args.
    if len(sys.argv) > 1 and sys.argv[1] == "test":
        # Remove the "test" sub-command specifically (index 1), not the
        # *last* argument: a bare pop() would eat a trailing flag such
        # as -v and leave "test" behind for unittest.main() to misparse.
        sys.argv.pop(1)
        unittest.main()
    else:
        sys.exit(0)
if __name__ == "__main__":
main()
|