author     Ben Sima <ben@bsima.me>  2024-05-11 14:28:09 -0400
committer  Ben Sima <ben@bsima.me>  2024-05-20 22:15:49 -0400
commit     cceefa62d147594d43478e398bbaa9c630670935 (patch)
tree       458b468f271c156a7e91944e58d2861afd5e1dab /Biz/Dev
parent     dd0bc9610cf0e6842f5d5ac67a73f2fd6f4eba2f (diff)
Set up ollama API service
This supersedes exllama and tabbyAPI, which I could never get working fully. Unfortunately I had to switch to NixOS unstable to get all the Go builder stuff to work, so this introduces yet another source of version drift, but I guess that's inevitable and I should just learn to mitigate it with my nixpkgs shenanigans.
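(For reference, and not part of this commit: the usual way to rein in that kind of drift from plain Nix code is to pin nixpkgs to an exact revision. The snippet below is only a generic sketch of the idea, not how the in-repo bild builder resolves nixpkgs; tracking the nixos-unstable branch tarball still drifts, and a real pin would swap the branch name for a specific commit plus its sha256.)

let
  # Generic sketch: import a nixpkgs tarball directly. Tracking a branch like
  # this still drifts; replace the branch name with an exact commit hash and
  # pass a sha256 to fetchTarball to actually freeze the version.
  unstable = import (builtins.fetchTarball
    "https://github.com/NixOS/nixpkgs/archive/nixos-unstable.tar.gz") { };
in
unstable.ollama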
Diffstat (limited to 'Biz/Dev')
-rw-r--r--  Biz/Dev/Beryllium.nix          3
-rw-r--r--  Biz/Dev/Beryllium/Ollama.nix  47
2 files changed, 49 insertions(+), 1 deletion(-)
diff --git a/Biz/Dev/Beryllium.nix b/Biz/Dev/Beryllium.nix
index 607e5c1..cca0997 100644
--- a/Biz/Dev/Beryllium.nix
+++ b/Biz/Dev/Beryllium.nix
@@ -1,11 +1,12 @@
{ bild }:
-bild.os {
+bild.os-unstable {
imports = [
../OsBase.nix
../Packages.nix
../Users.nix
./Beryllium/Configuration.nix
./Beryllium/Hardware.nix
+ ./Beryllium/Ollama.nix
./Vpn.nix
];
networking.hostName = "beryllium";
diff --git a/Biz/Dev/Beryllium/Ollama.nix b/Biz/Dev/Beryllium/Ollama.nix
new file mode 100644
index 0000000..00aa327
--- /dev/null
+++ b/Biz/Dev/Beryllium/Ollama.nix
@@ -0,0 +1,47 @@
+{ pkgs, ... }:
+/* Ollama API service
+
+ Don't put too much work into this, there's a much better and more complete
+ ollama service (with webui!) being built here:
+ https://github.com/NixOS/nixpkgs/pull/275448
+
+ If you want to spend time on it, spend time over there.
+*/
+{
+
+ systemd.services.ollama = {
+ description = "ollama";
+ after = [ "network.target" ];
+ wantedBy = [ "multi-user.target" ];
+
+ environment = {
+ OLLAMA_HOST = "localhost:11434";
+ # Where to store LLM model files.
+ HOME = "%S/ollama";
+ OLLAMA_MODELS = "%S/ollama/models";
+ OLLAMA_DEBUG = "1";
+ };
+
+ serviceConfig = {
+ ExecStart = "${pkgs.ollama}/bin/ollama serve";
+ User = "ollama";
+ Group = "ollama";
+ Type = "simple";
+ Restart = "on-failure";
+ RestartSec = 3;
+ # Persistent storage for model files, i.e. /var/lib/<StateDirectory>
+ StateDirectory = [ "ollama" ];
+ };
+ };
+
+ # for administration, make this available to users' PATH
+ environment.systemPackages = [ pkgs.ollama ];
+
+ users.groups.ollama = { };
+
+ users.users.ollama = {
+ group = "ollama";
+ isSystemUser = true;
+ extraGroups = [ "render" "video" ];
+ };
+}
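The header comment in Ollama.nix points at a more complete ollama module being built upstream (the nixpkgs PR linked above). Once that work is usable from nixpkgs, this hand-rolled unit could probably shrink to something like the following sketch; the services.ollama option names here are assumptions about the upstream module, not anything defined in this commit, so verify them against nixpkgs before relying on them.

{ ... }:
{
  # Hypothetical replacement built on the upstream NixOS module; option names
  # are assumptions and should be checked against nixpkgs before use.
  services.ollama = {
    enable = true;
    # Mirror the bind address of the hand-rolled unit above.
    environmentVariables = { OLLAMA_HOST = "localhost:11434"; };
    # The render/video group memberships above suggest an AMD GPU, which the
    # upstream module drives through an acceleration option (e.g. "rocm").
    acceleration = "rocm";
  };
}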