Diffstat (limited to 'Omni/Dev/Beryllium/Ollama.nix')
-rw-r--r-- | Omni/Dev/Beryllium/Ollama.nix | 48
1 file changed, 48 insertions, 0 deletions
diff --git a/Omni/Dev/Beryllium/Ollama.nix b/Omni/Dev/Beryllium/Ollama.nix
new file mode 100644
index 0000000..35b4fe1
--- /dev/null
+++ b/Omni/Dev/Beryllium/Ollama.nix
@@ -0,0 +1,48 @@
+{ pkgs, ... }:
+/* Ollama API service
+
+   Don't put too much work into this, there's a much better and more complete
+   ollama service (with webui!) being built here:
+   https://github.com/NixOS/nixpkgs/pull/275448
+
+   If you want to spend time on it, spend time over there.
+*/
+let pkg = pkgs.unstable.ollama;
+in {
+
+  systemd.services.ollama = {
+    description = "ollama";
+    after = [ "network.target" ];
+    wantedBy = [ "multi-user.target" ];
+
+    environment = {
+      OLLAMA_HOST = "localhost:11434";
+      # Where to store LLM model files.
+      HOME = "%S/ollama";
+      OLLAMA_MODELS = "%S/ollama/models";
+      OLLAMA_DEBUG = "1";
+    };
+
+    serviceConfig = {
+      ExecStart = "${pkg}/bin/ollama serve";
+      User = "ollama";
+      Group = "ollama";
+      Type = "simple";
+      Restart = "on-failure";
+      RestartSec = 3;
+      # Persistent storage for model files, i.e. /var/lib/<StateDirectory>
+      StateDirectory = [ "ollama" ];
+    };
+  };
+
+  # for administration, make this available to users' PATH
+  environment.systemPackages = [ pkg ];
+
+  users.groups.ollama = { };
+
+  users.users.ollama = {
+    group = "ollama";
+    isSystemUser = true;
+    extraGroups = [ "render" "video" ];
+  };
+}
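For context, a minimal sketch of how a host configuration might pull this module in; the relative import path layout and the comments are illustrative assumptions, not part of this commit:

    # configuration.nix (hypothetical host configuration, not in this commit)
    { ... }: {
      imports = [ ./Omni/Dev/Beryllium/Ollama.nix ];

      # With the module enabled, the service listens on OLLAMA_HOST
      # (http://localhost:11434) and stores model files under
      # /var/lib/ollama/models, the StateDirectory declared by the module.
    }

Because StateDirectory = [ "ollama" ] expands to /var/lib/ollama for a system service, the %S specifiers used in HOME and OLLAMA_MODELS both resolve beneath that persistent directory.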