{ pkgs, ... }:

/* Ollama API service

   Don't put too much work into this; a much better and more complete
   ollama service (with webui!) is being built here:
   https://github.com/NixOS/nixpkgs/pull/275448
   If you want to spend time on it, spend it over there.
*/
{
  systemd.services.ollama = {
    description = "ollama";
    after = [ "network.target" ];
    wantedBy = [ "multi-user.target" ];
    environment = {
      OLLAMA_HOST = "localhost:11434";
      # Where to store LLM model files.
      HOME = "%S/ollama";
      OLLAMA_MODELS = "%S/ollama/models";
      OLLAMA_DEBUG = "1";
    };
    serviceConfig = {
      ExecStart = "${pkgs.ollama}/bin/ollama serve";
      User = "ollama";
      Group = "ollama";
      Type = "simple";
      Restart = "on-failure";
      RestartSec = 3;
      # Persistent storage for model files, i.e. /var/lib/ollama
      StateDirectory = [ "ollama" ];
    };
  };

  # For administration, make the ollama CLI available on users' PATH.
  environment.systemPackages = [ pkgs.ollama ];

  users.groups.ollama = { };
  users.users.ollama = {
    group = "ollama";
    isSystemUser = true;
    # GPU device access for hardware acceleration.
    extraGroups = [ "render" "video" ];
  };
}
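
# Example usage (a sketch; the model name "llama2" is illustrative and must
# first be pulled with the CLI installed above: `ollama pull llama2`).
# Once the service is running, the HTTP API can be exercised with:
#
#   curl http://localhost:11434/api/generate \
#     -d '{ "model": "llama2", "prompt": "Why is the sky blue?" }'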