From cceefa62d147594d43478e398bbaa9c630670935 Mon Sep 17 00:00:00 2001 From: Ben Sima Date: Sat, 11 May 2024 14:28:09 -0400 Subject: Setup ollama API service This supersedes exllama and tabbyAPI which I could never get working fully. Unfortunately I had to switch to NixOS unstable to get all the Go builder stuff to work, so this is a cause of yet another version drift, but I guess it's inevitable and I should just learn to mitigate it with my nixpkgs shenanigans. --- Biz/Dev/Beryllium/Ollama.nix | 47 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 Biz/Dev/Beryllium/Ollama.nix (limited to 'Biz/Dev/Beryllium') diff --git a/Biz/Dev/Beryllium/Ollama.nix b/Biz/Dev/Beryllium/Ollama.nix new file mode 100644 index 0000000..00aa327 --- /dev/null +++ b/Biz/Dev/Beryllium/Ollama.nix @@ -0,0 +1,47 @@ +{ pkgs, ... }: +/* Ollama API service + + Don't put too much work into this, there's a much better and more complete + ollama service (with webui!) being built here: + https://github.com/NixOS/nixpkgs/pull/275448 + + If you want to spend time on it, spend time over there. +*/ +{ + + systemd.services.ollama = { + description = "ollama"; + after = [ "network.target" ]; + wantedBy = [ "multi-user.target" ]; + + environment = { + OLLAMA_HOST = "localhost:11434"; + # Where to store LLM model files. + HOME = "%S/ollama"; + OLLAMA_MODELS = "%S/ollama/models"; + OLLAMA_DEBUG = "1"; + }; + + serviceConfig = { + ExecStart = "${pkgs.ollama}/bin/ollama serve"; + User = "ollama"; + Group = "ollama"; + Type = "simple"; + Restart = "on-failure"; + RestartSec = 3; + # Persistent storage for model files, i.e. /var/lib/ + StateDirectory = [ "ollama" ]; + }; + }; + + # for administration, make this available to users' PATH + environment.systemPackages = [ pkgs.ollama ]; + + users.groups.ollama = { }; + + users.users.ollama = { + group = "ollama"; + isSystemUser = true; + extraGroups = [ "render" "video" ]; + }; +} -- cgit v1.2.3