summary refs log tree commit diff
path: root/Biz/Dev/Beryllium/Ollama.nix
diff options
context:
space:
mode:
Diffstat (limited to 'Biz/Dev/Beryllium/Ollama.nix')
-rw-r--r--  Biz/Dev/Beryllium/Ollama.nix  47
1 file changed, 47 insertions(+), 0 deletions(-)
diff --git a/Biz/Dev/Beryllium/Ollama.nix b/Biz/Dev/Beryllium/Ollama.nix
new file mode 100644
index 0000000..00aa327
--- /dev/null
+++ b/Biz/Dev/Beryllium/Ollama.nix
@@ -0,0 +1,47 @@
+{ pkgs, ... }:
+/* Ollama API service
+
+   Don't put too much work into this, there's a much better and more complete
+   ollama service (with webui!) being built here:
+   https://github.com/NixOS/nixpkgs/pull/275448
+
+   If you want to spend time on it, spend time over there.
+*/
+{
+
+  systemd.services.ollama = {
+    description = "ollama";
+    after = [ "network.target" ];
+    wantedBy = [ "multi-user.target" ];
+
+    environment = {
+      OLLAMA_HOST = "localhost:11434";
+      # Where to store LLM model files. %S is the systemd state-directory
+      # root (/var/lib for system services), matching StateDirectory below.
+      HOME = "%S/ollama";
+      OLLAMA_MODELS = "%S/ollama/models";
+      # NOTE(review): presumably enables verbose server logging — confirm
+      # against the ollama documentation before relying on it.
+      OLLAMA_DEBUG = "1";
+    };
+
+    serviceConfig = {
+      ExecStart = "${pkgs.ollama}/bin/ollama serve";
+      User = "ollama";
+      Group = "ollama";
+      Type = "simple";
+      Restart = "on-failure";
+      # Seconds to wait before restarting after a failure.
+      RestartSec = 3;
+      # Persistent storage for model files, i.e. /var/lib/<StateDirectory>;
+      # this is the directory the %S specifier above resolves into.
+      StateDirectory = [ "ollama" ];
+    };
+  };
+
+  # For administration, make the ollama CLI available on users' PATH.
+  environment.systemPackages = [ pkgs.ollama ];
+
+  users.groups.ollama = { };
+
+  users.users.ollama = {
+    group = "ollama";
+    isSystemUser = true;
+    # render/video group membership, presumably for GPU access — TODO confirm
+    # which devices the ollama runtime actually needs on this host.
+    extraGroups = [ "render" "video" ];
+  };
+}