From 2532b500c3ed2b8940e831039dcec5a5ea093afc Mon Sep 17 00:00:00 2001
From: Terje Larsen
Date: Fri, 10 Jan 2025 12:31:03 +0100
Subject: [PATCH] ollama: add module (#5735)

---
 modules/modules.nix                           |  1 +
 modules/services/ollama.nix                   | 97 +++++++++++++++++++
 tests/default.nix                             |  1 +
 tests/modules/services/ollama/basic.nix       | 13 +++
 tests/modules/services/ollama/default.nix     |  4 +
 .../ollama/set-environment-variables.nix      | 22 +++++
 6 files changed, 138 insertions(+)
 create mode 100644 modules/services/ollama.nix
 create mode 100644 tests/modules/services/ollama/basic.nix
 create mode 100644 tests/modules/services/ollama/default.nix
 create mode 100644 tests/modules/services/ollama/set-environment-variables.nix

diff --git a/modules/modules.nix b/modules/modules.nix
index 342ed06e..d20300d1 100644
--- a/modules/modules.nix
+++ b/modules/modules.nix
@@ -346,6 +346,7 @@ let
       ./services/nextcloud-client.nix
       ./services/nix-gc.nix
       ./services/notify-osd.nix
+      ./services/ollama.nix
       ./services/opensnitch-ui.nix
       ./services/osmscout-server.nix
       ./services/owncloud-client.nix
diff --git a/modules/services/ollama.nix b/modules/services/ollama.nix
new file mode 100644
index 00000000..66e1e837
--- /dev/null
+++ b/modules/services/ollama.nix
@@ -0,0 +1,97 @@
+{ config, lib, pkgs, ... }:
+
+with lib;
+
+let
+
+  cfg = config.services.ollama;
+
+  ollamaPackage = if cfg.acceleration == null then
+    cfg.package
+  else
+    cfg.package.override { inherit (cfg) acceleration; };
+
+in {
+  meta.maintainers = [ maintainers.terlar ];
+
+  options = {
+    services.ollama = {
+      enable = mkEnableOption "ollama server for local large language models";
+
+      package = mkPackageOption pkgs "ollama" { };
+
+      host = mkOption {
+        type = types.str;
+        default = "127.0.0.1";
+        example = "[::]";
+        description = ''
+          The host address which the ollama server HTTP interface listens to.
+        '';
+      };
+
+      port = mkOption {
+        type = types.port;
+        default = 11434;
+        example = 11111;
+        description = ''
+          Which port the ollama server listens to.
+        '';
+      };
+
+      acceleration = mkOption {
+        type = types.nullOr (types.enum [ false "rocm" "cuda" ]);
+        default = null;
+        example = "rocm";
+        description = ''
+          What interface to use for hardware acceleration.
+
+          - `null`: default behavior
+            - if `nixpkgs.config.rocmSupport` is enabled, uses `"rocm"`
+            - if `nixpkgs.config.cudaSupport` is enabled, uses `"cuda"`
+            - otherwise defaults to `false`
+          - `false`: disable GPU, only use CPU
+          - `"rocm"`: supported by most modern AMD GPUs
+            - may require overriding gpu type with `services.ollama.rocmOverrideGfx`
+              if rocm doesn't detect your AMD gpu
+          - `"cuda"`: supported by most modern NVIDIA GPUs
+        '';
+      };
+
+      environmentVariables = mkOption {
+        type = types.attrsOf types.str;
+        default = { };
+        example = {
+          OLLAMA_LLM_LIBRARY = "cpu";
+          HIP_VISIBLE_DEVICES = "0,1";
+        };
+        description = ''
+          Set arbitrary environment variables for the ollama service.
+
+          Be aware that these are only seen by the ollama server (systemd service),
+          not normal invocations like `ollama run`.
+          Since `ollama run` is mostly a shell around the ollama server, this is usually sufficient.
+        '';
+      };
+    };
+  };
+
+  config = mkIf cfg.enable {
+    systemd.user.services.ollama = {
+      Unit = {
+        Description = "Server for local large language models";
+        After = [ "network.target" ];
+      };
+
+      Service = {
+        ExecStart = "${getExe ollamaPackage} serve";
+        Environment =
+          (mapAttrsToList (n: v: "${n}=${v}") cfg.environmentVariables)
+          ++ [ "OLLAMA_HOST=${cfg.host}:${toString cfg.port}" ];
+      };
+
+      Install = { WantedBy = [ "default.target" ]; };
+    };
+
+    home.packages = [ ollamaPackage ];
+  };
+}
diff --git a/tests/default.nix b/tests/default.nix
index 63884575..1f34bbcc 100644
--- a/tests/default.nix
+++ b/tests/default.nix
@@ -270,6 +270,7 @@ in import nmtSrc {
     ./modules/services/mpd-mpris
     ./modules/services/mpdris2
     ./modules/services/nix-gc
+    ./modules/services/ollama
     ./modules/services/osmscout-server
     ./modules/services/pantalaimon
     ./modules/services/parcellite
diff --git a/tests/modules/services/ollama/basic.nix b/tests/modules/services/ollama/basic.nix
new file mode 100644
index 00000000..fecee950
--- /dev/null
+++ b/tests/modules/services/ollama/basic.nix
@@ -0,0 +1,13 @@
+{
+  config = {
+    services.ollama.enable = true;
+
+    test.stubs.ollama = { };
+
+    nmt.script = ''
+      serviceFile="home-files/.config/systemd/user/ollama.service"
+      assertFileRegex "$serviceFile" 'After=network\.target'
+      assertFileRegex "$serviceFile" 'Environment=OLLAMA_HOST=127.0.0.1:11434'
+    '';
+  };
+}
diff --git a/tests/modules/services/ollama/default.nix b/tests/modules/services/ollama/default.nix
new file mode 100644
index 00000000..84ad2c99
--- /dev/null
+++ b/tests/modules/services/ollama/default.nix
@@ -0,0 +1,4 @@
+{
+  ollama-basic = ./basic.nix;
+  ollama-set-environment-variables = ./set-environment-variables.nix;
+}
diff --git a/tests/modules/services/ollama/set-environment-variables.nix b/tests/modules/services/ollama/set-environment-variables.nix
new file mode 100644
index 00000000..b9ad280c
--- /dev/null
+++ b/tests/modules/services/ollama/set-environment-variables.nix
@@ -0,0 +1,22 @@
+{
+  config = {
+    services.ollama = {
+      enable = true;
+      host = "localhost";
+      port = 11111;
+      environmentVariables = {
+        OLLAMA_LLM_LIBRARY = "cpu";
+        HIP_VISIBLE_DEVICES = "0,1";
+      };
+    };
+
+    test.stubs.ollama = { };
+
+    nmt.script = ''
+      serviceFile="home-files/.config/systemd/user/ollama.service"
+      assertFileRegex "$serviceFile" 'Environment=OLLAMA_HOST=localhost:11111'
+      assertFileRegex "$serviceFile" 'Environment=OLLAMA_LLM_LIBRARY=cpu'
+      assertFileRegex "$serviceFile" 'Environment=HIP_VISIBLE_DEVICES=0,1'
+    '';
+  };
+}