diff --git a/nixos/doc/manual/release-notes/rl-2405.section.md b/nixos/doc/manual/release-notes/rl-2405.section.md
index 8f6dd2fc74cc..019cac01905f 100644
--- a/nixos/doc/manual/release-notes/rl-2405.section.md
+++ b/nixos/doc/manual/release-notes/rl-2405.section.md
@@ -28,6 +28,8 @@ In addition to numerous new and upgraded packages, this release has the followin
 - [rspamd-trainer](https://gitlab.com/onlime/rspamd-trainer), script triggered by a helper which reads mails from a specific mail inbox and feeds them into rspamd for spam/ham training.
 
+- [ollama](https://ollama.ai), server for running large language models locally.
+
 - [Anki Sync Server](https://docs.ankiweb.net/sync-server.html), the official sync server built into recent
   versions of Anki. Available as [services.anki-sync-server](#opt-services.anki-sync-server.enable).
   The pre-existing [services.ankisyncd](#opt-services.ankisyncd.enable) has been marked deprecated and will be
   dropped after 24.05 due to lack of maintenance of the anki-sync-server softwares.
diff --git a/nixos/modules/module-list.nix b/nixos/modules/module-list.nix
index 3bb50d8e6b05..65047bdd110a 100644
--- a/nixos/modules/module-list.nix
+++ b/nixos/modules/module-list.nix
@@ -723,6 +723,7 @@
   ./services/misc/nzbget.nix
   ./services/misc/nzbhydra2.nix
   ./services/misc/octoprint.nix
+  ./services/misc/ollama.nix
   ./services/misc/ombi.nix
   ./services/misc/osrm.nix
   ./services/misc/owncast.nix
diff --git a/nixos/modules/services/misc/ollama.nix b/nixos/modules/services/misc/ollama.nix
new file mode 100644
index 000000000000..9794bbbec464
--- /dev/null
+++ b/nixos/modules/services/misc/ollama.nix
@@ -0,0 +1,42 @@
+{ config, lib, pkgs, ... }: let
+
+  cfg = config.services.ollama;
+
+in {
+
+  options = {
+    services.ollama = {
+      enable = lib.mkEnableOption (
+        lib.mdDoc "Server for local large language models"
+      );
+      package = lib.mkPackageOption pkgs "ollama" { };
+    };
+  };
+
+  config = lib.mkIf cfg.enable {
+
+    systemd = {
+      services.ollama = {
+        wantedBy = [ "multi-user.target" ];
+        description = "Server for local large language models";
+        after = [ "network.target" ];
+        environment = {
+          HOME = "%S/ollama";
+          OLLAMA_MODELS = "%S/ollama/models";
+        };
+        serviceConfig = {
+          ExecStart = "${lib.getExe cfg.package} serve";
+          WorkingDirectory = "/var/lib/ollama";
+          StateDirectory = [ "ollama" ];
+          DynamicUser = true;
+        };
+      };
+    };
+
+    environment.systemPackages = [ cfg.package ];
+
+  };
+
+  meta.maintainers = with lib.maintainers; [ onny ];
+
+}
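
For context, a minimal sketch of how a configuration might consume the new module (this snippet is illustrative and not part of the change; `package` is shown only to demonstrate the option, since it already defaults to `pkgs.ollama`):

```nix
# Hypothetical consumer configuration, not part of this diff.
{ pkgs, ... }:
{
  services.ollama = {
    enable = true;
    # Defaults to pkgs.ollama via mkPackageOption; spelled out for illustration.
    package = pkgs.ollama;
  };
}
```

With this enabled, systemd runs `ollama serve` under a dynamic user. Since `%S` expands to `/var/lib` for system units, both `HOME` and `OLLAMA_MODELS` resolve to paths inside the `/var/lib/ollama` state directory that `StateDirectory` provisions, so downloaded models persist across restarts of the transient user.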
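
The module does not expose a listen-address option, so the daemon keeps upstream ollama's default of binding `127.0.0.1:11434`. A hedged sketch of exposing it on the network, assuming upstream's `OLLAMA_HOST` environment variable and default port (neither is set or validated by this module):

```nix
# Hypothetical follow-up configuration; assumes ollama's default port 11434
# and its OLLAMA_HOST listen-address variable.
{
  systemd.services.ollama.environment.OLLAMA_HOST = "0.0.0.0";
  networking.firewall.allowedTCPPorts = [ 11434 ];
}
```

Because NixOS merges `systemd.services.<name>.environment` attribute sets across modules, this composes cleanly with the `HOME` and `OLLAMA_MODELS` variables the module sets itself.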