nixos/ollama: init

commit 67a799c40f
parent 017bc47e74
Author: Jonas Heinrich
Date:   2023-12-29 01:46:01 +01:00
3 changed files with 45 additions and 0 deletions

nixos/doc/manual/release-notes/rl-2405.section.md

@@ -28,6 +28,8 @@ In addition to numerous new and upgraded packages, this release has the following
- [rspamd-trainer](https://gitlab.com/onlime/rspamd-trainer), script triggered by a helper which reads mails from a specific mail inbox and feeds them into rspamd for spam/ham training.
- [ollama](https://ollama.ai), server for running large language models locally.
- [Anki Sync Server](https://docs.ankiweb.net/sync-server.html), the official sync server built into recent versions of Anki. Available as [services.anki-sync-server](#opt-services.anki-sync-server.enable).
  The pre-existing [services.ankisyncd](#opt-services.ankisyncd.enable) has been marked deprecated and will be dropped after 24.05 due to lack of maintenance of the anki-sync-server software.
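For readers of the release notes, a minimal sketch of turning the new service on in a configuration.nix, assuming a nixpkgs revision that contains this commit:

    { ... }: {
      # Starts the ollama server as a systemd service (module shown below).
      services.ollama.enable = true;
    }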

nixos/modules/module-list.nix

@@ -723,6 +723,7 @@
./services/misc/nzbget.nix
./services/misc/nzbhydra2.nix
./services/misc/octoprint.nix
./services/misc/ollama.nix
./services/misc/ombi.nix
./services/misc/osrm.nix
./services/misc/owncast.nix

nixos/modules/services/misc/ollama.nix

@@ -0,0 +1,42 @@
{ config, lib, pkgs, ... }: let
  cfg = config.services.ollama;
in {
  options = {
    services.ollama = {
      enable = lib.mkEnableOption (
        lib.mdDoc "Server for local large language models"
      );
      package = lib.mkPackageOption pkgs "ollama" { };
    };
  };

  config = lib.mkIf cfg.enable {
    systemd = {
      services.ollama = {
        wantedBy = [ "multi-user.target" ];
        description = "Server for local large language models";
        after = [ "network.target" ];
        environment = {
          HOME = "%S/ollama";
          OLLAMA_MODELS = "%S/ollama/models";
        };
        serviceConfig = {
          ExecStart = "${lib.getExe cfg.package} serve";
          WorkingDirectory = "/var/lib/ollama";
          StateDirectory = [ "ollama" ];
          DynamicUser = true;
        };
      };
    };

    environment.systemPackages = [ cfg.package ];
  };

  meta.maintainers = with lib.maintainers; [ onny ];
}
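For illustration, a sketch of how the module's two options compose in a host configuration; the package attribute is optional and already defaults to pkgs.ollama via mkPackageOption:

    { pkgs, ... }: {
      services.ollama = {
        enable = true;
        # Optional: pin an explicit package; pkgs.ollama is the default.
        package = pkgs.ollama;
      };
    }

On the serviceConfig: with DynamicUser = true and StateDirectory = [ "ollama" ], systemd allocates a transient user and creates /var/lib/ollama for it. The %S specifier expands to /var/lib for system services, so HOME resolves to /var/lib/ollama and downloaded models land in /var/lib/ollama/models.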