depot/third_party/nixpkgs/nixos/modules/services/misc/ollama.nix

{ config, lib, pkgs, ... }: let
  cfg = config.services.ollama;
in {
  options = {
    services.ollama = {
      enable = lib.mkEnableOption (
        lib.mdDoc "Server for local large language models"
      );
      package = lib.mkPackageOption pkgs "ollama" { };
    };
  };

  config = lib.mkIf cfg.enable {
    systemd = {
      services.ollama = {
        wantedBy = [ "multi-user.target" ];
        description = "Server for local large language models";
        after = [ "network.target" ];
        environment = {
          # %S is the systemd state directory root (/var/lib for system
          # services), so home and downloaded models live under /var/lib/ollama.
          HOME = "%S/ollama";
          OLLAMA_MODELS = "%S/ollama/models";
        };
        serviceConfig = {
          ExecStart = "${lib.getExe cfg.package} serve";
          WorkingDirectory = "/var/lib/ollama";
          # systemd creates /var/lib/ollama and runs the daemon as a
          # transient, unprivileged user.
          StateDirectory = [ "ollama" ];
          DynamicUser = true;
        };
      };
    };

    environment.systemPackages = [ cfg.package ];
  };

  meta.maintainers = with lib.maintainers; [ onny ];
}
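
# Usage sketch (illustrative, not part of this module): a minimal NixOS
# configuration enabling the service via the options declared above. With
# StateDirectory and DynamicUser set as they are, pulled models end up under
# /var/lib/ollama/models, and the server listens on ollama's own default
# address (127.0.0.1:11434).
#
#   { pkgs, ... }: {
#     services.ollama.enable = true;
#     # Optional: pin the package explicitly instead of the default pkgs.ollama.
#     services.ollama.package = pkgs.ollama;
#   }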