{
  config,
  lib,
  pkgs,
  ...
}:
let
  inherit (lib) literalExpression types;

  cfg = config.services.ollama;
  ollamaPackage = cfg.package.override { inherit (cfg) acceleration; };

  # run under a static account only when both user and group are configured
  staticUser = cfg.user != null && cfg.group != null;
in
{
  imports = [
    (lib.mkRemovedOptionModule [
      "services"
      "ollama"
      "listenAddress"
    ] "Use `services.ollama.host` and `services.ollama.port` instead.")
    (lib.mkRemovedOptionModule [
      "services"
      "ollama"
      "sandbox"
    ] "Set `services.ollama.user` and `services.ollama.group` instead.")
    (lib.mkRemovedOptionModule
      [
        "services"
        "ollama"
        "writablePaths"
      ]
      "The `models` directory is now always writable. To make other directories writable, use `systemd.services.ollama.serviceConfig.ReadWritePaths`."
    )
  ];

  options = {
    services.ollama = {
      enable = lib.mkEnableOption "ollama server for local large language models";
      package = lib.mkPackageOption pkgs "ollama" { };

      user = lib.mkOption {
        type = with types; nullOr str;
        default = null;
        example = "ollama";
        description = ''
          User account under which to run ollama. Defaults to [`DynamicUser`](https://www.freedesktop.org/software/systemd/man/latest/systemd.exec.html#DynamicUser=)
          when set to `null`.

          The user is automatically created if this option is set to a non-null value.
        '';
      };
      group = lib.mkOption {
        type = with types; nullOr str;
        default = cfg.user;
        defaultText = literalExpression "config.services.ollama.user";
        example = "ollama";
        description = ''
          Group under which to run ollama. Only used when `services.ollama.user` is set.

          The group is automatically created if this option is set to a non-null value.
        '';
      };
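
      # A hedged sketch (illustrative values, not module defaults): running
      # ollama under a static account rather than systemd's DynamicUser. Both
      # options must be non-null for `staticUser` above to take effect.
      #
      #   services.ollama = {
      #     enable = true;
      #     user = "ollama";
      #     group = "ollama";
      #   };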

      home = lib.mkOption {
        type = types.str;
        default = "/var/lib/ollama";
        example = "/home/foo";
        description = ''
          The home directory that the ollama service is started in.
        '';
      };
      models = lib.mkOption {
        type = types.str;
        default = "${cfg.home}/models";
        defaultText = "\${config.services.ollama.home}/models";
        example = "/path/to/ollama/models";
        description = ''
          The directory that the ollama service will read models from and download new models to.
        '';
      };
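
      # For example (the path below is an assumption, not a default): storing
      # models on a separate large disk while keeping the default home.
      #
      #   services.ollama = {
      #     enable = true;
      #     models = "/data/ollama/models";
      #   };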

      host = lib.mkOption {
        type = types.str;
        default = "127.0.0.1";
        example = "[::]";
        description = ''
          The host address on which the ollama server's HTTP interface listens.
        '';
      };
      port = lib.mkOption {
        type = types.port;
        default = 11434;
        example = 11111;
        description = ''
          The port on which the ollama server listens.
        '';
      };
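
      # A sketch of binding the HTTP interface to all interfaces (the address
      # and port below are illustrative):
      #
      #   services.ollama = {
      #     enable = true;
      #     host = "0.0.0.0";
      #     port = 11434;
      #   };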

      acceleration = lib.mkOption {
        type = types.nullOr (
          types.enum [
            false
            "rocm"
            "cuda"
          ]
        );
        default = null;
        example = "rocm";
        description = ''
          What interface to use for hardware acceleration.

          - `null`: default behavior
            - if `nixpkgs.config.rocmSupport` is enabled, uses `"rocm"`
            - if `nixpkgs.config.cudaSupport` is enabled, uses `"cuda"`
            - otherwise defaults to `false`
          - `false`: disable GPU, only use CPU
          - `"rocm"`: supported by most modern AMD GPUs
            - may require overriding the GPU type with `services.ollama.rocmOverrideGfx`
              if ROCm doesn't detect your AMD GPU
          - `"cuda"`: supported by most modern NVIDIA GPUs
        '';
      };
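
      # For instance, forcing ROCm acceleration (assumes a supported AMD GPU):
      #
      #   services.ollama = {
      #     enable = true;
      #     acceleration = "rocm";
      #   };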
      rocmOverrideGfx = lib.mkOption {
        type = types.nullOr types.str;
        default = null;
        example = "10.3.0";
        description = ''
          Override the GPU model that ROCm detects.
          For example, if you have an RX 5700 XT, try setting this to `"10.1.0"` (gfx 1010).

          This sets the value of `HSA_OVERRIDE_GFX_VERSION`. See [ollama's docs](
          https://github.com/ollama/ollama/blob/main/docs/gpu.md#amd-radeon
          ) for details.
        '';
      };
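
      # A sketch following the RX 5700 XT example from the description (the
      # right version string depends on your GPU):
      #
      #   services.ollama = {
      #     enable = true;
      #     acceleration = "rocm";
      #     rocmOverrideGfx = "10.1.0";
      #   };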

      environmentVariables = lib.mkOption {
        type = types.attrsOf types.str;
        default = { };
        example = {
          OLLAMA_LLM_LIBRARY = "cpu";
          HIP_VISIBLE_DEVICES = "0,1";
        };
        description = ''
          Set arbitrary environment variables for the ollama service.

          Be aware that these are only seen by the ollama server (systemd service),
          not normal invocations like `ollama run`.
          Since `ollama run` is mostly a shell around the ollama server, this is usually sufficient.
        '';
      };
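
      # For example, reusing the values from `example` above (which variables
      # ollama recognizes is documented upstream; these values are assumptions
      # about a typical setup, not recommendations):
      #
      #   services.ollama = {
      #     enable = true;
      #     environmentVariables = {
      #       OLLAMA_LLM_LIBRARY = "cpu";
      #       HIP_VISIBLE_DEVICES = "0,1";
      #     };
      #   };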

      loadModels = lib.mkOption {
        type = types.listOf types.str;
        default = [ ];
        description = ''
          Download these models using `ollama pull` as soon as `ollama.service` has started.

          This creates a systemd unit `ollama-model-loader.service`.

          Search for models of your choice from: https://ollama.com/library
        '';
      };
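
      # A sketch preloading models at boot (the model names are illustrative;
      # any names from https://ollama.com/library should work):
      #
      #   services.ollama = {
      #     enable = true;
      #     loadModels = [
      #       "llama3.1:8b"
      #       "mistral"
      #     ];
      #   };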

      openFirewall = lib.mkOption {
        type = types.bool;
        default = false;
        description = ''
          Whether to open the firewall for ollama.

          This adds `services.ollama.port` to `networking.firewall.allowedTCPPorts`.
        '';
      };
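
      # Opening the service to the local network (a sketch; pair this with a
      # non-loopback `host`, since the default only binds 127.0.0.1):
      #
      #   services.ollama = {
      #     enable = true;
      #     host = "0.0.0.0";
      #     openFirewall = true;
      #   };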
    };
  };

  config = lib.mkIf cfg.enable {
    users = lib.mkIf staticUser {
      users.${cfg.user} = {
        inherit (cfg) home;
        isSystemUser = true;
        group = cfg.group;
      };
      groups.${cfg.group} = { };
    };

    systemd.services.ollama = {
      description = "Server for local large language models";
      wantedBy = [ "multi-user.target" ];
      after = [ "network.target" ];
      environment =
        cfg.environmentVariables
        // {
          HOME = cfg.home;
          OLLAMA_MODELS = cfg.models;
          OLLAMA_HOST = "${cfg.host}:${toString cfg.port}";
        }
        // lib.optionalAttrs (cfg.rocmOverrideGfx != null) {
          HSA_OVERRIDE_GFX_VERSION = cfg.rocmOverrideGfx;
        };
      serviceConfig =
        lib.optionalAttrs staticUser {
          User = cfg.user;
          Group = cfg.group;
        }
        // {
          Type = "exec";
          DynamicUser = true;
          ExecStart = "${lib.getExe ollamaPackage} serve";
          WorkingDirectory = cfg.home;
          StateDirectory = [ "ollama" ];
          ReadWritePaths = [
            cfg.home
            cfg.models
          ];

          CapabilityBoundingSet = [ "" ];
          DeviceAllow = [
            # CUDA
            # https://docs.nvidia.com/dgx/pdf/dgx-os-5-user-guide.pdf
            "char-nvidiactl"
            "char-nvidia-caps"
            "char-nvidia-frontend"
            "char-nvidia-uvm"
            # ROCm
            "char-drm"
            "char-kfd"
          ];
          DevicePolicy = "closed";
          LockPersonality = true;
          MemoryDenyWriteExecute = true;
          NoNewPrivileges = true;
          PrivateDevices = false; # setting this to true would hide the acceleration devices
          PrivateTmp = true;
          PrivateUsers = true;
          ProcSubset = "all"; # /proc/meminfo
          ProtectClock = true;
          ProtectControlGroups = true;
          ProtectHome = true;
          ProtectHostname = true;
          ProtectKernelLogs = true;
          ProtectKernelModules = true;
          ProtectKernelTunables = true;
          ProtectProc = "invisible";
          ProtectSystem = "strict";
          RemoveIPC = true;
          RestrictNamespaces = true;
          RestrictRealtime = true;
          RestrictSUIDSGID = true;
          RestrictAddressFamilies = [
            "AF_INET"
            "AF_INET6"
            "AF_UNIX"
          ];
          SupplementaryGroups = [ "render" ]; # for rocm to access /dev/dri/renderD* devices
          SystemCallArchitectures = "native";
          SystemCallFilter = [
            "@system-service @resources"
            "~@privileged"
          ];
          UMask = "0077";
        };
    };
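
    # If ollama needs write access beyond `home` and `models`, the hardened
    # unit above can be loosened from your own configuration, as the removed
    # `writablePaths` option's message suggests (the path is illustrative):
    #
    #   systemd.services.ollama.serviceConfig.ReadWritePaths = [ "/data/cache" ];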

    systemd.services.ollama-model-loader = lib.mkIf (cfg.loadModels != [ ]) {
      description = "Download ollama models in the background";
      wantedBy = [
        "multi-user.target"
        "ollama.service"
      ];
      after = [ "ollama.service" ];
      bindsTo = [ "ollama.service" ];
      environment = config.systemd.services.ollama.environment;
      serviceConfig = {
        Type = "exec";
        DynamicUser = true;
        Restart = "on-failure";
        # bounded exponential backoff
        RestartSec = "1s";
        RestartMaxDelaySec = "2h";
        RestartSteps = "10";
      };

      script = ''
        total=${toString (builtins.length cfg.loadModels)}
        failed=0

        # pull all models in parallel
        for model in ${lib.escapeShellArgs cfg.loadModels}; do
          '${lib.getExe ollamaPackage}' pull "$model" &
        done

        # wait for each pull and count failures
        for job in $(jobs -p); do
          set +e
          wait $job
          exit_code=$?
          set -e

          if [ $exit_code != 0 ]; then
            failed=$((failed + 1))
          fi
        done

        if [ $failed != 0 ]; then
          echo "error: $failed out of $total attempted model downloads failed" >&2
          exit 1
        fi
      '';
    };

    networking.firewall = lib.mkIf cfg.openFirewall { allowedTCPPorts = [ cfg.port ]; };

    environment.systemPackages = [ ollamaPackage ];
  };

  meta.maintainers = with lib.maintainers; [
    abysssol
    onny
  ];
}