Merge commit 'fece082f6c165d89daf650c3b80a074079b8af50' into HEAD

Luke Granger-Brown 2024-12-13 20:54:23 +00:00
commit e2c645ca5d
22611 changed files with 744944 additions and 486731 deletions


@ -215,3 +215,9 @@ adb9714bd909df283c66bbd641bd631ff50a4260
# treewide: incus packages
9ab59bb5fb943ad6740f64f5a79eae9642fb8211
# treewide nixfmt reformat pass 1, master, staging and staging-next
4f0dadbf38ee4cf4cc38cbc232b7708fddf965bc
667d42c00d566e091e6b9a19b365099315d0e611
84d4f874c2bac9f3118cb6907d7113b3318dcb5e


@ -28,7 +28,7 @@ jobs:
ref: ${{ github.event.pull_request.head.sha }}
token: ${{ steps.app-token.outputs.token }}
- name: Create backport PRs
uses: korthout/backport-action@bd410d37cdcae80be6d969823ff5a225fe5c833f # v3.0.2
uses: korthout/backport-action@be567af183754f6a5d831ae90f648954763f17f5 # v3.1.0
with:
# Config README: https://github.com/korthout/backport-action#backport-action
copy_labels_pattern: 'severity:\ssecurity'


@ -0,0 +1,30 @@
name: "Building Nixpkgs lib-tests"
permissions:
contents: read
on:
pull_request_target:
paths:
- 'lib/**'
jobs:
get-merge-commit:
uses: ./.github/workflows/get-merge-commit.yml
nixpkgs-lib-tests:
name: nixpkgs-lib-tests
runs-on: ubuntu-latest
needs: get-merge-commit
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
if: needs.get-merge-commit.outputs.mergedSha
with:
# pull_request_target checks out the base branch by default
ref: ${{ needs.get-merge-commit.outputs.mergedSha }}
- uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
with:
# explicitly enable sandbox
extra_nix_config: sandbox = true
- name: Building Nixpkgs lib-tests
run: |
nix-build --arg pkgs "(import ./ci/. {}).pkgs" ./lib/tests/release.nix
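For reference, a minimal Nix sketch of what this build step evaluates, derived from the `nix-build` invocation above (it assumes a nixpkgs checkout as the working directory and is only a local approximation of the CI step):

```nix
# Roughly equivalent to:
#   nix-build --arg pkgs "(import ./ci/. {}).pkgs" ./lib/tests/release.nix
import ./lib/tests/release.nix {
  pkgs = (import ./ci/. { }).pkgs; # the CI-pinned package set
}
```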


@ -222,6 +222,7 @@ jobs:
if: needs.process.outputs.baseRunId
permissions:
pull-requests: write
statuses: write
steps:
- name: Download process result
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
@ -261,3 +262,23 @@ jobs:
GH_TOKEN: ${{ github.token }}
REPOSITORY: ${{ github.repository }}
NUMBER: ${{ github.event.number }}
- name: Add eval summary to commit statuses
if: ${{ github.event_name == 'pull_request_target' }}
run: |
description=$(jq -r '
"Package: added " + (.attrdiff.added | length | tostring) +
", removed " + (.attrdiff.removed | length | tostring) +
", changed " + (.attrdiff.changed | length | tostring) +
", Rebuild: linux " + (.rebuildCountByKernel.linux | tostring) +
", darwin " + (.rebuildCountByKernel.darwin | tostring)
' <comparison/changed-paths.json)
target_url="$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/actions/runs/$GITHUB_RUN_ID?pr=$NUMBER"
gh api --method POST \
-H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28" \
"/repos/$GITHUB_REPOSITORY/statuses/$PR_HEAD_SHA" \
-f "context=Eval / Summary" -f "state=success" -f "description=$description" -f "target_url=$target_url"
env:
GH_TOKEN: ${{ github.token }}
PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
NUMBER: ${{ github.event.number }}

third_party/nixpkgs/.mergify.yml

@ -0,0 +1,22 @@
queue_rules:
# This rule is for https://docs.mergify.com/commands/queue/
# and can be triggered with: @mergifyio queue
- name: default
merge_conditions:
# all github action checks in this list are required to merge a pull request
- check-success=Attributes
- check-success=Check
- check-success=Outpaths (aarch64-darwin)
- check-success=Outpaths (aarch64-linux)
- check-success=Outpaths (x86_64-darwin)
- check-success=Outpaths (x86_64-linux)
- check-success=Process
- check-success=Request
- check-success=Tag
- check-success=editorconfig-check
- check-success=label-pr
- check-success=nix-files-parseable-check
- check-success=nixfmt-check
- check-success=nixpkgs-vet
# queue up to 5 pull requests at a time
batch_size: 5


@ -157,6 +157,7 @@ nixos/modules/installer/tools/nix-fallback-paths.nix @NixOS/nix-team @raitobeza
# Python-related code and docs
/doc/languages-frameworks/python.section.md @mweinelt @natsukium
/maintainers/scripts/update-python-libraries @mweinelt @natsukium
/pkgs/by-name/up/update-python-libraries @mweinelt @natsukium
/pkgs/development/interpreters/python @mweinelt @natsukium
/pkgs/top-level/python-packages.nix @natsukium
/pkgs/top-level/release-python.nix @natsukium
@ -206,8 +207,8 @@ nixos/modules/installer/tools/nix-fallback-paths.nix @NixOS/nix-team @raitobeza
# Browsers
/pkgs/applications/networking/browsers/firefox @mweinelt
/pkgs/applications/networking/browsers/chromium @emilylange
/nixos/tests/chromium.nix @emilylange
/pkgs/applications/networking/browsers/chromium @emilylange @networkException
/nixos/tests/chromium.nix @emilylange @networkException
# Certificate Authorities
pkgs/data/misc/cacert/ @ajs124 @lukegb @mweinelt
@ -222,7 +223,7 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
/pkgs/top-level/java-packages.nix @NixOS/java
# Jetbrains
/pkgs/applications/editors/jetbrains @edwtjo
/pkgs/applications/editors/jetbrains @edwtjo @leona-ya
# Licenses
/lib/licenses.nix @alyssais


@ -1,164 +0,0 @@
# Turns
#
# {
# "hello.aarch64-linux": "a",
# "hello.x86_64-linux": "b",
# "hello.aarch64-darwin": "c",
# "hello.x86_64-darwin": "d"
# }
#
# into
#
# {
# "hello": {
# "linux": {
# "aarch64": "a",
# "x86_64": "b"
# },
# "darwin": {
# "aarch64": "c",
# "x86_64": "d"
# }
# }
# }
#
# while filtering out any attribute paths that don't match this pattern
def expand_system:
to_entries
| map(
.key |= split(".")
| select(.key | length > 1)
| .double = (.key[-1] | split("-"))
| select(.double | length == 2)
)
| group_by(.key[0:-1])
| map(
{
key: .[0].key[0:-1] | join("."),
value:
group_by(.double[1])
| map(
{
key: .[0].double[1],
value: map(.key = .double[0]) | from_entries
}
)
| from_entries
})
| from_entries
;
# Transposes
#
# {
# "a": [ "x", "y" ],
# "b": [ "x" ],
# }
#
# into
#
# {
# "x": [ "a", "b" ],
# "y": [ "a" ]
# }
def transpose:
[
to_entries[]
| {
key: .key,
value: .value[]
}
]
| group_by(.value)
| map({
key: .[0].value,
value: map(.key)
})
| from_entries
;
# Computes the key difference for two objects:
# {
# added: [ <keys only in the second object> ],
# removed: [ <keys only in the first object> ],
# changed: [ <keys with different values between the two objects> ],
# }
#
def diff($before; $after):
{
added: $after | delpaths($before | keys | map([.])) | keys,
removed: $before | delpaths($after | keys | map([.])) | keys,
changed:
$before
| to_entries
| map(
$after."\(.key)" as $after2
| select(
# Filter out attributes that don't exist anymore
($after2 != null)
and
# Filter out attributes that are the same as the new value
(.value != $after2)
)
| .key
)
}
;
($before[0] | expand_system) as $before
| ($after[0] | expand_system) as $after
| .attrdiff = diff($before; $after)
| .rebuildsByKernel = (
[
(
.attrdiff.changed[]
| {
key: .,
value: diff($before."\(.)"; $after."\(.)").changed
}
)
,
(
.attrdiff.added[]
| {
key: .,
value: ($after."\(.)" | keys)
}
)
]
| from_entries
| transpose
)
| .rebuildCountByKernel = (
.rebuildsByKernel
| with_entries(.value |= length)
| pick(.linux, .darwin)
| {
linux: (.linux // 0),
darwin: (.darwin // 0),
}
)
| .labels = (
.rebuildCountByKernel
| to_entries
| map(
"10.rebuild-\(.key): " +
if .value == 0 then
"0"
elif .value <= 10 then
"1-10"
elif .value <= 100 then
"11-100"
elif .value <= 500 then
"101-500"
elif .value <= 1000 then
"501-1000"
elif .value <= 2500 then
"1001-2500"
elif .value <= 5000 then
"2501-5000"
else
"5001+"
end
)
)


@ -0,0 +1,114 @@
{
lib,
jq,
runCommand,
writeText,
...
}:
{ beforeResultDir, afterResultDir }:
let
/*
Derivation that computes which packages are affected (added, changed or removed) between two revisions of nixpkgs.
Note: "platforms" are "x86_64-linux", "aarch64-darwin", ...
---
Inputs:
- beforeResultDir, afterResultDir: The evaluation result from before and after the change.
They can be obtained by running `nix-build -A ci.eval.full` on both revisions.
---
Outputs:
- changed-paths.json: Various information about the changes:
{
attrdiff: {
added: ["package1"],
changed: ["package2", "package3"],
removed: ["package4"],
},
labels: [
"10.rebuild-darwin: 1-10",
"10.rebuild-linux: 1-10"
],
rebuildsByKernel: {
darwin: ["package1", "package2"],
linux: ["package1", "package2", "package3"]
},
rebuildCountByKernel: {
darwin: 2,
linux: 3,
},
rebuildsByPlatform: {
aarch64-darwin: ["package1", "package2"],
aarch64-linux: ["package1", "package2"],
x86_64-linux: ["package1", "package2", "package3"],
x86_64-darwin: ["package1"],
},
}
- step-summary.md: A markdown render of the changes
---
Implementation details:
Helper functions can be found in ./utils.nix.
Two main "types" are important:
- `packagePlatformPath`: A string of the form "<PACKAGE_PATH>.<PLATFORM>"
Example: "python312Packages.numpy.x86_64-linux"
- `packagePlatformAttr`: An attrs representation of a packagePlatformPath:
Example: { name = "python312Packages.numpy"; platform = "x86_64-linux"; }
*/
inherit (import ./utils.nix { inherit lib; })
diff
groupByKernel
convertToPackagePlatformAttrs
groupByPlatform
extractPackageNames
getLabels
uniqueStrings
;
getAttrs = dir: builtins.fromJSON (builtins.readFile "${dir}/outpaths.json");
beforeAttrs = getAttrs beforeResultDir;
afterAttrs = getAttrs afterResultDir;
# Attrs
# - keys: "added", "changed" and "removed"
# - values: lists of `packagePlatformPath`s
diffAttrs = diff beforeAttrs afterAttrs;
changed-paths =
let
rebuilds = uniqueStrings (diffAttrs.added ++ diffAttrs.changed);
rebuildsPackagePlatformAttrs = convertToPackagePlatformAttrs rebuilds;
rebuildsByPlatform = groupByPlatform rebuildsPackagePlatformAttrs;
rebuildsByKernel = groupByKernel rebuildsPackagePlatformAttrs;
rebuildCountByKernel = lib.mapAttrs (
kernel: kernelRebuilds: lib.length kernelRebuilds
) rebuildsByKernel;
in
writeText "changed-paths.json" (
builtins.toJSON {
attrdiff = lib.mapAttrs (_: extractPackageNames) diffAttrs;
inherit
rebuildsByPlatform
rebuildsByKernel
rebuildCountByKernel
;
labels = getLabels rebuildCountByKernel;
}
);
in
runCommand "compare"
{
nativeBuildInputs = [ jq ];
}
''
mkdir $out
cp ${changed-paths} $out/changed-paths.json
jq -r -f ${./generate-step-summary.jq} < ${changed-paths} > $out/step-summary.md
# TODO: Compare eval stats
''
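A hedged sketch of invoking this derivation by hand; the relative path to the nixpkgs root and the result directories are assumptions, and in CI the function is instead wired up through `ci/eval` as shown further below in this diff:

```nix
# Sketch only; the paths below are placeholders.
let
  pkgs = import ../../.. { }; # assumed location of the nixpkgs root relative to this file
  compare = pkgs.callPackage ./. { }; # fills in lib, jq, runCommand and writeText
in
compare {
  beforeResultDir = "/tmp/eval-before"; # result of `nix-build -A ci.eval.full` on the base revision
  afterResultDir = "/tmp/eval-after"; # the same on the PR revision
}
```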


@ -0,0 +1,213 @@
{ lib, ... }:
rec {
# Borrowed from https://github.com/NixOS/nixpkgs/pull/355616
uniqueStrings = list: builtins.attrNames (builtins.groupBy lib.id list);
/*
Converts a `packagePlatformPath` into a `packagePlatformAttr`
Turns
"hello.aarch64-linux"
into
{
name = "hello";
platform = "aarch64-linux";
}
*/
convertToPackagePlatformAttr =
packagePlatformPath:
let
# python312Packages.numpy.aarch64-linux -> ["python312Packages" "numpy" "aarch64-linux"]
splittedPath = lib.splitString "." packagePlatformPath;
# ["python312Packages" "numpy" "aarch64-linux"] -> ["python312Packages" "numpy"]
packagePath = lib.sublist 0 (lib.length splittedPath - 1) splittedPath;
# "python312Packages.numpy"
name = lib.concatStringsSep "." packagePath;
in
if name == "" then
null
else
{
# python312Packages.numpy
inherit name;
# "aarch64-linux"
platform = lib.last splittedPath;
};
/*
Converts a list of `packagePlatformPath`s into a list of `packagePlatformAttr`s
Turns
[
"hello.aarch64-linux"
"hello.x86_64-linux"
"hello.aarch64-darwin"
"hello.x86_64-darwin"
"bye.x86_64-darwin"
"bye.aarch64-darwin"
"release-checks" <- Will be dropped
]
into
[
{ name = "hello"; platform = "aarch64-linux"; }
{ name = "hello"; platform = "x86_64-linux"; }
{ name = "hello"; platform = "aarch64-darwin"; }
{ name = "hello"; platform = "x86_64-darwin"; }
{ name = "bye"; platform = "aarch64-darwin"; }
{ name = "bye"; platform = "x86_64-darwin"; }
]
*/
convertToPackagePlatformAttrs =
packagePlatformPaths:
builtins.filter (x: x != null) (builtins.map convertToPackagePlatformAttr packagePlatformPaths);
/*
Converts a list of `packagePlatformPath`s directly to a list of (unique) package names
Turns
[
"hello.aarch64-linux"
"hello.x86_64-linux"
"hello.aarch64-darwin"
"hello.x86_64-darwin"
"bye.x86_64-darwin"
"bye.aarch64-darwin"
]
into
[
"hello"
"bye"
]
*/
extractPackageNames =
packagePlatformPaths:
let
packagePlatformAttrs = convertToPackagePlatformAttrs (uniqueStrings packagePlatformPaths);
in
uniqueStrings (builtins.map (p: p.name) packagePlatformAttrs);
/*
Computes the key difference between two attrs
{
added: [ <keys only in the second object> ],
removed: [ <keys only in the first object> ],
changed: [ <keys with different values between the two objects> ],
}
*/
diff =
let
filterKeys = cond: attrs: lib.attrNames (lib.filterAttrs cond attrs);
in
old: new: {
added = filterKeys (n: _: !(old ? ${n})) new;
removed = filterKeys (n: _: !(new ? ${n})) old;
changed = filterKeys (
n: v:
# Filter out attributes that don't exist anymore
(new ? ${n})
# Filter out attributes that are the same as the new value
&& (v != (new.${n}))
) old;
};
/*
Group a list of `packagePlatformAttr`s by platforms
Turns
[
{ name = "hello"; platform = "aarch64-linux"; }
{ name = "hello"; platform = "x86_64-linux"; }
{ name = "hello"; platform = "aarch64-darwin"; }
{ name = "hello"; platform = "x86_64-darwin"; }
{ name = "bye"; platform = "aarch64-darwin"; }
{ name = "bye"; platform = "x86_64-darwin"; }
]
into
{
aarch64-linux = [ "hello" ];
x86_64-linux = [ "hello" ];
aarch64-darwin = [ "hello" "bye" ];
x86_64-darwin = [ "hello" "bye" ];
}
*/
groupByPlatform =
packagePlatformAttrs:
let
packagePlatformAttrsByPlatform = builtins.groupBy (p: p.platform) packagePlatformAttrs;
extractPackageNames = map (p: p.name);
in
lib.mapAttrs (_: extractPackageNames) packagePlatformAttrsByPlatform;
# Turns
# [
# { name = "hello"; platform = "aarch64-linux"; }
# { name = "hello"; platform = "x86_64-linux"; }
# { name = "hello"; platform = "aarch64-darwin"; }
# { name = "hello"; platform = "x86_64-darwin"; }
# { name = "bye"; platform = "aarch64-darwin"; }
# { name = "bye"; platform = "x86_64-darwin"; }
# ]
#
# into
#
# {
# linux = [ "hello" ];
# darwin = [ "hello" "bye" ];
# }
groupByKernel =
packagePlatformAttrs:
let
filterKernel =
kernel:
builtins.attrNames (
builtins.groupBy (p: p.name) (
builtins.filter (p: lib.hasSuffix kernel p.platform) packagePlatformAttrs
)
);
in
lib.genAttrs [ "linux" "darwin" ] filterKernel;
/*
Maps an attrs of `kernel - rebuild counts` mappings to a list of labels
Turns
{
linux = 56;
darwin = 8;
}
into
[
"10.rebuild-darwin: 1-10"
"10.rebuild-linux: 11-100"
]
*/
getLabels = lib.mapAttrsToList (
kernel: rebuildCount:
let
number =
if rebuildCount == 0 then
"0"
else if rebuildCount <= 10 then
"1-10"
else if rebuildCount <= 100 then
"11-100"
else if rebuildCount <= 500 then
"101-500"
else if rebuildCount <= 1000 then
"501-1000"
else if rebuildCount <= 2500 then
"1001-2500"
else if rebuildCount <= 5000 then
"2501-5000"
else
"5001+";
in
"10.rebuild-${kernel}: ${number}"
);
}
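A small hedged usage sketch of the helpers above; the import path is an assumption based on where this file appears to live in the diff:

```nix
# Sketch: group two packagePlatformPaths by kernel.
let
  lib = (import <nixpkgs> { }).lib;
  utils = import ./ci/eval/compare/utils.nix { inherit lib; };
  rebuilds = [
    "hello.x86_64-linux"
    "hello.aarch64-darwin"
  ];
in
utils.groupByKernel (utils.convertToPackagePlatformAttrs rebuilds)
# => { darwin = [ "hello" ]; linux = [ "hello" ]; }
```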


@ -2,6 +2,7 @@
lib,
runCommand,
writeShellScript,
writeText,
linkFarm,
time,
procps,
@ -246,24 +247,15 @@ let
jq -s from_entries > $out/stats.json
'';
compare =
{ beforeResultDir, afterResultDir }:
runCommand "compare"
{
nativeBuildInputs = [
jq
];
}
''
mkdir $out
jq -n -f ${./compare.jq} \
--slurpfile before ${beforeResultDir}/outpaths.json \
--slurpfile after ${afterResultDir}/outpaths.json \
> $out/changed-paths.json
jq -r -f ${./generate-step-summary.jq} < $out/changed-paths.json > $out/step-summary.md
# TODO: Compare eval stats
'';
compare = import ./compare {
inherit
lib
jq
runCommand
writeText
supportedSystems
;
};
full =
{


@ -1,4 +1,4 @@
{
"rev": "31d66ae40417bb13765b0ad75dd200400e98de84",
"sha256": "0fwsqd05bnk635niqnx9vqkdbinjq0ffdrbk66xllfyrnx4fvmpc"
"rev": "929116e316068c7318c54eb4d827f7d9756d5e9c",
"sha256": "1am61kcakn9j47435k4cgsarvypb8klv4avszxza0jn362hp3ck8"
}


@ -1,6 +1,8 @@
let requiredVersion = import ./lib/minver.nix; in
let
requiredVersion = import ./lib/minver.nix;
in
if ! builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
if !builtins ? nixVersion || builtins.compareVersions requiredVersion builtins.nixVersion == 1 then
abort ''


@ -95,6 +95,7 @@ Inlining HTML is not allowed. Parts of the documentation gets rendered to variou
#### Roles
If you want to link to a man page, you can use `` {manpage}`nix.conf(5)` ``. The references will turn into links when a mapping exists in [`doc/manpage-urls.json`](./manpage-urls.json).
Please keep the `manpage-urls.json` file alphabetically sorted.
A few markups for other kinds of literals are also available:


@ -755,6 +755,9 @@ Used with Subversion. Expects `url` to a Subversion directory, `rev`, and `hash`
Used with Git. Expects `url` to a Git repo, `rev`, and `hash`. `rev` in this case can be the full git commit id (SHA1 hash) or a tag name like `refs/tags/v1.0`.
If you want to fetch a tag, you should pass the `tag` parameter instead of `rev`, which has the same effect as setting `rev = "refs/tags/${version}"`.
This is safer than just setting `rev = version` w.r.t. possible branch and tag name conflicts.
Additionally, the following optional arguments can be given:
*`fetchSubmodules`* (Boolean)
@ -833,7 +836,7 @@ A number of fetcher functions wrap part of `fetchurl` and `fetchzip`. They are m
## `fetchFromGitHub` {#fetchfromgithub}
`fetchFromGitHub` expects four arguments. `owner` is a string corresponding to the GitHub user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every GitHub HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g `v1.0`) that will be downloaded from Git. Finally, `hash` corresponds to the hash of the extracted directory. Again, other hash algorithms are also available, but `hash` is currently preferred.
`fetchFromGitHub` expects four arguments. `owner` is a string corresponding to the GitHub user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every GitHub HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g. `v1.0`) that will be downloaded from Git. If you need to fetch a tag, however, prefer the `tag` parameter, which achieves this more safely and with less boilerplate. Finally, `hash` corresponds to the hash of the extracted directory. Again, other hash algorithms are also available, but `hash` is currently preferred.
To use a different GitHub instance, use `githubBase` (defaults to `"github.com"`).
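As a hedged illustration of the `tag` parameter described above (the owner, repo, tag and hash values are placeholders, not taken from a real package):

```nix
# Sketch only: placeholder values, shown to contrast `tag` with `rev`.
fetchFromGitHub {
  owner = "example-owner"; # hypothetical
  repo = "example-repo"; # hypothetical
  tag = "v1.0"; # preferred over rev = "refs/tags/v1.0"
  hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # fake hash
}
```

Using `tag` this way avoids the branch/tag name ambiguity that a bare `rev = version` can run into.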


@ -881,7 +881,7 @@ dockerTools.pullImage {
imageDigest = "sha256:b8ea88f763f33dfda2317b55eeda3b1a4006692ee29e60ee54ccf6d07348c598";
finalImageName = "nix";
finalImageTag = "2.19.3";
sha256 = "zRwlQs1FiKrvHPaf8vWOR/Tlp1C5eLn1d9pE4BZg3oA=";
hash = "sha256-zRwlQs1FiKrvHPaf8vWOR/Tlp1C5eLn1d9pE4BZg3oA=";
}
```
:::
@ -898,7 +898,7 @@ dockerTools.pullImage {
imageDigest = "sha256:24a23053f29266fb2731ebea27f915bb0fb2ae1ea87d42d890fe4e44f2e27c5d";
finalImageName = "etcd";
finalImageTag = "v3.5.11";
sha256 = "Myw+85f2/EVRyMB3axECdmQ5eh9p1q77FWYKy8YpRWU=";
hash = "sha256-Myw+85f2/EVRyMB3axECdmQ5eh9p1q77FWYKy8YpRWU=";
}
```
:::
@ -922,7 +922,7 @@ Writing manifest to image destination
{
imageName = "nixos/nix";
imageDigest = "sha256:498fa2d7f2b5cb3891a4edf20f3a8f8496e70865099ba72540494cd3e2942634";
sha256 = "1q6cf2pdrasa34zz0jw7pbs6lvv52rq2aibgxccbwcagwkg2qj1q";
hash = "sha256-OEgs3uRPMb4Y629FJXAWZW9q9LqHS/A/GUqr3K5wzOA=";
finalImageName = "nixos/nix";
finalImageTag = "latest";
}


@ -1,74 +1,82 @@
{ nixpkgsPath, revision, libsetsJSON }:
{
nixpkgsPath,
revision,
libsetsJSON,
}:
let
lib = import (nixpkgsPath + "/lib");
libsets = builtins.fromJSON libsetsJSON;
libDefPos = prefix: set:
builtins.concatMap
(name: [{
name = builtins.concatStringsSep "." (prefix ++ [name]);
location = builtins.unsafeGetAttrPos name set;
}] ++ lib.optionals
(builtins.length prefix == 0 && builtins.isAttrs set.${name})
(libDefPos (prefix ++ [name]) set.${name})
) (builtins.attrNames set);
libDefPos =
prefix: set:
builtins.concatMap (
name:
[
{
name = builtins.concatStringsSep "." (prefix ++ [ name ]);
location = builtins.unsafeGetAttrPos name set;
}
]
++ lib.optionals (builtins.length prefix == 0 && builtins.isAttrs set.${name}) (
libDefPos (prefix ++ [ name ]) set.${name}
)
) (builtins.attrNames set);
libset = toplib:
builtins.map
(subsetname: {
subsetname = subsetname;
functions = libDefPos [] toplib.${subsetname};
})
(builtins.map (x: x.name) libsets);
libset =
toplib:
builtins.map (subsetname: {
subsetname = subsetname;
functions = libDefPos [ ] toplib.${subsetname};
}) (builtins.map (x: x.name) libsets);
flattenedLibSubset = { subsetname, functions }:
builtins.map
(fn: {
flattenedLibSubset =
{ subsetname, functions }:
builtins.map (fn: {
name = "lib.${subsetname}.${fn.name}";
value = fn.location;
})
functions;
}) functions;
locatedlibsets = libs: builtins.map flattenedLibSubset (libset libs);
removeFilenamePrefix = prefix: filename:
removeFilenamePrefix =
prefix: filename:
let
prefixLen = (builtins.stringLength prefix) + 1; # +1 to remove the leading /
prefixLen = (builtins.stringLength prefix) + 1; # +1 to remove the leading /
filenameLen = builtins.stringLength filename;
substr = builtins.substring prefixLen filenameLen filename;
in substr;
in
substr;
removeNixpkgs = removeFilenamePrefix (builtins.toString nixpkgsPath);
liblocations =
builtins.filter
(elem: elem.value != null)
(lib.lists.flatten
(locatedlibsets lib));
liblocations = builtins.filter (elem: elem.value != null) (lib.lists.flatten (locatedlibsets lib));
fnLocationRelative = { name, value }:
fnLocationRelative =
{ name, value }:
{
inherit name;
value = value // { file = removeNixpkgs value.file; };
value = value // {
file = removeNixpkgs value.file;
};
};
relativeLocs = (builtins.map fnLocationRelative liblocations);
sanitizeId = builtins.replaceStrings
[ "'" ]
[ "-prime" ];
sanitizeId = builtins.replaceStrings [ "'" ] [ "-prime" ];
urlPrefix = "https://github.com/NixOS/nixpkgs/blob/${revision}";
jsonLocs = builtins.listToAttrs
(builtins.map
({ name, value }: {
jsonLocs = builtins.listToAttrs (
builtins.map (
{ name, value }:
{
name = sanitizeId name;
value =
let
text = "${value.file}:${builtins.toString value.line}";
target = "${urlPrefix}/${value.file}#L${builtins.toString value.line}";
in
"[${text}](${target}) in `<nixpkgs>`";
})
relativeLocs);
"[${text}](${target}) in `<nixpkgs>`";
}
) relativeLocs
);
in
jsonLocs


@ -27,42 +27,48 @@ mkShell {
name = "dotnet-env";
packages = [
(with dotnetCorePackages; combinePackages [
sdk_6_0
sdk_7_0
sdk_8_0
sdk_9_0
])
];
}
```
This will produce a dotnet installation that has the dotnet 6.0 7.0 sdk. The first sdk listed will have it's cli utility present in the resulting environment. Example info output:
This will produce a dotnet installation that has the dotnet 8.0 and 9.0 SDKs. The first SDK listed will have its CLI utility present in the resulting environment. Example info output:
```ShellSession
$ dotnet --info
.NET SDK:
Version: 7.0.202
Commit: 6c74320bc3
Version: 9.0.100
Commit: 59db016f11
Workload version: 9.0.100-manifests.3068a692
MSBuild version: 17.12.7+5b8665660
Runtime Environment:
OS Name: nixos
OS Version: 23.05
OS Version: 25.05
OS Platform: Linux
RID: linux-x64
Base Path: /nix/store/n2pm44xq20hz7ybsasgmd7p3yh31gnh4-dotnet-sdk-7.0.202/sdk/7.0.202/
Base Path: /nix/store/a03c70i7x6rjdr6vikczsp5ck3v6rixh-dotnet-sdk-9.0.100/share/dotnet/sdk/9.0.100/
.NET workloads installed:
There are no installed workloads to display.
Configured to use loose manifests when installing new manifests.
Host:
Version: 7.0.4
Version: 9.0.0
Architecture: x64
Commit: 0a396acafe
Commit: 9d5a6a9aa4
.NET SDKs installed:
6.0.407 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/sdk]
7.0.202 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/sdk]
8.0.404 [/nix/store/6wlrjiy10wg766490dcmp6x64zb1vc8j-dotnet-core-combined/share/dotnet/sdk]
9.0.100 [/nix/store/6wlrjiy10wg766490dcmp6x64zb1vc8j-dotnet-core-combined/share/dotnet/sdk]
.NET runtimes installed:
Microsoft.AspNetCore.App 6.0.15 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/shared/Microsoft.AspNetCore.App]
Microsoft.AspNetCore.App 7.0.4 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/shared/Microsoft.AspNetCore.App]
Microsoft.NETCore.App 6.0.15 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/shared/Microsoft.NETCore.App]
Microsoft.NETCore.App 7.0.4 [/nix/store/3b19303vwrhv0xxz1hg355c7f2hgxxgd-dotnet-core-combined/shared/Microsoft.NETCore.App]
Microsoft.AspNetCore.App 8.0.11 [/nix/store/6wlrjiy10wg766490dcmp6x64zb1vc8j-dotnet-core-combined/share/dotnet/shared/Microsoft.AspNetCore.App]
Microsoft.AspNetCore.App 9.0.0 [/nix/store/6wlrjiy10wg766490dcmp6x64zb1vc8j-dotnet-core-combined/share/dotnet/shared/Microsoft.AspNetCore.App]
Microsoft.NETCore.App 8.0.11 [/nix/store/6wlrjiy10wg766490dcmp6x64zb1vc8j-dotnet-core-combined/share/dotnet/shared/Microsoft.NETCore.App]
Microsoft.NETCore.App 9.0.0 [/nix/store/6wlrjiy10wg766490dcmp6x64zb1vc8j-dotnet-core-combined/share/dotnet/shared/Microsoft.NETCore.App]
Other architectures found:
None
@ -146,8 +152,8 @@ in buildDotnetModule rec {
buildInputs = [ referencedProject ]; # `referencedProject` must contain `nupkg` in the folder structure.
dotnet-sdk = dotnetCorePackages.sdk_6_0;
dotnet-runtime = dotnetCorePackages.runtime_6_0;
dotnet-sdk = dotnetCorePackages.sdk_8_0;
dotnet-runtime = dotnetCorePackages.runtime_8_0;
executables = [ "foo" ]; # This wraps "$out/lib/$pname/foo" to `$out/bin/foo`.
executables = []; # Don't install any executables.


@ -551,9 +551,9 @@ are used in [`buildPythonPackage`](#buildpythonpackage-function).
Several versions of the Python interpreter are available on Nix, as well as a
large number of packages. The attribute `python3` refers to the default
interpreter, which is currently CPython 3.11. The attribute `python` refers to
interpreter, which is currently CPython 3.12. The attribute `python` refers to
CPython 2.7 for backwards-compatibility. It is also possible to refer to
specific versions, e.g. `python311` refers to CPython 3.11, and `pypy` refers to
specific versions, e.g. `python312` refers to CPython 3.12, and `pypy` refers to
the default PyPy interpreter.
Python is used a lot, and in different ways. This also affects how it is
@ -569,10 +569,10 @@ however, are in separate sets, with one set per interpreter version.
The interpreters have several common attributes. One of these attributes is
`pkgs`, which is a package set of Python libraries for this specific
interpreter. E.g., the `toolz` package corresponding to the default interpreter
is `python3.pkgs.toolz`, and the CPython 3.11 version is `python311.pkgs.toolz`.
is `python3.pkgs.toolz`, and the CPython 3.12 version is `python312.pkgs.toolz`.
The main package set contains aliases to these package sets, e.g.
`pythonPackages` refers to `python.pkgs` and `python311Packages` to
`python311.pkgs`.
`pythonPackages` refers to `python.pkgs` and `python312Packages` to
`python312.pkgs`.
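As a quick, hedged illustration of the aliasing described above (a throwaway expression that assumes a standard `<nixpkgs>` in `NIX_PATH`):

```nix
with import <nixpkgs> { };
# Both spellings select the toolz build for CPython 3.12.
[
  python312.pkgs.toolz
  python312Packages.toolz
]
```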
#### Installing Python and packages {#installing-python-and-packages}
@ -597,7 +597,7 @@ with [`python.buildEnv`](#python.buildenv-function) or [`python.withPackages`](#
executables are wrapped to be able to find each other and all of the modules.
In the following examples we will start by creating a simple, ad-hoc environment
with a nix-shell that has `numpy` and `toolz` in Python 3.11; then we will create
with a nix-shell that has `numpy` and `toolz` in Python 3.12; then we will create
a re-usable environment in a single-file Python script; then we will create a
full Python environment for development with this same environment.
@ -613,10 +613,10 @@ temporary shell session with a Python and a *precise* list of packages (plus
their runtime dependencies), with no other Python packages in the Python
interpreter's scope.
To create a Python 3.11 session with `numpy` and `toolz` available, run:
To create a Python 3.12 session with `numpy` and `toolz` available, run:
```sh
$ nix-shell -p 'python311.withPackages(ps: with ps; [ numpy toolz ])'
$ nix-shell -p 'python312.withPackages(ps: with ps; [ numpy toolz ])'
```
By default `nix-shell` will start a `bash` session with this interpreter in our
@ -624,7 +624,7 @@ By default `nix-shell` will start a `bash` session with this interpreter in our
```Python console
[nix-shell:~/src/nixpkgs]$ python3
Python 3.11.3 (main, Apr 4 2023, 22:36:41) [GCC 12.2.0] on linux
Python 3.12.4 (main, Jun 6 2024, 18:26:44) [GCC 13.3.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> import numpy; import toolz
```
@ -644,12 +644,8 @@ will still get 1 wrapped Python interpreter. We can start the interpreter
directly like so:
```sh
$ nix-shell -p "python311.withPackages (ps: with ps; [ numpy toolz requests ])" --run python3
this derivation will be built:
/nix/store/r19yf5qgfiakqlhkgjahbg3zg79549n4-python3-3.11.2-env.drv
building '/nix/store/r19yf5qgfiakqlhkgjahbg3zg79549n4-python3-3.11.2-env.drv'...
created 273 symlinks in user environment
Python 3.11.2 (main, Feb 7 2023, 13:52:42) [GCC 12.2.0] on linux
$ nix-shell -p "python312.withPackages (ps: with ps; [ numpy toolz requests ])" --run python3
Python 3.12.4 (main, Jun 6 2024, 18:26:44) [GCC 13.3.0] on linux
Type "help", "copyright", "credits" or "license" for more information.
>>> import requests
>>>
@ -689,7 +685,7 @@ Executing this script requires a `python3` that has `numpy`. Using what we learn
in the previous section, we could start up a shell and just run it like so:
```ShellSession
$ nix-shell -p 'python311.withPackages (ps: with ps; [ numpy ])' --run 'python3 foo.py'
$ nix-shell -p 'python312.withPackages (ps: with ps; [ numpy ])' --run 'python3 foo.py'
The dot product of [1 2] and [3 4] is: 11
```
@ -752,12 +748,12 @@ create a single script with Python dependencies, but in the course of normal
development we're usually working in an entire package repository.
As explained [in the `nix-shell` section](https://nixos.org/manual/nix/stable/command-ref/nix-shell) of the Nix manual, `nix-shell` can also load an expression from a `.nix` file.
Say we want to have Python 3.11, `numpy` and `toolz`, like before,
Say we want to have Python 3.12, `numpy` and `toolz`, like before,
in an environment. We can add a `shell.nix` file describing our dependencies:
```nix
with import <nixpkgs> {};
(python311.withPackages (ps: with ps; [
(python312.withPackages (ps: with ps; [
numpy
toolz
])).env
@ -774,7 +770,7 @@ What's happening here?
imports the `<nixpkgs>` function, `{}` calls it and the `with` statement
brings all attributes of `nixpkgs` in the local scope. These attributes form
the main package set.
2. Then we create a Python 3.11 environment with the [`withPackages`](#python.withpackages-function) function, as before.
2. Then we create a Python 3.12 environment with the [`withPackages`](#python.withpackages-function) function, as before.
3. The [`withPackages`](#python.withpackages-function) function expects us to provide a function as an argument
that takes the set of all Python packages and returns a list of packages to
include in the environment. Here, we select the packages `numpy` and `toolz`
@ -785,7 +781,7 @@ To combine this with `mkShell` you can:
```nix
with import <nixpkgs> {};
let
pythonEnv = python311.withPackages (ps: [
pythonEnv = python312.withPackages (ps: [
ps.numpy
ps.toolz
]);
@ -939,8 +935,8 @@ information. The output of the function is a derivation.
An expression for `toolz` can be found in the Nixpkgs repository. As explained
in the introduction of this Python section, a derivation of `toolz` is available
for each interpreter version, e.g. `python311.pkgs.toolz` refers to the `toolz`
derivation corresponding to the CPython 3.11 interpreter.
for each interpreter version, e.g. `python312.pkgs.toolz` refers to the `toolz`
derivation corresponding to the CPython 3.12 interpreter.
The above example works when you're directly working on
`pkgs/top-level/python-packages.nix` in the Nixpkgs repository. Often though,
@ -953,7 +949,7 @@ and adds it along with a `numpy` package to a Python environment.
with import <nixpkgs> {};
( let
my_toolz = python311.pkgs.buildPythonPackage rec {
my_toolz = python312.pkgs.buildPythonPackage rec {
pname = "toolz";
version = "0.10.0";
pyproject = true;
@ -964,7 +960,7 @@ with import <nixpkgs> {};
};
build-system = [
python311.pkgs.setuptools
python312.pkgs.setuptools
];
# has no tests
@ -977,7 +973,7 @@ with import <nixpkgs> {};
};
};
in python311.withPackages (ps: with ps; [
in python312.withPackages (ps: with ps; [
numpy
my_toolz
])
@ -985,7 +981,7 @@ with import <nixpkgs> {};
```
Executing `nix-shell` will result in an environment in which you can use
Python 3.11 and the `toolz` package. As you can see we had to explicitly mention
Python 3.12 and the `toolz` package. As you can see we had to explicitly mention
for which Python version we want to build a package.
So, what did we do here? Well, we took the Nix expression that we used earlier
@ -1991,7 +1987,7 @@ has security implications and is relevant for those using Python in a
When the environment variable `DETERMINISTIC_BUILD` is set, all bytecode will
have timestamp 1. The [`buildPythonPackage`](#buildpythonpackage-function) function sets `DETERMINISTIC_BUILD=1`
and [PYTHONHASHSEED=0](https://docs.python.org/3.11/using/cmdline.html#envvar-PYTHONHASHSEED).
and [PYTHONHASHSEED=0](https://docs.python.org/3.12/using/cmdline.html#envvar-PYTHONHASHSEED).
Both are also exported in `nix-shell`.
### How to provide automatic tests to Python packages? {#automatic-tests}
@ -2062,10 +2058,12 @@ The following rules are desired to be respected:
* `meta.platforms` takes the default value in many cases.
It does not need to be set explicitly unless the package requires a specific platform.
* The file is formatted with `nixfmt-rfc-style`.
* Commit names of Python libraries should reflect that they are Python
libraries, so write for example `python311Packages.numpy: 1.11 -> 1.12`.
It is highly recommended to specify the current default version to enable
automatic build by ofborg.
* Commit names of Python libraries must reflect that they are Python
libraries (e.g. `python312Packages.numpy: 1.11 -> 1.12` rather than `numpy: 1.11 -> 1.12`).
* The current default version of Python should be included
in commit messages to enable automatic builds by ofborg.
For example, `python312Packages.numpy: 1.11 -> 1.12` should be used rather
than `python3Packages.numpy: 1.11 -> 1.12`.
Note that `pythonPackages` is an alias for `python27Packages`.
* Attribute names in `python-packages.nix` as well as `pname`s should match the
library's name on PyPI, but be normalized according to [PEP


@ -1,66 +1,30 @@
{
"gnunet.conf(5)": "https://docs.gnunet.org/latest/users/configuration.html",
"mpd(1)": "https://mpd.readthedocs.io/en/latest/mpd.1.html",
"mpd.conf(5)": "https://mpd.readthedocs.io/en/latest/mpd.conf.5.html",
"nix.conf(5)": "https://nixos.org/manual/nix/stable/command-ref/conf-file.html",
"portals.conf(5)": "https://github.com/flatpak/xdg-desktop-portal/blob/1.18.1/doc/portals.conf.rst.in",
"bootctl(1)": "https://www.freedesktop.org/software/systemd/man/bootctl.html",
"busctl(1)": "https://www.freedesktop.org/software/systemd/man/busctl.html",
"coredumpctl(1)": "https://www.freedesktop.org/software/systemd/man/coredumpctl.html",
"homectl(1)": "https://www.freedesktop.org/software/systemd/man/homectl.html",
"hostnamectl(1)": "https://www.freedesktop.org/software/systemd/man/hostnamectl.html",
"init(1)": "https://www.freedesktop.org/software/systemd/man/init.html",
"journalctl(1)": "https://www.freedesktop.org/software/systemd/man/journalctl.html",
"localectl(1)": "https://www.freedesktop.org/software/systemd/man/localectl.html",
"loginctl(1)": "https://www.freedesktop.org/software/systemd/man/loginctl.html",
"machinectl(1)": "https://www.freedesktop.org/software/systemd/man/machinectl.html",
"mount.ddi(1)": "https://www.freedesktop.org/software/systemd/man/mount.ddi.html",
"networkctl(1)": "https://www.freedesktop.org/software/systemd/man/networkctl.html",
"oomctl(1)": "https://www.freedesktop.org/software/systemd/man/oomctl.html",
"portablectl(1)": "https://www.freedesktop.org/software/systemd/man/portablectl.html",
"resolvconf(1)": "https://www.freedesktop.org/software/systemd/man/resolvconf.html",
"resolvectl(1)": "https://www.freedesktop.org/software/systemd/man/resolvectl.html",
"systemctl(1)": "https://www.freedesktop.org/software/systemd/man/systemctl.html",
"systemd-ac-power(1)": "https://www.freedesktop.org/software/systemd/man/systemd-ac-power.html",
"systemd-analyze(1)": "https://www.freedesktop.org/software/systemd/man/systemd-analyze.html",
"systemd-ask-password(1)": "https://www.freedesktop.org/software/systemd/man/systemd-ask-password.html",
"systemd-cat(1)": "https://www.freedesktop.org/software/systemd/man/systemd-cat.html",
"systemd-cgls(1)": "https://www.freedesktop.org/software/systemd/man/systemd-cgls.html",
"systemd-cgtop(1)": "https://www.freedesktop.org/software/systemd/man/systemd-cgtop.html",
"systemd-creds(1)": "https://www.freedesktop.org/software/systemd/man/systemd-creds.html",
"systemd-cryptenroll(1)": "https://www.freedesktop.org/software/systemd/man/systemd-cryptenroll.html",
"systemd-delta(1)": "https://www.freedesktop.org/software/systemd/man/systemd-delta.html",
"systemd-detect-virt(1)": "https://www.freedesktop.org/software/systemd/man/systemd-detect-virt.html",
"systemd-dissect(1)": "https://www.freedesktop.org/software/systemd/man/systemd-dissect.html",
"systemd-escape(1)": "https://www.freedesktop.org/software/systemd/man/systemd-escape.html",
"systemd-id128(1)": "https://www.freedesktop.org/software/systemd/man/systemd-id128.html",
"systemd-inhibit(1)": "https://www.freedesktop.org/software/systemd/man/systemd-inhibit.html",
"systemd-machine-id-setup(1)": "https://www.freedesktop.org/software/systemd/man/systemd-machine-id-setup.html",
"systemd-measure(1)": "https://www.freedesktop.org/software/systemd/man/systemd-measure.html",
"systemd-mount(1)": "https://www.freedesktop.org/software/systemd/man/systemd-mount.html",
"systemd-notify(1)": "https://www.freedesktop.org/software/systemd/man/systemd-notify.html",
"systemd-nspawn(1)": "https://www.freedesktop.org/software/systemd/man/systemd-nspawn.html",
"systemd-path(1)": "https://www.freedesktop.org/software/systemd/man/systemd-path.html",
"systemd-run(1)": "https://www.freedesktop.org/software/systemd/man/systemd-run.html",
"systemd-socket-activate(1)": "https://www.freedesktop.org/software/systemd/man/systemd-socket-activate.html",
"systemd-stdio-bridge(1)": "https://www.freedesktop.org/software/systemd/man/systemd-stdio-bridge.html",
"systemd-tty-ask-password-agent(1)": "https://www.freedesktop.org/software/systemd/man/systemd-tty-ask-password-agent.html",
"systemd-umount(1)": "https://www.freedesktop.org/software/systemd/man/systemd-umount.html",
"systemd(1)": "https://www.freedesktop.org/software/systemd/man/systemd.html",
"timedatectl(1)": "https://www.freedesktop.org/software/systemd/man/timedatectl.html",
"userdbctl(1)": "https://www.freedesktop.org/software/systemd/man/userdbctl.html",
"30-systemd-environment-d-generator(8)": "https://www.freedesktop.org/software/systemd/man/30-systemd-environment-d-generator.html",
"binfmt.d(5)": "https://www.freedesktop.org/software/systemd/man/binfmt.d.html",
"bootctl(1)": "https://www.freedesktop.org/software/systemd/man/bootctl.html",
"bootup(7)": "https://www.freedesktop.org/software/systemd/man/bootup.html",
"busctl(1)": "https://www.freedesktop.org/software/systemd/man/busctl.html",
"cat(1)": "https://www.gnu.org/software/coreutils/manual/html_node/cat-invocation.html",
"coredump.conf(5)": "https://www.freedesktop.org/software/systemd/man/coredump.conf.html",
"coredump.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/coredump.conf.d.html",
"coredumpctl(1)": "https://www.freedesktop.org/software/systemd/man/coredumpctl.html",
"crypttab(5)": "https://www.freedesktop.org/software/systemd/man/crypttab.html",
"curl(1)": "https://curl.se/docs/manpage.html",
"daemon(7)": "https://www.freedesktop.org/software/systemd/man/daemon.html",
"dnssec-trust-anchors.d(5)": "https://www.freedesktop.org/software/systemd/man/dnssec-trust-anchors.d.html",
"environment.d(5)": "https://www.freedesktop.org/software/systemd/man/environment.d.html",
"extension-release(5)": "https://www.freedesktop.org/software/systemd/man/extension-release.html",
"file-hierarchy(7)": "https://www.freedesktop.org/software/systemd/man/file-hierarchy.html",
"gnunet.conf(5)": "https://docs.gnunet.org/latest/users/configuration.html",
"group(5)": "https://man.archlinux.org/man/group.5",
"halt(8)": "https://www.freedesktop.org/software/systemd/man/halt.html",
"homectl(1)": "https://www.freedesktop.org/software/systemd/man/homectl.html",
"homed.conf(5)": "https://www.freedesktop.org/software/systemd/man/homed.conf.html",
"homed.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/homed.conf.d.html",
"hostname(5)": "https://www.freedesktop.org/software/systemd/man/hostname.html",
"hostnamectl(1)": "https://www.freedesktop.org/software/systemd/man/hostnamectl.html",
"hwdb(7)": "https://www.freedesktop.org/software/systemd/man/hwdb.html",
"init(1)": "https://www.freedesktop.org/software/systemd/man/init.html",
"initrd-release(5)": "https://www.freedesktop.org/software/systemd/man/initrd-release.html",
"integritytab(5)": "https://www.freedesktop.org/software/systemd/man/integritytab.html",
"iocost.conf(5)": "https://www.freedesktop.org/software/systemd/man/iocost.conf.html",
@ -68,19 +32,46 @@
"journal-remote.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/journal-remote.conf.d.html",
"journal-upload.conf(5)": "https://www.freedesktop.org/software/systemd/man/journal-upload.conf.html",
"journal-upload.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/journal-upload.conf.d.html",
"journalctl(1)": "https://www.freedesktop.org/software/systemd/man/journalctl.html",
"journald.conf(5)": "https://www.freedesktop.org/software/systemd/man/journald.conf.html",
"journald.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/journald.conf.d.html",
"journald@.conf(5)": "https://www.freedesktop.org/software/systemd/man/journald@.conf.html",
"kernel-command-line(7)": "https://www.freedesktop.org/software/systemd/man/kernel-command-line.html",
"kernel-install(8)": "https://www.freedesktop.org/software/systemd/man/kernel-install.html",
"libnss_myhostname.so.2(8)": "https://www.freedesktop.org/software/systemd/man/libnss_myhostname.so.2.html",
"libnss_mymachines.so.2(8)": "https://www.freedesktop.org/software/systemd/man/libnss_mymachines.so.2.html",
"libnss_resolve.so.2(8)": "https://www.freedesktop.org/software/systemd/man/libnss_resolve.so.2.html",
"libnss_systemd.so.2(8)": "https://www.freedesktop.org/software/systemd/man/libnss_systemd.so.2.html",
"linuxaa64.efi.stub(7)": "https://www.freedesktop.org/software/systemd/man/linuxaa64.efi.stub.html",
"linuxia32.efi.stub(7)": "https://www.freedesktop.org/software/systemd/man/linuxia32.efi.stub.html",
"linuxx64.efi.stub(7)": "https://www.freedesktop.org/software/systemd/man/linuxx64.efi.stub.html",
"loader.conf(5)": "https://www.freedesktop.org/software/systemd/man/loader.conf.html",
"locale.conf(5)": "https://www.freedesktop.org/software/systemd/man/locale.conf.html",
"localectl(1)": "https://www.freedesktop.org/software/systemd/man/localectl.html",
"localtime(5)": "https://www.freedesktop.org/software/systemd/man/localtime.html",
"login.defs(5)": "https://man.archlinux.org/man/login.defs.5",
"loginctl(1)": "https://www.freedesktop.org/software/systemd/man/loginctl.html",
"logind.conf(5)": "https://www.freedesktop.org/software/systemd/man/logind.conf.html",
"logind.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/logind.conf.d.html",
"machine-id(5)": "https://www.freedesktop.org/software/systemd/man/machine-id.html",
"machine-info(5)": "https://www.freedesktop.org/software/systemd/man/machine-info.html",
"machinectl(1)": "https://www.freedesktop.org/software/systemd/man/machinectl.html",
"mksquashfs(1)": "https://man.archlinux.org/man/extra/squashfs-tools/mksquashfs.1.en",
"modules-load.d(5)": "https://www.freedesktop.org/software/systemd/man/modules-load.d.html",
"mount.ddi(1)": "https://www.freedesktop.org/software/systemd/man/mount.ddi.html",
"mpd(1)": "https://mpd.readthedocs.io/en/latest/mpd.1.html",
"mpd.conf(5)": "https://mpd.readthedocs.io/en/latest/mpd.conf.5.html",
"netrc(5)": "https://man.cx/netrc",
"networkctl(1)": "https://www.freedesktop.org/software/systemd/man/networkctl.html",
"networkd.conf(5)": "https://www.freedesktop.org/software/systemd/man/networkd.conf.html",
"networkd.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/networkd.conf.d.html",
"nix-shell(1)": "https://nixos.org/manual/nix/stable/command-ref/nix-shell.html",
"nix.conf(5)": "https://nixos.org/manual/nix/stable/command-ref/conf-file.html",
"nss-myhostname(8)": "https://www.freedesktop.org/software/systemd/man/nss-myhostname.html",
"nss-mymachines(8)": "https://www.freedesktop.org/software/systemd/man/nss-mymachines.html",
"nss-resolve(8)": "https://www.freedesktop.org/software/systemd/man/nss-resolve.html",
"nss-systemd(8)": "https://www.freedesktop.org/software/systemd/man/nss-systemd.html",
"oomctl(1)": "https://www.freedesktop.org/software/systemd/man/oomctl.html",
"oomd.conf(5)": "https://www.freedesktop.org/software/systemd/man/oomd.conf.html",
"oomd.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/oomd.conf.d.html",
"org.freedesktop.LogControl1(5)": "https://www.freedesktop.org/software/systemd/man/org.freedesktop.LogControl1.html",
@ -97,94 +88,32 @@
"org.freedesktop.systemd1(5)": "https://www.freedesktop.org/software/systemd/man/org.freedesktop.systemd1.html",
"org.freedesktop.timedate1(5)": "https://www.freedesktop.org/software/systemd/man/org.freedesktop.timedate1.html",
"os-release(5)": "https://www.freedesktop.org/software/systemd/man/os-release.html",
"pstore.conf(5)": "https://www.freedesktop.org/software/systemd/man/pstore.conf.html",
"pstore.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/pstore.conf.d.html",
"repart.d(5)": "https://www.freedesktop.org/software/systemd/man/repart.d.html",
"resolved.conf(5)": "https://www.freedesktop.org/software/systemd/man/resolved.conf.html",
"resolved.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/resolved.conf.d.html",
"sleep.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/sleep.conf.d.html",
"sysctl.d(5)": "https://www.freedesktop.org/software/systemd/man/sysctl.d.html",
"system.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/system.conf.d.html",
"systemd-sleep.conf(5)": "https://www.freedesktop.org/software/systemd/man/systemd-sleep.conf.html",
"systemd-system.conf(5)": "https://www.freedesktop.org/software/systemd/man/systemd-system.conf.html",
"systemd-user-runtime-dir(5)": "https://www.freedesktop.org/software/systemd/man/systemd-user-runtime-dir.html",
"systemd-user.conf(5)": "https://www.freedesktop.org/software/systemd/man/systemd-user.conf.html",
"systemd.automount(5)": "https://www.freedesktop.org/software/systemd/man/systemd.automount.html",
"systemd.device(5)": "https://www.freedesktop.org/software/systemd/man/systemd.device.html",
"systemd.dnssd(5)": "https://www.freedesktop.org/software/systemd/man/systemd.dnssd.html",
"systemd.exec(5)": "https://www.freedesktop.org/software/systemd/man/systemd.exec.html",
"systemd.kill(5)": "https://www.freedesktop.org/software/systemd/man/systemd.kill.html",
"systemd.link(5)": "https://www.freedesktop.org/software/systemd/man/systemd.link.html",
"systemd.mount(5)": "https://www.freedesktop.org/software/systemd/man/systemd.mount.html",
"systemd.negative(5)": "https://www.freedesktop.org/software/systemd/man/systemd.negative.html",
"systemd.netdev(5)": "https://www.freedesktop.org/software/systemd/man/systemd.netdev.html",
"systemd.network(5)": "https://www.freedesktop.org/software/systemd/man/systemd.network.html",
"systemd.nspawn(5)": "https://www.freedesktop.org/software/systemd/man/systemd.nspawn.html",
"systemd.path(5)": "https://www.freedesktop.org/software/systemd/man/systemd.path.html",
"systemd.positive(5)": "https://www.freedesktop.org/software/systemd/man/systemd.positive.html",
"systemd.preset(5)": "https://www.freedesktop.org/software/systemd/man/systemd.preset.html",
"systemd.resource-control(5)": "https://www.freedesktop.org/software/systemd/man/systemd.resource-control.html",
"systemd.scope(5)": "https://www.freedesktop.org/software/systemd/man/systemd.scope.html",
"systemd.service(5)": "https://www.freedesktop.org/software/systemd/man/systemd.service.html",
"systemd.slice(5)": "https://www.freedesktop.org/software/systemd/man/systemd.slice.html",
"systemd.socket(5)": "https://www.freedesktop.org/software/systemd/man/systemd.socket.html",
"systemd.swap(5)": "https://www.freedesktop.org/software/systemd/man/systemd.swap.html",
"systemd.target(5)": "https://www.freedesktop.org/software/systemd/man/systemd.target.html",
"systemd.timer(5)": "https://www.freedesktop.org/software/systemd/man/systemd.timer.html",
"systemd.unit(5)": "https://www.freedesktop.org/software/systemd/man/systemd.unit.html",
"sysupdate.d(5)": "https://www.freedesktop.org/software/systemd/man/sysupdate.d.html",
"sysusers.d(5)": "https://www.freedesktop.org/software/systemd/man/sysusers.d.html",
"timesyncd.conf(5)": "https://www.freedesktop.org/software/systemd/man/timesyncd.conf.html",
"timesyncd.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/timesyncd.conf.d.html",
"tmpfiles.d(5)": "https://www.freedesktop.org/software/systemd/man/tmpfiles.d.html",
"udev.conf(5)": "https://www.freedesktop.org/software/systemd/man/udev.conf.html",
"user-runtime-dir@.service(5)": "https://www.freedesktop.org/software/systemd/man/user-runtime-dir@.service.html",
"user.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/user.conf.d.html",
"user@.service(5)": "https://www.freedesktop.org/software/systemd/man/user@.service.html",
"vconsole.conf(5)": "https://www.freedesktop.org/software/systemd/man/vconsole.conf.html",
"veritytab(5)": "https://www.freedesktop.org/software/systemd/man/veritytab.html",
"bootup(7)": "https://www.freedesktop.org/software/systemd/man/bootup.html",
"daemon(7)": "https://www.freedesktop.org/software/systemd/man/daemon.html",
"file-hierarchy(7)": "https://www.freedesktop.org/software/systemd/man/file-hierarchy.html",
"hwdb(7)": "https://www.freedesktop.org/software/systemd/man/hwdb.html",
"kernel-command-line(7)": "https://www.freedesktop.org/software/systemd/man/kernel-command-line.html",
"linuxaa64.efi.stub(7)": "https://www.freedesktop.org/software/systemd/man/linuxaa64.efi.stub.html",
"linuxia32.efi.stub(7)": "https://www.freedesktop.org/software/systemd/man/linuxia32.efi.stub.html",
"linuxx64.efi.stub(7)": "https://www.freedesktop.org/software/systemd/man/linuxx64.efi.stub.html",
"sd-boot(7)": "https://www.freedesktop.org/software/systemd/man/sd-boot.html",
"sd-stub(7)": "https://www.freedesktop.org/software/systemd/man/sd-stub.html",
"smbios-type-11(7)": "https://www.freedesktop.org/software/systemd/man/smbios-type-11.html",
"systemd-boot(7)": "https://www.freedesktop.org/software/systemd/man/systemd-boot.html",
"systemd-stub(7)": "https://www.freedesktop.org/software/systemd/man/systemd-stub.html",
"systemd.directives(7)": "https://www.freedesktop.org/software/systemd/man/systemd.directives.html",
"systemd.environment-generator(7)": "https://www.freedesktop.org/software/systemd/man/systemd.environment-generator.html",
"systemd.generator(7)": "https://www.freedesktop.org/software/systemd/man/systemd.generator.html",
"systemd.image-policy(7)": "https://www.freedesktop.org/software/systemd/man/systemd.image-policy.html",
"systemd.index(7)": "https://www.freedesktop.org/software/systemd/man/systemd.index.html",
"systemd.journal-fields(7)": "https://www.freedesktop.org/software/systemd/man/systemd.journal-fields.html",
"systemd.net-naming-scheme(7)": "https://www.freedesktop.org/software/systemd/man/systemd.net-naming-scheme.html",
"systemd.offline-updates(7)": "https://www.freedesktop.org/software/systemd/man/systemd.offline-updates.html",
"systemd.special(7)": "https://www.freedesktop.org/software/systemd/man/systemd.special.html",
"systemd.syntax(7)": "https://www.freedesktop.org/software/systemd/man/systemd.syntax.html",
"systemd.system-credentials(7)": "https://www.freedesktop.org/software/systemd/man/systemd.system-credentials.html",
"systemd.time(7)": "https://www.freedesktop.org/software/systemd/man/systemd.time.html",
"udev(7)": "https://www.freedesktop.org/software/systemd/man/udev.html",
"30-systemd-environment-d-generator(8)": "https://www.freedesktop.org/software/systemd/man/30-systemd-environment-d-generator.html",
"halt(8)": "https://www.freedesktop.org/software/systemd/man/halt.html",
"kernel-install(8)": "https://www.freedesktop.org/software/systemd/man/kernel-install.html",
"libnss_myhostname.so.2(8)": "https://www.freedesktop.org/software/systemd/man/libnss_myhostname.so.2.html",
"libnss_mymachines.so.2(8)": "https://www.freedesktop.org/software/systemd/man/libnss_mymachines.so.2.html",
"libnss_resolve.so.2(8)": "https://www.freedesktop.org/software/systemd/man/libnss_resolve.so.2.html",
"libnss_systemd.so.2(8)": "https://www.freedesktop.org/software/systemd/man/libnss_systemd.so.2.html",
"nss-myhostname(8)": "https://www.freedesktop.org/software/systemd/man/nss-myhostname.html",
"nss-mymachines(8)": "https://www.freedesktop.org/software/systemd/man/nss-mymachines.html",
"nss-resolve(8)": "https://www.freedesktop.org/software/systemd/man/nss-resolve.html",
"nss-systemd(8)": "https://www.freedesktop.org/software/systemd/man/nss-systemd.html",
"pam_systemd(8)": "https://www.freedesktop.org/software/systemd/man/pam_systemd.html",
"pam_systemd_home(8)": "https://www.freedesktop.org/software/systemd/man/pam_systemd_home.html",
"passwd(5)": "https://man.archlinux.org/man/passwd.5",
"portablectl(1)": "https://www.freedesktop.org/software/systemd/man/portablectl.html",
"portals.conf(5)": "https://github.com/flatpak/xdg-desktop-portal/blob/1.18.1/doc/portals.conf.rst.in",
"poweroff(8)": "https://www.freedesktop.org/software/systemd/man/poweroff.html",
"pstore.conf(5)": "https://www.freedesktop.org/software/systemd/man/pstore.conf.html",
"pstore.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/pstore.conf.d.html",
"reboot(8)": "https://www.freedesktop.org/software/systemd/man/reboot.html",
"repart.d(5)": "https://www.freedesktop.org/software/systemd/man/repart.d.html",
"resolvconf(1)": "https://www.freedesktop.org/software/systemd/man/resolvconf.html",
"resolvectl(1)": "https://www.freedesktop.org/software/systemd/man/resolvectl.html",
"resolved.conf(5)": "https://www.freedesktop.org/software/systemd/man/resolved.conf.html",
"resolved.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/resolved.conf.d.html",
"sd-boot(7)": "https://www.freedesktop.org/software/systemd/man/sd-boot.html",
"sd-stub(7)": "https://www.freedesktop.org/software/systemd/man/sd-stub.html",
"shutdown(8)": "https://www.freedesktop.org/software/systemd/man/shutdown.html",
"sleep.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/sleep.conf.d.html",
"smbios-type-11(7)": "https://www.freedesktop.org/software/systemd/man/smbios-type-11.html",
"sysctl.d(5)": "https://www.freedesktop.org/software/systemd/man/sysctl.d.html",
"system.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/system.conf.d.html",
"systemctl(1)": "https://www.freedesktop.org/software/systemd/man/systemctl.html",
"systemd(1)": "https://www.freedesktop.org/software/systemd/man/systemd.html",
"systemd-ac-power(1)": "https://www.freedesktop.org/software/systemd/man/systemd-ac-power.html",
"systemd-analyze(1)": "https://www.freedesktop.org/software/systemd/man/systemd-analyze.html",
"systemd-ask-password(1)": "https://www.freedesktop.org/software/systemd/man/systemd-ask-password.html",
"systemd-ask-password-console.path(8)": "https://www.freedesktop.org/software/systemd/man/systemd-ask-password-console.path.html",
"systemd-ask-password-console.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-ask-password-console.service.html",
"systemd-ask-password-wall.path(8)": "https://www.freedesktop.org/software/systemd/man/systemd-ask-password-wall.path.html",
@ -193,51 +122,63 @@
"systemd-backlight@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-backlight@.service.html",
"systemd-battery-check(8)": "https://www.freedesktop.org/software/systemd/man/systemd-battery-check.html",
"systemd-binfmt(8)": "https://www.freedesktop.org/software/systemd/man/systemd-binfmt.html",
"systemd-bless-boot-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-bless-boot-generator.html",
"systemd-bless-boot(8)": "https://www.freedesktop.org/software/systemd/man/systemd-bless-boot.html",
"systemd-bless-boot-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-bless-boot-generator.html",
"systemd-boot(7)": "https://www.freedesktop.org/software/systemd/man/systemd-boot.html",
"systemd-boot-check-no-failures(8)": "https://www.freedesktop.org/software/systemd/man/systemd-boot-check-no-failures.html",
"systemd-boot-random-seed.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-boot-random-seed.service.html",
"systemd-cat(1)": "https://www.freedesktop.org/software/systemd/man/systemd-cat.html",
"systemd-cgls(1)": "https://www.freedesktop.org/software/systemd/man/systemd-cgls.html",
"systemd-cgtop(1)": "https://www.freedesktop.org/software/systemd/man/systemd-cgtop.html",
"systemd-confext(8)": "https://www.freedesktop.org/software/systemd/man/systemd-confext.html",
"systemd-confext.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-confext.service.html",
"systemd-coredump(8)": "https://www.freedesktop.org/software/systemd/man/systemd-coredump.html",
"systemd-coredump.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-coredump.socket.html",
"systemd-coredump@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-coredump@.service.html",
"systemd-cryptsetup-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-cryptsetup-generator.html",
"systemd-creds(1)": "https://www.freedesktop.org/software/systemd/man/systemd-creds.html",
"systemd-cryptenroll(1)": "https://www.freedesktop.org/software/systemd/man/systemd-cryptenroll.html",
"systemd-cryptsetup(8)": "https://www.freedesktop.org/software/systemd/man/systemd-cryptsetup.html",
"systemd-cryptsetup-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-cryptsetup-generator.html",
"systemd-cryptsetup@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-cryptsetup@.service.html",
"systemd-debug-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-debug-generator.html",
"systemd-delta(1)": "https://www.freedesktop.org/software/systemd/man/systemd-delta.html",
"systemd-detect-virt(1)": "https://www.freedesktop.org/software/systemd/man/systemd-detect-virt.html",
"systemd-dissect(1)": "https://www.freedesktop.org/software/systemd/man/systemd-dissect.html",
"systemd-environment-d-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-environment-d-generator.html",
"systemd-escape(1)": "https://www.freedesktop.org/software/systemd/man/systemd-escape.html",
"systemd-fsck(8)": "https://www.freedesktop.org/software/systemd/man/systemd-fsck.html",
"systemd-fsck-root.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-fsck-root.service.html",
"systemd-fsck-usr.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-fsck-usr.service.html",
"systemd-fsck(8)": "https://www.freedesktop.org/software/systemd/man/systemd-fsck.html",
"systemd-fsck@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-fsck@.service.html",
"systemd-fstab-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-fstab-generator.html",
"systemd-getty-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-getty-generator.html",
"systemd-gpt-auto-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-gpt-auto-generator.html",
"systemd-growfs-root.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-growfs-root.service.html",
"systemd-growfs(8)": "https://www.freedesktop.org/software/systemd/man/systemd-growfs.html",
"systemd-growfs-root.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-growfs-root.service.html",
"systemd-growfs@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-growfs@.service.html",
"systemd-halt.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-halt.service.html",
"systemd-hibernate-resume-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-hibernate-resume-generator.html",
"systemd-hibernate-resume(8)": "https://www.freedesktop.org/software/systemd/man/systemd-hibernate-resume.html",
"systemd-hibernate-resume-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-hibernate-resume-generator.html",
"systemd-hibernate.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-hibernate.service.html",
"systemd-homed(8)": "https://www.freedesktop.org/software/systemd/man/systemd-homed.html",
"systemd-hostnamed(8)": "https://www.freedesktop.org/software/systemd/man/systemd-hostnamed.html",
"systemd-hwdb(8)": "https://www.freedesktop.org/software/systemd/man/systemd-hwdb.html",
"systemd-hybrid-sleep.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-hybrid-sleep.service.html",
"systemd-id128(1)": "https://www.freedesktop.org/software/systemd/man/systemd-id128.html",
"systemd-importd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-importd.html",
"systemd-integritysetup-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-integritysetup-generator.html",
"systemd-inhibit(1)": "https://www.freedesktop.org/software/systemd/man/systemd-inhibit.html",
"systemd-integritysetup(8)": "https://www.freedesktop.org/software/systemd/man/systemd-integritysetup.html",
"systemd-integritysetup-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-integritysetup-generator.html",
"systemd-integritysetup@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-integritysetup@.service.html",
"systemd-journal-gatewayd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journal-gatewayd.html",
"systemd-journal-gatewayd.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journal-gatewayd.socket.html",
"systemd-journal-remote(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journal-remote.html",
"systemd-journal-remote.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journal-remote.socket.html",
"systemd-journal-upload(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journal-upload.html",
"systemd-journald(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journald.html",
"systemd-journald-audit.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journald-audit.socket.html",
"systemd-journald-dev-log.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journald-dev-log.socket.html",
"systemd-journald-varlink@.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journald-varlink@.socket.html",
"systemd-journald(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journald.html",
"systemd-journald.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journald.socket.html",
"systemd-journald@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journald@.service.html",
"systemd-journald@.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-journald@.socket.html",
@ -245,22 +186,28 @@
"systemd-localed(8)": "https://www.freedesktop.org/software/systemd/man/systemd-localed.html",
"systemd-logind(8)": "https://www.freedesktop.org/software/systemd/man/systemd-logind.html",
"systemd-machine-id-commit.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-machine-id-commit.service.html",
"systemd-machine-id-setup(1)": "https://www.freedesktop.org/software/systemd/man/systemd-machine-id-setup.html",
"systemd-machined(8)": "https://www.freedesktop.org/software/systemd/man/systemd-machined.html",
"systemd-makefs(8)": "https://www.freedesktop.org/software/systemd/man/systemd-makefs.html",
"systemd-makefs@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-makefs@.service.html",
"systemd-measure(1)": "https://www.freedesktop.org/software/systemd/man/systemd-measure.html",
"systemd-mkswap@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-mkswap@.service.html",
"systemd-modules-load(8)": "https://www.freedesktop.org/software/systemd/man/systemd-modules-load.html",
"systemd-mount(1)": "https://www.freedesktop.org/software/systemd/man/systemd-mount.html",
"systemd-network-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-network-generator.html",
"systemd-networkd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-networkd.html",
"systemd-networkd-wait-online(8)": "https://www.freedesktop.org/software/systemd/man/systemd-networkd-wait-online.html",
"systemd-networkd-wait-online@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-networkd-wait-online@.service.html",
"systemd-networkd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-networkd.html",
"systemd-notify(1)": "https://www.freedesktop.org/software/systemd/man/systemd-notify.html",
"systemd-nspawn(1)": "https://www.freedesktop.org/software/systemd/man/systemd-nspawn.html",
"systemd-oomd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-oomd.html",
"systemd-path(1)": "https://www.freedesktop.org/software/systemd/man/systemd-path.html",
"systemd-pcrfs-root.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-pcrfs-root.service.html",
"systemd-pcrfs@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-pcrfs@.service.html",
"systemd-pcrmachine.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-pcrmachine.service.html",
"systemd-pcrphase(8)": "https://www.freedesktop.org/software/systemd/man/systemd-pcrphase.html",
"systemd-pcrphase-initrd.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-pcrphase-initrd.service.html",
"systemd-pcrphase-sysinit.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-pcrphase-sysinit.service.html",
"systemd-pcrphase(8)": "https://www.freedesktop.org/software/systemd/man/systemd-pcrphase.html",
"systemd-portabled(8)": "https://www.freedesktop.org/software/systemd/man/systemd-portabled.html",
"systemd-poweroff.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-poweroff.service.html",
"systemd-pstore(8)": "https://www.freedesktop.org/software/systemd/man/systemd-pstore.html",
@ -272,20 +219,26 @@
"systemd-resolved(8)": "https://www.freedesktop.org/software/systemd/man/systemd-resolved.html",
"systemd-rfkill(8)": "https://www.freedesktop.org/software/systemd/man/systemd-rfkill.html",
"systemd-rfkill.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-rfkill.socket.html",
"systemd-run(1)": "https://www.freedesktop.org/software/systemd/man/systemd-run.html",
"systemd-run-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-run-generator.html",
"systemd-shutdown(8)": "https://www.freedesktop.org/software/systemd/man/systemd-shutdown.html",
"systemd-sleep(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sleep.html",
"systemd-sleep.conf(5)": "https://www.freedesktop.org/software/systemd/man/systemd-sleep.conf.html",
"systemd-socket-activate(1)": "https://www.freedesktop.org/software/systemd/man/systemd-socket-activate.html",
"systemd-socket-proxyd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-socket-proxyd.html",
"systemd-soft-reboot.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-soft-reboot.service.html",
"systemd-stdio-bridge(1)": "https://www.freedesktop.org/software/systemd/man/systemd-stdio-bridge.html",
"systemd-stub(7)": "https://www.freedesktop.org/software/systemd/man/systemd-stub.html",
"systemd-suspend-then-hibernate.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-suspend-then-hibernate.service.html",
"systemd-suspend.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-suspend.service.html",
"systemd-sysctl(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysctl.html",
"systemd-sysext(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysext.html",
"systemd-sysext.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysext.service.html",
"systemd-system-update-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-system-update-generator.html",
"systemd-system.conf(5)": "https://www.freedesktop.org/software/systemd/man/systemd-system.conf.html",
"systemd-sysupdate(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysupdate.html",
"systemd-sysupdate-reboot.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysupdate-reboot.service.html",
"systemd-sysupdate-reboot.timer(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysupdate-reboot.timer.html",
"systemd-sysupdate(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysupdate.html",
"systemd-sysupdate.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysupdate.service.html",
"systemd-sysupdate.timer(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysupdate.timer.html",
"systemd-sysusers(8)": "https://www.freedesktop.org/software/systemd/man/systemd-sysusers.html",
@ -293,34 +246,80 @@
"systemd-time-wait-sync(8)": "https://www.freedesktop.org/software/systemd/man/systemd-time-wait-sync.html",
"systemd-timedated(8)": "https://www.freedesktop.org/software/systemd/man/systemd-timedated.html",
"systemd-timesyncd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-timesyncd.html",
"systemd-tmpfiles(8)": "https://www.freedesktop.org/software/systemd/man/systemd-tmpfiles.html",
"systemd-tmpfiles-clean.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-tmpfiles-clean.service.html",
"systemd-tmpfiles-clean.timer(8)": "https://www.freedesktop.org/software/systemd/man/systemd-tmpfiles-clean.timer.html",
"systemd-tmpfiles-setup-dev-early.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-tmpfiles-setup-dev-early.service.html",
"systemd-tmpfiles-setup-dev.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-tmpfiles-setup-dev.service.html",
"systemd-tmpfiles-setup.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-tmpfiles-setup.service.html",
"systemd-tmpfiles(8)": "https://www.freedesktop.org/software/systemd/man/systemd-tmpfiles.html",
"systemd-tty-ask-password-agent(1)": "https://www.freedesktop.org/software/systemd/man/systemd-tty-ask-password-agent.html",
"systemd-udev-settle.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-udev-settle.service.html",
"systemd-udevd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-udevd.html",
"systemd-udevd-control.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-udevd-control.socket.html",
"systemd-udevd-kernel.socket(8)": "https://www.freedesktop.org/software/systemd/man/systemd-udevd-kernel.socket.html",
"systemd-udevd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-udevd.html",
"systemd-umount(1)": "https://www.freedesktop.org/software/systemd/man/systemd-umount.html",
"systemd-update-done(8)": "https://www.freedesktop.org/software/systemd/man/systemd-update-done.html",
"systemd-update-utmp-runlevel.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-update-utmp-runlevel.service.html",
"systemd-update-utmp(8)": "https://www.freedesktop.org/software/systemd/man/systemd-update-utmp.html",
"systemd-update-utmp-runlevel.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-update-utmp-runlevel.service.html",
"systemd-user-runtime-dir(5)": "https://www.freedesktop.org/software/systemd/man/systemd-user-runtime-dir.html",
"systemd-user-sessions(8)": "https://www.freedesktop.org/software/systemd/man/systemd-user-sessions.html",
"systemd-user.conf(5)": "https://www.freedesktop.org/software/systemd/man/systemd-user.conf.html",
"systemd-userdbd(8)": "https://www.freedesktop.org/software/systemd/man/systemd-userdbd.html",
"systemd-vconsole-setup(8)": "https://www.freedesktop.org/software/systemd/man/systemd-vconsole-setup.html",
"systemd-veritysetup-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-veritysetup-generator.html",
"systemd-veritysetup(8)": "https://www.freedesktop.org/software/systemd/man/systemd-veritysetup.html",
"systemd-veritysetup-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-veritysetup-generator.html",
"systemd-veritysetup@.service(8)": "https://www.freedesktop.org/software/systemd/man/systemd-veritysetup@.service.html",
"systemd-volatile-root(8)": "https://www.freedesktop.org/software/systemd/man/systemd-volatile-root.html",
"systemd-xdg-autostart-generator(8)": "https://www.freedesktop.org/software/systemd/man/systemd-xdg-autostart-generator.html",
"systemd.automount(5)": "https://www.freedesktop.org/software/systemd/man/systemd.automount.html",
"systemd.device(5)": "https://www.freedesktop.org/software/systemd/man/systemd.device.html",
"systemd.directives(7)": "https://www.freedesktop.org/software/systemd/man/systemd.directives.html",
"systemd.dnssd(5)": "https://www.freedesktop.org/software/systemd/man/systemd.dnssd.html",
"systemd.environment-generator(7)": "https://www.freedesktop.org/software/systemd/man/systemd.environment-generator.html",
"systemd.exec(5)": "https://www.freedesktop.org/software/systemd/man/systemd.exec.html",
"systemd.generator(7)": "https://www.freedesktop.org/software/systemd/man/systemd.generator.html",
"systemd.image-policy(7)": "https://www.freedesktop.org/software/systemd/man/systemd.image-policy.html",
"systemd.index(7)": "https://www.freedesktop.org/software/systemd/man/systemd.index.html",
"systemd.journal-fields(7)": "https://www.freedesktop.org/software/systemd/man/systemd.journal-fields.html",
"systemd.kill(5)": "https://www.freedesktop.org/software/systemd/man/systemd.kill.html",
"systemd.link(5)": "https://www.freedesktop.org/software/systemd/man/systemd.link.html",
"systemd.mount(5)": "https://www.freedesktop.org/software/systemd/man/systemd.mount.html",
"systemd.negative(5)": "https://www.freedesktop.org/software/systemd/man/systemd.negative.html",
"systemd.net-naming-scheme(7)": "https://www.freedesktop.org/software/systemd/man/systemd.net-naming-scheme.html",
"systemd.netdev(5)": "https://www.freedesktop.org/software/systemd/man/systemd.netdev.html",
"systemd.network(5)": "https://www.freedesktop.org/software/systemd/man/systemd.network.html",
"systemd.nspawn(5)": "https://www.freedesktop.org/software/systemd/man/systemd.nspawn.html",
"systemd.offline-updates(7)": "https://www.freedesktop.org/software/systemd/man/systemd.offline-updates.html",
"systemd.path(5)": "https://www.freedesktop.org/software/systemd/man/systemd.path.html",
"systemd.positive(5)": "https://www.freedesktop.org/software/systemd/man/systemd.positive.html",
"systemd.preset(5)": "https://www.freedesktop.org/software/systemd/man/systemd.preset.html",
"systemd.resource-control(5)": "https://www.freedesktop.org/software/systemd/man/systemd.resource-control.html",
"systemd.scope(5)": "https://www.freedesktop.org/software/systemd/man/systemd.scope.html",
"systemd.service(5)": "https://www.freedesktop.org/software/systemd/man/systemd.service.html",
"systemd.slice(5)": "https://www.freedesktop.org/software/systemd/man/systemd.slice.html",
"systemd.socket(5)": "https://www.freedesktop.org/software/systemd/man/systemd.socket.html",
"systemd.special(7)": "https://www.freedesktop.org/software/systemd/man/systemd.special.html",
"systemd.swap(5)": "https://www.freedesktop.org/software/systemd/man/systemd.swap.html",
"systemd.syntax(7)": "https://www.freedesktop.org/software/systemd/man/systemd.syntax.html",
"systemd.system-credentials(7)": "https://www.freedesktop.org/software/systemd/man/systemd.system-credentials.html",
"systemd.target(5)": "https://www.freedesktop.org/software/systemd/man/systemd.target.html",
"systemd.time(7)": "https://www.freedesktop.org/software/systemd/man/systemd.time.html",
"systemd.timer(5)": "https://www.freedesktop.org/software/systemd/man/systemd.timer.html",
"systemd.unit(5)": "https://www.freedesktop.org/software/systemd/man/systemd.unit.html",
"sysupdate.d(5)": "https://www.freedesktop.org/software/systemd/man/sysupdate.d.html",
"sysusers.d(5)": "https://www.freedesktop.org/software/systemd/man/sysusers.d.html",
"timedatectl(1)": "https://www.freedesktop.org/software/systemd/man/timedatectl.html",
"timesyncd.conf(5)": "https://www.freedesktop.org/software/systemd/man/timesyncd.conf.html",
"timesyncd.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/timesyncd.conf.d.html",
"tmpfiles.d(5)": "https://www.freedesktop.org/software/systemd/man/tmpfiles.d.html",
"udev(7)": "https://www.freedesktop.org/software/systemd/man/udev.html",
"udev.conf(5)": "https://www.freedesktop.org/software/systemd/man/udev.conf.html",
"udevadm(8)": "https://www.freedesktop.org/software/systemd/man/udevadm.html",
"passwd(5)": "https://man.archlinux.org/man/passwd.5",
"group(5)": "https://man.archlinux.org/man/group.5",
"login.defs(5)": "https://man.archlinux.org/man/login.defs.5",
"unshare(1)": "https://man.archlinux.org/man/unshare.1.en",
"nix-shell(1)": "https://nixos.org/manual/nix/stable/command-ref/nix-shell.html",
"mksquashfs(1)": "https://man.archlinux.org/man/extra/squashfs-tools/mksquashfs.1.en",
"curl(1)": "https://curl.se/docs/manpage.html",
"netrc(5)": "https://man.cx/netrc"
"user-runtime-dir@.service(5)": "https://www.freedesktop.org/software/systemd/man/user-runtime-dir@.service.html",
"user.conf.d(5)": "https://www.freedesktop.org/software/systemd/man/user.conf.d.html",
"user@.service(5)": "https://www.freedesktop.org/software/systemd/man/user@.service.html",
"userdbctl(1)": "https://www.freedesktop.org/software/systemd/man/userdbctl.html",
"vconsole.conf(5)": "https://www.freedesktop.org/software/systemd/man/vconsole.conf.html",
"veritytab(5)": "https://www.freedesktop.org/software/systemd/man/veritytab.html"
}
View file
@ -1,99 +1,100 @@
{ "\t" = 9;
{
"\t" = 9;
"\n" = 10;
"\r" = 13;
" " = 32;
"!" = 33;
" " = 32;
"!" = 33;
"\"" = 34;
"#" = 35;
"$" = 36;
"%" = 37;
"&" = 38;
"'" = 39;
"(" = 40;
")" = 41;
"*" = 42;
"+" = 43;
"," = 44;
"-" = 45;
"." = 46;
"/" = 47;
"0" = 48;
"1" = 49;
"2" = 50;
"3" = 51;
"4" = 52;
"5" = 53;
"6" = 54;
"7" = 55;
"8" = 56;
"9" = 57;
":" = 58;
";" = 59;
"<" = 60;
"=" = 61;
">" = 62;
"?" = 63;
"@" = 64;
"A" = 65;
"B" = 66;
"C" = 67;
"D" = 68;
"E" = 69;
"F" = 70;
"G" = 71;
"H" = 72;
"I" = 73;
"J" = 74;
"K" = 75;
"L" = 76;
"M" = 77;
"N" = 78;
"O" = 79;
"P" = 80;
"Q" = 81;
"R" = 82;
"S" = 83;
"T" = 84;
"U" = 85;
"V" = 86;
"W" = 87;
"X" = 88;
"Y" = 89;
"Z" = 90;
"[" = 91;
"#" = 35;
"$" = 36;
"%" = 37;
"&" = 38;
"'" = 39;
"(" = 40;
")" = 41;
"*" = 42;
"+" = 43;
"," = 44;
"-" = 45;
"." = 46;
"/" = 47;
"0" = 48;
"1" = 49;
"2" = 50;
"3" = 51;
"4" = 52;
"5" = 53;
"6" = 54;
"7" = 55;
"8" = 56;
"9" = 57;
":" = 58;
";" = 59;
"<" = 60;
"=" = 61;
">" = 62;
"?" = 63;
"@" = 64;
"A" = 65;
"B" = 66;
"C" = 67;
"D" = 68;
"E" = 69;
"F" = 70;
"G" = 71;
"H" = 72;
"I" = 73;
"J" = 74;
"K" = 75;
"L" = 76;
"M" = 77;
"N" = 78;
"O" = 79;
"P" = 80;
"Q" = 81;
"R" = 82;
"S" = 83;
"T" = 84;
"U" = 85;
"V" = 86;
"W" = 87;
"X" = 88;
"Y" = 89;
"Z" = 90;
"[" = 91;
"\\" = 92;
"]" = 93;
"^" = 94;
"_" = 95;
"`" = 96;
"a" = 97;
"b" = 98;
"c" = 99;
"d" = 100;
"e" = 101;
"f" = 102;
"g" = 103;
"h" = 104;
"i" = 105;
"j" = 106;
"k" = 107;
"l" = 108;
"m" = 109;
"n" = 110;
"o" = 111;
"p" = 112;
"q" = 113;
"r" = 114;
"s" = 115;
"t" = 116;
"u" = 117;
"v" = 118;
"w" = 119;
"x" = 120;
"y" = 121;
"z" = 122;
"{" = 123;
"|" = 124;
"}" = 125;
"~" = 126;
"]" = 93;
"^" = 94;
"_" = 95;
"`" = 96;
"a" = 97;
"b" = 98;
"c" = 99;
"d" = 100;
"e" = 101;
"f" = 102;
"g" = 103;
"h" = 104;
"i" = 105;
"j" = 106;
"k" = 107;
"l" = 108;
"m" = 109;
"n" = 110;
"o" = 111;
"p" = 112;
"q" = 113;
"r" = 114;
"s" = 115;
"t" = 116;
"u" = 117;
"v" = 118;
"w" = 119;
"x" = 120;
"y" = 121;
"z" = 122;
"{" = 123;
"|" = 124;
"}" = 125;
"~" = 126;
}
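A brief usage sketch (hedged): this table maps single characters to their ASCII code points, and — assuming it is the lookup table behind a `charToInt`-style helper in `lib.strings` — a consumer reduces to a plain attribute lookup. The miniature table below carries only the entries the example needs; the real table covers every character listed above.

    let
      # illustrative subset of the table above
      asciiTable = {
        "N" = 78;
        "i" = 105;
        "x" = 120;
      };
      # a charToInt-style helper is just an attribute lookup into the table
      charToInt = c: builtins.getAttr c asciiTable;
    in
    map charToInt [ "N" "i" "x" ]
    # => [ 78 105 120 ]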
View file
@ -36,10 +36,7 @@ rec {
:::
*/
# TODO(Profpatsch): add tests that check stderr
assertMsg =
pred:
msg:
pred || builtins.throw msg;
assertMsg = pred: msg: pred || builtins.throw msg;
/**
Specialized `assertMsg` for checking if `val` is one of the elements
@ -81,14 +78,10 @@ rec {
:::
*/
assertOneOf =
name:
val:
xs:
assertMsg
(lib.elem val xs)
"${name} must be one of ${
lib.generators.toPretty {} xs}, but is: ${
lib.generators.toPretty {} val}";
name: val: xs:
assertMsg (lib.elem val xs) "${name} must be one of ${lib.generators.toPretty { } xs}, but is: ${
lib.generators.toPretty { } val
}";
/**
Specialized `assertMsg` for checking if every one of `vals` is one of the elements
@ -133,12 +126,9 @@ rec {
:::
*/
assertEachOneOf =
name:
vals:
xs:
assertMsg
(lib.all (val: lib.elem val xs) vals)
"each element in ${name} must be one of ${
lib.generators.toPretty {} xs}, but is: ${
lib.generators.toPretty {} vals}";
name: vals: xs:
assertMsg (lib.all (val: lib.elem val xs) vals)
"each element in ${name} must be one of ${lib.generators.toPretty { } xs}, but is: ${
lib.generators.toPretty { } vals
}";
}
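A usage sketch for the reformatted assertions, assuming nixpkgs' `lib` is importable via `<nixpkgs/lib>` and that `assertMsg`/`assertOneOf` keep the behaviour documented above:

    let
      inherit (import <nixpkgs/lib>) asserts;
      channel = "nightly";
    in
    # Aborts evaluation with a descriptive message unless `channel` is in the list.
    assert asserts.assertOneOf "channel" channel [ "stable" "beta" "nightly" ];
    # Plain predicate-plus-message variant.
    assert asserts.assertMsg (channel != "") "channel must not be empty";
    "using channel ${channel}"
    # => "using channel nightly"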
View file
@ -9,7 +9,6 @@ rec {
`toGNUCommandLineShell` returns an escaped shell string.
# Inputs
`options`
@ -20,7 +19,6 @@ rec {
: The attributes to transform into arguments.
# Examples
:::{.example}
## `lib.cli.toGNUCommandLineShell` usage example
@ -40,15 +38,13 @@ rec {
:::
*/
toGNUCommandLineShell =
options: attrs: lib.escapeShellArgs (toGNUCommandLine options attrs);
toGNUCommandLineShell = options: attrs: lib.escapeShellArgs (toGNUCommandLine options attrs);
/**
Automatically convert an attribute set to a list of command-line options.
`toGNUCommandLine` returns a list of string arguments.
# Inputs
`options`
@ -76,7 +72,6 @@ rec {
: How to format a list value to a command list;
By default the option name is repeated for each value and `mkOption` is applied to the values themselves.
`mkOption`
: How to format any remaining value to a command list;
@ -89,7 +84,6 @@ rec {
By default, there is no separator, so option `-c` and value `5` would become ["-c" "5"].
This is useful if the command requires equals, for example, `-c=5`.
# Examples
:::{.example}
## `lib.cli.toGNUCommandLine` usage example
@ -116,33 +110,39 @@ rec {
:::
*/
toGNUCommandLine = {
mkOptionName ?
k: if builtins.stringLength k == 1
then "-${k}"
else "--${k}",
toGNUCommandLine =
{
mkOptionName ? k: if builtins.stringLength k == 1 then "-${k}" else "--${k}",
mkBool ? k: v: lib.optional v (mkOptionName k),
mkBool ? k: v: lib.optional v (mkOptionName k),
mkList ? k: v: lib.concatMap (mkOption k) v,
mkList ? k: v: lib.concatMap (mkOption k) v,
mkOption ?
k: v: if v == null
then []
else if optionValueSeparator == null then
[ (mkOptionName k) (lib.generators.mkValueStringDefault {} v) ]
else
[ "${mkOptionName k}${optionValueSeparator}${lib.generators.mkValueStringDefault {} v}" ],
mkOption ?
k: v:
if v == null then
[ ]
else if optionValueSeparator == null then
[
(mkOptionName k)
(lib.generators.mkValueStringDefault { } v)
]
else
[ "${mkOptionName k}${optionValueSeparator}${lib.generators.mkValueStringDefault { } v}" ],
optionValueSeparator ? null
optionValueSeparator ? null,
}:
options:
let
render = k: v:
if builtins.isBool v then mkBool k v
else if builtins.isList v then mkList k v
else mkOption k v;
let
render =
k: v:
if builtins.isBool v then
mkBool k v
else if builtins.isList v then
mkList k v
else
mkOption k v;
in
builtins.concatLists (lib.mapAttrsToList render options);
in
builtins.concatLists (lib.mapAttrsToList render options);
}
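A usage sketch for `toGNUCommandLine` with the default hooks shown above (single-character names become `-k`, booleans become bare flags or are dropped, `null` values are dropped, lists repeat the option); the `<nixpkgs/lib>` import and the URLs are illustrative:

    let
      inherit (import <nixpkgs/lib>) cli;
    in
    cli.toGNUCommandLine { } {
      X = "PUT";        # single-character key -> "-X"
      retry = 3;        # other keys           -> "--retry"
      retries = null;   # null values are dropped
      silent = false;   # false booleans are dropped
      verbose = true;   # true booleans become bare flags
      url = [ "https://example.com/foo" "https://example.com/bar" ];
    }
    # => [ "-X" "PUT" "--retry" "3" "--url" "https://example.com/foo"
    #      "--url" "https://example.com/bar" "--verbose" ]

`toGNUCommandLineShell` returns the same arguments as a single escaped shell string, and passing `optionValueSeparator = "="` yields `--retry=3`-style options instead.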
View file
@ -26,7 +26,8 @@ let
generators
id
mapAttrs
trace;
trace
;
in
rec {
@ -36,7 +37,6 @@ rec {
/**
Conditionally trace the supplied message, based on a predicate.
# Inputs
`pred`
@ -70,15 +70,13 @@ rec {
:::
*/
traceIf =
pred:
msg:
x: if pred then trace msg x else x;
pred: msg: x:
if pred then trace msg x else x;
/**
Trace the supplied value after applying a function to it, and
return the original value.
# Inputs
`f`
@ -107,9 +105,7 @@ rec {
:::
*/
traceValFn =
f:
x: trace (f x) x;
traceValFn = f: x: trace (f x) x;
/**
Trace the supplied value and return it.
@ -143,7 +139,6 @@ rec {
/**
`builtins.trace`, but the value is `builtins.deepSeq`ed first.
# Inputs
`x`
@ -175,16 +170,13 @@ rec {
:::
*/
traceSeq =
x:
y: trace (builtins.deepSeq x x) y;
traceSeq = x: y: trace (builtins.deepSeq x x) y;
/**
Like `traceSeq`, but only evaluate down to depth n.
This is very useful because lots of `traceSeq` usages
lead to an infinite recursion.
# Inputs
`depth`
@ -217,25 +209,39 @@ rec {
:::
*/
traceSeqN = depth: x: y:
let snip = v: if isList v then noQuotes "[]" v
else if isAttrs v then noQuotes "{}" v
else v;
noQuotes = str: v: { __pretty = const str; val = v; };
modify = n: fn: v: if (n == 0) then fn v
else if isList v then map (modify (n - 1) fn) v
else if isAttrs v then mapAttrs
(const (modify (n - 1) fn)) v
else v;
in trace (generators.toPretty { allowPrettyValues = true; }
(modify depth snip x)) y;
traceSeqN =
depth: x: y:
let
snip =
v:
if isList v then
noQuotes "[]" v
else if isAttrs v then
noQuotes "{}" v
else
v;
noQuotes = str: v: {
__pretty = const str;
val = v;
};
modify =
n: fn: v:
if (n == 0) then
fn v
else if isList v then
map (modify (n - 1) fn) v
else if isAttrs v then
mapAttrs (const (modify (n - 1) fn)) v
else
v;
in
trace (generators.toPretty { allowPrettyValues = true; } (modify depth snip x)) y;
/**
A combination of `traceVal` and `traceSeq` that applies a
provided function to the value to be traced after `deepSeq`ing
it.
# Inputs
`f`
@ -246,9 +252,7 @@ rec {
: Value to trace
*/
traceValSeqFn =
f:
v: traceValFn f (builtins.deepSeq v v);
traceValSeqFn = f: v: traceValFn f (builtins.deepSeq v v);
/**
A combination of `traceVal` and `traceSeq`.
@ -258,7 +262,6 @@ rec {
`v`
: Value to trace
*/
traceValSeq = traceValSeqFn id;
@ -266,7 +269,6 @@ rec {
A combination of `traceVal` and `traceSeqN` that applies a
provided function to the value to be traced.
# Inputs
`f`
@ -282,9 +284,8 @@ rec {
: Value to trace
*/
traceValSeqNFn =
f:
depth:
v: traceSeqN depth (f v) v;
f: depth: v:
traceSeqN depth (f v) v;
/**
A combination of `traceVal` and `traceSeqN`.
@ -308,7 +309,6 @@ rec {
This is useful for adding around a function call,
to see the before/after of values as they are transformed.
# Inputs
`depth`
@ -327,7 +327,6 @@ rec {
: 4\. Function argument
# Examples
:::{.example}
## `lib.debug.traceFnSeqN` usage example
@ -340,17 +339,16 @@ rec {
:::
*/
traceFnSeqN = depth: name: f: v:
let res = f v;
in lib.traceSeqN
(depth + 1)
{
fn = name;
from = v;
to = res;
}
res;
traceFnSeqN =
depth: name: f: v:
let
res = f v;
in
lib.traceSeqN (depth + 1) {
fn = name;
from = v;
to = res;
} res;
# -- TESTING --
@ -375,7 +373,6 @@ rec {
- If you want to run only a subset of the tests, add the attribute `tests = ["testName"];`
# Inputs
`tests`
@ -430,26 +427,42 @@ rec {
:::
*/
runTests =
tests: concatLists (attrValues (mapAttrs (name: test:
let testsToRun = if tests ? tests then tests.tests else [];
in if (substring 0 4 name == "test" || elem name testsToRun)
&& ((testsToRun == []) || elem name tests.tests)
&& (test.expr != test.expected)
tests:
concatLists (
attrValues (
mapAttrs (
name: test:
let
testsToRun = if tests ? tests then tests.tests else [ ];
in
if
(substring 0 4 name == "test" || elem name testsToRun)
&& ((testsToRun == [ ]) || elem name tests.tests)
&& (test.expr != test.expected)
then [ { inherit name; expected = test.expected; result = test.expr; } ]
else [] ) tests));
then
[
{
inherit name;
expected = test.expected;
result = test.expr;
}
]
else
[ ]
) tests
)
);
/**
Create a test assuming that list elements are `true`.
# Inputs
`expr`
: 1\. Function argument
# Examples
:::{.example}
## `lib.debug.testAllTrue` usage example
@ -460,5 +473,8 @@ rec {
:::
*/
testAllTrue = expr: { inherit expr; expected = map (x: true) expr; };
testAllTrue = expr: {
inherit expr;
expected = map (x: true) expr;
};
}
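A usage sketch for `runTests` as reformatted above: only attributes whose names start with `test` (or that are listed in a `tests` attribute) are evaluated, and only mismatches are reported; the `<nixpkgs/lib>` import is assumed:

    let
      inherit (import <nixpkgs/lib>) runTests;
    in
    runTests {
      testPasses = { expr = 1 + 1; expected = 2; };     # passes, not reported
      testFails = { expr = "foo"; expected = "bar"; };  # mismatch, reported
      helper = { expr = 0; expected = 1; };             # ignored: no "test" prefix
    }
    # => [ { name = "testFails"; expected = "bar"; result = "foo"; } ]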
View file
@ -71,7 +71,7 @@ let
# these are the only ones that are currently not
inherit (builtins) addErrorContext isPath trace typeOf unsafeGetAttrPos;
inherit (self.trivial) id const pipe concat or and xor bitAnd bitOr bitXor
bitNot boolToString mergeAttrs flip mapNullable inNixShell isFloat min max
bitNot boolToString mergeAttrs flip defaultTo mapNullable inNixShell isFloat min max
importJSON importTOML warn warnIf warnIfNot throwIf throwIfNot checkListOfEnum
info showWarnings nixpkgsVersion version isInOldestRelease oldestSupportedReleaseIsAtLeast
mod compare splitByAndCompare seq deepSeq lessThan add sub
@ -100,7 +100,7 @@ let
length head tail elem elemAt isList;
inherit (self.strings) concatStrings concatMapStrings concatImapStrings
stringLength substring isString replaceStrings
intersperse concatStringsSep concatMapStringsSep
intersperse concatStringsSep concatMapStringsSep concatMapAttrsStringSep
concatImapStringsSep concatLines makeSearchPath makeSearchPathOutput
makeLibraryPath makeIncludePath makeBinPath optionalString
hasInfix hasPrefix hasSuffix stringToCharacters stringAsChars escape
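The hunk above newly exposes `defaultTo` (from `lib.trivial`) and `concatMapAttrsStringSep` (from `lib.strings`) at the top level. A hedged sketch of typical use, assuming the conventional definitions (`defaultTo default x` returns `x` unless it is `null`; `concatMapAttrsStringSep sep f attrs` maps `f` over the attributes and joins the results with `sep`):

    let
      lib = import <nixpkgs/lib>;
    in
    {
      port = lib.defaultTo 8080 null;                  # => 8080
      host = lib.defaultTo "localhost" "example.org";  # => "example.org"
      env = lib.concatMapAttrsStringSep "\n" (name: value: "${name}=${value}") {
        HOME = "/home/alice";
        TERM = "xterm";
      };
      # => "HOME=/home/alice\nTERM=xterm"
    }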
View file
@ -9,14 +9,16 @@ let
throwIfNot
;
showMaybeAttrPosPre = prefix: attrName: v:
let pos = builtins.unsafeGetAttrPos attrName v;
in if pos == null then "" else "${prefix}${pos.file}:${toString pos.line}:${toString pos.column}";
showMaybeAttrPosPre =
prefix: attrName: v:
let
pos = builtins.unsafeGetAttrPos attrName v;
in
if pos == null then "" else "${prefix}${pos.file}:${toString pos.line}:${toString pos.column}";
showMaybePackagePosPre = prefix: pkg:
if pkg?meta.position && isString pkg.meta.position
then "${prefix}${pkg.meta.position}"
else "";
showMaybePackagePosPre =
prefix: pkg:
if pkg ? meta.position && isString pkg.meta.position then "${prefix}${pkg.meta.position}" else "";
in
{
/**
@ -94,15 +96,14 @@ in
derivation,
meta ? null,
passthru ? { },
outputs ? [ "out" ]
outputs ? [ "out" ],
}:
let
# These checks are strict in `drv` and some `drv` attributes, but the
# attrset spine returned by lazyDerivation does not depend on it.
# Instead, the individual derivation attributes do depend on it.
checked =
throwIfNot (derivation.type or null == "derivation")
"lazyDerivation: input must be a derivation."
throwIfNot (derivation.type or null == "derivation") "lazyDerivation: input must be a derivation."
throwIfNot
# NOTE: Technically we could require our outputs to be a subset of the
# actual ones, or even leave them unchecked and fail on a lazy basis.
@ -152,7 +153,13 @@ in
# A fixed set of derivation values, so that `lazyDerivation` can return
# its attrset before evaluating `derivation`.
# This must only list attributes that are available on _all_ derivations.
inherit (checked) outPath outputName drvPath name system;
inherit (checked)
outPath
outputName
drvPath
name
system
;
inherit outputs;
# The meta attribute can either be taken from the derivation, or if the
@ -170,7 +177,6 @@ in
Thus, this function passes through its `value` argument if the `cond`
is `true`, but returns `null` if not.
# Inputs
`cond`
@ -205,9 +211,7 @@ in
:::
*/
optionalDrvAttr =
cond:
value: if cond then value else null;
optionalDrvAttr = cond: value: if cond then value else null;
/**
Wrap a derivation such that instantiating it produces a warning.
@ -238,8 +242,11 @@ in
warnOnInstantiate =
msg: drv:
let
drvToWrap = removeAttrs drv [ "meta" "name" "type" ];
drvToWrap = removeAttrs drv [
"meta"
"name"
"type"
];
in
drv
// mapAttrs (_: lib.warn msg) drvToWrap;
drv // mapAttrs (_: lib.warn msg) drvToWrap;
}
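A usage sketch for the helpers reformatted above, assuming they are exposed at the usual `lib` paths and with `pkgs.hello` standing in for any derivation:

    { lib, pkgs }:
    {
      # Return a derivation-shaped attrset without forcing `pkgs.hello`
      # until one of its attributes is actually used.
      lazyHello = lib.lazyDerivation { derivation = pkgs.hello; };

      # Emit a warning whenever the wrapped derivation is instantiated.
      oldHello = lib.warnOnInstantiate "hello-old is deprecated, use hello instead" pkgs.hello;

      # Only contribute the attribute (and thus affect the output hash) when the
      # condition holds; mkDerivation drops attributes that are null.
      debugHello = pkgs.hello.overrideAttrs (old: {
        NIX_DEBUG = lib.optionalDrvAttr false "7";  # null here, so effectively absent
      });
    }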
View file
@ -2,8 +2,8 @@
{ lib }:
let
commonH = hashTypes: rec {
hashNames = [ "hash" ] ++ hashTypes;
hashSet = lib.genAttrs hashNames (lib.const {});
hashNames = [ "hash" ] ++ hashTypes;
hashSet = lib.genAttrs hashNames (lib.const { });
};
fakeH = {
@ -11,15 +11,24 @@ let
sha256 = lib.fakeSha256;
sha512 = lib.fakeSha512;
};
in rec {
in
rec {
proxyImpureEnvVars = [
# We borrow these environment variables from the caller to allow
# easy proxy configuration. This is impure, but a fixed-output
# derivation like fetchurl is allowed to do so since its result is
# by definition pure.
"http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
"HTTP_PROXY" "HTTPS_PROXY" "FTP_PROXY" "ALL_PROXY" "NO_PROXY"
"http_proxy"
"https_proxy"
"ftp_proxy"
"all_proxy"
"no_proxy"
"HTTP_PROXY"
"HTTPS_PROXY"
"FTP_PROXY"
"ALL_PROXY"
"NO_PROXY"
# https proxies typically need to inject custom root CAs too
"NIX_SSL_CERT_FILE"
@ -77,42 +86,56 @@ in rec {
required
: whether to throw if no hash was present in the input; otherwise returns the original input, unmodified
*/
normalizeHash = {
hashTypes ? [ "sha256" ],
required ? true,
}:
normalizeHash =
{
hashTypes ? [ "sha256" ],
required ? true,
}:
let
inherit (lib) concatMapStringsSep head tail throwIf;
inherit (lib.attrsets) attrsToList intersectAttrs removeAttrs optionalAttrs;
inherit (lib)
concatMapStringsSep
head
tail
throwIf
;
inherit (lib.attrsets)
attrsToList
intersectAttrs
removeAttrs
optionalAttrs
;
inherit (commonH hashTypes) hashNames hashSet;
in
args:
if args ? "outputHash" then
args
else
args:
if args ? "outputHash" then
args
else
let
# The argument hash, as a {name, value} pair
h =
# All hashes passed in arguments (possibly 0 or >1) as a list of {name, value} pairs
let
# The argument hash, as a {name, value} pair
h =
# All hashes passed in arguments (possibly 0 or >1) as a list of {name, value} pairs
let hashesAsNVPairs = attrsToList (intersectAttrs hashSet args); in
if hashesAsNVPairs == [] then
throwIf required "fetcher called without `hash`" null
else if tail hashesAsNVPairs != [] then
throw "fetcher called with mutually-incompatible arguments: ${concatMapStringsSep ", " (a: a.name) hashesAsNVPairs}"
else
head hashesAsNVPairs
;
hashesAsNVPairs = attrsToList (intersectAttrs hashSet args);
in
removeAttrs args hashNames // (optionalAttrs (h != null) {
outputHashAlgo = if h.name == "hash" then null else h.name;
outputHash =
if h.value == "" then
fakeH.${h.name} or (throw "no fake hash defined for ${h.name}")
else
h.value;
})
;
if hashesAsNVPairs == [ ] then
throwIf required "fetcher called without `hash`" null
else if tail hashesAsNVPairs != [ ] then
throw "fetcher called with mutually-incompatible arguments: ${
concatMapStringsSep ", " (a: a.name) hashesAsNVPairs
}"
else
head hashesAsNVPairs;
in
removeAttrs args hashNames
// (optionalAttrs (h != null) {
outputHashAlgo = if h.name == "hash" then null else h.name;
outputHash =
if h.value == "" then
fakeH.${h.name} or (throw "no fake hash defined for ${h.name}")
else
h.value;
});
/**
Wraps a function which accepts `outputHash{,Algo}` into one which accepts `hash` or `sha{256,512}`
@ -164,9 +187,11 @@ in rec {
However, `withNormalizedHash` preserves `functionArgs` metadata insofar as possible,
and is implemented somewhat more efficiently.
*/
withNormalizedHash = {
hashTypes ? [ "sha256" ]
}: fetcher:
withNormalizedHash =
{
hashTypes ? [ "sha256" ],
}:
fetcher:
let
inherit (lib.attrsets) genAttrs intersectAttrs removeAttrs;
inherit (lib.trivial) const functionArgs setFunctionArgs;
@ -181,9 +206,15 @@ in rec {
in
# The o.g. fetcher must *only* accept outputHash and outputHashAlgo
assert fArgs ? outputHash && fArgs ? outputHashAlgo;
assert intersectAttrs fArgs hashSet == {};
assert intersectAttrs fArgs hashSet == { };
setFunctionArgs
(args: fetcher (normalize args))
(removeAttrs fArgs [ "outputHash" "outputHashAlgo" ] // { hash = fArgs.outputHash; });
setFunctionArgs (args: fetcher (normalize args)) (
removeAttrs fArgs [
"outputHash"
"outputHashAlgo"
]
// {
hash = fArgs.outputHash;
}
);
}
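A sketch of what `normalizeHash { }` (the default, `hashTypes = [ "sha256" ]`) does to caller arguments, matching the logic above; the `lib.fetchers` path and the URL are illustrative:

    let
      lib = import <nixpkgs/lib>;
      normalize = lib.fetchers.normalizeHash { };
    in
    {
      # SRI-style `hash`: the algorithm is left to Nix (outputHashAlgo = null).
      a = normalize { url = "https://example.org/x.tar.gz"; hash = "sha256-example"; };
      # => { url = ...; outputHashAlgo = null; outputHash = "sha256-example"; }

      # Empty sha256: replaced with lib.fakeSha256 so the fetch fails loudly.
      b = normalize { url = "https://example.org/x.tar.gz"; sha256 = ""; };
      # => { url = ...; outputHashAlgo = "sha256"; outputHash = lib.fakeSha256; }

      # Passing both `hash` and `sha256` throws
      # "fetcher called with mutually-incompatible arguments".
    }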
View file
@ -57,7 +57,6 @@
If you need more file set functions,
see [this issue](https://github.com/NixOS/nixpkgs/issues/266356) to request it.
# Implicit coercion from paths to file sets {#sec-fileset-path-coercion}
All functions accepting file sets as arguments can also accept [paths](https://nixos.org/manual/nix/stable/language/values.html#type-path) as arguments.
@ -155,14 +154,14 @@ let
pipe
;
in {
in
{
/**
Create a file set from a path that may or may not exist:
- If the path does exist, the path is [coerced to a file set](#sec-fileset-path-coercion).
- If the path does not exist, a file set containing no files is returned.
# Inputs
`path`
@ -188,14 +187,12 @@ in {
*/
maybeMissing =
path:
if ! isPath path then
if !isPath path then
if isStringLike path then
throw ''
lib.fileset.maybeMissing: Argument ("${toString path}") is a string-like value, but it should be a path instead.''
throw ''lib.fileset.maybeMissing: Argument ("${toString path}") is a string-like value, but it should be a path instead.''
else
throw ''
lib.fileset.maybeMissing: Argument is of type ${typeOf path}, but it should be a path instead.''
else if ! pathExists path then
throw ''lib.fileset.maybeMissing: Argument is of type ${typeOf path}, but it should be a path instead.''
else if !pathExists path then
_emptyWithoutBase
else
_singleton path;
@ -211,7 +208,6 @@ in {
This variant is useful for tracing file sets in the Nix repl.
# Inputs
`fileset`
@ -248,15 +244,14 @@ in {
:::
*/
trace = fileset:
trace =
fileset:
let
# "fileset" would be a better name, but that would clash with the argument name,
# and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
actualFileset = _coerce "lib.fileset.trace: Argument" fileset;
in
seq
(_printFileset actualFileset)
(x: x);
seq (_printFileset actualFileset) (x: x);
/**
Incrementally evaluate and trace a file set in a pretty way.
@ -268,7 +263,6 @@ in {
This variant is useful for tracing file sets passed as arguments to other functions.
# Inputs
`fileset`
@ -308,14 +302,14 @@ in {
:::
*/
traceVal = fileset:
traceVal =
fileset:
let
# "fileset" would be a better name, but that would clash with the argument name,
# and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
actualFileset = _coerce "lib.fileset.traceVal: Argument" fileset;
in
seq
(_printFileset actualFileset)
seq (_printFileset actualFileset)
# We could also return the original fileset argument here,
# but that would then duplicate work for consumers of the fileset, because then they have to coerce it again
actualFileset;
@ -423,10 +417,11 @@ in {
:::
*/
toSource = {
root,
fileset,
}:
toSource =
{
root,
fileset,
}:
let
# We cannot rename matched attribute arguments, so let's work around it with an extra `let in` statement
filesetArg = fileset;
@ -437,7 +432,7 @@ in {
filesetFilesystemRoot = (splitRoot fileset._internalBase).root;
sourceFilter = _toSourceFilter fileset;
in
if ! isPath root then
if !isPath root then
if root ? _isLibCleanSourceWith then
throw ''
lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
@ -448,38 +443,34 @@ in {
lib.fileset.toSource: `root` (${toString root}) is a string-like value, but it should be a path instead.
Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
else
throw ''
lib.fileset.toSource: `root` is of type ${typeOf root}, but it should be a path instead.''
throw ''lib.fileset.toSource: `root` is of type ${typeOf root}, but it should be a path instead.''
# Currently all Nix paths have the same filesystem root, but this could change in the future.
# See also ../path/README.md
else if ! fileset._internalIsEmptyWithoutBase && rootFilesystemRoot != filesetFilesystemRoot then
else if !fileset._internalIsEmptyWithoutBase && rootFilesystemRoot != filesetFilesystemRoot then
throw ''
lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` (${toString root}):
`root`: Filesystem root is "${toString rootFilesystemRoot}"
`fileset`: Filesystem root is "${toString filesetFilesystemRoot}"
Different filesystem roots are not supported.''
else if ! pathExists root then
throw ''
lib.fileset.toSource: `root` (${toString root}) is a path that does not exist.''
else if !pathExists root then
throw ''lib.fileset.toSource: `root` (${toString root}) is a path that does not exist.''
else if pathType root != "directory" then
throw ''
lib.fileset.toSource: `root` (${toString root}) is a file, but it should be a directory instead. Potential solutions:
- If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
- If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as ${toString (dirOf root)}, and set `fileset` to the file path.''
else if ! fileset._internalIsEmptyWithoutBase && ! hasPrefix root fileset._internalBase then
else if !fileset._internalIsEmptyWithoutBase && !hasPrefix root fileset._internalBase then
throw ''
lib.fileset.toSource: `fileset` could contain files in ${toString fileset._internalBase}, which is not under the `root` (${toString root}). Potential solutions:
- Set `root` to ${toString fileset._internalBase} or any directory higher up. This changes the layout of the resulting store path.
- Set `fileset` to a file set that cannot contain files outside the `root` (${toString root}). This could change the files included in the result.''
else
seq sourceFilter
cleanSourceWith {
seq sourceFilter cleanSourceWith {
name = "source";
src = root;
filter = sourceFilter;
};
/**
The list of file paths contained in the given file set.
@ -494,7 +485,6 @@ in {
The resulting list of files can be turned back into a file set using [`lib.fileset.unions`](#function-library-lib.fileset.unions).
# Inputs
`fileset`
@ -521,8 +511,7 @@ in {
:::
*/
toList = fileset:
_toList (_coerce "lib.fileset.toList: Argument" fileset);
toList = fileset: _toList (_coerce "lib.fileset.toList: Argument" fileset);
/**
The file set containing all files that are in either of two given file sets.
@ -533,7 +522,6 @@ in {
The given file sets are evaluated as lazily as possible,
with the first argument being evaluated first if needed.
# Inputs
`fileset1`
@ -567,10 +555,9 @@ in {
:::
*/
union =
fileset1:
fileset2:
_unionMany
(_coerceMany "lib.fileset.union" [
fileset1: fileset2:
_unionMany (
_coerceMany "lib.fileset.union" [
{
context = "First argument";
value = fileset1;
@ -579,7 +566,8 @@ in {
context = "Second argument";
value = fileset2;
}
]);
]
);
/**
The file set containing all files that are in any of the given file sets.
@ -590,7 +578,6 @@ in {
The given file sets are evaluated as lazily as possible,
with earlier elements being evaluated first if needed.
# Inputs
`filesets`
@ -631,16 +618,17 @@ in {
*/
unions =
filesets:
if ! isList filesets then
throw ''
lib.fileset.unions: Argument is of type ${typeOf filesets}, but it should be a list instead.''
if !isList filesets then
throw ''lib.fileset.unions: Argument is of type ${typeOf filesets}, but it should be a list instead.''
else
pipe filesets [
# Annotate the elements with context, used by _coerceMany for better errors
(imap0 (i: el: {
context = "Element ${toString i}";
value = el;
}))
(imap0 (
i: el: {
context = "Element ${toString i}";
value = el;
}
))
(_coerceMany "lib.fileset.unions")
_unionMany
];
@ -652,7 +640,6 @@ in {
The given file sets are evaluated as lazily as possible,
with the first argument being evaluated first if needed.
# Inputs
`fileset1`
@ -681,8 +668,7 @@ in {
:::
*/
intersection =
fileset1:
fileset2:
fileset1: fileset2:
let
filesets = _coerceMany "lib.fileset.intersection" [
{
@ -695,9 +681,7 @@ in {
}
];
in
_intersection
(elemAt filesets 0)
(elemAt filesets 1);
_intersection (elemAt filesets 0) (elemAt filesets 1);
/**
The file set containing all files from the first file set that are not in the second file set.
@ -706,7 +690,6 @@ in {
The given file sets are evaluated as lazily as possible,
with the first argument being evaluated first if needed.
# Inputs
`positive`
@ -744,8 +727,7 @@ in {
:::
*/
difference =
positive:
negative:
positive: negative:
let
filesets = _coerceMany "lib.fileset.difference" [
{
@ -758,14 +740,11 @@ in {
}
];
in
_difference
(elemAt filesets 0)
(elemAt filesets 1);
_difference (elemAt filesets 0) (elemAt filesets 1);
/**
Filter a file set to only contain files matching some predicate.
# Inputs
`predicate`
@ -827,22 +806,18 @@ in {
:::
*/
fileFilter =
predicate:
path:
if ! isFunction predicate then
throw ''
lib.fileset.fileFilter: First argument is of type ${typeOf predicate}, but it should be a function instead.''
else if ! isPath path then
predicate: path:
if !isFunction predicate then
throw ''lib.fileset.fileFilter: First argument is of type ${typeOf predicate}, but it should be a function instead.''
else if !isPath path then
if path._type or "" == "fileset" then
throw ''
lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
If you need to filter files in a file set, use `intersection fileset (fileFilter pred ./.)` instead.''
else
throw ''
lib.fileset.fileFilter: Second argument is of type ${typeOf path}, but it should be a path instead.''
else if ! pathExists path then
throw ''
lib.fileset.fileFilter: Second argument (${toString path}) is a path that does not exist.''
throw ''lib.fileset.fileFilter: Second argument is of type ${typeOf path}, but it should be a path instead.''
else if !pathExists path then
throw ''lib.fileset.fileFilter: Second argument (${toString path}) is a path that does not exist.''
else
_fileFilter predicate path;
@ -859,7 +834,6 @@ in {
Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
:::
# Inputs
`source`
@ -905,7 +879,8 @@ in {
:::
*/
fromSource = source:
fromSource =
source:
let
# This function uses `._isLibCleanSourceWith`, `.origSrc` and `.filter`,
# which are technically internal to lib.sources,
@ -915,17 +890,15 @@ in {
path = if isFiltered then source.origSrc else source;
in
# We can only support sources created from paths
if ! isPath path then
if !isPath path then
if isStringLike path then
throw ''
lib.fileset.fromSource: The source origin of the argument is a string-like value ("${toString path}"), but it should be a path instead.
Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.''
else
throw ''
lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
else if ! pathExists path then
throw ''
lib.fileset.fromSource: The source origin (${toString path}) of the argument is a path that does not exist.''
throw ''lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
else if !pathExists path then
throw ''lib.fileset.fromSource: The source origin (${toString path}) of the argument is a path that does not exist.''
else if isFiltered then
_fromSourceFilter path source.filter
else
@ -937,7 +910,6 @@ in {
This function behaves like [`gitTrackedWith { }`](#function-library-lib.fileset.gitTrackedWith) - using the defaults.
# Inputs
`path`
@ -966,13 +938,7 @@ in {
:::
*/
gitTracked =
path:
_fromFetchGit
"gitTracked"
"argument"
path
{};
gitTracked = path: _fromFetchGit "gitTracked" "argument" path { };
/**
Create a file set containing all [Git-tracked files](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository) in a repository.
@ -998,7 +964,6 @@ in {
This may change in the future.
:::
# Inputs
`options` (attribute set)
@ -1033,19 +998,18 @@ in {
recurseSubmodules ? false,
}:
path:
if ! isBool recurseSubmodules then
if !isBool recurseSubmodules then
throw "lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it's a ${typeOf recurseSubmodules} instead."
else if recurseSubmodules && versionOlder nixVersion _fetchGitSubmodulesMinver then
throw "lib.fileset.gitTrackedWith: Setting the attribute `recurseSubmodules` to `true` is only supported for Nix version ${_fetchGitSubmodulesMinver} and after, but Nix version ${nixVersion} is used."
else
_fromFetchGit
"gitTrackedWith"
"second argument"
path
_fromFetchGit "gitTrackedWith" "second argument" path
# This is the only `fetchGit` parameter that makes sense in this context.
# We can't just pass `submodules = recurseSubmodules` here because
# this would fail for Nix versions that don't support `submodules`.
(lib.optionalAttrs recurseSubmodules {
submodules = true;
});
(
lib.optionalAttrs recurseSubmodules {
submodules = true;
}
);
}
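A combined usage sketch for the file set API documented above; the paths (`./src`, `./Cargo.toml`, `./src/generated`) are illustrative:

    { lib }:
    let
      fs = lib.fileset;
    in
    fs.toSource {
      root = ./.;
      fileset = fs.difference
        # everything we want in the source...
        (fs.unions [
          ./Cargo.toml
          ./src
          (fs.fileFilter (file: file.hasExt "nix") ./.)
        ])
        # ...minus a directory that may or may not exist
        (fs.maybeMissing ./src/generated);
    }

Wrapping the `fileset` argument in `fs.trace` (or using `fs.traceVal`) prints which files end up in the result, which is useful while iterating on the expression.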
View file
@ -1,4 +1,6 @@
{ lib ? import ../. }:
{
lib ? import ../.,
}:
let
inherit (builtins)
@ -87,7 +89,8 @@ rec {
let
parts = splitRoot filesetV0._internalBase;
in
filesetV0 // {
filesetV0
// {
_internalVersion = 1;
_internalBaseRoot = parts.root;
_internalBaseComponents = components parts.subpath;
@ -98,7 +101,8 @@ rec {
(
filesetV1:
# This change is backwards compatible (but not forwards compatible, so we still need a new version)
filesetV1 // {
filesetV1
// {
_internalVersion = 2;
}
)
@ -106,7 +110,8 @@ rec {
# Convert v2 into v3: filesetTree's now have a representation for an empty file set without a base path
(
filesetV2:
filesetV2 // {
filesetV2
// {
# All v1 file sets are not the new empty file set
_internalIsEmptyWithoutBase = false;
_internalVersion = 3;
@ -136,7 +141,8 @@ rec {
# Create a fileset, see ./README.md#fileset
# Type: path -> filesetTree -> fileset
_create = base: tree:
_create =
base: tree:
let
# Decompose the base into its components
# See ../path/README.md for why we're not just using `toString`
@ -162,7 +168,8 @@ rec {
# Coerce a value to a fileset, erroring when the value cannot be coerced.
# The string gives the context for error messages.
# Type: String -> (fileset | Path) -> fileset
_coerce = context: value:
_coerce =
context: value:
if value._type or "" == "fileset" then
if value._internalVersion > _currentVersion then
throw ''
@ -173,12 +180,14 @@ rec {
else if value._internalVersion < _currentVersion then
let
# Get all the migration functions necessary to convert from the old to the current version
migrationsToApply = sublist value._internalVersion (_currentVersion - value._internalVersion) migrations;
migrationsToApply = sublist value._internalVersion (
_currentVersion - value._internalVersion
) migrations;
in
foldl' (value: migration: migration value) value migrationsToApply
else
value
else if ! isPath value then
else if !isPath value then
if value ? _isLibCleanSourceWith then
throw ''
${context} is a `lib.sources`-based value, but it should be a file set or a path instead.
@ -189,9 +198,8 @@ rec {
${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead.
Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
else
throw ''
${context} is of type ${typeOf value}, but it should be a file set or a path instead.''
else if ! pathExists value then
throw ''${context} is of type ${typeOf value}, but it should be a file set or a path instead.''
else if !pathExists value then
throw ''
${context} (${toString value}) is a path that does not exist.
To create a file set from a path that may not exist, use `lib.fileset.maybeMissing`.''
@ -201,22 +209,21 @@ rec {
# Coerce many values to filesets, erroring when any value cannot be coerced,
# or if the filesystem root of the values doesn't match.
# Type: String -> [ { context :: String, value :: fileset | Path } ] -> [ fileset ]
_coerceMany = functionContext: list:
_coerceMany =
functionContext: list:
let
filesets = map ({ context, value }:
_coerce "${functionContext}: ${context}" value
) list;
filesets = map ({ context, value }: _coerce "${functionContext}: ${context}" value) list;
# Find the first value with a base, there may be none!
firstWithBase = findFirst (fileset: ! fileset._internalIsEmptyWithoutBase) null filesets;
firstWithBase = findFirst (fileset: !fileset._internalIsEmptyWithoutBase) null filesets;
# This value is only accessed if first != null
firstBaseRoot = firstWithBase._internalBaseRoot;
# Finds the first element with a filesystem root different than the first element, if any
differentIndex = findFirstIndex (fileset:
differentIndex = findFirstIndex (
fileset:
# The empty value without a base doesn't have a base path
! fileset._internalIsEmptyWithoutBase
&& firstBaseRoot != fileset._internalBaseRoot
!fileset._internalIsEmptyWithoutBase && firstBaseRoot != fileset._internalBaseRoot
) null filesets;
in
# Only evaluates `differentIndex` if there are any elements with a base
@ -231,7 +238,8 @@ rec {
# Create a file set from a path.
# Type: Path -> fileset
_singleton = path:
_singleton =
path:
let
type = pathType path;
in
@ -244,21 +252,20 @@ rec {
# "default.nix" = <type>;
# }
# See ./README.md#single-files
_create (dirOf path)
{
${baseNameOf path} = type;
};
_create (dirOf path) {
${baseNameOf path} = type;
};
# Expand a directory representation to an equivalent one in attribute set form.
# All directory entries are included in the result.
# Type: Path -> filesetTree -> { <name> = filesetTree; }
_directoryEntries = path: value:
_directoryEntries =
path: value:
if value == "directory" then
readDir path
else
# Set all entries not present to null
mapAttrs (name: value: null) (readDir path)
// value;
mapAttrs (name: value: null) (readDir path) // value;
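# Illustrative sketch (the ./src layout is an assumption, not part of this change):
# if ./src contains only default.nix and lib/, then
#   _directoryEntries ./src { "default.nix" = "regular"; }
#   => { "default.nix" = "regular"; lib = null; }
# because entries missing from the tree are filled in as null.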
/*
A normalisation of a filesetTree suitable for filtering with `builtins.path`:
@ -271,7 +278,8 @@ rec {
Type: Path -> filesetTree -> filesetTree
*/
_normaliseTreeFilter = path: tree:
_normaliseTreeFilter =
path: tree:
if tree == "directory" || isAttrs tree then
let
entries = _directoryEntries path tree;
@ -301,7 +309,8 @@ rec {
Type: Path -> filesetTree -> filesetTree (with "emptyDir"'s)
*/
_normaliseTreeMinimal = path: tree:
_normaliseTreeMinimal =
path: tree:
if tree == "directory" || isAttrs tree then
let
entries = _directoryEntries path tree;
@ -334,9 +343,11 @@ rec {
# Trace a filesetTree in a pretty way when the resulting value is evaluated.
# This can handle both normal filesetTree's, and ones returned from _normaliseTreeMinimal
# Type: Path -> filesetTree (with "emptyDir"'s) -> Null
_printMinimalTree = base: tree:
_printMinimalTree =
base: tree:
let
treeSuffix = tree:
treeSuffix =
tree:
if isAttrs tree then
""
else if tree == "directory" then
@ -349,14 +360,15 @@ rec {
" (${tree})";
# Only for attribute set trees
traceTreeAttrs = prevLine: indent: tree:
foldl' (prevLine: name:
traceTreeAttrs =
prevLine: indent: tree:
foldl' (
prevLine: name:
let
subtree = tree.${name};
# Evaluating this prints the line for this subtree
thisLine =
trace "${indent}- ${name}${treeSuffix subtree}" prevLine;
thisLine = trace "${indent}- ${name}${treeSuffix subtree}" prevLine;
in
if subtree == null || subtree == "emptyDir" then
# Don't print anything at all if this subtree is empty
@ -378,24 +390,24 @@ rec {
else
trace "${toString base}${treeSuffix tree}" null;
in
if isAttrs tree then
traceTreeAttrs firstLine "" tree
else
firstLine;
if isAttrs tree then traceTreeAttrs firstLine "" tree else firstLine;
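# A hedged sketch of the traced output (base path and file names are assumed):
# for a file set containing a.nix and b.nix directly under /home/user/src,
# evaluating the returned value prints roughly
#   trace: /home/user/src
#   trace: - a.nix (regular)
#   trace: - b.nix (regular)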
# Pretty-print a file set when the resulting value is evaluated
# Type: fileset -> Null
_printFileset = fileset:
_printFileset =
fileset:
if fileset._internalIsEmptyWithoutBase then
trace "(empty)" null
else
_printMinimalTree fileset._internalBase
(_normaliseTreeMinimal fileset._internalBase fileset._internalTree);
_printMinimalTree fileset._internalBase (
_normaliseTreeMinimal fileset._internalBase fileset._internalTree
);
# Turn a fileset into a source filter function suitable for `builtins.path`
# Only directories recursively containing at least one file are recursed into
# Type: fileset -> (String -> String -> Bool)
_toSourceFilter = fileset:
_toSourceFilter =
fileset:
let
# Simplify the tree, necessary to make sure all empty directories are null
# which has the effect that they aren't included in the result
@ -403,7 +415,7 @@ rec {
# The base path as a string with a single trailing slash
baseString =
if fileset._internalBaseComponents == [] then
if fileset._internalBaseComponents == [ ] then
# Need to handle the filesystem root specially
"/"
else
@ -414,9 +426,11 @@ rec {
# Check whether a list of path components under the base path exists in the tree.
# This function is called often, so it should be fast.
# Type: [ String ] -> Bool
inTree = components:
inTree =
components:
let
recurse = index: localTree:
recurse =
index: localTree:
if isAttrs localTree then
# We have an attribute set, meaning this is a directory with at least one file
if index >= length components then
@ -431,7 +445,8 @@ rec {
# If it's not an attribute set it can only be either null (in which case it's not included)
# or a string ("directory" or "regular", etc.) in which case it's included
localTree != null;
in recurse 0 tree;
in
recurse 0 tree;
# Filter suited for when there are no files
empty = _: _: false;
@ -483,16 +498,14 @@ rec {
# Special case because the code below assumes that the _internalBase is always included in the result
# which shouldn't be done when we have no files at all in the base
# This also forces the tree before returning the filter, which leads to earlier error messages
if fileset._internalIsEmptyWithoutBase || tree == null then
empty
else
nonEmpty;
if fileset._internalIsEmptyWithoutBase || tree == null then empty else nonEmpty;
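# A minimal usage sketch (the `fileset` and `root` bindings are assumed):
#   builtins.path {
#     name = "source";
#     path = root;
#     filter = _toSourceFilter fileset;
#   }
# which is roughly what `toSource` does with the filter returned here.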
# Turn a builtins.filterSource-based source filter on a root path into a file set
# containing only files included by the filter.
# The filter is lazily called as necessary to determine whether paths are included
# Type: Path -> (String -> String -> Bool) -> fileset
_fromSourceFilter = root: sourceFilter:
_fromSourceFilter =
root: sourceFilter:
let
# During the recursion we need to track both:
# - The path value such that we can safely call `readDir` on it
@ -503,9 +516,10 @@ rec {
# which is a fairly expensive operation
# Create a file set from a directory entry
fromDirEntry = path: pathString: type:
fromDirEntry =
path: pathString: type:
# The filter needs to run on the path as a string
if ! sourceFilter pathString type then
if !sourceFilter pathString type then
null
else if type == "directory" then
fromDir path pathString
@ -513,7 +527,8 @@ rec {
type;
# Create a file set from a directory
fromDir = path: pathString:
fromDir =
path: pathString:
mapAttrs
# This looks a bit funny, but we need both the path-based and the path string-based values
(name: fromDirEntry (path + "/${name}") (pathString + "/${name}"))
@ -536,20 +551,19 @@ rec {
else
# Direct files are always included by builtins.path without calling the filter
# But we need to lift up the base path to its parent to satisfy the base path invariant
_create (dirOf root)
{
${baseNameOf root} = rootPathType;
};
_create (dirOf root) {
${baseNameOf root} = rootPathType;
};
# Turns a file set into the list of file paths it includes.
# Type: fileset -> [ Path ]
_toList = fileset:
_toList =
fileset:
let
recurse = path: tree:
recurse =
path: tree:
if isAttrs tree then
concatLists (mapAttrsToList (name: value:
recurse (path + "/${name}") value
) tree)
concatLists (mapAttrsToList (name: value: recurse (path + "/${name}") value) tree)
else if tree == "directory" then
recurse path (readDir path)
else if tree == null then
@ -565,9 +579,11 @@ rec {
# Transforms the filesetTree of a file set to a shorter base path, e.g.
# _shortenTreeBase [ "foo" ] (_create /foo/bar null)
# => { bar = null; }
_shortenTreeBase = targetBaseComponents: fileset:
_shortenTreeBase =
targetBaseComponents: fileset:
let
recurse = index:
recurse =
index:
# If we haven't reached the required depth yet
if index < length fileset._internalBaseComponents then
# Create an attribute set and recurse as the value, this can be lazily evaluated this way
@ -581,9 +597,11 @@ rec {
# Transforms the filesetTree of a file set to a longer base path, e.g.
# _lengthenTreeBase [ "foo" "bar" ] (_create /foo { bar.baz = "regular"; })
# => { baz = "regular"; }
_lengthenTreeBase = targetBaseComponents: fileset:
_lengthenTreeBase =
targetBaseComponents: fileset:
let
recurse = index: tree:
recurse =
index: tree:
# If the filesetTree is an attribute set and we haven't reached the required depth yet
if isAttrs tree && index < length targetBaseComponents then
# Recurse with the tree under the right component (which might not exist)
@ -602,10 +620,11 @@ rec {
# Computes the union of a list of filesets.
# The filesets must already be coerced and validated to be in the same filesystem root
# Type: [ Fileset ] -> Fileset
_unionMany = filesets:
_unionMany =
filesets:
let
# All filesets that have a base, aka not the ones that are the empty value without a base
filesetsWithBase = filter (fileset: ! fileset._internalIsEmptyWithoutBase) filesets;
filesetsWithBase = filter (fileset: !fileset._internalIsEmptyWithoutBase) filesets;
# The first fileset that has a base.
# This value is only accessed if there are any at all.
@ -618,13 +637,13 @@ rec {
# A list of path components common to all base paths.
# Note that commonPrefix can only be fully evaluated,
# so this cannot cause a stack overflow due to a build-up of unevaluated thunks.
commonBaseComponents = foldl'
(components: el: commonPrefix components el._internalBaseComponents)
firstWithBase._internalBaseComponents
# We could also not do the `tail` here to avoid a list allocation,
# but then we'd have to pay for a potentially expensive
# but unnecessary `commonPrefix` call
(tail filesetsWithBase);
commonBaseComponents =
foldl' (components: el: commonPrefix components el._internalBaseComponents)
firstWithBase._internalBaseComponents
# We could also not do the `tail` here to avoid a list allocation,
# but then we'd have to pay for a potentially expensive
# but unnecessary `commonPrefix` call
(tail filesetsWithBase);
# The common base path assembled from a filesystem root and the common components
commonBase = append firstWithBase._internalBaseRoot (join commonBaseComponents);
@ -643,15 +662,13 @@ rec {
resultTree = _unionTrees trees;
in
# If there are no values with a base, we have no files
if filesetsWithBase == [ ] then
_emptyWithoutBase
else
_create commonBase resultTree;
if filesetsWithBase == [ ] then _emptyWithoutBase else _create commonBase resultTree;
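# Hedged example (the paths are assumed to exist as regular files):
#   _unionMany [ (_singleton /foo/bar/a.nix) (_singleton /foo/baz/b.nix) ]
# produces a file set based at the common prefix /foo containing both files,
# after both trees have been lifted to that shorter base.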
# The union of multiple filesetTree's with the same base path.
# Later elements are only evaluated if necessary.
# Type: [ filesetTree ] -> filesetTree
_unionTrees = trees:
_unionTrees =
trees:
let
stringIndex = findFirstIndex isString null trees;
withoutNull = filter (tree: tree != null) trees;
@ -671,18 +688,15 @@ rec {
# Computes the intersection of a list of filesets.
# The filesets must already be coerced and validated to be in the same filesystem root
# Type: Fileset -> Fileset -> Fileset
_intersection = fileset1: fileset2:
_intersection =
fileset1: fileset2:
let
# The common base components prefix, e.g.
# (/foo/bar, /foo/bar/baz) -> /foo/bar
# (/foo/bar, /foo/baz) -> /foo
commonBaseComponentsLength =
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
length (
commonPrefix
fileset1._internalBaseComponents
fileset2._internalBaseComponents
);
length (commonPrefix fileset1._internalBaseComponents fileset2._internalBaseComponents);
# To be able to intersect filesetTree's together, they need to have the same base path.
# Base paths can be intersected by taking the longest one (if any)
@ -725,12 +739,11 @@ rec {
# The intersection of two filesetTree's with the same base path
# The second element is only evaluated as much as necessary.
# Type: filesetTree -> filesetTree -> filesetTree
_intersectTree = lhs: rhs:
_intersectTree =
lhs: rhs:
if isAttrs lhs && isAttrs rhs then
# Both sides are attribute sets, we can recurse for the attributes existing on both sides
mapAttrs
(name: _intersectTree lhs.${name})
(builtins.intersectAttrs lhs rhs)
mapAttrs (name: _intersectTree lhs.${name}) (builtins.intersectAttrs lhs rhs)
else if lhs == null || isString rhs then
# If the lhs is null, the result should also be null
# And if the rhs is the identity element
@ -743,18 +756,15 @@ rec {
# Compute the set difference between two file sets.
# The filesets must already be coerced and validated to be in the same filesystem root.
# Type: Fileset -> Fileset -> Fileset
_difference = positive: negative:
_difference =
positive: negative:
let
# The common base components prefix, e.g.
# (/foo/bar, /foo/bar/baz) -> /foo/bar
# (/foo/bar, /foo/baz) -> /foo
commonBaseComponentsLength =
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
length (
commonPrefix
positive._internalBaseComponents
negative._internalBaseComponents
);
length (commonPrefix positive._internalBaseComponents negative._internalBaseComponents);
# We need filesetTree's with the same base to be able to compute the difference between them
# This here is the filesetTree from the negative file set, but for a base path that matches the positive file set.
@ -786,9 +796,7 @@ rec {
null;
resultingTree =
_differenceTree
positive._internalBase
positive._internalTree
_differenceTree positive._internalBase positive._internalTree
negativeTreeWithPositiveBase;
in
# If the first file set is empty, we can never have any files in the result
@ -805,7 +813,8 @@ rec {
# Computes the set difference of two filesetTree's
# Type: Path -> filesetTree -> filesetTree
_differenceTree = path: lhs: rhs:
_differenceTree =
path: lhs: rhs:
# If the lhs doesn't have any files, or the right hand side includes all files
if lhs == null || isString rhs then
# The result will always be empty
@ -816,17 +825,19 @@ rec {
lhs
else
# Otherwise we always have two attribute sets to recurse into
mapAttrs (name: lhsValue:
_differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)
) (_directoryEntries path lhs);
mapAttrs (name: lhsValue: _differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)) (
_directoryEntries path lhs
);
# Filters all files in a path based on a predicate
# Type: ({ name, type, ... } -> Bool) -> Path -> FileSet
_fileFilter = predicate: root:
_fileFilter =
predicate: root:
let
# Check the predicate for a single file
# Type: String -> String -> filesetTree
fromFile = name: type:
fromFile =
name: type:
if
predicate {
inherit name type;
@ -834,7 +845,8 @@ rec {
# To ensure forwards compatibility with more arguments being added in the future,
# adding an attribute which can't be deconstructed :)
"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file, hasExt }:`, use `{ name, file, hasExt, ... }:` instead." = null;
"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file, hasExt }:`, use `{ name, file, hasExt, ... }:` instead." =
null;
}
then
type
@ -843,12 +855,10 @@ rec {
# Check the predicate for all files in a directory
# Type: Path -> filesetTree
fromDir = path:
mapAttrs (name: type:
if type == "directory" then
fromDir (path + "/${name}")
else
fromFile name type
fromDir =
path:
mapAttrs (
name: type: if type == "directory" then fromDir (path + "/${name}") else fromFile name type
) (readDir path);
rootType = pathType root;
@ -858,8 +868,7 @@ rec {
else
# Single files are turned into a directory containing that file or nothing.
_create (dirOf root) {
${baseNameOf root} =
fromFile (baseNameOf root) rootType;
${baseNameOf root} = fromFile (baseNameOf root) rootType;
};
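# A hedged sketch of how the public `fileFilter` builds on this internally
# (the predicate and path are assumptions for illustration):
#   fileFilter (file: file.hasExt "nix") ./.
# keeps only the *.nix files under ./.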
# Support for `builtins.fetchGit` with `submodules = true` was introduced in 2.4
@ -876,22 +885,21 @@ rec {
# - The store path must not include files that don't exist in the respective local path.
#
# Type: Path -> String -> FileSet
_mirrorStorePath = localPath: storePath:
_mirrorStorePath =
localPath: storePath:
let
recurse = focusedStorePath:
mapAttrs (name: type:
if type == "directory" then
recurse (focusedStorePath + "/${name}")
else
type
recurse =
focusedStorePath:
mapAttrs (
name: type: if type == "directory" then recurse (focusedStorePath + "/${name}") else type
) (builtins.readDir focusedStorePath);
in
_create localPath
(recurse storePath);
_create localPath (recurse storePath);
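# Hedged sketch (the store path is a made-up placeholder): if ./. was fetched
# to /nix/store/<hash>-source, then
#   _mirrorStorePath ./. "/nix/store/<hash>-source"
# yields a file set based at ./. containing exactly the files present in that store path.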
# Create a file set from the files included in the result of a fetchGit call
# Type: String -> String -> Path -> Attrs -> FileSet
_fromFetchGit = function: argument: path: extraFetchGitAttrs:
_fromFetchGit =
function: argument: path: extraFetchGitAttrs:
let
# The code path for when isStorePath is true
tryStorePath =
@ -922,31 +930,33 @@ rec {
# With the [lazy trees PR](https://github.com/NixOS/nix/pull/6530),
# the unnecessary import could be avoided.
# However a simpler alternative still would be [a builtins.gitLsFiles](https://github.com/NixOS/nix/issues/2944).
fetchResult = fetchGit ({
url = path;
}
# In older Nix versions, repositories were always assumed to be deep clones, which made `fetchGit` fail for shallow clones
# For newer versions this was fixed, but the `shallow` flag is required.
# The only behavioral difference is that for shallow clones, `fetchGit` doesn't return a `revCount`,
# which we don't need here, so it's fine to always pass it.
fetchResult = fetchGit (
{
url = path;
}
# In older Nix versions, repositories were always assumed to be deep clones, which made `fetchGit` fail for shallow clones
# For newer versions this was fixed, but the `shallow` flag is required.
# The only behavioral difference is that for shallow clones, `fetchGit` doesn't return a `revCount`,
# which we don't need here, so it's fine to always pass it.
# Unfortunately this means older Nix versions get a poor error message for shallow repositories, and there's no good way to improve that.
# Checking for `.git/shallow` doesn't seem worth it, especially since that's more of an implementation detail,
# and would also require more code to handle worktrees where `.git` is a file.
// optionalAttrs (versionAtLeast nixVersion _fetchGitShallowMinver) { shallow = true; }
// extraFetchGitAttrs);
# Unfortunately this means older Nix versions get a poor error message for shallow repositories, and there's no good way to improve that.
# Checking for `.git/shallow` doesn't seem worth it, especially since that's more of an implementation detail,
# and would also require more code to handle worktrees where `.git` is a file.
// optionalAttrs (versionAtLeast nixVersion _fetchGitShallowMinver) { shallow = true; }
// extraFetchGitAttrs
);
in
# We can identify local working directories by checking for .git,
# see https://git-scm.com/docs/gitrepository-layout#_description.
# Note that `builtins.fetchGit` _does_ work for bare repositories (where there's no `.git`),
# even though `git ls-files` wouldn't return any files in that case.
if ! pathExists (path + "/.git") then
if !pathExists (path + "/.git") then
throw "lib.fileset.${function}: Expected the ${argument} (${toString path}) to point to a local working tree of a Git repository, but it's not."
else
_mirrorStorePath path fetchResult.outPath;
in
if ! isPath path then
if !isPath path then
throw "lib.fileset.${function}: Expected the ${argument} to be a path, but it's a ${typeOf path} instead."
else if pathType path != "directory" then
throw "lib.fileset.${function}: Expected the ${argument} (${toString path}) to be a directory, but it's a file instead."

View file

@ -8,18 +8,21 @@
# }
self: super: {
path = super.path // {
splitRoot = path:
splitRoot =
path:
let
parts = super.path.splitRoot path;
components = self.path.subpath.components parts.subpath;
count = self.length components;
rootIndex = count - self.lists.findFirstIndex
(component: component == "mock-root")
(self.length components)
(self.reverseList components);
rootIndex =
count
- self.lists.findFirstIndex (component: component == "mock-root") (self.length components) (
self.reverseList components
);
root = self.path.append parts.root (self.path.subpath.join (self.take rootIndex components));
subpath = self.path.subpath.join (self.drop rootIndex components);
in {
in
{
inherit root subpath;
};
};

View file

@ -13,8 +13,9 @@ finalLib: prevLib: # lib overlay
{
trivial = prevLib.trivial // {
versionSuffix =
".${finalLib.substring 0 8 (self.lastModifiedDate or "19700101")}.${self.shortRev or "dirty"}";
versionSuffix = ".${
finalLib.substring 0 8 (self.lastModifiedDate or "19700101")
}.${self.shortRev or "dirty"}";
revisionWithDefault = default: self.rev or default;
};
}

View file

@ -1,10 +1,12 @@
{
description = "Library of low-level helper functions for nix expressions.";
outputs = { self }:
outputs =
{ self }:
let
lib0 = import ./.;
in {
in
{
lib = lib0.extend (import ./flake-version-info.nix self);
};
}

View file

@ -14,7 +14,12 @@
let
inherit (lib)
concatMapStringsSep concatStrings escape head replaceStrings;
concatMapStringsSep
concatStrings
escape
head
replaceStrings
;
mkPrimitive = t: v: {
_type = "gvariant";
@ -49,7 +54,6 @@ rec {
/**
Check if a value is a GVariant value
# Inputs
`v`
@ -115,7 +119,6 @@ rec {
Returns the GVariant value that most closely matches the given Nix value.
If no GVariant value can be found unambiguously then error is thrown.
# Inputs
`v`
@ -128,7 +131,8 @@ rec {
mkValue :: Any -> gvariant
```
*/
mkValue = v:
mkValue =
v:
if builtins.isBool v then
mkBoolean v
else if builtins.isFloat v then
@ -141,13 +145,17 @@ rec {
v
else if builtins.isInt v then
let
validConstructors = builtins.filter ({ min, max, ... }: (min == null || min <= v) && (max == null || v <= max)) intConstructors;
validConstructors = builtins.filter (
{ min, max, ... }: (min == null || min <= v) && (max == null || v <= max)
) intConstructors;
in
throw ''
The GVariant type for number ${builtins.toString v} is unclear.
Please wrap the value with one of the following, depending on the value type in GSettings schema:
${lib.concatMapStringsSep "\n" ({ name, type, ...}: "- `lib.gvariant.${name}` for `${type}`") validConstructors}
${lib.concatMapStringsSep "\n" (
{ name, type, ... }: "- `lib.gvariant.${name}` for `${type}`"
) validConstructors}
''
else if builtins.isAttrs v then
throw "Cannot construct GVariant value from an attribute set. If you want to construct a dictionary, you will need to create an array containing items constructed with `lib.gvariant.mkDictionaryEntry`."
@ -157,7 +165,6 @@ rec {
/**
Returns the GVariant array from the given type of the elements and a Nix list.
# Inputs
`elems`
@ -181,22 +188,22 @@ rec {
:::
*/
mkArray = elems:
mkArray =
elems:
let
vs = map mkValue (lib.throwIf (elems == [ ]) "Please create empty array with mkEmptyArray." elems);
elemType = lib.throwIfNot (lib.all (t: (head vs).type == t) (map (v: v.type) vs))
"Elements in a list should have same type."
(head vs).type;
elemType = lib.throwIfNot (lib.all (t: (head vs).type == t) (
map (v: v.type) vs
)) "Elements in a list should have same type." (head vs).type;
in
mkPrimitive (type.arrayOf elemType) vs // {
__toString = self:
"@${self.type} [${concatMapStringsSep "," toString self.value}]";
mkPrimitive (type.arrayOf elemType) vs
// {
__toString = self: "@${self.type} [${concatMapStringsSep "," toString self.value}]";
};
/**
Returns the GVariant array from the given empty Nix list.
# Inputs
`elemType`
@ -220,16 +227,17 @@ rec {
:::
*/
mkEmptyArray = elemType: mkPrimitive (type.arrayOf elemType) [ ] // {
__toString = self: "@${self.type} []";
};
mkEmptyArray =
elemType:
mkPrimitive (type.arrayOf elemType) [ ]
// {
__toString = self: "@${self.type} []";
};
/**
Returns the GVariant variant from the given Nix value. Variants are containers
of different GVariant types.
# Inputs
`elem`
@ -255,16 +263,19 @@ rec {
:::
*/
mkVariant = elem:
let gvarElem = mkValue elem;
in mkPrimitive type.variant gvarElem // {
mkVariant =
elem:
let
gvarElem = mkValue elem;
in
mkPrimitive type.variant gvarElem
// {
__toString = self: "<${toString self.value}>";
};
/**
Returns the GVariant dictionary entry from the given key and value.
# Inputs
`name`
@ -297,21 +308,20 @@ rec {
:::
*/
mkDictionaryEntry =
name:
value:
name: value:
let
name' = mkValue name;
value' = mkValue value;
dictionaryType = type.dictionaryEntryOf name'.type value'.type;
in
mkPrimitive dictionaryType { inherit name value; } // {
mkPrimitive dictionaryType { inherit name value; }
// {
__toString = self: "@${self.type} {${name'},${value'}}";
};
/**
Returns the GVariant maybe from the given element type.
# Inputs
`elemType`
@ -328,19 +338,17 @@ rec {
mkMaybe :: gvariant.type -> Any -> gvariant
```
*/
mkMaybe = elemType: elem:
mkPrimitive (type.maybeOf elemType) elem // {
__toString = self:
if self.value == null then
"@${self.type} nothing"
else
"just ${toString self.value}";
mkMaybe =
elemType: elem:
mkPrimitive (type.maybeOf elemType) elem
// {
__toString =
self: if self.value == null then "@${self.type} nothing" else "just ${toString self.value}";
};
/**
Returns the GVariant nothing from the given element type.
# Inputs
`elemType`
@ -358,7 +366,6 @@ rec {
/**
Returns the GVariant just from the given Nix value.
# Inputs
`elem`
@ -371,12 +378,16 @@ rec {
mkJust :: Any -> gvariant
```
*/
mkJust = elem: let gvarElem = mkValue elem; in mkMaybe gvarElem.type gvarElem;
mkJust =
elem:
let
gvarElem = mkValue elem;
in
mkMaybe gvarElem.type gvarElem;
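# Hedged examples (the wrapped values are assumed):
#   mkJust (mkInt32 4)      renders roughly as "just 4"
#   mkNothing type.int32    renders roughly as "@mi nothing"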
/**
Returns the GVariant tuple from the given Nix list.
# Inputs
`elems`
@ -389,20 +400,20 @@ rec {
mkTuple :: [Any] -> gvariant
```
*/
mkTuple = elems:
mkTuple =
elems:
let
gvarElems = map mkValue elems;
tupleType = type.tupleOf (map (e: e.type) gvarElems);
in
mkPrimitive tupleType gvarElems // {
__toString = self:
"@${self.type} (${concatMapStringsSep "," toString self.value})";
mkPrimitive tupleType gvarElems
// {
__toString = self: "@${self.type} (${concatMapStringsSep "," toString self.value})";
};
/**
Returns the GVariant boolean from the given Nix bool value.
# Inputs
`v`
@ -415,15 +426,16 @@ rec {
mkBoolean :: Bool -> gvariant
```
*/
mkBoolean = v:
mkPrimitive type.boolean v // {
mkBoolean =
v:
mkPrimitive type.boolean v
// {
__toString = self: if self.value then "true" else "false";
};
/**
Returns the GVariant string from the given Nix string value.
# Inputs
`v`
@ -436,16 +448,19 @@ rec {
mkString :: String -> gvariant
```
*/
mkString = v:
let sanitize = s: replaceStrings [ "\n" ] [ "\\n" ] (escape [ "'" "\\" ] s);
in mkPrimitive type.string v // {
mkString =
v:
let
sanitize = s: replaceStrings [ "\n" ] [ "\\n" ] (escape [ "'" "\\" ] s);
in
mkPrimitive type.string v
// {
__toString = self: "'${sanitize self.value}'";
};
/**
Returns the GVariant object path from the given Nix string value.
# Inputs
`v`
@ -458,8 +473,10 @@ rec {
mkObjectpath :: String -> gvariant
```
*/
mkObjectpath = v:
mkPrimitive type.string v // {
mkObjectpath =
v:
mkPrimitive type.string v
// {
__toString = self: "objectpath '${escape [ "'" ] self.value}'";
};
@ -499,7 +516,6 @@ rec {
/**
Returns the GVariant int32 from the given Nix int value.
# Inputs
`v`
@ -512,8 +528,10 @@ rec {
mkInt32 :: Int -> gvariant
```
*/
mkInt32 = v:
mkPrimitive type.int32 v // {
mkInt32 =
v:
mkPrimitive type.int32 v
// {
__toString = self: toString self.value;
};
@ -553,7 +571,6 @@ rec {
/**
Returns the GVariant double from the given Nix float value.
# Inputs
`v`
@ -566,8 +583,10 @@ rec {
mkDouble :: Float -> gvariant
```
*/
mkDouble = v:
mkPrimitive type.double v // {
mkDouble =
v:
mkPrimitive type.double v
// {
__toString = self: toString self.value;
};
}

View file

@ -5,22 +5,34 @@ let
in
{
# Keeping these around in case we decide to change this horrible implementation :)
option = x:
x // { optional = true; };
yes = { tristate = "y"; optional = false; };
no = { tristate = "n"; optional = false; };
module = { tristate = "m"; optional = false; };
unset = { tristate = null; optional = false; };
freeform = x: { freeform = x; optional = false; };
option = x: x // { optional = true; };
yes = {
tristate = "y";
optional = false;
};
no = {
tristate = "n";
optional = false;
};
module = {
tristate = "m";
optional = false;
};
unset = {
tristate = null;
optional = false;
};
freeform = x: {
freeform = x;
optional = false;
};
# Common patterns/legacy used in common-config/hardened/config.nix
whenHelpers = version: {
whenAtLeast = ver: mkIf (versionAtLeast version ver);
whenOlder = ver: mkIf (versionOlder version ver);
whenOlder = ver: mkIf (versionOlder version ver);
# range is (inclusive, exclusive)
whenBetween = verLow: verHigh: mkIf (versionAtLeast version verLow && versionOlder version verHigh);
};
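# A hedged usage sketch (the option names are made up for illustration; `version`
# is the kernel version string passed to whenHelpers):
#   let inherit (whenHelpers version) whenAtLeast whenBetween; in {
#     FOO = whenAtLeast "6.1" yes;
#     BAR = whenBetween "5.10" "6.1" module;
#   }
# i.e. each helper partially applies mkIf with a version comparison.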

View file

@ -420,20 +420,17 @@ rec {
Placeholders will not be quoted as they are not actual values:
(showOption ["foo" "*" "bar"]) == "foo.*.bar"
(showOption ["foo" "<name>" "bar"]) == "foo.<name>.bar"
(showOption ["foo" "<myPlaceholder>" "bar"]) == "foo.<myPlaceholder>.bar"
*/
showOption = parts: let
# If the part is a named placeholder of the form "<...>" don't escape it.
# It may cause misleading escaping if somebody uses literally "<...>" in their option names.
# This is the trade-off to allow for placeholders in option names.
isNamedPlaceholder = builtins.match "\<(.*)\>";
escapeOptionPart = part:
let
# We assume that these are "special values" and not real configuration data.
# If it is real configuration data, it is rendered incorrectly.
specialIdentifiers = [
"<name>" # attrsOf (submodule {})
"*" # listOf (submodule {})
"<function body>" # functionTo
];
in if builtins.elem part specialIdentifiers
then part
else lib.strings.escapeNixIdentifier part;
if part == "*" || isNamedPlaceholder part != null
then part
else lib.strings.escapeNixIdentifier part;
in (concatStringsSep ".") (map escapeOptionPart parts);
showFiles = files: concatStringsSep " and " (map (f: "`${f}'") files);

View file

@ -1,4 +1,4 @@
/* Functions for working with path values. */
# Functions for working with path values.
# See ./README.md for internal docs
{ lib }:
let
@ -41,8 +41,9 @@ let
;
# Return the reason why a subpath is invalid, or `null` if it's valid
subpathInvalidReason = value:
if ! isString value then
subpathInvalidReason =
value:
if !isString value then
"The given value is of type ${builtins.typeOf value}, but a string was expected"
else if value == "" then
"The given string is empty"
@ -51,11 +52,13 @@ let
# We don't support ".." components, see ./path.md#parent-directory
else if match "(.*/)?\\.\\.(/.*)?" value != null then
"The given string \"${value}\" contains a `..` component, which is not allowed in subpaths"
else null;
else
null;
# Split and normalise a relative path string into its components.
# Error for ".." components and doesn't include "." components
splitRelPath = path:
splitRelPath =
path:
let
# Split the string into its parts using regex for efficiency. This regex
# matches patterns like "/", "/./", "/././", with arbitrarily many "/"s
@ -86,26 +89,31 @@ let
componentCount = partCount - skipEnd - skipStart;
in
# Special case of a single "." path component. Such a case leaves a
# componentCount of -1 due to the skipStart/skipEnd not verifying that
# they don't refer to the same character
if path == "." then []
# Special case of a single "." path component. Such a case leaves a
# componentCount of -1 due to the skipStart/skipEnd not verifying that
# they don't refer to the same character
if path == "." then
[ ]
# Generate the result list directly. This is more efficient than a
# combination of `filter`, `init` and `tail`, because here we don't
# allocate any intermediate lists
else genList (index:
# Generate the result list directly. This is more efficient than a
# combination of `filter`, `init` and `tail`, because here we don't
# allocate any intermediate lists
else
genList (
index:
# To get to the element we need to add the number of parts we skip and
# multiply by two due to the interleaved layout of `parts`
elemAt parts ((skipStart + index) * 2)
) componentCount;
# Join relative path components together
joinRelPath = components:
joinRelPath =
components:
# Always return relative paths with `./` as a prefix (./path.md#leading-dots-for-relative-paths)
"./" +
# An empty string is not a valid relative path, so we need to return a `.` when we have no components
(if components == [] then "." else concatStringsSep "/" components);
"./"
+
# An empty string is not a valid relative path, so we need to return a `.` when we have no components
(if components == [ ] then "." else concatStringsSep "/" components);
# Type: Path -> { root :: Path, components :: [ String ] }
#
@ -117,11 +125,18 @@ let
# because it can distinguish different filesystem roots
deconstructPath =
let
recurse = components: base:
recurse =
components: base:
# If the parent of a path is the path itself, then it's a filesystem root
if base == dirOf base then { root = base; inherit components; }
else recurse ([ (baseNameOf base) ] ++ components) (dirOf base);
in recurse [];
if base == dirOf base then
{
root = base;
inherit components;
}
else
recurse ([ (baseNameOf base) ] ++ components) (dirOf base);
in
recurse [ ];
# The components of the store directory, typically [ "nix" "store" ]
storeDirComponents = splitRelPath ("./" + storeDir);
@ -132,7 +147,8 @@ let
#
# Whether path components have a store path as a prefix, according to
# https://nixos.org/manual/nix/stable/store/store-path.html#store-path.
componentsHaveStorePathPrefix = components:
componentsHaveStorePathPrefix =
components:
# path starts with the store directory (typically /nix/store)
listHasPrefix storeDirComponents components
# is not the store directory itself, meaning there's at least one extra component
@ -145,7 +161,9 @@ let
# We care more about discerning store path-ness on realistic values. Making it airtight would be fragile and slow.
&& match ".{32}-.+" (elemAt components storeDirLength) != null;
in /* No rec! Add dependencies on this file at the top. */ {
in
# No rec! Add dependencies on this file at the top.
{
/*
Append a subpath string to a path.
@ -194,8 +212,8 @@ in /* No rec! Add dependencies on this file at the top. */ {
path:
# The subpath string to append
subpath:
assert assertMsg (isPath path) ''
lib.path.append: The first argument is of type ${builtins.typeOf path}, but a path was expected'';
assert assertMsg (isPath path)
''lib.path.append: The first argument is of type ${builtins.typeOf path}, but a path was expected'';
assert assertMsg (isValid subpath) ''
lib.path.append: Second argument is not a valid subpath string:
${subpathInvalidReason subpath}'';
@ -225,25 +243,23 @@ in /* No rec! Add dependencies on this file at the top. */ {
*/
hasPrefix =
path1:
assert assertMsg
(isPath path1)
assert assertMsg (isPath path1)
"lib.path.hasPrefix: First argument is of type ${typeOf path1}, but a path was expected";
let
path1Deconstructed = deconstructPath path1;
in
path2:
assert assertMsg
(isPath path2)
"lib.path.hasPrefix: Second argument is of type ${typeOf path2}, but a path was expected";
let
path2Deconstructed = deconstructPath path2;
in
assert assertMsg
(path1Deconstructed.root == path2Deconstructed.root) ''
lib.path.hasPrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
take (length path1Deconstructed.components) path2Deconstructed.components == path1Deconstructed.components;
path2:
assert assertMsg (isPath path2)
"lib.path.hasPrefix: Second argument is of type ${typeOf path2}, but a path was expected";
let
path2Deconstructed = deconstructPath path2;
in
assert assertMsg (path1Deconstructed.root == path2Deconstructed.root) ''
lib.path.hasPrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
take (length path1Deconstructed.components) path2Deconstructed.components
== path1Deconstructed.components;
/*
Remove the first path as a component-wise prefix from the second path.
@ -270,33 +286,29 @@ in /* No rec! Add dependencies on this file at the top. */ {
*/
removePrefix =
path1:
assert assertMsg
(isPath path1)
assert assertMsg (isPath path1)
"lib.path.removePrefix: First argument is of type ${typeOf path1}, but a path was expected.";
let
path1Deconstructed = deconstructPath path1;
path1Length = length path1Deconstructed.components;
in
path2:
assert assertMsg
(isPath path2)
"lib.path.removePrefix: Second argument is of type ${typeOf path2}, but a path was expected.";
let
path2Deconstructed = deconstructPath path2;
success = take path1Length path2Deconstructed.components == path1Deconstructed.components;
components =
if success then
drop path1Length path2Deconstructed.components
else
throw ''
lib.path.removePrefix: The first path argument "${toString path1}" is not a component-wise prefix of the second path argument "${toString path2}".'';
in
assert assertMsg
(path1Deconstructed.root == path2Deconstructed.root) ''
lib.path.removePrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
joinRelPath components;
path2:
assert assertMsg (isPath path2)
"lib.path.removePrefix: Second argument is of type ${typeOf path2}, but a path was expected.";
let
path2Deconstructed = deconstructPath path2;
success = take path1Length path2Deconstructed.components == path1Deconstructed.components;
components =
if success then
drop path1Length path2Deconstructed.components
else
throw ''lib.path.removePrefix: The first path argument "${toString path1}" is not a component-wise prefix of the second path argument "${toString path2}".'';
in
assert assertMsg (path1Deconstructed.root == path2Deconstructed.root) ''
lib.path.removePrefix: Filesystem roots must be the same for both paths, but paths with different roots were given:
first argument: "${toString path1}" with root "${toString path1Deconstructed.root}"
second argument: "${toString path2}" with root "${toString path2Deconstructed.root}"'';
joinRelPath components;
/*
Split the filesystem root from a [path](https://nixos.org/manual/nix/stable/language/values.html#type-path).
@ -336,12 +348,12 @@ in /* No rec! Add dependencies on this file at the top. */ {
splitRoot =
# The path to split the root off of
path:
assert assertMsg
(isPath path)
assert assertMsg (isPath path)
"lib.path.splitRoot: Argument is of type ${typeOf path}, but a path was expected";
let
deconstructed = deconstructPath path;
in {
in
{
root = deconstructed.root;
subpath = joinRelPath deconstructed.components;
};
@ -387,12 +399,12 @@ in /* No rec! Add dependencies on this file at the top. */ {
hasStorePathPrefix /nix/store/nvl9ic0pj1fpyln3zaqrf4cclbqdfn1j-foo.drv
=> true
*/
hasStorePathPrefix = path:
hasStorePathPrefix =
path:
let
deconstructed = deconstructPath path;
in
assert assertMsg
(isPath path)
assert assertMsg (isPath path)
"lib.path.hasStorePathPrefix: Argument is of type ${typeOf path}, but a path was expected";
assert assertMsg
# This function likely breaks or needs adjustment if used with other filesystem roots, if they ever get implemented.
@ -446,9 +458,7 @@ in /* No rec! Add dependencies on this file at the top. */ {
*/
subpath.isValid =
# The value to check
value:
subpathInvalidReason value == null;
value: subpathInvalidReason value == null;
/*
Join subpath strings together using `/`, returning a normalised subpath string.
@ -511,18 +521,20 @@ in /* No rec! Add dependencies on this file at the top. */ {
# The list of subpaths to join together
subpaths:
# Fast in case all paths are valid
if all isValid subpaths
then joinRelPath (concatMap splitRelPath subpaths)
if all isValid subpaths then
joinRelPath (concatMap splitRelPath subpaths)
else
# Otherwise we take our time to gather more info for a better error message
# Strictly go through each path, throwing on the first invalid one
# Tracks the list index in the fold accumulator
foldl' (i: path:
if isValid path
then i + 1
else throw ''
lib.path.subpath.join: Element at index ${toString i} is not a valid subpath string:
${subpathInvalidReason path}''
foldl' (
i: path:
if isValid path then
i + 1
else
throw ''
lib.path.subpath.join: Element at index ${toString i} is not a valid subpath string:
${subpathInvalidReason path}''
) 0 subpaths;
/*

View file

@ -2,8 +2,8 @@
nixpkgs ? ../../..,
system ? builtins.currentSystem,
pkgs ? import nixpkgs {
config = {};
overlays = [];
config = { };
overlays = [ ];
inherit system;
},
nixVersions ? import ../../tests/nix-for-tests.nix { inherit pkgs; },
@ -12,34 +12,38 @@
seed ? null,
}:
pkgs.runCommand "lib-path-tests" {
nativeBuildInputs = [
nixVersions.stable
] ++ (with pkgs; [
jq
bc
]);
} ''
# Needed to make Nix evaluation work
export TEST_ROOT=$(pwd)/test-tmp
export NIX_BUILD_HOOK=
export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_STORE_DIR=$TEST_ROOT/store
export PAGER=cat
pkgs.runCommand "lib-path-tests"
{
nativeBuildInputs =
[
nixVersions.stable
]
++ (with pkgs; [
jq
bc
]);
}
''
# Needed to make Nix evaluation work
export TEST_ROOT=$(pwd)/test-tmp
export NIX_BUILD_HOOK=
export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
export NIX_STATE_DIR=$TEST_ROOT/var/nix
export NIX_STORE_DIR=$TEST_ROOT/store
export PAGER=cat
cp -r ${libpath} lib
export TEST_LIB=$PWD/lib
cp -r ${libpath} lib
export TEST_LIB=$PWD/lib
echo "Running unit tests lib/path/tests/unit.nix"
nix-instantiate --eval --show-trace \
--argstr libpath "$TEST_LIB" \
lib/path/tests/unit.nix
echo "Running unit tests lib/path/tests/unit.nix"
nix-instantiate --eval --show-trace \
--argstr libpath "$TEST_LIB" \
lib/path/tests/unit.nix
echo "Running property tests lib/path/tests/prop.sh"
bash lib/path/tests/prop.sh ${toString seed}
echo "Running property tests lib/path/tests/prop.sh"
bash lib/path/tests/prop.sh ${toString seed}
touch $out
''
touch $out
''

View file

@ -16,14 +16,15 @@ let
lib = import libpath;
# read each file into a string
strings = map (name:
builtins.readFile (dir + "/${name}")
) (builtins.attrNames (builtins.readDir dir));
strings = map (name: builtins.readFile (dir + "/${name}")) (
builtins.attrNames (builtins.readDir dir)
);
inherit (lib.path.subpath) normalise isValid;
inherit (lib.asserts) assertMsg;
normaliseAndCheck = str:
normaliseAndCheck =
str:
let
originalValid = isValid str;
@ -33,28 +34,27 @@ let
absConcatOrig = /. + ("/" + str);
absConcatNormalised = /. + ("/" + tryOnce.value);
in
# Check the lib.path.subpath.normalise property to only error on invalid subpaths
assert assertMsg
(originalValid -> tryOnce.success)
"Even though string \"${str}\" is valid as a subpath, the normalisation for it failed";
assert assertMsg
(! originalValid -> ! tryOnce.success)
"Even though string \"${str}\" is invalid as a subpath, the normalisation for it succeeded";
# Check the lib.path.subpath.normalise property to only error on invalid subpaths
assert assertMsg (
originalValid -> tryOnce.success
) "Even though string \"${str}\" is valid as a subpath, the normalisation for it failed";
assert assertMsg (
!originalValid -> !tryOnce.success
) "Even though string \"${str}\" is invalid as a subpath, the normalisation for it succeeded";
# Check normalisation idempotency
assert assertMsg
(originalValid -> tryTwice.success)
"For valid subpath \"${str}\", the normalisation \"${tryOnce.value}\" was not a valid subpath";
assert assertMsg
(originalValid -> tryOnce.value == tryTwice.value)
"For valid subpath \"${str}\", normalising it once gives \"${tryOnce.value}\" but normalising it twice gives a different result: \"${tryTwice.value}\"";
# Check normalisation idempotency
assert assertMsg (
originalValid -> tryTwice.success
) "For valid subpath \"${str}\", the normalisation \"${tryOnce.value}\" was not a valid subpath";
assert assertMsg (originalValid -> tryOnce.value == tryTwice.value)
"For valid subpath \"${str}\", normalising it once gives \"${tryOnce.value}\" but normalising it twice gives a different result: \"${tryTwice.value}\"";
# Check that normalisation doesn't change a string when appended to an absolute Nix path value
assert assertMsg
(originalValid -> absConcatOrig == absConcatNormalised)
"For valid subpath \"${str}\", appending to an absolute Nix path value gives \"${absConcatOrig}\", but appending the normalised result \"${tryOnce.value}\" gives a different value \"${absConcatNormalised}\"";
# Check that normalisation doesn't change a string when appended to an absolute Nix path value
assert assertMsg (originalValid -> absConcatOrig == absConcatNormalised)
"For valid subpath \"${str}\", appending to an absolute Nix path value gives \"${absConcatOrig}\", but appending the normalised result \"${tryOnce.value}\" gives a different value \"${absConcatNormalised}\"";
# Return an empty string when failed
if tryOnce.success then tryOnce.value else "";
# Return an empty string when failed
if tryOnce.success then tryOnce.value else "";
in lib.genAttrs strings normaliseAndCheck
in
lib.genAttrs strings normaliseAndCheck

View file

@ -3,7 +3,14 @@
{ libpath }:
let
lib = import libpath;
inherit (lib.path) hasPrefix removePrefix append splitRoot hasStorePathPrefix subpath;
inherit (lib.path)
hasPrefix
removePrefix
append
splitRoot
hasStorePathPrefix
subpath
;
# This is not allowed generally, but we're in the tests here, so we'll allow ourselves.
storeDirPath = /. + builtins.storeDir;
@ -79,15 +86,24 @@ let
testSplitRootExample1 = {
expr = splitRoot /foo/bar;
expected = { root = /.; subpath = "./foo/bar"; };
expected = {
root = /.;
subpath = "./foo/bar";
};
};
testSplitRootExample2 = {
expr = splitRoot /.;
expected = { root = /.; subpath = "./."; };
expected = {
root = /.;
subpath = "./.";
};
};
testSplitRootExample3 = {
expr = splitRoot /foo/../bar;
expected = { root = /.; subpath = "./bar"; };
expected = {
root = /.;
subpath = "./bar";
};
};
testSplitRootExample4 = {
expr = (builtins.tryEval (splitRoot "/foo/bar")).success;
@ -111,7 +127,9 @@ let
expected = false;
};
testHasStorePathPrefixExample5 = {
expr = hasStorePathPrefix (storeDirPath + "/.links/10gg8k3rmbw8p7gszarbk7qyd9jwxhcfq9i6s5i0qikx8alkk4hq");
expr = hasStorePathPrefix (
storeDirPath + "/.links/10gg8k3rmbw8p7gszarbk7qyd9jwxhcfq9i6s5i0qikx8alkk4hq"
);
expected = false;
};
testHasStorePathPrefixExample6 = {
@ -188,11 +206,18 @@ let
# Test examples from the lib.path.subpath.join documentation
testSubpathJoinExample1 = {
expr = subpath.join [ "foo" "bar/baz" ];
expr = subpath.join [
"foo"
"bar/baz"
];
expected = "./foo/bar/baz";
};
testSubpathJoinExample2 = {
expr = subpath.join [ "./foo" "." "bar//./baz/" ];
expr = subpath.join [
"./foo"
"."
"bar//./baz/"
];
expected = "./foo/bar/baz";
};
testSubpathJoinExample3 = {
@ -273,7 +298,11 @@ let
};
testSubpathComponentsExample2 = {
expr = subpath.components "./foo//bar/./baz/";
expected = [ "foo" "bar" "baz" ];
expected = [
"foo"
"bar"
"baz"
];
};
testSubpathComponentsExample3 = {
expr = (builtins.tryEval (subpath.components "/foo")).success;
@ -281,5 +310,7 @@ let
};
};
in
if cases == [] then "Unit tests successful"
else throw "Path unit tests failed: ${lib.generators.toPretty {} cases}"
if cases == [ ] then
"Unit tests successful"
else
throw "Path unit tests failed: ${lib.generators.toPretty { } cases}"

View file

@ -5,15 +5,16 @@ let
shortName = tname;
isSource = false;
};
in lib.mapAttrs (tname: tset: defaultSourceType tname // tset) {
in
lib.mapAttrs (tname: tset: defaultSourceType tname // tset) {
fromSource = {
isSource = true;
};
binaryNativeCode = {};
binaryNativeCode = { };
binaryBytecode = {};
binaryBytecode = { };
binaryFirmware = {};
binaryFirmware = { };
}

View file

@ -1,4 +1,4 @@
/* Functions for copying sources to the Nix store. */
# Functions for copying sources to the Nix store.
{ lib }:
# Tested in lib/tests/sources.sh
@ -23,22 +23,34 @@ let
directories of version control systems, backup files (*~)
and some generated files.
*/
cleanSourceFilter = name: type: let baseName = baseNameOf (toString name); in ! (
# Filter out version control software files/directories
(baseName == ".git" || type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")) ||
# Filter out editor backup / swap files.
lib.hasSuffix "~" baseName ||
match "^\\.sw[a-z]$" baseName != null ||
match "^\\..*\\.sw[a-z]$" baseName != null ||
cleanSourceFilter =
name: type:
let
baseName = baseNameOf (toString name);
in
!(
# Filter out version control software files/directories
(
baseName == ".git"
|| type == "directory" && (baseName == ".svn" || baseName == "CVS" || baseName == ".hg")
)
||
# Filter out editor backup / swap files.
lib.hasSuffix "~" baseName
|| match "^\\.sw[a-z]$" baseName != null
|| match "^\\..*\\.sw[a-z]$" baseName != null
||
# Filter out generated files.
lib.hasSuffix ".o" baseName ||
lib.hasSuffix ".so" baseName ||
# Filter out nix-build result symlinks
(type == "symlink" && lib.hasPrefix "result" baseName) ||
# Filter out sockets and other types of files we can't have in the store.
(type == "unknown")
);
# Filter out generated files.
lib.hasSuffix ".o" baseName
|| lib.hasSuffix ".so" baseName
||
# Filter out nix-build result symlinks
(type == "symlink" && lib.hasPrefix "result" baseName)
||
# Filter out sockets and other types of files we can't have in the store.
(type == "unknown")
);
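# Illustrative calls (the paths are assumed):
#   cleanSourceFilter "/src/.git" "directory"   => false  (filtered out)
#   cleanSourceFilter "/src/main.o" "regular"   => false  (filtered out)
#   cleanSourceFilter "/src/main.c" "regular"   => true   (kept)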
/*
Filters a source tree removing version control files and directories using cleanSourceFilter.
@ -46,7 +58,12 @@ let
Example:
cleanSource ./.
*/
cleanSource = src: cleanSourceWith { filter = cleanSourceFilter; inherit src; };
cleanSource =
src:
cleanSourceWith {
filter = cleanSourceFilter;
inherit src;
};
/*
Like `builtins.filterSource`, except it will compose with itself,
@ -65,7 +82,6 @@ let
builtins.filterSource f (builtins.filterSource g ./.)
# Fails!
*/
cleanSourceWith =
{
@ -80,11 +96,12 @@ let
filter ? _path: _type: true,
# Optional name to use as part of the store path.
# This defaults to `src.name` or otherwise `"source"`.
name ? null
name ? null,
}:
let
orig = toSourceAttributes src;
in fromSourceAttributes {
in
fromSourceAttributes {
inherit (orig) origSrc;
filter = path: type: filter path type && orig.filter path type;
name = if name != null then name else orig.name;
@ -101,31 +118,40 @@ let
let
attrs = toSourceAttributes src;
in
fromSourceAttributes (
attrs // {
filter = path: type:
let
r = attrs.filter path type;
in
builtins.trace "${attrs.name}.filter ${path} = ${boolToString r}" r;
}
) // {
satisfiesSubpathInvariant = src ? satisfiesSubpathInvariant && src.satisfiesSubpathInvariant;
};
fromSourceAttributes (
attrs
// {
filter =
path: type:
let
r = attrs.filter path type;
in
builtins.trace "${attrs.name}.filter ${path} = ${boolToString r}" r;
}
)
// {
satisfiesSubpathInvariant = src ? satisfiesSubpathInvariant && src.satisfiesSubpathInvariant;
};
/*
Filter sources by a list of regular expressions.
Example: src = sourceByRegex ./my-subproject [".*\.py$" "^database.sql$"]
*/
sourceByRegex = src: regexes:
sourceByRegex =
src: regexes:
let
isFiltered = src ? _isLibCleanSourceWith;
origSrc = if isFiltered then src.origSrc else src;
in lib.cleanSourceWith {
filter = (path: type:
let relPath = lib.removePrefix (toString origSrc + "/") (toString path);
in lib.any (re: match re relPath != null) regexes);
in
lib.cleanSourceWith {
filter = (
path: type:
let
relPath = lib.removePrefix (toString origSrc + "/") (toString path);
in
lib.any (re: match re relPath != null) regexes
);
inherit src;
};
@ -145,21 +171,29 @@ let
src:
# A list of file suffix strings
exts:
let filter = name: type:
let base = baseNameOf (toString name);
in type == "directory" || lib.any (ext: lib.hasSuffix ext base) exts;
in cleanSourceWith { inherit filter src; };
let
filter =
name: type:
let
base = baseNameOf (toString name);
in
type == "directory" || lib.any (ext: lib.hasSuffix ext base) exts;
in
cleanSourceWith { inherit filter src; };
pathIsGitRepo = path: (_commitIdFromGitRepoOrError path)?value;
pathIsGitRepo = path: (_commitIdFromGitRepoOrError path) ? value;
/*
Get the commit id of a git repo.
Example: commitIdFromGitRepo <nixpkgs/.git>
*/
commitIdFromGitRepo = path:
let commitIdOrError = _commitIdFromGitRepoOrError path;
in commitIdOrError.value or (throw commitIdOrError.error);
commitIdFromGitRepo =
path:
let
commitIdOrError = _commitIdFromGitRepoOrError path;
in
commitIdOrError.value or (throw commitIdOrError.error);
# Get the commit id of a git repo.
@ -168,55 +202,68 @@ let
# Example: commitIdFromGitRepo <nixpkgs/.git>
# not exported, used for commitIdFromGitRepo
_commitIdFromGitRepoOrError =
let readCommitFromFile = file: path:
let fileName = path + "/${file}";
packedRefsName = path + "/packed-refs";
absolutePath = base: path:
if lib.hasPrefix "/" path
then path
else toString (/. + "${base}/${path}");
in if pathIsRegularFile path
# Resolve git worktrees. See gitrepository-layout(5)
then
let m = match "^gitdir: (.*)$" (lib.fileContents path);
in if m == null
then { error = "File contains no gitdir reference: " + path; }
else
let gitDir = absolutePath (dirOf path) (lib.head m);
commonDir'' = if pathIsRegularFile "${gitDir}/commondir"
then lib.fileContents "${gitDir}/commondir"
else gitDir;
commonDir' = lib.removeSuffix "/" commonDir'';
commonDir = absolutePath gitDir commonDir';
refFile = lib.removePrefix "${commonDir}/" "${gitDir}/${file}";
in readCommitFromFile refFile commonDir
let
readCommitFromFile =
file: path:
let
fileName = path + "/${file}";
packedRefsName = path + "/packed-refs";
absolutePath =
base: path: if lib.hasPrefix "/" path then path else toString (/. + "${base}/${path}");
in
if
pathIsRegularFile path
# Resolve git worktrees. See gitrepository-layout(5)
then
let
m = match "^gitdir: (.*)$" (lib.fileContents path);
in
if m == null then
{ error = "File contains no gitdir reference: " + path; }
else
let
gitDir = absolutePath (dirOf path) (lib.head m);
commonDir'' =
if pathIsRegularFile "${gitDir}/commondir" then lib.fileContents "${gitDir}/commondir" else gitDir;
commonDir' = lib.removeSuffix "/" commonDir'';
commonDir = absolutePath gitDir commonDir';
refFile = lib.removePrefix "${commonDir}/" "${gitDir}/${file}";
in
readCommitFromFile refFile commonDir
else if pathIsRegularFile fileName
# Sometimes git stores the commitId directly in the file but
# sometimes it stores something like: «ref: refs/heads/branch-name»
then
let fileContent = lib.fileContents fileName;
matchRef = match "^ref: (.*)$" fileContent;
in if matchRef == null
then { value = fileContent; }
else readCommitFromFile (lib.head matchRef) path
else if
pathIsRegularFile fileName
# Sometimes git stores the commitId directly in the file but
# sometimes it stores something like: «ref: refs/heads/branch-name»
then
let
fileContent = lib.fileContents fileName;
matchRef = match "^ref: (.*)$" fileContent;
in
if matchRef == null then { value = fileContent; } else readCommitFromFile (lib.head matchRef) path
else if pathIsRegularFile packedRefsName
# Sometimes, the file isn't there at all and has been packed away in the
# packed-refs file, so we have to grep through it:
then
let fileContent = readFile packedRefsName;
matchRef = match "([a-z0-9]+) ${file}";
isRef = s: isString s && (matchRef s) != null;
# there is a bug in libstdc++ leading to stackoverflow for long strings:
# https://github.com/NixOS/nix/issues/2147#issuecomment-659868795
refs = filter isRef (split "\n" fileContent);
in if refs == []
then { error = "Could not find " + file + " in " + packedRefsName; }
else { value = lib.head (matchRef (lib.head refs)); }
else if
pathIsRegularFile packedRefsName
# Sometimes, the file isn't there at all and has been packed away in the
# packed-refs file, so we have to grep through it:
then
let
fileContent = readFile packedRefsName;
matchRef = match "([a-z0-9]+) ${file}";
isRef = s: isString s && (matchRef s) != null;
# there is a bug in libstdc++ leading to stackoverflow for long strings:
# https://github.com/NixOS/nix/issues/2147#issuecomment-659868795
refs = filter isRef (split "\n" fileContent);
in
if refs == [ ] then
{ error = "Could not find " + file + " in " + packedRefsName; }
else
{ value = lib.head (matchRef (lib.head refs)); }
else { error = "Not a .git directory: " + toString path; };
in readCommitFromFile "HEAD";
else
{ error = "Not a .git directory: " + toString path; };
in
readCommitFromFile "HEAD";
pathHasContext = builtins.hasContext or (lib.hasPrefix storeDir);
@ -233,7 +280,8 @@ let
# like class of objects in the wild.
# (Existing ones being: paths, strings, sources and x//{outPath})
# So instead of exposing internals, we build a library of combinator functions.
toSourceAttributes = src:
toSourceAttributes =
src:
let
isFiltered = src ? _isLibCleanSourceWith;
in
@ -247,26 +295,38 @@ let
# fromSourceAttributes : SourceAttrs -> Source
#
# Inverse of toSourceAttributes for Source objects.
fromSourceAttributes = { origSrc, filter, name }:
fromSourceAttributes =
{
origSrc,
filter,
name,
}:
{
_isLibCleanSourceWith = true;
inherit origSrc filter name;
outPath = builtins.path { inherit filter name; path = origSrc; };
outPath = builtins.path {
inherit filter name;
path = origSrc;
};
};
in {
in
{
pathType = lib.warnIf (lib.oldestSupportedReleaseIsAtLeast 2305)
"lib.sources.pathType has been moved to lib.filesystem.pathType."
lib.filesystem.pathType;
pathType =
lib.warnIf (lib.oldestSupportedReleaseIsAtLeast 2305)
"lib.sources.pathType has been moved to lib.filesystem.pathType."
lib.filesystem.pathType;
pathIsDirectory = lib.warnIf (lib.oldestSupportedReleaseIsAtLeast 2305)
"lib.sources.pathIsDirectory has been moved to lib.filesystem.pathIsDirectory."
lib.filesystem.pathIsDirectory;
pathIsDirectory =
lib.warnIf (lib.oldestSupportedReleaseIsAtLeast 2305)
"lib.sources.pathIsDirectory has been moved to lib.filesystem.pathIsDirectory."
lib.filesystem.pathIsDirectory;
pathIsRegularFile = lib.warnIf (lib.oldestSupportedReleaseIsAtLeast 2305)
"lib.sources.pathIsRegularFile has been moved to lib.filesystem.pathIsRegularFile."
lib.filesystem.pathIsRegularFile;
pathIsRegularFile =
lib.warnIf (lib.oldestSupportedReleaseIsAtLeast 2305)
"lib.sources.pathIsRegularFile has been moved to lib.filesystem.pathIsRegularFile."
lib.filesystem.pathIsRegularFile;
inherit
pathIsGitRepo

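For context on the ref-resolution logic reformatted above: `.git/HEAD` holds either a bare commit id (detached HEAD) or an indirection such as `ref: refs/heads/branch-name`, a plain ref file under `.git/refs/` holds the commit id itself, and refs that git has packed away appear as `<commit-id> <ref>` lines in `packed-refs`, which is why the code tries the ref file first and only then greps packed-refs. A minimal usage sketch, assuming the public wrapper around this helper is still `lib.commitIdFromGitRepo` (the wrapper itself is not visible in this truncated hunk):

  # Evaluates to the commit id that HEAD points at, following worktree
  # gitdir references, "ref:" indirections and packed refs as shown above.
  lib.commitIdFromGitRepo ./.git
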
View file

@ -269,6 +269,43 @@ rec {
f:
list: concatStringsSep sep (lib.imap1 f list);
/**
Like [`concatMapStringsSep`](#function-library-lib.strings.concatMapStringsSep)
but takes an attribute set instead of a list.
# Inputs
`sep`
: Separator to add between item strings
`f`
: Function that takes each key and value and returns a string
`attrs`
: Attribute set to map from
# Type
```
concatMapAttrsStringSep :: String -> (String -> Any -> String) -> AttrSet -> String
```
# Examples
:::{.example}
## `lib.strings.concatMapAttrsStringSep` usage example
```nix
concatMapAttrsStringSep "\n" (name: value: "${name}: foo-${value}") { a = "0.1.0"; b = "0.2.0"; }
=> "a: foo-0.1.0\nb: foo-0.2.0"
```
:::
*/
concatMapAttrsStringSep =
sep: f: attrs:
concatStringsSep sep (lib.attrValues (lib.mapAttrs f attrs));
/**
Concatenate a list of strings, adding a newline at the end of each one.
Defined as `concatMapStrings (s: s + "\n")`.

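The new `concatMapAttrsStringSep` above is a thin composition of existing primitives, so its behaviour can be reproduced on a lib that predates it; a sketch matching the doc example, assuming only `lib` is in scope:

  # Equivalent to the new helper:
  # concatMapAttrsStringSep "\n" (name: value: "${name}: foo-${value}") { a = "0.1.0"; b = "0.2.0"; }
  lib.concatStringsSep "\n" (
    lib.attrValues (
      lib.mapAttrs (name: value: "${name}: foo-${value}") {
        a = "0.1.0";
        b = "0.2.0";
      }
    )
  )
  # => "a: foo-0.1.0\nb: foo-0.2.0"
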
View file

@ -491,12 +491,42 @@ rec {
};
# can execute on 32bit chip
gcc_mips32r2_o32 = { gcc = { arch = "mips32r2"; abi = "32"; }; };
gcc_mips32r6_o32 = { gcc = { arch = "mips32r6"; abi = "32"; }; };
gcc_mips64r2_n32 = { gcc = { arch = "mips64r2"; abi = "n32"; }; };
gcc_mips64r6_n32 = { gcc = { arch = "mips64r6"; abi = "n32"; }; };
gcc_mips64r2_64 = { gcc = { arch = "mips64r2"; abi = "64"; }; };
gcc_mips64r6_64 = { gcc = { arch = "mips64r6"; abi = "64"; }; };
gcc_mips32r2_o32 = {
gcc = {
arch = "mips32r2";
abi = "32";
};
};
gcc_mips32r6_o32 = {
gcc = {
arch = "mips32r6";
abi = "32";
};
};
gcc_mips64r2_n32 = {
gcc = {
arch = "mips64r2";
abi = "n32";
};
};
gcc_mips64r6_n32 = {
gcc = {
arch = "mips64r6";
abi = "n32";
};
};
gcc_mips64r2_64 = {
gcc = {
arch = "mips64r2";
abi = "64";
};
};
gcc_mips64r6_64 = {
gcc = {
arch = "mips64r6";
abi = "64";
};
};
# based on:
# https://www.mail-archive.com/qemu-discuss@nongnu.org/msg05179.html
@ -545,27 +575,38 @@ rec {
# This function takes a minimally-valid "platform" and returns an
# attrset containing zero or more additional attrs which should be
# included in the platform in order to further elaborate it.
select = platform:
select =
platform:
# x86
/**/ if platform.isx86 then pc
if platform.isx86 then
pc
# ARM
else if platform.isAarch32 then let
version = platform.parsed.cpu.version or null;
in if version == null then pc
else if lib.versionOlder version "6" then sheevaplug
else if lib.versionOlder version "7" then raspberrypi
else armv7l-hf-multiplatform
else if platform.isAarch32 then
let
version = platform.parsed.cpu.version or null;
in
if version == null then
pc
else if lib.versionOlder version "6" then
sheevaplug
else if lib.versionOlder version "7" then
raspberrypi
else
armv7l-hf-multiplatform
else if platform.isAarch64 then
if platform.isDarwin then apple-m1
else aarch64-multiplatform
if platform.isDarwin then apple-m1 else aarch64-multiplatform
else if platform.isRiscV then riscv-multiplatform
else if platform.isRiscV then
riscv-multiplatform
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.mipsel then (import ./examples.nix { inherit lib; }).mipsel-linux-gnu
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.mipsel then
(import ./examples.nix { inherit lib; }).mipsel-linux-gnu
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.powerpc64le then powernv
else if platform.parsed.cpu == lib.systems.parse.cpuTypes.powerpc64le then
powernv
else { };
else
{ };
}
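
The `select` function reformatted above is what fills in a minimally-parsed platform with the defaults defined earlier in this file; to see which entry a given system picks, one option (assuming this attrset is still exposed as `lib.systems.platforms` and that `lib.systems.elaborate` accepts a system string, neither of which is visible in this diff) is:

  # isx86 holds for x86_64-linux, so this returns the `pc` entry above;
  # "armv7l-linux" instead walks the ARM version checks and ends up at
  # armv7l-hf-multiplatform.
  lib.systems.platforms.select (lib.systems.elaborate "x86_64-linux")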

View file

@ -1,7 +1,10 @@
# Throws an error if any of our lib tests fail.
let tests = [ "misc" "systems" ];
all = builtins.concatLists (map (f: import (./. + "/${f}.nix")) tests);
in if all == []
then null
else throw (builtins.toJSON all)
let
tests = [
"misc"
"systems"
];
all = builtins.concatLists (map (f: import (./. + "/${f}.nix")) tests);
in
if all == [ ] then null else throw (builtins.toJSON all)

View file

@ -1,7 +1,8 @@
{ lib, ... }:
let
inherit (lib) types;
in {
in
{
options = {
name = lib.mkOption {
type = types.str;
@ -23,10 +24,12 @@ in {
default = null;
};
keys = lib.mkOption {
type = types.listOf (types.submodule {
options.fingerprint = lib.mkOption { type = types.str; };
});
default = [];
type = types.listOf (
types.submodule {
options.fingerprint = lib.mkOption { type = types.str; };
}
);
default = [ ];
};
};
}

View file

@ -1,53 +1,76 @@
# to run these tests (and the others)
# nix-build nixpkgs/lib/tests/release.nix
# These tests should stay in sync with the comment in maintainers/maintainers-list.nix
{ # The pkgs used for dependencies for the testing itself
pkgs ? import ../.. {}
, lib ? pkgs.lib
{
# The pkgs used for dependencies for the testing itself
pkgs ? import ../.. { },
lib ? pkgs.lib,
}:
let
checkMaintainer = handle: uncheckedAttrs:
let
prefix = [ "lib" "maintainers" handle ];
checkedAttrs = (lib.modules.evalModules {
inherit prefix;
modules = [
./maintainer-module.nix
{
_file = toString ../../maintainers/maintainer-list.nix;
config = uncheckedAttrs;
}
];
}).config;
checkMaintainer =
handle: uncheckedAttrs:
let
prefix = [
"lib"
"maintainers"
handle
];
checkedAttrs =
(lib.modules.evalModules {
inherit prefix;
modules = [
./maintainer-module.nix
{
_file = toString ../../maintainers/maintainer-list.nix;
config = uncheckedAttrs;
}
];
}).config;
checks = lib.optional (checkedAttrs.github != null && checkedAttrs.githubId == null) ''
echo ${lib.escapeShellArg (lib.showOption prefix)}': If `github` is specified, `githubId` must be too.'
# Calling this too often would hit non-authenticated API limits, but this
# shouldn't happen since such errors will get fixed rather quickly
info=$(curl -sS https://api.github.com/users/${checkedAttrs.github})
id=$(jq -r '.id' <<< "$info")
echo "The GitHub ID for GitHub user ${checkedAttrs.github} is $id:"
echo -e " githubId = $id;\n"
'' ++ lib.optional (checkedAttrs.email == null && checkedAttrs.github == null && checkedAttrs.matrix == null) ''
echo ${lib.escapeShellArg (lib.showOption prefix)}': At least one of `email`, `github` or `matrix` must be specified, so that users know how to reach you.'
'' ++ lib.optional (checkedAttrs.email != null && lib.hasSuffix "noreply.github.com" checkedAttrs.email) ''
echo ${lib.escapeShellArg (lib.showOption prefix)}': If an email address is given, it should allow people to reach you. If you do not want that, you can just provide `github` or `matrix` instead.'
'';
in lib.deepSeq checkedAttrs checks;
checks =
lib.optional (checkedAttrs.github != null && checkedAttrs.githubId == null) ''
echo ${lib.escapeShellArg (lib.showOption prefix)}': If `github` is specified, `githubId` must be too.'
# Calling this too often would hit non-authenticated API limits, but this
# shouldn't happen since such errors will get fixed rather quickly
info=$(curl -sS https://api.github.com/users/${checkedAttrs.github})
id=$(jq -r '.id' <<< "$info")
echo "The GitHub ID for GitHub user ${checkedAttrs.github} is $id:"
echo -e " githubId = $id;\n"
''
++
lib.optional
(checkedAttrs.email == null && checkedAttrs.github == null && checkedAttrs.matrix == null)
''
echo ${lib.escapeShellArg (lib.showOption prefix)}': At least one of `email`, `github` or `matrix` must be specified, so that users know how to reach you.'
''
++
lib.optional (checkedAttrs.email != null && lib.hasSuffix "noreply.github.com" checkedAttrs.email)
''
echo ${lib.escapeShellArg (lib.showOption prefix)}': If an email address is given, it should allow people to reach you. If you do not want that, you can just provide `github` or `matrix` instead.'
'';
in
lib.deepSeq checkedAttrs checks;
missingGithubIds = lib.concatLists (lib.mapAttrsToList checkMaintainer lib.maintainers);
success = pkgs.runCommand "checked-maintainers-success" {} ">$out";
success = pkgs.runCommand "checked-maintainers-success" { } ">$out";
failure = pkgs.runCommand "checked-maintainers-failure" {
nativeBuildInputs = [ pkgs.curl pkgs.jq ];
outputHash = "sha256:${lib.fakeSha256}";
outputHAlgo = "sha256";
outputHashMode = "flat";
SSL_CERT_FILE = "${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt";
} ''
${lib.concatStringsSep "\n" missingGithubIds}
exit 1
'';
in if missingGithubIds == [] then success else failure
failure =
pkgs.runCommand "checked-maintainers-failure"
{
nativeBuildInputs = [
pkgs.curl
pkgs.jq
];
outputHash = "sha256:${lib.fakeSha256}";
outputHAlgo = "sha256";
outputHashMode = "flat";
SSL_CERT_FILE = "${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt";
}
''
${lib.concatStringsSep "\n" missingGithubIds}
exit 1
'';
in
if missingGithubIds == [ ] then success else failure

View file

@ -39,6 +39,7 @@ let
composeManyExtensions
concatLines
concatMapAttrs
concatMapAttrsStringSep
concatMapStrings
concatStrings
concatStringsSep
@ -328,6 +329,11 @@ runTests {
expected = "a,b,c";
};
testConcatMapAttrsStringSepExamples = {
expr = concatMapAttrsStringSep "\n" (name: value: "${name}: foo-${value}") { a = "0.1.0"; b = "0.2.0"; };
expected = "a: foo-0.1.0\nb: foo-0.2.0";
};
testConcatLines = {
expr = concatLines ["a" "b" "c"];
expected = "a\nb\nc\n";
@ -1871,6 +1877,44 @@ runTests {
expected = [ [ "_module" "args" ] [ "foo" ] [ "foo" "<name>" "bar" ] [ "foo" "bar" ] ];
};
testAttrsWithName = {
expr = let
eval = evalModules {
modules = [
{
options = {
foo = lib.mkOption {
type = lib.types.attrsWith {
placeholder = "MyCustomPlaceholder";
elemType = lib.types.submodule {
options.bar = lib.mkOption {
type = lib.types.int;
default = 42;
};
};
};
};
};
}
];
};
opt = eval.options.foo;
in
(opt.type.getSubOptions opt.loc).bar.loc;
expected = [
"foo"
"<MyCustomPlaceholder>"
"bar"
];
};
testShowOptionWithPlaceholder = {
# <name>, *, should not be escaped. It is used as a placeholder by convention.
# Other symbols should be escaped. `{}`
expr = lib.showOption ["<name>" "<myName>" "*" "{foo}"];
expected = "<name>.<myName>.*.\"{foo}\"";
};
testCartesianProductOfEmptySet = {
expr = cartesianProduct {};
expected = [ {} ];

View file
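The `attrsWith` type exercised by testAttrsWithName is what lets a module author replace the conventional `<name>` placeholder in documented option paths; a minimal declaration sketch, assuming `lib` is in scope (the option and attribute names here are invented for illustration):

  # (inside an options = { ... } block)
  # Sub-options of this option render as nodes.<node>.port in generated
  # documentation rather than the default nodes.<name>.port.
  nodes = lib.mkOption {
    type = lib.types.attrsWith {
      placeholder = "node";
      elemType = lib.types.submodule {
        options.port = lib.mkOption { type = lib.types.port; };
      };
    };
    default = { };
  };
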

@ -190,6 +190,9 @@ checkConfigOutput '^420$' config.bare-submodule.deep ./declare-bare-submodule.ni
checkConfigOutput '^2$' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-deep-option.nix ./define-shorthandOnlyDefinesConfig-true.nix
checkConfigError 'The option .bare-submodule.deep. in .*/declare-bare-submodule-deep-option.nix. is already declared in .*/declare-bare-submodule-deep-option-duplicate.nix' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-deep-option.nix ./declare-bare-submodule-deep-option-duplicate.nix
# Check that strMatching can be merged
checkConfigOutput '^"strMatching.*"$' options.sm.type.name ./strMatching-merge.nix
# Check integer types.
# unsigned
checkConfigOutput '^42$' config.value ./declare-int-unsigned-value.nix ./define-value-int-positive.nix
@ -391,6 +394,10 @@ checkConfigError 'The option `mergedLazyNonLazy'\'' in `.*'\'' is already declar
checkConfigOutput '^11$' config.lazyResult ./lazy-attrsWith.nix
checkConfigError 'infinite recursion encountered' config.nonLazyResult ./lazy-attrsWith.nix
# AttrsWith placeholder tests
checkConfigOutput '^"mergedName.<id>.nested"$' config.result ./name-merge-attrsWith-1.nix
checkConfigError 'The option .mergedName. in .*\.nix. is already declared in .*\.nix' config.mergedName ./name-merge-attrsWith-2.nix
# Even with multiple assignments, a type error should be thrown if any of them aren't valid
checkConfigError 'A definition for option .* is not of type .*' \
config.value ./declare-int-unsigned-value.nix ./define-value-list.nix ./define-value-int-positive.nix

View file

@ -1,5 +1,9 @@
{ lib, ... }: {
options.dummy = lib.mkOption { type = lib.types.anything; default = {}; };
{ lib, ... }:
{
options.dummy = lib.mkOption {
type = lib.types.anything;
default = { };
};
freeformType =
let
a = lib.types.attrsOf (lib.types.submodule { options.bar = lib.mkOption { }; });
@ -7,7 +11,8 @@
# modifying types like this breaks type merging.
# This test makes sure that type merging is not performed when only a single declaration exists.
# Don't modify types in practice!
a // {
a
// {
merge = loc: defs: { freeformItems = a.merge loc defs; };
};
config.foo.bar = "ok";

View file

@ -30,7 +30,7 @@ in
# mkAliasOptionModule sets warnings, so this has to be defined.
warnings = mkOption {
internal = true;
default = [];
default = [ ];
type = types.listOf types.str;
example = [ "The `foo' service is deprecated and will go away soon!" ];
description = ''
@ -46,14 +46,16 @@ in
# Disable the aliased option with a high priority so it
# should override the next import.
( { config, lib, ... }:
(
{ config, lib, ... }:
{
enableAlias = mkForce false;
}
)
# Enable the normal (non-aliased) option.
( { config, lib, ... }:
(
{ config, lib, ... }:
{
enable = true;
}

View file

@ -30,7 +30,7 @@ in
# mkAliasOptionModule sets warnings, so this has to be defined.
warnings = mkOption {
internal = true;
default = [];
default = [ ];
type = types.listOf types.str;
example = [ "The `foo' service is deprecated and will go away soon!" ];
description = ''
@ -46,14 +46,16 @@ in
# Disable the aliased option, but with a default (low) priority so it
# should be able to be overridden by the next import.
( { config, lib, ... }:
(
{ config, lib, ... }:
{
enableAlias = mkDefault false;
}
)
# Enable the normal (non-aliased) option.
( { config, lib, ... }:
(
{ config, lib, ... }:
{
enable = true;
}

View file

@ -1,6 +1,7 @@
{ lib, config, ... }: {
{ lib, config, ... }:
{
options.conditionalWorks = lib.mkOption {
default = ! config.value ? foo;
default = !config.value ? foo;
};
config.value.foo = lib.mkIf false "should not be defined";

View file

@ -1,6 +1,7 @@
{ lib, config, ... }: {
{ lib, config, ... }:
{
options.isLazy = lib.mkOption {
default = ! config.value ? foo;
default = !config.value ? foo;
};
config.value.bar = throw "is not lazy";

View file

@ -1,14 +1,26 @@
{ lib, ... }: {
{ lib, ... }:
{
options.value = lib.mkOption {
type = lib.types.lazyAttrsOf lib.types.boolByOr;
};
config.value = {
falseFalse = lib.mkMerge [ false false ];
trueFalse = lib.mkMerge [ true false ];
falseTrue = lib.mkMerge [ false true ];
trueTrue = lib.mkMerge [ true true ];
falseFalse = lib.mkMerge [
false
false
];
trueFalse = lib.mkMerge [
true
false
];
falseTrue = lib.mkMerge [
false
true
];
trueTrue = lib.mkMerge [
true
true
];
};
}

View file

@ -1,4 +1,5 @@
{ lib, ... }: {
{ lib, ... }:
{
options = {
sub = {
nixosOk = lib.mkOption {
@ -40,37 +41,39 @@
];
config = {
_module.freeformType = lib.types.anything;
ok =
lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
];
};
ok = lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
];
};
fail =
lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
./module-class-is-darwin.nix
];
};
fail = lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
./module-class-is-darwin.nix
];
};
fail-anon =
lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
{ _file = "foo.nix#darwinModules.default";
_class = "darwin";
config = {};
imports = [];
}
];
};
fail-anon = lib.evalModules {
class = "nixos";
modules = [
./module-class-is-nixos.nix
{
_file = "foo.nix#darwinModules.default";
_class = "darwin";
config = { };
imports = [ ];
}
];
};
sub.nixosOk = { _class = "nixos"; };
sub.nixosFail = { imports = [ ./module-class-is-darwin.nix ]; };
sub.nixosOk = {
_class = "nixos";
};
sub.nixosFail = {
imports = [ ./module-class-is-darwin.nix ];
};
};
}

View file

@ -1,13 +1,13 @@
{ lib, ... }:
let
deathtrapArgs = lib.mapAttrs
(k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute.")
(lib.functionArgs lib.mkOption);
deathtrapArgs = lib.mapAttrs (
k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute."
) (lib.functionArgs lib.mkOption);
in
{
options.value = lib.mkOption {
type = lib.types.attrsOf lib.types.str;
default = {};
default = { };
};
options.testing-laziness-so-don't-read-me = lib.mkOption deathtrapArgs;
}

View file

@ -1,25 +1,27 @@
{ lib, ... }:
let
submod = { ... }: {
options = {
enable = lib.mkOption {
default = false;
example = true;
type = lib.types.bool;
description = ''
Some descriptive text
'';
submod =
{ ... }:
{
options = {
enable = lib.mkOption {
default = false;
example = true;
type = lib.types.bool;
description = ''
Some descriptive text
'';
};
};
};
};
in
{
options = {
attrsOfSub = lib.mkOption {
default = {};
example = {};
default = { };
example = { };
type = lib.types.attrsOf (lib.types.submodule [ submod ]);
description = ''
Some descriptive text

View file

@ -8,7 +8,7 @@ in
modules = [ ];
shorthandOnlyDefinesConfig = config.shorthandOnlyDefinesConfig;
};
default = {};
default = { };
};
# config-dependent options: not recommended, but useful for making this test parameterized

View file

@ -1,4 +1,5 @@
{ lib, ... }: {
{ lib, ... }:
{
options.value = lib.mkOption {
type = lib.types.either lib.types.int lib.types.str;
};

View file

@ -1,6 +1,7 @@
{ lib, ... }: {
{ lib, ... }:
{
options.value = lib.mkOption {
type = lib.types.lazyAttrsOf (lib.types.str // { emptyValue.value = "empty"; });
default = {};
default = { };
};
}

View file

@ -1,9 +1,11 @@
{ lib, ... }: let
{ lib, ... }:
let
pkgs.hello = {
type = "derivation";
pname = "hello";
};
in {
in
{
options = {
package = lib.mkPackageOption pkgs "hello" { };
@ -46,8 +48,14 @@ in {
pkgsText = "myPkgs";
};
packageFromOtherSet = let myPkgs = {
hello = pkgs.hello // { pname = "hello-other"; };
}; in lib.mkPackageOption myPkgs "hello" { };
packageFromOtherSet =
let
myPkgs = {
hello = pkgs.hello // {
pname = "hello-other";
};
};
in
lib.mkPackageOption myPkgs "hello" { };
};
}

View file

@ -1,4 +1,5 @@
{ lib, ... }: {
{ lib, ... }:
{
options.value = lib.mkOption {
type = lib.types.oneOf [
lib.types.int

View file

@ -3,7 +3,9 @@
{
options.set = lib.mkOption {
default = { };
example = { a = 1; };
example = {
a = 1;
};
type = lib.types.attrsOf lib.types.int;
description = ''
Some descriptive text

View file

@ -1,25 +1,32 @@
{ lib, ... }: {
{ lib, ... }:
{
options.submodule = lib.mkOption {
inherit (lib.evalModules {
modules = [
{
options.inner = lib.mkOption {
type = lib.types.bool;
default = false;
};
}
];
}) type;
default = {};
inherit
(lib.evalModules {
modules = [
{
options.inner = lib.mkOption {
type = lib.types.bool;
default = false;
};
}
];
})
type
;
default = { };
};
config.submodule = lib.mkMerge [
({ lib, ... }: {
options.outer = lib.mkOption {
type = lib.types.bool;
default = false;
};
})
(
{ lib, ... }:
{
options.outer = lib.mkOption {
type = lib.types.bool;
default = false;
};
}
)
{
inner = true;
outer = true;

View file

@ -1,4 +1,5 @@
{ lib, ... }: {
{ lib, ... }:
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [
@ -10,16 +11,19 @@
}
];
};
default = {};
default = { };
};
config.submodule = lib.mkMerge [
({ lib, ... }: {
options.outer = lib.mkOption {
type = lib.types.bool;
default = false;
};
})
(
{ lib, ... }:
{
options.outer = lib.mkOption {
type = lib.types.bool;
default = false;
};
}
)
{
inner = true;
outer = true;

View file

@ -1,13 +1,15 @@
{ lib, ... }: let
{ lib, ... }:
let
sub.options.config = lib.mkOption {
type = lib.types.bool;
default = false;
};
in {
in
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [ sub ];
};
default = {};
default = { };
};
}

View file

@ -1,11 +1,12 @@
{ lib, ... }: {
{ lib, ... }:
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [
./declare-enable.nix
];
};
default = {};
default = { };
};
config.submodule = ./define-enable.nix;

View file

@ -1,14 +1,16 @@
{ lib, ... }: let
{ lib, ... }:
let
sub.options.config = lib.mkOption {
type = lib.types.bool;
default = false;
};
in {
in
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [ sub ];
shorthandOnlyDefinesConfig = true;
};
default = {};
default = { };
};
}

View file

@ -1,17 +1,21 @@
{ lib, ... }: {
{ lib, ... }:
{
options.submodule = lib.mkOption {
type = lib.types.submoduleWith {
modules = [
({ lib, ... }: {
options.foo = lib.mkOption {
default = lib.foo;
};
})
(
{ lib, ... }:
{
options.foo = lib.mkOption {
default = lib.foo;
};
}
)
];
specialArgs.lib = lib // {
foo = "foo";
};
};
default = {};
default = { };
};
}

View file

@ -1,9 +1,10 @@
{ lib, moduleType, ... }:
let inherit (lib) mkOption types;
let
inherit (lib) mkOption types;
in
{
options.variants = mkOption {
type = types.lazyAttrsOf moduleType;
default = {};
default = { };
};
}

View file

@ -1,8 +1,15 @@
{ lib ? import ../.., modules ? [] }:
{
lib ? import ../..,
modules ? [ ],
}:
{
inherit (lib.evalModules {
inherit modules;
specialArgs.modulesPath = ./.;
}) config options;
inherit
(lib.evalModules {
inherit modules;
specialArgs.modulesPath = ./.;
})
config
options
;
}

View file

@ -1,7 +1,19 @@
{ config, lib, ... }:
let
inherit (lib) types mkOption setDefaultModuleLocation evalModules;
inherit (types) deferredModule lazyAttrsOf submodule str raw enum;
inherit (lib)
types
mkOption
setDefaultModuleLocation
evalModules
;
inherit (types)
deferredModule
lazyAttrsOf
submodule
str
raw
enum
;
in
{
options = {
@ -13,7 +25,8 @@ in
};
};
config = {
deferred = { ... }:
deferred =
{ ... }:
# this should be an attrset, so this fails
true;
};

View file

@ -1,7 +1,14 @@
{ lib, ... }:
let
inherit (lib) types mkOption setDefaultModuleLocation;
inherit (types) deferredModule lazyAttrsOf submodule str raw enum;
inherit (types)
deferredModule
lazyAttrsOf
submodule
str
raw
enum
;
in
{
imports = [
@ -9,27 +16,37 @@ in
# - nodes.<name>
# - default
# where all nodes include the default
({ config, ... }: {
_file = "generic.nix";
options.nodes = mkOption {
type = lazyAttrsOf (submodule { imports = [ config.default ]; });
default = {};
};
options.default = mkOption {
type = deferredModule;
default = { };
description = ''
Module that is included in all nodes.
'';
};
})
(
{ config, ... }:
{
_file = "generic.nix";
options.nodes = mkOption {
type = lazyAttrsOf (submodule {
imports = [ config.default ];
});
default = { };
};
options.default = mkOption {
type = deferredModule;
default = { };
description = ''
Module that is included in all nodes.
'';
};
}
)
{
_file = "default-1.nix";
default = { config, ... }: {
options.settingsDict = lib.mkOption { type = lazyAttrsOf str; default = {}; };
options.bottom = lib.mkOption { type = enum []; };
};
default =
{ config, ... }:
{
options.settingsDict = lib.mkOption {
type = lazyAttrsOf str;
default = { };
};
options.bottom = lib.mkOption { type = enum [ ]; };
};
}
{
@ -49,9 +66,11 @@ in
{
_file = "nodes-foo-c-is-a.nix";
nodes.foo = { config, ... }: {
settingsDict.c = config.settingsDict.a;
};
nodes.foo =
{ config, ... }:
{
settingsDict.c = config.settingsDict.a;
};
}
];

View file

@ -1,3 +1,3 @@
{
attrsOfSub.bar = {};
attrsOfSub.bar = { };
}

View file

@ -1,3 +1,3 @@
{
attrsOfSub.foo = {};
attrsOfSub.foo = { };
}

View file

@ -1,15 +1,24 @@
{ config, ... }: {
class = { "just" = "data"; };
{ config, ... }:
{
class = {
"just" = "data";
};
a = "one";
b = "two";
meta = "meta";
_module.args.result =
let r = builtins.removeAttrs config [ "_module" ];
in builtins.trace (builtins.deepSeq r r) (r == {
a = "one";
b = "two";
class = { "just" = "data"; };
meta = "meta";
});
let
r = builtins.removeAttrs config [ "_module" ];
in
builtins.trace (builtins.deepSeq r r) (
r == {
a = "one";
b = "two";
class = {
"just" = "data";
};
meta = "meta";
}
);
}

View file

@ -5,12 +5,13 @@
{
# Always defined, but the value depends on the presence of an option.
config.set = {
value = if options ? set.enable then 360 else 7;
}
# Only define if possible.
// lib.optionalAttrs (options ? set.enable) {
enable = true;
};
config.set =
{
value = if options ? set.enable then 360 else 7;
}
# Only define if possible.
// lib.optionalAttrs (options ? set.enable) {
enable = true;
};
}

View file

@ -5,12 +5,13 @@
{
# Always defined, but the value depends on the presence of an option.
config = {
value = if options ? enable then 360 else 7;
}
# Only define if possible.
// lib.optionalAttrs (options ? enable) {
enable = true;
};
config =
{
value = if options ? enable then 360 else 7;
}
# Only define if possible.
// lib.optionalAttrs (options ? enable) {
enable = true;
};
}

View file

@ -1,3 +1,4 @@
{ config, ... }: {
{ config, ... }:
{
settingsDict.a = config.settingsDict.b;
}

View file

@ -1,3 +1,3 @@
{
value = [];
value = [ ];
}

View file

@ -1,8 +1,11 @@
{ lib, ... }: {
{ lib, ... }:
{
imports = [{
value = lib.mkDefault "def";
}];
imports = [
{
value = lib.mkDefault "def";
}
];
value = lib.mkMerge [
(lib.mkIf false "nope")

View file

@ -1,5 +1,6 @@
{ config, lib, ... }:
let inherit (lib) types mkOption attrNames;
let
inherit (lib) types mkOption attrNames;
in
{
options = {
@ -16,7 +17,11 @@ in
variants.foo.variants.bar.attrs.z = 1;
variants.foo.variants.foo.attrs.c = 3;
resultFoo = lib.concatMapStringsSep " " toString (attrNames config.variants.foo.attrs);
resultFooBar = lib.concatMapStringsSep " " toString (attrNames config.variants.foo.variants.bar.attrs);
resultFooFoo = lib.concatMapStringsSep " " toString (attrNames config.variants.foo.variants.foo.attrs);
resultFooBar = lib.concatMapStringsSep " " toString (
attrNames config.variants.foo.variants.bar.attrs
);
resultFooFoo = lib.concatMapStringsSep " " toString (
attrNames config.variants.foo.variants.foo.attrs
);
};
}

View file

@ -1,5 +1,8 @@
{ ... }:
{
disabledModules = [ "define-enable.nix" "declare-enable.nix" ];
disabledModules = [
"define-enable.nix"
"declare-enable.nix"
];
}

View file

@ -2,11 +2,13 @@
let
inherit (lib) mkOption types;
moduleWithKey = { config, ... }: {
config = {
enable = true;
moduleWithKey =
{ config, ... }:
{
config = {
enable = true;
};
};
};
in
{
imports = [

View file

@ -18,7 +18,7 @@ in
moduleWithKey
];
};
default = {};
default = { };
};
negative = mkOption {
type = types.submodule {
@ -28,7 +28,7 @@ in
];
disabledModules = [ moduleWithKey ];
};
default = {};
default = { };
};
};
}

View file

@ -18,7 +18,7 @@ in
moduleWithKey
];
};
default = {};
default = { };
};
negative = mkOption {
type = types.submodule {
@ -28,7 +28,7 @@ in
];
disabledModules = [ 123 ];
};
default = {};
default = { };
};
};
}

View file

@ -1,9 +1,23 @@
{ lib, ... }: {
{ lib, ... }:
{
imports = [
(lib.doRename { from = ["a" "b"]; to = ["c" "d" "e"]; warn = true; use = x: x; visible = true; })
(lib.doRename {
from = [
"a"
"b"
];
to = [
"c"
"d"
"e"
];
warn = true;
use = x: x;
visible = true;
})
];
options = {
c.d.e = lib.mkOption {};
c.d.e = lib.mkOption { };
};
config = {
a.b = 1234;

View file

@ -4,7 +4,12 @@
services.foo.enable = true;
services.foo.bar = "baz";
result =
assert config.services.foos == { "" = { bar = "baz"; }; };
assert
config.services.foos == {
"" = {
bar = "baz";
};
};
true;
};
}

View file

@ -3,7 +3,12 @@
config = {
services.foos."".bar = "baz";
result =
assert config.services.foos == { "" = { bar = "baz"; }; };
assert
config.services.foos == {
"" = {
bar = "baz";
};
};
assert config.services.foo.bar == "baz";
true;
};

View file

@ -3,7 +3,7 @@
config = {
result =
assert config.services.foos == { };
assert ! options.services.foo.bar.isDefined;
assert !options.services.foo.bar.isDefined;
true;
};
}

View file

@ -13,25 +13,41 @@
*/
{ config, lib, ... }:
let
inherit (lib) mkOption mkEnableOption types doRename;
inherit (lib)
mkOption
mkEnableOption
types
doRename
;
in
{
options = {
services.foo.enable = mkEnableOption "foo";
services.foos = mkOption {
type = types.attrsOf (types.submodule {
options = {
bar = mkOption { type = types.str; };
};
});
type = types.attrsOf (
types.submodule {
options = {
bar = mkOption { type = types.str; };
};
}
);
default = { };
};
result = mkOption {};
result = mkOption { };
};
imports = [
(doRename {
from = [ "services" "foo" "bar" ];
to = [ "services" "foos" "" "bar" ];
from = [
"services"
"foo"
"bar"
];
to = [
"services"
"foos"
""
"bar"
];
visible = true;
warn = false;
use = x: x;

View file

@ -1,11 +1,25 @@
{ lib, config, ... }: {
{ lib, config, ... }:
{
imports = [
(lib.doRename { from = ["a" "b"]; to = ["c" "d" "e"]; warn = true; use = x: x; visible = true; })
(lib.doRename {
from = [
"a"
"b"
];
to = [
"c"
"d"
"e"
];
warn = true;
use = x: x;
visible = true;
})
];
options = {
warnings = lib.mkOption { type = lib.types.listOf lib.types.str; };
c.d.e = lib.mkOption {};
result = lib.mkOption {};
c.d.e = lib.mkOption { };
result = lib.mkOption { };
};
config = {
a.b = 1234;

View file

@ -2,7 +2,7 @@
A basic documentation generating module.
Declares and defines a `docs` option, suitable for making assertions about
the extraction "phase" of documentation generation.
*/
*/
{ lib, options, ... }:
let
@ -11,7 +11,7 @@ let
length
mkOption
types
;
;
traceListSeq = l: v: lib.foldl' (a: b: lib.traceSeq b a) v l;
@ -24,18 +24,12 @@ in
All options to be rendered, without any visibility filtering applied.
'';
};
config.docs =
lib.zipAttrsWith
(name: values:
if length values > 1 then
traceListSeq values
abort "Multiple options with the same name: ${name}"
else
assert length values == 1;
head values
)
(map
(opt: { ${opt.name} = opt; })
(lib.optionAttrSetToDocList options)
);
config.docs = lib.zipAttrsWith (
name: values:
if length values > 1 then
traceListSeq values abort "Multiple options with the same name: ${name}"
else
assert length values == 1;
head values
) (map (opt: { ${opt.name} = opt; }) (lib.optionAttrSetToDocList options));
}

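The `config.docs` merge above leans on `lib.zipAttrsWith` grouping by attribute name, so a duplicate option name shows up as a values list longer than one; a tiny standalone illustration of that shape, with invented option names:

  # zipAttrsWith calls the function once per distinct name with all values
  # collected for that name, so duplicates are easy to detect.
  lib.zipAttrsWith (name: values: values) [
    { "services.foo.enable" = "first declaration"; }
    { "services.foo.enable" = "second declaration"; }
  ]
  # => { "services.foo.enable" = [ "first declaration" "second declaration" ]; }
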
View file

@ -1,7 +1,8 @@
{ lib, ... }:
let
inherit (lib) types;
in {
in
{
options = {
int = lib.mkOption {
@ -20,7 +21,7 @@ in {
type = types.lazyAttrsOf (types.nullOr types.int);
};
submodule = lib.mkOption {
type = types.lazyAttrsOf (types.submodule {});
type = types.lazyAttrsOf (types.submodule { });
};
};

View file

@ -5,8 +5,8 @@ in
{
options.sub = lib.mkOption {
type = lib.types.submodule {
wrong2 = mkOption {};
wrong2 = mkOption { };
};
default = {};
default = { };
};
}

View file

@ -1,6 +1,7 @@
{ lib
, extendModules
, ...
{
lib,
extendModules,
...
}:
let
@ -17,9 +18,10 @@ in
options.sub = mkOption {
default = { };
type = types.submodule (
{ config
, extendModules
, ...
{
config,
extendModules,
...
}:
{
options.value = mkOption {
@ -30,11 +32,14 @@ in
default = { };
inherit
(extendModules {
modules = [{
specialisation = mkOverride 0 { };
}];
modules = [
{
specialisation = mkOverride 0 { };
}
];
})
type;
type
;
};
}
);
@ -43,6 +48,5 @@ in
{ config.sub.value = 1; }
];
}

View file

@ -1,3 +1,4 @@
{ lib, ... }: {
{ lib, ... }:
{
freeformType = with lib.types; attrsOf (either str (attrsOf str));
}

View file

@ -1,3 +1,4 @@
{ lib, ... }: {
{ lib, ... }:
{
freeformType = with lib.types; lazyAttrsOf (either str (lazyAttrsOf str));
}

View file

@ -1,8 +1,8 @@
{ lib, ... }:
let
deathtrapArgs = lib.mapAttrs
(k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute.")
(lib.functionArgs lib.mkOption);
deathtrapArgs = lib.mapAttrs (
k: _: throw "The module system is too strict, accessing an unused option's ${k} mkOption-attribute."
) (lib.functionArgs lib.mkOption);
in
{
options.nest.foo = lib.mkOption {

View file

@ -1,4 +1,5 @@
{ lib, config, ... }: {
{ lib, config, ... }:
{
options.foo = lib.mkOption {
type = lib.types.nullOr lib.types.str;
default = null;

Some files were not shown because too many files have changed in this diff.