Project import generated by Copybara.

GitOrigin-RevId: b73c2221a46c13557b1b3be9c2070cc42cf01eb3
Default email 2024-07-27 08:49:29 +02:00
parent 4b5714ed08
commit f34ce41345
8298 changed files with 181363 additions and 126827 deletions


@ -121,3 +121,35 @@ c759efa5e7f825913f9a69ef20f025f50f56dc4d
# python3Packages: format with nixfmt
59b1aef59071cae6e87859dc65de973d2cc595c0
# treewide description changes (#317959)
bf995e3641950f4183c1dd9010349263dfa0123b
755b915a158c9d588f08e9b08da9f7f3422070cc
f8c4a98e8e138e21353a2c33b90db3359f539b37
# vscode-extensions.*: format with nixfmt (RFC 166)
7bf9febfa6271012b1ef86647a3a06f06875fdcf
# remove uses of mdDoc (#303841)
1a24330f792c8625746d07d842290e6fd95ae6f9
acd0e3898feb321cb9a71a0fd376f1157d0f4553
1b28414d2886c57343864326dbb745a634d3e37d
6afb255d976f85f3359e4929abd6f5149c323a02
# azure-cli: move to by-name, nixfmt #325950
96cd538b68bd1d0a0a37979356d669abbba32ebc
# poptracker: format with nixfmt-rfc-style (#326697)
ff5c8f6cc3d1f2e017e86d50965c14b71f00567b
# mangal: format with nixfmt-rfc-style #328284
3bb5e993cac3a6e1c3056d2bc9bf43eb2c7a5951
# pico-sdk: switch to finalAttrs (#329438)
8946018b0391ae594d167f1e58497b18de068968
# ollama: format with nixfmt-rfc-style (#329353)
bdfde18037f8d9f9b641a4016c8ada4dc4cbf856
# nixos/ollama: format with nixfmt-rfc-style (#329561)
246d1ee533810ac1946d863bbd9de9b525818d56


@ -14,7 +14,7 @@
# CI
/.github/workflows @NixOS/Security @Mic92 @zowoq
/.github/workflows/check-nix-format.yml @infinisil
/ci @infinisil
/ci @infinisil @NixOS/Security
# Development support
/.editorconfig @Mic92 @zowoq
@ -53,7 +53,7 @@
/pkgs/build-support/setup-hooks/auto-patchelf.py @layus
/pkgs/pkgs-lib @infinisil
## Format generators/serializers
/pkgs/pkgs-lib/formats/libconfig @ckiee @h7x4
/pkgs/pkgs-lib/formats/libconfig @h7x4
/pkgs/pkgs-lib/formats/hocon @h7x4
# pkgs/by-name
@ -108,6 +108,9 @@ nixos/modules/installer/tools/nix-fallback-paths.nix @raitobezarius
# NixOS QEMU virtualisation
/nixos/virtualisation/qemu-vm.nix @raitobezarius
# ACME
/nixos/modules/security/acme @arianvp @flokli @aanderse # no merge permission: @m1cr0man @emilazy
# Systemd
/nixos/modules/system/boot/systemd.nix @NixOS/systemd
/nixos/modules/system/boot/systemd @NixOS/systemd
@ -129,8 +132,11 @@ nixos/modules/installer/tools/nix-fallback-paths.nix @raitobezarius
/pkgs/common-updater/scripts/update-source-version @jtojnar
# Python-related code and docs
/doc/languages-frameworks/python.section.md @mweinelt
/pkgs/development/interpreters/python/hooks
/doc/languages-frameworks/python.section.md @mweinelt @natsukium
/maintainers/scripts/update-python-libraries @natsukium
/pkgs/development/interpreters/python @natsukium
/pkgs/top-level/python-packages.nix @natsukium
/pkgs/top-level/release-python.nix @natsukium
# Haskell
/doc/languages-frameworks/haskell.section.md @sternenseemann @maralorn @ncfavier
@ -225,18 +231,15 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
/nixos/modules/services/networking/ntp @thoughtpolice
# Network
/pkgs/tools/networking/octodns @Janik-Haag
/pkgs/tools/networking/kea/default.nix @mweinelt
/pkgs/tools/networking/babeld/default.nix @mweinelt
/nixos/modules/services/networking/babeld.nix @mweinelt
/nixos/modules/services/networking/kea.nix @mweinelt
/nixos/modules/services/networking/knot.nix @mweinelt
nixos/modules/services/networking/networkmanager.nix @Janik-Haag
/nixos/modules/services/monitoring/prometheus/exporters/kea.nix @mweinelt
/nixos/tests/babeld.nix @mweinelt
/nixos/tests/kea.nix @mweinelt
/nixos/tests/knot.nix @mweinelt
/nixos/tests/networking/* @Janik-Haag
# Web servers
/doc/packages/nginx.section.md @raitobezarius
@ -301,7 +304,11 @@ nixos/modules/services/networking/networkmanager.nix @Janik-Haag
/pkgs/build-support/make-hardcode-gsettings-patch @jtojnar
# Cinnamon
/pkgs/desktops/cinnamon @mkg20001
/pkgs/by-name/ci/cinnamon-* @mkg20001
/pkgs/by-name/cj/cjs @mkg20001
/pkgs/by-name/mu/muffin @mkg20001
/pkgs/by-name/ne/nemo @mkg20001
/pkgs/by-name/ne/nemo-* @mkg20001
# nim
/pkgs/development/compilers/nim @ehmry
@ -322,9 +329,9 @@ pkgs/by-name/fo/forgejo/package.nix @adamcstephens @bendlas @emilylange
/doc/languages-frameworks/dotnet.section.md @corngood
# Node.js
/pkgs/build-support/node/build-npm-package @lilyinstarlight @winterqt
/pkgs/build-support/node/fetch-npm-deps @lilyinstarlight @winterqt
/doc/languages-frameworks/javascript.section.md @lilyinstarlight @winterqt
/pkgs/build-support/node/build-npm-package @winterqt
/pkgs/build-support/node/fetch-npm-deps @winterqt
/doc/languages-frameworks/javascript.section.md @winterqt
# environment.noXlibs option aka NoX
/nixos/modules/config/no-x-libs.nix @SuperSandro2000
@ -367,7 +374,6 @@ nixos/tests/lxd/ @adamcstephens
pkgs/by-name/in/incus/ @adamcstephens
pkgs/by-name/lx/lxc* @adamcstephens
pkgs/by-name/lx/lxd* @adamcstephens
pkgs/os-specific/linux/lxc/ @adamcstephens
# ExpidusOS, Flutter
/pkgs/development/compilers/flutter @RossComputerGuy


@ -12,9 +12,14 @@
- any:
- changed-files:
- any-glob-to-any-file:
- pkgs/desktops/cinnamon/**/*
- nixos/modules/services/x11/desktop-managers/cinnamon.nix
- nixos/tests/cinnamon.nix
- nixos/tests/cinnamon-wayland.nix
- pkgs/by-name/ci/cinnamon-*/**/*
- pkgs/by-name/cj/cjs/**/*
- pkgs/by-name/mu/muffin/**/*
- pkgs/by-name/ne/nemo/**/*
- pkgs/by-name/ne/nemo-*/**/*
"6.topic: dotnet":
- any:
@ -34,9 +39,9 @@
- nixos/modules/services/editors/emacs.nix
- nixos/modules/services/editors/emacs.xml
- nixos/tests/emacs-daemon.nix
- pkgs/applications/editors/emacs/build-support/**/*
- pkgs/applications/editors/emacs/elisp-packages/**/*
- pkgs/applications/editors/emacs/**/*
- pkgs/build-support/emacs/**/*
- pkgs/top-level/emacs-packages.nix
"6.topic: Enlightenment DE":
@ -74,6 +79,13 @@
- lib/systems/flake-systems.nix
- nixos/modules/config/nix-flakes.nix
"6.topic: flutter":
- any:
- changed-files:
- any-glob-to-any-file:
- pkgs/build-support/flutter/*.nix
- pkgs/development/compilers/flutter/**/*.nix
"6.topic: GNOME":
- any:
- changed-files:
@ -149,7 +161,7 @@
- any:
- changed-files:
- any-glob-to-any-file:
- pkgs/development/compilers/llvm/*
- pkgs/development/compilers/llvm/**/*
"6.topic: lua":
- any:
@ -338,6 +350,7 @@
# *developed in this repo*;
# - not individual tests
# - not packages for test frameworks
- pkgs/build-support/testers/**
- nixos/lib/testing/**
- nixos/lib/test-driver/**
- nixos/tests/nixos-test-driver/**


@ -24,7 +24,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Create backport PRs
uses: korthout/backport-action@ef20d86abccbac3ee3a73cb2efbdc06344c390e5 # v2.5.0
uses: korthout/backport-action@bd410d37cdcae80be6d969823ff5a225fe5c833f # v3.0.2
with:
# Config README: https://github.com/korthout/backport-action#backport-action
copy_labels_pattern: 'severity:\ssecurity'


@ -19,7 +19,7 @@ jobs:
# we don't limit this action to only NixOS repo since the checks are cheap and useful developer feedback
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
- uses: cachix/cachix-action@ad2ddac53f961de1989924296a1f236fcfbaa4fc # v15
with:
# This cache is for the nixpkgs repo checks and should not be trusted or used elsewhere.


@ -58,7 +58,7 @@ jobs:
if [[ "$mergeable" == "null" ]]; then
if (( retryCount == 0 )); then
echo "Not retrying anymore, probably GitHub is having internal issues"
echo "Not retrying anymore. It's likely that GitHub is having internal issues: check https://www.githubstatus.com/"
exit 1
else
(( retryCount -= 1 )) || true
@ -94,7 +94,7 @@ jobs:
base=$(mktemp -d)
git worktree add "$base" "$(git rev-parse HEAD^1)"
echo "base=$base" >> "$GITHUB_ENV"
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
if: env.mergedSha
- name: Fetching the pinned tool
if: env.mergedSha


@ -20,7 +20,7 @@ jobs:
sparse-checkout: |
lib
maintainers
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
with:
# explicitly enable sandbox
extra_nix_config: sandbox = true


@ -7,18 +7,29 @@ name: Check that Nix files are formatted
on:
pull_request_target:
# See the comment at the same location in ./check-by-name.yml
types: [opened, synchronize, reopened, edited]
permissions:
contents: read
jobs:
nixos:
runs-on: ubuntu-latest
if: github.repository_owner == 'NixOS'
if: "github.repository_owner == 'NixOS' && !contains(github.event.pull_request.title, '[skip treewide]')"
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
# Fetches the merge commit and its parents
fetch-depth: 2
- name: Checking out base branch
run: |
base=$(mktemp -d)
baseRev=$(git rev-parse HEAD^1)
git worktree add "$base" "$baseRev"
echo "baseRev=$baseRev" >> "$GITHUB_ENV"
echo "base=$base" >> "$GITHUB_ENV"
- name: Get Nixpkgs revision for nixfmt
run: |
# pin to a commit from nixpkgs-unstable to avoid e.g. building nixfmt
@ -26,7 +37,7 @@ jobs:
# This should not be a URL, because it would allow PRs to run arbitrary code in CI!
rev=$(jq -r .rev ci/pinned-nixpkgs.json)
echo "url=https://github.com/NixOS/nixpkgs/archive/$rev.tar.gz" >> "$GITHUB_ENV"
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
with:
# explicitly enable sandbox
extra_nix_config: sandbox = true
@ -34,43 +45,44 @@ jobs:
- name: Install nixfmt
run: "nix-env -f '<nixpkgs>' -iAP nixfmt-rfc-style"
- name: Check that Nix files are formatted according to the RFC style
# Each environment variable beginning with NIX_FMT_PATHS_ is a list of
# paths to check with nixfmt.
env:
NIX_FMT_PATHS_BSD: pkgs/os-specific/bsd
NIX_FMT_PATHS_MPVSCRIPTS: pkgs/applications/video/mpv/scripts
# Format paths related to the Nixpkgs CUDA ecosystem.
NIX_FMT_PATHS_CUDA: |-
pkgs/development/cuda-modules
pkgs/test/cuda
pkgs/top-level/cuda-packages.nix
NIX_FMT_PATHS_MAINTAINERS: |-
maintainers/maintainer-list.nix
maintainers/team-list.nix
NIX_FMT_PATHS_K3S: |-
nixos/modules/services/cluster/k3s
nixos/tests/k3s
pkgs/applications/networking/cluster/k3s
NIX_FMT_PATHS_VSCODE_EXTS: pkgs/applications/editors/vscode/extensions
NIX_FMT_PATHS_PHP_PACKAGES: pkgs/development/php-packages
NIX_FMT_PATHS_BUILD_SUPPORT_PHP: pkgs/build-support/php
# Iterate over all environment variables beginning with NIX_FMT_PATHS_.
run: |
unformattedPaths=()
for env_var in "${!NIX_FMT_PATHS_@}"; do
readarray -t paths <<< "${!env_var}"
if [[ "${paths[*]}" == "" ]]; then
echo "Error: $env_var is empty."
exit 1
unformattedFiles=()
# TODO: Make this more parallel
# Loop through all Nix files touched by the PR
while readarray -d '' -n 2 entry && (( ${#entry[@]} != 0 )); do
type=${entry[0]}
file=${entry[1]}
case $type in
A*)
source=""
dest=$file
;;
M*)
source=$file
dest=$file
;;
C*|R*)
source=$file
read -r -d '' dest
;;
*)
echo "Ignoring file $file with type $type"
continue
esac
# Ignore files that weren't already formatted
if [[ -n "$source" ]] && ! nixfmt --check ${{ env.base }}/"$source" 2>/dev/null; then
echo "Ignoring file $file because it's not formatted in the base commit"
elif ! nixfmt --check "$dest"; then
unformattedFiles+=("$file")
fi
echo "Checking paths: ${paths[@]}"
if ! nixfmt --check "${paths[@]}"; then
unformattedPaths+=("${paths[@]}")
fi
done
if (( "${#unformattedPaths[@]}" > 0 )); then
echo "Some required Nix files are not properly formatted"
done < <(git diff -z --name-status ${{ env.baseRev }} -- '*.nix')
if (( "${#unformattedFiles[@]}" > 0 )); then
echo "Some new/changed Nix files are not properly formatted"
echo "Please run the following in \`nix-shell\`:"
echo "nixfmt ${unformattedPaths[*]@Q}"
echo "nixfmt ${unformattedFiles[*]@Q}"
exit 1
fi


@ -13,7 +13,7 @@ jobs:
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
- name: Build shell
run: nix-build shell.nix
@ -24,6 +24,6 @@ jobs:
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
- name: Build shell
run: nix-build shell.nix


@ -28,7 +28,7 @@ jobs:
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
with:
# nixpkgs commit is pinned so that it doesn't break
# editorconfig-checker 2.4.0


@ -18,7 +18,7 @@ jobs:
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
with:
# explicitly enable sandbox
extra_nix_config: sandbox = true


@ -20,7 +20,7 @@ jobs:
with:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
with:
# explicitly enable sandbox
extra_nix_config: sandbox = true


@ -29,7 +29,7 @@ jobs:
# pull_request_target checks out the base branch by default
ref: refs/pull/${{ github.event.pull_request.number }}/merge
if: ${{ env.CHANGED_FILES && env.CHANGED_FILES != '' }}
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
with:
nix_path: nixpkgs=channel:nixpkgs-unstable
- name: Parse all changed or added nix files


@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- uses: cachix/install-nix-action@8887e596b4ee1134dae06b98d573bd674693f47c # v26
- uses: cachix/install-nix-action@ba0dd844c9180cbf77aa72a116d6fbc515d0e87b # v27
with:
nix_path: nixpkgs=channel:nixpkgs-unstable
- name: setup
@ -46,7 +46,7 @@ jobs:
run: |
git clean -f
- name: create PR
uses: peter-evans/create-pull-request@9153d834b60caba6d51c9b9510b087acf9f33f83 # v6.0.4
uses: peter-evans/create-pull-request@c5a7806660adbe173f04e3e038b0ccdcd758773c # v6.1.0
with:
body: |
Automatic update by [update-terraform-providers](https://github.com/NixOS/nixpkgs/blob/master/.github/workflows/update-terraform-providers.yml) action.


@ -7,6 +7,7 @@
.idea/
.nixos-test-history
.vscode/
.helix/
outputs/
result-*
result
@ -19,6 +20,8 @@ tags
/doc/manual.pdf
/source/
.version-suffix
.direnv
.envrc
.DS_Store
.mypy_cache


@ -379,10 +379,12 @@ See [this section][branch] to know when to use the release branches.
[staging]: #staging
The staging workflow exists to batch Hydra builds of many packages together.
It is coordinated in the [Staging room](https://matrix.to/#/#staging:nixos.org) on Matrix.
It works by directing commits that cause [mass rebuilds][mass-rebuild] to a separate `staging` branch that isn't directly built by Hydra.
Regularly, the `staging` branch is _manually_ merged into a `staging-next` branch to be built by Hydra using the [`nixpkgs:staging-next` jobset](https://hydra.nixos.org/jobset/nixpkgs/staging-next).
The `staging-next` branch should then only receive direct commits in order to fix Hydra builds.
The `staging-next` branch should then only receive changes that fix Hydra builds;
**for anything else, ask the [Staging room](https://matrix.to/#/#staging:nixos.org) first**.
Once it is verified that there are no major regressions, it is merged into `master` using [a pull request](https://github.com/NixOS/nixpkgs/pulls?q=head%3Astaging-next).
This is done manually in order to ensure it's a good use of Hydra's computing resources.
By keeping the `staging-next` branch separate from `staging`, this batching does not block developers from merging changes into `staging`.
@ -555,138 +557,11 @@ Names of files and directories should be in lowercase, with dashes between words
### Syntax
- Use 2 spaces of indentation per indentation level in Nix expressions, 4 spaces in shell scripts.
- Do not use tab characters, i.e. configure your editor to use soft tabs. For instance, use `(setq-default indent-tabs-mode nil)` in Emacs. Everybody has different tab settings so it's asking for trouble.
- Set up [editorconfig](https://editorconfig.org/) for your editor, such that [the settings](./.editorconfig) are automatically applied.
- Use `lowerCamelCase` for variable names, not `UpperCamelCase`. Note, this rule does not apply to package attribute names, which instead follow the rules in [package naming](./pkgs/README.md#package-naming).
- Function calls with attribute set arguments are written as
```nix
foo {
arg = <...>;
}
```
not
```nix
foo
{
arg = <...>;
}
```
Also fine is
```nix
foo { arg = <...>; }
```
if it's a short call.
- In attribute sets or lists that span multiple lines, the attribute names or list elements should be aligned:
```nix
{
# A long list.
list = [
elem1
elem2
elem3
];
# A long attribute set.
attrs = {
attr1 = short_expr;
attr2 =
if true then big_expr else big_expr;
};
# Combined
listOfAttrs = [
{
attr1 = 3;
attr2 = "fff";
}
{
attr1 = 5;
attr2 = "ggg";
}
];
}
```
- Short lists or attribute sets can be written on one line:
```nix
{
# A short list.
list = [ elem1 elem2 elem3 ];
# A short set.
attrs = { x = 1280; y = 1024; };
}
```
- Breaking in the middle of a function argument can give hard-to-read code, like
```nix
someFunction { x = 1280;
y = 1024; } otherArg
yetAnotherArg
```
(especially if the argument is very large, spanning multiple lines).
Better:
```nix
someFunction
{ x = 1280; y = 1024; }
otherArg
yetAnotherArg
```
or
```nix
let res = { x = 1280; y = 1024; };
in someFunction res otherArg yetAnotherArg
```
- The bodies of functions, asserts, and withs are not indented to prevent a lot of superfluous indentation levels, i.e.
```nix
{ arg1, arg2 }:
assert system == "i686-linux";
stdenv.mkDerivation { /* ... */ }
```
not
```nix
{ arg1, arg2 }:
assert system == "i686-linux";
stdenv.mkDerivation { /* ... */ }
```
- Function formal arguments are written as:
```nix
{ arg1, arg2, arg3 }: { /* ... */ }
```
but if they don't fit on one line they're written as:
```nix
{ arg1, arg2, arg3
, arg4
# Some comment...
, argN
}: { }
```
- New files must be formatted by entering the `nix-shell` from the repository root and running `nixfmt`.
- Functions should list their expected arguments as precisely as possible. That is, write


@ -1,4 +1,4 @@
{
"rev": "cfb89a95f19bea461fc37228dc4d07b22fe617c2",
"sha256": "1yhsacvry6j8r02lk70p9dphjpi8lpzgq2qay8hiy4nqlys0mrch"
"rev": "521d48afa9ae596930a95325529df27fa7135ff5",
"sha256": "0a1pa5azw990narsfipdli1wng4nc3vhvrp00hb8v1qfchcq7dc9"
}


@ -10,7 +10,8 @@ repo=https://github.com/nixos/nixpkgs
branch=nixpkgs-unstable
file=$SCRIPT_DIR/pinned-nixpkgs.json
rev=$(git ls-remote "$repo" refs/heads/"$branch" | cut -f1)
defaultRev=$(git ls-remote "$repo" refs/heads/"$branch" | cut -f1)
rev=${1:-$defaultRev}
sha256=$(nix-prefetch-url --unpack "$repo/archive/$rev.tar.gz" --name source)
jq -n --arg rev "$rev" --arg sha256 "$sha256" '$ARGS.named' | tee /dev/stderr > $file


@ -293,7 +293,7 @@ Though this is not shown in the rendered documentation on nixos.org.
#### Figures
To define a referencable figure use the following fencing:
To define a referenceable figure use the following fencing:
```markdown
::: {.figure #nixos-logo}


@ -20,6 +20,7 @@ There is no uniform interface for build helpers.
build-helpers/fetchers.chapter.md
build-helpers/trivial-build-helpers.chapter.md
build-helpers/testers.chapter.md
build-helpers/dev-shell-tools.chapter.md
build-helpers/special.md
build-helpers/images.md
hooks/index.md


@ -0,0 +1,29 @@
# Development Shell helpers {#chap-devShellTools}
The `nix-shell` command has popularized the concept of transient shell environments for development or testing purposes.
<!--
We should try to document the product, not its development process in the Nixpkgs reference manual,
but *something* needs to be said to provide context for this library.
This is the most future-proof sentence I could come up with while Nix itself does not yet make use of this.
Relevant is the current status of the devShell attribute "project": https://github.com/NixOS/nix/issues/7501
-->
However, `nix-shell` is not the only way to create such environments, and even `nix-shell` itself can indirectly benefit from this library.
This library provides a set of functions that help create such environments.
## `devShellTools.valueToString` {#sec-devShellTools-valueToString}
Converts Nix values to strings in the way the [`derivation` built-in function](https://nix.dev/manual/nix/2.23/language/derivations) does.
:::{.example}
## `valueToString` usage examples
```nix
devShellTools.valueToString (builtins.toFile "foo" "bar")
=> "/nix/store/...-foo"
```
```nix
devShellTools.valueToString false
=> ""
```
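Booleans are coerced the way the `derivation` built-in coerces them; as an additional illustrative example (not part of the original documentation), `true` becomes `"1"`:
```nix
devShellTools.valueToString true
=> "1"
```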


@ -869,7 +869,7 @@ It produces packages that cannot be built automatically.
fetchtorrent {
config = { peer-limit-global = 100; };
url = "magnet:?xt=urn:btih:dd8255ecdc7ca55fb0bbf81323d87062db1f6d1c";
sha256 = "";
hash = "";
}
```


@ -185,6 +185,19 @@ Similarly, if you encounter errors similar to `Error_Protocol ("certificate has
_Default value:_ `"gz"`.\
_Possible values:_ `"none"`, `"gz"`, `"zstd"`.
`includeNixDB` (Boolean; _optional_)
: Populate the nix database in the image with the dependencies of `copyToRoot`.
The main purpose is to be able to use nix commands in the container.
:::{.caution}
Be careful since this doesn't work well in combination with `fromImage`. In particular, in a multi-layered image, only the Nix paths from the lower image will be in the database.
This also neglects to register the store paths that are pulled into the image as a dependency of one of the other values, but aren't a dependency of `copyToRoot`.
:::
_Default value:_ `false`.
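The following is a minimal sketch of enabling this option (not taken from the original text; `dockerTools.buildImage` and the copied packages are assumptions chosen purely for illustration, and the attribute works the same for whichever builder this section documents):
```nix
dockerTools.buildImage {
  name = "nix-enabled-shell";
  tag = "latest";
  copyToRoot = pkgs.buildEnv {
    name = "image-root";
    paths = [ pkgs.nix pkgs.bashInteractive pkgs.coreutils ];
    pathsToLink = [ "/bin" ];
  };
  # Register the closure of copyToRoot in the image's Nix database,
  # so that nix commands inside the container know about these paths.
  includeNixDB = true;
}
```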
`contents` **DEPRECATED**
: This attribute is deprecated, and users are encouraged to use `copyToRoot` instead.
@ -574,6 +587,19 @@ This allows the function to produce reproducible images.
_Default value:_ `true`
`includeNixDB` (Boolean; _optional_)
: Populate the nix database in the image with the dependencies of `copyToRoot`.
The main purpose is to be able to use nix commands in the container.
:::{.caution}
Be careful since this doesn't work well in combination with `fromImage`. In particular, in a multi-layered image, only the Nix paths from the lower image will be in the database.
This also neglects to register the store paths that are pulled into the image as a dependency of one of the other values, but aren't a dependency of `copyToRoot`.
:::
_Default value:_ `false`.
`passthru` (Attribute Set; _optional_)
: Use this to pass any attributes as [`passthru`](#chap-passthru) for the resulting derivation.


@ -120,9 +120,10 @@ It has two modes:
Checks that the output from running a command contains the specified version string in it as a whole word.
Although simplistic, this test assures that the main program can run.
While there's no substitute for a real test case, it does catch dynamic linking errors and such.
It also provides some protection against accidentally building the wrong version, for example when using an "old" hash in a fixed-output derivation.
NOTE: In most cases, [`versionCheckHook`](#versioncheckhook) should be preferred, but this function is provided and documented here anyway. The motivation for adding either test would be:
- Catch dynamic linking errors and such and missing environment variables that should be added by wrapping.
- Probable protection against accidentally building the wrong version, for example when using an "old" hash in a fixed-output derivation.
By default, the command to be run will be inferred from the given `package` attribute:
it will check `meta.mainProgram` first, and fall back to `pname` or `name`.
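As a minimal sketch of how this is typically wired up (using `hello` purely as an illustrative package):
```nix
{
  passthru.tests.version = testers.testVersion {
    package = hello;
    # `command` and `version` are inferred from the package by default;
    # set them explicitly only if the defaults are wrong:
    # command = "hello --version";
    # version = "2.12.1";
  };
}
```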


@ -241,7 +241,7 @@ Write a text file to the Nix store.
`allowSubstitutes` (Bool, _optional_)
: Whether to allow substituting from a binary cache.
Passed through to [`allowSubsitutes`](https://nixos.org/manual/nix/stable/language/advanced-attributes#adv-attr-allowSubstitutes) of the underlying call to `builtins.derivation`.
Passed through to [`allowSubstitutes`](https://nixos.org/manual/nix/stable/language/advanced-attributes#adv-attr-allowSubstitutes) of the underlying call to `builtins.derivation`.
It defaults to `false`, as running the derivation's simple `builder` executable locally is assumed to be faster than network operations.
Set it to true if the `checkPhase` step is expensive.
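For instance, a hypothetical sketch (the validator command is made up) of a file whose check is expensive enough that substitution is worthwhile:
```nix
writeTextFile {
  name = "validated-config";
  text = ''
    key = value
  '';
  # Assumption for illustration: an expensive validation step.
  # $target is the path the builder writes the text to.
  checkPhase = ''
    some-expensive-validator "$target"
  '';
  # Prefer pulling the result from a binary cache over re-running the check.
  allowSubstitutes = true;
  preferLocalBuild = false;
}
```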
@ -453,7 +453,7 @@ writeTextFile {
### `writeScriptBin` {#trivial-builder-writeScriptBin}
Write a script within a `bin` subirectory of a directory in the Nix store.
Write a script within a `bin` subdirectory of a directory in the Nix store.
This is for consistency with the convention of software packages placing executables under `bin`.
`writeScriptBin` takes the following arguments:
@ -468,7 +468,7 @@ This is for consistency with the convention of software packages placing executa
The created file is marked as executable.
The file's contents will be put into `/nix/store/<store path>/bin/<name>`.
The store path will include the the name, and it will be a directory.
The store path will include the name, and it will be a directory.
::: {.example #ex-writeScriptBin}
# Usage of `writeScriptBin`


@ -1,4 +0,0 @@
{
outputPath = "share/doc/nixpkgs";
indexPath = "manual.html";
}


@ -1,196 +1,6 @@
{ pkgs ? (import ./.. { }), nixpkgs ? { }}:
let
inherit (pkgs) lib;
inherit (lib) hasPrefix removePrefix;
fs = lib.fileset;
{
pkgs ? (import ./.. { }),
nixpkgs ? { },
}:
common = import ./common.nix;
lib-docs = import ./doc-support/lib-function-docs.nix {
inherit pkgs nixpkgs;
libsets = [
{ name = "asserts"; description = "assertion functions"; }
{ name = "attrsets"; description = "attribute set functions"; }
{ name = "strings"; description = "string manipulation functions"; }
{ name = "versions"; description = "version string functions"; }
{ name = "trivial"; description = "miscellaneous functions"; }
{ name = "fixedPoints"; baseName = "fixed-points"; description = "explicit recursion functions"; }
{ name = "lists"; description = "list manipulation functions"; }
{ name = "debug"; description = "debugging functions"; }
{ name = "options"; description = "NixOS / nixpkgs option handling"; }
{ name = "path"; description = "path functions"; }
{ name = "filesystem"; description = "filesystem functions"; }
{ name = "fileset"; description = "file set functions"; }
{ name = "sources"; description = "source filtering functions"; }
{ name = "cli"; description = "command-line serialization functions"; }
{ name = "generators"; description = "functions that create file formats from nix data structures"; }
{ name = "gvariant"; description = "GVariant formatted string serialization functions"; }
{ name = "customisation"; description = "Functions to customise (derivation-related) functions, derivatons, or attribute sets"; }
{ name = "meta"; description = "functions for derivation metadata"; }
{ name = "derivations"; description = "miscellaneous derivation-specific functions"; }
];
};
epub = pkgs.runCommand "manual.epub" {
nativeBuildInputs = with pkgs; [ libxslt zip ];
epub = ''
<book xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
version="5.0"
xml:id="nixpkgs-manual">
<info>
<title>Nixpkgs Manual</title>
<subtitle>Version ${pkgs.lib.version}</subtitle>
</info>
<chapter>
<title>Temporarily unavailable</title>
<para>
The Nixpkgs manual is currently not available in EPUB format,
please use the <link xlink:href="https://nixos.org/nixpkgs/manual">HTML manual</link>
instead.
</para>
<para>
If you've used the EPUB manual in the past and it has been useful to you, please
<link xlink:href="https://github.com/NixOS/nixpkgs/issues/237234">let us know</link>.
</para>
</chapter>
</book>
'';
passAsFile = [ "epub" ];
} ''
mkdir scratch
xsltproc \
--param chapter.autolabel 0 \
--nonet \
--output scratch/ \
${pkgs.docbook_xsl_ns}/xml/xsl/docbook/epub/docbook.xsl \
$epubPath
echo "application/epub+zip" > mimetype
zip -0Xq "$out" mimetype
cd scratch && zip -Xr9D "$out" *
'';
# NB: This file describes the Nixpkgs manual, which happens to use module
# docs infra originally developed for NixOS.
optionsDoc = pkgs.nixosOptionsDoc {
inherit (pkgs.lib.evalModules {
modules = [ ../pkgs/top-level/config.nix ];
class = "nixpkgsConfig";
}) options;
documentType = "none";
transformOptions = opt:
opt // {
declarations =
map
(decl:
if hasPrefix (toString ../..) (toString decl)
then
let subpath = removePrefix "/" (removePrefix (toString ../.) (toString decl));
in { url = "https://github.com/NixOS/nixpkgs/blob/master/${subpath}"; name = subpath; }
else decl)
opt.declarations;
};
};
in pkgs.stdenv.mkDerivation {
name = "nixpkgs-manual";
nativeBuildInputs = with pkgs; [
nixos-render-docs
];
src = fs.toSource {
root = ./.;
fileset = fs.unions [
(fs.fileFilter (file:
file.hasExt "md"
|| file.hasExt "md.in"
) ./.)
./style.css
./anchor-use.js
./anchor.min.js
./manpage-urls.json
];
};
postPatch = ''
ln -s ${optionsDoc.optionsJSON}/share/doc/nixos/options.json ./config-options.json
'';
pythonInterpreterTable = pkgs.callPackage ./doc-support/python-interpreter-table.nix {};
passAsFile = [ "pythonInterpreterTable" ];
buildPhase = ''
substituteInPlace ./languages-frameworks/python.section.md --subst-var-by python-interpreter-table "$(<"$pythonInterpreterTablePath")"
cat \
./functions/library.md.in \
${lib-docs}/index.md \
> ./functions/library.md
substitute ./manual.md.in ./manual.md \
--replace-fail '@MANUAL_VERSION@' '${pkgs.lib.version}'
mkdir -p out/media
mkdir -p out/highlightjs
cp -t out/highlightjs \
${pkgs.documentation-highlighter}/highlight.pack.js \
${pkgs.documentation-highlighter}/LICENSE \
${pkgs.documentation-highlighter}/mono-blue.css \
${pkgs.documentation-highlighter}/loader.js
cp -t out ./style.css ./anchor.min.js ./anchor-use.js
nixos-render-docs manual html \
--manpage-urls ./manpage-urls.json \
--revision ${pkgs.lib.trivial.revisionWithDefault (pkgs.rev or "master")} \
--stylesheet style.css \
--stylesheet highlightjs/mono-blue.css \
--script ./highlightjs/highlight.pack.js \
--script ./highlightjs/loader.js \
--script ./anchor.min.js \
--script ./anchor-use.js \
--toc-depth 1 \
--section-toc-depth 1 \
manual.md \
out/index.html
'';
installPhase = ''
dest="$out/${common.outputPath}"
mkdir -p "$(dirname "$dest")"
mv out "$dest"
mv "$dest/index.html" "$dest/${common.indexPath}"
cp ${epub} "$dest/nixpkgs-manual.epub"
mkdir -p $out/nix-support/
echo "doc manual $dest ${common.indexPath}" >> $out/nix-support/hydra-build-products
echo "doc manual $dest nixpkgs-manual.epub" >> $out/nix-support/hydra-build-products
'';
passthru.tests.manpage-urls = with pkgs; testers.invalidateFetcherByDrvHash
({ name ? "manual_check-manpage-urls"
, script
, urlsFile
}: runCommand name {
nativeBuildInputs = [
cacert
(python3.withPackages (p: with p; [
aiohttp
rich
structlog
]))
];
outputHash = "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="; # Empty output
} ''
python3 ${script} ${urlsFile}
touch $out
'') {
script = ./tests/manpage-urls.py;
urlsFile = ./manpage-urls.json;
};
}
pkgs.nixpkgs-manual.override { inherit nixpkgs; }


@ -0,0 +1,54 @@
# To build this derivation, run `nix-build -A nixpkgs-manual.epub`
{
lib,
runCommand,
docbook_xsl_ns,
libxslt,
zip,
}:
runCommand "manual.epub"
{
nativeBuildInputs = [
libxslt
zip
];
epub = ''
<book xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
version="5.0"
xml:id="nixpkgs-manual">
<info>
<title>Nixpkgs Manual</title>
<subtitle>Version ${lib.version}</subtitle>
</info>
<chapter>
<title>Temporarily unavailable</title>
<para>
The Nixpkgs manual is currently not available in EPUB format,
please use the <link xlink:href="https://nixos.org/nixpkgs/manual">HTML manual</link>
instead.
</para>
<para>
If you've used the EPUB manual in the past and it has been useful to you, please
<link xlink:href="https://github.com/NixOS/nixpkgs/issues/237234">let us know</link>.
</para>
</chapter>
</book>
'';
passAsFile = [ "epub" ];
}
''
mkdir scratch
xsltproc \
--param chapter.autolabel 0 \
--nonet \
--output scratch/ \
${docbook_xsl_ns}/xml/xsl/docbook/epub/docbook.xsl \
$epubPath
echo "application/epub+zip" > mimetype
zip -0Xq -b "$TMPDIR" "$out" mimetype
cd scratch && zip -Xr9D -b "$TMPDIR" "$out" *
''


@ -1,27 +1,122 @@
# Generates the documentation for library functions via nixdoc.
# To build this derivation, run `nix-build -A nixpkgs-manual.lib-docs`
{
lib,
stdenvNoCC,
nixdoc,
nix,
nixpkgs ? { },
libsets ? [
{
name = "asserts";
description = "assertion functions";
}
{
name = "attrsets";
description = "attribute set functions";
}
{
name = "strings";
description = "string manipulation functions";
}
{
name = "versions";
description = "version string functions";
}
{
name = "trivial";
description = "miscellaneous functions";
}
{
name = "fixedPoints";
baseName = "fixed-points";
description = "explicit recursion functions";
}
{
name = "lists";
description = "list manipulation functions";
}
{
name = "debug";
description = "debugging functions";
}
{
name = "options";
description = "NixOS / nixpkgs option handling";
}
{
name = "path";
description = "path functions";
}
{
name = "filesystem";
description = "filesystem functions";
}
{
name = "fileset";
description = "file set functions";
}
{
name = "sources";
description = "source filtering functions";
}
{
name = "cli";
description = "command-line serialization functions";
}
{
name = "generators";
description = "functions that create file formats from nix data structures";
}
{
name = "gvariant";
description = "GVariant formatted string serialization functions";
}
{
name = "customisation";
description = "Functions to customise (derivation-related) functions, derivations, or attribute sets";
}
{
name = "meta";
description = "functions for derivation metadata";
}
{
name = "derivations";
description = "miscellaneous derivation-specific functions";
}
],
}:
{ pkgs, nixpkgs, libsets }:
with pkgs;
let
locationsJSON = import ./lib-function-locations.nix { inherit pkgs nixpkgs libsets; };
in
stdenv.mkDerivation {
stdenvNoCC.mkDerivation {
name = "nixpkgs-lib-docs";
src = ../../lib;
buildInputs = [ nixdoc ];
src = lib.fileset.toSource {
root = ../..;
fileset = ../../lib;
};
buildInputs = [
nixdoc
nix
];
installPhase = ''
export NIX_STATE_DIR=$(mktemp -d)
nix-instantiate --eval --strict --json ${./lib-function-locations.nix} \
--arg nixpkgsPath "./." \
--argstr revision ${nixpkgs.rev or "master"} \
--argstr libsetsJSON ${lib.escapeShellArg (builtins.toJSON libsets)} \
> locations.json
function docgen {
name=$1
baseName=$2
description=$3
# TODO: wrap lib.$name in <literal>, make nixdoc not escape it
if [[ -e "../lib/$baseName.nix" ]]; then
nixdoc -c "$name" -d "lib.$name: $description" -l ${locationsJSON} -f "$baseName.nix" > "$out/$name.md"
if [[ -e "lib/$baseName.nix" ]]; then
nixdoc -c "$name" -d "lib.$name: $description" -l locations.json -f "lib/$baseName.nix" > "$out/$name.md"
else
nixdoc -c "$name" -d "lib.$name: $description" -l ${locationsJSON} -f "$baseName/default.nix" > "$out/$name.md"
nixdoc -c "$name" -d "lib.$name: $description" -l locations.json -f "lib/$baseName/default.nix" > "$out/$name.md"
fi
echo "$out/$name.md" >> "$out/index.md"
}
@ -32,9 +127,16 @@ stdenv.mkDerivation {
```{=include=} sections auto-id-prefix=auto-generated
EOF
${lib.concatMapStrings ({ name, baseName ? name, description }: ''
docgen ${name} ${baseName} ${lib.escapeShellArg description}
'') libsets}
${lib.concatMapStrings (
{
name,
baseName ? name,
description,
}:
''
docgen ${name} ${baseName} ${lib.escapeShellArg description}
''
) libsets}
echo '```' >> "$out/index.md"
'';


@ -1,13 +1,14 @@
{ pkgs, nixpkgs ? { }, libsets }:
{ nixpkgsPath, revision, libsetsJSON }:
let
revision = pkgs.lib.trivial.revisionWithDefault (nixpkgs.rev or "master");
lib = import (nixpkgsPath + "/lib");
libsets = builtins.fromJSON libsetsJSON;
libDefPos = prefix: set:
builtins.concatMap
(name: [{
name = builtins.concatStringsSep "." (prefix ++ [name]);
location = builtins.unsafeGetAttrPos name set;
}] ++ nixpkgsLib.optionals
}] ++ lib.optionals
(builtins.length prefix == 0 && builtins.isAttrs set.${name})
(libDefPos (prefix ++ [name]) set.${name})
) (builtins.attrNames set);
@ -20,8 +21,6 @@ let
})
(builtins.map (x: x.name) libsets);
nixpkgsLib = pkgs.lib;
flattenedLibSubset = { subsetname, functions }:
builtins.map
(fn: {
@ -38,13 +37,13 @@ let
substr = builtins.substring prefixLen filenameLen filename;
in substr;
removeNixpkgs = removeFilenamePrefix (builtins.toString pkgs.path);
removeNixpkgs = removeFilenamePrefix (builtins.toString nixpkgsPath);
liblocations =
builtins.filter
(elem: elem.value != null)
(nixpkgsLib.lists.flatten
(locatedlibsets nixpkgsLib));
(lib.lists.flatten
(locatedlibsets lib));
fnLocationRelative = { name, value }:
{
@ -72,4 +71,4 @@ let
relativeLocs);
in
pkgs.writeText "locations.json" (builtins.toJSON jsonLocs)
jsonLocs


@ -0,0 +1,28 @@
# To build this derivation, run `nix-build -A nixpkgs-manual.optionsDoc`
{ lib, nixosOptionsDoc }:
let
modules = lib.evalModules {
modules = [ ../../pkgs/top-level/config.nix ];
class = "nixpkgsConfig";
};
root = toString ../..;
transformDeclaration =
decl:
let
declStr = toString decl;
subpath = lib.removePrefix "/" (lib.removePrefix root declStr);
in
assert lib.hasPrefix root declStr;
{
url = "https://github.com/NixOS/nixpkgs/blob/master/${subpath}";
name = subpath;
};
in
nixosOptionsDoc {
inherit (modules) options;
documentType = "none";
transformOptions = opt: opt // { declarations = map transformDeclaration opt.declarations; };
}


@ -0,0 +1,106 @@
# This file describes the Nixpkgs manual, which happens to use module docs infra originally
# developed for NixOS. To build this derivation, run `nix-build -A nixpkgs-manual`.
#
{
lib,
stdenvNoCC,
callPackage,
documentation-highlighter,
nixos-render-docs,
nixpkgs ? { },
}:
stdenvNoCC.mkDerivation (
finalAttrs:
let
inherit (finalAttrs.finalPackage.optionsDoc) optionsJSON;
inherit (finalAttrs.finalPackage) epub lib-docs pythonInterpreterTable;
in
{
name = "nixpkgs-manual";
nativeBuildInputs = [ nixos-render-docs ];
src = lib.fileset.toSource {
root = ../.;
fileset = lib.fileset.unions [
(lib.fileset.fileFilter (file: file.hasExt "md" || file.hasExt "md.in") ../.)
../style.css
../anchor-use.js
../anchor.min.js
../manpage-urls.json
];
};
postPatch = ''
ln -s ${optionsJSON}/share/doc/nixos/options.json ./config-options.json
'';
buildPhase = ''
substituteInPlace ./languages-frameworks/python.section.md \
--subst-var-by python-interpreter-table "$(<"${pythonInterpreterTable}")"
cat \
./functions/library.md.in \
${lib-docs}/index.md \
> ./functions/library.md
substitute ./manual.md.in ./manual.md \
--replace-fail '@MANUAL_VERSION@' '${lib.version}'
mkdir -p out/media
mkdir -p out/highlightjs
cp -t out/highlightjs \
${documentation-highlighter}/highlight.pack.js \
${documentation-highlighter}/LICENSE \
${documentation-highlighter}/mono-blue.css \
${documentation-highlighter}/loader.js
cp -t out ./style.css ./anchor.min.js ./anchor-use.js
nixos-render-docs manual html \
--manpage-urls ./manpage-urls.json \
--revision ${lib.trivial.revisionWithDefault (nixpkgs.rev or "master")} \
--stylesheet style.css \
--stylesheet highlightjs/mono-blue.css \
--script ./highlightjs/highlight.pack.js \
--script ./highlightjs/loader.js \
--script ./anchor.min.js \
--script ./anchor-use.js \
--toc-depth 1 \
--section-toc-depth 1 \
manual.md \
out/index.html
'';
installPhase = ''
dest="$out/share/doc/nixpkgs"
mkdir -p "$(dirname "$dest")"
mv out "$dest"
mv "$dest/index.html" "$dest/manual.html"
cp ${epub} "$dest/nixpkgs-manual.epub"
mkdir -p $out/nix-support/
echo "doc manual $dest manual.html" >> $out/nix-support/hydra-build-products
echo "doc manual $dest nixpkgs-manual.epub" >> $out/nix-support/hydra-build-products
'';
passthru = {
lib-docs = callPackage ./lib-function-docs.nix { inherit nixpkgs; };
epub = callPackage ./epub.nix { };
optionsDoc = callPackage ./options-doc.nix { };
pythonInterpreterTable = callPackage ./python-interpreter-table.nix { };
shell = callPackage ../../pkgs/tools/nix/web-devmode.nix {
buildArgs = "./.";
open = "/share/doc/nixpkgs/manual.html";
};
tests.manpage-urls = callPackage ../tests/manpage-urls.nix { };
};
}
)


@ -1,14 +1,15 @@
# For debugging, run in this directory:
# nix eval --impure --raw --expr 'import ./python-interpreter-table.nix {}'
{ pkgs ? (import ../.. { config = { }; overlays = []; }) }:
# To build this derivation, run `nix-build -A nixpkgs-manual.pythonInterpreterTable`
{
lib,
writeText,
pkgs,
pythonInterpreters,
}:
let
lib = pkgs.lib;
inherit (lib.attrsets) attrNames filterAttrs;
inherit (lib.lists) elem filter map naturalSort reverseList;
inherit (lib.strings) concatStringsSep;
isPythonInterpreter = name:
/* NB: Package names that don't follow the regular expression:
isPythonInterpreter =
name:
/*
NB: Package names that don't follow the regular expression:
- `python-cosmopolitan` is not part of `pkgs.pythonInterpreters`.
- `_prebuilt` interpreters are used for bootstrapping internally.
- `python3Minimal` contains python packages, left behind conservatively.
@ -16,7 +17,8 @@ let
*/
(lib.strings.match "(pypy|python)([[:digit:]]*)" name) != null;
interpreterName = pname:
interpreterName =
pname:
let
cuteName = {
cpython = "CPython";
@ -26,16 +28,16 @@ let
in
"${cuteName.${interpreter.implementation}} ${interpreter.pythonVersion}";
interpreters = reverseList (naturalSort (
filter isPythonInterpreter (attrNames pkgs.pythonInterpreters)
));
interpreters = lib.reverseList (
lib.naturalSort (lib.filter isPythonInterpreter (lib.attrNames pythonInterpreters))
);
aliases = pname:
attrNames (
filterAttrs (name: value:
isPythonInterpreter name
&& name != pname
&& interpreterName name == interpreterName pname
aliases =
pname:
lib.attrNames (
lib.filterAttrs (
name: value:
isPythonInterpreter name && name != pname && interpreterName name == interpreterName pname
) pkgs
);
@ -45,18 +47,17 @@ let
interpreter = interpreterName pname;
}) interpreters;
toMarkdown = data:
toMarkdown =
data:
let
line = package: ''
| ${package.pname} | ${join ", " package.aliases or [ ]} | ${package.interpreter} |
| ${package.pname} | ${lib.concatStringsSep ", " package.aliases or [ ]} | ${package.interpreter} |
'';
in
join "" (map line data);
join = lib.strings.concatStringsSep;
lib.concatStringsSep "" (map line data);
in
''
writeText "python-interpreter-table.md" ''
| Package | Aliases | Interpreter |
|---------|---------|------------|
${toMarkdown result}


@ -29,6 +29,7 @@ scons.section.md
tetex-tex-live.section.md
unzip.section.md
validatePkgConfig.section.md
versionCheckHook.section.md
waf.section.md
zig.section.md
xcbuild.section.md


@ -4,7 +4,7 @@ This hook helps with installing manpages and shell completion files. It exposes
The `installManPage` function takes one or more paths to manpages to install. The manpages must have a section suffix, and may optionally be compressed (with `.gz` suffix). This function will place them into the correct `share/man/man<section>/` directory, in [`outputMan`](#outputman).
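A brief sketch of a typical `installManPage` call (the paths are illustrative):
```nix
{
  nativeBuildInputs = [ installShellFiles ];
  postInstall = ''
    # Section suffixes are required; a .gz suffix is also accepted.
    installManPage docs/foobar.1 docs/foobar-config.5.gz
  '';
}
```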
The `installShellCompletion` function takes one or more paths to shell completion files. By default it will autodetect the shell type from the completion file extension, but you may also specify it by passing one of `--bash`, `--fish`, or `--zsh`. These flags apply to all paths listed after them (up until another shell flag is given). Each path may also have a custom installation name provided by providing a flag `--name NAME` before the path. If this flag is not provided, zsh completions will be renamed automatically such that `foobar.zsh` becomes `_foobar`. A root name may be provided for all paths using the flag `--cmd NAME`; this synthesizes the appropriate name depending on the shell (e.g. `--cmd foo` will synthesize the name `foo.bash` for bash and `_foo` for zsh). The path may also be a fifo or named fd (such as produced by `<(cmd)`), in which case the shell and name must be provided.
The `installShellCompletion` function takes one or more paths to shell completion files. By default it will autodetect the shell type from the completion file extension, but you may also specify it by passing one of `--bash`, `--fish`, or `--zsh`. These flags apply to all paths listed after them (up until another shell flag is given). Each path may also be given a custom installation name by passing a flag `--name NAME` before the path. If this flag is not provided, zsh completions will be renamed automatically such that `foobar.zsh` becomes `_foobar`. A root name may be provided for all paths using the flag `--cmd NAME`; this synthesizes the appropriate name depending on the shell (e.g. `--cmd foo` will synthesize the name `foo.bash` for bash and `_foo` for zsh).
```nix
{
@ -17,6 +17,18 @@ The `installShellCompletion` function takes one or more paths to shell completio
installShellCompletion --zsh --name _foobar share/completions.zsh
# implicit behavior
installShellCompletion share/completions/foobar.{bash,fish,zsh}
'';
}
```
The path may also be a fifo or named fd (such as produced by `<(cmd)`), in which case the shell and name must be provided (see below).
If the destination shell completion file is not actually present, or consists of zero bytes after calling `installShellCompletion`, this is treated as a build failure. In particular, if completion files are not vendored but are generated by running an executable, this is likely to fail in cross-compilation scenarios: the result will be a zero-byte completion file and hence a build failure. To prevent this, guard the completion commands, e.g.
```nix
{
nativeBuildInputs = [ installShellFiles ];
postInstall = lib.optionalString (stdenv.buildPlatform.canExecute stdenv.hostPlatform) ''
# using named fd
installShellCompletion --cmd foobar \
--bash <($out/bin/foobar --bash-completion) \


@ -0,0 +1,35 @@
# versionCheckHook {#versioncheckhook}
This hook adds a `versionCheckPhase` to the [`preInstallCheckHooks`](#ssec-installCheck-phase) that runs the main program of the derivation with a `--help` or `--version` argument, and checks that the `${version}` string is found in that output. You use it like this:
```nix
{
lib,
stdenv,
versionCheckHook,
# ...
}:
stdenv.mkDerivation (finalAttrs: {
# ...
nativeInstallCheckInputs = [
versionCheckHook
];
doInstallCheck = true;
# ...
})
```
Note that for [`buildPythonPackage`](#buildpythonpackage-function) and [`buildPythonApplication`](#buildpythonapplication-function), `doInstallCheck` is enabled by default.
It does so in a clean environment (using `env --ignore-environment`), and it checks for the `${version}` string in both the `stdout` and the `stderr` of the command. It will report to you in the build log the output it received and it will fail the build if it failed to find `${version}`.
The variables that control this phase are:
- `dontVersionCheck`: Disable adding this hook to the [`preDistPhases`](#var-stdenv-preDist). Useful if you do want to load the bash functions of the hook, but run them differently.
- `versionCheckProgram`: The full path to the program that should print the `${version}` string. Defaults roughly to `${placeholder "out"}/bin/${pname}`. Using `$out` in the value of this variable won't work, because the hook does not expand environment variables in it, so using `placeholder` is unavoidable (see the sketch after this list).
- `versionCheckProgramArg`: The argument that needs to be passed to `versionCheckProgram`. If undefined the hook tries first `--help` and then `--version`. Examples: `version`, `-V`, `-v`.
- `preVersionCheck`: A hook to run before the check is done.
- `postVersionCheck`: A hook to run after the check is done.
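A hypothetical sketch of setting the two program-related variables inside `mkDerivation` (the tool name is made up):
```nix
{
  nativeInstallCheckInputs = [ versionCheckHook ];
  doInstallCheck = true;
  # $out would not be expanded here, so placeholder must be used:
  versionCheckProgram = "${placeholder "out"}/bin/mytool";
  versionCheckProgramArg = "--version";
}
```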


@ -84,7 +84,7 @@ mkCoqDerivation {
[ mathcomp.ssreflect mathcomp.algebra mathcomp-finmap mathcomp-bigenough ];
meta = {
description = "A Coq/SSReflect Library for Monoidal Rings and Multinomials";
description = "Coq/SSReflect Library for Monoidal Rings and Multinomials";
license = lib.licenses.cecill-c;
};
}


@ -114,7 +114,7 @@ flutter322.buildFlutterApplication {
owner = "canonical";
repo = "firmware-updater";
rev = "6e7dbdb64e344633ea62874b54ff3990bd3b8440";
sha256 = "sha256-s5mwtr5MSPqLMN+k851+pFIFFPa0N1hqz97ys050tFA=";
hash = "sha256-s5mwtr5MSPqLMN+k851+pFIFFPa0N1hqz97ys050tFA=";
fetchSubmodules = true;
};


@ -194,7 +194,7 @@ This helper has the same arguments as `buildDotnetModule`, with a few difference
* `pname` and `version` are required, and will be used to find the NuGet package of the tool
* `nugetName` can be used to override the NuGet package name that will be downloaded, if it's different from `pname`
* `nugetSha256` is the hash of the fetched NuGet package. Set this to `lib.fakeHash256` for the first build, and it will error out, giving you the proper hash. Also remember to update it during version updates (it will not error out if you just change the version while having a fetched package in `/nix/store`)
* `nugetHash` is the hash of the fetched NuGet package. `nugetSha256` is also supported, but not recommended. Set this to `lib.fakeHash` for the first build, and it will error out, giving you the proper hash. Also remember to update it during version updates (it will not error out if you just change the version while having a fetched package in `/nix/store`)
* `dotnet-runtime` is set to `dotnet-sdk` by default. When changing this, remember that .NET tools fetched from NuGet require an SDK.
Here is an example of packaging `pbm`, an unfree binary without source available:
@ -205,7 +205,7 @@ buildDotnetGlobalTool {
pname = "pbm";
version = "1.3.1";
nugetSha256 = "sha256-ZG2HFyKYhVNVYd2kRlkbAjZJq88OADe3yjxmLuxXDUo=";
nugetHash = "sha256-ZG2HFyKYhVNVYd2kRlkbAjZJq88OADe3yjxmLuxXDUo=";
meta = {
homepage = "https://cmd.petabridge.com/index.html";
@ -241,15 +241,15 @@ $ nuget-to-nix out > deps.nix
Which `nuget-to-nix` will generate an output similar to below
```nix
{ fetchNuGet }: [
(fetchNuGet { pname = "FosterFramework"; version = "0.1.15-alpha"; sha256 = "0pzsdfbsfx28xfqljcwy100xhbs6wyx0z1d5qxgmv3l60di9xkll"; })
(fetchNuGet { pname = "Microsoft.AspNetCore.App.Runtime.linux-x64"; version = "8.0.1"; sha256 = "1gjz379y61ag9whi78qxx09bwkwcznkx2mzypgycibxk61g11da1"; })
(fetchNuGet { pname = "Microsoft.NET.ILLink.Tasks"; version = "8.0.1"; sha256 = "1drbgqdcvbpisjn8mqfgba1pwb6yri80qc4mfvyczqwrcsj5k2ja"; })
(fetchNuGet { pname = "Microsoft.NETCore.App.Runtime.linux-x64"; version = "8.0.1"; sha256 = "1g5b30f4l8a1zjjr3b8pk9mcqxkxqwa86362f84646xaj4iw3a4d"; })
(fetchNuGet { pname = "SharpGLTF.Core"; version = "1.0.0-alpha0031"; sha256 = "0ln78mkhbcxqvwnf944hbgg24vbsva2jpih6q3x82d3h7rl1pkh6"; })
(fetchNuGet { pname = "SharpGLTF.Runtime"; version = "1.0.0-alpha0031"; sha256 = "0lvb3asi3v0n718qf9y367km7qpkb9wci38y880nqvifpzllw0jg"; })
(fetchNuGet { pname = "Sledge.Formats"; version = "1.2.2"; sha256 = "1y0l66m9rym0p1y4ifjlmg3j9lsmhkvbh38frh40rpvf1axn2dyh"; })
(fetchNuGet { pname = "Sledge.Formats.Map"; version = "1.1.5"; sha256 = "1bww60hv9xcyxpvkzz5q3ybafdxxkw6knhv97phvpkw84pd0jil6"; })
(fetchNuGet { pname = "System.Numerics.Vectors"; version = "4.5.0"; sha256 = "1kzrj37yzawf1b19jq0253rcs8hsq1l2q8g69d7ipnhzb0h97m59"; })
(fetchNuGet { pname = "FosterFramework"; version = "0.1.15-alpha"; hash = "sha256-lM6eYgOGjl1fx6WFD7rnRi/YAQieM0mx60h0p5dr+l8="; })
(fetchNuGet { pname = "Microsoft.AspNetCore.App.Runtime.linux-x64"; version = "8.0.1"; hash = "sha256-QbUQXjCzr8j8u/5X0af9jE++EugdoxMhT08F49MZX74="; })
(fetchNuGet { pname = "Microsoft.NET.ILLink.Tasks"; version = "8.0.1"; hash = "sha256-SopZpGaZ48/8dpUwDFDM3ix+g1rP4Yqs1PGuzRp+K7c="; })
(fetchNuGet { pname = "Microsoft.NETCore.App.Runtime.linux-x64"; version = "8.0.1"; hash = "sha256-jajBI5GqG2IIcsIMgxTHfXbMapoXrZGl/EEhShwYq7w="; })
(fetchNuGet { pname = "SharpGLTF.Core"; version = "1.0.0-alpha0031"; hash = "sha256-Bs4baD5wNIH6wAbGK4Xaem0i3luQkOQs37izBWdFx1I="; })
(fetchNuGet { pname = "SharpGLTF.Runtime"; version = "1.0.0-alpha0031"; hash = "sha256-TwJO6b8ubmwBQh6NyHha8+JT5zHDJ4dROBbsEbUaa1M="; })
(fetchNuGet { pname = "Sledge.Formats"; version = "1.2.2"; hash = "sha256-0Ddhuwpu3wwIzA4NuPaEVdMkx6tUukh8uKD6nKoxFPg="; })
(fetchNuGet { pname = "Sledge.Formats.Map"; version = "1.1.5"; hash = "sha256-hkYJ2iWIz7vhPWlDOw2fvTenlh+4/D/37Z71tCEwnK8="; })
(fetchNuGet { pname = "System.Numerics.Vectors"; version = "4.5.0"; hash = "sha256-qdSTIFgf2htPS+YhLGjAGiLN8igCYJnCCo6r78+Q+c8="; })
]
```


@ -143,7 +143,7 @@ You can also pass additional arguments to `makeWrapper` using `gappsWrapperArgs`
## Updating GNOME packages {#ssec-gnome-updating}
Most GNOME package offer [`updateScript`](#var-passthru-updateScript), it is therefore possible to update to latest source tarball by running `nix-shell maintainers/scripts/update.nix --argstr package gnome.nautilus` or even en masse with `nix-shell maintainers/scripts/update.nix --argstr path gnome`. Read the packages `NEWS` file to see what changed.
Most GNOME packages offer [`updateScript`](#var-passthru-updateScript); it is therefore possible to update to the latest source tarball by running `nix-shell maintainers/scripts/update.nix --argstr package nautilus` or even en masse with `nix-shell maintainers/scripts/update.nix --argstr path gnome`. Read the package's `NEWS` file to see what changed.
## Frequently encountered issues {#ssec-gnome-common-issues}


@ -66,6 +66,25 @@ The following is an example expression using `buildGoModule`:
The function `buildGoPackage` builds legacy Go programs, not supporting Go modules.
::: {.warning}
`buildGoPackage` is deprecated and will be removed for the 25.05 release.
:::
### Migrating from `buildGoPackage` to `buildGoModule` {#buildGoPackage-migration}
Go modules, released six years ago, are now widely adopted in the ecosystem.
Most upstream projects are using Go modules, and the tooling previously used for dependency management in Go is mostly deprecated, archived or at least unmaintained at this point.
In case a project doesn't have external dependencies or dependencies are vendored in a way understood by `go mod init`, migration can be done with a few changes in the package.
- Switch the builder from `buildGoPackage` to `buildGoModule`
- Remove `goPackagePath` and other attributes specific to `buildGoPackage`
- Set `vendorHash = null;`
- Run `go mod init <module name>` in `postPatch`
In case the package has external dependencies that aren't vendored, or the build setup is more complex, the upstream source might need to be patched; a minimal migration is sketched below.
Examples for the migration can be found in the [issue tracking migration within nixpkgs](https://github.com/NixOS/nixpkgs/issues/318069).
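A hypothetical sketch of such a migration (all names and hashes are made up for illustration):
```nix
# Before: buildGoPackage with goPackagePath = "github.com/example/frobnicate"
buildGoModule rec {
  pname = "frobnicate";
  version = "1.2.3";

  src = fetchFromGitHub {
    owner = "example";
    repo = "frobnicate";
    rev = "v${version}";
    hash = lib.fakeHash; # replace with the real hash after the first build
  };

  # The project has no go.mod yet, so create one using the module name
  # that goPackagePath used to provide:
  postPatch = ''
    go mod init github.com/example/frobnicate
  '';

  # No external dependencies, hence no vendored dependencies are needed:
  vendorHash = null;
}
```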
### Example for `buildGoPackage` {#example-for-buildgopackage}
The following is an example expression using `buildGoPackage`; these arguments are of special significance to the function:


@ -0,0 +1,189 @@
# Gradle {#gradle}
Gradle is a popular build tool for Java/Kotlin. Gradle itself doesn't
currently provide tools to make dependency resolution reproducible, so
nixpkgs has a proxy designed for intercepting Gradle web requests to
record dependencies so they can be restored in a reproducible fashion.
## Building a Gradle package {#building-a-gradle-package}
Here's what a typical derivation looks like:
```nix
stdenv.mkDerivation (finalAttrs: {
pname = "pdftk";
version = "3.3.3";
src = fetchFromGitLab {
owner = "pdftk-java";
repo = "pdftk";
rev = "v${finalAttrs.version}";
hash = "sha256-ciKotTHSEcITfQYKFZ6sY2LZnXGChBJy0+eno8B3YHY=";
};
nativeBuildInputs = [ gradle ];
# if the package has dependencies, mitmCache must be set
mitmCache = gradle.fetchDeps {
inherit (finalAttrs) pname;
data = ./deps.json;
};
# this is required for using mitm-cache on Darwin
__darwinAllowLocalNetworking = true;
gradleFlags = [ "-Dfile.encoding=utf-8" ];
# defaults to "assemble"
gradleBuildTask = "shadowJar";
# will run the gradleCheckTask (defaults to "test")
doCheck = true;
installPhase = ''
mkdir -p $out/{bin,share/pdftk}
cp build/libs/pdftk-all.jar $out/share/pdftk
makeWrapper ${jre}/bin/java $out/bin/pdftk \
--add-flags "-jar $out/share/pdftk/pdftk-all.jar"
cp ${finalAttrs.src}/pdftk.1 $out/share/man/man1
'';
meta.sourceProvenance = with lib.sourceTypes; [
fromSource
binaryBytecode # mitm cache
];
})
```
To update (or initialize) dependencies, run the update script via
something like `$(nix-build -A <pname>.mitmCache.updateScript)`
(`nix-build` builds the `updateScript`, `$(...)` runs the script at the
path printed by `nix-build`).
If your package can't be evaluated using a simple `pkgs.<pname>`
expression (for example, if your package isn't located in nixpkgs, or if
you want to override some of its attributes), you will usually have to
pass `pkg` instead of `pname` to `gradle.fetchDeps`. There are two ways
of doing it.
The first is to add the derivation arguments required for getting the
package. Using the pdftk example above:
```nix
{ lib
, stdenv
# ...
, pdftk
}:
stdenv.mkDerivation (finalAttrs: {
# ...
mitmCache = gradle.fetchDeps {
pkg = pdftk;
data = ./deps.json;
};
})
```
This allows you to `override` any arguments of the `pkg` used for
the update script (for example, `pkg = pdftk.override { enableSomeFlag =
true; };`), so this is the preferred way.
The second is to create a `let` binding for the package, like this:
```nix
let self = stdenv.mkDerivation {
# ...
mitmCache = gradle.fetchDeps {
pkg = self;
data = ./deps.json;
};
}; in self
```
This is useful if you can't easily pass the derivation as its own
argument, or if your `mkDerivation` call is responsible for building
multiple packages.
In the former case, the update script will stay the same even if the
derivation is called with different arguments. In the latter case, the
update script will change depending on the derivation arguments. It's up
to you to decide which one would work best for your derivation.
## Update Script {#gradle-update-script}
The update script does the following:
- Build the derivation's source via `pkgs.srcOnly`
- Enter a `nix-shell` for the derivation in a `bwrap` sandbox (the
sandbox is only used on Linux)
- Set the `IN_GRADLE_UPDATE_DEPS` environment variable to `1`
- Run the derivation's `unpackPhase`, `patchPhase`, `configurePhase`
- Run the derivation's `gradleUpdateScript` (the Gradle setup hook sets
a default value for it, which runs `preBuild`, `preGradleUpdate`
hooks, fetches the dependencies using `gradleUpdateTask`, and finally
runs the `postGradleUpdate` hook)
- Finally, store all of the fetched files' hashes in the lockfile. They
may be `.jar`/`.pom` files from Maven repositories, or they may be
files otherwise used for building the package.
`fetchDeps` takes the following arguments:
- `attrPath` - the path to the package in nixpkgs (for example,
`"javaPackages.openjfx22"`). Used for update script metadata.
- `pname` - an alias for `attrPath` for convenience. This is what you
will generally use instead of `pkg` or `attrPath`.
- `pkg` - the package to be used for fetching the dependencies. Defaults
to `getAttrFromPath (splitString "." attrPath) pkgs`.
- `bwrapFlags` - allows you to override bwrap flags (only relevant for
downstream, non-nixpkgs projects)
- `data` - path to the dependencies lockfile (can be relative to the
package, can be absolute). In nixpkgs, it's discouraged to have the
lockfiles be named anything other than `deps.json`; consider creating
subdirectories if your package requires multiple `deps.json` files.
## Environment {#gradle-environment}
The Gradle setup hook accepts the following environment variables (a combined example follows the list):
- `mitmCache` - the MITM proxy cache imported using `gradle.fetchDeps`
- `gradleFlags` - command-line flags to be used for every Gradle
invocation (this simply registers a function that uses the necessary
flags).
- You can't use `gradleFlags` for flags that contain spaces; in that
case, you must add `gradleFlagsArray+=("-flag with spaces")` to the
derivation's bash code instead.
- If you want to build the package using a specific Java version, you
can pass `"-Dorg.gradle.java.home=${jdk}"` as one of the flags.
- `gradleBuildTask` - the Gradle task (or tasks) to be used for building
the package. Defaults to `assemble`.
- `gradleCheckTask` - the Gradle task (or tasks) to be used for checking
the package if `doCheck` is set to `true`. Defaults to `test`.
- `gradleUpdateTask` - the Gradle task (or tasks) to be used for
fetching all of the package's dependencies in
`mitmCache.updateScript`. Defaults to `nixDownloadDeps`.
- `gradleUpdateScript` - the code to run for fetching all of the
package's dependencies in `mitmCache.updateScript`. Defaults to
running the `preBuild` and `preGradleUpdate` hooks, running the
`gradleUpdateTask`, and finally running the `postGradleUpdate` hook.
- `gradleInitScript` - path to the `--init-script` to pass to Gradle. By
default, a simple init script that enables reproducible archive
creation is used.
- Note that reproducible archives might break some builds. One example
of an error caused by it is `Could not create task ':jar'. Replacing
an existing task that may have already been used by other plugins is
not supported`. If you get such an error, the easiest "fix" is
disabling reproducible archives altogether by setting
`gradleInitScript` to something like `writeText
"empty-init-script.gradle" ""`
- `enableParallelBuilding` / `enableParallelChecking` /
`enableParallelUpdating` - pass `--parallel` to Gradle in the
build/check phase or in the update script. Defaults to true. If the
build fails for mysterious reasons, consider setting this to false.
- `dontUseGradleConfigure` / `dontUseGradleBuild` / `dontUseGradleCheck`
\- force disable the Gradle setup hook for certain phases.
- Note that if you disable the configure hook, you may face issues
such as `Failed to load native library 'libnative-platform.so'`,
because the configure hook is responsible for initializing Gradle.
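A combined sketch of how several of these variables could be set in a derivation is shown below. It assumes `gradle`, `jdk17`, and `writeText` are available as function arguments, and the chosen tasks are illustrative rather than required:
```nix
stdenv.mkDerivation {
  # ...
  nativeBuildInputs = [ gradle ];

  # Flags used for every Gradle invocation; pinning the JDK is optional.
  gradleFlags = [
    "-Dorg.gradle.java.home=${jdk17}"
    "-Dfile.encoding=utf-8"
  ];

  gradleBuildTask = "installDist"; # instead of the default "assemble"
  gradleCheckTask = "check"; # used when doCheck = true

  # Disable the reproducible-archives init script if it conflicts with the
  # project's own build logic.
  gradleInitScript = writeText "empty-init-script.gradle" "";
}
```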

View file

@ -90,6 +90,7 @@ qt.section.md
r.section.md
ruby.section.md
rust.section.md
scheme.section.md
swift.section.md
texlive.section.md
titanium.section.md

View file

@ -206,7 +206,7 @@ buildNpmPackage rec {
NODE_OPTIONS = "--openssl-legacy-provider";
meta = {
description = "A modern web UI for various torrent clients with a Node.js backend and React frontend";
description = "Modern web UI for various torrent clients with a Node.js backend and React frontend";
homepage = "https://flood.js.org";
license = lib.licenses.gpl3Only;
maintainers = with lib.maintainers; [ winter ];
@ -233,7 +233,7 @@ If these are not defined, `npm pack` may miss some files, and no binaries will b
* `npmPruneFlags`: Flags to pass to `npm prune`. Defaults to the value of `npmInstallFlags`.
* `makeWrapperArgs`: Flags to pass to `makeWrapper`, added to executable calling the generated `.js` with `node` as an interpreter. These scripts are defined in `package.json`.
* `nodejs`: The `nodejs` package to build against, using the corresponding `npm` shipped with that version of `node`. Defaults to `pkgs.nodejs`.
* `npmDeps`: The dependencies used to build the npm package. Especially useful to not have to recompute workspace depedencies.
* `npmDeps`: The dependencies used to build the npm package. Especially useful to not have to recompute workspace dependencies.
#### prefetch-npm-deps {#javascript-buildNpmPackage-prefetch-npm-deps}
@ -375,8 +375,74 @@ Assuming the following directory structure, we can define `sourceRoot` and `pnpm
pnpmRoot = "frontend";
```
### Yarn {#javascript-yarn}
Yarn based projects use a `yarn.lock` file instead of a `package-lock.json` to pin dependencies. Nixpkgs provides the Nix function `fetchYarnDeps` which fetches an offline cache suitable for running `yarn install` before building the project. In addition, Nixpkgs provides the hooks:
- `yarnConfigHook`: Fetches the dependencies from the offline cache and installs them into `node_modules`.
- `yarnBuildHook`: Runs `yarn build` or a specified `yarn` command that builds the project.
An example usage of the above attributes is:
```nix
{
lib,
stdenv,
fetchFromGitHub,
fetchYarnDeps,
yarnConfigHook,
yarnBuildHook,
nodejs,
npmHooks,
}:
stdenv.mkDerivation (finalAttrs: {
pname = "...";
version = "...";
src = fetchFromGitHub {
owner = "...";
repo = "...";
rev = "v${finalAttrs.version}";
hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
};
yarnOfflineCache = fetchYarnDeps {
yarnLock = finalAttrs.src + "/yarn.lock";
hash = "sha256-mo8urQaWIHu33+r0Y7mL9mJ/aSe/5CihuIetTeDHEUQ=";
};
nativeBuildInputs = [
yarnConfigHook
yarnBuildHook
# Needed for executing package.json scripts
nodejs
npmHooks.npmInstallHook
];
meta = {
# ...
};
})
```
Note that there is no setup hook for installing yarn based packages - `npmHooks.npmInstallHook` should fit most cases, but sometimes you may need to override the `installPhase` completely.
#### `yarnConfigHook` arguments {#javascript-yarnconfighook}
By default, `yarnConfigHook` relies upon the attribute `${yarnOfflineCache}` (or `${offlineCache}` if the former is not set) to find the location of the offline cache produced by `fetchYarnDeps`. To disable this phase, you can set `dontYarnInstallDeps = true` or override the `configurePhase`.
#### `yarnBuildHook` arguments {#javascript-yarnbuildhook}
This script by default runs `yarn --offline build`, and it relies upon the project's dependencies installed at `node_modules`. Below is a list of additional `mkDerivation` arguments read by this hook:
- `yarnBuildScript`: Sets a different `yarn --offline` subcommand (defaults to `build`).
- `yarnBuildFlags`: Additional flags to pass to the above command, given either as a single space-separated string or as a Nix list of flags (see the sketch after this list).
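As a brief illustration, a hypothetical project whose `package.json` defines a `build:prod` script might set:
```nix
{
  # Run `yarn --offline build:prod` instead of `yarn --offline build`.
  yarnBuildScript = "build:prod";
  # Extra flags appended to that command, given here as a Nix list.
  yarnBuildFlags = [ "--verbose" ];
}
```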
### yarn2nix {#javascript-yarn2nix}
WARNING: The `yarn2nix` functions have been deprecated in favor of the new `yarnConfigHook` and `yarnBuildHook`. Documentation for them still appears here for the sake of the packages that still use them. See also a tracking issue [#324246](https://github.com/NixOS/nixpkgs/issues/324246).
#### Preparation {#javascript-yarn2nix-preparation}
You will need at least a `yarn.lock` file. If upstream does not have one you need to generate it and reference it in your package definition.

View file

@ -88,7 +88,7 @@ For example, to propagate a dependency on SDL2 for lockfiles that select the Nim
}
```
The annotations in the `nim-overrides.nix` set are functions that take two arguments and return a new attrset to be overlayed on the package being built.
The annotations in the `nim-overrides.nix` set are functions that take two arguments and return a new attrset to be overlaid on the package being built.
- lockAttrs: the attrset for this library from within a lockfile. This can be used to implement library version constraints, such as marking libraries as broken or insecure.
- prevAttrs: the attrset produced by initial arguments to `buildNimPackage` and any preceding lockfile overlays.

View file

@ -113,7 +113,7 @@ buildDunePackage rec {
meta = {
homepage = "https://github.com/flowtype/ocaml-wtf8";
description = "WTF-8 is a superset of UTF-8 that allows unpaired surrogates.";
description = "WTF-8 is a superset of UTF-8 that allows unpaired surrogates";
license = lib.licenses.mit;
maintainers = [ lib.maintainers.eqyiel ];
};

View file

@ -158,7 +158,7 @@ $ nix-generate-from-cpan XML::Simple
};
propagatedBuildInputs = [ XMLNamespaceSupport XMLSAX XMLSAXExpat ];
meta = {
description = "An API for simple XML files";
description = "API for simple XML files";
license = with lib.licenses; [ artistic1 gpl1Plus ];
};
};

View file

@ -283,7 +283,10 @@ in {
];
composerRepository = php.mkComposerRepository {
inherit (finalAttrs) src;
inherit (finalAttrs) pname version src;
composerNoDev = true;
composerNoPlugins = true;
composerNoScripts = true;
# Specifying a custom composer.lock since it is not present in the sources.
composerLock = ./composer.lock;
# The composer vendor hash

View file

@ -993,7 +993,7 @@ buildPythonPackage rec {
meta = {
changelog = "https://github.com/blaze/datashape/releases/tag/${version}";
homepage = "https://github.com/ContinuumIO/datashape";
description = "A data description language";
description = "Data description language";
license = lib.licenses.bsd2;
};
}
@ -1118,7 +1118,7 @@ buildPythonPackage rec {
meta = {
changelog = "https://github.com/pyFFTW/pyFFTW/releases/tag/v${version}";
description = "A pythonic wrapper around FFTW, the FFT library, presenting a unified interface for all the supported transforms";
description = "Pythonic wrapper around FFTW, the FFT library, presenting a unified interface for all the supported transforms";
homepage = "http://hgomersall.github.com/pyFFTW";
license = with lib.licenses; [ bsd2 bsd3 ];
};
@ -1315,9 +1315,6 @@ we can do:
```nix
{
nativeBuildInputs = [
pythonRelaxDepsHook
];
pythonRelaxDeps = [
"pkg1"
"pkg3"
@ -1340,7 +1337,6 @@ example:
```nix
{
nativeBuildInputs = [ pythonRelaxDepsHook ];
pythonRelaxDeps = true;
}
```
@ -1362,8 +1358,11 @@ instead of a dev dependency).
Keep in mind that while the examples above are done with `requirements.txt`,
`pythonRelaxDepsHook` works by modifying the resulting wheel file, so it should
work with any of the [existing hooks](#setup-hooks).
It indicates that `pythonRelaxDepsHook` has no effect on build time dependencies, such as in `build-system`.
If a package requires incompatible build time dependencies, they should be removed in `postPatch` with `substituteInPlace` or something similar.
The `pythonRelaxDepsHook` has no effect on build time dependencies, such as
those specified in `build-system`. If a package requires incompatible build
time dependencies, they should be removed in `postPatch` through
`substituteInPlace` or similar.
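For illustration, a `postPatch` along these lines can drop an overly strict build-time pin; the package file and pinned range shown here are hypothetical:
```nix
{
  postPatch = ''
    # Relax a build-system pin that pythonRelaxDepsHook cannot reach.
    substituteInPlace pyproject.toml \
      --replace-fail "setuptools>=61,<69" "setuptools"
  '';
}
```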
#### Using unittestCheckHook {#using-unittestcheckhook}

View file

@ -38,7 +38,7 @@ rustPlatform.buildRustPackage rec {
cargoHash = "sha256-jtBw4ahSl88L0iuCXxQgZVm1EcboWRJMNtjxLVTtzts=";
meta = {
description = "A fast line-oriented regex search tool, similar to ag and ack";
description = "Fast line-oriented regex search tool, similar to ag and ack";
homepage = "https://github.com/BurntSushi/ripgrep";
license = lib.licenses.unlicense;
maintainers = [];
@ -1000,7 +1000,7 @@ rustPlatform.buildRustPackage rec {
doCheck = false;
meta = {
description = "A fast line-oriented regex search tool, similar to ag and ack";
description = "Fast line-oriented regex search tool, similar to ag and ack";
homepage = "https://github.com/BurntSushi/ripgrep";
license = with lib.licenses; [ mit unlicense ];
maintainers = with lib.maintainers; [];

View file

@ -0,0 +1,35 @@
# Scheme {#sec-scheme}
## Package Management {#sec-scheme-package-management}
### Akku {#sec-scheme-package-management-akku}
About two hundred R6RS & R7RS libraries from [Akku](https://akkuscm.org/)
(which also mirrors [snow-fort](https://snow-fort.org/pkg))
are available inside the `akkuPackages` attrset, and the Akku executable
itself is at the top level as `akku`. The packages can be used
in a derivation's `buildInputs`, work inside `nix-shell`, and
are tested using [Chez](https://www.scheme.com/) &
[Chibi](https://synthcode.com/wiki/chibi-scheme)
Scheme at build time.
Including a package as a build input is done in the typical Nix fashion.
For example, to include
[a bunch of SRFIs](https://akkuscm.org/packages/chez-srfi/)
primarily for Chez Scheme in a derivation, one might write:
```nix
{
buildInputs = [
chez
akkuPackages.chez-srfi
];
}
```
The package index is located in `pkgs/tools/package-management/akku`
as `deps.toml`, and should be updated occasionally by running `./update.sh`
in the directory. Doing so will pull the source URLs for new packages and
more recent versions, then write them to the TOML.

View file

@ -83,12 +83,13 @@ Release 23.11 ships with a new interface that will eventually replace `texlive.c
```nix
stdenvNoCC.mkDerivation rec {
src = texlive.pkgs.iwona;
dontUnpack = true;
inherit (src) pname version;
installPhase = ''
runHook preInstall
install -Dm644 fonts/opentype/nowacki/iwona/*.otf -t $out/share/fonts/opentype
install -Dm644 $src/fonts/opentype/nowacki/iwona/*.otf -t $out/share/fonts/opentype
runHook postInstall
'';
}
@ -182,7 +183,7 @@ let
'';
meta = {
description = "A LaTeX2e class for overhead transparencies";
description = "LaTeX2e class for overhead transparencies";
license = lib.licenses.unfreeRedistributable;
maintainers = with lib.maintainers; [ veprbl ];
platforms = lib.platforms.all;

View file

@ -226,18 +226,18 @@ Some plugins require overrides in order to function properly. Overrides are plac
}
```
Sometimes plugins require an override that must be changed when the plugin is updated. This can cause issues when Vim plugins are auto-updated but the associated override isn't updated. For these plugins, the override should be written so that it specifies all information required to install the plugin, and running `./update.py` doesn't change the derivation for the plugin. Manually updating the override is required to update these types of plugins. An example of such a plugin is `LanguageClient-neovim`.
Sometimes plugins require an override that must be changed when the plugin is updated. This can cause issues when Vim plugins are auto-updated but the associated override isn't updated. For these plugins, the override should be written so that it specifies all information required to install the plugin, and running `nix-shell -p vimPluginsUpdater --run vim-plugins-updater` doesn't change the derivation for the plugin. Manually updating the override is required to update these types of plugins. An example of such a plugin is `LanguageClient-neovim`.
To add a new plugin, run `./update.py add "[owner]/[name]"`. **NOTE**: This script automatically commits to your git repository. Be sure to check out a fresh branch before running.
To add a new plugin, run `nix-shell -p vimPluginsUpdater --run 'vim-plugins-updater add "[owner]/[name]"'`. **NOTE**: This script automatically commits to your git repository. Be sure to check out a fresh branch before running.
Finally, there are some plugins that are also packaged in nodePackages because they have Javascript-related build steps, such as running webpack. Those plugins are not listed in `vim-plugin-names` or managed by `update.py` at all, and are included separately in `overrides.nix`. Currently, all these plugins are related to the `coc.nvim` ecosystem of the Language Server Protocol integration with Vim/Neovim.
Finally, there are some plugins that are also packaged in nodePackages because they have Javascript-related build steps, such as running webpack. Those plugins are not listed in `vim-plugin-names` or managed by `vimPluginsUpdater` at all, and are included separately in `overrides.nix`. Currently, all these plugins are related to the `coc.nvim` ecosystem of the Language Server Protocol integration with Vim/Neovim.
## Updating plugins in nixpkgs {#updating-plugins-in-nixpkgs}
Run the update script with a GitHub API token that has at least `public_repo` access. Running the script without the token is likely to result in rate-limiting (429 errors). For steps on creating an API token, please refer to [GitHub's token documentation](https://docs.github.com/en/free-pro-team@latest/github/authenticating-to-github/creating-a-personal-access-token).
```sh
GITHUB_API_TOKEN=my_token ./pkgs/applications/editors/vim/plugins/update.py
nix-shell -p vimPluginsUpdater --run 'vim-plugins-updater --github-token=mytoken' # or set GITHUB_API_TOKEN environment variable
```
Alternatively, set the number of processes to a lower count to avoid rate-limiting.

View file

@ -11,7 +11,8 @@ On NixOS, you need to explicitly enable `ibus` with given engines before customi
```nix
{ pkgs, ... }: {
i18n.inputMethod = {
enabled = "ibus";
enable = true;
type = "ibus";
ibus.engines = with pkgs.ibus-engines; [ typing-booster ];
};
}

View file

@ -24,6 +24,7 @@ shell-helpers.section.md
steam.section.md
cataclysm-dda.section.md
urxvt.section.md
vcpkg.section.md
weechat.section.md
xorg.section.md
```

View file

@ -0,0 +1,24 @@
# VCPKG {#sec-vcpkg}
The `vcpkg-tool` package has a wrapper around the `vcpkg` executable to avoid writing to the nix store.
The wrapper will also be present in `vcpkg`, unless you specify `vcpkg.override { vcpkg-tool = vcpkg-tool-unwrapped; }`.
The wrapper provides default CLI arguments, but tries not to interfere if the user provides the same arguments.
The arguments also have corresponding environment variables that can be used as an alternative way of overriding these paths.
Run the wrapper with the environment variable `NIX_VCPKG_DEBUG_PRINT_ENVVARS=true` to get a full list of the corresponding environment variables.
## Nix specific environment variables {#sec-vcpkg-nix-envvars}
The wrapper also provides some new Nix-specific environment variables that let you control some of the wrapper's functionality (a small `mkShell` sketch follows the list).
- `NIX_VCPKG_WRITABLE_PATH = <path>`
Set this environment variable to specify the path where `vcpkg` will store buildtime artifacts.
This will become the base path for all of the other paths.
- `NIX_VCPKG_DEBUG_PRINT_ENVVARS = true | false`
Set this to `true` for the wrapper to print the corresponding environment variables for the arguments that will be provided to the unwrapped executable.
The list of variables will be printed right before invoking `vcpkg`.
This can be useful if you suspect that the wrapper for some reason was unable to prioritize user-provided CLI arguments over its default ones, or for fixing other issues like typos or unexpanded environment variables.
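As an illustrative sketch (not taken verbatim from the wrapper's documentation), a development shell could combine these variables like this; the writable path is an arbitrary example:
```nix
pkgs.mkShell {
  packages = [ pkgs.vcpkg ];
  env = {
    # Keep vcpkg's build-time artifacts out of the Nix store.
    NIX_VCPKG_WRITABLE_PATH = "/tmp/vcpkg-artifacts";
    # Print the resolved environment variables right before vcpkg runs.
    NIX_VCPKG_DEBUG_PRINT_ENVVARS = "true";
  };
}
```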

View file

@ -1,20 +1,7 @@
let
pkgs = import ../. {
config = {};
overlays = [];
};
common = import ./common.nix;
inherit (common) outputPath indexPath;
web-devmode = import ../pkgs/tools/nix/web-devmode.nix {
inherit pkgs;
buildArgs = "./.";
open = "/${outputPath}/${indexPath}";
config = { };
overlays = [ ];
};
in
pkgs.mkShell {
packages = [
web-devmode
];
}
pkgs.nixpkgs-manual.shell

View file

@ -5,7 +5,7 @@ Nix packages can declare *meta-attributes* that contain information about a pack
```nix
{
meta = {
description = "A program that produces a familiar, friendly greeting";
description = "Program that produces a familiar, friendly greeting";
longDescription = ''
GNU Hello is a program that prints "Hello, world!" when you run it.
It is fully customizable.
@ -22,6 +22,10 @@ Meta-attributes are not passed to the builder of the package. Thus, a change to
## Standard meta-attributes {#sec-standard-meta-attributes}
If the package is to be submitted to Nixpkgs, please check out the
[requirements for meta attributes](https://github.com/NixOS/nixpkgs/tree/master/pkgs#meta-attributes)
in the contributing documentation.
It is expected that each meta-attribute is one of the following:
### `description` {#var-meta-description}
@ -29,11 +33,21 @@ It is expected that each meta-attribute is one of the following:
A short (one-line) description of the package.
This is displayed on [search.nixos.org](https://search.nixos.org/packages).
Dont include a period at the end. Dont include newline characters. Capitalise the first character. For brevity, dont repeat the name of package --- just describe what it does.
The general requirements of a description are:
- Be short, just one sentence.
- Be capitalized.
- Not start with definite ("The") or indefinite ("A"/"An") article.
- Not start with the package name.
- More generally, it should not refer to the package name.
- Not end with a period (or any punctuation for that matter).
- Provide factual information.
- Avoid subjective language.
Wrong: `"libpng is a library that allows you to decode PNG images."`
Right: `"A library for decoding PNG images"`
Right: `"Library for decoding PNG images"`
### `longDescription` {#var-meta-longDescription}
@ -67,6 +81,12 @@ The license, or licenses, for the package. One from the attribute set defined in
For details, see [Licenses](#sec-meta-license).
### `sourceProvenance` {#var-meta-sourceProvenance}
A list containing the type or types of source inputs from which the package is built, e.g. original source code, pre-built binaries, etc.
For details, see [Source provenance](#sec-meta-sourceProvenance).
### `maintainers` {#var-meta-maintainers}
A list of the maintainers of this Nix expression. Maintainers are defined in [`nixpkgs/maintainers/maintainer-list.nix`](https://github.com/NixOS/nixpkgs/blob/master/maintainers/maintainer-list.nix). There is no restriction to becoming a maintainer, just add yourself to that list in a separate commit titled “maintainers: add alice” in the same pull request, and reference maintainers with `maintainers = with lib.maintainers; [ alice bob ]`.

View file

@ -75,40 +75,17 @@ The Nixpkgs systems for continuous integration [Hydra](https://hydra.nixos.org/)
#### Package tests {#var-passthru-tests-packages}
[]{#var-meta-tests-packages} <!-- legacy anchor -->
Tests that are part of the source package, if they run quickly, are typically executed in the [`installCheckPhase`](#var-stdenv-phases).
This phase is also suitable for performing a `--version` test for packages that support such a flag.
Most programs distributed by Nixpkgs support such a `--version` flag, and successfully calling the program with that flag indicates that the package at least got compiled properly.
Besides the tests provided by upstream, which you run in the [`checkPhase`](#ssec-check-phase), you may want to define test derivations in the `passthru.tests` attribute, which won't change the build. `passthru.tests` have several advantages over running tests during any of the [standard phases](#sec-stdenv-phases):
:::{.example #ex-checking-build-installCheckPhase}
- They access the package as consumers would, independently from the environment in which it was built
- They can be run and debugged without rebuilding the package, which is useful if that takes a long time
- They don't add overhead to each build, as opposed to checks added to the [`distPhase`](#ssec-distribution-phase), such as [`versionCheckHook`](#versioncheckhook).
## Checking builds with `installCheckPhase`
It is also possible to use `passthru.tests` to test the version with [`testVersion`](#tester-testVersion), but since that is a fairly trivial and commonly recommended check, we recommend using [`versionCheckHook`](#versioncheckhook) for it, which has the following advantages over `passthru.tests`:
When building `git`, a rudimentary test for successful compilation would be running `git --version`:
```nix
stdenv.mkDerivation (finalAttrs: {
pname = "git";
version = "1.2.3";
# ...
doInstallCheck = true;
installCheckPhase = ''
runHook preInstallCheck
echo checking if 'git --version' mentions ${finalAttrs.version}
$out/bin/git --version | grep ${finalAttrs.version}
runHook postInstallCheck
'';
# ...
})
```
:::
However, tests that are non-trivial will better fit into `passthru.tests` because they:
- Access the package as consumers would, independently from the environment in which it was built
- Can be run and debugged without rebuilding the package, which is useful if that takes a long time
- Don't add overhead to each build, as opposed to `installCheckPhase`
It is also possible to use `passthru.tests` to test the version with [`testVersion`](#tester-testVersion).
- If the `versionCheckPhase` (the phase defined by [`versionCheckHook`](#versioncheckhook)) fails, it triggers a failure which can't be ignored if you use the package, or if you find out about it in a [`nixpkgs-review`](https://github.com/Mic92/nixpkgs-review) report.
- Sometimes packages become silently broken, meaning they fail to launch but their build passes because they don't perform any tests in the `checkPhase`. If you use such a tool infrequently, the silent breakage may rot in your system / profile configuration, and you will not notice the failure until you want to use the package. Testing such basic functionality ensures you have to deal with the failure when you update your system / profile.
- When you open a PR, [ofborg](https://github.com/NixOS/ofborg)'s CI _will_ run `passthru.tests` of [packages that are directly changed by your PR (according to your commits' messages)](https://github.com/NixOS/ofborg?tab=readme-ov-file#automatic-building), but if you want to use the [`@ofborg build`](https://github.com/NixOS/ofborg?tab=readme-ov-file#build) command for dependent packages, you won't have to additionally specify the `.tests` attribute of the packages you want to build, and nobody will be able to skip these tests.
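A minimal sketch of wiring up `versionCheckHook` in a derivation is shown below; the package attributes are placeholders, and passing `--version` via `versionCheckProgramArg` is an assumption about the program being tested:
```nix
stdenv.mkDerivation {
  pname = "example"; # hypothetical package
  version = "1.0";
  # ...

  nativeInstallCheckInputs = [ versionCheckHook ];
  doInstallCheck = true;

  # Argument passed to the installed program when probing its version.
  versionCheckProgramArg = "--version";
}
```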
<!-- NOTE(@fricklerhandwerk): one may argue whether that testing guide should rather be in the user's manual -->
For more on how to write and run package tests for Nixpkgs, see the [testing section in the package contributor guide](https://github.com/NixOS/nixpkgs/blob/master/pkgs/README.md#package-tests).

View file

@ -762,6 +762,8 @@ Before and after running `make`, the hooks `preBuild` and `postBuild` are called
The check phase checks whether the package was built correctly by running its test suite. The default `checkPhase` calls `make $checkTarget`, but only if the [`doCheck` variable](#var-stdenv-doCheck) is enabled.
For packages whose sources are not distributed with any tests, it is highly recommended to at least use [`versionCheckHook`](#versioncheckhook) to test that the resulting executable is basically functional.
#### Variables controlling the check phase {#variables-controlling-the-check-phase}
##### `doCheck` {#var-stdenv-doCheck}
@ -1515,29 +1517,43 @@ This flag can break dynamic shared object loading. For instance, the module syst
intel_drv.so: undefined symbol: vgaHWFreeHWRec
```
#### `zerocallusedregs` {#zerocallusedregs}
Adds the `-fzero-call-used-regs=used-gpr` compiler option. This causes the general-purpose registers that an architecture's calling convention considers "call-used" to be zeroed on return from the function. This can make it harder for attackers to construct useful ROP gadgets and also reduces the chance of data leakage from a function call.
### Hardening flags disabled by default {#sec-hardening-flags-disabled-by-default}
The following flags are disabled by default and should be enabled with `hardeningEnable` for packages that take untrusted input like network services.
#### `pie` {#pie}
This flag is disabled by default for normal `glibc` based NixOS package builds, but enabled by default for `musl` based package builds.
This flag is disabled by default for normal `glibc`-based NixOS package builds, but enabled by default for:
- `musl`-based package builds, except on Aarch64 and Aarch32, where there are issues.
- Statically-linked OpenBSD builds, where it appears to be required to get a working binary.
Adds the `-fPIE` compiler and `-pie` linker options. Position Independent Executables are needed to take advantage of Address Space Layout Randomization, supported by modern kernel versions. While ASLR can already be enforced for data areas in the stack and heap (brk and mmap), the code areas must be compiled as position-independent. Shared libraries already do this with the `pic` flag, so they gain ASLR automatically, but binary .text regions need to be built with `pie` to gain ASLR. When this happens, ROP attacks are much harder since there are no static locations to bounce off of during a memory corruption attack.
Static libraries need to be compiled with `-fPIE` so that executables can link them in with the `-pie` linker option.
If the libraries lack `-fPIE`, you will get the error `recompile with -fPIE`.
#### `zerocallusedregs` {#zerocallusedregs}
Adds the `-fzero-call-used-regs=used-gpr` compiler option. This causes the general-purpose registers that an architecture's calling convention considers "call-used" to be zeroed on return from the function. This can make it harder for attackers to construct useful ROP gadgets and also reduces the chance of data leakage from a function call.
#### `trivialautovarinit` {#trivialautovarinit}
Adds the `-ftrivial-auto-var-init=pattern` compiler option. This causes "trivially-initializable" uninitialized stack variables to be forcibly initialized with a nonzero value that is likely to cause a crash (and therefore be noticed). Uninitialized variables generally take on their values based on fragments of previous program state, and attackers can carefully manipulate that state to craft malicious initial values for these variables.
Use of this flag is controversial as it can prevent tools that detect uninitialized variable use (such as valgrind) from operating correctly.
This should be turned off or fixed for build errors such as:
```
sorry, unimplemented: __builtin_clear_padding not supported for variable length aggregates
```
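Per-package toggling of these flags happens through the `hardeningEnable` and `hardeningDisable` derivation attributes; a brief sketch with a hypothetical package:
```nix
stdenv.mkDerivation {
  pname = "example-daemon"; # hypothetical network service
  version = "0.1";
  # ...

  # Opt into an off-by-default flag for a service handling untrusted input.
  hardeningEnable = [ "pie" ];

  # Disable a flag that breaks this particular build.
  hardeningDisable = [ "trivialautovarinit" ];
}
```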
#### `stackclashprotection` {#stackclashprotection}
This flag adds the `-fstack-clash-protection` compiler option, which causes growth of a program's stack to access each successive page in order. This should force the guard page to be accessed and cause an attempt to "jump over" this guard page to crash.
[^footnote-stdenv-ignored-build-platform]: The build platform is ignored because it is a mere implementation detail of the package satisfying the dependency: As a general programming principle, dependencies are always *specified* as interfaces, not concrete implementation.
[^footnote-stdenv-native-dependencies-in-path]: Currently, this means for native builds all dependencies are put on the `PATH`. But in the future that may not be the case for sake of matching cross: the platforms would be assumed to be unique for native and cross builds alike, so only the `depsBuild*` and `nativeBuildInputs` would be added to the `PATH`.
[^footnote-stdenv-propagated-dependencies]: Nix itself already takes a packages transitive dependencies into account, but this propagation ensures nixpkgs-specific infrastructure like [setup hooks](#ssec-setup-hooks) also are run as if it were a propagated dependency.

View file

@ -0,0 +1,32 @@
# To build this derivation, run `nix-build -A nixpkgs-manual.tests.manpage-urls`
{
lib,
runCommand,
invalidateFetcherByDrvHash,
cacert,
python3,
}:
invalidateFetcherByDrvHash (
{
name ? "manual_check-manpage-urls",
script ? ./manpage-urls.py,
urlsFile ? ../manpage-urls.json,
}:
runCommand name
{
nativeBuildInputs = [
cacert
(python3.withPackages (p: [
p.aiohttp
p.rich
p.structlog
]))
];
outputHash = "sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="; # Empty output
}
''
python3 ${script} ${urlsFile}
touch $out
''
) { }

View file

@ -1764,6 +1764,7 @@ rec {
/**
Get a package output.
If no output is found, fallback to `.out` and then to the default.
The function is idempotent: `getOutput "b" (getOutput "a" p) == getOutput "a" p`.
# Inputs
@ -1779,7 +1780,7 @@ rec {
# Type
```
getOutput :: String -> Derivation -> String
getOutput :: String -> Derivation -> Derivation
```
# Examples
@ -1787,7 +1788,7 @@ rec {
## `lib.attrsets.getOutput` usage example
```nix
getOutput "dev" pkgs.openssl
"${getOutput "dev" pkgs.openssl}"
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev"
```
@ -1798,6 +1799,49 @@ rec {
then pkg.${output} or pkg.out or pkg
else pkg;
/**
Get the first of the `outputs` provided by the package, or the default.
This function is aligned with `_overrideFirst()` from the `multiple-outputs.sh` setup hook.
Like `getOutput`, the function is idempotent.
# Inputs
`outputs`
: 1\. Function argument
`pkg`
: 2\. Function argument
# Type
```
getFirstOutput :: [String] -> Derivation -> Derivation
```
# Examples
:::{.example}
## `lib.attrsets.getFirstOutput` usage example
```nix
"${getFirstOutput [ "include" "dev" ] pkgs.openssl}"
=> "/nix/store/00000000000000000000000000000000-openssl-1.0.1r-dev"
```
:::
*/
getFirstOutput =
candidates: pkg:
let
outputs = builtins.filter (name: hasAttr name pkg) candidates;
output = builtins.head outputs;
in
if pkg.outputSpecified or false || outputs == [ ] then
pkg
else
pkg.${output};
/**
Get a package's `bin` output.
If the output does not exist, fallback to `.out` and then to the default.
@ -1811,7 +1855,7 @@ rec {
# Type
```
getBin :: Derivation -> String
getBin :: Derivation -> Derivation
```
# Examples
@ -1819,8 +1863,8 @@ rec {
## `lib.attrsets.getBin` usage example
```nix
getBin pkgs.openssl
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r"
"${getBin pkgs.openssl}"
=> "/nix/store/00000000000000000000000000000000-openssl-1.0.1r"
```
:::
@ -1841,7 +1885,7 @@ rec {
# Type
```
getLib :: Derivation -> String
getLib :: Derivation -> Derivation
```
# Examples
@ -1849,7 +1893,7 @@ rec {
## `lib.attrsets.getLib` usage example
```nix
getLib pkgs.openssl
"${getLib pkgs.openssl}"
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-lib"
```
@ -1857,6 +1901,35 @@ rec {
*/
getLib = getOutput "lib";
/**
Get a package's `static` output.
If the output does not exist, fallback to `.lib`, then to `.out`, and then to the default.
# Inputs
`pkg`
: The package whose `static` output will be retrieved.
# Type
```
getStatic :: Derivation -> Derivation
```
# Examples
:::{.example}
## `lib.attrsets.getStatic` usage example
```nix
"${lib.getStatic pkgs.glibc}"
=> "/nix/store/00000000000000000000000000000000-glibc-2.39-52-static"
```
:::
*/
getStatic = getFirstOutput [ "static" "lib" "out" ];
/**
Get a package's `dev` output.
@ -1871,7 +1944,7 @@ rec {
# Type
```
getDev :: Derivation -> String
getDev :: Derivation -> Derivation
```
# Examples
@ -1879,7 +1952,7 @@ rec {
## `lib.attrsets.getDev` usage example
```nix
getDev pkgs.openssl
"${getDev pkgs.openssl}"
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev"
```
@ -1887,6 +1960,35 @@ rec {
*/
getDev = getOutput "dev";
/**
Get a package's `include` output.
If the output does not exist, fallback to `.dev`, then to `.out`, and then to the default.
# Inputs
`pkg`
: The package whose `include` output will be retrieved.
# Type
```
getInclude :: Derivation -> Derivation
```
# Examples
:::{.example}
## `lib.attrsets.getInclude` usage example
```nix
"${getInclude pkgs.openssl}"
=> "/nix/store/00000000000000000000000000000000-openssl-1.0.1r-dev"
```
:::
*/
getInclude = getFirstOutput [ "include" "dev" "out" ];
/**
Get a package's `man` output.
@ -1901,7 +2003,7 @@ rec {
# Type
```
getMan :: Derivation -> String
getMan :: Derivation -> Derivation
```
# Examples
@ -1909,7 +2011,7 @@ rec {
## `lib.attrsets.getMan` usage example
```nix
getMan pkgs.openssl
"${getMan pkgs.openssl}"
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-man"
```
@ -1929,7 +2031,7 @@ rec {
# Type
```
chooseDevOutputs :: [Derivation] -> [String]
chooseDevOutputs :: [Derivation] -> [Derivation]
```
*/
chooseDevOutputs = builtins.map getDev;

View file

@ -64,16 +64,19 @@ let
# linux kernel configuration
kernel = callLibs ./kernel.nix;
# network
network = callLibs ./network;
# TODO: For consistency, all builtins should also be available from a sub-library;
# these are the only ones that are currently not
inherit (builtins) addErrorContext isPath trace;
inherit (builtins) addErrorContext isPath trace typeOf unsafeGetAttrPos;
inherit (self.trivial) id const pipe concat or and xor bitAnd bitOr bitXor
bitNot boolToString mergeAttrs flip mapNullable inNixShell isFloat min max
importJSON importTOML warn warnIf warnIfNot throwIf throwIfNot checkListOfEnum
info showWarnings nixpkgsVersion version isInOldestRelease
mod compare splitByAndCompare seq deepSeq lessThan add sub
functionArgs setFunctionArgs isFunction toFunction mirrorFunctionArgs
toHexString toBaseDigits inPureEvalMode isBool isInt pathExists
fromHexString toHexString toBaseDigits inPureEvalMode isBool isInt pathExists
genericClosure readFile;
inherit (self.fixedPoints) fix fix' converge extends composeExtensions
composeManyExtensions makeExtensible makeExtensibleWithCustomName;
@ -83,8 +86,8 @@ let
mapAttrs' mapAttrsToList attrsToList concatMapAttrs mapAttrsRecursive
mapAttrsRecursiveCond genAttrs isDerivation toDerivation optionalAttrs
zipAttrsWithNames zipAttrsWith zipAttrs recursiveUpdateUntil
recursiveUpdate matchAttrs mergeAttrsList overrideExisting showAttrPath getOutput
getBin getLib getDev getMan chooseDevOutputs zipWithNames zip
recursiveUpdate matchAttrs mergeAttrsList overrideExisting showAttrPath getOutput getFirstOutput
getBin getLib getStatic getDev getInclude getMan chooseDevOutputs zipWithNames zip
recurseIntoAttrs dontRecurseIntoAttrs cartesianProduct cartesianProductOfSets
mapCartesianProduct updateManyAttrsByPath listToAttrs hasAttr getAttr isAttrs intersectAttrs removeAttrs;
inherit (self.lists) singleton forEach map foldr fold foldl foldl' imap0 imap1
@ -102,7 +105,7 @@ let
hasInfix hasPrefix hasSuffix stringToCharacters stringAsChars escape
escapeShellArg escapeShellArgs
isStorePath isStringLike
isValidPosixName toShellVar toShellVars
isValidPosixName toShellVar toShellVars trim trimWith
escapeRegex escapeURL escapeXML replaceChars lowerChars
upperChars toLower toUpper addContextFrom splitString
removePrefix removeSuffix versionOlder versionAtLeast
@ -120,7 +123,7 @@ let
inherit (self.derivations) lazyDerivation optionalDrvAttr;
inherit (self.meta) addMetaAttrs dontDistribute setName updateName
appendToName mapDerivationAttrset setPrio lowPrio lowPrioSet hiPrio
hiPrioSet getLicenseFromSpdxId getExe getExe';
hiPrioSet getLicenseFromSpdxId getLicenseFromSpdxIdOr getExe getExe';
inherit (self.filesystem) pathType pathIsDirectory pathIsRegularFile
packagesFromDirectoryRecursive;
inherit (self.sources) cleanSourceFilter

View file

@ -29,9 +29,10 @@ let
nameValuePair
tail
toList
warn
;
inherit (lib.attrsets) removeAttrs;
inherit (lib.attrsets) removeAttrs mapAttrsToList;
# returns default if env var is not set
maybeEnv = name: default:
@ -212,7 +213,7 @@ let
else closePropagationSlow;
# calls a function (f attr value ) for each record item. returns a list
mapAttrsFlatten = f: r: map (attr: f attr r.${attr}) (attrNames r);
mapAttrsFlatten = warn "lib.misc.mapAttrsFlatten is deprecated, please use lib.attrsets.mapAttrsToList instead." mapAttrsToList;
# attribute set containing one attribute
nvs = name: value: listToAttrs [ (nameValuePair name value) ];

View file

@ -236,7 +236,7 @@ File sets cannot add single files to the store, they can only import files under
Arguments:
- (+) There's no point in using this library for a single file, since you can't do anything other than add it to the store or not.
And it would be unclear how the library should behave if the one file wouldn't be added to the store:
`toSource { root = ./file.nix; fileset = <empty>; }` has no reasonable result because returing an empty store path wouldn't match the file type, and there's no way to have an empty file store path, whatever that would mean.
`toSource { root = ./file.nix; fileset = <empty>; }` has no reasonable result because returning an empty store path wouldn't match the file type, and there's no way to have an empty file store path, whatever that would mean.
### `fileFilter` takes a path

View file

@ -1,25 +1,29 @@
{ lib }:
let
inherit (lib) optionalAttrs;
lib.mapAttrs (lname: lset: let
defaultLicense = {
shortName = lname;
free = true; # Most of our licenses are Free, explicitly declare unfree additions as such!
deprecated = false;
mkLicense = lname: {
shortName ? lname,
# Most of our licenses are Free, explicitly declare unfree additions as such!
free ? true,
deprecated ? false,
spdxId ? null,
url ? null,
fullName ? null,
redistributable ? free
}@attrs: {
inherit shortName free deprecated redistributable;
} // optionalAttrs (attrs ? spdxId) {
inherit spdxId;
url = "https://spdx.org/licenses/${spdxId}.html";
} // optionalAttrs (attrs ? url) {
inherit url;
} // optionalAttrs (attrs ? fullName) {
inherit fullName;
};
mkLicense = licenseDeclaration: let
applyDefaults = license: defaultLicense // license;
applySpdx = license:
if license ? spdxId
then license // { url = "https://spdx.org/licenses/${license.spdxId}.html"; }
else license;
applyRedistributable = license: { redistributable = license.free; } // license;
in lib.pipe licenseDeclaration [
applyDefaults
applySpdx
applyRedistributable
];
in mkLicense lset) ({
in
lib.mapAttrs mkLicense ({
/* License identifiers from spdx.org where possible.
* If you cannot find your license here, then look for a similar license or
* add it to this list. The URL mentioned above is a good source for inspiration.
@ -317,6 +321,12 @@ in mkLicense lset) ({
free = false;
};
cc-by-nd-40 = {
spdxId = "CC-BY-ND-4.0";
fullName = "Creative Commons Attribution-No Derivative Works v4.0";
free = false;
};
cc-by-sa-10 = {
spdxId = "CC-BY-SA-1.0";
fullName = "Creative Commons Attribution Share Alike 1.0";
@ -917,7 +927,7 @@ in mkLicense lset) ({
ncbiPd = {
spdxId = "NCBI-PD";
fullname = "NCBI Public Domain Notice";
fullName = "NCBI Public Domain Notice";
# Due to United States copyright law, anything with this "license" does not have a copyright in the
# jurisdiction of the United States. However, other jurisdictions may assign the United States
# government copyright to the work, and the license explicitly states that in such a case, no license
@ -1161,7 +1171,7 @@ in mkLicense lset) ({
shortName = "TSL";
fullName = "Timescale License Agreegment";
url = "https://github.com/timescale/timescaledb/blob/main/tsl/LICENSE-TIMESCALE";
unfree = true;
free = false;
};
tcltk = {
@ -1297,7 +1307,7 @@ in mkLicense lset) ({
zsh = {
url = "https://github.com/zsh-users/zsh/blob/master/LICENCE";
fulllName = "Zsh License";
fullName = "Zsh License";
};
zpl20 = {

View file

@ -287,10 +287,10 @@ rec {
all (elem: !platformMatch platform elem) (pkg.meta.badPlatforms or []);
/**
Get the corresponding attribute in lib.licenses
from the SPDX ID.
For SPDX IDs, see
https://spdx.org/licenses
Get the corresponding attribute in lib.licenses from the SPDX ID
or warn and fallback to `{ shortName = <license string>; }`.
For SPDX IDs, see https://spdx.org/licenses
# Type
@ -315,15 +315,57 @@ rec {
:::
*/
getLicenseFromSpdxId =
let
spdxLicenses = lib.mapAttrs (id: ls: assert lib.length ls == 1; builtins.head ls)
(lib.groupBy (l: lib.toLower l.spdxId) (lib.filter (l: l ? spdxId) (lib.attrValues lib.licenses)));
in licstr:
spdxLicenses.${ lib.toLower licstr } or (
licstr:
getLicenseFromSpdxIdOr licstr (
lib.warn "getLicenseFromSpdxId: No license matches the given SPDX ID: ${licstr}"
{ shortName = licstr; }
);
/**
Get the corresponding attribute in lib.licenses from the SPDX ID
or fallback to the given default value.
For SPDX IDs, see https://spdx.org/licenses
# Inputs
`licstr`
: 1\. SPDX ID string to find a matching license
`default`
: 2\. Fallback value when a match is not found
# Type
```
getLicenseFromSpdxIdOr :: str -> Any -> Any
```
# Examples
:::{.example}
## `lib.meta.getLicenseFromSpdxIdOr` usage example
```nix
lib.getLicenseFromSpdxIdOr "MIT" null == lib.licenses.mit
=> true
lib.getLicenseFromSpdxId "mIt" null == lib.licenses.mit
=> true
lib.getLicenseFromSpdxIdOr "MY LICENSE" lib.licenses.free == lib.licenses.free
=> true
lib.getLicenseFromSpdxIdOr "MY LICENSE" null
=> null
lib.getLicenseFromSpdxIdOr "MY LICENSE" (builtins.throw "No SPDX ID matches MY LICENSE")
=> error: No SPDX ID matches MY LICENSE
```
:::
*/
getLicenseFromSpdxIdOr =
let
spdxLicenses = lib.mapAttrs (id: ls: assert lib.length ls == 1; builtins.head ls)
(lib.groupBy (l: lib.toLower l.spdxId) (lib.filter (l: l ? spdxId) (lib.attrValues lib.licenses)));
in licstr: default:
spdxLicenses.${ lib.toLower licstr } or default;
/**
Get the path to the main program of a package based on meta.mainProgram

View file

@ -2,6 +2,7 @@
let
inherit (lib)
addErrorContext
all
any
attrByPath
@ -13,13 +14,16 @@ let
elem
filter
foldl'
functionArgs
getAttrFromPath
genericClosure
head
id
imap1
isAttrs
isBool
isFunction
isInOldestRelease
isList
isString
length
@ -32,9 +36,17 @@ let
optionalString
recursiveUpdate
reverseList sort
seq
setAttrByPath
substring
throwIfNot
trace
typeOf
types
unsafeGetAttrPos
warn
warnIf
zipAttrs
zipAttrsWith
;
inherit (lib.options)
@ -89,8 +101,8 @@ let
}:
let
withWarnings = x:
lib.warnIf (evalModulesArgs?args) "The args argument to evalModules is deprecated. Please set config._module.args instead."
lib.warnIf (evalModulesArgs?check) "The check argument to evalModules is deprecated. Please set config._module.check instead."
warnIf (evalModulesArgs?args) "The args argument to evalModules is deprecated. Please set config._module.args instead."
warnIf (evalModulesArgs?check) "The check argument to evalModules is deprecated. Please set config._module.check instead."
x;
legacyModules =
@ -265,9 +277,9 @@ let
let
optText = showOption (prefix ++ firstDef.prefix);
defText =
builtins.addErrorContext
addErrorContext
"while evaluating the error message for definitions for `${optText}', which is an option that does not exist"
(builtins.addErrorContext
(addErrorContext
"while evaluating a definition from `${firstDef.file}'"
( showDefs [ firstDef ])
);
@ -298,7 +310,7 @@ let
else throw baseMsg
else null;
checked = builtins.seq checkUnmatched;
checked = seq checkUnmatched;
extendModules = extendArgs@{
modules ? [],
@ -312,7 +324,7 @@ let
prefix = extendArgs.prefix or evalModulesArgs.prefix or [];
});
type = lib.types.submoduleWith {
type = types.submoduleWith {
inherit modules specialArgs class;
};
@ -344,8 +356,8 @@ let
else
throw (
"Could not load a value as a module, because it is of type ${lib.strings.escapeNixString m._type}"
+ lib.optionalString (fallbackFile != unknownModule) ", in file ${toString fallbackFile}."
+ lib.optionalString (m._type == "configuration") " If you do intend to import this configuration, please only import the modules that make up the configuration. You may have to create a `let` binding, file or attribute to give yourself access to the relevant modules.\nWhile loading a configuration into the module system is a very sensible idea, it can not be done cleanly in practice."
+ optionalString (fallbackFile != unknownModule) ", in file ${toString fallbackFile}."
+ optionalString (m._type == "configuration") " If you do intend to import this configuration, please only import the modules that make up the configuration. You may have to create a `let` binding, file or attribute to give yourself access to the relevant modules.\nWhile loading a configuration into the module system is a very sensible idea, it can not be done cleanly in practice."
# Extended explanation: That's because a finalized configuration is more than just a set of modules. For instance, it has its own `specialArgs` that, by the nature of `specialArgs`, can't be loaded through `imports` or the `modules` argument. So instead, we have to ask you to extract the relevant modules and use those instead. This way, we keep the module system comparatively simple, and hopefully avoid a bad surprise down the line.
)
else if isList m then
@ -415,7 +427,7 @@ let
moduleKey = file: m:
if isString m
then
if builtins.substring 0 1 m == "/"
if substring 0 1 m == "/"
then m
else toString modulesPath + "/" + m
@ -433,11 +445,11 @@ let
else if isAttrs m
then throw "Module `${file}` contains a disabledModules item that is an attribute set, presumably a module, that does not have a `key` attribute. This means that the module system doesn't have any means to identify the module that should be disabled. Make sure that you've put the correct value in disabledModules: a string path relative to modulesPath, a path value, or an attribute set with a `key` attribute."
else throw "Each disabledModules item must be a path, string, or a attribute set with a key attribute, or a value supported by toString. However, one of the disabledModules items in `${toString file}` is none of that, but is of type ${builtins.typeOf m}.";
else throw "Each disabledModules item must be a path, string, or a attribute set with a key attribute, or a value supported by toString. However, one of the disabledModules items in `${toString file}` is none of that, but is of type ${typeOf m}.";
disabledKeys = concatMap ({ file, disabled }: map (moduleKey file) disabled) disabled;
keyFilter = filter (attrs: ! elem attrs.key disabledKeys);
in map (attrs: attrs.module) (builtins.genericClosure {
in map (attrs: attrs.module) (genericClosure {
startSet = keyFilter modules;
operator = attrs: keyFilter attrs.modules;
});
@ -475,7 +487,7 @@ let
}
else
# shorthand syntax
lib.throwIfNot (isAttrs m) "module ${file} (${key}) does not look like a module."
throwIfNot (isAttrs m) "module ${file} (${key}) does not look like a module."
{ _file = toString m._file or file;
_class = m._class or null;
key = toString m.key or key;
@ -485,10 +497,10 @@ let
config = addFreeformType (removeAttrs m ["_class" "_file" "key" "disabledModules" "require" "imports" "freeformType"]);
};
applyModuleArgsIfFunction = key: f: args@{ config, options, lib, ... }:
applyModuleArgsIfFunction = key: f: args@{ config, ... }:
if isFunction f then applyModuleArgs key f args else f;
applyModuleArgs = key: f: args@{ config, options, lib, ... }:
applyModuleArgs = key: f: args@{ config, ... }:
let
# Module arguments are resolved in a strict manner when attribute set
# deconstruction is used. As the arguments are now defined with the
@ -503,10 +515,10 @@ let
# not their values. The values are forwarding the result of the
# evaluation of the option.
context = name: ''while evaluating the module argument `${name}' in "${key}":'';
extraArgs = builtins.mapAttrs (name: _:
builtins.addErrorContext (context name)
extraArgs = mapAttrs (name: _:
addErrorContext (context name)
(args.${name} or config._module.args.${name})
) (lib.functionArgs f);
) (functionArgs f);
# Note: we append in the opposite order such that we can add an error
# context on the explicit arguments of "args" too. This update
@ -547,16 +559,16 @@ let
(n: concatLists)
(map
(module: let subtree = module.options; in
if !(builtins.isAttrs subtree) then
if !(isAttrs subtree) then
throw ''
An option declaration for `${builtins.concatStringsSep "." prefix}' has type
`${builtins.typeOf subtree}' rather than an attribute set.
An option declaration for `${concatStringsSep "." prefix}' has type
`${typeOf subtree}' rather than an attribute set.
Did you mean to define this outside of `options'?
''
else
mapAttrs
(n: option:
[{ inherit (module) _file; pos = builtins.unsafeGetAttrPos n subtree; options = option; }]
[{ inherit (module) _file; pos = unsafeGetAttrPos n subtree; options = option; }]
)
subtree
)
@ -565,17 +577,17 @@ let
# The root of any module definition must be an attrset.
checkedConfigs =
assert
lib.all
all
(c:
# TODO: I have my doubts that this error would occur when option definitions are not matched.
# The implementation of this check used to be tied to a superficially similar check for
# options, so maybe that's why this is here.
isAttrs c.config || throw ''
In module `${c.file}', you're trying to define a value of type `${builtins.typeOf c.config}'
In module `${c.file}', you're trying to define a value of type `${typeOf c.config}'
rather than an attribute set for the option
`${builtins.concatStringsSep "." prefix}'!
`${concatStringsSep "." prefix}'!
This usually happens if `${builtins.concatStringsSep "." prefix}' has option
This usually happens if `${concatStringsSep "." prefix}' has option
definitions inside that are not matched. Please check how to properly define
this option by e.g. referring to `man 5 configuration.nix'!
''
@ -667,7 +679,7 @@ let
let
nonOptions = filter (m: !isOption m.options) decls;
in
throw "The option `${showOption loc}' in module `${(lib.head optionDecls)._file}' would be a parent of the following options, but its type `${(lib.head optionDecls).options.type.description or "<no description>"}' does not support nested options.\n${
throw "The option `${showOption loc}' in module `${(head optionDecls)._file}' would be a parent of the following options, but its type `${(head optionDecls).options.type.description or "<no description>"}' does not support nested options.\n${
showRawDecls loc nonOptions
}"
else
@ -806,7 +818,7 @@ let
"The type `types.${opt.type.name}' of option `${showOption loc}' defined in ${showFiles opt.declarations} is deprecated. ${opt.type.deprecationMessage}";
in warnDeprecation opt //
{ value = builtins.addErrorContext "while evaluating the option `${showOption loc}':" value;
{ value = addErrorContext "while evaluating the option `${showOption loc}':" value;
inherit (res.defsFinal') highestPrio;
definitions = map (def: def.value) res.defsFinal;
files = map (def: def.file) res.defsFinal;
@ -822,7 +834,7 @@ let
let
# Process mkMerge and mkIf properties.
defs' = concatMap (m:
map (value: { inherit (m) file; inherit value; }) (builtins.addErrorContext "while evaluating definitions from `${m.file}':" (dischargeProperties m.value))
map (value: { inherit (m) file; inherit value; }) (addErrorContext "while evaluating definitions from `${m.file}':" (dischargeProperties m.value))
) defs;
# Process mkOverride properties.
@ -972,12 +984,12 @@ let
mergeAttrDefinitionsWithPrio = opt:
let
defsByAttr =
lib.zipAttrs (
lib.concatLists (
lib.concatMap
zipAttrs (
concatLists (
concatMap
({ value, ... }@def:
map
(lib.mapAttrsToList (k: value: { ${k} = def // { inherit value; }; }))
(mapAttrsToList (k: value: { ${k} = def // { inherit value; }; }))
(pushDownProperties value)
)
opt.definitionsWithLocations
@ -985,9 +997,9 @@ let
);
in
assert opt.type.name == "attrsOf" || opt.type.name == "lazyAttrsOf";
lib.mapAttrs
mapAttrs
(k: v:
let merging = lib.mergeDefinitions (opt.loc ++ [k]) opt.type.nestedTypes.elemType v;
let merging = mergeDefinitions (opt.loc ++ [k]) opt.type.nestedTypes.elemType v;
in {
value = merging.mergedValue;
inherit (merging.defsFinal') highestPrio;
@ -1023,9 +1035,9 @@ let
mkForce = mkOverride 50;
mkVMOverride = mkOverride 10; # used by nixos-rebuild build-vm
defaultPriority = lib.warnIf (lib.isInOldestRelease 2305) "lib.modules.defaultPriority is deprecated, please use lib.modules.defaultOverridePriority instead." defaultOverridePriority;
defaultPriority = warnIf (isInOldestRelease 2305) "lib.modules.defaultPriority is deprecated, please use lib.modules.defaultOverridePriority instead." defaultOverridePriority;
mkFixStrictness = lib.warn "lib.mkFixStrictness has no effect and will be removed. It returns its argument unmodified, so you can just remove any calls." id;
mkFixStrictness = warn "lib.mkFixStrictness has no effect and will be removed. It returns its argument unmodified, so you can just remove any calls." id;
mkOrder = priority: content:
{ _type = "order";
@ -1121,7 +1133,7 @@ let
inherit from to;
visible = false;
warn = true;
use = builtins.trace "Obsolete option `${showOption from}' is used. It was renamed to `${showOption to}'.";
use = trace "Obsolete option `${showOption from}' is used. It was renamed to `${showOption to}'.";
};
mkRenamedOptionModuleWith = {
@ -1139,8 +1151,8 @@ let
}: doRename {
inherit from to;
visible = false;
warn = lib.isInOldestRelease sinceRelease;
use = lib.warnIf (lib.isInOldestRelease sinceRelease)
warn = isInOldestRelease sinceRelease;
use = warnIf (isInOldestRelease sinceRelease)
"Obsolete option `${showOption from}' is used. It was renamed to `${showOption to}'.";
};
@ -1372,8 +1384,8 @@ let
config = lib.importTOML file;
};
private = lib.mapAttrs
(k: lib.warn "External use of `lib.modules.${k}` is deprecated. If your use case isn't covered by non-deprecated functions, we'd like to know more and perhaps support your use case well, instead of providing access to these low level functions. In this case please open an issue in https://github.com/nixos/nixpkgs/issues/.")
private = mapAttrs
(k: warn "External use of `lib.modules.${k}` is deprecated. If your use case isn't covered by non-deprecated functions, we'd like to know more and perhaps support your use case well, instead of providing access to these low level functions. In this case please open an issue in https://github.com/nixos/nixpkgs/issues/.")
{
inherit
applyModuleArgsIfFunction

View file

@ -0,0 +1,49 @@
{ lib }:
let
inherit (import ./internal.nix { inherit lib; }) _ipv6;
in
{
ipv6 = {
/**
Creates an `IPv6Address` object from an IPv6 address given as a string. If
the prefix length is omitted, it defaults to 128. The parser is limited to
the first two text forms of IPv6 addresses defined in RFC 4291; the mixed
form "x:x:x:x:x:x:d.d.d.d" is not yet implemented. Addresses are NOT
compressed, so they do not always match the canonical text representation
of IPv6 addresses defined in RFC 5952.
# Type
```
fromString :: String -> IPv6Address
```
# Examples
```nix
fromString "2001:DB8::ffff/32"
=> {
address = "2001:db8:0:0:0:0:0:ffff";
prefixLength = 32;
}
```
# Arguments
- [addr] An IPv6 address with optional prefix length.
*/
fromString =
addr:
let
splittedAddr = _ipv6.split addr;
addrInternal = splittedAddr.address;
prefixLength = splittedAddr.prefixLength;
address = _ipv6.toStringFromExpandedIp addrInternal;
in
{
inherit address prefixLength;
};
};
}
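For orientation, a minimal REPL-style sketch of the new entry point defined above; the result assumes the default prefix length of 128 that `_ipv6.split` applies when no `/NN` suffix is present:
```nix
lib.network.ipv6.fromString "2001:DB8::1"
=> {
  address = "2001:db8:0:0:0:0:0:1";
  prefixLength = 128;
}
```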

View file

@ -0,0 +1,209 @@
{
lib ? import ../.,
}:
let
inherit (builtins)
map
match
genList
length
concatMap
head
toString
;
inherit (lib) lists strings trivial;
inherit (lib.lists) last;
/*
IPv6 addresses are 128-bit identifiers. The preferred form is 'x:x:x:x:x:x:x:x',
where the 'x's are one to four hexadecimal digits of the eight 16-bit pieces of
the address. See RFC 4291.
*/
ipv6Bits = 128;
ipv6Pieces = 8; # 'x:x:x:x:x:x:x:x'
ipv6PieceBits = 16; # One piece in range from 0 to 0xffff.
ipv6PieceMaxValue = 65535; # 2^16 - 1
in
let
/**
Expands an IPv6 address by removing the "::" compression and padding it
with the necessary number of zero pieces. Converts the address from a string
to a list of strings which can then be parsed with `parseExpandedIpv6`.
Throws an error when the address is malformed.
# Type: String -> [ String ]
# Example:
```nix
expandIpv6 "2001:DB8::ffff"
=> ["2001" "DB8" "0" "0" "0" "0" "0" "ffff"]
```
*/
expandIpv6 =
addr:
if match "^[0-9A-Fa-f:]+$" addr == null then
throw "${addr} contains malformed characters for IPv6 address"
else
let
pieces = strings.splitString ":" addr;
piecesNoEmpty = lists.remove "" pieces;
piecesNoEmptyLen = length piecesNoEmpty;
zeros = genList (_: "0") (ipv6Pieces - piecesNoEmptyLen);
hasPrefix = strings.hasPrefix "::" addr;
hasSuffix = strings.hasSuffix "::" addr;
hasInfix = strings.hasInfix "::" addr;
in
if addr == "::" then
zeros
else if
let
emptyCount = length pieces - piecesNoEmptyLen;
emptyExpected =
# splitString produces two empty pieces when "::" in the beginning
# or in the end, and only one when in the middle of an address.
if hasPrefix || hasSuffix then
2
else if hasInfix then
1
else
0;
in
emptyCount != emptyExpected
|| (hasInfix && piecesNoEmptyLen >= ipv6Pieces) # "::" compresses at least one group of zeros.
|| (!hasInfix && piecesNoEmptyLen != ipv6Pieces)
then
throw "${addr} is not a valid IPv6 address"
# Create a list of 8 elements, filling some of them with zeros depending
# on where the "::" was found.
else if hasPrefix then
zeros ++ piecesNoEmpty
else if hasSuffix then
piecesNoEmpty ++ zeros
else if hasInfix then
concatMap (piece: if piece == "" then zeros else [ piece ]) pieces
else
pieces;
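As a small, hand-evaluated sketch of the zero-filling branches above (a leading "::" versus a "::" in the middle of an address):
```nix
expandIpv6 "::1"
=> [ "0" "0" "0" "0" "0" "0" "0" "1" ]

expandIpv6 "1::1"
=> [ "1" "0" "0" "0" "0" "0" "0" "1" ]
```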
/**
Parses an expanded IPv6 address (see `expandIpv6`), converting each piece
from a string to a u16 integer. Returns the internal representation of the
IPv6 address (a list of integers) that can be easily processed by other
helper functions.
Throws an error if some element is not a valid u16 integer.
# Type: [ String ] -> IPv6
# Example:
```nix
parseExpandedIpv6 ["2001" "DB8" "0" "0" "0" "0" "0" "ffff"]
=> [8193 3512 0 0 0 0 0 65535]
```
*/
parseExpandedIpv6 =
addr:
assert lib.assertMsg (
length addr == ipv6Pieces
) "parseExpandedIpv6: expected list of integers with ${ipv6Pieces} elements";
let
u16FromHexStr =
hex:
let
parsed = trivial.fromHexString hex;
in
if 0 <= parsed && parsed <= ipv6PieceMaxValue then
parsed
else
throw "0x${hex} is not a valid u16 integer";
in
map (piece: u16FromHexStr piece) addr;
in
let
/**
Parses an IPv6 address from a string to the internal representation (list
of integers).
# Type: String -> IPv6
# Example:
```nix
parseIpv6FromString "2001:DB8::ffff"
=> [8193 3512 0 0 0 0 0 65535]
```
*/
parseIpv6FromString = addr: parseExpandedIpv6 (expandIpv6 addr);
in
{
/*
Internally, an IPv6 address is stored as a list of eight 16-bit integers.
Wherever `IPv6` appears in the internal function docs, it refers to such a
list of integers as produced by one of the internal parsers, such as
`parseIpv6FromString`.
*/
_ipv6 = {
/**
Converts an internal representation of an IPv6 address (i.e., a list
of integers) to a string. The returned string is not a canonical
representation as defined in RFC 5952, i.e. zeros are not compressed.
# Type: IPv6 -> String
# Example:
```nix
toStringFromExpandedIp [8193 3512 0 0 0 0 0 65535]
=> "2001:db8:0:0:0:0:0:ffff"
```
*/
toStringFromExpandedIp =
pieces: strings.concatMapStringsSep ":" (piece: strings.toLower (trivial.toHexString piece)) pieces;
/**
Extract an address and subnet prefix length from a string. The subnet
prefix length is optional and defaults to 128. The resulting address and
prefix length are validated and converted to an internal representation
that can be used by other functions.
# Type: String -> { address :: IPv6, prefixLength :: Int }
# Example:
```nix
split "2001:DB8::ffff/32"
=> {
address = [8193 3512 0 0 0 0 0 65535];
prefixLength = 32;
}
```
*/
split =
addr:
let
splitted = strings.splitString "/" addr;
splittedLength = length splitted;
in
if splittedLength == 1 then # [ ip ]
{
address = parseIpv6FromString addr;
prefixLength = ipv6Bits;
}
else if splittedLength == 2 then # [ ip subnet ]
{
address = parseIpv6FromString (head splitted);
prefixLength =
let
n = strings.toInt (last splitted);
in
if 1 <= n && n <= ipv6Bits then
n
else
throw "${addr} IPv6 subnet should be in range [1;${toString ipv6Bits}], got ${toString n}";
}
else
throw "${addr} is not a valid IPv6 address in CIDR notation";
};
}
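Putting the internal helpers together, a hedged worked example of the representation described in the `_ipv6` comment above; `internal` is the binding used by lib/tests/network.sh, and the values were derived by hand from the parsers:
```nix
internal._ipv6.split "2001:db8::1/64"
=> {
  address = [ 8193 3512 0 0 0 0 0 1 ]; # eight 16-bit integers
  prefixLength = 64;
}

internal._ipv6.toStringFromExpandedIp [ 8193 3512 0 0 0 0 0 1 ]
=> "2001:db8:0:0:0:0:0:1"
```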

View file

@ -157,6 +157,69 @@ rec {
*/
replicate = n: s: concatStrings (lib.lists.replicate n s);
/*
Remove leading and trailing whitespace from a string.
Whitespace is defined as any of the following characters:
" ", "\t" "\r" "\n"
Type: trim :: string -> string
Example:
trim " hello, world! "
=> "hello, world!"
*/
trim = trimWith {
start = true;
end = true;
};
/*
Remove leading and/or trailing whitespace from a string.
To remove both leading and trailing whitespace, you can also use [`trim`](#function-library-lib.strings.trim)
Whitespace is defined as any of the following characters:
" ", "\t" "\r" "\n"
Type: trimWith :: { start ? false, end ? false } -> string -> string
Example:
trimWith { start = true; } "   hello, world!   "
=> "hello, world! "
trimWith { end = true; } "   hello, world!   "
=> " hello, world!"
*/
trimWith =
{
# Trim leading whitespace (`false` by default)
start ? false,
# Trim trailing whitespace (`false` by default)
end ? false,
}:
s:
let
# Define our own whitespace character class instead of using
# `[:space:]`, which is not well-defined.
chars = " \t\r\n";
# To match up until trailing whitespace, we need to capture a
# group that ends with a non-whitespace character.
regex =
if start && end then
"[${chars}]*(.*[^${chars}])[${chars}]*"
else if start then
"[${chars}]*(.*)"
else if end then
"(.*[^${chars}])[${chars}]*"
else
"(.*)";
# If the string was empty or entirely whitespace,
# then the regex may not match and `res` will be `null`.
res = match regex s;
in
optionalString (res != null) (head res);
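A short sketch of the edge case noted in the comment above: for an empty or all-whitespace input, `match` returns `null`, so the result collapses to the empty string.
```nix
trimWith { start = true; end = true; } "  \t\n"
=> ""
```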
/* Construct a Unix-style, colon-separated search path consisting of
the given `subDir` appended to each of the given paths.

View file

@ -323,6 +323,7 @@ let
os =
/**/ if rust ? platform then rust.platform.os or "none"
else if final.isDarwin then "macos"
else if final.isWasm && !final.isWasi then "unknown" # Needed for {wasm32,wasm64}-unknown-unknown.
else final.parsed.kernel.name;
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_family

View file

@ -24,6 +24,5 @@
# "armv5tel-linux" is excluded because it is not bootstrapped
"powerpc64le-linux"
"riscv64-linux"
# "x86_64-freebsd" is excluded because it is mostly broken
"x86_64-freebsd"
]

View file

@ -58,6 +58,7 @@ let
genList
getExe
getExe'
getLicenseFromSpdxIdOr
groupBy
groupBy'
hasAttrByPath
@ -102,6 +103,7 @@ let
testAllTrue
toBaseDigits
toHexString
fromHexString
toInt
toIntBase10
toShellVars
@ -286,6 +288,21 @@ runTests {
expected = "FA";
};
testFromHexStringFirstExample = {
expr = fromHexString "FF";
expected = 255;
};
testFromHexStringSecondExample = {
expr = fromHexString (builtins.hashString "sha256" "test");
expected = 9223372036854775807;
};
testFromHexStringWithPrefix = {
expr = fromHexString "0Xf";
expected = 15;
};
testToBaseDigits = {
expr = toBaseDigits 2 6;
expected = [ 1 1 0 ];
@ -352,6 +369,72 @@ runTests {
expected = "hellohellohellohellohello";
};
# Test various strings are trimmed correctly
testTrimString = {
expr =
let
testValues = f: mapAttrs (_: f) {
empty = "";
cr = "\r";
lf = "\n";
tab = "\t";
spaces = " ";
leading = " Hello, world";
trailing = "Hello, world ";
mixed = " Hello, world ";
mixed-tabs = " \t\tHello, world \t \t ";
multiline = " Hello,\n world! ";
multiline-crlf = " Hello,\r\n world! ";
};
in
{
leading = testValues (strings.trimWith { start = true; });
trailing = testValues (strings.trimWith { end = true; });
both = testValues strings.trim;
};
expected = {
leading = {
empty = "";
cr = "";
lf = "";
tab = "";
spaces = "";
leading = "Hello, world";
trailing = "Hello, world ";
mixed = "Hello, world ";
mixed-tabs = "Hello, world \t \t ";
multiline = "Hello,\n world! ";
multiline-crlf = "Hello,\r\n world! ";
};
trailing = {
empty = "";
cr = "";
lf = "";
tab = "";
spaces = "";
leading = " Hello, world";
trailing = "Hello, world";
mixed = " Hello, world";
mixed-tabs = " \t\tHello, world";
multiline = " Hello,\n world!";
multiline-crlf = " Hello,\r\n world!";
};
both = {
empty = "";
cr = "";
lf = "";
tab = "";
spaces = "";
leading = "Hello, world";
trailing = "Hello, world";
mixed = "Hello, world";
mixed-tabs = "Hello, world";
multiline = "Hello,\n world!";
multiline-crlf = "Hello,\r\n world!";
};
};
};
testSplitStringsSimple = {
expr = strings.splitString "." "a.b.c.d";
expected = [ "a" "b" "c" "d" ];
@ -2307,6 +2390,25 @@ runTests {
getExe' { type = "derivation"; } "dir/executable"
);
testGetLicenseFromSpdxIdOrExamples = {
expr = [
(getLicenseFromSpdxIdOr "MIT" null)
(getLicenseFromSpdxIdOr "mIt" null)
(getLicenseFromSpdxIdOr "MY LICENSE" lib.licenses.free)
(getLicenseFromSpdxIdOr "MY LICENSE" null)
];
expected = [
lib.licenses.mit
lib.licenses.mit
lib.licenses.free
null
];
};
testGetLicenseFromSpdxIdOrThrow = testingThrow (
getLicenseFromSpdxIdOr "MY LICENSE" (throw "No SPDX ID matches MY LICENSE")
);
testPlatformMatch = {
expr = meta.platformMatch { system = "x86_64-linux"; } "x86_64-linux";
expected = true;

View file

@ -94,6 +94,8 @@ checkConfigOutput '^true$' config.result ./module-argument-default.nix
# gvariant
checkConfigOutput '^true$' config.assertion ./gvariant.nix
checkConfigOutput '"ok"' config.result ./specialArgs-lib.nix
# https://github.com/NixOS/nixpkgs/pull/131205
# We currently throw this error already in `config`, but throwing in `config.wrong1` would be acceptable.
checkConfigError 'It seems as if you.re trying to declare an option by placing it into .config. rather than .options.' config.wrong1 ./error-mkOption-in-config.nix

View file

@ -1,4 +1,4 @@
{ lib, ... }:
{ ... }:
{
config = {

View file

@ -1,4 +1,4 @@
{ lib, custom, ... }:
{ custom, ... }:
{
config = {

View file

@ -1,4 +1,4 @@
{ lib, ... }:
{ ... }:
{
disabledModules = [ ./declare-enable.nix ];

View file

@ -1,4 +1,4 @@
{ lib, ... }:
{ ... }:
{
disabledModules = [ (toString ./define-enable.nix) ];

View file

@ -1,4 +1,4 @@
{ lib, ... }:
{ ... }:
{
disabledModules = [ ./define-enable.nix ];

View file

@ -1,4 +1,4 @@
{ lib, ... }:
{ ... }:
{
disabledModules = [ "define-enable.nix" "declare-enable.nix" ];

View file

@ -1,4 +1,4 @@
{ config, lib, ... }:
{ config, ... }:
{
config = {
services.foo.enable = true;

View file

@ -1,4 +1,4 @@
{ config, lib, ... }:
{ config, ... }:
{
config = {
services.foos."".bar = "baz";

View file

@ -1,4 +1,4 @@
{ config, lib, options, ... }:
{ config, options, ... }:
{
config = {
result =

View file

@ -1,4 +1,4 @@
{ lib, ... }:
{ ... }:
{
imports = [

View file

@ -1,4 +1,4 @@
{ lib, ... }:
{ ... }:
let
typeless =

View file

@ -0,0 +1,28 @@
{ config, lib, ... }:
{
options = {
result = lib.mkOption { };
weird = lib.mkOption {
type = lib.types.submoduleWith {
# I generally recommend against overriding lib, because that leads to
# slightly incompatible dialects of the module system.
# Nonetheless, it's worth guarding the property that the module system
# evaluates with a completely custom lib, as a matter of separation of
# concerns.
specialArgs.lib = { };
modules = [ ];
};
};
};
config.weird = args@{ ... /* note the lack of a `lib` argument */ }:
assert args.lib == { };
assert args.specialArgs == { lib = { }; };
{
options.foo = lib.mkOption { };
config.foo = lib.mkIf true "alright";
};
config.result =
assert config.weird.foo == "alright";
"ok";
}

third_party/nixpkgs/lib/tests/network.sh (new executable file)
View file

@ -0,0 +1,117 @@
#!/usr/bin/env bash
# Tests lib/network.nix
# Run:
# [nixpkgs]$ lib/tests/network.sh
# or:
# [nixpkgs]$ nix-build lib/tests/release.nix
set -euo pipefail
shopt -s inherit_errexit
if [[ -n "${TEST_LIB:-}" ]]; then
NIX_PATH=nixpkgs="$(dirname "$TEST_LIB")"
else
NIX_PATH=nixpkgs="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.."; pwd)"
fi
export NIX_PATH
die() {
echo >&2 "test case failed: " "$@"
exit 1
}
tmp="$(mktemp -d)"
clean_up() {
rm -rf "$tmp"
}
trap clean_up EXIT SIGINT SIGTERM
work="$tmp/work"
mkdir "$work"
cd "$work"
prefixExpression='
let
lib = import <nixpkgs/lib>;
internal = import <nixpkgs/lib/network/internal.nix> {
inherit lib;
};
in
with lib;
with lib.network;
'
expectSuccess() {
local expr=$1
local expectedResult=$2
if ! result=$(nix-instantiate --eval --strict --json --show-trace \
--expr "$prefixExpression ($expr)"); then
die "$expr failed to evaluate, but it was expected to succeed"
fi
if [[ ! "$result" == "$expectedResult" ]]; then
die "$expr == $result, but $expectedResult was expected"
fi
}
expectSuccessRegex() {
local expr=$1
local expectedResultRegex=$2
if ! result=$(nix-instantiate --eval --strict --json --show-trace \
--expr "$prefixExpression ($expr)"); then
die "$expr failed to evaluate, but it was expected to succeed"
fi
if [[ ! "$result" =~ $expectedResultRegex ]]; then
die "$expr == $result, but $expectedResultRegex was expected"
fi
}
expectFailure() {
local expr=$1
local expectedErrorRegex=$2
if result=$(nix-instantiate --eval --strict --json --show-trace 2>"$work/stderr" \
--expr "$prefixExpression ($expr)"); then
die "$expr evaluated successfully to $result, but it was expected to fail"
fi
if [[ ! "$(<"$work/stderr")" =~ $expectedErrorRegex ]]; then
die "Error was $(<"$work/stderr"), but $expectedErrorRegex was expected"
fi
}
# Internal functions
expectSuccess '(internal._ipv6.split "0:0:0:0:0:0:0:0").address' '[0,0,0,0,0,0,0,0]'
expectSuccess '(internal._ipv6.split "000a:000b:000c:000d:000e:000f:ffff:aaaa").address' '[10,11,12,13,14,15,65535,43690]'
expectSuccess '(internal._ipv6.split "::").address' '[0,0,0,0,0,0,0,0]'
expectSuccess '(internal._ipv6.split "::0000").address' '[0,0,0,0,0,0,0,0]'
expectSuccess '(internal._ipv6.split "::1").address' '[0,0,0,0,0,0,0,1]'
expectSuccess '(internal._ipv6.split "::ffff").address' '[0,0,0,0,0,0,0,65535]'
expectSuccess '(internal._ipv6.split "::000f").address' '[0,0,0,0,0,0,0,15]'
expectSuccess '(internal._ipv6.split "::1:1:1:1:1:1:1").address' '[0,1,1,1,1,1,1,1]'
expectSuccess '(internal._ipv6.split "1::").address' '[1,0,0,0,0,0,0,0]'
expectSuccess '(internal._ipv6.split "1:1:1:1:1:1:1::").address' '[1,1,1,1,1,1,1,0]'
expectSuccess '(internal._ipv6.split "1:1:1:1::1:1:1").address' '[1,1,1,1,0,1,1,1]'
expectSuccess '(internal._ipv6.split "1::1").address' '[1,0,0,0,0,0,0,1]'
expectFailure 'internal._ipv6.split "0:0:0:0:0:0:0:-1"' "contains malformed characters for IPv6 address"
expectFailure 'internal._ipv6.split "::0:"' "is not a valid IPv6 address"
expectFailure 'internal._ipv6.split ":0::"' "is not a valid IPv6 address"
expectFailure 'internal._ipv6.split "0::0:"' "is not a valid IPv6 address"
expectFailure 'internal._ipv6.split "0:0:"' "is not a valid IPv6 address"
expectFailure 'internal._ipv6.split "0:0:0:0:0:0:0:0:0"' "is not a valid IPv6 address"
expectFailure 'internal._ipv6.split "0:0:0:0:0:0:0:0:"' "is not a valid IPv6 address"
expectFailure 'internal._ipv6.split "::0:0:0:0:0:0:0:0"' "is not a valid IPv6 address"
expectFailure 'internal._ipv6.split "0::0:0:0:0:0:0:0"' "is not a valid IPv6 address"
expectFailure 'internal._ipv6.split "::10000"' "0x10000 is not a valid u16 integer"
expectSuccess '(internal._ipv6.split "::").prefixLength' '128'
expectSuccess '(internal._ipv6.split "::/1").prefixLength' '1'
expectSuccess '(internal._ipv6.split "::/128").prefixLength' '128'
expectFailure '(internal._ipv6.split "::/0").prefixLength' "IPv6 subnet should be in range \[1;128\], got 0"
expectFailure '(internal._ipv6.split "::/129").prefixLength' "IPv6 subnet should be in range \[1;128\], got 129"
expectFailure '(internal._ipv6.split "/::/").prefixLength' "is not a valid IPv6 address in CIDR notation"
# Library API
expectSuccess 'lib.network.ipv6.fromString "2001:DB8::ffff/64"' '{"address":"2001:db8:0:0:0:0:0:ffff","prefixLength":64}'
expectSuccess 'lib.network.ipv6.fromString "1234:5678:90ab:cdef:fedc:ba09:8765:4321/44"' '{"address":"1234:5678:90ab:cdef:fedc:ba09:8765:4321","prefixLength":44}'
echo >&2 tests ok

View file

@ -65,6 +65,9 @@ pkgs.runCommand "nixpkgs-lib-tests-nix-${nix.version}" {
echo "Running lib/tests/sources.sh"
TEST_LIB=$PWD/lib bash lib/tests/sources.sh
echo "Running lib/tests/network.sh"
TEST_LIB=$PWD/lib bash lib/tests/network.sh
echo "Running lib/fileset/tests.sh"
TEST_LIB=$PWD/lib bash lib/fileset/tests.sh

View file

@ -12,6 +12,9 @@ let
version
versionSuffix
warn;
inherit (lib)
isString
;
in {
## Simple (higher order) functions
@ -718,98 +721,97 @@ in {
importTOML = path:
builtins.fromTOML (builtins.readFile path);
## Warnings
# See https://github.com/NixOS/nix/issues/749. Eventually we'd like these
# to expand to Nix builtins that carry metadata so that Nix can filter out
# the INFO messages without parsing the message string.
#
# Usage:
# {
# foo = lib.warn "foo is deprecated" oldFoo;
# bar = lib.warnIf (bar == "") "Empty bar is deprecated" bar;
# }
#
# TODO: figure out a clever way to integrate location information from
# something like __unsafeGetAttrPos.
/**
Print a warning before returning the second argument. This function behaves
like `builtins.trace`, but requires a string message and formats it as a
warning, including the `warning: ` prefix.
To get a call stack trace and abort evaluation, set the environment variable
`NIX_ABORT_ON_WARN=true` and set the Nix options `--option pure-eval false --show-trace`
`warn` *`message`* *`value`*
Print a warning before returning the second argument.
See [`builtins.warn`](https://nix.dev/manual/nix/latest/language/builtins.html#builtins-warn) (Nix >= 2.23).
On older versions, the Nix 2.23 behavior is emulated with [`builtins.trace`](https://nix.dev/manual/nix/latest/language/builtins.html#builtins-trace), including the [`NIX_ABORT_ON_WARN`](https://nix.dev/manual/nix/latest/command-ref/conf-file#conf-abort-on-warn) behavior, but not the `nix.conf` setting or command line option.
# Inputs
`msg`
*`message`* (String)
: Warning message to print.
: Warning message to print before evaluating *`value`*.
`val`
*`value`* (any value)
: Value to return as-is.
# Type
```
string -> a -> a
String -> a -> a
```
*/
warn =
if lib.elem (builtins.getEnv "NIX_ABORT_ON_WARN") ["1" "true" "yes"]
then msg: builtins.trace "warning: ${msg}" (abort "NIX_ABORT_ON_WARN=true; warnings are treated as unrecoverable errors.")
else msg: builtins.trace "warning: ${msg}";
# Since Nix 2.23, https://github.com/NixOS/nix/pull/10592
builtins.warn or (
let mustAbort = lib.elem (builtins.getEnv "NIX_ABORT_ON_WARN") ["1" "true" "yes"];
in
# Do not eta reduce v, so that we have the same strictness as `builtins.warn`.
msg: v:
# `builtins.warn` requires a string message, so we enforce that in our implementation, so that callers aren't accidentally incompatible with newer Nix versions.
assert isString msg;
if mustAbort
then builtins.trace "evaluation warning: ${msg}" (abort "NIX_ABORT_ON_WARN=true; warnings are treated as unrecoverable errors.")
else builtins.trace "evaluation warning: ${msg}" v
);
/**
Like warn, but only warn when the first argument is `true`.
`warnIf` *`condition`* *`message`* *`value`*
Like `warn`, but only warn when the first argument is `true`.
# Inputs
`cond`
*`condition`* (Boolean)
: 1\. Function argument
: `true` to trigger the warning before continuing with *`value`*.
`msg`
*`message`* (String)
: 2\. Function argument
: Warning message to print before evaluating *`value`*.
`val`
*`value`* (any value)
: Value to return as-is.
# Type
```
bool -> string -> a -> a
Bool -> String -> a -> a
```
*/
warnIf = cond: msg: if cond then warn msg else x: x;
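A hedged usage sketch in the spirit of the usage comment removed earlier in this hunk; `cfg.legacyOption` and `newOption` are hypothetical names, not part of the source:
```nix
lib.warnIf (cfg.legacyOption != null)
  "`legacyOption` is deprecated, use `newOption` instead"
  cfg.legacyOption
```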
/**
Like warnIf, but negated (warn if the first argument is `false`).
`warnIfNot` *`condition`* *`message`* *`value`*
Like `warnIf`, but negated: warn if the first argument is `false`.
# Inputs
`cond`
*`condition`*
: 1\. Function argument
: `false` to trigger the warning before continuing with *`value`*.
`msg`
*`message`*
: 2\. Function argument
: Warning message to print before evaluating *`value`*.
`val`
*`value`*
: Value to return as-is.
# Type
```
bool -> string -> a -> a
Bool -> String -> a -> a
```
*/
warnIfNot = cond: msg: if cond then x: x else warn msg;
@ -1074,6 +1076,32 @@ in {
then v
else k: v;
/**
Convert a hexadecimal string to its integer representation.
# Type
```
fromHexString :: String -> Int
```
# Examples
```nix
fromHexString "FF"
=> 255
fromHexString (builtins.hashString "sha256" "test")
=> 9223372036854775807
```
*/
fromHexString = value:
let
noPrefix = lib.strings.removePrefix "0x" (lib.strings.toLower value);
in let
parsed = builtins.fromTOML "v=0x${noPrefix}";
in parsed.v;
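A step-by-step sketch of how the `fromTOML` trick above handles a prefixed, mixed-case input; the intermediate values follow from the code and agree with the tests in lib/tests/misc.nix:
```nix
# fromHexString "0XFF"
#   toLower              -> "0xff"
#   removePrefix "0x"    -> "ff"
#   fromTOML "v=0xff"    -> { v = 255; }
fromHexString "0XFF"
=> 255
```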
/**
Convert the given positive integer to a string of its hexadecimal
representation. For example:

View file

@ -73,7 +73,6 @@ let
outer_types =
rec {
__attrsFailEvaluation = true;
isType = type: x: (x._type or "") == type;
setType = typeName: value: value // {

File diff suppressed because it is too large.

View file

View file

@ -6,9 +6,9 @@ binaries (without the reliance on external inputs):
- `bootstrap-tools`: an archive with the compiler toolchain and other
helper tools enough to build the rest of the `nixpkgs`.
- initial binaries needed to unpack `bootstrap-tools.*`. On `linux`
it's just `busybox`, on `darwin` it is unpack.nar.xz which contains
the binaries and script needed to unpack the tools. These binaries
can be executed directly from the store.
it's just `busybox`, on `darwin` and `freebsd` it is unpack.nar.xz
which contains the binaries and script needed to unpack the tools.
These binaries can be executed directly from the store.
These are called "bootstrap files".

View file

@ -95,6 +95,7 @@ CROSS_TARGETS=(
powerpc64-unknown-linux-gnuabielfv2
powerpc64le-unknown-linux-gnu
riscv64-unknown-linux-gnu
x86_64-unknown-freebsd
)
is_cross() {
@ -163,6 +164,7 @@ for target in "${targets[@]}"; do
case "$target" in
*linux*) nixpkgs_prefix="pkgs/stdenv/linux" ;;
*darwin*) nixpkgs_prefix="pkgs/stdenv/darwin" ;;
*freebsd*) nixpkgs_prefix="pkgs/stdenv/freebsd" ;;
*) die "don't know where to put '$target'" ;;
esac

View file

@ -6,6 +6,7 @@ argparse,,,,,,
basexx,,,,,,
binaryheap,,,,,,vcunat
busted,,,,,,
busted-htest,,,,,,mrcjkb
cassowary,,,,,,alerque
cldr,,,,,,alerque
compat53,,,,,,vcunat
@ -97,7 +98,7 @@ lua-utils.nvim,,,,,,mrcjkb
lua-yajl,,,,,,pstn
lua-iconv,,,,7.0.0,,
luuid,,,,20120509-2,,
luv,,,,1.44.2-1,,
luv,,,,1.48.0-2,,
lush.nvim,,,https://luarocks.org/dev,,,teto
lyaml,,,,,,lblasc
lz.n,,,,,,mrcjkb


View file

@ -142,7 +142,7 @@ class Repo:
return loaded
def prefetch(self, ref: Optional[str]) -> str:
print("Prefetching")
print("Prefetching %s", self.uri)
loaded = self._prefetch(ref)
return loaded["sha256"]
@ -266,6 +266,7 @@ class PluginDesc:
@staticmethod
def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":
log.debug("Loading row %s", row)
branch = row["branch"]
repo = make_repo(row["repo"], branch.strip())
repo.token = config.github_token
@ -328,7 +329,7 @@ def load_plugins_from_csv(
def run_nix_expr(expr, nixpkgs: str):
def run_nix_expr(expr, nixpkgs: str, **args):
'''
:param expr nix expression to fetch current plugins
:param nixpkgs Path towards a nixpkgs checkout
@ -347,7 +348,7 @@ def run_nix_expr(expr, nixpkgs: str):
nix_path,
]
log.debug("Running command: %s", " ".join(cmd))
out = subprocess.check_output(cmd, timeout=90)
out = subprocess.check_output(cmd, **args)
data = json.loads(out)
return data
@ -736,6 +737,7 @@ def rewrite_input(
redirects: Redirects = {},
append: List[PluginDesc] = [],
):
log.info("Rewriting input file %s", input_file)
plugins = load_plugins_from_csv(
config,
input_file,
@ -744,10 +746,14 @@ def rewrite_input(
plugins.extend(append)
if redirects:
log.debug("Dealing with deprecated plugins listed in %s", deprecated)
cur_date_iso = datetime.now().strftime("%Y-%m-%d")
with open(deprecated, "r") as f:
deprecations = json.load(f)
# TODO parallelize this step
for pdesc, new_repo in redirects.items():
log.info("Rewriting input file %s", input_file)
new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)
old_plugin, _ = prefetch_plugin(pdesc)
new_plugin, _ = prefetch_plugin(new_pdesc)
@ -791,7 +797,7 @@ def update_plugins(editor: Editor, args):
start_time = time.time()
redirects = update()
duration = time.time() - start_time
print(f"The plugin update took {duration}s.")
print(f"The plugin update took {duration:.2f}s.")
editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)
autocommit = not args.no_commit

View file

@ -13,6 +13,7 @@
, include-overlays ? false
, keep-going ? null
, commit ? null
, skip-prompt ? null
}:
let
@ -158,7 +159,7 @@ let
to run all update scripts for all packages that list \`garbas\` as a maintainer
and have \`updateScript\` defined, or:
% nix-shell maintainers/scripts/update.nix --argstr package gnome.nautilus
% nix-shell maintainers/scripts/update.nix --argstr package nautilus
to run the update script for a specific package, or
@ -184,6 +185,10 @@ let
that support it by adding
--argstr commit true
to skip the prompt:
--argstr skip-prompt true
'';
/* Transform a matched package into an object for update.py.
@ -204,7 +209,8 @@ let
optionalArgs =
lib.optional (max-workers != null) "--max-workers=${max-workers}"
++ lib.optional (keep-going == "true") "--keep-going"
++ lib.optional (commit == "true") "--commit";
++ lib.optional (commit == "true") "--commit"
++ lib.optional (skip-prompt == "true") "--skip-prompt";
args = [ packagesJson ] ++ optionalArgs;
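For example, assuming only `keep-going` and `skip-prompt` are passed as `"true"` while `max-workers` and `commit` are left unset, the list above reduces to:
```nix
args = [ packagesJson "--keep-going" "--skip-prompt" ];
```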

View file

@ -91,9 +91,11 @@ def make_worktree() -> Generator[Tuple[str, str], None, None]:
target_directory = f'{wt}/nixpkgs'
subprocess.run(['git', 'worktree', 'add', '-b', branch_name, target_directory])
yield (target_directory, branch_name)
subprocess.run(['git', 'worktree', 'remove', '--force', target_directory])
subprocess.run(['git', 'branch', '-D', branch_name])
try:
yield (target_directory, branch_name)
finally:
subprocess.run(['git', 'worktree', 'remove', '--force', target_directory])
subprocess.run(['git', 'branch', '-D', branch_name])
async def commit_changes(name: str, merge_lock: asyncio.Lock, worktree: str, branch: str, changes: List[Dict]) -> None:
for change in changes:
@ -207,7 +209,7 @@ async def start_updates(max_workers: int, keep_going: bool, commit: bool, packag
eprint(e)
sys.exit(1)
def main(max_workers: int, keep_going: bool, commit: bool, packages_path: str) -> None:
def main(max_workers: int, keep_going: bool, commit: bool, packages_path: str, skip_prompt: bool) -> None:
with open(packages_path) as f:
packages = json.load(f)
@ -217,7 +219,8 @@ def main(max_workers: int, keep_going: bool, commit: bool, packages_path: str) -
eprint(f" - {package['name']}")
eprint()
confirm = input('Press Enter key to continue...')
confirm = '' if skip_prompt else input('Press Enter key to continue...')
if confirm == '':
eprint()
eprint('Running update for:')
@ -236,12 +239,13 @@ parser.add_argument('--max-workers', '-j', dest='max_workers', type=int, help='N
parser.add_argument('--keep-going', '-k', dest='keep_going', action='store_true', help='Do not stop after first failure')
parser.add_argument('--commit', '-c', dest='commit', action='store_true', help='Commit the changes')
parser.add_argument('packages', help='JSON file containing the list of package names and their update scripts')
parser.add_argument('--skip-prompt', '-s', dest='skip_prompt', action='store_true', help='Do not stop for prompts')
if __name__ == '__main__':
args = parser.parse_args()
try:
main(args.max_workers, args.keep_going, args.commit, args.packages)
main(args.max_workers, args.keep_going, args.commit, args.packages, args.skip_prompt)
except KeyboardInterrupt as e:
# Lets cancel outside of the main loop too.
sys.exit(130)

View file

@ -95,7 +95,10 @@ with lib.maintainers;
};
budgie = {
members = [ bobby285271 ];
members = [
bobby285271
getchoo
];
scope = "Maintain Budgie desktop environment";
shortName = "Budgie";
};
@ -359,6 +362,7 @@ with lib.maintainers;
geospatial = {
members = [
autra
imincik
l0b0
nh2
@ -496,6 +500,21 @@ with lib.maintainers;
shortName = "Jupyter";
};
k3s = {
githubTeams = [ "k3s" ];
members = [
euank
marcusramberg
mic92
rorosen
superherointj
wrmilling
yajo
];
scope = "Maintain K3s package, NixOS module, NixOS tests, update script";
shortName = "K3s";
};
kubernetes = {
members = [
johanot
@ -715,10 +734,7 @@ with lib.maintainers;
};
node = {
members = [
lilyinstarlight
winter
];
members = [ winter ];
scope = "Maintain Node.js runtimes and build tooling.";
shortName = "Node.js";
enableFeatureFreezePing = true;
@ -772,7 +788,7 @@ with lib.maintainers;
aanderse
drupol
ma27
patka
piotrkwiecinski
talyz
];
githubTeams = [ "php" ];
@ -925,6 +941,27 @@ with lib.maintainers;
shortName = "Steam";
};
stridtech = {
# Verify additions by approval of an already existing member of the team
members = [
superherointj
ulrikstrid
];
scope = "Group registration for Strid Tech AB team members who collectively maintain packages";
shortName = "StridTech";
};
swift = {
members = [
dduan
stephank
trepetti
trundle
];
scope = "Maintain Swift compiler suite for NixOS.";
shortName = "Swift";
};
systemd = {
members = [ ];
githubTeams = [ "systemd" ];

Some files were not shown because too many files have changed in this diff.