Compare commits
9 commits
73d36d54cd
...
fa903ba5da
Author | SHA1 | Date | |
---|---|---|---|
fa903ba5da | |||
2eca8a59b3 | |||
d287198d5a | |||
bd68caadae | |||
105b0813c1 | |||
b50ad2d685 | |||
b27583fad2 | |||
6eac2e2d20 | |||
d262598f6f |
3928 changed files with 99947 additions and 193718 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -25,6 +25,7 @@
|
|||
/go/trains/*/lukegb-trains.json
|
||||
/py/icalfilter/config/*
|
||||
/rust/*/target/*
|
||||
result
|
||||
result-*
|
||||
result
|
||||
|
||||
|
|
|
@ -8,7 +8,6 @@
|
|||
plex-pass = import ./plex-pass.nix args;
|
||||
secretsync = import ./secretsync args;
|
||||
copybara = pkgs.callPackage ./copybara.nix { };
|
||||
hg-git = import ./hg-git.nix args;
|
||||
erbium = import ./erbium args;
|
||||
sheepshaver = import ./sheepshaver.nix args;
|
||||
intermec-cups-driver = pkgs.callPackage ./intermec-cups-driver.nix {};
|
||||
|
@ -26,7 +25,8 @@
|
|||
lukegb-wallpapers = pkgs.callPackage ./lukegb-wallpapers {};
|
||||
rundeck-bin = pkgs.callPackage ./rundeck-bin {};
|
||||
mercurial = pkgs.mercurialFull.withExtensions (pm: with pm; [
|
||||
hg-evolve
|
||||
(hg-evolve.override { mercurial = pkgs.mercurialFull; })
|
||||
(hg-git.override { mercurial = pkgs.mercurialFull; })
|
||||
]);
|
||||
ubi_reader = pkgs.python3Packages.callPackage ./ubi_reader {};
|
||||
prometheus-bird-exporter-lfty = pkgs.callPackage ./prometheus-bird-exporter-lfty.nix {};
|
||||
|
|
|
@ -1,26 +0,0 @@
|
|||
# SPDX-FileCopyrightText: 2020 Luke Granger-Brown <depot@lukegb.com>
|
||||
#
|
||||
# SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
{ pkgs, ... }:
|
||||
with pkgs;
|
||||
with pkgs.python3Packages;
|
||||
(buildPythonPackage rec {
|
||||
pname = "hg-git";
|
||||
version = "0.9.0a1";
|
||||
|
||||
src = fetchPypi {
|
||||
inherit pname version;
|
||||
sha256 = "1lhb813zljki3q10bx3n9d7c075s6ahhak5d3a1m6gaxmy6gzj5y";
|
||||
};
|
||||
|
||||
doCheck = false;
|
||||
|
||||
propagatedBuildInputs = [ dulwich ];
|
||||
|
||||
meta = with lib; {
|
||||
description = "Push and pull from a Git server using Mercurial";
|
||||
homepage = "http://hg-git.github.com/";
|
||||
license = licenses.gpl2;
|
||||
};
|
||||
})
|
|
@ -5,11 +5,11 @@
|
|||
|
||||
appimageTools.wrapType2 rec {
|
||||
pname = "worldofgoo2";
|
||||
version = "12329.171";
|
||||
version = "1.0.12478.15";
|
||||
|
||||
src = requireFile {
|
||||
name = "World_of_Goo_2-x86_64.${version}.appimage";
|
||||
hash = "sha256-/dOder333lqgKi+dQCIzeVtPObRYsn5LSYldXFXaK8o=";
|
||||
name = "World_of_Goo_2-x86_64-${version}.appimage";
|
||||
hash = "sha256-esFcDFxeCDdl1BVEYdTeHxgkLo0j9HMYmEnPK0fIEhA=";
|
||||
message = "Please download World of Goo 2 from the Humble Store and add it to the store.";
|
||||
};
|
||||
|
||||
|
|
9
third_party/nixpkgs/.git-blame-ignore-revs
vendored
9
third_party/nixpkgs/.git-blame-ignore-revs
vendored
|
@ -204,5 +204,14 @@ ce21e97a1f20dee15da85c084f9d1148d84f853b
|
|||
# sqlc: format with nixfmt
|
||||
2bdec131b2bb2c8563f4556d741d34ccb77409e2
|
||||
|
||||
# ant: format with nixfmt-rfc-style
|
||||
2538d58436b8d0b56d29780aeebf4bf720ddb9ea
|
||||
|
||||
# treewide: migrate packages to pkgs/by-name, take 1
|
||||
571c71e6f73af34a229414f51585738894211408
|
||||
|
||||
# format files with nixfmt (#347275)
|
||||
adb9714bd909df283c66bbd641bd631ff50a4260
|
||||
|
||||
# treewide: incus packages
|
||||
9ab59bb5fb943ad6740f64f5a79eae9642fb8211
|
||||
|
|
39
third_party/nixpkgs/.github/ISSUE_TEMPLATE/tracking_issue.md
vendored
Normal file
39
third_party/nixpkgs/.github/ISSUE_TEMPLATE/tracking_issue.md
vendored
Normal file
|
@ -0,0 +1,39 @@
|
|||
---
|
||||
name: Tracking issue
|
||||
about: Provide an overview on a multi-step effort
|
||||
title: 'Tracking issue: ISSUENAME'
|
||||
labels: '5.scope: tracking'
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
### Tracking description
|
||||
|
||||
<!--
|
||||
Provide a brief summary of the project or multi-step effort. Explain why it is
|
||||
necessary and what the goals are.
|
||||
|
||||
You may include way to reproduce the problem, screenshots or any information
|
||||
that you find relevant.
|
||||
-->
|
||||
|
||||
#### Reference Issue(s)/PR(s)
|
||||
|
||||
- ...
|
||||
|
||||
### Follow-up issues/notes
|
||||
|
||||
<!--
|
||||
List any follow-up issues that need to be addressed after the main tasks are
|
||||
completed, or any notes related to the project.
|
||||
-->
|
||||
|
||||
#### Additional context
|
||||
Add any other context about the problem here.
|
||||
|
||||
---
|
||||
|
||||
Add a :+1: [reaction] to [issues you find important].
|
||||
|
||||
[reaction]: https://github.blog/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/
|
||||
[issues you find important]: https://github.com/NixOS/nixpkgs/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc
|
22
third_party/nixpkgs/.github/labeler.yml
vendored
22
third_party/nixpkgs/.github/labeler.yml
vendored
|
@ -176,13 +176,32 @@
|
|||
- any:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- nixos/modules/programs/java.nix
|
||||
# Distributions
|
||||
- pkgs/development/compilers/adoptopenjdk-icedtea-web/**/*
|
||||
- pkgs/development/compilers/corretto/**/*
|
||||
- pkgs/development/compilers/graalvm/**/*
|
||||
- pkgs/development/compilers/openjdk/**/*
|
||||
- pkgs/development/compilers/semeru-bin/**/*
|
||||
- pkgs/development/compilers/temurin-bin/**/*
|
||||
- pkgs/development/compilers/zulu/**/*
|
||||
# Documentation
|
||||
- doc/languages-frameworks/java.section.md
|
||||
# Gradle
|
||||
- doc/languages-frameworks/gradle.section.md
|
||||
- pkgs/development/tools/build-managers/gradle/**/*
|
||||
- pkgs/by-name/gr/gradle-completion/**/*
|
||||
# Maven
|
||||
- pkgs/by-name/ma/maven/**/*
|
||||
- doc/languages-frameworks/maven.section.md
|
||||
# Ant
|
||||
- pkgs/by-name/an/ant/**/*
|
||||
# javaPackages attrset
|
||||
- pkgs/development/java-modules/**/*
|
||||
- pkgs/top-level/java-packages.nix
|
||||
# Maintainer tooling
|
||||
- pkgs/by-name/ni/nixpkgs-openjdk-updater/**/*
|
||||
# Misc
|
||||
- nixos/modules/programs/java.nix
|
||||
|
||||
"6.topic: jitsi":
|
||||
- any:
|
||||
|
@ -426,6 +445,7 @@
|
|||
- doc/languages-frameworks/ruby.section.md
|
||||
- pkgs/development/interpreters/ruby/**/*
|
||||
- pkgs/development/ruby-modules/**/*
|
||||
- pkgs/top-level/ruby-packages.nix
|
||||
|
||||
"6.topic: rust":
|
||||
- any:
|
||||
|
|
|
@ -8,26 +8,31 @@ on:
|
|||
# the GitHub repository. This means that it should not evaluate user input in a
|
||||
# way that allows code injection.
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
backport:
|
||||
permissions:
|
||||
contents: write # for korthout/backport-action to create branch
|
||||
pull-requests: write # for korthout/backport-action to create PR to backport
|
||||
name: Backport Pull Request
|
||||
if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Use a GitHub App to create the PR so that CI gets triggered
|
||||
# The App is scoped to Repository > Contents and Pull Requests: write for Nixpkgs
|
||||
- uses: actions/create-github-app-token@5d869da34e18e7287c1daad50e0b8ea0f506ce69 # v1.11.0
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ vars.BACKPORT_APP_ID }}
|
||||
private-key: ${{ secrets.BACKPORT_PRIVATE_KEY }}
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ github.event.pull_request.head.sha }}
|
||||
token: ${{ steps.app-token.outputs.token }}
|
||||
- name: Create backport PRs
|
||||
uses: korthout/backport-action@bd410d37cdcae80be6d969823ff5a225fe5c833f # v3.0.2
|
||||
with:
|
||||
# Config README: https://github.com/korthout/backport-action#backport-action
|
||||
copy_labels_pattern: 'severity:\ssecurity'
|
||||
github_token: ${{ steps.app-token.outputs.token }}
|
||||
pull_description: |-
|
||||
Bot-based backport to `${target_branch}`, triggered by a label in #${pull_number}.
|
||||
|
||||
|
|
|
@ -83,8 +83,9 @@ jobs:
|
|||
|
||||
if (( "${#unformattedFiles[@]}" > 0 )); then
|
||||
echo "Some new/changed Nix files are not properly formatted"
|
||||
echo "Please go to the Nixpkgs root directory, run \`nix-shell\`, then:"
|
||||
echo "Please format them using the Nixpkgs-specific \`nixfmt\` by going to the Nixpkgs root directory, running \`nix-shell\`, then:"
|
||||
echo "nixfmt ${unformattedFiles[*]@Q}"
|
||||
echo "Make sure your branch is up to date with master, rebase if not."
|
||||
echo "If you're having trouble, please ping @NixOS/nix-formatting"
|
||||
exit 1
|
||||
fi
|
||||
|
|
|
@ -107,7 +107,7 @@ jobs:
|
|||
echo "$errors"
|
||||
else
|
||||
# just print in plain text
|
||||
echo "$errors" | sed 's/^:://'
|
||||
echo "${errors/::/}"
|
||||
echo # add one empty line
|
||||
fi
|
||||
failedFiles+=("$dest")
|
||||
|
|
|
@ -2,6 +2,9 @@ name: "Check shell"
|
|||
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- 'shell.nix'
|
||||
- 'ci/**'
|
||||
|
||||
permissions: {}
|
||||
|
||||
|
|
|
@ -38,7 +38,7 @@ jobs:
|
|||
nix_path: nixpkgs=https://github.com/NixOS/nixpkgs/archive/c473cc8714710179df205b153f4e9fa007107ff9.tar.gz
|
||||
- name: Checking EditorConfig
|
||||
run: |
|
||||
cat "$HOME/changed_files" | nix-shell -p editorconfig-checker --run 'xargs -r editorconfig-checker -disable-indent-size'
|
||||
< "$HOME/changed_files" nix-shell -p editorconfig-checker --run 'xargs -r editorconfig-checker -disable-indent-size'
|
||||
- if: ${{ failure() }}
|
||||
run: |
|
||||
echo "::error :: Hey! It looks like your changes don't follow our editorconfig settings. Read https://editorconfig.org/#download to configure your editor so you never see this error again."
|
||||
|
|
263
third_party/nixpkgs/.github/workflows/eval.yml
vendored
Normal file
263
third_party/nixpkgs/.github/workflows/eval.yml
vendored
Normal file
|
@ -0,0 +1,263 @@
|
|||
name: Eval
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
push:
|
||||
# Keep this synced with ci/request-reviews/dev-branches.txt
|
||||
branches:
|
||||
- master
|
||||
- staging
|
||||
- release-*
|
||||
- staging-*
|
||||
- haskell-updates
|
||||
- python-updates
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
get-merge-commit:
|
||||
uses: ./.github/workflows/get-merge-commit.yml
|
||||
|
||||
attrs:
|
||||
name: Attributes
|
||||
runs-on: ubuntu-latest
|
||||
needs: get-merge-commit
|
||||
outputs:
|
||||
mergedSha: ${{ needs.get-merge-commit.outputs.mergedSha }}
|
||||
baseSha: ${{ steps.baseSha.outputs.baseSha }}
|
||||
systems: ${{ steps.systems.outputs.systems }}
|
||||
steps:
|
||||
- name: Check out the PR at the test merge commit
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
# Add this to _all_ subsequent steps to skip them
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
with:
|
||||
ref: ${{ needs.get-merge-commit.outputs.mergedSha }}
|
||||
fetch-depth: 2
|
||||
path: nixpkgs
|
||||
|
||||
- name: Determine base commit
|
||||
if: github.event_name == 'pull_request_target' && needs.get-merge-commit.outputs.mergedSha
|
||||
id: baseSha
|
||||
run: |
|
||||
baseSha=$(git -C nixpkgs rev-parse HEAD^1)
|
||||
echo "baseSha=$baseSha" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
|
||||
- name: Evaluate the list of all attributes and get the systems matrix
|
||||
id: systems
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
run: |
|
||||
nix-build nixpkgs/ci -A eval.attrpathsSuperset
|
||||
echo "systems=$(<result/systems.json)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Upload the list of all attributes
|
||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
with:
|
||||
name: paths
|
||||
path: result/*
|
||||
|
||||
eval-aliases:
|
||||
name: Eval nixpkgs with aliases enabled
|
||||
runs-on: ubuntu-latest
|
||||
needs: attrs
|
||||
steps:
|
||||
- name: Check out the PR at the test merge commit
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ needs.attrs.outputs.mergedSha }}
|
||||
path: nixpkgs
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
|
||||
|
||||
- name: Query nixpkgs with aliases enabled to check for basic syntax errors
|
||||
run: |
|
||||
time nix-env -I ./nixpkgs -f ./nixpkgs -qa '*' --option restrict-eval true --option allow-import-from-derivation false >/dev/null
|
||||
|
||||
outpaths:
|
||||
name: Outpaths
|
||||
runs-on: ubuntu-latest
|
||||
needs: attrs
|
||||
# Skip this and future steps if the PR can't be merged
|
||||
if: needs.attrs.outputs.mergedSha
|
||||
strategy:
|
||||
matrix:
|
||||
system: ${{ fromJSON(needs.attrs.outputs.systems) }}
|
||||
steps:
|
||||
- name: Download the list of all attributes
|
||||
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
|
||||
with:
|
||||
name: paths
|
||||
path: paths
|
||||
|
||||
- name: Check out the PR at the test merge commit
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ needs.attrs.outputs.mergedSha }}
|
||||
path: nixpkgs
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
|
||||
|
||||
- name: Evaluate the ${{ matrix.system }} output paths for all derivation attributes
|
||||
env:
|
||||
MATRIX_SYSTEM: ${{ matrix.system }}
|
||||
run: |
|
||||
nix-build nixpkgs/ci -A eval.singleSystem \
|
||||
--argstr evalSystem "$MATRIX_SYSTEM" \
|
||||
--arg attrpathFile ./paths/paths.json \
|
||||
--arg chunkSize 10000
|
||||
# If it uses too much memory, slightly decrease chunkSize
|
||||
|
||||
- name: Upload the output paths and eval stats
|
||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
|
||||
if: needs.attrs.outputs.mergedSha
|
||||
with:
|
||||
name: intermediate-${{ matrix.system }}
|
||||
path: result/*
|
||||
|
||||
process:
|
||||
name: Process
|
||||
runs-on: ubuntu-latest
|
||||
needs: [ outpaths, attrs ]
|
||||
outputs:
|
||||
baseRunId: ${{ steps.baseRunId.outputs.baseRunId }}
|
||||
steps:
|
||||
- name: Download output paths and eval stats for all systems
|
||||
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
|
||||
with:
|
||||
pattern: intermediate-*
|
||||
path: intermediate
|
||||
|
||||
- name: Check out the PR at the test merge commit
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
ref: ${{ needs.attrs.outputs.mergedSha }}
|
||||
path: nixpkgs
|
||||
|
||||
- name: Install Nix
|
||||
uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
|
||||
|
||||
- name: Combine all output paths and eval stats
|
||||
run: |
|
||||
nix-build nixpkgs/ci -A eval.combine \
|
||||
--arg resultsDir ./intermediate \
|
||||
-o prResult
|
||||
|
||||
- name: Upload the combined results
|
||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
|
||||
with:
|
||||
name: result
|
||||
path: prResult/*
|
||||
|
||||
- name: Get base run id
|
||||
if: needs.attrs.outputs.baseSha
|
||||
id: baseRunId
|
||||
run: |
|
||||
# Get the latest eval.yml workflow run for the PR's base commit
|
||||
if ! run=$(gh api --method GET /repos/"$REPOSITORY"/actions/workflows/eval.yml/runs \
|
||||
-f head_sha="$BASE_SHA" -f event=push \
|
||||
--jq '.workflow_runs | sort_by(.run_started_at) | .[-1]') \
|
||||
|| [[ -z "$run" ]]; then
|
||||
echo "Could not find an eval.yml workflow run for $BASE_SHA, cannot make comparison"
|
||||
exit 0
|
||||
fi
|
||||
echo "Comparing against $(jq .html_url <<< "$run")"
|
||||
runId=$(jq .id <<< "$run")
|
||||
conclusion=$(jq -r .conclusion <<< "$run")
|
||||
|
||||
while [[ "$conclusion" == null ]]; do
|
||||
echo "Workflow not done, waiting 10 seconds before checking again"
|
||||
sleep 10
|
||||
conclusion=$(gh api /repos/"$REPOSITORY"/actions/runs/"$runId" --jq '.conclusion')
|
||||
done
|
||||
|
||||
if [[ "$conclusion" != "success" ]]; then
|
||||
echo "Workflow was not successful, cannot make comparison"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "baseRunId=$runId" >> "$GITHUB_OUTPUT"
|
||||
env:
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
BASE_SHA: ${{ needs.attrs.outputs.baseSha }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
if: steps.baseRunId.outputs.baseRunId
|
||||
with:
|
||||
name: result
|
||||
path: baseResult
|
||||
github-token: ${{ github.token }}
|
||||
run-id: ${{ steps.baseRunId.outputs.baseRunId }}
|
||||
|
||||
- name: Compare against the base branch
|
||||
if: steps.baseRunId.outputs.baseRunId
|
||||
run: |
|
||||
nix-build nixpkgs/ci -A eval.compare \
|
||||
--arg beforeResultDir ./baseResult \
|
||||
--arg afterResultDir ./prResult \
|
||||
-o comparison
|
||||
cat comparison/step-summary.md >> "$GITHUB_STEP_SUMMARY"
|
||||
# TODO: Request reviews from maintainers for packages whose files are modified in the PR
|
||||
|
||||
- name: Upload the combined results
|
||||
if: steps.baseRunId.outputs.baseRunId
|
||||
uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
|
||||
with:
|
||||
name: comparison
|
||||
path: comparison/*
|
||||
|
||||
# Separate job to have a very tightly scoped PR write token
|
||||
tag:
|
||||
name: Tag
|
||||
runs-on: ubuntu-latest
|
||||
needs: process
|
||||
if: needs.process.outputs.baseRunId
|
||||
permissions:
|
||||
pull-requests: write
|
||||
steps:
|
||||
- name: Download process result
|
||||
uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8
|
||||
with:
|
||||
name: comparison
|
||||
path: comparison
|
||||
|
||||
- name: Tagging pull request
|
||||
run: |
|
||||
# Get all currently set rebuild labels
|
||||
gh api \
|
||||
/repos/"$REPOSITORY"/issues/"$NUMBER"/labels \
|
||||
--jq '.[].name | select(startswith("10.rebuild"))' \
|
||||
| sort > before
|
||||
|
||||
# And the labels that should be there
|
||||
jq -r '.labels[]' comparison/changed-paths.json \
|
||||
| sort > after
|
||||
|
||||
# Remove the ones not needed anymore
|
||||
while read -r toRemove; do
|
||||
echo "Removing label $toRemove"
|
||||
gh api \
|
||||
--method DELETE \
|
||||
/repos/"$REPOSITORY"/issues/"$NUMBER"/labels/"$toRemove"
|
||||
done < <(comm -23 before after)
|
||||
|
||||
# And add the ones that aren't set already
|
||||
while read -r toAdd; do
|
||||
echo "Adding label $toAdd"
|
||||
gh api \
|
||||
--method POST \
|
||||
/repos/"$REPOSITORY"/issues/"$NUMBER"/labels \
|
||||
-f "labels[]=$toAdd"
|
||||
done < <(comm -13 before after)
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
REPOSITORY: ${{ github.repository }}
|
||||
NUMBER: ${{ github.event.number }}
|
43
third_party/nixpkgs/.github/workflows/get-merge-commit.yml
vendored
Normal file
43
third_party/nixpkgs/.github/workflows/get-merge-commit.yml
vendored
Normal file
|
@ -0,0 +1,43 @@
|
|||
name: Get merge commit
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
outputs:
|
||||
mergedSha:
|
||||
description: "The merge commit SHA"
|
||||
value: ${{ jobs.resolve-merge-commit.outputs.mergedSha }}
|
||||
|
||||
# We need a token to query the API, but it doesn't need any special permissions
|
||||
permissions: {}
|
||||
|
||||
jobs:
|
||||
resolve-merge-commit:
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
mergedSha: ${{ steps.merged.outputs.mergedSha }}
|
||||
steps:
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
path: base
|
||||
sparse-checkout: ci
|
||||
- name: Check if the PR can be merged and get the test merge commit
|
||||
id: merged
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
GH_EVENT: ${{ github.event_name }}
|
||||
run: |
|
||||
case "$GH_EVENT" in
|
||||
push)
|
||||
echo "mergedSha=${{ github.sha }}" >> "$GITHUB_OUTPUT"
|
||||
;;
|
||||
pull_request_target)
|
||||
if mergedSha=$(base/ci/get-merge-commit.sh ${{ github.repository }} ${{ github.event.number }}); then
|
||||
echo "Checking the merge commit $mergedSha"
|
||||
echo "mergedSha=$mergedSha" >> "$GITHUB_OUTPUT"
|
||||
else
|
||||
# Skipping so that no notifications are sent
|
||||
echo "Skipping the rest..."
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
rm -rf base
|
7
third_party/nixpkgs/.github/workflows/lint-actions.sh
vendored
Executable file
7
third_party/nixpkgs/.github/workflows/lint-actions.sh
vendored
Executable file
|
@ -0,0 +1,7 @@
|
|||
#!/usr/bin/env nix-shell
|
||||
#!nix-shell -i bash -p bash actionlint shellcheck -I nixpkgs=../..
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
|
||||
cd "$SCRIPT_DIR/../.."
|
||||
actionlint
|
|
@ -19,46 +19,34 @@ permissions: {}
|
|||
# There is a feature request for suppressing notifications on concurrency-canceled runs: https://github.com/orgs/community/discussions/13015
|
||||
|
||||
jobs:
|
||||
get-merge-commit:
|
||||
uses: ./.github/workflows/get-merge-commit.yml
|
||||
|
||||
check:
|
||||
name: nixpkgs-vet
|
||||
# This needs to be x86_64-linux, because we depend on the tooling being pre-built in the GitHub releases.
|
||||
runs-on: ubuntu-latest
|
||||
# This should take 1 minute at most, but let's be generous. The default of 6 hours is definitely too long.
|
||||
timeout-minutes: 10
|
||||
needs: get-merge-commit
|
||||
steps:
|
||||
# This checks out the base branch because of pull_request_target
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
with:
|
||||
path: base
|
||||
sparse-checkout: ci
|
||||
- name: Resolving the merge commit
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
if mergedSha=$(base/ci/get-merge-commit.sh ${{ github.repository }} ${{ github.event.number }}); then
|
||||
echo "Checking the merge commit $mergedSha"
|
||||
echo "mergedSha=$mergedSha" >> "$GITHUB_ENV"
|
||||
else
|
||||
echo "Skipping the rest..."
|
||||
fi
|
||||
rm -rf base
|
||||
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
|
||||
if: env.mergedSha
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
with:
|
||||
# pull_request_target checks out the base branch by default
|
||||
ref: ${{ env.mergedSha }}
|
||||
ref: ${{ needs.get-merge-commit.outputs.mergedSha }}
|
||||
# Fetches the merge commit and its parents
|
||||
fetch-depth: 2
|
||||
- name: Checking out base branch
|
||||
if: env.mergedSha
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
run: |
|
||||
base=$(mktemp -d)
|
||||
git worktree add "$base" "$(git rev-parse HEAD^1)"
|
||||
echo "base=$base" >> "$GITHUB_ENV"
|
||||
- uses: cachix/install-nix-action@08dcb3a5e62fa31e2da3d490afc4176ef55ecd72 # v30
|
||||
if: env.mergedSha
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
- name: Fetching the pinned tool
|
||||
if: env.mergedSha
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
# Update the pinned version using ci/nixpkgs-vet/update-pinned-tool.sh
|
||||
run: |
|
||||
# The pinned version of the tooling to use.
|
||||
|
@ -71,7 +59,7 @@ jobs:
|
|||
# Adds a result symlink as a GC root.
|
||||
nix-store --realise "$toolPath" --add-root result
|
||||
- name: Running nixpkgs-vet
|
||||
if: env.mergedSha
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
env:
|
||||
# Force terminal colors to be enabled. The library that `nixpkgs-vet` uses respects https://bixense.com/clicolors/
|
||||
CLICOLOR_FORCE: 1
|
||||
|
|
1
third_party/nixpkgs/.mailmap
vendored
1
third_party/nixpkgs/.mailmap
vendored
|
@ -14,6 +14,7 @@ Jörg Thalheim <joerg@thalheim.io> <Mic92@users.noreply.github.com>
|
|||
Lin Jian <me@linj.tech> <linj.dev@outlook.com>
|
||||
Lin Jian <me@linj.tech> <75130626+jian-lin@users.noreply.github.com>
|
||||
Martin Weinelt <hexa@darmstadt.ccc.de> <mweinelt@users.noreply.github.com>
|
||||
moni <lythe1107@gmail.com> <lythe1107@icloud.com>
|
||||
R. RyanTM <ryantm-bot@ryantm.com>
|
||||
Robert Hensing <robert@roberthensing.nl> <roberth@users.noreply.github.com>
|
||||
Sandro Jäckel <sandro.jaeckel@gmail.com>
|
||||
|
|
2
third_party/nixpkgs/CONTRIBUTING.md
vendored
2
third_party/nixpkgs/CONTRIBUTING.md
vendored
|
@ -345,7 +345,7 @@ See [Nix Channel Status](https://status.nixos.org/) for the current channels and
|
|||
Here's a brief overview of the main Git branches and what channels they're used for:
|
||||
|
||||
- `master`: The main branch, used for the unstable channels such as `nixpkgs-unstable`, `nixos-unstable` and `nixos-unstable-small`.
|
||||
- `release-YY.MM` (e.g. `release-24.11`): The NixOS release branches, used for the stable channels such as `nixos-24.11`, `nixos-24.11-small` and `nixpkgs-24.11-darwin`.
|
||||
- `release-YY.MM` (e.g. `release-25.05`): The NixOS release branches, used for the stable channels such as `nixos-25.05`, `nixos-25.05-small` and `nixpkgs-25.05-darwin`.
|
||||
|
||||
When a channel is updated, a corresponding Git branch is also updated to point to the corresponding commit.
|
||||
So e.g. the [`nixpkgs-unstable` branch](https://github.com/nixos/nixpkgs/tree/nixpkgs-unstable) corresponds to the Git commit from the [`nixpkgs-unstable` channel](https://channels.nixos.org/nixpkgs-unstable).
|
||||
|
|
4
third_party/nixpkgs/README.md
vendored
4
third_party/nixpkgs/README.md
vendored
|
@ -52,9 +52,9 @@ Nixpkgs and NixOS are built and tested by our continuous integration
|
|||
system, [Hydra](https://hydra.nixos.org/).
|
||||
|
||||
* [Continuous package builds for unstable/master](https://hydra.nixos.org/jobset/nixos/trunk-combined)
|
||||
* [Continuous package builds for the NixOS 24.05 release](https://hydra.nixos.org/jobset/nixos/release-24.05)
|
||||
* [Continuous package builds for the NixOS 24.11 release](https://hydra.nixos.org/jobset/nixos/release-24.11)
|
||||
* [Tests for unstable/master](https://hydra.nixos.org/job/nixos/trunk-combined/tested#tabs-constituents)
|
||||
* [Tests for the NixOS 24.05 release](https://hydra.nixos.org/job/nixos/release-24.05/tested#tabs-constituents)
|
||||
* [Tests for the NixOS 24.11 release](https://hydra.nixos.org/job/nixos/release-24.11/tested#tabs-constituents)
|
||||
|
||||
Artifacts successfully built with Hydra are published to cache at
|
||||
https://cache.nixos.org/. When successful build and test criteria are
|
||||
|
|
5
third_party/nixpkgs/ci/OWNERS
vendored
5
third_party/nixpkgs/ci/OWNERS
vendored
|
@ -14,7 +14,7 @@
|
|||
# Processing of this file is implemented in workflows/codeowners-v2.yml
|
||||
|
||||
# CI
|
||||
/.github/workflows @NixOS/Security @Mic92 @zowoq
|
||||
/.github/workflows @NixOS/Security @Mic92 @zowoq @infinisil @azuwis
|
||||
/.github/workflows/check-nix-format.yml @infinisil
|
||||
/.github/workflows/nixpkgs-vet.yml @infinisil @philiptaron
|
||||
/.github/workflows/codeowners-v2.yml @infinisil
|
||||
|
@ -182,6 +182,7 @@ nixos/modules/installer/tools/nix-fallback-paths.nix @NixOS/nix-team @raitobeza
|
|||
# Rust
|
||||
/pkgs/development/compilers/rust @alyssais @Mic92 @zowoq @winterqt @figsoda
|
||||
/pkgs/build-support/rust @zowoq @winterqt @figsoda
|
||||
/pkgs/build-support/rust/fetch-cargo-vendor* @TomaSajt
|
||||
/doc/languages-frameworks/rust.section.md @zowoq @winterqt @figsoda
|
||||
|
||||
# Tcl
|
||||
|
@ -217,6 +218,7 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
|
|||
/doc/languages-frameworks/java.section.md @NixOS/java
|
||||
/doc/languages-frameworks/gradle.section.md @NixOS/java
|
||||
/doc/languages-frameworks/maven.section.md @NixOS/java
|
||||
/nixos/modules/programs/java.nix @NixOS/java
|
||||
/pkgs/top-level/java-packages.nix @NixOS/java
|
||||
|
||||
# Jetbrains
|
||||
|
@ -240,6 +242,7 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
|
|||
|
||||
# PostgreSQL and related stuff
|
||||
/pkgs/servers/sql/postgresql @NixOS/postgres
|
||||
/pkgs/development/tools/rust/cargo-pgrx @NixOS/postgres
|
||||
/nixos/modules/services/databases/postgresql.md @NixOS/postgres
|
||||
/nixos/modules/services/databases/postgresql.nix @NixOS/postgres
|
||||
/nixos/tests/postgresql @NixOS/postgres
|
||||
|
|
29
third_party/nixpkgs/ci/README.md
vendored
29
third_party/nixpkgs/ci/README.md
vendored
|
@ -58,7 +58,7 @@ Exit codes:
|
|||
|
||||
### Usage
|
||||
|
||||
This script can be used in GitHub Actions workflows as follows:
|
||||
This script is implemented as a reusable GitHub Actions workflow, and can be used as follows:
|
||||
|
||||
```yaml
|
||||
on: pull_request_target
|
||||
|
@ -67,32 +67,19 @@ on: pull_request_target
|
|||
permissions: {}
|
||||
|
||||
jobs:
|
||||
get-merge-commit:
|
||||
# use the relative path of the get-merge-commit workflow yaml here
|
||||
uses: ./.github/workflows/get-merge-commit.yml
|
||||
|
||||
build:
|
||||
name: Build
|
||||
runs-on: ubuntu-latest
|
||||
needs: get-merge-commit
|
||||
steps:
|
||||
# Important: Because of `pull_request_target`, this doesn't check out the PR,
|
||||
# but rather the base branch of the PR, which is needed so we don't run untrusted code
|
||||
- uses: actions/checkout@<VERSION>
|
||||
with:
|
||||
path: base
|
||||
sparse-checkout: ci
|
||||
- name: Resolving the merge commit
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
if mergedSha=$(base/ci/get-merge-commit.sh ${{ github.repository }} ${{ github.event.number }}); then
|
||||
echo "Checking the merge commit $mergedSha"
|
||||
echo "mergedSha=$mergedSha" >> "$GITHUB_ENV"
|
||||
else
|
||||
# Skipping so that no notifications are sent
|
||||
echo "Skipping the rest..."
|
||||
fi
|
||||
rm -rf base
|
||||
- uses: actions/checkout@<VERSION>
|
||||
# Add this to _all_ subsequent steps to skip them
|
||||
if: env.mergedSha
|
||||
if: needs.get-merge-commit.outputs.mergedSha
|
||||
with:
|
||||
ref: ${{ env.mergedSha }}
|
||||
ref: ${{ needs.get-merge-commit.outputs.mergedSha }}
|
||||
- ...
|
||||
```
|
||||
|
|
1
third_party/nixpkgs/ci/default.nix
vendored
1
third_party/nixpkgs/ci/default.nix
vendored
|
@ -26,4 +26,5 @@ in
|
|||
inherit pkgs;
|
||||
requestReviews = pkgs.callPackage ./request-reviews { };
|
||||
codeownersValidator = pkgs.callPackage ./codeowners-validator { };
|
||||
eval = pkgs.callPackage ./eval { };
|
||||
}
|
||||
|
|
19
third_party/nixpkgs/ci/eval/README.md
vendored
Normal file
19
third_party/nixpkgs/ci/eval/README.md
vendored
Normal file
|
@ -0,0 +1,19 @@
|
|||
# Nixpkgs CI evaluation
|
||||
|
||||
The code in this directory is used by the [eval.yml](../../.github/workflows/eval.yml) GitHub Actions workflow to evaluate the majority of Nixpkgs for all PRs, effectively making sure that when the development branches are processed by Hydra, no evaluation failures are encountered.
|
||||
|
||||
Furthermore it also allows local evaluation using
|
||||
```
|
||||
nix-build ci -A eval.full \
|
||||
--max-jobs 4
|
||||
--cores 2
|
||||
--arg chunkSize 10000
|
||||
```
|
||||
|
||||
- `--max-jobs`: The maximum number of derivations to run at the same time. Only each [supported system](../supportedSystems.nix) gets a separate derivation, so it doesn't make sense to set this higher than that number.
|
||||
- `--cores`: The number of cores to use for each job. Recommended to set this to the amount of cores on your system divided by `--max-jobs`.
|
||||
- `chunkSize`: The number of attributes that are evaluated simultaneously on a single core. Lowering this decreases memory usage at the cost of increased evaluation time. If this is too high, there won't be enough chunks to process them in parallel, and will also increase evaluation time.
|
||||
|
||||
A good default is to set `chunkSize` to 10000, which leads to about 3.6GB max memory usage per core, so suitable for fully utilising machines with 4 cores and 16GB memory, 8 cores and 32GB memory or 16 cores and 64GB memory.
|
||||
|
||||
Note that 16GB memory is the recommended minimum, while with less than 8GB memory evaluation time suffers greatly.
|
164
third_party/nixpkgs/ci/eval/compare.jq
vendored
Normal file
164
third_party/nixpkgs/ci/eval/compare.jq
vendored
Normal file
|
@ -0,0 +1,164 @@
|
|||
# Turns
|
||||
#
|
||||
# {
|
||||
# "hello.aarch64-linux": "a",
|
||||
# "hello.x86_64-linux": "b",
|
||||
# "hello.aarch64-darwin": "c",
|
||||
# "hello.x86_64-darwin": "d"
|
||||
# }
|
||||
#
|
||||
# into
|
||||
#
|
||||
# {
|
||||
# "hello": {
|
||||
# "linux": {
|
||||
# "aarch64": "a",
|
||||
# "x86_64": "b"
|
||||
# },
|
||||
# "darwin": {
|
||||
# "aarch64": "c",
|
||||
# "x86_64": "d"
|
||||
# }
|
||||
# }
|
||||
# }
|
||||
#
|
||||
# while filtering out any attribute paths that don't match this pattern
|
||||
def expand_system:
|
||||
to_entries
|
||||
| map(
|
||||
.key |= split(".")
|
||||
| select(.key | length > 1)
|
||||
| .double = (.key[-1] | split("-"))
|
||||
| select(.double | length == 2)
|
||||
)
|
||||
| group_by(.key[0:-1])
|
||||
| map(
|
||||
{
|
||||
key: .[0].key[0:-1] | join("."),
|
||||
value:
|
||||
group_by(.double[1])
|
||||
| map(
|
||||
{
|
||||
key: .[0].double[1],
|
||||
value: map(.key = .double[0]) | from_entries
|
||||
}
|
||||
)
|
||||
| from_entries
|
||||
})
|
||||
| from_entries
|
||||
;
|
||||
|
||||
# Transposes
|
||||
#
|
||||
# {
|
||||
# "a": [ "x", "y" ],
|
||||
# "b": [ "x" ],
|
||||
# }
|
||||
#
|
||||
# into
|
||||
#
|
||||
# {
|
||||
# "x": [ "a", "b" ],
|
||||
# "y": [ "a" ]
|
||||
# }
|
||||
def transpose:
|
||||
[
|
||||
to_entries[]
|
||||
| {
|
||||
key: .key,
|
||||
value: .value[]
|
||||
}
|
||||
]
|
||||
| group_by(.value)
|
||||
| map({
|
||||
key: .[0].value,
|
||||
value: map(.key)
|
||||
})
|
||||
| from_entries
|
||||
;
|
||||
|
||||
# Computes the key difference for two objects:
|
||||
# {
|
||||
# added: [ <keys only in the second object> ],
|
||||
# removed: [ <keys only in the first object> ],
|
||||
# changed: [ <keys with different values between the two objects> ],
|
||||
# }
|
||||
#
|
||||
def diff($before; $after):
|
||||
{
|
||||
added: $after | delpaths($before | keys | map([.])) | keys,
|
||||
removed: $before | delpaths($after | keys | map([.])) | keys,
|
||||
changed:
|
||||
$before
|
||||
| to_entries
|
||||
| map(
|
||||
$after."\(.key)" as $after2
|
||||
| select(
|
||||
# Filter out attributes that don't exist anymore
|
||||
($after2 != null)
|
||||
and
|
||||
# Filter out attributes that are the same as the new value
|
||||
(.value != $after2)
|
||||
)
|
||||
| .key
|
||||
)
|
||||
}
|
||||
;
|
||||
|
||||
($before[0] | expand_system) as $before
|
||||
| ($after[0] | expand_system) as $after
|
||||
| .attrdiff = diff($before; $after)
|
||||
| .rebuildsByKernel = (
|
||||
[
|
||||
(
|
||||
.attrdiff.changed[]
|
||||
| {
|
||||
key: .,
|
||||
value: diff($before."\(.)"; $after."\(.)").changed
|
||||
}
|
||||
)
|
||||
,
|
||||
(
|
||||
.attrdiff.added[]
|
||||
| {
|
||||
key: .,
|
||||
value: ($after."\(.)" | keys)
|
||||
}
|
||||
)
|
||||
]
|
||||
| from_entries
|
||||
| transpose
|
||||
)
|
||||
| .rebuildCountByKernel = (
|
||||
.rebuildsByKernel
|
||||
| with_entries(.value |= length)
|
||||
| pick(.linux, .darwin)
|
||||
| {
|
||||
linux: (.linux // 0),
|
||||
darwin: (.darwin // 0),
|
||||
}
|
||||
)
|
||||
| .labels = (
|
||||
.rebuildCountByKernel
|
||||
| to_entries
|
||||
| map(
|
||||
"10.rebuild-\(.key): " +
|
||||
if .value == 0 then
|
||||
"0"
|
||||
elif .value <= 10 then
|
||||
"1-10"
|
||||
elif .value <= 100 then
|
||||
"11-100"
|
||||
elif .value <= 500 then
|
||||
"101-500"
|
||||
elif .value <= 1000 then
|
||||
"501-1000"
|
||||
elif .value <= 2500 then
|
||||
"1001-2500"
|
||||
elif .value <= 5000 then
|
||||
"2501-5000"
|
||||
else
|
||||
"5001+"
|
||||
end
|
||||
)
|
||||
)
|
303
third_party/nixpkgs/ci/eval/default.nix
vendored
Normal file
303
third_party/nixpkgs/ci/eval/default.nix
vendored
Normal file
|
@ -0,0 +1,303 @@
|
|||
{
|
||||
lib,
|
||||
runCommand,
|
||||
writeShellScript,
|
||||
linkFarm,
|
||||
time,
|
||||
procps,
|
||||
nixVersions,
|
||||
jq,
|
||||
sta,
|
||||
}:
|
||||
|
||||
let
|
||||
nixpkgs =
|
||||
with lib.fileset;
|
||||
toSource {
|
||||
root = ../..;
|
||||
fileset = unions (
|
||||
map (lib.path.append ../..) [
|
||||
"default.nix"
|
||||
"doc"
|
||||
"lib"
|
||||
"maintainers"
|
||||
"nixos"
|
||||
"pkgs"
|
||||
".version"
|
||||
"ci/supportedSystems.nix"
|
||||
]
|
||||
);
|
||||
};
|
||||
|
||||
nix = nixVersions.nix_2_24;
|
||||
|
||||
supportedSystems = import ../supportedSystems.nix;
|
||||
|
||||
attrpathsSuperset =
|
||||
runCommand "attrpaths-superset.json"
|
||||
{
|
||||
src = nixpkgs;
|
||||
nativeBuildInputs = [
|
||||
nix
|
||||
time
|
||||
];
|
||||
env.supportedSystems = builtins.toJSON supportedSystems;
|
||||
passAsFile = [ "supportedSystems" ];
|
||||
}
|
||||
''
|
||||
export NIX_STATE_DIR=$(mktemp -d)
|
||||
mkdir $out
|
||||
export GC_INITIAL_HEAP_SIZE=4g
|
||||
command time -v \
|
||||
nix-instantiate --eval --strict --json --show-trace \
|
||||
"$src/pkgs/top-level/release-attrpaths-superset.nix" \
|
||||
-A paths \
|
||||
-I "$src" \
|
||||
--option restrict-eval true \
|
||||
--option allow-import-from-derivation false \
|
||||
--arg enableWarnings false > $out/paths.json
|
||||
mv "$supportedSystemsPath" $out/systems.json
|
||||
'';
|
||||
|
||||
singleSystem =
|
||||
{
|
||||
# The system to evaluate.
|
||||
# Note that this is intentionally not called `system`,
|
||||
# because `--argstr system` would only be passed to the ci/default.nix file!
|
||||
evalSystem,
|
||||
# The path to the `paths.json` file from `attrpathsSuperset`
|
||||
attrpathFile,
|
||||
# The number of attributes per chunk, see ./README.md for more info.
|
||||
chunkSize,
|
||||
checkMeta ? true,
|
||||
includeBroken ? true,
|
||||
# Whether to just evaluate a single chunk for quick testing
|
||||
quickTest ? false,
|
||||
}:
|
||||
let
|
||||
singleChunk = writeShellScript "single-chunk" ''
|
||||
set -euo pipefail
|
||||
chunkSize=$1
|
||||
myChunk=$2
|
||||
system=$3
|
||||
outputDir=$4
|
||||
|
||||
export NIX_SHOW_STATS=1
|
||||
export NIX_SHOW_STATS_PATH="$outputDir/stats/$myChunk"
|
||||
echo "Chunk $myChunk on $system start"
|
||||
set +e
|
||||
command time -f "Chunk $myChunk on $system done [%MKB max resident, %Es elapsed] %C" \
|
||||
nix-env -f "${nixpkgs}/pkgs/top-level/release-attrpaths-parallel.nix" \
|
||||
--option restrict-eval true \
|
||||
--option allow-import-from-derivation false \
|
||||
--query --available \
|
||||
--no-name --attr-path --out-path \
|
||||
--show-trace \
|
||||
--arg chunkSize "$chunkSize" \
|
||||
--arg myChunk "$myChunk" \
|
||||
--arg attrpathFile "${attrpathFile}" \
|
||||
--arg systems "[ \"$system\" ]" \
|
||||
--arg checkMeta ${lib.boolToString checkMeta} \
|
||||
--arg includeBroken ${lib.boolToString includeBroken} \
|
||||
-I ${nixpkgs} \
|
||||
-I ${attrpathFile} \
|
||||
> "$outputDir/result/$myChunk"
|
||||
exitCode=$?
|
||||
set -e
|
||||
if (( exitCode != 0 )); then
|
||||
echo "Evaluation failed with exit code $exitCode"
|
||||
# This immediately halts all xargs processes
|
||||
kill $PPID
|
||||
fi
|
||||
'';
|
||||
in
|
||||
runCommand "nixpkgs-eval-${evalSystem}"
|
||||
{
|
||||
nativeBuildInputs = [
|
||||
nix
|
||||
time
|
||||
procps
|
||||
jq
|
||||
];
|
||||
env = {
|
||||
inherit evalSystem chunkSize;
|
||||
};
|
||||
}
|
||||
''
|
||||
export NIX_STATE_DIR=$(mktemp -d)
|
||||
nix-store --init
|
||||
|
||||
echo "System: $evalSystem"
|
||||
cores=$NIX_BUILD_CORES
|
||||
echo "Cores: $cores"
|
||||
attrCount=$(jq length "${attrpathFile}")
|
||||
echo "Attribute count: $attrCount"
|
||||
echo "Chunk size: $chunkSize"
|
||||
# Same as `attrCount / chunkSize` but rounded up
|
||||
chunkCount=$(( (attrCount - 1) / chunkSize + 1 ))
|
||||
echo "Chunk count: $chunkCount"
|
||||
|
||||
mkdir $out
|
||||
|
||||
# Record and print stats on free memory and swap in the background
|
||||
(
|
||||
while true; do
|
||||
availMemory=$(free -b | grep Mem | awk '{print $7}')
|
||||
freeSwap=$(free -b | grep Swap | awk '{print $4}')
|
||||
echo "Available memory: $(( availMemory / 1024 / 1024 )) MiB, free swap: $(( freeSwap / 1024 / 1024 )) MiB"
|
||||
|
||||
if [[ ! -f "$out/min-avail-memory" ]] || (( availMemory < $(<$out/min-avail-memory) )); then
|
||||
echo "$availMemory" > $out/min-avail-memory
|
||||
fi
|
||||
if [[ ! -f $out/min-free-swap ]] || (( availMemory < $(<$out/min-free-swap) )); then
|
||||
echo "$freeSwap" > $out/min-free-swap
|
||||
fi
|
||||
sleep 4
|
||||
done
|
||||
) &
|
||||
|
||||
seq_end=$(( chunkCount - 1 ))
|
||||
|
||||
${lib.optionalString quickTest ''
|
||||
seq_end=0
|
||||
''}
|
||||
|
||||
chunkOutputDir=$(mktemp -d)
|
||||
mkdir "$chunkOutputDir"/{result,stats}
|
||||
|
||||
seq -w 0 "$seq_end" |
|
||||
command time -f "%e" -o "$out/total-time" \
|
||||
xargs -I{} -P"$cores" \
|
||||
${singleChunk} "$chunkSize" {} "$evalSystem" "$chunkOutputDir"
|
||||
|
||||
if (( chunkSize * chunkCount != attrCount )); then
|
||||
# A final incomplete chunk would mess up the stats, don't include it
|
||||
rm "$chunkOutputDir"/stats/"$seq_end"
|
||||
fi
|
||||
|
||||
# Make sure the glob doesn't break when there's no files
|
||||
shopt -s nullglob
|
||||
cat "$chunkOutputDir"/result/* > $out/paths
|
||||
cat "$chunkOutputDir"/stats/* > $out/stats.jsonstream
|
||||
'';
|
||||
|
||||
combine =
|
||||
{
|
||||
resultsDir,
|
||||
}:
|
||||
runCommand "combined-result"
|
||||
{
|
||||
nativeBuildInputs = [
|
||||
jq
|
||||
sta
|
||||
];
|
||||
}
|
||||
''
|
||||
mkdir -p $out
|
||||
|
||||
# Transform output paths to JSON
|
||||
cat ${resultsDir}/*/paths |
|
||||
jq --sort-keys --raw-input --slurp '
|
||||
split("\n") |
|
||||
map(select(. != "") | split(" ") | map(select(. != ""))) |
|
||||
map(
|
||||
{
|
||||
key: .[0],
|
||||
value: .[1] | split(";") | map(split("=") |
|
||||
if length == 1 then
|
||||
{ key: "out", value: .[0] }
|
||||
else
|
||||
{ key: .[0], value: .[1] }
|
||||
end) | from_entries}
|
||||
) | from_entries
|
||||
' > $out/outpaths.json
|
||||
|
||||
# Computes min, mean, error, etc. for a list of values and outputs a JSON from that
|
||||
statistics() {
|
||||
local stat=$1
|
||||
sta --transpose |
|
||||
jq --raw-input --argjson stat "$stat" -n '
|
||||
[
|
||||
inputs |
|
||||
split("\t") |
|
||||
{ key: .[0], value: (.[1] | fromjson) }
|
||||
] |
|
||||
from_entries |
|
||||
{
|
||||
key: ($stat | join(".")),
|
||||
value: .
|
||||
}'
|
||||
}
|
||||
|
||||
# Gets all available number stats (without .sizes because those are constant and not interesting)
|
||||
readarray -t stats < <(jq -cs '.[0] | del(.sizes) | paths(type == "number")' ${resultsDir}/*/stats.jsonstream)
|
||||
|
||||
# Combines the statistics from all evaluations
|
||||
{
|
||||
echo "{ \"key\": \"minAvailMemory\", \"value\": $(cat ${resultsDir}/*/min-avail-memory | sta --brief --min) }"
|
||||
echo "{ \"key\": \"minFreeSwap\", \"value\": $(cat ${resultsDir}/*/min-free-swap | sta --brief --min) }"
|
||||
cat ${resultsDir}/*/total-time | statistics '["totalTime"]'
|
||||
for stat in "''${stats[@]}"; do
|
||||
cat ${resultsDir}/*/stats.jsonstream |
|
||||
jq --argjson stat "$stat" 'getpath($stat)' |
|
||||
statistics "$stat"
|
||||
done
|
||||
} |
|
||||
jq -s from_entries > $out/stats.json
|
||||
'';
|
||||
|
||||
compare =
|
||||
{ beforeResultDir, afterResultDir }:
|
||||
runCommand "compare"
|
||||
{
|
||||
nativeBuildInputs = [
|
||||
jq
|
||||
];
|
||||
}
|
||||
''
|
||||
mkdir $out
|
||||
jq -n -f ${./compare.jq} \
|
||||
--slurpfile before ${beforeResultDir}/outpaths.json \
|
||||
--slurpfile after ${afterResultDir}/outpaths.json \
|
||||
> $out/changed-paths.json
|
||||
|
||||
jq -r -f ${./generate-step-summary.jq} < $out/changed-paths.json > $out/step-summary.md
|
||||
# TODO: Compare eval stats
|
||||
'';
|
||||
|
||||
full =
|
||||
{
|
||||
# Whether to evaluate just a single system, by default all are evaluated
|
||||
evalSystem ? if quickTest then "x86_64-linux" else null,
|
||||
# The number of attributes per chunk, see ./README.md for more info.
|
||||
chunkSize,
|
||||
quickTest ? false,
|
||||
}:
|
||||
let
|
||||
systems = if evalSystem == null then supportedSystems else [ evalSystem ];
|
||||
results = linkFarm "results" (
|
||||
map (evalSystem: {
|
||||
name = evalSystem;
|
||||
path = singleSystem {
|
||||
inherit quickTest evalSystem chunkSize;
|
||||
attrpathFile = attrpathsSuperset + "/paths.json";
|
||||
};
|
||||
}) systems
|
||||
);
|
||||
in
|
||||
combine {
|
||||
resultsDir = results;
|
||||
};
|
||||
|
||||
in
|
||||
{
|
||||
inherit
|
||||
attrpathsSuperset
|
||||
singleSystem
|
||||
combine
|
||||
compare
|
||||
# The above three are used by separate VMs in a GitHub workflow,
|
||||
# while the below is intended for testing on a single local machine
|
||||
full
|
||||
;
|
||||
}
|
15
third_party/nixpkgs/ci/eval/generate-step-summary.jq
vendored
Normal file
15
third_party/nixpkgs/ci/eval/generate-step-summary.jq
vendored
Normal file
|
@ -0,0 +1,15 @@
|
|||
def truncate(xs; n):
|
||||
if xs | length > n then xs[:n] + ["..."]
|
||||
else xs
|
||||
end;
|
||||
|
||||
def itemize_packages(xs):
|
||||
# we truncate the list to stay below the GitHub limit of 1MB per step summary.
|
||||
truncate(xs; 3000) | map("- [\(.)](https://search.nixos.org/packages?channel=unstable&show=\(.)&from=0&size=50&sort=relevance&type=packages&query=\(.))") | join("\n");
|
||||
|
||||
def section(title; xs):
|
||||
"<details> <summary>" + title + " (" + (xs | length | tostring) + ")</summary>\n\n" + itemize_packages(xs) + "</details>";
|
||||
|
||||
section("Added packages"; .attrdiff.added) + "\n\n" +
|
||||
section("Removed packages"; .attrdiff.removed) + "\n\n" +
|
||||
section("Changed packages"; .attrdiff.changed)
|
4
third_party/nixpkgs/ci/pinned-nixpkgs.json
vendored
4
third_party/nixpkgs/ci/pinned-nixpkgs.json
vendored
|
@ -1,4 +1,4 @@
|
|||
{
|
||||
"rev": "4de4818c1ffa76d57787af936e8a23648bda6be4",
|
||||
"sha256": "0l3b9jr5ydzqgvd10j12imc9jqb6jv5v2bdi1gyy5cwkwplfay67"
|
||||
"rev": "31d66ae40417bb13765b0ad75dd200400e98de84",
|
||||
"sha256": "0fwsqd05bnk635niqnx9vqkdbinjq0ffdrbk66xllfyrnx4fvmpc"
|
||||
}
|
||||
|
|
|
@ -1,7 +1,9 @@
|
|||
# Trusted development branches:
|
||||
# These generally require PRs to update and are built by Hydra.
|
||||
# Keep this synced with the branches in .github/workflows/eval.yml
|
||||
master
|
||||
staging
|
||||
release-*
|
||||
staging-*
|
||||
haskell-updates
|
||||
python-updates
|
||||
|
|
6
third_party/nixpkgs/ci/supportedSystems.nix
vendored
Normal file
6
third_party/nixpkgs/ci/supportedSystems.nix
vendored
Normal file
|
@ -0,0 +1,6 @@
|
|||
[
|
||||
"aarch64-linux"
|
||||
"aarch64-darwin"
|
||||
"x86_64-linux"
|
||||
"x86_64-darwin"
|
||||
]
|
|
@ -364,7 +364,7 @@ including `nativeBuildInputs` to specify dependencies available to the `script`.
|
|||
```nix
|
||||
testers.runCommand {
|
||||
name = "access-the-internet";
|
||||
command = ''
|
||||
script = ''
|
||||
curl -o /dev/null https://example.com
|
||||
touch $out
|
||||
'';
|
||||
|
|
|
@ -754,10 +754,6 @@ This creates a derivation with a directory structure like the following:
|
|||
...
|
||||
```
|
||||
|
||||
## `writeReferencesToFile` {#trivial-builder-writeReferencesToFile}
|
||||
|
||||
Deprecated. Use [`writeClosure`](#trivial-builder-writeClosure) instead.
|
||||
|
||||
## `writeClosure` {#trivial-builder-writeClosure}
|
||||
|
||||
Given a list of [store paths](https://nixos.org/manual/nix/stable/glossary#gloss-store-path) (or string-like expressions coercible to store paths), write their collective [closure](https://nixos.org/manual/nix/stable/glossary#gloss-closure) to a text file.
|
||||
|
|
10
third_party/nixpkgs/doc/doc-support/package.nix
vendored
10
third_party/nixpkgs/doc/doc-support/package.nix
vendored
|
@ -9,6 +9,8 @@
|
|||
mkShellNoCC,
|
||||
documentation-highlighter,
|
||||
nixos-render-docs,
|
||||
nixos-render-docs-redirects,
|
||||
writeShellScriptBin,
|
||||
nixpkgs ? { },
|
||||
}:
|
||||
|
||||
|
@ -105,8 +107,14 @@ stdenvNoCC.mkDerivation (
|
|||
buildArgs = "./.";
|
||||
open = "/share/doc/nixpkgs/manual.html";
|
||||
};
|
||||
nixos-render-docs-redirects' = writeShellScriptBin "redirects" "${lib.getExe nixos-render-docs-redirects} --file ${toString ../redirects.json} $@";
|
||||
in
|
||||
mkShellNoCC { packages = [ devmode' ]; };
|
||||
mkShellNoCC {
|
||||
packages = [
|
||||
devmode'
|
||||
nixos-render-docs-redirects'
|
||||
];
|
||||
};
|
||||
|
||||
tests.manpage-urls = callPackage ../tests/manpage-urls.nix { };
|
||||
};
|
||||
|
|
22
third_party/nixpkgs/doc/hooks/tauri.section.md
vendored
22
third_party/nixpkgs/doc/hooks/tauri.section.md
vendored
|
@ -14,15 +14,13 @@ In Nixpkgs, `cargo-tauri.hook` overrides the default build and install phases.
|
|||
rustPlatform,
|
||||
fetchNpmDeps,
|
||||
cargo-tauri,
|
||||
darwin,
|
||||
glib-networking,
|
||||
libsoup,
|
||||
nodejs,
|
||||
npmHooks,
|
||||
openssl,
|
||||
pkg-config,
|
||||
webkitgtk_4_0,
|
||||
wrapGAppsHook3,
|
||||
webkitgtk_4_1,
|
||||
wrapGAppsHook4,
|
||||
}:
|
||||
|
||||
rustPlatform.buildRustPackage rec {
|
||||
|
@ -47,25 +45,15 @@ rustPlatform.buildRustPackage rec {
|
|||
|
||||
# Make sure we can find our libraries
|
||||
pkg-config
|
||||
wrapGAppsHook3
|
||||
wrapGAppsHook4
|
||||
];
|
||||
|
||||
buildInputs =
|
||||
[ openssl ]
|
||||
++ lib.optionals stdenv.hostPlatform.isLinux [
|
||||
glib-networking # Most Tauri apps need networking
|
||||
libsoup
|
||||
webkitgtk_4_0
|
||||
]
|
||||
++ lib.optionals stdenv.hostPlatform.isDarwin (
|
||||
with darwin.apple_sdk.frameworks;
|
||||
[
|
||||
AppKit
|
||||
CoreServices
|
||||
Security
|
||||
WebKit
|
||||
]
|
||||
);
|
||||
webkitgtk_4_1
|
||||
];
|
||||
|
||||
# Set our Tauri source directory
|
||||
cargoRoot = "src-tauri";
|
||||
|
|
|
@ -60,13 +60,13 @@ all the other eggs:
|
|||
|
||||
```nix
|
||||
let
|
||||
myChickenPackages = pkgs.chickenPackages.overrideScope' (self: super: {
|
||||
myChickenPackages = pkgs.chickenPackages.overrideScope (self: super: {
|
||||
# The chicken package itself can be overridden to effect the whole ecosystem.
|
||||
# chicken = super.chicken.overrideAttrs {
|
||||
# src = ...
|
||||
# };
|
||||
|
||||
chickenEggs = super.chickenEggs.overrideScope' (eggself: eggsuper: {
|
||||
chickenEggs = super.chickenEggs.overrideScope (eggself: eggsuper: {
|
||||
srfi-180 = eggsuper.srfi-180.overrideAttrs {
|
||||
# path to a local copy of srfi-180
|
||||
src = <...>;
|
||||
|
|
|
@ -7,32 +7,6 @@ The function `buildGoModule` builds Go programs managed with Go modules. It buil
|
|||
- An intermediate fetcher derivation called `goModules`. This derivation will be used to fetch all the dependencies of the Go module.
|
||||
- A final derivation will use the output of the intermediate derivation to build the binaries and produce the final output.
|
||||
|
||||
### Attributes of `buildGoModule` {#buildgomodule-parameters}
|
||||
|
||||
The `buildGoModule` function accepts the following parameters in addition to the [attributes accepted by both Go builders](#ssec-go-common-attributes):
|
||||
|
||||
- `vendorHash`: is the hash of the output of the intermediate fetcher derivation (the dependencies of the Go modules).
|
||||
|
||||
`vendorHash` can be set to `null`.
|
||||
In that case, rather than fetching the dependencies, the dependencies already vendored in the `vendor` directory of the source repo will be used.
|
||||
|
||||
To avoid updating this field when dependencies change, run `go mod vendor` in your source repo and set `vendorHash = null;`.
|
||||
You can read more about [vendoring in the Go documentation](https://go.dev/ref/mod#vendoring).
|
||||
|
||||
To obtain the actual hash, set `vendorHash = lib.fakeHash;` and run the build ([more details here](#sec-source-hashes)).
|
||||
- `proxyVendor`: If `true`, the intermediate fetcher downloads dependencies from the
|
||||
[Go module proxy](https://go.dev/ref/mod#module-proxy) (using `go mod download`) instead of vendoring them. The resulting
|
||||
[module cache](https://go.dev/ref/mod#module-cache) is then passed to the final derivation.
|
||||
|
||||
This is useful if your code depends on C code and `go mod tidy` does not include the needed sources to build or
|
||||
if any dependency has case-insensitive conflicts which will produce platform-dependent `vendorHash` checksums.
|
||||
|
||||
Defaults to `false`.
|
||||
- `modPostBuild`: Shell commands to run after the build of the goModules executes `go mod vendor`, and before calculating fixed output derivation's `vendorHash`.
|
||||
Note that if you change this attribute, you need to update `vendorHash` attribute.
|
||||
- `modRoot`: The root directory of the Go module that contains the `go.mod` file.
|
||||
Defaults to `./`, which is the root of `src`.
|
||||
|
||||
### Example for `buildGoModule` {#ex-buildGoModule}
|
||||
|
||||
The following is an example expression using `buildGoModule`:
|
||||
|
@ -62,17 +36,45 @@ The following is an example expression using `buildGoModule`:
|
|||
}
|
||||
```
|
||||
|
||||
### Obtaining and overriding `vendorHash` for `buildGoModule` {#buildGoModule-vendorHash}
|
||||
## Attributes of `buildGoModule` {#buildgomodule-parameters}
|
||||
|
||||
Many attributes [controlling the build phase](#variables-controlling-the-build-phase) are respected by `buildGoModule`. Note that `buildGoModule` reads the following attributes also when building the `vendor/` goModules fixed output derivation as well:
|
||||
|
||||
- [`sourceRoot`](#var-stdenv-sourceRoot)
|
||||
- [`prePatch`](#var-stdenv-prePatch)
|
||||
- [`patches`](#var-stdenv-patches)
|
||||
- [`patchFlags`](#var-stdenv-patchFlags)
|
||||
- [`postPatch`](#var-stdenv-postPatch)
|
||||
- [`preBuild`](#var-stdenv-preBuild)
|
||||
- `env`: useful for passing down variables such as `GOWORK`.
|
||||
|
||||
To control test execution of the build derivation, the following attributes are of interest:
|
||||
|
||||
- [`checkInputs`](#var-stdenv-checkInputs)
|
||||
- [`preCheck`](#var-stdenv-preCheck)
|
||||
- [`checkFlags`](#var-stdenv-checkFlags)
|
||||
|
||||
In addition to the above attributes, and the many more variables respected also by `stdenv.mkDerivation`, `buildGoModule` respects Go-specific attributes that tweak them to behave slightly differently:
|
||||
|
||||
### `vendorHash` {#var-go-vendorHash}
|
||||
|
||||
Hash of the output of the intermediate fetcher derivation (the dependencies of the Go modules).
|
||||
|
||||
`vendorHash` can be set to `null`.
|
||||
In that case, rather than fetching the dependencies, the dependencies already vendored in the `vendor` directory of the source repo will be used.
|
||||
|
||||
To avoid updating this field when dependencies change, run `go mod vendor` in your source repo and set `vendorHash = null;`.
|
||||
You can read more about [vendoring in the Go documentation](https://go.dev/ref/mod#vendoring).
|
||||
|
||||
To obtain the hash, set `vendorHash = lib.fakeHash;` and run the build. ([more details here](#sec-source-hashes)).
|
||||
Another way is to use use `nix-prefetch` to obtain the hash. The following command gets the value of `vendorHash` for package `pet`:
|
||||
|
||||
We can use `nix-prefetch` to obtain the actual hash. The following command gets the value of `vendorHash` for package `pet`:
|
||||
|
||||
```sh
|
||||
cd path/to/nixpkgs
|
||||
nix-prefetch -E "{ sha256 }: ((import ./. { }).my-package.overrideAttrs { vendorHash = sha256; }).goModules"
|
||||
```
|
||||
|
||||
To obtain the hash without external tools, set `vendorHash = lib.fakeHash;` and run the build. ([more details here](#sec-source-hashes)).
|
||||
|
||||
`vendorHash` can be overridden with `overrideAttrs`. Override the above example like this:
|
||||
|
||||
```nix
|
||||
|
@ -91,144 +93,29 @@ To obtain the hash without external tools, set `vendorHash = lib.fakeHash;` and
|
|||
}
|
||||
```
|
||||
|
||||
### Overriding `goModules` {#buildGoModule-goModules-override}
|
||||
### `proxyVendor` {#var-go-proxyVendor}
|
||||
|
||||
Overriding `<pkg>.goModules` by calling `goModules.overrideAttrs` is unsupported. Still, it is possible to override the `vendorHash` (`goModules`'s `outputHash`) and the `pre`/`post` hooks for both the build and patch phases of the primary and `goModules` derivation. Alternatively, the primary derivation provides an overridable `passthru.overrideModAttrs` function to store the attribute overlay implicitly taken by `goModules.overrideAttrs`. Here's an example usage of `overrideModAttrs`:
|
||||
If `true`, the intermediate fetcher downloads dependencies from the
|
||||
[Go module proxy](https://go.dev/ref/mod#module-proxy) (using `go mod download`) instead of vendoring them. The resulting
|
||||
[module cache](https://go.dev/ref/mod#module-cache) is then passed to the final derivation.
|
||||
|
||||
```nix
|
||||
{
|
||||
pet-overridden = pet.overrideAttrs (
|
||||
finalAttrs: previousAttrs: {
|
||||
passthru = previousAttrs.passthru // {
|
||||
# If the original package has an `overrideModAttrs` attribute set, you'd
|
||||
# want to extend it, and not replace it. Hence we use
|
||||
# `lib.composeExtensions`. If you are sure the `overrideModAttrs` of the
|
||||
# original package trivially does nothing, you can safely replace it
|
||||
# with your own by not using `lib.composeExtensions`.
|
||||
overrideModAttrs = lib.composeExtensions previousAttrs.passthru.overrideModAttrs (
|
||||
finalModAttrs: previousModAttrs: {
|
||||
# goModules-specific overriding goes here
|
||||
postBuild = ''
|
||||
# Here you have access to the `vendor` directory.
|
||||
substituteInPlace vendor/github.com/example/repo/file.go \
|
||||
--replace-fail "panic(err)" ""
|
||||
'';
|
||||
}
|
||||
);
|
||||
};
|
||||
}
|
||||
);
|
||||
}
|
||||
```
|
||||
This is useful if your code depends on C code and `go mod tidy` does not include the needed sources to build or
|
||||
if any dependency has case-insensitive conflicts which will produce platform-dependent `vendorHash` checksums.
|
||||
|
||||
## `buildGoPackage` (legacy) {#ssec-go-legacy}
|
||||
Defaults to `false`.
|
||||
|
||||
The function `buildGoPackage` builds legacy Go programs, not supporting Go modules.
|
||||
|
||||
::: {.warning}
|
||||
`buildGoPackage` is deprecated and will be removed for the 25.05 release.
|
||||
:::
|
||||
### `modPostBuild` {#var-go-modPostBuild}
|
||||
|
||||
### Migrating from `buildGoPackage` to `buildGoModule` {#buildGoPackage-migration}
|
||||
Shell commands to run after the build of the goModules executes `go mod vendor`, and before calculating fixed output derivation's `vendorHash`.
|
||||
Note that if you change this attribute, you need to update `vendorHash` attribute.
|
||||
|
||||
Go modules, released 6y ago, are now widely adopted in the ecosystem.
|
||||
Most upstream projects are using Go modules, and the tooling previously used for dependency management in Go is mostly deprecated, archived or at least unmaintained at this point.
|
||||
|
||||
In case a project doesn't have external dependencies or dependencies are vendored in a way understood by `go mod init`, migration can be done with a few changes in the package.
|
||||
### `modRoot` {#var-go-modRoot}
|
||||
|
||||
- Switch the builder from `buildGoPackage` to `buildGoModule`
|
||||
- Remove `goPackagePath` and other attributes specific to `buildGoPackage`
|
||||
- Set `vendorHash = null;`
|
||||
- Run `go mod init <module name>` in `postPatch`
|
||||
The root directory of the Go module that contains the `go.mod` file.
|
||||
|
||||
In case the package has external dependencies that aren't vendored or the build setup is more complex the upstream source might need to be patched.
|
||||
Examples for the migration can be found in the [issue tracking migration withing nixpkgs](https://github.com/NixOS/nixpkgs/issues/318069).
|
||||
|
||||
### Example for `buildGoPackage` {#example-for-buildgopackage}
|
||||
|
||||
In the following is an example expression using `buildGoPackage`, the following arguments are of special significance to the function:
|
||||
|
||||
- `goPackagePath` specifies the package's canonical Go import path.
|
||||
- `goDeps` is where the Go dependencies of a Go program are listed as a list of package source identified by Go import path. It could be imported as a separate `deps.nix` file for readability. The dependency data structure is described below.
|
||||
|
||||
```nix
|
||||
{
|
||||
deis = buildGoPackage rec {
|
||||
pname = "deis";
|
||||
version = "1.13.0";
|
||||
|
||||
goPackagePath = "github.com/deis/deis";
|
||||
|
||||
src = fetchFromGitHub {
|
||||
owner = "deis";
|
||||
repo = "deis";
|
||||
rev = "v${version}";
|
||||
hash = "sha256-XCPD4LNWtAd8uz7zyCLRfT8rzxycIUmTACjU03GnaeM=";
|
||||
};
|
||||
|
||||
goDeps = ./deps.nix;
|
||||
};
|
||||
}
|
||||
```
|
||||
|
||||
The `goDeps` attribute can be imported from a separate `nix` file that defines which Go libraries are needed and should be included in `GOPATH` for `buildPhase`:
|
||||
|
||||
```nix
|
||||
# deps.nix
|
||||
[ # goDeps is a list of Go dependencies.
|
||||
{
|
||||
# goPackagePath specifies Go package import path.
|
||||
goPackagePath = "gopkg.in/yaml.v2";
|
||||
fetch = {
|
||||
# `fetch type` that needs to be used to get package source.
|
||||
# If `git` is used there should be `url`, `rev` and `hash` defined next to it.
|
||||
type = "git";
|
||||
url = "https://gopkg.in/yaml.v2";
|
||||
rev = "a83829b6f1293c91addabc89d0571c246397bbf4";
|
||||
hash = "sha256-EMrdy0M0tNuOcITaTAmT5/dPSKPXwHDKCXFpkGbVjdQ=";
|
||||
};
|
||||
}
|
||||
{
|
||||
goPackagePath = "github.com/docopt/docopt-go";
|
||||
fetch = {
|
||||
type = "git";
|
||||
url = "https://github.com/docopt/docopt-go";
|
||||
rev = "784ddc588536785e7299f7272f39101f7faccc3f";
|
||||
hash = "sha256-Uo89zjE+v3R7zzOq/gbQOHj3SMYt2W1nDHS7RCUin3M=";
|
||||
};
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
To extract dependency information from a Go package in automated way use [go2nix (deprecated)](https://github.com/kamilchm/go2nix). It can produce complete derivation and `goDeps` file for Go programs.
|
||||
|
||||
You may use Go packages installed into the active Nix profiles by adding the following to your ~/.bashrc:
|
||||
|
||||
```bash
|
||||
for p in $NIX_PROFILES; do
|
||||
GOPATH="$p/share/go:$GOPATH"
|
||||
done
|
||||
```
|
||||
|
||||
## Attributes used by both builders {#ssec-go-common-attributes}
|
||||
|
||||
Many attributes [controlling the build phase](#variables-controlling-the-build-phase) are respected by both `buildGoModule` and `buildGoPackage`. Note that `buildGoModule` reads the following attributes also when building the `vendor/` goModules fixed output derivation as well:
|
||||
|
||||
- [`sourceRoot`](#var-stdenv-sourceRoot)
|
||||
- [`prePatch`](#var-stdenv-prePatch)
|
||||
- [`patches`](#var-stdenv-patches)
|
||||
- [`patchFlags`](#var-stdenv-patchFlags)
|
||||
- [`postPatch`](#var-stdenv-postPatch)
|
||||
- [`preBuild`](#var-stdenv-preBuild)
|
||||
- `env`: useful for passing down variables such as `GOWORK`.
|
||||
|
||||
To control test execution of the build derivation, the following attributes are of interest:
|
||||
|
||||
- [`checkInputs`](#var-stdenv-checkInputs)
|
||||
- [`preCheck`](#var-stdenv-preCheck)
|
||||
- [`checkFlags`](#var-stdenv-checkFlags)
|
||||
|
||||
In addition to the above attributes, and the many more variables respected also by `stdenv.mkDerivation`, both `buildGoModule` and `buildGoPackage` respect Go-specific attributes that tweak them to behave slightly differently:
|
||||
Defaults to `./`, which is the root of `src`.
|
||||
|
||||
### `ldflags` {#var-go-ldflags}
|
||||
|
||||
|
@ -326,6 +213,38 @@ Whether the build result should be allowed to contain references to the Go tool
|
|||
|
||||
Defaults to `false`
|
||||
|
||||
## Overriding `goModules` {#buildGoModule-goModules-override}
|
||||
|
||||
Overriding `<pkg>.goModules` by calling `goModules.overrideAttrs` is unsupported. Still, it is possible to override the `vendorHash` (`goModules`'s `outputHash`) and the `pre`/`post` hooks for both the build and patch phases of the primary and `goModules` derivation.
|
||||
|
||||
Alternatively, the primary derivation provides an overridable `passthru.overrideModAttrs` function to store the attribute overlay implicitly taken by `goModules.overrideAttrs`. Here's an example usage of `overrideModAttrs`:
|
||||
|
||||
```nix
|
||||
{
|
||||
pet-overridden = pet.overrideAttrs (
|
||||
finalAttrs: previousAttrs: {
|
||||
passthru = previousAttrs.passthru // {
|
||||
# If the original package has an `overrideModAttrs` attribute set, you'd
|
||||
# want to extend it, and not replace it. Hence we use
|
||||
# `lib.composeExtensions`. If you are sure the `overrideModAttrs` of the
|
||||
# original package trivially does nothing, you can safely replace it
|
||||
# with your own by not using `lib.composeExtensions`.
|
||||
overrideModAttrs = lib.composeExtensions previousAttrs.passthru.overrideModAttrs (
|
||||
finalModAttrs: previousModAttrs: {
|
||||
# goModules-specific overriding goes here
|
||||
postBuild = ''
|
||||
# Here you have access to the `vendor` directory.
|
||||
substituteInPlace vendor/github.com/example/repo/file.go \
|
||||
--replace-fail "panic(err)" ""
|
||||
'';
|
||||
}
|
||||
);
|
||||
};
|
||||
}
|
||||
);
|
||||
}
|
||||
```
|
||||
|
||||
## Controlling the Go environment {#ssec-go-environment}
|
||||
|
||||
The Go build can be further tweaked by setting environment variables. In most cases, this isn't needed. Possible values can be found in the [Go documentation of accepted environment variables](https://pkg.go.dev/cmd/go#hdr-Environment_variables). Notice that some of these flags are set by the builder itself and should not be set explicitly. If in doubt, grep the implementation of the builder.
|
||||
|
@ -364,4 +283,23 @@ If a larger amount of tests should be skipped, the following pattern can be used
|
|||
```
|
||||
|
||||
To disable tests altogether, set `doCheck = false;`.
|
||||
`buildGoPackage` does not execute tests by default.
|
||||
|
||||
## Migrating from `buildGoPackage` to `buildGoModule` {#buildGoPackage-migration}
|
||||
|
||||
::: {.warning}
|
||||
`buildGoPackage` was removed for the 25.05 release. It was used to build legacy Go programs
|
||||
that do not support Go modules.
|
||||
:::
|
||||
|
||||
Go modules, released 6y ago, are now widely adopted in the ecosystem.
|
||||
Most upstream projects are using Go modules, and the tooling previously used for dependency management in Go is mostly deprecated, archived or at least unmaintained at this point.
|
||||
|
||||
In case a project doesn't have external dependencies or dependencies are vendored in a way understood by `go mod init`, migration can be done with a few changes in the package.
|
||||
|
||||
- Switch the builder from `buildGoPackage` to `buildGoModule`
|
||||
- Remove `goPackagePath` and other attributes specific to `buildGoPackage`
|
||||
- Set `vendorHash = null;`
|
||||
- Run `go mod init <module name>` in `postPatch`
|
||||
|
||||
In case the package has external dependencies that aren't vendored or the build setup is more complex the upstream source might need to be patched.
|
||||
Examples for the migration can be found in the [issue tracking migration within nixpkgs](https://github.com/NixOS/nixpkgs/issues/318069).
|
||||
|
|
|
@ -67,3 +67,14 @@ nix-shell -p 'julia.withPackages ["Plots"]' --run julia
|
|||
|
||||
This normally points at a special augmented version of the Julia [General packages registry](https://github.com/JuliaRegistries/General).
|
||||
If you want to use a bleeding-edge version to pick up the latest package updates, you can plug in a later revision than the one in Nixpkgs.
|
||||
|
||||
* `juliaCpuTarget`: Allows you to set `JULIA_CPU_TARGET` when precompiling. Has no effect if `precompile=false`.
|
||||
|
||||
You may want to use this if you're building a Julia depot that will end up in a Nix cache and used on machines with
|
||||
different CPUs.
|
||||
|
||||
Why? Julia will detect the CPU microarchitecture of the build machine and include this information in the precompiled
|
||||
`*.ji` files. Starting in 1.10 Julia became more strict about checking the CPU target compatibility, so it may reject
|
||||
your precompiled files if they were compiled on a different machine.
|
||||
A good option to provide wide compatibility is to set this to `"generic"`, although this may reduce performance.
|
||||
You can also set a semicolon-separated list of multiple different targets. See the Julia documentation for details.
|
||||
|
|
|
@ -49,6 +49,90 @@ This package calls `maven.buildMavenPackage` to do its work. The primary differe
|
|||
After setting `maven.buildMavenPackage`, we then do standard Java `.jar` installation by saving the `.jar` to `$out/share/java` and then making a wrapper which allows executing that file; see [](#sec-language-java) for additional generic information about packaging Java applications.
|
||||
:::
|
||||
|
||||
### Overriding Maven package attributes {#maven-overriding-package-attributes}
|
||||
|
||||
```
|
||||
overrideMavenAttrs :: (AttrSet -> Derivation) | ((AttrSet -> Attrset) -> Derivation) -> Derivation
|
||||
```
|
||||
|
||||
The output of `buildMavenPackage` has an `overrideMavenAttrs` attribute, which is a function that takes either
|
||||
- any subset of the attributes that can be passed to `buildMavenPackage`
|
||||
|
||||
or
|
||||
- a function that takes the argument passed to the previous invocation of `buildMavenPackage` (conventionally called `old`) and returns an attribute set that can be passed to `buildMavenPackage`
|
||||
|
||||
and returns a derivation that builds a Maven package based on the old and new arguments merged.
|
||||
|
||||
This is similar to [](#sec-pkg-overrideAttrs), but notably does not allow accessing the final value of the argument to `buildMavenPackage`.
|
||||
|
||||
:::{.example}
|
||||
### `overrideMavenAttrs` Example
|
||||
|
||||
Use `overrideMavenAttrs` to build `jd-cli` version 1.2.0 and disable some flaky tests:
|
||||
|
||||
```nix
|
||||
jd-cli.overrideMavenAttrs (old: rec {
|
||||
version = "1.2.0";
|
||||
src = fetchFromGitHub {
|
||||
owner = old.src.owner;
|
||||
repo = old.src.repo;
|
||||
rev = "${old.pname}-${version}";
|
||||
# old source hash of 1.2.0 version
|
||||
hash = "sha256-US7j6tQ6mh1libeHnQdFxPGoxHzbZHqehWSgCYynKx8=";
|
||||
};
|
||||
|
||||
# tests can be disabled by prefixing it with `!`
|
||||
# see Maven documentation for more details:
|
||||
# https://maven.apache.org/surefire/maven-surefire-plugin/examples/single-test.html#Multiple_Formats_in_One
|
||||
mvnParameters = lib.escapeShellArgs [
|
||||
"-Dsurefire.failIfNoSpecifiedTests=false"
|
||||
"-Dtest=!JavaDecompilerTest#basicTest,!JavaDecompilerTest#patternMatchingTest"
|
||||
];
|
||||
|
||||
# old mvnHash of 1.2.0 maven dependencies
|
||||
mvnHash = "sha256-N9XC1pg6Y4sUiBWIQUf16QSXCuiAPpXEHGlgApviF4I=";
|
||||
});
|
||||
```
|
||||
:::
|
||||
|
||||
### Offline build {#maven-offline-build}
|
||||
|
||||
By default, `buildMavenPackage` does the following:
|
||||
|
||||
1. Run `mvn package -Dmaven.repo.local=$out/.m2 ${mvnParameters}` in the
|
||||
`fetchedMavenDeps` [fixed-output derivation](https://nixos.org/manual/nix/stable/glossary.html#gloss-fixed-output-derivation).
|
||||
2. Run `mvn package -o -nsu "-Dmaven.repo.local=$mvnDeps/.m2"
|
||||
${mvnParameters}` again in the main derivation.
|
||||
|
||||
As a result, tests are run twice.
|
||||
This also means that a failing test will trigger a new attempt to realise the fixed-output derivation, which in turn downloads all dependencies again.
|
||||
For bigger Maven projects, this might lead to a long feedback cycle.
|
||||
|
||||
Use `buildOffline = true` to change the behaviour of `buildMavenPackage` to the following:
|
||||
1. Run `mvn de.qaware.maven:go-offline-maven-plugin:1.2.8:resolve-dependencies
|
||||
-Dmaven.repo.local=$out/.m2 ${mvnDepsParameters}` in the fixed-output derivation.
|
||||
2. Run `mvn package -o -nsu "-Dmaven.repo.local=$mvnDeps/.m2"
|
||||
${mvnParameters}` in the main derivation.
|
||||
|
||||
As a result, all dependencies are downloaded in step 1 and the tests are executed in step 2.
|
||||
A failing test only triggers a rebuild of step 2 as it can reuse the dependencies of step 1 because they have not changed.
|
||||
|
||||
::: {.warning}
|
||||
Test dependencies are not downloaded in step 1 and are therefore missing in
|
||||
step 2 which will most probably fail the build. The `go-offline` plugin cannot
|
||||
handle these so-called [dynamic dependencies](https://github.com/qaware/go-offline-maven-plugin?tab=readme-ov-file#dynamic-dependencies).
|
||||
In that case you must add these dynamic dependencies manually with:
|
||||
```nix
|
||||
maven.buildMavenPackage rec {
|
||||
manualMvnArtifacts = [
|
||||
# add dynamic test dependencies here
|
||||
"org.apache.maven.surefire:surefire-junit-platform:3.1.2"
|
||||
"org.junit.platform:junit-platform-launcher:1.10.0"
|
||||
];
|
||||
};
|
||||
```
|
||||
:::
|
||||
|
||||
### Stable Maven plugins {#stable-maven-plugins}
|
||||
|
||||
Maven defines default versions for its core plugins, e.g. `maven-compiler-plugin`. If your project does not override these versions, an upgrade of Maven will change the version of the used plugins, and therefore the derivation and hash.
|
||||
|
|
|
@ -264,10 +264,15 @@ nix-shell -p vimPluginsUpdater --run 'vim-plugins-updater --github-token=mytoken
|
|||
Alternatively, set the number of processes to a lower count to avoid rate-limiting.
|
||||
|
||||
```sh
|
||||
|
||||
nix-shell -p vimPluginsUpdater --run 'vim-plugins-updater --proc 1'
|
||||
```
|
||||
|
||||
If you want to update only certain plugins, you can specify them after the `update` command. Note that you must use the same plugin names as the `pkgs/applications/editors/vim/plugins/vim-plugin-names` file.
|
||||
|
||||
```sh
|
||||
nix-shell -p vimPluginsUpdater --run 'vim-plugins-updater update "nvim-treesitter" "LazyVim"'
|
||||
```
|
||||
|
||||
## How to maintain an out-of-tree overlay of vim plugins ? {#vim-out-of-tree-overlays}
|
||||
|
||||
You can use the updater script to generate basic packages out of a custom vim
|
||||
|
|
42
third_party/nixpkgs/doc/redirects.json
vendored
42
third_party/nixpkgs/doc/redirects.json
vendored
|
@ -1421,11 +1421,9 @@
|
|||
"trivial-builder-symlinkJoin": [
|
||||
"index.html#trivial-builder-symlinkJoin"
|
||||
],
|
||||
"trivial-builder-writeReferencesToFile": [
|
||||
"index.html#trivial-builder-writeReferencesToFile"
|
||||
],
|
||||
"trivial-builder-writeClosure": [
|
||||
"index.html#trivial-builder-writeClosure"
|
||||
"index.html#trivial-builder-writeClosure",
|
||||
"index.html#trivial-builder-writeReferencesToFile"
|
||||
],
|
||||
"trivial-builder-writeDirectReferencesToFile": [
|
||||
"index.html#trivial-builder-writeDirectReferencesToFile"
|
||||
|
@ -2721,29 +2719,20 @@
|
|||
"index.html#ssec-language-go"
|
||||
],
|
||||
"buildgomodule-parameters": [
|
||||
"index.html#buildgomodule-parameters"
|
||||
"index.html#buildgomodule-parameters",
|
||||
"index.html#ssec-go-common-attributes"
|
||||
],
|
||||
"ex-buildGoModule": [
|
||||
"index.html#ex-buildGoModule"
|
||||
],
|
||||
"buildGoModule-vendorHash": [
|
||||
"index.html#buildGoModule-vendorHash"
|
||||
],
|
||||
"buildGoModule-goModules-override": [
|
||||
"index.html#buildGoModule-goModules-override"
|
||||
],
|
||||
"ssec-go-legacy": [
|
||||
"index.html#ssec-go-legacy"
|
||||
],
|
||||
"buildGoPackage-migration": [
|
||||
"index.html#buildGoPackage-migration"
|
||||
],
|
||||
"example-for-buildgopackage": [
|
||||
"index.html#buildGoPackage-migration",
|
||||
"index.html#ssec-go-legacy",
|
||||
"index.html#example-for-buildgopackage"
|
||||
],
|
||||
"ssec-go-common-attributes": [
|
||||
"index.html#ssec-go-common-attributes"
|
||||
],
|
||||
"var-go-ldflags": [
|
||||
"index.html#var-go-ldflags"
|
||||
],
|
||||
|
@ -2759,6 +2748,9 @@
|
|||
"var-go-excludedPackages": [
|
||||
"index.html#var-go-excludedPackages"
|
||||
],
|
||||
"var-go-proxyVendor": [
|
||||
"index.html#var-go-proxyVendor"
|
||||
],
|
||||
"var-go-CGO_ENABLED": [
|
||||
"index.html#var-go-CGO_ENABLED"
|
||||
],
|
||||
|
@ -2768,6 +2760,16 @@
|
|||
"var-go-allowGoReference": [
|
||||
"index.html#var-go-allowGoReference"
|
||||
],
|
||||
"var-go-vendorHash": [
|
||||
"index.html#var-go-vendorHash",
|
||||
"index.html#buildGoModule-vendorHash"
|
||||
],
|
||||
"var-go-modPostBuild": [
|
||||
"index.html#var-go-modPostBuild"
|
||||
],
|
||||
"var-go-modRoot": [
|
||||
"index.html#var-go-modRoot"
|
||||
],
|
||||
"ssec-go-environment": [
|
||||
"index.html#ssec-go-environment"
|
||||
],
|
||||
|
@ -3188,6 +3190,12 @@
|
|||
"maven-buildmavenpackage": [
|
||||
"index.html#maven-buildmavenpackage"
|
||||
],
|
||||
"maven-overriding-package-attributes": [
|
||||
"index.html#maven-overriding-package-attributes"
|
||||
],
|
||||
"maven-offline-build": [
|
||||
"index.html#maven-offline-build"
|
||||
],
|
||||
"stable-maven-plugins": [
|
||||
"index.html#stable-maven-plugins"
|
||||
],
|
||||
|
|
|
@ -165,7 +165,7 @@ These paths will need to be replaced with relative paths and the xcbuild package
|
|||
stdenv.mkDerivation {
|
||||
name = "libfoo-1.2.3";
|
||||
postPatch = ''
|
||||
subsituteInPlace Makefile \
|
||||
substituteInPlace Makefile \
|
||||
--replace-fail '/usr/bin/xcodebuild' 'xcodebuild' \
|
||||
--replace-fail '/usr/bin/xcrun' 'xcrun' \
|
||||
--replace-fail '/usr/bin/PListBuddy' 'PListBuddy'
|
||||
|
|
|
@ -154,11 +154,13 @@ There are several ways to tweak how Nix handles a package which has been marked
|
|||
|
||||
The `allowInsecurePredicate` option is a function which accepts a package and returns a boolean, much like `allowUnfreePredicate`.
|
||||
|
||||
The following configuration example only allows insecure packages with very short names:
|
||||
The following configuration example allows any version of the `ovftool` package:
|
||||
|
||||
```nix
|
||||
{
|
||||
allowInsecurePredicate = pkg: builtins.stringLength (lib.getName pkg) <= 5;
|
||||
allowInsecurePredicate = pkg: builtins.elem (lib.getName pkg) [
|
||||
"ovftool"
|
||||
];
|
||||
}
|
||||
```
|
||||
|
||||
|
|
4
third_party/nixpkgs/lib/customisation.nix
vendored
4
third_party/nixpkgs/lib/customisation.nix
vendored
|
@ -543,10 +543,6 @@ rec {
|
|||
newScope = scope: newScope (self // scope);
|
||||
callPackage = self.newScope {};
|
||||
overrideScope = g: makeScope newScope (extends g f);
|
||||
# Remove after 24.11 is released.
|
||||
overrideScope' = g: warnIf (isInOldestRelease 2311)
|
||||
"`overrideScope'` (from `lib.makeScope`) has been renamed to `overrideScope`."
|
||||
(makeScope newScope (extends g f));
|
||||
packages = f;
|
||||
};
|
||||
in self;
|
||||
|
|
4
third_party/nixpkgs/lib/default.nix
vendored
4
third_party/nixpkgs/lib/default.nix
vendored
|
@ -121,7 +121,7 @@ let
|
|||
inherit (self.customisation) overrideDerivation makeOverridable
|
||||
callPackageWith callPackagesWith extendDerivation hydraJob
|
||||
makeScope makeScopeWithSplicing makeScopeWithSplicing';
|
||||
inherit (self.derivations) lazyDerivation optionalDrvAttr;
|
||||
inherit (self.derivations) lazyDerivation optionalDrvAttr warnOnInstantiate;
|
||||
inherit (self.meta) addMetaAttrs dontDistribute setName updateName
|
||||
appendToName mapDerivationAttrset setPrio lowPrio lowPrioSet hiPrio
|
||||
hiPrioSet licensesSpdx getLicenseFromSpdxId getLicenseFromSpdxIdOr
|
||||
|
@ -152,7 +152,7 @@ let
|
|||
scrubOptionValue literalExpression literalExample
|
||||
showOption showOptionWithDefLocs showFiles
|
||||
unknownModule mkOption mkPackageOption mkPackageOptionMD
|
||||
mdDoc literalMD;
|
||||
literalMD;
|
||||
inherit (self.types) isType setType defaultTypeMerge defaultFunctor
|
||||
isOptionType mkOptionType;
|
||||
inherit (self.asserts)
|
||||
|
|
36
third_party/nixpkgs/lib/derivations.nix
vendored
36
third_party/nixpkgs/lib/derivations.nix
vendored
|
@ -4,6 +4,8 @@ let
|
|||
inherit (lib)
|
||||
genAttrs
|
||||
isString
|
||||
mapAttrs
|
||||
removeAttrs
|
||||
throwIfNot
|
||||
;
|
||||
|
||||
|
@ -206,4 +208,38 @@ in
|
|||
optionalDrvAttr =
|
||||
cond:
|
||||
value: if cond then value else null;
|
||||
|
||||
/**
|
||||
Wrap a derivation such that instantiating it produces a warning.
|
||||
|
||||
All attributes apart from `meta`, `name`, and `type` (which are used by
|
||||
`nix search`) will be wrapped in `lib.warn`.
|
||||
|
||||
# Inputs
|
||||
|
||||
`msg`
|
||||
: The warning message to emit (via `lib.warn`).
|
||||
|
||||
`drv`
|
||||
: The derivation to wrap.
|
||||
|
||||
# Examples
|
||||
:::{.example}
|
||||
## `lib.derivations.warnOnInstantiate` usage example
|
||||
|
||||
```nix
|
||||
{
|
||||
myPackage = warnOnInstantiate "myPackage has been renamed to my-package" my-package;
|
||||
}
|
||||
```
|
||||
|
||||
:::
|
||||
*/
|
||||
warnOnInstantiate =
|
||||
msg: drv:
|
||||
let
|
||||
drvToWrap = removeAttrs drv [ "meta" "name" "type" ];
|
||||
in
|
||||
drv
|
||||
// mapAttrs (_: lib.warn msg) drvToWrap;
|
||||
}
|
||||
|
|
71
third_party/nixpkgs/lib/filesystem.nix
vendored
71
third_party/nixpkgs/lib/filesystem.nix
vendored
|
@ -306,35 +306,29 @@ in
|
|||
As a result, directories with no `.nix` files (including empty
|
||||
directories) will be transformed into empty attribute sets.
|
||||
|
||||
# Inputs
|
||||
|
||||
Structured function argument
|
||||
|
||||
: Attribute set containing the following attributes.
|
||||
Additional attributes are ignored.
|
||||
|
||||
`callPackage`
|
||||
|
||||
: `pkgs.callPackage`
|
||||
|
||||
Type: `Path -> AttrSet -> a`
|
||||
|
||||
`directory`
|
||||
|
||||
: The directory to read package files from
|
||||
|
||||
Type: `Path`
|
||||
|
||||
|
||||
# Type
|
||||
|
||||
```
|
||||
packagesFromDirectoryRecursive :: AttrSet -> AttrSet
|
||||
packagesFromDirectoryRecursive :: {
|
||||
callPackage :: Path -> {} -> a,
|
||||
directory :: Path,
|
||||
...
|
||||
} -> AttrSet
|
||||
```
|
||||
|
||||
# Inputs
|
||||
|
||||
`callPackage`
|
||||
: The function used to convert a Nix file's path into a leaf of the attribute set.
|
||||
It is typically the `callPackage` function, taken from either `pkgs` or a new scope corresponding to the `directory`.
|
||||
|
||||
`directory`
|
||||
: The directory to read package files from.
|
||||
|
||||
|
||||
# Examples
|
||||
:::{.example}
|
||||
## `lib.filesystem.packagesFromDirectoryRecursive` usage example
|
||||
## Basic use of `lib.packagesFromDirectoryRecursive`
|
||||
|
||||
```nix
|
||||
packagesFromDirectoryRecursive {
|
||||
|
@ -342,17 +336,48 @@ in
|
|||
directory = ./my-packages;
|
||||
}
|
||||
=> { ... }
|
||||
```
|
||||
|
||||
In this case, `callPackage` will only search `pkgs` for a file's input parameters.
|
||||
In other words, a file cannot refer to another file in the directory in its input parameters.
|
||||
:::
|
||||
|
||||
::::{.example}
|
||||
## Create a scope for the nix files found in a directory
|
||||
```nix
|
||||
lib.makeScope pkgs.newScope (
|
||||
self: packagesFromDirectoryRecursive {
|
||||
callPackage = self.callPackage;
|
||||
inherit (self) callPackage;
|
||||
directory = ./my-packages;
|
||||
}
|
||||
)
|
||||
=> { ... }
|
||||
```
|
||||
|
||||
For example, take the following directory structure:
|
||||
```
|
||||
my-packages
|
||||
├── a.nix → { b }: assert b ? b1; ...
|
||||
└── b
|
||||
├── b1.nix → { a }: ...
|
||||
└── b2.nix
|
||||
```
|
||||
|
||||
Here, `b1.nix` can specify `{ a }` as a parameter, which `callPackage` will resolve as expected.
|
||||
Likewise, `a.nix` receives an attrset corresponding to the contents of the `b` directory.
|
||||
|
||||
:::{.note}
|
||||
`a.nix` cannot directly take as inputs packages defined in a child directory, such as `b1`.
|
||||
:::
|
||||
|
||||
:::{.warning}
|
||||
As of now, `lib.packagesFromDirectoryRecursive` cannot create nested scopes for sub-directories.
|
||||
|
||||
In particular, files under `b/` can only require (as inputs) other files under `my-packages`,
|
||||
but not to those in the same directory, nor those in a parent directory; e.g, `b2.nix` cannot directly
|
||||
require `b1`.
|
||||
:::
|
||||
::::
|
||||
*/
|
||||
packagesFromDirectoryRecursive =
|
||||
{
|
||||
|
|
2
third_party/nixpkgs/lib/licenses.nix
vendored
2
third_party/nixpkgs/lib/licenses.nix
vendored
|
@ -792,7 +792,7 @@ lib.mapAttrs mkLicense ({
|
|||
|
||||
lens = {
|
||||
fullName = "Lens Terms of Service Agreement";
|
||||
url = "https://k8slens.dev/licenses/tos";
|
||||
url = "https://k8slens.dev/legal/tos";
|
||||
free = false;
|
||||
};
|
||||
|
||||
|
|
40
third_party/nixpkgs/lib/modules.nix
vendored
40
third_party/nixpkgs/lib/modules.nix
vendored
|
@ -751,17 +751,47 @@ let
|
|||
t' = opt.options.type;
|
||||
mergedType = t.typeMerge t'.functor;
|
||||
typesMergeable = mergedType != null;
|
||||
typeSet = if (bothHave "type") && typesMergeable
|
||||
then { type = mergedType; }
|
||||
else {};
|
||||
|
||||
# TODO: Remove this when all downstream reliances of internals: 'functor.wrapped' are sufficiently migrated.
|
||||
# A function that adds the deprecated wrapped message to a type.
|
||||
addDeprecatedWrapped = t:
|
||||
t // {
|
||||
functor = t.functor // {
|
||||
wrapped = t.functor.wrappedDeprecationMessage {
|
||||
inherit loc;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
typeSet =
|
||||
if opt.options ? type then
|
||||
if res ? type then
|
||||
if typesMergeable then
|
||||
{
|
||||
type =
|
||||
if mergedType ? functor.wrappedDeprecationMessage then
|
||||
addDeprecatedWrapped mergedType
|
||||
else
|
||||
mergedType;
|
||||
}
|
||||
else
|
||||
# Keep in sync with the same error below!
|
||||
throw "The option `${showOption loc}' in `${opt._file}' is already declared in ${showFiles res.declarations}."
|
||||
else if opt.options.type ? functor.wrappedDeprecationMessage then
|
||||
{ type = addDeprecatedWrapped opt.options.type; }
|
||||
else
|
||||
{}
|
||||
else
|
||||
{};
|
||||
|
||||
bothHave = k: opt.options ? ${k} && res ? ${k};
|
||||
in
|
||||
if bothHave "default" ||
|
||||
bothHave "example" ||
|
||||
bothHave "description" ||
|
||||
bothHave "apply" ||
|
||||
(bothHave "type" && (! typesMergeable))
|
||||
bothHave "apply"
|
||||
then
|
||||
# Keep in sync with the same error above!
|
||||
throw "The option `${showOption loc}' in `${opt._file}' is already declared in ${showFiles res.declarations}."
|
||||
else
|
||||
let
|
||||
|
|
7
third_party/nixpkgs/lib/options.nix
vendored
7
third_party/nixpkgs/lib/options.nix
vendored
|
@ -399,13 +399,6 @@ rec {
|
|||
|
||||
literalExample = lib.warn "lib.literalExample is deprecated, use lib.literalExpression instead, or use lib.literalMD for a non-Nix description." literalExpression;
|
||||
|
||||
/* Transition marker for documentation that's already migrated to markdown
|
||||
syntax. Has been a no-op for some while and been removed from nixpkgs.
|
||||
Kept here to alert downstream users who may not be aware of the migration's
|
||||
completion that it should be removed from modules.
|
||||
*/
|
||||
mdDoc = lib.warn "lib.mdDoc will be removed from nixpkgs in 24.11. Option descriptions are now in Markdown by default; you can remove any remaining uses of lib.mdDoc.";
|
||||
|
||||
/* For use in the `defaultText` and `example` option attributes. Causes the
|
||||
given MD text to be inserted verbatim in the documentation, for when
|
||||
a `literalExpression` would be too hard to read.
|
||||
|
|
|
@ -11,12 +11,13 @@ rec {
|
|||
x86-64-v3 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" "avx2" "fma" ];
|
||||
x86-64-v4 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" "avx2" "avx512" "fma" ];
|
||||
# x86_64 Intel
|
||||
nehalem = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" ];
|
||||
westmere = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" ];
|
||||
sandybridge = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" ];
|
||||
ivybridge = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" ];
|
||||
haswell = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
|
||||
broadwell = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
|
||||
nehalem = [ "sse3" "ssse3" "sse4_1" "sse4_2" ];
|
||||
westmere = [ "sse3" "ssse3" "sse4_1" "sse4_2" ];
|
||||
silvermont = [ "sse3" "ssse3" "sse4_1" "sse4_2" ];
|
||||
sandybridge = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" ];
|
||||
ivybridge = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" ];
|
||||
haswell = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" "avx2" "fma" ];
|
||||
broadwell = [ "sse3" "ssse3" "sse4_1" "sse4_2" "avx" "avx2" "fma" ];
|
||||
skylake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "fma" ];
|
||||
skylake-avx512 = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
|
||||
cannonlake = [ "sse3" "ssse3" "sse4_1" "sse4_2" "aes" "avx" "avx2" "avx512" "fma" ];
|
||||
|
|
23
third_party/nixpkgs/lib/systems/default.nix
vendored
23
third_party/nixpkgs/lib/systems/default.nix
vendored
|
@ -6,9 +6,9 @@ let
|
|||
filterAttrs
|
||||
foldl
|
||||
hasInfix
|
||||
isAttrs
|
||||
isFunction
|
||||
isList
|
||||
isString
|
||||
mapAttrs
|
||||
optional
|
||||
optionalAttrs
|
||||
|
@ -55,24 +55,34 @@ let
|
|||
*/
|
||||
flakeExposed = import ./flake-systems.nix { };
|
||||
|
||||
# Turn localSystem or crossSystem, which could be system-string or attrset, into
|
||||
# attrset.
|
||||
systemToAttrs = systemOrArgs:
|
||||
if isAttrs systemOrArgs then systemOrArgs else { system = systemOrArgs; };
|
||||
|
||||
# Elaborate a `localSystem` or `crossSystem` so that it contains everything
|
||||
# necessary.
|
||||
#
|
||||
# `parsed` is inferred from args, both because there are two options with one
|
||||
# clearly preferred, and to prevent cycles. A simpler fixed point where the RHS
|
||||
# always just used `final.*` would fail on both counts.
|
||||
elaborate = args': let
|
||||
args = if isString args' then { system = args'; }
|
||||
else args';
|
||||
elaborate = systemOrArgs: let
|
||||
allArgs = systemToAttrs systemOrArgs;
|
||||
|
||||
# Those two will always be derived from "config", if given, so they should NOT
|
||||
# be overridden further down with "// args".
|
||||
args = builtins.removeAttrs allArgs [ "parsed" "system" ];
|
||||
|
||||
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
|
||||
rust = args.rust or args.rustc or {};
|
||||
|
||||
final = {
|
||||
# Prefer to parse `config` as it is strictly more informative.
|
||||
parsed = parse.mkSystemFromString (if args ? config then args.config else args.system);
|
||||
# Either of these can be losslessly-extracted from `parsed` iff parsing succeeds.
|
||||
parsed = parse.mkSystemFromString (args.config or allArgs.system);
|
||||
# This can be losslessly-extracted from `parsed` iff parsing succeeds.
|
||||
system = parse.doubleFromSystem final.parsed;
|
||||
# TODO: This currently can't be losslessly-extracted from `parsed`, for example
|
||||
# because of -mingw32.
|
||||
config = parse.tripleFromSystem final.parsed;
|
||||
# Determine whether we can execute binaries built for the provided platform.
|
||||
canExecute = platform:
|
||||
|
@ -435,5 +445,6 @@ in
|
|||
inspect
|
||||
parse
|
||||
platforms
|
||||
systemToAttrs
|
||||
;
|
||||
}
|
||||
|
|
2
third_party/nixpkgs/lib/systems/doubles.nix
vendored
2
third_party/nixpkgs/lib/systems/doubles.nix
vendored
|
@ -13,7 +13,7 @@ let
|
|||
"x86_64-darwin" "i686-darwin" "aarch64-darwin" "armv7a-darwin"
|
||||
|
||||
# FreeBSD
|
||||
"i686-freebsd" "x86_64-freebsd"
|
||||
"i686-freebsd" "x86_64-freebsd" "aarch64-freebsd"
|
||||
|
||||
# Genode
|
||||
"aarch64-genode" "i686-genode" "x86_64-genode"
|
||||
|
|
6
third_party/nixpkgs/lib/systems/examples.nix
vendored
6
third_party/nixpkgs/lib/systems/examples.nix
vendored
|
@ -329,11 +329,17 @@ rec {
|
|||
ucrtAarch64 = {
|
||||
config = "aarch64-w64-mingw32";
|
||||
libc = "ucrt";
|
||||
rust.rustcTarget = "aarch64-pc-windows-gnullvm";
|
||||
useLLVM = true;
|
||||
};
|
||||
|
||||
# BSDs
|
||||
|
||||
aarch64-freebsd = {
|
||||
config = "aarch64-unknown-freebsd";
|
||||
useLLVM = true;
|
||||
};
|
||||
|
||||
x86_64-freebsd = {
|
||||
config = "x86_64-unknown-freebsd";
|
||||
useLLVM = true;
|
||||
|
|
9
third_party/nixpkgs/lib/tests/modules.sh
vendored
9
third_party/nixpkgs/lib/tests/modules.sh
vendored
|
@ -386,6 +386,10 @@ checkConfigOutput '^true$' config.conditionalWorks ./declare-attrsOf.nix ./attrs
|
|||
checkConfigOutput '^false$' config.conditionalWorks ./declare-lazyAttrsOf.nix ./attrsOf-conditional-check.nix
|
||||
checkConfigOutput '^"empty"$' config.value.foo ./declare-lazyAttrsOf.nix ./attrsOf-conditional-check.nix
|
||||
|
||||
# Check attrsWith type merging
|
||||
checkConfigError 'The option `mergedLazyNonLazy'\'' in `.*'\'' is already declared in `.*'\''\.' options.mergedLazyNonLazy ./lazy-attrsWith.nix
|
||||
checkConfigOutput '^11$' config.lazyResult ./lazy-attrsWith.nix
|
||||
checkConfigError 'infinite recursion encountered' config.nonLazyResult ./lazy-attrsWith.nix
|
||||
|
||||
# Even with multiple assignments, a type error should be thrown if any of them aren't valid
|
||||
checkConfigError 'A definition for option .* is not of type .*' \
|
||||
|
@ -516,6 +520,10 @@ checkConfigError 'The option .theOption.nested. in .other.nix. is already declar
|
|||
# Test that types.optionType leaves types untouched as long as they don't need to be merged
|
||||
checkConfigOutput 'ok' config.freeformItems.foo.bar ./adhoc-freeformType-survives-type-merge.nix
|
||||
|
||||
# Test that specifying both functor.wrapped and functor.payload isn't allowed
|
||||
checkConfigError 'Type foo defines both `functor.payload` and `functor.wrapped` at the same time, which is not supported.' config.result ./default-type-merge-both.nix
|
||||
|
||||
|
||||
# Anonymous submodules don't get nixed by import resolution/deduplication
|
||||
# because of an `extendModules` bug, issue 168767.
|
||||
checkConfigOutput '^1$' config.sub.specialisation.value ./extendModules-168767-imports.nix
|
||||
|
@ -571,6 +579,7 @@ checkConfigOutput '^38|27$' options.submoduleLine38.declarationPositions.1.line
|
|||
# nested options work
|
||||
checkConfigOutput '^34$' options.nested.nestedLine34.declarationPositions.0.line ./declaration-positions.nix
|
||||
|
||||
|
||||
cat <<EOF
|
||||
====== module tests ======
|
||||
$pass Pass
|
||||
|
|
28
third_party/nixpkgs/lib/tests/modules/default-type-merge-both.nix
vendored
Normal file
28
third_party/nixpkgs/lib/tests/modules/default-type-merge-both.nix
vendored
Normal file
|
@ -0,0 +1,28 @@
|
|||
{ lib, options, ... }:
|
||||
let
|
||||
foo = lib.mkOptionType {
|
||||
name = "foo";
|
||||
functor = lib.types.defaultFunctor "foo" // {
|
||||
wrapped = lib.types.int;
|
||||
payload = 10;
|
||||
};
|
||||
};
|
||||
in
|
||||
{
|
||||
imports = [
|
||||
{
|
||||
options.foo = lib.mkOption {
|
||||
type = foo;
|
||||
};
|
||||
}
|
||||
{
|
||||
options.foo = lib.mkOption {
|
||||
type = foo;
|
||||
};
|
||||
}
|
||||
];
|
||||
|
||||
options.result = lib.mkOption {
|
||||
default = builtins.seq options.foo null;
|
||||
};
|
||||
}
|
57
third_party/nixpkgs/lib/tests/modules/lazy-attrsWith.nix
vendored
Normal file
57
third_party/nixpkgs/lib/tests/modules/lazy-attrsWith.nix
vendored
Normal file
|
@ -0,0 +1,57 @@
|
|||
# Check that AttrsWith { lazy = true; } is lazy
|
||||
{ lib, ... }:
|
||||
let
|
||||
inherit (lib) types mkOption;
|
||||
|
||||
lazyAttrsOf = mkOption {
|
||||
# Same as lazyAttrsOf
|
||||
type = types.attrsWith {
|
||||
lazy = true;
|
||||
elemType = types.int;
|
||||
};
|
||||
};
|
||||
|
||||
attrsOf = mkOption {
|
||||
# Same as lazyAttrsOf
|
||||
type = types.attrsWith {
|
||||
elemType = types.int;
|
||||
};
|
||||
};
|
||||
in
|
||||
{
|
||||
imports = [
|
||||
# Module A
|
||||
(
|
||||
{ ... }:
|
||||
{
|
||||
options.mergedLazyLazy = lazyAttrsOf;
|
||||
options.mergedLazyNonLazy = lazyAttrsOf;
|
||||
options.mergedNonLazyNonLazy = attrsOf;
|
||||
}
|
||||
)
|
||||
# Module B
|
||||
(
|
||||
{ ... }:
|
||||
{
|
||||
options.mergedLazyLazy = lazyAttrsOf;
|
||||
options.mergedLazyNonLazy = attrsOf;
|
||||
options.mergedNonLazyNonLazy = attrsOf;
|
||||
}
|
||||
)
|
||||
# Result
|
||||
(
|
||||
{ config, ... }:
|
||||
{
|
||||
# Can only evaluate if lazy
|
||||
config.mergedLazyLazy.bar = config.mergedLazyLazy.baz + 1;
|
||||
config.mergedLazyLazy.baz = 10;
|
||||
options.lazyResult = mkOption { default = config.mergedLazyLazy.bar; };
|
||||
|
||||
# Can not only evaluate if not lazy
|
||||
config.mergedNonLazyNonLazy.bar = config.mergedNonLazyNonLazy.baz + 1;
|
||||
config.mergedNonLazyNonLazy.baz = 10;
|
||||
options.nonLazyResult = mkOption { default = config.mergedNonLazyNonLazy.bar; };
|
||||
}
|
||||
)
|
||||
];
|
||||
}
|
16
third_party/nixpkgs/lib/tests/release.nix
vendored
16
third_party/nixpkgs/lib/tests/release.nix
vendored
|
@ -14,19 +14,5 @@ let
|
|||
in
|
||||
pkgs.symlinkJoin {
|
||||
name = "nixpkgs-lib-tests";
|
||||
paths = map testWithNix nixVersions ++
|
||||
|
||||
#
|
||||
# TEMPORARY MIGRATION MECHANISM
|
||||
#
|
||||
# This comment and the expression which follows it should be
|
||||
# removed as part of resolving this issue:
|
||||
#
|
||||
# https://github.com/NixOS/nixpkgs/issues/272591
|
||||
#
|
||||
[(import ../../pkgs/test/release {
|
||||
inherit pkgs lib nix;
|
||||
})]
|
||||
;
|
||||
|
||||
paths = map testWithNix nixVersions;
|
||||
}
|
||||
|
|
14
third_party/nixpkgs/lib/tests/systems.nix
vendored
14
third_party/nixpkgs/lib/tests/systems.nix
vendored
|
@ -52,7 +52,7 @@ lib.runTests (
|
|||
|
||||
testcygwin = mseteq cygwin [ "i686-cygwin" "x86_64-cygwin" ];
|
||||
testdarwin = mseteq darwin [ "x86_64-darwin" "i686-darwin" "aarch64-darwin" "armv7a-darwin" ];
|
||||
testfreebsd = mseteq freebsd [ "i686-freebsd" "x86_64-freebsd" ];
|
||||
testfreebsd = mseteq freebsd [ "aarch64-freebsd" "i686-freebsd" "x86_64-freebsd" ];
|
||||
testgenode = mseteq genode [ "aarch64-genode" "i686-genode" "x86_64-genode" ];
|
||||
testredox = mseteq redox [ "x86_64-redox" ];
|
||||
testgnu = mseteq gnu (linux /* ++ kfreebsd ++ ... */);
|
||||
|
@ -78,6 +78,18 @@ lib.runTests (
|
|||
expr = toLosslessStringMaybe (lib.systems.elaborate "x86_64-linux" // { something = "extra"; });
|
||||
expected = null;
|
||||
};
|
||||
test_elaborate_config_over_system = {
|
||||
expr = (lib.systems.elaborate { config = "i686-unknown-linux-gnu"; system = "x86_64-linux"; }).system;
|
||||
expected = "i686-linux";
|
||||
};
|
||||
test_elaborate_config_over_parsed = {
|
||||
expr = (lib.systems.elaborate { config = "i686-unknown-linux-gnu"; parsed = (lib.systems.elaborate "x86_64-linux").parsed; }).parsed.cpu.arch;
|
||||
expected = "i686";
|
||||
};
|
||||
test_elaborate_system_over_parsed = {
|
||||
expr = (lib.systems.elaborate { system = "i686-linux"; parsed = (lib.systems.elaborate "x86_64-linux").parsed; }).parsed.cpu.arch;
|
||||
expected = "i686";
|
||||
};
|
||||
}
|
||||
|
||||
# Generate test cases to assert that a change in any non-function attribute makes a platform unequal
|
||||
|
|
138
third_party/nixpkgs/lib/types.nix
vendored
138
third_party/nixpkgs/lib/types.nix
vendored
|
@ -83,23 +83,41 @@ rec {
|
|||
# Default type merging function
|
||||
# takes two type functors and return the merged type
|
||||
defaultTypeMerge = f: f':
|
||||
let wrapped = f.wrapped.typeMerge f'.wrapped.functor;
|
||||
payload = f.binOp f.payload f'.payload;
|
||||
let
|
||||
mergedWrapped = f.wrapped.typeMerge f'.wrapped.functor;
|
||||
mergedPayload = f.binOp f.payload f'.payload;
|
||||
|
||||
hasPayload = assert (f'.payload != null) == (f.payload != null); f.payload != null;
|
||||
hasWrapped = assert (f'.wrapped != null) == (f.wrapped != null); f.wrapped != null;
|
||||
|
||||
typeFromPayload = if mergedPayload == null then null else f.type mergedPayload;
|
||||
typeFromWrapped = if mergedWrapped == null then null else f.type mergedWrapped;
|
||||
in
|
||||
# cannot merge different types
|
||||
# Abort early: cannot merge different types
|
||||
if f.name != f'.name
|
||||
then null
|
||||
# simple types
|
||||
else if (f.wrapped == null && f'.wrapped == null)
|
||||
&& (f.payload == null && f'.payload == null)
|
||||
then f.type
|
||||
# composed types
|
||||
else if (f.wrapped != null && f'.wrapped != null) && (wrapped != null)
|
||||
then f.type wrapped
|
||||
# value types
|
||||
else if (f.payload != null && f'.payload != null) && (payload != null)
|
||||
then f.type payload
|
||||
else null;
|
||||
else
|
||||
|
||||
if hasPayload then
|
||||
# Just return the payload if returning wrapped is deprecated
|
||||
if f ? wrappedDeprecationMessage then
|
||||
typeFromPayload
|
||||
else if hasWrapped then
|
||||
# Has both wrapped and payload
|
||||
throw ''
|
||||
Type ${f.name} defines both `functor.payload` and `functor.wrapped` at the same time, which is not supported.
|
||||
|
||||
Use either `functor.payload` or `functor.wrapped` but not both.
|
||||
|
||||
If your code worked before remove either `functor.wrapped` or `functor.payload` from the type definition.
|
||||
''
|
||||
else
|
||||
typeFromPayload
|
||||
else
|
||||
if hasWrapped then
|
||||
typeFromWrapped
|
||||
else
|
||||
f.type;
|
||||
|
||||
# Default type functor
|
||||
defaultFunctor = name: {
|
||||
|
@ -568,48 +586,78 @@ rec {
|
|||
substSubModules = m: nonEmptyListOf (elemType.substSubModules m);
|
||||
};
|
||||
|
||||
attrsOf = elemType: mkOptionType rec {
|
||||
name = "attrsOf";
|
||||
description = "attribute set of ${optionDescriptionPhrase (class: class == "noun" || class == "composite") elemType}";
|
||||
descriptionClass = "composite";
|
||||
check = isAttrs;
|
||||
merge = loc: defs:
|
||||
mapAttrs (n: v: v.value) (filterAttrs (n: v: v ? value) (zipAttrsWith (name: defs:
|
||||
(mergeDefinitions (loc ++ [name]) elemType defs).optionalValue
|
||||
)
|
||||
# Push down position info.
|
||||
(map (def: mapAttrs (n: v: { inherit (def) file; value = v; }) def.value) defs)));
|
||||
emptyValue = { value = {}; };
|
||||
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["<name>"]);
|
||||
getSubModules = elemType.getSubModules;
|
||||
substSubModules = m: attrsOf (elemType.substSubModules m);
|
||||
functor = (defaultFunctor name) // { wrapped = elemType; };
|
||||
nestedTypes.elemType = elemType;
|
||||
};
|
||||
attrsOf = elemType: attrsWith { inherit elemType; };
|
||||
|
||||
# A version of attrsOf that's lazy in its values at the expense of
|
||||
# conditional definitions not working properly. E.g. defining a value with
|
||||
# `foo.attr = mkIf false 10`, then `foo ? attr == true`, whereas with
|
||||
# attrsOf it would correctly be `false`. Accessing `foo.attr` would throw an
|
||||
# error that it's not defined. Use only if conditional definitions don't make sense.
|
||||
lazyAttrsOf = elemType: mkOptionType rec {
|
||||
name = "lazyAttrsOf";
|
||||
description = "lazy attribute set of ${optionDescriptionPhrase (class: class == "noun" || class == "composite") elemType}";
|
||||
lazyAttrsOf = elemType: attrsWith { inherit elemType; lazy = true; };
|
||||
|
||||
# base type for lazyAttrsOf and attrsOf
|
||||
attrsWith =
|
||||
let
|
||||
# Push down position info.
|
||||
pushPositions = map (def: mapAttrs (n: v: { inherit (def) file; value = v; }) def.value);
|
||||
binOp = lhs: rhs:
|
||||
let
|
||||
elemType = lhs.elemType.typeMerge rhs.elemType.functor;
|
||||
lazy =
|
||||
if lhs.lazy == rhs.lazy then
|
||||
lhs.lazy
|
||||
else
|
||||
null;
|
||||
in
|
||||
if elemType == null || lazy == null then
|
||||
null
|
||||
else
|
||||
{
|
||||
inherit elemType lazy;
|
||||
};
|
||||
in
|
||||
{
|
||||
elemType,
|
||||
lazy ? false,
|
||||
}:
|
||||
mkOptionType {
|
||||
name = if lazy then "lazyAttrsOf" else "attrsOf";
|
||||
description = (if lazy then "lazy attribute set" else "attribute set") + " of ${optionDescriptionPhrase (class: class == "noun" || class == "composite") elemType}";
|
||||
descriptionClass = "composite";
|
||||
check = isAttrs;
|
||||
merge = loc: defs:
|
||||
zipAttrsWith (name: defs:
|
||||
let merged = mergeDefinitions (loc ++ [name]) elemType defs;
|
||||
# mergedValue will trigger an appropriate error when accessed
|
||||
in merged.optionalValue.value or elemType.emptyValue.value or merged.mergedValue
|
||||
)
|
||||
# Push down position info.
|
||||
(map (def: mapAttrs (n: v: { inherit (def) file; value = v; }) def.value) defs);
|
||||
merge = if lazy then (
|
||||
# Lazy merge Function
|
||||
loc: defs:
|
||||
zipAttrsWith (name: defs:
|
||||
let merged = mergeDefinitions (loc ++ [name]) elemType defs;
|
||||
# mergedValue will trigger an appropriate error when accessed
|
||||
in merged.optionalValue.value or elemType.emptyValue.value or merged.mergedValue
|
||||
)
|
||||
# Push down position info.
|
||||
(pushPositions defs)
|
||||
) else (
|
||||
# Non-lazy merge Function
|
||||
loc: defs:
|
||||
mapAttrs (n: v: v.value) (filterAttrs (n: v: v ? value) (zipAttrsWith (name: defs:
|
||||
(mergeDefinitions (loc ++ [name]) elemType (defs)).optionalValue
|
||||
)
|
||||
# Push down position info.
|
||||
(pushPositions defs)))
|
||||
);
|
||||
emptyValue = { value = {}; };
|
||||
getSubOptions = prefix: elemType.getSubOptions (prefix ++ ["<name>"]);
|
||||
getSubModules = elemType.getSubModules;
|
||||
substSubModules = m: lazyAttrsOf (elemType.substSubModules m);
|
||||
functor = (defaultFunctor name) // { wrapped = elemType; };
|
||||
substSubModules = m: attrsWith { elemType = elemType.substSubModules m; inherit lazy; };
|
||||
functor = defaultFunctor "attrsWith" // {
|
||||
wrappedDeprecationMessage = { loc }: lib.warn ''
|
||||
The deprecated `type.functor.wrapped` attribute of the option `${showOption loc}` is accessed, use `type.nestedTypes.elemType` instead.
|
||||
'' elemType;
|
||||
payload = {
|
||||
# Important!: Add new function attributes here in case of future changes
|
||||
inherit elemType lazy;
|
||||
};
|
||||
inherit binOp;
|
||||
};
|
||||
nestedTypes.elemType = elemType;
|
||||
};
|
||||
|
||||
|
|
255
third_party/nixpkgs/maintainers/maintainer-list.nix
vendored
255
third_party/nixpkgs/maintainers/maintainer-list.nix
vendored
|
@ -493,6 +493,12 @@
|
|||
github = "acesyde";
|
||||
githubId = 958435;
|
||||
};
|
||||
AchmadFathoni = {
|
||||
name = "Achmad Fathoni";
|
||||
email = "fathoni.id@gmail.com";
|
||||
github = "AchmadFathoni";
|
||||
githubId = 26775746;
|
||||
};
|
||||
aciceri = {
|
||||
name = "Andrea Ciceri";
|
||||
email = "andrea.ciceri@autistici.org";
|
||||
|
@ -1197,6 +1203,12 @@
|
|||
name = "alyaeanyx";
|
||||
keys = [ { fingerprint = "1F73 8879 5E5A 3DFC E2B3 FA32 87D1 AADC D25B 8DEE"; } ];
|
||||
};
|
||||
amadaluzia = {
|
||||
email = "amad@atl.tools";
|
||||
github = "amadaluzia";
|
||||
githubId = 188314694;
|
||||
name = "Artur Manuel";
|
||||
};
|
||||
amadejkastelic = {
|
||||
email = "amadejkastelic7@gmail.com";
|
||||
github = "amadejkastelic";
|
||||
|
@ -1307,7 +1319,6 @@
|
|||
name = "Wroclaw";
|
||||
};
|
||||
amuckstot30 = {
|
||||
email = "amuckstot30@tutanota.com";
|
||||
github = "amuckstot30";
|
||||
githubId = 157274630;
|
||||
name = "amuckstot30";
|
||||
|
@ -1619,6 +1630,12 @@
|
|||
githubId = 4194320;
|
||||
name = "Anton Schirg";
|
||||
};
|
||||
anugrahn1 = {
|
||||
email = "pnanugrah@gmail.com";
|
||||
github = "anugrahn1";
|
||||
githubId = 117428605;
|
||||
name = "Anugrah Naranatt";
|
||||
};
|
||||
anytimetraveler = {
|
||||
email = "simon@simonscode.org";
|
||||
github = "AnyTimeTraveler";
|
||||
|
@ -2273,6 +2290,12 @@
|
|||
githubId = 206242;
|
||||
name = "Andreas Wiese";
|
||||
};
|
||||
axertheaxe = {
|
||||
email = "axertheaxe@proton.me";
|
||||
github = "axertheaxe";
|
||||
githubId = 99703210;
|
||||
name = "Katherine Jamison";
|
||||
};
|
||||
ayazhafiz = {
|
||||
email = "ayaz.hafiz.1@gmail.com";
|
||||
github = "hafiz";
|
||||
|
@ -3806,6 +3829,12 @@
|
|||
name = "ChaosAttractor";
|
||||
keys = [ { fingerprint = "A137 4415 DB7C 6439 10EA 5BF1 0FEE 4E47 5940 E125"; } ];
|
||||
};
|
||||
charB66 = {
|
||||
email = "nix.disparate221@passinbox.com";
|
||||
github = "charB66";
|
||||
githubId = 59340663;
|
||||
name = "Bryan F.";
|
||||
};
|
||||
charlesbaynham = {
|
||||
email = "charlesbaynham@gmail.com";
|
||||
github = "charlesbaynham";
|
||||
|
@ -4186,7 +4215,7 @@
|
|||
matrix = "@clot27:matrix.org";
|
||||
};
|
||||
cloudripper = {
|
||||
email = "other.wing8806@fastmail.com";
|
||||
email = "dev+nixpkgs@cldrpr.com";
|
||||
github = "cloudripper";
|
||||
githubId = 70971768;
|
||||
name = "cloudripper";
|
||||
|
@ -4578,6 +4607,13 @@
|
|||
githubId = 1707779;
|
||||
name = "Chris Ertel";
|
||||
};
|
||||
crimeminister = {
|
||||
email = "robert@crimeminister.org";
|
||||
name = "Robert Medeiros";
|
||||
github = "crimeminister";
|
||||
githubId = 29072;
|
||||
keys = [ { fingerprint = "E3BD A35E 590A 8D29 701A 9723 F448 7FA0 4BC6 44F2"; } ];
|
||||
};
|
||||
crinklywrappr = {
|
||||
email = "crinklywrappr@pm.me";
|
||||
name = "Daniel Fitzpatrick";
|
||||
|
@ -4653,6 +4689,12 @@
|
|||
githubId = 490965;
|
||||
name = "Craig Swank";
|
||||
};
|
||||
cterence = {
|
||||
email = "terence.chateigne@posteo.net";
|
||||
github = "cterence";
|
||||
githubId = 25285508;
|
||||
name = "Térence Chateigné";
|
||||
};
|
||||
ctron = {
|
||||
email = "ctron@dentrassi.de";
|
||||
github = "ctron";
|
||||
|
@ -4775,6 +4817,12 @@
|
|||
name = "Dov Alperin";
|
||||
keys = [ { fingerprint = "4EED 5096 B925 86FA 1101 6673 7F2C 07B9 1B52 BB61"; } ];
|
||||
};
|
||||
damhiya = {
|
||||
name = "SoonWon Moon";
|
||||
email = "damhiya@gmail.com";
|
||||
github = "damhiya";
|
||||
githubId = 13533446;
|
||||
};
|
||||
DamienCassou = {
|
||||
email = "damien@cassou.me";
|
||||
github = "DamienCassou";
|
||||
|
@ -4957,6 +5005,12 @@
|
|||
githubId = 7589338;
|
||||
name = "Daniel Șerbănescu";
|
||||
};
|
||||
daspk04 = {
|
||||
email = "dpratyush.k@gmail.com";
|
||||
github = "daspk04";
|
||||
githubId = 28738918;
|
||||
name = "Pratyush Das";
|
||||
};
|
||||
datafoo = {
|
||||
github = "datafoo";
|
||||
githubId = 34766150;
|
||||
|
@ -4968,8 +5022,7 @@
|
|||
githubId = 28595242;
|
||||
name = "DataHearth";
|
||||
keys = [
|
||||
{ fingerprint = "A129 2547 0298 BFEE 7EE0 92B3 946E 2D0C 410C 7B3D"; }
|
||||
{ fingerprint = "FFC4 92C1 5320 B05D 0F8D 7D58 ABF6 737C 6339 6D35"; }
|
||||
{ fingerprint = "E8F9 0B80 908E 723D 0EDF 0916 5803 CDA5 9C26 A96A"; }
|
||||
];
|
||||
};
|
||||
davegallant = {
|
||||
|
@ -6044,6 +6097,12 @@
|
|||
githubId = 2025623;
|
||||
name = "Luc Chabassier";
|
||||
};
|
||||
dwrege = {
|
||||
email = "email@dwrege.de";
|
||||
github = "dominicwrege";
|
||||
githubId = 7389000;
|
||||
name = "Dominic Wrege";
|
||||
};
|
||||
dxf = {
|
||||
email = "dingxiangfei2009@gmail.com";
|
||||
github = "dingxiangfei2009";
|
||||
|
@ -6287,6 +6346,12 @@
|
|||
githubId = 701128;
|
||||
name = "Eike Kettner";
|
||||
};
|
||||
eilvelia = {
|
||||
email = "hi@eilvelia.cat";
|
||||
github = "eilvelia";
|
||||
githubId = 10106819;
|
||||
name = "eilvelia";
|
||||
};
|
||||
eken = {
|
||||
email = "edvin.kallstrom@protonmail.com";
|
||||
github = "Eken-beep";
|
||||
|
@ -6903,6 +6968,12 @@
|
|||
githubId = 30512529;
|
||||
name = "Evils";
|
||||
};
|
||||
evris99 = {
|
||||
name = "Evrymachos Koukoumakas";
|
||||
github = "evris99";
|
||||
githubId = 32963606;
|
||||
email = "cptevris@gmail.com";
|
||||
};
|
||||
ewok = {
|
||||
email = "ewok@ewok.ru";
|
||||
github = "ewok-old";
|
||||
|
@ -7015,12 +7086,6 @@
|
|||
githubId = 878822;
|
||||
name = "Tristan Helmich";
|
||||
};
|
||||
falsifian = {
|
||||
email = "james.cook@utoronto.ca";
|
||||
github = "falsifian";
|
||||
githubId = 225893;
|
||||
name = "James Cook";
|
||||
};
|
||||
fangpen = {
|
||||
email = "hello@fangpenlin.com";
|
||||
github = "fangpenlin";
|
||||
|
@ -7260,6 +7325,13 @@
|
|||
github = "fkautz";
|
||||
githubId = 135706;
|
||||
};
|
||||
flacks = {
|
||||
name = "Jean Lucas";
|
||||
email = "jean@4ray.co";
|
||||
github = "flacks";
|
||||
githubId = 2135469;
|
||||
matrix = "@flacks:matrix.org";
|
||||
};
|
||||
FlafyDev = {
|
||||
name = "Flafy Arazi";
|
||||
email = "flafyarazi@gmail.com";
|
||||
|
@ -8603,6 +8675,13 @@
|
|||
githubId = 5317234;
|
||||
name = "Raphael Megzari";
|
||||
};
|
||||
harbiinger = {
|
||||
email = "theo.godin@protonmail.com";
|
||||
matrix = "@hrbgr:matrix.org";
|
||||
github = "harbiinger";
|
||||
githubId = 55398594;
|
||||
name = "Theo Godin";
|
||||
};
|
||||
hardselius = {
|
||||
email = "martin@hardselius.dev";
|
||||
github = "hardselius";
|
||||
|
@ -11408,6 +11487,12 @@
|
|||
githubId = 15373888;
|
||||
name = "Claudius Holeksa";
|
||||
};
|
||||
keller00 = {
|
||||
name = "Mark Keller";
|
||||
email = "markooo.keller@gmail.com";
|
||||
github = "keller00";
|
||||
githubId = 8452750;
|
||||
};
|
||||
kennyballou = {
|
||||
email = "kb@devnulllabs.io";
|
||||
github = "kennyballou";
|
||||
|
@ -11667,6 +11752,12 @@
|
|||
name = "Kat Inskip";
|
||||
keys = [ { fingerprint = "9CC6 44B5 69CD A59B C874 C4C9 E8DD E3ED 1C90 F3A0"; } ];
|
||||
};
|
||||
kivikakk = {
|
||||
email = "ashe@kivikakk.ee";
|
||||
github = "kivikakk";
|
||||
githubId = 1915;
|
||||
name = "Asherah Connor";
|
||||
};
|
||||
kjeremy = {
|
||||
email = "kjeremy@gmail.com";
|
||||
name = "Jeremy Kolb";
|
||||
|
@ -12740,6 +12831,14 @@
|
|||
githubId = 5624721;
|
||||
name = "Ben Wolsieffer";
|
||||
};
|
||||
lordmzte = {
|
||||
name = "Moritz Thomae";
|
||||
email = "lord@mzte.de";
|
||||
matrix = "@lordmzte:mzte.de";
|
||||
github = "LordMZTE";
|
||||
githubId = 28735087;
|
||||
keys = [ { fingerprint = "AB47 3D70 53D2 74CA DC2C 230C B648 02DC 33A6 4FF6"; } ];
|
||||
};
|
||||
lord-valen = {
|
||||
name = "Lord Valen";
|
||||
matrix = "@lord-valen:matrix.org";
|
||||
|
@ -12866,12 +12965,6 @@
|
|||
githubId = 37505890;
|
||||
name = "Luis Wirth";
|
||||
};
|
||||
luc65r = {
|
||||
email = "lucas@ransan.fr";
|
||||
github = "luc65r";
|
||||
githubId = 59375051;
|
||||
name = "Lucas Ransan";
|
||||
};
|
||||
LucaGuerra = {
|
||||
email = "luca@guerra.sh";
|
||||
github = "LucaGuerra";
|
||||
|
@ -13072,6 +13165,13 @@
|
|||
githubId = 30698906;
|
||||
name = "Luna D Dragon";
|
||||
};
|
||||
luNeder = {
|
||||
email = "luana@luana.dev.br";
|
||||
matrix = "@luana:catgirl.cloud";
|
||||
github = "LuNeder";
|
||||
githubId = 19750714;
|
||||
name = "Luana Neder";
|
||||
};
|
||||
lunik1 = {
|
||||
email = "ch.nixpkgs@themaw.xyz";
|
||||
matrix = "@lunik1:lunik.one";
|
||||
|
@ -13675,7 +13775,6 @@
|
|||
github = "matthewpi";
|
||||
githubId = 26559841;
|
||||
name = "Matthew Penner";
|
||||
keys = [ { fingerprint = "5118 F1CC B7B0 6C17 4DD1 5267 3131 1906 AD4C F6D6"; } ];
|
||||
};
|
||||
matthiasbenaets = {
|
||||
email = "matthias.benaets@gmail.com";
|
||||
|
@ -15579,6 +15678,13 @@
|
|||
githubId = 399535;
|
||||
name = "Niklas Hambüchen";
|
||||
};
|
||||
n-hass = {
|
||||
email = "nick@hassan.host";
|
||||
github = "n-hass";
|
||||
githubId = 72363381;
|
||||
name = "n-hass";
|
||||
keys = [ { fingerprint = "FDEE 6116 DBA7 8840 7323 4466 A371 5973 2728 A6A6"; } ];
|
||||
};
|
||||
nhnn = {
|
||||
matrix = "@nhnn:nhnn.dev";
|
||||
github = "thenhnn";
|
||||
|
@ -16212,6 +16318,13 @@
|
|||
github = "octodi";
|
||||
githubId = 127038896;
|
||||
};
|
||||
octvs = {
|
||||
name = "octvs";
|
||||
email = "octvs@posteo.de";
|
||||
matrix = "@octvs:matrix.org";
|
||||
github = "octvs";
|
||||
githubId = 42993892;
|
||||
};
|
||||
oddlama = {
|
||||
email = "oddlama@oddlama.org";
|
||||
github = "oddlama";
|
||||
|
@ -16724,6 +16837,11 @@
|
|||
githubId = 33826198;
|
||||
name = "Philipp Arras";
|
||||
};
|
||||
parth = {
|
||||
github = "parth";
|
||||
githubId = 821972;
|
||||
name = "Parth Mehrotra";
|
||||
};
|
||||
pashashocky = {
|
||||
email = "pashashocky@gmail.com";
|
||||
github = "pashashocky";
|
||||
|
@ -17878,6 +17996,12 @@
|
|||
githubId = 5636;
|
||||
name = "Steve Purcell";
|
||||
};
|
||||
purpole = {
|
||||
email = "mail@purpole.io";
|
||||
github = "purpole";
|
||||
githubId = 101905225;
|
||||
name = "David Schneider";
|
||||
};
|
||||
purrpurrn = {
|
||||
email = "scrcpynovideoaudiocodecraw+nixpkgs@gmail.com";
|
||||
github = "purrpurrn";
|
||||
|
@ -18087,6 +18211,13 @@
|
|||
matrix = "@qyriad:katesiria.org";
|
||||
name = "Qyriad";
|
||||
};
|
||||
r17x = {
|
||||
email = "hi@rin.rocks";
|
||||
github = "r17x";
|
||||
githubId = 16365952;
|
||||
name = "Rin";
|
||||
keys = [ { fingerprint = "476A F55D 6378 F878 0709 848A 18F9 F516 1CC0 576C"; } ];
|
||||
};
|
||||
r3dl3g = {
|
||||
email = "redleg@rothfuss-web.de";
|
||||
github = "r3dl3g";
|
||||
|
@ -18537,7 +18668,7 @@
|
|||
};
|
||||
returntoreality = {
|
||||
email = "linus@lotz.li";
|
||||
github = "retuntoreality";
|
||||
github = "returntoreality";
|
||||
githubId = 255667;
|
||||
name = "Linus Karl";
|
||||
};
|
||||
|
@ -18639,6 +18770,12 @@
|
|||
github = "ribose-jeffreylau";
|
||||
githubId = 2649467;
|
||||
};
|
||||
ribru17 = {
|
||||
name = "Riley Bruins";
|
||||
email = "ribru17@hotmail.com";
|
||||
github = "ribru17";
|
||||
githubId = 55766287;
|
||||
};
|
||||
ricarch97 = {
|
||||
email = "ricardo.steijn97@gmail.com";
|
||||
github = "RicArch97";
|
||||
|
@ -19367,6 +19504,13 @@
|
|||
name = "Maxwell Beck";
|
||||
keys = [ { fingerprint = "D260 79E3 C2BC 2E43 905B D057 BB3E FA30 3760 A0DB"; } ];
|
||||
};
|
||||
rytswd = {
|
||||
email = "rytswd@gmail.com";
|
||||
github = "rytswd";
|
||||
githubId = 23435099;
|
||||
name = "Ryota";
|
||||
keys = [ { fingerprint = "537E 712F 0EC3 91C2 B47F 56E2 EB5D 1A84 5333 43BB"; } ];
|
||||
};
|
||||
ryze = {
|
||||
name = "Ryze";
|
||||
github = "ryze312";
|
||||
|
@ -19407,6 +19551,14 @@
|
|||
github = "Sail0rd";
|
||||
githubId = 55802415;
|
||||
};
|
||||
sako = {
|
||||
name = "Sako";
|
||||
email = "sako@cock.email";
|
||||
matrix = "@sako:imagisphe.re";
|
||||
github = "Sakooooo";
|
||||
githubId = 78461130;
|
||||
keys = [ { fingerprint = "CA52 EE7B E681 720E 32B6 6792 FE52 FD65 B76E 4751"; } ];
|
||||
};
|
||||
samalws = {
|
||||
email = "sam@samalws.com";
|
||||
name = "Sam Alws";
|
||||
|
@ -19594,6 +19746,12 @@
|
|||
githubId = 2347889;
|
||||
name = "Sauyon Lee";
|
||||
};
|
||||
savalet = {
|
||||
email = "savinien.petitjean@gmail.com";
|
||||
github = "savalet";
|
||||
githubId = 73446695;
|
||||
name = "savalet";
|
||||
};
|
||||
savannidgerinel = {
|
||||
email = "savanni@luminescent-dreams.com";
|
||||
github = "savannidgerinel";
|
||||
|
@ -20132,6 +20290,13 @@
|
|||
githubId = 251028;
|
||||
name = "Shell Turner";
|
||||
};
|
||||
shelvacu = {
|
||||
name = "Shelvacu";
|
||||
email = "nix-maint@shelvacu.com";
|
||||
matrix = "@s:consortium.chat";
|
||||
github = "shelvacu";
|
||||
githubId = 1731537;
|
||||
};
|
||||
shhht = {
|
||||
name = "shhht";
|
||||
email = "stp.tjeerd@gmail.com";
|
||||
|
@ -21107,12 +21272,6 @@
|
|||
githubId = 1694705;
|
||||
name = "Sam Stites";
|
||||
};
|
||||
stnley = {
|
||||
email = "michael@stnley.io";
|
||||
github = "stnley";
|
||||
githubId = 64174376;
|
||||
name = "Michael Stanley";
|
||||
};
|
||||
strager = {
|
||||
email = "strager.nds@gmail.com";
|
||||
github = "strager";
|
||||
|
@ -21499,6 +21658,17 @@
|
|||
githubId = 6064962;
|
||||
name = "TakWolf";
|
||||
};
|
||||
talhaHavadar = {
|
||||
email = "havadartalha@gmail.com";
|
||||
github = "talhaHavadar";
|
||||
githubId = 6908462;
|
||||
name = "Talha Can Havadar";
|
||||
keys = [
|
||||
{
|
||||
fingerprint = "1E13 12DF 4B71 58B6 EBF9 DE78 2574 3879 62FE B0D1";
|
||||
}
|
||||
];
|
||||
};
|
||||
talkara = {
|
||||
email = "taito.horiuchi@relexsolutions.com";
|
||||
github = "talkara";
|
||||
|
@ -21687,6 +21857,12 @@
|
|||
githubId = 2084639;
|
||||
name = "Manu";
|
||||
};
|
||||
tensor5 = {
|
||||
github = "tensor5";
|
||||
githubId = 1545895;
|
||||
matrix = "@tensor5:matrix.org";
|
||||
name = "Nicola Squartini";
|
||||
};
|
||||
teozkr = {
|
||||
email = "teo@nullable.se";
|
||||
github = "nightkr";
|
||||
|
@ -22023,6 +22199,12 @@
|
|||
githubId = 7709;
|
||||
name = "Thomaz Leite";
|
||||
};
|
||||
tholo = {
|
||||
email = "ali0mhmz@gmail.com";
|
||||
github = "tholoo";
|
||||
githubId = 42005990;
|
||||
name = "Ali Mohammadzadeh";
|
||||
};
|
||||
thomasdesr = {
|
||||
email = "git@hive.pw";
|
||||
github = "thomasdesr";
|
||||
|
@ -22247,6 +22429,12 @@
|
|||
githubId = 6118602;
|
||||
name = "Viktor";
|
||||
};
|
||||
tne = {
|
||||
email = "tne@garudalinux.org";
|
||||
github = "JustTNE";
|
||||
githubId = 38938720;
|
||||
name = "TNE";
|
||||
};
|
||||
tnias = {
|
||||
email = "phil@grmr.de";
|
||||
matrix = "@tnias:stratum0.org";
|
||||
|
@ -23060,6 +23248,12 @@
|
|||
githubId = 2856634;
|
||||
name = "Tyler Compton";
|
||||
};
|
||||
venikx = {
|
||||
email = "code@venikx.com";
|
||||
github = "venikx";
|
||||
githubId = 24815061;
|
||||
name = "Kevin De Baerdemaeker";
|
||||
};
|
||||
veprbl = {
|
||||
email = "veprbl@gmail.com";
|
||||
github = "veprbl";
|
||||
|
@ -23097,6 +23291,12 @@
|
|||
githubId = 11413574;
|
||||
name = "Fiona Weber";
|
||||
};
|
||||
vieta = {
|
||||
email = "xyzVieta@gmail.com";
|
||||
github = "yVieta";
|
||||
githubId = 94648307;
|
||||
name = "Thanh Viet Nguyen";
|
||||
};
|
||||
vifino = {
|
||||
email = "vifino@tty.sh";
|
||||
github = "vifino";
|
||||
|
@ -23244,6 +23444,13 @@
|
|||
name = "Vinicius Bernardino";
|
||||
keys = [ { fingerprint = "F0D3 920C 722A 541F 0CCD 66E3 A7BA BA05 3D78 E7CA"; } ];
|
||||
};
|
||||
vog = {
|
||||
email = "v@njh.eu";
|
||||
github = "vog";
|
||||
githubId = 412749;
|
||||
name = "Volker Diels-Grabsch";
|
||||
keys = [ { fingerprint = "A7E6 9C4F 69DC 5D6C FC84 EE34 A29F BD51 5F89 90AF"; } ];
|
||||
};
|
||||
voidless = {
|
||||
email = "julius.schmitt@yahoo.de";
|
||||
github = "voidIess";
|
||||
|
|
|
@ -103,6 +103,7 @@ luazip,,,,,,
|
|||
lush.nvim,,,https://luarocks.org/dev,,,teto
|
||||
luuid,,,,20120509-2,,
|
||||
luv,,,,1.48.0-2,,
|
||||
lusc_luv,,,,,,
|
||||
lyaml,,,,,,lblasc
|
||||
lz.n,,,,,,mrcjkb
|
||||
lze,,,,,,birdee
|
||||
|
@ -138,11 +139,11 @@ sqlite,,,,,,
|
|||
std._debug,,,,,,
|
||||
std.normalize,,,,,,
|
||||
stdlib,,,,41.2.2,,vyp
|
||||
teal-language-server,,,http://luarocks.org/dev,,,
|
||||
teal-language-server,,,,,,
|
||||
telescope-manix,,,,,,
|
||||
telescope.nvim,,,,,5.1,
|
||||
tiktoken_core,,,,,,natsukium
|
||||
tl,,,,0.15.3-1,,mephistophiles
|
||||
tl,,,,,,mephistophiles
|
||||
toml-edit,,,,,5.1,mrcjkb
|
||||
tree-sitter-norg,,,,,5.1,mrcjkb
|
||||
vstruct,,,,,,
|
||||
|
|
|
|
@ -32,7 +32,7 @@ from functools import wraps
|
|||
from multiprocessing.dummy import Pool
|
||||
from pathlib import Path
|
||||
from tempfile import NamedTemporaryFile
|
||||
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
||||
from typing import Any, Callable
|
||||
from urllib.parse import urljoin, urlparse
|
||||
|
||||
import git
|
||||
|
@ -94,7 +94,7 @@ def make_request(url: str, token=None) -> urllib.request.Request:
|
|||
|
||||
|
||||
# a dictionary of plugins and their new repositories
|
||||
Redirects = Dict["PluginDesc", "Repo"]
|
||||
Redirects = dict["PluginDesc", "Repo"]
|
||||
|
||||
|
||||
class Repo:
|
||||
|
@ -103,7 +103,7 @@ class Repo:
|
|||
"""Url to the repo"""
|
||||
self._branch = branch
|
||||
# Redirect is the new Repo to use
|
||||
self.redirect: Optional["Repo"] = None
|
||||
self.redirect: "Repo | None" = None
|
||||
self.token = "dummy_token"
|
||||
|
||||
@property
|
||||
|
@ -125,14 +125,14 @@ class Repo:
|
|||
return True
|
||||
|
||||
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
|
||||
def latest_commit(self) -> Tuple[str, datetime]:
|
||||
def latest_commit(self) -> tuple[str, datetime]:
|
||||
log.debug("Latest commit")
|
||||
loaded = self._prefetch(None)
|
||||
updated = datetime.strptime(loaded["date"], "%Y-%m-%dT%H:%M:%S%z")
|
||||
|
||||
return loaded["rev"], updated
|
||||
|
||||
def _prefetch(self, ref: Optional[str]):
|
||||
def _prefetch(self, ref: str | None):
|
||||
cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
|
||||
if ref is not None:
|
||||
cmd.append(ref)
|
||||
|
@ -141,7 +141,7 @@ class Repo:
|
|||
loaded = json.loads(data)
|
||||
return loaded
|
||||
|
||||
def prefetch(self, ref: Optional[str]) -> str:
|
||||
def prefetch(self, ref: str | None) -> str:
|
||||
log.info("Prefetching %s", self.uri)
|
||||
loaded = self._prefetch(ref)
|
||||
return loaded["sha256"]
|
||||
|
@ -186,7 +186,7 @@ class RepoGitHub(Repo):
|
|||
return True
|
||||
|
||||
@retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
|
||||
def latest_commit(self) -> Tuple[str, datetime]:
|
||||
def latest_commit(self) -> tuple[str, datetime]:
|
||||
commit_url = self.url(f"commits/{self.branch}.atom")
|
||||
log.debug("Sending request to %s", commit_url)
|
||||
commit_req = make_request(commit_url, self.token)
|
||||
|
@ -252,14 +252,14 @@ class RepoGitHub(Repo):
|
|||
class PluginDesc:
|
||||
repo: Repo
|
||||
branch: str
|
||||
alias: Optional[str]
|
||||
alias: str | None
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.alias or self.repo.name
|
||||
|
||||
@staticmethod
|
||||
def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":
|
||||
def load_from_csv(config: FetchConfig, row: dict[str, str]) -> "PluginDesc":
|
||||
log.debug("Loading row %s", row)
|
||||
branch = row["branch"]
|
||||
repo = make_repo(row["repo"], branch.strip())
|
||||
|
@ -292,7 +292,7 @@ class Plugin:
|
|||
commit: str
|
||||
has_submodules: bool
|
||||
sha256: str
|
||||
date: Optional[datetime] = None
|
||||
date: datetime | None = None
|
||||
|
||||
@property
|
||||
def normalized_name(self) -> str:
|
||||
|
@ -303,7 +303,7 @@ class Plugin:
|
|||
assert self.date is not None
|
||||
return self.date.strftime("%Y-%m-%d")
|
||||
|
||||
def as_json(self) -> Dict[str, str]:
|
||||
def as_json(self) -> dict[str, str]:
|
||||
copy = self.__dict__.copy()
|
||||
del copy["date"]
|
||||
return copy
|
||||
|
@ -312,7 +312,7 @@ class Plugin:
|
|||
def load_plugins_from_csv(
|
||||
config: FetchConfig,
|
||||
input_file: Path,
|
||||
) -> List[PluginDesc]:
|
||||
) -> list[PluginDesc]:
|
||||
log.debug("Load plugins from csv %s", input_file)
|
||||
plugins = []
|
||||
with open(input_file, newline="") as csvfile:
|
||||
|
@ -359,10 +359,10 @@ class Editor:
|
|||
name: str,
|
||||
root: Path,
|
||||
get_plugins: str,
|
||||
default_in: Optional[Path] = None,
|
||||
default_out: Optional[Path] = None,
|
||||
deprecated: Optional[Path] = None,
|
||||
cache_file: Optional[str] = None,
|
||||
default_in: Path | None = None,
|
||||
default_out: Path | None = None,
|
||||
deprecated: Path | None = None,
|
||||
cache_file: str | None = None,
|
||||
):
|
||||
log.debug("get_plugins:", get_plugins)
|
||||
self.name = name
|
||||
|
@ -388,6 +388,19 @@ class Editor:
|
|||
fetch_config, args.input_file, editor.deprecated, append=append
|
||||
)
|
||||
plugin, _ = prefetch_plugin(pdesc)
|
||||
|
||||
if ( # lua updater doesn't support updating individual plugin
|
||||
self.name != "lua"
|
||||
):
|
||||
# update generated.nix
|
||||
update = self.get_update(
|
||||
args.input_file,
|
||||
args.outfile,
|
||||
fetch_config,
|
||||
[plugin.normalized_name],
|
||||
)
|
||||
update()
|
||||
|
||||
autocommit = not args.no_commit
|
||||
if autocommit:
|
||||
commit(
|
||||
|
@ -404,16 +417,35 @@ class Editor:
|
|||
"""CSV spec"""
|
||||
print("the update member function should be overridden in subclasses")
|
||||
|
||||
def get_current_plugins(self, nixpkgs: str) -> List[Plugin]:
|
||||
def get_current_plugins(
|
||||
self, config: FetchConfig, nixpkgs: str
|
||||
) -> list[tuple[PluginDesc, Plugin]]:
|
||||
"""To fill the cache"""
|
||||
data = run_nix_expr(self.get_plugins, nixpkgs)
|
||||
plugins = []
|
||||
for name, attr in data.items():
|
||||
p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
|
||||
plugins.append(p)
|
||||
checksum = attr["checksum"]
|
||||
|
||||
# https://github.com/NixOS/nixpkgs/blob/8a335419/pkgs/applications/editors/neovim/build-neovim-plugin.nix#L36
|
||||
# https://github.com/NixOS/nixpkgs/pull/344478#discussion_r1786646055
|
||||
version = re.search(r"\d\d\d\d-\d\d?-\d\d?", attr["version"])
|
||||
if version is None:
|
||||
raise ValueError(f"Cannot parse version: {attr['version']}")
|
||||
date = datetime.strptime(version.group(), "%Y-%m-%d")
|
||||
|
||||
pdesc = PluginDesc.load_from_string(config, f'{attr["homePage"]} as {name}')
|
||||
p = Plugin(
|
||||
attr["pname"],
|
||||
checksum["rev"],
|
||||
checksum["submodules"],
|
||||
checksum["sha256"],
|
||||
date,
|
||||
)
|
||||
|
||||
plugins.append((pdesc, p))
|
||||
return plugins
|
||||
|
||||
def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
|
||||
def load_plugin_spec(self, config: FetchConfig, plugin_file) -> list[PluginDesc]:
|
||||
"""CSV spec"""
|
||||
return load_plugins_from_csv(config, plugin_file)
|
||||
|
||||
|
@ -421,28 +453,115 @@ class Editor:
|
|||
"""Returns nothing for now, writes directly to outfile"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_update(self, input_file: str, outfile: str, config: FetchConfig):
|
||||
cache: Cache = Cache(self.get_current_plugins(self.nixpkgs), self.cache_file)
|
||||
def filter_plugins_to_update(
|
||||
self, plugin: PluginDesc, to_update: list[str]
|
||||
) -> bool:
|
||||
"""Function for filtering out plugins, that user doesn't want to update.
|
||||
|
||||
It is mainly used for updating only specific plugins, not all of them.
|
||||
By default it filters out plugins not present in `to_update`,
|
||||
assuming `to_update` is a list of plugin names (the same as in the
|
||||
result expression).
|
||||
|
||||
This function is never called if `to_update` is empty.
|
||||
Feel free to override this function in derived classes.
|
||||
|
||||
Note:
|
||||
Known bug: you have to use a deprecated name, instead of new one.
|
||||
This is because we resolve deprecations later and can't get new
|
||||
plugin URL before we request info about it.
|
||||
|
||||
Although, we could parse deprecated.json, but it's a whole bunch
|
||||
of spaghetti code, which I don't want to write.
|
||||
|
||||
Arguments:
|
||||
plugin: Plugin on which you decide whether to ignore or not.
|
||||
to_update:
|
||||
List of strings passed to via the `--update` command line parameter.
|
||||
By default, we assume it is a list of URIs identical to what
|
||||
is in the input file.
|
||||
|
||||
Returns:
|
||||
True if we should update plugin and False if not.
|
||||
"""
|
||||
return plugin.name.replace(".", "-") in to_update
|
||||
|
||||
def get_update(
|
||||
self,
|
||||
input_file: str,
|
||||
output_file: str,
|
||||
config: FetchConfig,
|
||||
to_update: list[str] | None,
|
||||
):
|
||||
if to_update is None:
|
||||
to_update = []
|
||||
|
||||
current_plugins = self.get_current_plugins(config, self.nixpkgs)
|
||||
current_plugin_specs = self.load_plugin_spec(config, input_file)
|
||||
|
||||
cache: Cache = Cache(
|
||||
[plugin for _description, plugin in current_plugins], self.cache_file
|
||||
)
|
||||
_prefetch = functools.partial(prefetch, cache=cache)
|
||||
|
||||
def update() -> dict:
|
||||
plugins = self.load_plugin_spec(config, input_file)
|
||||
plugins_to_update = (
|
||||
current_plugin_specs
|
||||
if len(to_update) == 0
|
||||
else [
|
||||
description
|
||||
for description in current_plugin_specs
|
||||
if self.filter_plugins_to_update(description, to_update)
|
||||
]
|
||||
)
|
||||
|
||||
def update() -> Redirects:
|
||||
if len(plugins_to_update) == 0:
|
||||
log.error(
|
||||
"\n\n\n\nIt seems like you provided some arguments to `--update`:\n"
|
||||
+ ", ".join(to_update)
|
||||
+ "\nBut after filtering, the result list of plugins is empty\n"
|
||||
"\n"
|
||||
"Are you sure you provided the same URIs as in your input file?\n"
|
||||
"(" + str(input_file) + ")\n\n"
|
||||
)
|
||||
return {}
|
||||
|
||||
try:
|
||||
pool = Pool(processes=config.proc)
|
||||
results = pool.map(_prefetch, plugins)
|
||||
results = pool.map(_prefetch, plugins_to_update)
|
||||
finally:
|
||||
cache.store()
|
||||
|
||||
print(f"{len(results)} of {len(current_plugins)} were checked")
|
||||
# Do only partial update of out file
|
||||
if len(results) != len(current_plugins):
|
||||
results = self.merge_results(current_plugins, results)
|
||||
plugins, redirects = check_results(results)
|
||||
|
||||
plugins = sorted(plugins, key=lambda v: v[1].normalized_name)
|
||||
self.generate_nix(plugins, outfile)
|
||||
self.generate_nix(plugins, output_file)
|
||||
|
||||
return redirects
|
||||
|
||||
return update
|
||||
|
||||
def merge_results(
|
||||
self,
|
||||
current: list[tuple[PluginDesc, Plugin]],
|
||||
fetched: list[tuple[PluginDesc, Exception | Plugin, Repo | None]],
|
||||
) -> list[tuple[PluginDesc, Exception | Plugin, Repo | None]]:
|
||||
# transforming this to dict, so lookup is O(1) instead of O(n) (n is len(current))
|
||||
result: dict[str, tuple[PluginDesc, Exception | Plugin, Repo | None]] = {
|
||||
# also adding redirect (third item in the result tuple)
|
||||
pl.normalized_name: (pdesc, pl, None)
|
||||
for pdesc, pl in current
|
||||
}
|
||||
|
||||
for plugin_desc, plugin, redirect in fetched:
|
||||
result[plugin.normalized_name] = (plugin_desc, plugin, redirect)
|
||||
|
||||
return list(result.values())
|
||||
|
||||
@property
|
||||
def attr_path(self):
|
||||
return self.name + "Plugins"
|
||||
|
@ -544,6 +663,12 @@ class Editor:
|
|||
description="Update all or a subset of existing plugins",
|
||||
add_help=False,
|
||||
)
|
||||
pupdate.add_argument(
|
||||
"update_only",
|
||||
default=None,
|
||||
nargs="*",
|
||||
help="Plugin URLs to update (must be the same as in the input file)",
|
||||
)
|
||||
pupdate.set_defaults(func=self.update)
|
||||
return main
|
||||
|
||||
|
@ -587,8 +712,8 @@ class CleanEnvironment(object):
|
|||
|
||||
def prefetch_plugin(
|
||||
p: PluginDesc,
|
||||
cache: "Optional[Cache]" = None,
|
||||
) -> Tuple[Plugin, Optional[Repo]]:
|
||||
cache: "Cache | None" = None,
|
||||
) -> tuple[Plugin, Repo | None]:
|
||||
commit = None
|
||||
log.info(f"Fetching last commit for plugin {p.name} from {p.repo.uri}@{p.branch}")
|
||||
commit, date = p.repo.latest_commit()
|
||||
|
@ -621,10 +746,10 @@ def print_download_error(plugin: PluginDesc, ex: Exception):
|
|||
|
||||
|
||||
def check_results(
|
||||
results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]],
|
||||
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
|
||||
results: list[tuple[PluginDesc, Exception | Plugin, Repo | None]],
|
||||
) -> tuple[list[tuple[PluginDesc, Plugin]], Redirects]:
|
||||
""" """
|
||||
failures: List[Tuple[PluginDesc, Exception]] = []
|
||||
failures: list[tuple[PluginDesc, Exception]] = []
|
||||
plugins = []
|
||||
redirects: Redirects = {}
|
||||
for pdesc, result, redirect in results:
|
||||
|
@ -637,11 +762,10 @@ def check_results(
|
|||
new_pdesc = PluginDesc(redirect, pdesc.branch, pdesc.alias)
|
||||
plugins.append((new_pdesc, result))
|
||||
|
||||
print(f"{len(results) - len(failures)} plugins were checked", end="")
|
||||
if len(failures) == 0:
|
||||
return plugins, redirects
|
||||
else:
|
||||
log.error(f", {len(failures)} plugin(s) could not be downloaded:\n")
|
||||
log.error(f"{len(failures)} plugin(s) could not be downloaded:\n")
|
||||
|
||||
for plugin, exception in failures:
|
||||
print_download_error(plugin, exception)
|
||||
|
@ -661,7 +785,7 @@ def make_repo(uri: str, branch) -> Repo:
|
|||
return repo
|
||||
|
||||
|
||||
def get_cache_path(cache_file_name: str) -> Optional[Path]:
|
||||
def get_cache_path(cache_file_name: str) -> Path | None:
|
||||
xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
|
||||
if xdg_cache is None:
|
||||
home = os.environ.get("HOME", None)
|
||||
|
@ -673,7 +797,7 @@ def get_cache_path(cache_file_name: str) -> Optional[Path]:
|
|||
|
||||
|
||||
class Cache:
|
||||
def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
|
||||
def __init__(self, initial_plugins: list[Plugin], cache_file_name: str) -> None:
|
||||
self.cache_file = get_cache_path(cache_file_name)
|
||||
|
||||
downloads = {}
|
||||
|
@ -682,11 +806,11 @@ class Cache:
|
|||
downloads.update(self.load())
|
||||
self.downloads = downloads
|
||||
|
||||
def load(self) -> Dict[str, Plugin]:
|
||||
def load(self) -> dict[str, Plugin]:
|
||||
if self.cache_file is None or not self.cache_file.exists():
|
||||
return {}
|
||||
|
||||
downloads: Dict[str, Plugin] = {}
|
||||
downloads: dict[str, Plugin] = {}
|
||||
with open(self.cache_file) as f:
|
||||
data = json.load(f)
|
||||
for attr in data.values():
|
||||
|
@ -707,7 +831,7 @@ class Cache:
|
|||
data[name] = attr.as_json()
|
||||
json.dump(data, f, indent=4, sort_keys=True)
|
||||
|
||||
def __getitem__(self, key: str) -> Optional[Plugin]:
|
||||
def __getitem__(self, key: str) -> Plugin | None:
|
||||
return self.downloads.get(key, None)
|
||||
|
||||
def __setitem__(self, key: str, value: Plugin) -> None:
|
||||
|
@ -716,7 +840,7 @@ class Cache:
|
|||
|
||||
def prefetch(
|
||||
pluginDesc: PluginDesc, cache: Cache
|
||||
) -> Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]:
|
||||
) -> tuple[PluginDesc, Exception | Plugin, Repo | None]:
|
||||
try:
|
||||
plugin, redirect = prefetch_plugin(pluginDesc, cache)
|
||||
cache[plugin.commit] = plugin
|
||||
|
@ -731,7 +855,7 @@ def rewrite_input(
|
|||
deprecated: Path,
|
||||
# old pluginDesc and the new
|
||||
redirects: Redirects = {},
|
||||
append: List[PluginDesc] = [],
|
||||
append: list[PluginDesc] = [],
|
||||
):
|
||||
log.info("Rewriting input file %s", input_file)
|
||||
plugins = load_plugins_from_csv(config, input_file)
|
||||
|
@ -779,7 +903,7 @@ def rewrite_input(
|
|||
writer.writerow(asdict(plugin))
|
||||
|
||||
|
||||
def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
|
||||
def commit(repo: git.Repo, message: str, files: list[Path]) -> None:
|
||||
repo.index.add([str(f.resolve()) for f in files])
|
||||
|
||||
if repo.index.diff("HEAD"):
|
||||
|
@ -802,7 +926,14 @@ def update_plugins(editor: Editor, args):
|
|||
)
|
||||
|
||||
fetch_config = FetchConfig(args.proc, args.github_token)
|
||||
update = editor.get_update(args.input_file, args.outfile, fetch_config)
|
||||
update = editor.get_update(
|
||||
input_file=args.input_file,
|
||||
output_file=args.outfile,
|
||||
config=fetch_config,
|
||||
to_update=getattr( # if script was called without arguments
|
||||
args, "update_only", None
|
||||
),
|
||||
)
|
||||
|
||||
start_time = time.time()
|
||||
redirects = update()
|
||||
|
|
|
@ -440,7 +440,6 @@ with lib.maintainers;
|
|||
hlolli
|
||||
glittershark
|
||||
ericdallo
|
||||
thiagokokada
|
||||
];
|
||||
scope = "Maintain GraalVM Community Edition packages.";
|
||||
shortName = "GraalVM-CE";
|
||||
|
@ -576,6 +575,7 @@ with lib.maintainers;
|
|||
members = [
|
||||
aanderse
|
||||
edwtjo
|
||||
hrdinka
|
||||
thiagokokada
|
||||
];
|
||||
scope = "Maintain Libretro, RetroArch and related packages.";
|
||||
|
|
|
@ -399,6 +399,23 @@ Composed types are types that take a type as parameter. `listOf
|
|||
returned instead for the same `mkIf false` definition.
|
||||
:::
|
||||
|
||||
`types.attrsWith` { *`elemType`*, *`lazy`* ? false }
|
||||
|
||||
: An attribute set of where all the values are of *`elemType`* type.
|
||||
|
||||
**Parameters**
|
||||
|
||||
`elemType` (Required)
|
||||
: Specifies the type of the values contained in the attribute set.
|
||||
|
||||
`lazy`
|
||||
: Determines whether the attribute set is lazily evaluated. See: `types.lazyAttrsOf`
|
||||
|
||||
**Behavior**
|
||||
|
||||
- `attrsWith { elemType = t; }` is equivalent to `attrsOf t`
|
||||
- `attrsWith { lazy = true; elemType = t; }` is equivalent to `lazyAttrsOf t`
|
||||
|
||||
`types.uniq` *`t`*
|
||||
|
||||
: Ensures that type *`t`* cannot be merged. It is used to ensure option
|
||||
|
|
|
@ -160,6 +160,18 @@ The first steps to all these are the same:
|
|||
Refer to the `nixos-generate-config` step in
|
||||
[](#sec-installation) for more information.
|
||||
|
||||
::: {.note}
|
||||
On [UEFI](https://en.wikipedia.org/wiki/UEFI) systems, check that your `/etc/nixos/hardware-configuration.nix` did the right thing with the [EFI System Partition](https://en.wikipedia.org/wiki/EFI_system_partition).
|
||||
In NixOS, by default, both [systemd-boot](https://systemd.io/BOOT/) and [grub](https://www.gnu.org/software/grub/index.html) expect it to be mounted on `/boot`.
|
||||
However, the configuration generator bases its [](#opt-fileSystems) configuration on the current mount points at the time it is run.
|
||||
If the current system and NixOS's bootloader configuration don't agree on where the [EFI System Partition](https://en.wikipedia.org/wiki/EFI_system_partition) is to be mounted, you'll need to manually alter the mount point in `hardware-configuration.nix` before building the system closure.
|
||||
:::
|
||||
|
||||
::: {.note}
|
||||
The lustrate process will not work if the [](#opt-boot.initrd.systemd.enable) option is set to `true`.
|
||||
If you want to use this option, wait until after the first boot into the NixOS system to enable it and rebuild.
|
||||
:::
|
||||
|
||||
You'll likely want to set a root password for your first boot using
|
||||
the configuration files because you won't have a chance to enter a
|
||||
password until after you reboot. You can initialize the root password
|
||||
|
@ -231,26 +243,46 @@ The first steps to all these are the same:
|
|||
$ echo etc/nixos | sudo tee -a /etc/NIXOS_LUSTRATE
|
||||
```
|
||||
|
||||
1. Finally, move the `/boot` directory of your current distribution out
|
||||
of the way (the lustrate process will take care of the rest once you
|
||||
reboot, but this one must be moved out now because NixOS needs to
|
||||
install its own boot files:
|
||||
1. Finally, install NixOS's boot system, backing up the current boot system's files in the process.
|
||||
|
||||
The details of this step can vary depending on the bootloader configuration in NixOS and the bootloader in use by the current system.
|
||||
|
||||
The commands below should work for:
|
||||
|
||||
- [BIOS](https://en.wikipedia.org/wiki/BIOS) systems.
|
||||
|
||||
- [UEFI](https://en.wikipedia.org/wiki/UEFI) systems where both the current system and NixOS mount the [EFI System Partition](https://en.wikipedia.org/wiki/EFI_system_partition) on `/boot`.
|
||||
Both [systemd-boot](https://systemd.io/BOOT/) and [grub](https://www.gnu.org/software/grub/index.html) expect this by default in NixOS, but other distributions vary.
|
||||
|
||||
::: {.warning}
|
||||
Once you complete this step, your current distribution will no
|
||||
longer be bootable! If you didn't get all the NixOS configuration
|
||||
right, especially those settings pertaining to boot loading and root
|
||||
partition, NixOS may not be bootable either. Have a USB rescue
|
||||
device ready in case this happens.
|
||||
Once you complete this step, your current distribution will no longer be bootable!
|
||||
If you didn't get all the NixOS configuration right, especially those settings pertaining to boot loading and root partition, NixOS may not be bootable either.
|
||||
Have a USB rescue device ready in case this happens.
|
||||
:::
|
||||
|
||||
::: {.warning}
|
||||
On [UEFI](https://en.wikipedia.org/wiki/UEFI) systems, anything on the [EFI System Partition](https://en.wikipedia.org/wiki/EFI_system_partition) will be removed by these commands, such as other coexisting OS's bootloaders.
|
||||
:::
|
||||
|
||||
```ShellSession
|
||||
$ sudo mv -v /boot /boot.bak &&
|
||||
sudo /nix/var/nix/profiles/system/bin/switch-to-configuration boot
|
||||
$ sudo mkdir /boot.bak && sudo mv /boot/* /boot.bak &&
|
||||
sudo NIXOS_INSTALL_BOOTLOADER=1 /nix/var/nix/profiles/system/bin/switch-to-configuration boot
|
||||
```
|
||||
|
||||
Cross your fingers, reboot, hopefully you should get a NixOS prompt!
|
||||
|
||||
In other cases, most commonly where the [EFI System Partition](https://en.wikipedia.org/wiki/EFI_system_partition) of the current system is instead mounted on `/boot/efi`, the goal is to:
|
||||
|
||||
- Make sure `/boot` (and the [EFI System Partition](https://en.wikipedia.org/wiki/EFI_system_partition), if mounted elsewhere) are mounted how the NixOS configuration would mount them.
|
||||
|
||||
- Clear them of files related to the current system, backing them up outside of `/boot`.
|
||||
NixOS will move the backups into `/old-root` along with everything else when it first boots.
|
||||
|
||||
- Instruct the NixOS closure built earlier to install its bootloader with:
|
||||
```ShellSession
|
||||
sudo NIXOS_INSTALL_BOOTLOADER=1 /nix/var/nix/profiles/system/bin/switch-to-configuration boot
|
||||
```
|
||||
|
||||
1. If for some reason you want to revert to the old distribution,
|
||||
you'll need to boot on a USB rescue disk and do something along
|
||||
these lines:
|
||||
|
|
|
@ -6,7 +6,7 @@ expressions and associated binaries. The NixOS channels are updated
|
|||
automatically from NixOS's Git repository after certain tests have
|
||||
passed and all packages have been built. These channels are:
|
||||
|
||||
- *Stable channels*, such as [`nixos-24.05`](https://channels.nixos.org/nixos-24.05).
|
||||
- *Stable channels*, such as [`nixos-24.11`](https://channels.nixos.org/nixos-24.11).
|
||||
These only get conservative bug fixes and package upgrades. For
|
||||
instance, a channel update may cause the Linux kernel on your system
|
||||
to be upgraded from 4.19.34 to 4.19.38 (a minor bug fix), but not
|
||||
|
@ -19,7 +19,7 @@ passed and all packages have been built. These channels are:
|
|||
radical changes between channel updates. It's not recommended for
|
||||
production systems.
|
||||
|
||||
- *Small channels*, such as [`nixos-24.05-small`](https://channels.nixos.org/nixos-24.05-small)
|
||||
- *Small channels*, such as [`nixos-24.11-small`](https://channels.nixos.org/nixos-24.11-small)
|
||||
or [`nixos-unstable-small`](https://channels.nixos.org/nixos-unstable-small).
|
||||
These are identical to the stable and unstable channels described above,
|
||||
except that they contain fewer binary packages. This means they get updated
|
||||
|
@ -38,8 +38,8 @@ supported stable release.
|
|||
|
||||
When you first install NixOS, you're automatically subscribed to the
|
||||
NixOS channel that corresponds to your installation source. For
|
||||
instance, if you installed from a 24.05 ISO, you will be subscribed to
|
||||
the `nixos-24.05` channel. To see which NixOS channel you're subscribed
|
||||
instance, if you installed from a 24.11 ISO, you will be subscribed to
|
||||
the `nixos-24.11` channel. To see which NixOS channel you're subscribed
|
||||
to, run the following as root:
|
||||
|
||||
```ShellSession
|
||||
|
@ -54,16 +54,16 @@ To switch to a different NixOS channel, do
|
|||
```
|
||||
|
||||
(Be sure to include the `nixos` parameter at the end.) For instance, to
|
||||
use the NixOS 24.05 stable channel:
|
||||
use the NixOS 24.11 stable channel:
|
||||
|
||||
```ShellSession
|
||||
# nix-channel --add https://channels.nixos.org/nixos-24.05 nixos
|
||||
# nix-channel --add https://channels.nixos.org/nixos-24.11 nixos
|
||||
```
|
||||
|
||||
If you have a server, you may want to use the "small" channel instead:
|
||||
|
||||
```ShellSession
|
||||
# nix-channel --add https://channels.nixos.org/nixos-24.05-small nixos
|
||||
# nix-channel --add https://channels.nixos.org/nixos-24.11-small nixos
|
||||
```
|
||||
|
||||
And if you want to live on the bleeding edge:
|
||||
|
@ -117,6 +117,6 @@ modules. You can also specify a channel explicitly, e.g.
|
|||
|
||||
```nix
|
||||
{
|
||||
system.autoUpgrade.channel = "https://channels.nixos.org/nixos-24.05";
|
||||
system.autoUpgrade.channel = "https://channels.nixos.org/nixos-24.11";
|
||||
}
|
||||
```
|
||||
|
|
|
@ -1868,6 +1868,21 @@
|
|||
"sec-release-24.11-migration-dto-compatible": [
|
||||
"release-notes.html#sec-release-24.11-migration-dto-compatible"
|
||||
],
|
||||
"sec-release-24.11-wiki": [
|
||||
"release-notes.html#sec-release-24.11-wiki"
|
||||
],
|
||||
"sec-release-24.11-lib": [
|
||||
"release-notes.html#sec-release-24.11-lib"
|
||||
],
|
||||
"sec-release-24.11-lib-breaking": [
|
||||
"release-notes.html#sec-release-24.11-lib-breaking"
|
||||
],
|
||||
"sec-release-24.11-lib-additions-improvements": [
|
||||
"release-notes.html#sec-release-24.11-lib-additions-improvements"
|
||||
],
|
||||
"sec-release-24.11-lib-deprecations": [
|
||||
"release-notes.html#sec-release-24.11-lib-deprecations"
|
||||
],
|
||||
"sec-release-24.05": [
|
||||
"release-notes.html#sec-release-24.05"
|
||||
],
|
||||
|
|
|
@ -1,14 +1,9 @@
|
|||
# Release 24.11 (“Vicuña”, 2024.11/??) {#sec-release-24.11}
|
||||
# Release 24.11 (“Vicuña”, 2024.11/28) {#sec-release-24.11}
|
||||
|
||||
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
|
||||
|
||||
## Highlights {#sec-release-24.11-highlights}
|
||||
|
||||
- **This will be the last release of Nixpkgs to support macOS Sierra 10.12 to macOS Catalina 10.15.**
|
||||
Starting with release 25.05, the minimum supported version will be macOS Big Sur 11, and we cannot guarantee that packages will continue to work on older versions of macOS.
|
||||
Users on old macOS versions should consider upgrading to a supported version (potentially using [OpenCore Legacy Patcher](https://dortania.github.io/OpenCore-Legacy-Patcher/) for old hardware) or installing NixOS.
|
||||
If neither of those options are viable and you require new versions of software, [MacPorts](https://www.macports.org/) supports versions back to Mac OS X Snow Leopard 10.6.
|
||||
|
||||
- Nix was updated to 2.24, which brings a lot of improvements and fixes. See the release notes for
|
||||
[2.19](https://nix.dev/manual/nix/latest/release-notes/rl-2.19),
|
||||
[2.20](https://nix.dev/manual/nix/latest/release-notes/rl-2.20),
|
||||
|
@ -18,6 +13,16 @@
|
|||
[2.24](https://nix.dev/manual/nix/latest/release-notes/rl-2.24).
|
||||
Notable changes include improvements to Git fetching, documentation comment support in `nix-repl> :doc`, as well as many quality of life additions.
|
||||
|
||||
- There have been significant changes to macOS support.
|
||||
- The build environment has been redesigned to be closer to a native Xcode toolchain, enabling us to provide all SDKs from macOS Sierra 10.12 to macOS Sequoia 15, simplify build definitions, and build more software without hacks or patching.
|
||||
Although compatibility shims for the old SDK scheme are provided, some builds may break, and the old mechanisms will be removed by 25.11 at the latest.
|
||||
See the [Darwin section](https://nixos.org/manual/nixpkgs/stable/#sec-darwin) of the Nixpkgs manual for details of the new scheme and how to use it, and [the announcement on Discourse](https://discourse.nixos.org/t/the-darwin-sdks-have-been-updated/55295) for more information on the changes and benefits.
|
||||
|
||||
- **This will be the last release of Nixpkgs to support macOS Sierra 10.12 to macOS Catalina 10.15.**
|
||||
Starting with release 25.05, the minimum supported version will be macOS Big Sur 11, and we cannot guarantee that packages will continue to work on older versions of macOS.
|
||||
Users on old macOS versions should consider upgrading to a supported version (potentially using [OpenCore Legacy Patcher](https://dortania.github.io/OpenCore-Legacy-Patcher/) for old hardware) or installing NixOS.
|
||||
If neither of those options are viable and you require new versions of software, [MacPorts](https://www.macports.org/) supports versions back to Mac OS X Snow Leopard 10.6.
|
||||
|
||||
- This will be the last release of Nixpkgs to support versions of CUDA prior to CUDA 12.0.
|
||||
These versions only work with old compiler versions that will be unsupported by the time of the Nixpkgs 25.05 release.
|
||||
In the future, users should expect CUDA versions to be dropped as the compiler versions they require leave upstream support windows.
|
||||
|
@ -29,6 +34,9 @@
|
|||
|
||||
- The `moonlight-qt` package (for [Moonlight game streaming](https://moonlight-stream.org/)) now has HDR support on Linux systems.
|
||||
|
||||
- [Sched-ext](https://github.com/sched-ext/scx), a Linux kernel feature to run schedulers in userspace, is now available [`services.scx`](options.html#opt-services.scx.enable).
|
||||
Requires Linux kernel version 6.12 or later.
|
||||
|
||||
- PostgreSQL now defaults to major version 16.
|
||||
|
||||
- GNOME has been updated to version 47. Refer to the [release notes](https://release.gnome.org/47/) for more details.
|
||||
|
@ -36,6 +44,8 @@
|
|||
- `authelia` has been upgraded to version 4.38. This version brings several features and improvements which are detailed in the [release blog post](https://www.authelia.com/blog/4.38-release-notes/).
|
||||
This release also deprecates some configuration keys which are likely to be removed in version 5.0.0.
|
||||
|
||||
- `netbird` has been updated to 0.31.1. This adds a built-in relay server which is not yet supported by the NixOS module, as well as a metrics endpoint for both the management and signal services. The default metrics port for the `signal` service has been changed from `9090` to `9091` to prevent a port conflict with the management server. This can be changed with their respective `metricsPort` as needed. Refer to the [release notes](https://github.com/netbirdio/netbird/releases/tag/v0.31.1) and [this pull request](https://github.com/NixOS/nixpkgs/pull/354032#issuecomment-2480925927) for more information.
|
||||
|
||||
- `compressDrv` can compress selected files in a derivation. `compressDrvWeb` compresses files for common web server usage (`.gz` with `zopfli`, `.br` with `brotli`).
|
||||
|
||||
- [`hardware.display`](#opt-hardware.display.edid.enable) is a new module implementing workarounds for misbehaving monitors
|
||||
|
@ -77,6 +87,8 @@
|
|||
|
||||
## New Modules {#sec-release-24.11-new-modules}
|
||||
|
||||
- [Coral](https://coral.ai/), hardware support for Coral.ai Edge TPU devices. Available as [hardware.coral.usb.enable](#opt-hardware.coral.usb.enable) and [hardware.coral.pcie.enable](#opt-hardware.coral.pcie.enable).
|
||||
|
||||
- [Cyrus IMAP](https://github.com/cyrusimap/cyrus-imapd), an email, contacts and calendar server. Available as [services.cyrus-imap](#opt-services.cyrus-imap.enable) service.
|
||||
|
||||
- [TaskChampion Sync-Server](https://github.com/GothenburgBitFactory/taskchampion-sync-server), a [Taskwarrior 3](https://taskwarrior.org/docs/upgrade-3/) sync server. Available as [services.taskchampion-sync-server](#opt-services.taskchampion-sync-server.enable).
|
||||
|
@ -85,7 +97,7 @@
|
|||
|
||||
- [Gancio](https://gancio.org/), a shared agenda for local communities. Available as [services.gancio](#opt-services.gancio.enable).
|
||||
|
||||
- [Goatcounter](https://www.goatcounter.com/), an easy web analytics platform with no tracking of personal data. Available as [services.goatcounter](options.html#opt-services.goatcocunter.enable).
|
||||
- [Goatcounter](https://www.goatcounter.com/), an easy web analytics platform with no tracking of personal data. Available as [services.goatcounter](options.html#opt-services.goatcounter.enable).
|
||||
|
||||
- [Privatebin](https://github.com/PrivateBin/PrivateBin/), a minimalist, open source online pastebin where the server has zero knowledge of pasted data. Available as [services.privatebin](#opt-services.privatebin.enable).
|
||||
|
||||
|
@ -123,6 +135,8 @@
|
|||
|
||||
- [Radicle](https://radicle.xyz), an open source, peer-to-peer code collaboration stack built on Git. Available as [services.radicle](#opt-services.radicle.enable).
|
||||
|
||||
- [Ordinal](https://github.com/snu-sf/Ordinal), A library for ordinal numbers in the Coq proof assistant.
|
||||
|
||||
- [ddns-updater](https://github.com/qdm12/ddns-updater), a service with a WebUI to update DNS records periodically for many providers. Available as [services.ddns-updater](#opt-services.ddns-updater.enable).
|
||||
|
||||
- [Immersed](https://immersed.com/), a closed-source coworking platform. Available as [programs.immersed](#opt-programs.immersed.enable).
|
||||
|
@ -139,8 +153,6 @@
|
|||
|
||||
- [zeronsd](https://github.com/zerotier/zeronsd), a DNS server for ZeroTier users. Available with [services.zeronsd.servedNetworks](#opt-services.zeronsd.servedNetworks).
|
||||
|
||||
- [agorakit](https://github.com/agorakit/agorakit), an organization tool for citizens' collectives. Available with [services.agorakit](#opt-services.agorakit.enable).
|
||||
|
||||
- [Collabora Online](https://www.collaboraonline.com/), a collaborative online office suite based on LibreOffice technology. Available as [services.collabora-online](options.html#opt-services.collabora-online.enable).
|
||||
|
||||
- [wg-access-server](https://github.com/freifunkMUC/wg-access-server/), an all-in-one WireGuard VPN solution with a WebUI for connecting devices. Available as [services.wg-access-server](#opt-services.wg-access-server.enable).
|
||||
|
@ -432,6 +444,9 @@
|
|||
|
||||
- `gitea` no longer supports the opt-in feature [PAM (Pluggable Authentication Module)](https://docs.gitea.com/usage/authentication#pam-pluggable-authentication-module).
|
||||
|
||||
- `vuze` was removed because it is unmaintained upstream and insecure (CVE-2018-13417).
|
||||
BiglyBT is a maintained fork.
|
||||
|
||||
- `services.ddclient.use` has been deprecated: `ddclient` now supports separate IPv4 and IPv6 configuration. Use `services.ddclient.usev4` and `services.ddclient.usev6` instead.
|
||||
|
||||
- `services.pgbouncer` systemd service is now configured with `Type=notify-reload` and allows reloading configuration without process restart. PgBouncer configuration options were moved to the freeform type option under [`services.pgbouncer.settings`](#opt-services.pgbouncer.settings).
|
||||
|
@ -734,7 +749,6 @@
|
|||
- The `atlassian-crowd` package and its `services.crowd` NixOS module
|
||||
- The `atlassian-jira` package and its `services.jira` NixOS module
|
||||
|
||||
|
||||
- `python3Packages.nose` has been removed, as it has been deprecated and unmaintained for almost a decade and does not work on Python 3.12.
|
||||
Please switch to `pytest` or another test runner/framework.
|
||||
|
||||
|
@ -752,6 +766,8 @@
|
|||
rather than dotnet 6. For packages that still need dotnet 6, use
|
||||
`dotnet-sdk_6`, etc.
|
||||
|
||||
- torq has been removed because upstreamed went closed source.
|
||||
|
||||
## Other Notable Changes {#sec-release-24.11-notable-changes}
|
||||
|
||||
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
|
||||
|
@ -781,10 +797,6 @@
|
|||
not the `hare` package, should be added to `nativeBuildInputs` when building
|
||||
Hare programs.
|
||||
|
||||
- [`lib.options.mkPackageOptionMD`](https://nixos.org/manual/nixpkgs/unstable#function-library-lib.options.mkPackageOptionMD) is now obsolete; use the identical [`lib.options.mkPackageOption`](https://nixos.org/manual/nixpkgs/unstable#function-library-lib.options.mkPackageOption) instead.
|
||||
|
||||
- `lib.misc.mapAttrsFlatten` is now formally deprecated and will be removed in future releases; use the identical [`lib.attrsets.mapAttrsToList`](https://nixos.org/manual/nixpkgs/unstable#function-library-lib.attrsets.mapAttrsToList) instead.
|
||||
|
||||
- `virtualisation.docker.liveRestore` has been renamed to `virtualisation.docker.daemon.settings."live-restore"` and turned off by default for state versions of at least 24.11.
|
||||
|
||||
- Tailscale's `authKeyFile` can now have its corresponding parameters set through `config.services.tailscale.authKeyParameters`, allowing for non-ephemeral unsupervised deployment and more.
|
||||
|
@ -808,6 +820,8 @@
|
|||
Note that first solution of the [official FAQ answer](https://cloud.seatable.io/dtable/external-links/7b976c85f504491cbe8e/?tid=0000&vid=0000&row-id=BQhH-2HSQs68Nq2EW91DBA)
|
||||
is not allowed by the `services.nginx` module's config-checker.
|
||||
|
||||
- The new option `boot.binfmt.addEmulatedSystemsToNixSandbox` allows you to skip adding the emulated systems to `nix.settings.extra-platforms`. Now you can emulate foreign binaries locally while only building them on native remote builders.
|
||||
|
||||
- The latest available version of Nextcloud is v30 (available as `pkgs.nextcloud30`). The installation logic is as follows:
|
||||
- If [`services.nextcloud.package`](#opt-services.nextcloud.package) is specified explicitly, this package will be installed (**recommended**)
|
||||
- If [`system.stateVersion`](#opt-system.stateVersion) is >=24.05, `pkgs.nextcloud29` will be installed by default.
|
||||
|
@ -852,6 +866,8 @@
|
|||
|
||||
- The `shadowstack` hardening flag has been added, though disabled by default.
|
||||
|
||||
- `writeReferencesToFile` has been removed after its deprecation in 24.05. Use the trivial build helper `writeClosure` instead.
|
||||
|
||||
- `xxd` is now provided by the `tinyxxd` package rather than `vim.xxd` to reduce closure size and vulnerability impact. Since it has the same options and semantics as Vim's `xxd` utility, there is no user impact. Vim's `xxd` remains available as the `vim.xxd` package.
|
||||
|
||||
- `restic` module now has an option for inhibiting system sleep while backups are running, defaulting to off (not inhibiting sleep). Available as [`services.restic.backups.<name>.inhibitsSleep`](#opt-services.restic.backups._name_.inhibitsSleep).
|
||||
|
@ -893,6 +909,8 @@
|
|||
|
||||
- `virtualisation.incus` module gained new `incus-user.service` and `incus-user.socket` systemd units. It is now possible to add a user to `incus` group instead of `incus-admin` for increased security.
|
||||
|
||||
- `freecad` now supports addons and custom configuration in nix-way, which can be used by calling `freecad.customize`.
|
||||
|
||||
## Detailed Migration Information {#sec-release-24.11-migration}
|
||||
|
||||
### `sound` options removal {#sec-release-24.11-migration-sound}
|
||||
|
@ -954,3 +972,62 @@ To provide some examples:
|
|||
| `"foo", "bar"` | `"baz", "bar"` | no match | match | One compatible string matching is enough |
|
||||
|
||||
Note that this also allows writing overlays that explicitly apply to multiple boards.
|
||||
|
||||
## Nixpkgs Library {#sec-release-24.11-lib}
|
||||
|
||||
### Breaking changes {#sec-release-24.11-lib-breaking}
|
||||
|
||||
- [`lib.escapeShellArg`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.strings.escapeShellArg) and [`lib.escapeShellArgs`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.strings.escapeShellArgs): Arguments that don't need to be escaped won't be anymore, which is not breaking according to the functions documentation, but it can cause breakages if used for the non-intended use cases.
|
||||
- [`lib.warn msg val`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.trivial.warn) (and its relatives [`lib.warnIf`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.trivial.warnIf) and [`lib.warnIfNot`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.trivial.warnIfNot)) now require `msg` to be a string to match the behavior of the new [`builtins.warn`](https://nix.dev/manual/nix/2.25/language/builtins.html?highlight=warn#builtins-warn).
|
||||
- `lib.mdDoc`: Removed after deprecation in the previous release.
|
||||
|
||||
### Additions and Improvements {#sec-release-24.11-lib-additions-improvements}
|
||||
|
||||
New and extended interfaces:
|
||||
- [`lib.fromHexString`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.trivial.fromHexString): Convert a hexadecimal string to it's integer representation.
|
||||
- `lib.network.ipv6.fromString`: Parse an IPv6 address.
|
||||
- [`lib.getLicenseFromSpdxIdOr`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.meta.getLicenseFromSpdxIdOr): Get the corresponding attribute in `lib.licenses` from an SPDX ID or fall back to the given default value.
|
||||
- [`lib.licensesSpdx`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.meta.licensesSpdx): Mapping of SPDX ID to the attributes in `lib.licenses`.
|
||||
- [`lib.getFirstOutput`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.attrsets.getFirstOutput): Like `getOutput` but with a list of fallback output names.
|
||||
- [`lib.getInclude`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.attrsets.getInclude) and [`lib.getStatic`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.attrsets.getStatic): Get a package’s `include`/`static` output.
|
||||
- [`lib.trim`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.strings.trim) and [`lib.trimWith`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.strings.trimWith): Remove leading and trailing whitespace from a string.
|
||||
- [`lib.meta.defaultPriority`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.meta.defaultPriority): The default priority of packages in Nix.
|
||||
- [`lib.toExtension`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.fixedPoints.toExtension): Convert to an extending function (overlay).
|
||||
- `lib.fetchers.normalizeHash`: Convert an attrset containing one of `hash`, `sha256` or `sha512` into one containing `outputHash{,Algo}` as accepted by `mkDerivation`.
|
||||
- `lib.fetchers.withNormalizedHash`: Wraps a function which accepts `outputHash{,Algo}` into one which accepts `hash`, `sha256` or `sha512`.
|
||||
- Various builtins are now reexported in a more standard way:
|
||||
- `lib.map` -> `lib.lists.map` -> `builtins.map`
|
||||
- `lib.intersectAttrs` -> `lib.attrsets.intersectAttrs` -> `builtins.intersectAttrs`
|
||||
- `lib.removeAttrs` -> `lib.attrsets.removeAttrs` -> `builtins.removeAttrs`
|
||||
- `lib.match` -> `lib.strings.match` -> `builtins.match`
|
||||
- `lib.split` -> `lib.strings.split` -> `builtins.split`
|
||||
- `lib.typeOf` -> `builtins.typeOf`
|
||||
- `lib.unsafeGetAttrPos` -> `builtins.unsafeGetAttrPos`
|
||||
- [`lib.cli.toGNUCommandLine`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.cli.toGNUCommandLine) now supports the `optionValueSeparator` argument attribute to control the key-value separator for arguments.
|
||||
|
||||
Documentation improvements:
|
||||
- Much of the documentation has been migrated to the [standard doc-comment format](https://github.com/NixOS/rfcs/pull/145), including [`lib.derivations`](https://nixos.org/manual/nixpkgs/unstable/#sec-functions-library-derivations), [`lib.fixedPoints`](https://nixos.org/manual/nixpkgs/unstable/#sec-functions-library-fixedPoints), [`lib.gvariant`](https://nixos.org/manual/nixpkgs/unstable/#sec-functions-library-gvariant), [`lib.filesystem`](https://nixos.org/manual/nixpkgs/unstable/#sec-functions-library-filesystem), [`lib.strings`](https://nixos.org/manual/nixpkgs/unstable/#sec-functions-library-strings), [`lib.meta`](https://nixos.org/manual/nixpkgs/unstable/#sec-functions-library-meta).
|
||||
- [`lib.generators` documentation](https://nixos.org/manual/nixpkgs/unstable/#sec-functions-library-generators) is now improved and rendered in the manual.
|
||||
- [`lib.cli` documentation](https://nixos.org/manual/nixpkgs/unstable/#sec-functions-library-cli) is now improved and rendered in the manual.
|
||||
- [`lib.composeExtensions`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.fixedPoints.composeExtensions) and [`lib.composeManyExtensions`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.fixedPoints.composeManyExtensions) documentation is now improved.
|
||||
- [`lib.importTOML`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.trivial.importTOML) and [`lib.importJSON`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.trivial.importJSON)'s documentation now have an example.
|
||||
|
||||
Module System:
|
||||
- `lib.importApply`: New function, imports a Nix expression file much like the module system would, after passing an extra positional argument to the function in the file.
|
||||
- Improve error message when accessing an option that isn't defined.
|
||||
- `lib.types.anything`: Don't fail to merge when specifying the same list multiple times.
|
||||
- Improve error when loading a flake as a module.
|
||||
|
||||
### Deprecations {#sec-release-24.11-lib-deprecations}
|
||||
|
||||
- [`lib.options.mkPackageOptionMD`](https://nixos.org/manual/nixpkgs/unstable#function-library-lib.options.mkPackageOptionMD) is now obsolete; use the identical [`lib.options.mkPackageOption`](https://nixos.org/manual/nixpkgs/unstable#function-library-lib.options.mkPackageOption) instead.
|
||||
- `lib.misc.mapAttrsFlatten` is now formally deprecated and will be removed in future releases; use the identical [`lib.attrsets.mapAttrsToList`](https://nixos.org/manual/nixpkgs/unstable#function-library-lib.attrsets.mapAttrsToList) instead.
|
||||
- `lib.isInOldestRelease`: Renamed to [`oldestSupportedReleaseIsAtLeast`](https://nixos.org/manual/nixpkgs/unstable/#function-library-lib.trivial.oldestSupportedReleaseIsAtLeast) and deprecated.
|
||||
|
||||
## NixOS Wiki {#sec-release-24.11-wiki}
|
||||
|
||||
The official NixOS Wiki at [wiki.nixos.org](https://wiki.nixos.org/) was launched in April 2024, featuring
|
||||
content initially copied from the community wiki. The wiki enhances the official documentation, linking to
|
||||
existing resources and providing a categorization system for easy navigation, and is guided by a new "Manual
|
||||
of Style" — a contribution guide and enhanced templates. It offers a wealth of new information, including
|
||||
articles on applications, desktop environments, and a growing number of translations in multiple languages.
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
|
||||
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
|
||||
|
||||
- Create the first release note entry in this section!
|
||||
- The default PHP version has been updated to 8.3.
|
||||
|
||||
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
|
||||
|
||||
|
@ -14,16 +14,70 @@
|
|||
|
||||
- [Kimai](https://www.kimai.org/), a web-based multi-user time-tracking application. Available as [services.kimai](option.html#opt-services.kimai).
|
||||
|
||||
- [Omnom](https://github.com/asciimoo/omnom), a webpage bookmarking and snapshotting service. Available as [services.omnom](options.html#opt-services.omnom.enable).
|
||||
|
||||
- [Traccar](https://www.traccar.org/), a modern GPS Tracking Platform. Available as [services.traccar](#opt-services.traccar.enable).
|
||||
|
||||
- [Amazon CloudWatch Agent](https://github.com/aws/amazon-cloudwatch-agent), the official telemetry collector for AWS CloudWatch and AWS X-Ray. Available as [services.amazon-cloudwatch-agent](#opt-services.amazon-cloudwatch-agent.enable).
|
||||
|
||||
- [agorakit](https://github.com/agorakit/agorakit), an organization tool for citizens' collectives. Available with [services.agorakit](#opt-services.agorakit.enable).
|
||||
|
||||
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
|
||||
|
||||
## Backward Incompatibilities {#sec-release-25.05-incompatibilities}
|
||||
|
||||
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
|
||||
|
||||
- `binwalk` was updated to 3.1.0, which has been rewritten in rust. The python module is no longer available.
|
||||
See the release notes of [3.1.0](https://github.com/ReFirmLabs/binwalk/releases/tag/v3.1.0) for more information.
|
||||
|
||||
- `buildGoPackage` has been removed. Use `buildGoModule` instead. See the [Go section in the nixpkgs manual](https://nixos.org/manual/nixpkgs/unstable/#sec-language-go) for details.
|
||||
|
||||
- `strawberry` has been updated to 1.2, which drops support for the VLC backend and Qt 5. The `strawberry-qt5` package
|
||||
and `withGstreamer`/`withVlc` override options have been removed due to this.
|
||||
|
||||
- `timescaledb` requires manual upgrade steps.
|
||||
After you run ALTER EXTENSION, you must run [this SQL script](https://github.com/timescale/timescaledb-extras/blob/master/utils/2.15.X-fix_hypertable_foreign_keys.sql). For more details, see the following pull requests [#6797](https://github.com/timescale/timescaledb/pull/6797).
|
||||
PostgreSQL 13 is no longer supported in TimescaleDB v2.16.
|
||||
|
||||
- Support for CUDA 10 has been dropped, as announced in the 24.11 release notes.
|
||||
|
||||
- `zammad` has had its support for MySQL removed, since it was never working correctly and is now deprecated upstream. Check the [migration guide](https://docs.zammad.org/en/latest/appendix/migrate-to-postgresql.html) for how to convert your database to PostgreSQL.
|
||||
|
||||
- `nodePackages.insect` has been removed, as it's deprecated by upstream. The suggested replacement is `numbat`.
|
||||
|
||||
- The behavior of the `networking.nat.externalIP` and `networking.nat.externalIPv6` options has been changed. `networking.nat.forwardPorts` now only forwards packets destined for the specified IP addresses.
|
||||
|
||||
- `nodePackages.meshcommander` has been removed, as the package was deprecated by Intel.
|
||||
|
||||
- `kanata` was updated to v1.7.0, which introduces several breaking changes.
|
||||
See the release notes of
|
||||
[v1.7.0](https://github.com/jtroo/kanata/releases/tag/v1.7.0)
|
||||
for more information.
|
||||
- `vscode-utils.buildVscodeExtension` now requires pname as an argument
|
||||
|
||||
- `nerdfonts` has been separated into individual font packages under the namespace `nerd-fonts`. The directories for font
|
||||
files have changed from `$out/share/fonts/{opentype,truetype}/NerdFonts` to
|
||||
`$out/share/fonts/{opentype,truetype}/NerdFonts/<fontDirName>`, where `<fontDirName>` can be found in the
|
||||
[official website](https://www.nerdfonts.com/font-downloads) as the titles in preview images, with the "Nerd Font"
|
||||
suffix and any whitespaces trimmed.
|
||||
|
||||
- `retroarch` has been refactored and the older `retroarch.override { cores = [ ... ]; }` to create a RetroArch derivation with custom cores doesn't work anymore, use `retroarch.withCores (cores: [ ... ])` instead. If you need more customization (e.g.: custom settings), use `wrapRetroArch` instead.
|
||||
|
||||
- `gkraken` software and `hardware.gkraken.enable` option have been removed, use `coolercontrol` via `programs.coolercontrol.enable` option instead.
|
||||
|
||||
- `nodePackages.ganache` has been removed, as the package has been deprecated by upstream.
|
||||
|
||||
- `containerd` has been updated to v2, which contains breaking changes. See the [containerd
|
||||
2.0](https://github.com/containerd/containerd/blob/main/docs/containerd-2.0.md) documentation for more
|
||||
details.
|
||||
|
||||
- `nodePackages.stackdriver-statsd-backend` has been removed, as the StackDriver service has been discontinued by Google, and therefore the package no longer works.
|
||||
|
||||
- the notmuch vim plugin now lives in a separate output of the `notmuch`
|
||||
package. Installing `notmuch` will not bring the notmuch vim package anymore,
|
||||
add `vimPlugins.notmuch-vim` to your (Neo)vim configuration if you want the
|
||||
vim plugin.
|
||||
|
||||
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
|
||||
|
||||
|
@ -31,6 +85,8 @@
|
|||
|
||||
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
|
||||
|
||||
- Create the first release note entry in this section!
|
||||
- Cinnamon has been updated to 6.4.
|
||||
|
||||
- `bind.cacheNetworks` now only controls access for recursive queries, where it previously controlled access for all queries.
|
||||
|
||||
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
|
||||
|
|
|
@ -10,7 +10,11 @@ let
|
|||
buildArgs = "../../release.nix -A manualHTML.${builtins.currentSystem}";
|
||||
open = "/${outputPath}/${indexPath}";
|
||||
};
|
||||
nixos-render-docs-redirects = pkgs.writeShellScriptBin "redirects" "${pkgs.lib.getExe pkgs.nixos-render-docs-redirects} --file ${toString ./redirects.json} $@";
|
||||
in
|
||||
pkgs.mkShellNoCC {
|
||||
packages = [ devmode ];
|
||||
packages = [
|
||||
devmode
|
||||
nixos-render-docs-redirects
|
||||
];
|
||||
}
|
||||
|
|
|
@ -163,6 +163,9 @@ To solve this, you can run `fdisk -l $image` and generate `dd if=$image of=$imag
|
|||
, # Disk image format, one of qcow2, qcow2-compressed, vdi, vpc, raw.
|
||||
format ? "raw"
|
||||
|
||||
, # Disk image filename, without any extensions (e.g. `image_1`).
|
||||
baseName ? "nixos"
|
||||
|
||||
# Whether to fix:
|
||||
# - GPT Disk Unique Identifier (diskGUID)
|
||||
# - GPT Partition Unique Identifier: depends on the layout, root partition UUID can be controlled through `rootGPUID` option
|
||||
|
@ -208,7 +211,7 @@ let format' = format; in let
|
|||
|
||||
compress = lib.optionalString (format' == "qcow2-compressed") "-c";
|
||||
|
||||
filename = "nixos." + {
|
||||
filename = "${baseName}." + {
|
||||
qcow2 = "qcow2";
|
||||
vdi = "vdi";
|
||||
vpc = "vhd";
|
||||
|
@ -470,7 +473,7 @@ let format' = format; in let
|
|||
additionalSpace=$(( $(numfmt --from=iec '${additionalSpace}') + reservedSpace ))
|
||||
|
||||
# Compute required space in filesystem blocks
|
||||
diskUsage=$(find . ! -type d -print0 | du --files0-from=- --apparent-size --block-size "${blockSize}" | cut -f1 | sum_lines)
|
||||
diskUsage=$(find . ! -type d -print0 | du --files0-from=- --apparent-size --count-links --block-size "${blockSize}" | cut -f1 | sum_lines)
|
||||
# Each inode takes space!
|
||||
numInodes=$(find . | wc -l)
|
||||
# Convert to bytes, inodes take two blocks each!
|
||||
|
|
|
@ -236,8 +236,7 @@ let
|
|||
image = (
|
||||
pkgs.vmTools.override {
|
||||
rootModules =
|
||||
[ "zfs" "9p" "9pnet_virtio" "virtio_pci" "virtio_blk" ] ++
|
||||
(pkgs.lib.optional pkgs.stdenv.hostPlatform.isx86 "rtc_cmos");
|
||||
[ "zfs" "9p" "9pnet_virtio" "virtio_pci" "virtio_blk" ];
|
||||
kernel = modulesTree;
|
||||
}
|
||||
).runInLinuxVM (
|
||||
|
|
|
@ -39,10 +39,6 @@
|
|||
}
|
||||
```
|
||||
|
||||
## optionsDocBook
|
||||
|
||||
deprecated since 23.11 and will be removed in 24.05.
|
||||
|
||||
## optionsAsciiDoc
|
||||
|
||||
Documentation rendered as AsciiDoc. This is useful for e.g. man pages.
|
||||
|
@ -111,18 +107,8 @@
|
|||
# instead of printing warnings for eg options with missing descriptions (which may be lost
|
||||
# by nix build unless -L is given), emit errors instead and fail the build
|
||||
, warningsAreErrors ? true
|
||||
# allow docbook option docs if `true`. only markdown documentation is allowed when set to
|
||||
# `false`, and a different renderer may be used with different bugs and performance
|
||||
# characteristics but (hopefully) indistinguishable output.
|
||||
# deprecated since 23.11.
|
||||
# TODO remove in a while.
|
||||
, allowDocBook ? false
|
||||
# TODO remove in a while (see https://github.com/NixOS/nixpkgs/issues/300735)
|
||||
, markdownByDefault ? true
|
||||
}:
|
||||
|
||||
assert markdownByDefault && ! allowDocBook;
|
||||
|
||||
let
|
||||
rawOpts = lib.optionAttrSetToDocList options;
|
||||
transformedOpts = map transformOptions rawOpts;
|
||||
|
@ -229,6 +215,4 @@ in rec {
|
|||
echo "file json $dst/options.json" >> $out/nix-support/hydra-build-products
|
||||
echo "file json-br $dst/options.json.br" >> $out/nix-support/hydra-build-products
|
||||
'';
|
||||
|
||||
optionsDocBook = throw "optionsDocBook has been removed in 24.05";
|
||||
}
|
||||
|
|
|
@ -226,8 +226,7 @@ let
|
|||
image = (
|
||||
pkgs.vmTools.override {
|
||||
rootModules =
|
||||
[ "zfs" "9p" "9pnet_virtio" "virtio_pci" "virtio_blk" ] ++
|
||||
(pkgs.lib.optional pkgs.stdenv.hostPlatform.isx86 "rtc_cmos");
|
||||
[ "zfs" "9p" "9pnet_virtio" "virtio_pci" "virtio_blk" ];
|
||||
kernel = modulesTree;
|
||||
}
|
||||
).runInLinuxVM (
|
||||
|
|
|
@ -1,17 +1,18 @@
|
|||
{ lib
|
||||
, python3Packages
|
||||
, enableOCR ? false
|
||||
, qemu_pkg ? qemu_test
|
||||
, coreutils
|
||||
, imagemagick_light
|
||||
, netpbm
|
||||
, qemu_test
|
||||
, socat
|
||||
, ruff
|
||||
, tesseract4
|
||||
, vde2
|
||||
, extraPythonPackages ? (_ : [])
|
||||
, nixosTests
|
||||
{
|
||||
lib,
|
||||
python3Packages,
|
||||
enableOCR ? false,
|
||||
qemu_pkg ? qemu_test,
|
||||
coreutils,
|
||||
imagemagick_light,
|
||||
netpbm,
|
||||
qemu_test,
|
||||
socat,
|
||||
ruff,
|
||||
tesseract4,
|
||||
vde2,
|
||||
extraPythonPackages ? (_: [ ]),
|
||||
nixosTests,
|
||||
}:
|
||||
let
|
||||
fs = lib.fileset;
|
||||
|
@ -19,6 +20,8 @@ in
|
|||
python3Packages.buildPythonApplication {
|
||||
pname = "nixos-test-driver";
|
||||
version = "1.1";
|
||||
pyproject = true;
|
||||
|
||||
src = fs.toSource {
|
||||
root = ./.;
|
||||
fileset = fs.unions [
|
||||
|
@ -27,37 +30,50 @@ python3Packages.buildPythonApplication {
|
|||
./extract-docstrings.py
|
||||
];
|
||||
};
|
||||
pyproject = true;
|
||||
|
||||
propagatedBuildInputs = [
|
||||
coreutils
|
||||
netpbm
|
||||
python3Packages.colorama
|
||||
python3Packages.junit-xml
|
||||
python3Packages.ptpython
|
||||
qemu_pkg
|
||||
socat
|
||||
vde2
|
||||
]
|
||||
++ (lib.optionals enableOCR [ imagemagick_light tesseract4 ])
|
||||
build-system = with python3Packages; [
|
||||
setuptools
|
||||
];
|
||||
|
||||
dependencies =
|
||||
with python3Packages;
|
||||
[
|
||||
colorama
|
||||
junit-xml
|
||||
ptpython
|
||||
]
|
||||
++ extraPythonPackages python3Packages;
|
||||
|
||||
nativeBuildInputs = [
|
||||
python3Packages.setuptools
|
||||
];
|
||||
propagatedBuildInputs =
|
||||
[
|
||||
coreutils
|
||||
netpbm
|
||||
qemu_pkg
|
||||
socat
|
||||
vde2
|
||||
]
|
||||
++ lib.optionals enableOCR [
|
||||
imagemagick_light
|
||||
tesseract4
|
||||
];
|
||||
|
||||
passthru.tests = {
|
||||
inherit (nixosTests.nixos-test-driver) driver-timeout;
|
||||
};
|
||||
|
||||
doCheck = true;
|
||||
nativeCheckInputs = with python3Packages; [ mypy ruff black ];
|
||||
|
||||
nativeCheckInputs = with python3Packages; [
|
||||
mypy
|
||||
ruff
|
||||
];
|
||||
|
||||
checkPhase = ''
|
||||
echo -e "\x1b[32m## run mypy\x1b[0m"
|
||||
mypy test_driver extract-docstrings.py
|
||||
echo -e "\x1b[32m## run ruff\x1b[0m"
|
||||
echo -e "\x1b[32m## run ruff check\x1b[0m"
|
||||
ruff check .
|
||||
echo -e "\x1b[32m## run black\x1b[0m"
|
||||
black --check --diff .
|
||||
echo -e "\x1b[32m## run ruff format\x1b[0m"
|
||||
ruff format --check --diff .
|
||||
'';
|
||||
}
|
||||
|
|
|
@ -17,6 +17,7 @@ find = {}
|
|||
test_driver = ["py.typed"]
|
||||
|
||||
[tool.ruff]
|
||||
target-version = "py312"
|
||||
line-length = 88
|
||||
|
||||
lint.select = ["E", "F", "I", "U", "N"]
|
||||
|
@ -35,11 +36,6 @@ ignore_missing_imports = true
|
|||
module = "junit_xml.*"
|
||||
ignore_missing_imports = true
|
||||
|
||||
[tool.black]
|
||||
line-length = 88
|
||||
target-version = ['py39']
|
||||
include = '\.pyi?$'
|
||||
|
||||
[tool.mypy]
|
||||
warn_redundant_casts = true
|
||||
disallow_untyped_calls = true
|
||||
|
|
|
@ -3,11 +3,10 @@ import re
|
|||
import signal
|
||||
import tempfile
|
||||
import threading
|
||||
from contextlib import contextmanager
|
||||
from collections.abc import Callable, Iterator
|
||||
from contextlib import AbstractContextManager, contextmanager
|
||||
from pathlib import Path
|
||||
from typing import Any, Callable, ContextManager, Dict, Iterator, List, Optional, Union
|
||||
|
||||
from colorama import Fore, Style
|
||||
from typing import Any
|
||||
|
||||
from test_driver.logger import AbstractLogger
|
||||
from test_driver.machine import Machine, NixStartScript, retry
|
||||
|
@ -44,17 +43,17 @@ class Driver:
|
|||
and runs the tests"""
|
||||
|
||||
tests: str
|
||||
vlans: List[VLan]
|
||||
machines: List[Machine]
|
||||
polling_conditions: List[PollingCondition]
|
||||
vlans: list[VLan]
|
||||
machines: list[Machine]
|
||||
polling_conditions: list[PollingCondition]
|
||||
global_timeout: int
|
||||
race_timer: threading.Timer
|
||||
logger: AbstractLogger
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
start_scripts: List[str],
|
||||
vlans: List[int],
|
||||
start_scripts: list[str],
|
||||
vlans: list[int],
|
||||
tests: str,
|
||||
out_dir: Path,
|
||||
logger: AbstractLogger,
|
||||
|
@ -73,7 +72,7 @@ class Driver:
|
|||
vlans = list(set(vlans))
|
||||
self.vlans = [VLan(nr, tmp_dir, self.logger) for nr in vlans]
|
||||
|
||||
def cmd(scripts: List[str]) -> Iterator[NixStartScript]:
|
||||
def cmd(scripts: list[str]) -> Iterator[NixStartScript]:
|
||||
for s in scripts:
|
||||
yield NixStartScript(s)
|
||||
|
||||
|
@ -119,7 +118,7 @@ class Driver:
|
|||
self.logger.error(f'Test "{name}" failed with error: "{e}"')
|
||||
raise e
|
||||
|
||||
def test_symbols(self) -> Dict[str, Any]:
|
||||
def test_symbols(self) -> dict[str, Any]:
|
||||
@contextmanager
|
||||
def subtest(name: str) -> Iterator[None]:
|
||||
return self.subtest(name)
|
||||
|
@ -205,48 +204,11 @@ class Driver:
|
|||
|
||||
def create_machine(
|
||||
self,
|
||||
start_command: str | dict,
|
||||
start_command: str,
|
||||
*,
|
||||
name: Optional[str] = None,
|
||||
name: str | None = None,
|
||||
keep_vm_state: bool = False,
|
||||
) -> Machine:
|
||||
# Legacy args handling
|
||||
# FIXME: remove after 24.05
|
||||
if isinstance(start_command, dict):
|
||||
if name is not None or keep_vm_state:
|
||||
raise TypeError(
|
||||
"Dictionary passed to create_machine must be the only argument"
|
||||
)
|
||||
|
||||
args = start_command
|
||||
start_command = args.pop("startCommand", SENTINEL)
|
||||
|
||||
if start_command is SENTINEL:
|
||||
raise TypeError(
|
||||
"Dictionary passed to create_machine must contain startCommand"
|
||||
)
|
||||
|
||||
if not isinstance(start_command, str):
|
||||
raise TypeError(
|
||||
f"startCommand must be a string, got: {repr(start_command)}"
|
||||
)
|
||||
|
||||
name = args.pop("name", None)
|
||||
keep_vm_state = args.pop("keep_vm_state", False)
|
||||
|
||||
if args:
|
||||
raise TypeError(
|
||||
f"Unsupported arguments passed to create_machine: {args}"
|
||||
)
|
||||
|
||||
self.logger.warning(
|
||||
Fore.YELLOW
|
||||
+ Style.BRIGHT
|
||||
+ "WARNING: Using create_machine with a single dictionary argument is deprecated and will be removed in NixOS 24.11"
|
||||
+ Style.RESET_ALL
|
||||
)
|
||||
# End legacy args handling
|
||||
|
||||
tmp_dir = get_tmp_dir()
|
||||
|
||||
cmd = NixStartScript(start_command)
|
||||
|
@ -273,11 +235,11 @@ class Driver:
|
|||
|
||||
def polling_condition(
|
||||
self,
|
||||
fun_: Optional[Callable] = None,
|
||||
fun_: Callable | None = None,
|
||||
*,
|
||||
seconds_interval: float = 2.0,
|
||||
description: Optional[str] = None,
|
||||
) -> Union[Callable[[Callable], ContextManager], ContextManager]:
|
||||
description: str | None = None,
|
||||
) -> Callable[[Callable], AbstractContextManager] | AbstractContextManager:
|
||||
driver = self
|
||||
|
||||
class Poll:
|
||||
|
|
|
@ -5,10 +5,11 @@ import sys
|
|||
import time
|
||||
import unicodedata
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Iterator
|
||||
from contextlib import ExitStack, contextmanager
|
||||
from pathlib import Path
|
||||
from queue import Empty, Queue
|
||||
from typing import Any, Dict, Iterator, List
|
||||
from typing import Any
|
||||
from xml.sax.saxutils import XMLGenerator
|
||||
from xml.sax.xmlreader import AttributesImpl
|
||||
|
||||
|
@ -18,17 +19,17 @@ from junit_xml import TestCase, TestSuite
|
|||
|
||||
class AbstractLogger(ABC):
|
||||
@abstractmethod
|
||||
def log(self, message: str, attributes: Dict[str, str] = {}) -> None:
|
||||
def log(self, message: str, attributes: dict[str, str] = {}) -> None:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
@contextmanager
|
||||
def subtest(self, name: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def subtest(self, name: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
@contextmanager
|
||||
def nested(self, message: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def nested(self, message: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
|
@ -68,11 +69,11 @@ class JunitXMLLogger(AbstractLogger):
|
|||
self._print_serial_logs = True
|
||||
atexit.register(self.close)
|
||||
|
||||
def log(self, message: str, attributes: Dict[str, str] = {}) -> None:
|
||||
def log(self, message: str, attributes: dict[str, str] = {}) -> None:
|
||||
self.tests[self.currentSubtest].stdout += message + os.linesep
|
||||
|
||||
@contextmanager
|
||||
def subtest(self, name: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def subtest(self, name: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
old_test = self.currentSubtest
|
||||
self.tests.setdefault(name, self.TestCaseState())
|
||||
self.currentSubtest = name
|
||||
|
@ -82,7 +83,7 @@ class JunitXMLLogger(AbstractLogger):
|
|||
self.currentSubtest = old_test
|
||||
|
||||
@contextmanager
|
||||
def nested(self, message: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def nested(self, message: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
self.log(message)
|
||||
yield
|
||||
|
||||
|
@ -123,25 +124,25 @@ class JunitXMLLogger(AbstractLogger):
|
|||
|
||||
|
||||
class CompositeLogger(AbstractLogger):
|
||||
def __init__(self, logger_list: List[AbstractLogger]) -> None:
|
||||
def __init__(self, logger_list: list[AbstractLogger]) -> None:
|
||||
self.logger_list = logger_list
|
||||
|
||||
def add_logger(self, logger: AbstractLogger) -> None:
|
||||
self.logger_list.append(logger)
|
||||
|
||||
def log(self, message: str, attributes: Dict[str, str] = {}) -> None:
|
||||
def log(self, message: str, attributes: dict[str, str] = {}) -> None:
|
||||
for logger in self.logger_list:
|
||||
logger.log(message, attributes)
|
||||
|
||||
@contextmanager
|
||||
def subtest(self, name: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def subtest(self, name: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
with ExitStack() as stack:
|
||||
for logger in self.logger_list:
|
||||
stack.enter_context(logger.subtest(name, attributes))
|
||||
yield
|
||||
|
||||
@contextmanager
|
||||
def nested(self, message: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def nested(self, message: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
with ExitStack() as stack:
|
||||
for logger in self.logger_list:
|
||||
stack.enter_context(logger.nested(message, attributes))
|
||||
|
@ -173,7 +174,7 @@ class TerminalLogger(AbstractLogger):
|
|||
def __init__(self) -> None:
|
||||
self._print_serial_logs = True
|
||||
|
||||
def maybe_prefix(self, message: str, attributes: Dict[str, str]) -> str:
|
||||
def maybe_prefix(self, message: str, attributes: dict[str, str]) -> str:
|
||||
if "machine" in attributes:
|
||||
return f"{attributes['machine']}: {message}"
|
||||
return message
|
||||
|
@ -182,16 +183,16 @@ class TerminalLogger(AbstractLogger):
|
|||
def _eprint(*args: object, **kwargs: Any) -> None:
|
||||
print(*args, file=sys.stderr, **kwargs)
|
||||
|
||||
def log(self, message: str, attributes: Dict[str, str] = {}) -> None:
|
||||
def log(self, message: str, attributes: dict[str, str] = {}) -> None:
|
||||
self._eprint(self.maybe_prefix(message, attributes))
|
||||
|
||||
@contextmanager
|
||||
def subtest(self, name: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def subtest(self, name: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
with self.nested("subtest: " + name, attributes):
|
||||
yield
|
||||
|
||||
@contextmanager
|
||||
def nested(self, message: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def nested(self, message: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
self._eprint(
|
||||
self.maybe_prefix(
|
||||
Style.BRIGHT + Fore.GREEN + message + Style.RESET_ALL, attributes
|
||||
|
@ -241,12 +242,12 @@ class XMLLogger(AbstractLogger):
|
|||
def sanitise(self, message: str) -> str:
|
||||
return "".join(ch for ch in message if unicodedata.category(ch)[0] != "C")
|
||||
|
||||
def maybe_prefix(self, message: str, attributes: Dict[str, str]) -> str:
|
||||
def maybe_prefix(self, message: str, attributes: dict[str, str]) -> str:
|
||||
if "machine" in attributes:
|
||||
return f"{attributes['machine']}: {message}"
|
||||
return message
|
||||
|
||||
def log_line(self, message: str, attributes: Dict[str, str]) -> None:
|
||||
def log_line(self, message: str, attributes: dict[str, str]) -> None:
|
||||
self.xml.startElement("line", attrs=AttributesImpl(attributes))
|
||||
self.xml.characters(message)
|
||||
self.xml.endElement("line")
|
||||
|
@ -260,7 +261,7 @@ class XMLLogger(AbstractLogger):
|
|||
def error(self, *args, **kwargs) -> None: # type: ignore
|
||||
self.log(*args, **kwargs)
|
||||
|
||||
def log(self, message: str, attributes: Dict[str, str] = {}) -> None:
|
||||
def log(self, message: str, attributes: dict[str, str] = {}) -> None:
|
||||
self.drain_log_queue()
|
||||
self.log_line(message, attributes)
|
||||
|
||||
|
@ -273,7 +274,7 @@ class XMLLogger(AbstractLogger):
|
|||
|
||||
self.enqueue({"msg": message, "machine": machine, "type": "serial"})
|
||||
|
||||
def enqueue(self, item: Dict[str, str]) -> None:
|
||||
def enqueue(self, item: dict[str, str]) -> None:
|
||||
self.queue.put(item)
|
||||
|
||||
def drain_log_queue(self) -> None:
|
||||
|
@ -287,12 +288,12 @@ class XMLLogger(AbstractLogger):
|
|||
pass
|
||||
|
||||
@contextmanager
|
||||
def subtest(self, name: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def subtest(self, name: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
with self.nested("subtest: " + name, attributes):
|
||||
yield
|
||||
|
||||
@contextmanager
|
||||
def nested(self, message: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
|
||||
def nested(self, message: str, attributes: dict[str, str] = {}) -> Iterator[None]:
|
||||
self.xml.startElement("nest", attrs=AttributesImpl({}))
|
||||
self.xml.startElement("head", attrs=AttributesImpl(attributes))
|
||||
self.xml.characters(message)
|
||||
|
|
|
@ -12,10 +12,11 @@ import sys
|
|||
import tempfile
|
||||
import threading
|
||||
import time
|
||||
from collections.abc import Callable, Iterable
|
||||
from contextlib import _GeneratorContextManager, nullcontext
|
||||
from pathlib import Path
|
||||
from queue import Queue
|
||||
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple
|
||||
from typing import Any
|
||||
|
||||
from test_driver.logger import AbstractLogger
|
||||
|
||||
|
@ -91,7 +92,7 @@ def make_command(args: list) -> str:
|
|||
|
||||
def _perform_ocr_on_screenshot(
|
||||
screenshot_path: str, model_ids: Iterable[int]
|
||||
) -> List[str]:
|
||||
) -> list[str]:
|
||||
if shutil.which("tesseract") is None:
|
||||
raise Exception("OCR requested but enableOCR is false")
|
||||
|
||||
|
@ -248,19 +249,19 @@ class Machine:
|
|||
start_command: StartCommand
|
||||
keep_vm_state: bool
|
||||
|
||||
process: Optional[subprocess.Popen]
|
||||
pid: Optional[int]
|
||||
monitor: Optional[socket.socket]
|
||||
qmp_client: Optional[QMPSession]
|
||||
shell: Optional[socket.socket]
|
||||
serial_thread: Optional[threading.Thread]
|
||||
process: subprocess.Popen | None
|
||||
pid: int | None
|
||||
monitor: socket.socket | None
|
||||
qmp_client: QMPSession | None
|
||||
shell: socket.socket | None
|
||||
serial_thread: threading.Thread | None
|
||||
|
||||
booted: bool
|
||||
connected: bool
|
||||
# Store last serial console lines for use
|
||||
# of wait_for_console_text
|
||||
last_lines: Queue = Queue()
|
||||
callbacks: List[Callable]
|
||||
callbacks: list[Callable]
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return f"<Machine '{self.name}'>"
|
||||
|
@ -273,7 +274,7 @@ class Machine:
|
|||
logger: AbstractLogger,
|
||||
name: str = "machine",
|
||||
keep_vm_state: bool = False,
|
||||
callbacks: Optional[List[Callable]] = None,
|
||||
callbacks: list[Callable] | None = None,
|
||||
) -> None:
|
||||
self.out_dir = out_dir
|
||||
self.tmp_dir = tmp_dir
|
||||
|
@ -314,7 +315,7 @@ class Machine:
|
|||
def log_serial(self, msg: str) -> None:
|
||||
self.logger.log_serial(msg, self.name)
|
||||
|
||||
def nested(self, msg: str, attrs: Dict[str, str] = {}) -> _GeneratorContextManager:
|
||||
def nested(self, msg: str, attrs: dict[str, str] = {}) -> _GeneratorContextManager:
|
||||
my_attrs = {"machine": self.name}
|
||||
my_attrs.update(attrs)
|
||||
return self.logger.nested(msg, my_attrs)
|
||||
|
@ -343,7 +344,7 @@ class Machine:
|
|||
return self.wait_for_monitor_prompt()
|
||||
|
||||
def wait_for_unit(
|
||||
self, unit: str, user: Optional[str] = None, timeout: int = 900
|
||||
self, unit: str, user: str | None = None, timeout: int = 900
|
||||
) -> None:
|
||||
"""
|
||||
Wait for a systemd unit to get into "active" state.
|
||||
|
@ -373,7 +374,7 @@ class Machine:
|
|||
):
|
||||
retry(check_active, timeout)
|
||||
|
||||
def get_unit_info(self, unit: str, user: Optional[str] = None) -> Dict[str, str]:
|
||||
def get_unit_info(self, unit: str, user: str | None = None) -> dict[str, str]:
|
||||
status, lines = self.systemctl(f'--no-pager show "{unit}"', user)
|
||||
if status != 0:
|
||||
raise Exception(
|
||||
|
@ -384,7 +385,7 @@ class Machine:
|
|||
|
||||
line_pattern = re.compile(r"^([^=]+)=(.*)$")
|
||||
|
||||
def tuple_from_line(line: str) -> Tuple[str, str]:
|
||||
def tuple_from_line(line: str) -> tuple[str, str]:
|
||||
match = line_pattern.match(line)
|
||||
assert match is not None
|
||||
return match[1], match[2]
|
||||
|
@ -399,7 +400,7 @@ class Machine:
|
|||
self,
|
||||
unit: str,
|
||||
property: str,
|
||||
user: Optional[str] = None,
|
||||
user: str | None = None,
|
||||
) -> str:
|
||||
status, lines = self.systemctl(
|
||||
f'--no-pager show "{unit}" --property="{property}"',
|
||||
|
@ -424,7 +425,7 @@ class Machine:
|
|||
assert match[1] == property, invalid_output_message
|
||||
return match[2]
|
||||
|
||||
def systemctl(self, q: str, user: Optional[str] = None) -> Tuple[int, str]:
|
||||
def systemctl(self, q: str, user: str | None = None) -> tuple[int, str]:
|
||||
"""
|
||||
Runs `systemctl` commands with optional support for
|
||||
`systemctl --user`
|
||||
|
@ -480,8 +481,8 @@ class Machine:
|
|||
command: str,
|
||||
check_return: bool = True,
|
||||
check_output: bool = True,
|
||||
timeout: Optional[int] = 900,
|
||||
) -> Tuple[int, str]:
|
||||
timeout: int | None = 900,
|
||||
) -> tuple[int, str]:
|
||||
"""
|
||||
Execute a shell command, returning a list `(status, stdout)`.
|
||||
|
||||
|
@ -548,7 +549,7 @@ class Machine:
|
|||
|
||||
return (rc, output.decode(errors="replace"))
|
||||
|
||||
def shell_interact(self, address: Optional[str] = None) -> None:
|
||||
def shell_interact(self, address: str | None = None) -> None:
|
||||
"""
|
||||
Allows you to directly interact with the guest shell. This should
|
||||
only be used during test development, not in production tests.
|
||||
|
@ -595,7 +596,7 @@ class Machine:
|
|||
break
|
||||
self.send_console(char.decode())
|
||||
|
||||
def succeed(self, *commands: str, timeout: Optional[int] = None) -> str:
|
||||
def succeed(self, *commands: str, timeout: int | None = None) -> str:
|
||||
"""
|
||||
Execute a shell command, raising an exception if the exit status is
|
||||
not zero, otherwise returning the standard output. Similar to `execute`,
|
||||
|
@ -612,7 +613,7 @@ class Machine:
|
|||
output += out
|
||||
return output
|
||||
|
||||
def fail(self, *commands: str, timeout: Optional[int] = None) -> str:
|
||||
def fail(self, *commands: str, timeout: int | None = None) -> str:
|
||||
"""
|
||||
Like `succeed`, but raising an exception if the command returns a zero
|
||||
status.
|
||||
|
@ -724,7 +725,7 @@ class Machine:
|
|||
with self.nested(f"waiting for {regexp} to appear on tty {tty}"):
|
||||
retry(tty_matches, timeout)
|
||||
|
||||
def send_chars(self, chars: str, delay: Optional[float] = 0.01) -> None:
|
||||
def send_chars(self, chars: str, delay: float | None = 0.01) -> None:
|
||||
"""
|
||||
Simulate typing a sequence of characters on the virtual keyboard,
|
||||
e.g., `send_chars("foobar\n")` will type the string `foobar`
|
||||
|
@ -798,10 +799,10 @@ class Machine:
|
|||
with self.nested(f"waiting for TCP port {port} on {addr} to be closed"):
|
||||
retry(port_is_closed, timeout)
|
||||
|
||||
def start_job(self, jobname: str, user: Optional[str] = None) -> Tuple[int, str]:
|
||||
def start_job(self, jobname: str, user: str | None = None) -> tuple[int, str]:
|
||||
return self.systemctl(f"start {jobname}", user)
|
||||
|
||||
def stop_job(self, jobname: str, user: Optional[str] = None) -> Tuple[int, str]:
|
||||
def stop_job(self, jobname: str, user: str | None = None) -> tuple[int, str]:
|
||||
return self.systemctl(f"stop {jobname}", user)
|
||||
|
||||
def wait_for_job(self, jobname: str) -> None:
|
||||
|
@ -942,13 +943,13 @@ class Machine:
|
|||
"""Debugging: Dump the contents of the TTY<n>"""
|
||||
self.execute(f"fold -w 80 /dev/vcs{tty} | systemd-cat")
|
||||
|
||||
def _get_screen_text_variants(self, model_ids: Iterable[int]) -> List[str]:
|
||||
def _get_screen_text_variants(self, model_ids: Iterable[int]) -> list[str]:
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
screenshot_path = os.path.join(tmpdir, "ppm")
|
||||
self.send_monitor_command(f"screendump {screenshot_path}")
|
||||
return _perform_ocr_on_screenshot(screenshot_path, model_ids)
|
||||
|
||||
def get_screen_text_variants(self) -> List[str]:
|
||||
def get_screen_text_variants(self) -> list[str]:
|
||||
"""
|
||||
Return a list of different interpretations of what is currently
|
||||
visible on the machine's screen using optical character
|
||||
|
@ -1028,7 +1029,7 @@ class Machine:
|
|||
pass
|
||||
|
||||
def send_key(
|
||||
self, key: str, delay: Optional[float] = 0.01, log: Optional[bool] = True
|
||||
self, key: str, delay: float | None = 0.01, log: bool | None = True
|
||||
) -> None:
|
||||
"""
|
||||
Simulate pressing keys on the virtual keyboard, e.g.,
|
||||
|
@ -1168,7 +1169,7 @@ class Machine:
|
|||
with self.nested("waiting for the X11 server"):
|
||||
retry(check_x, timeout)
|
||||
|
||||
def get_window_names(self) -> List[str]:
|
||||
def get_window_names(self) -> list[str]:
|
||||
return self.succeed(
|
||||
r"xwininfo -root -tree | sed 's/.*0x[0-9a-f]* \"\([^\"]*\)\".*/\1/; t; d'"
|
||||
).splitlines()
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
import time
|
||||
from collections.abc import Callable
|
||||
from math import isfinite
|
||||
from typing import Callable, Optional
|
||||
|
||||
from test_driver.logger import AbstractLogger
|
||||
|
||||
|
@ -12,7 +12,7 @@ class PollingConditionError(Exception):
|
|||
class PollingCondition:
|
||||
condition: Callable[[], bool]
|
||||
seconds_interval: float
|
||||
description: Optional[str]
|
||||
description: str | None
|
||||
logger: AbstractLogger
|
||||
|
||||
last_called: float
|
||||
|
@ -20,10 +20,10 @@ class PollingCondition:
|
|||
|
||||
def __init__(
|
||||
self,
|
||||
condition: Callable[[], Optional[bool]],
|
||||
condition: Callable[[], bool | None],
|
||||
logger: AbstractLogger,
|
||||
seconds_interval: float = 2.0,
|
||||
description: Optional[str] = None,
|
||||
description: str | None = None,
|
||||
):
|
||||
self.condition = condition # type: ignore
|
||||
self.seconds_interval = seconds_interval
|
||||
|
|
19
third_party/nixpkgs/nixos/lib/utils.nix
vendored
19
third_party/nixpkgs/nixos/lib/utils.nix
vendored
|
@ -343,6 +343,25 @@ utils = rec {
|
|||
in
|
||||
filter (x: !(elem (getName x) namesToRemove)) packages;
|
||||
|
||||
/* Returns false if a package with the same name as the `package` is present in `packagesToDisable`.
|
||||
|
||||
Type:
|
||||
disablePackageByName :: package -> [package] -> bool
|
||||
|
||||
Example:
|
||||
disablePackageByName file-roller [ file-roller totem ]
|
||||
=> false
|
||||
|
||||
Example:
|
||||
disablePackageByName nautilus [ file-roller totem ]
|
||||
=> true
|
||||
*/
|
||||
disablePackageByName = package: packagesToDisable:
|
||||
let
|
||||
namesToDisable = map getName packagesToDisable;
|
||||
in
|
||||
!elem (getName package) namesToDisable;
|
||||
|
||||
systemdUtils = {
|
||||
lib = import ./systemd-lib.nix { inherit lib config pkgs utils; };
|
||||
unitOptions = import ./systemd-unit-options.nix { inherit lib systemdUtils; };
|
||||
|
|
|
@ -27,9 +27,6 @@
|
|||
# This should not contain packages that are broken or can't build, since it
|
||||
# will break this expression
|
||||
#
|
||||
# Currently broken packages:
|
||||
# - contour
|
||||
#
|
||||
# can be generated with:
|
||||
# lib.attrNames (lib.filterAttrs
|
||||
# (_: drv: (builtins.tryEval (lib.isDerivation drv && drv ? terminfo)).value)
|
||||
|
@ -39,6 +36,7 @@
|
|||
with pkgs.pkgsBuildBuild;
|
||||
[
|
||||
alacritty
|
||||
contour
|
||||
foot
|
||||
kitty
|
||||
mtm
|
||||
|
|
38
third_party/nixpkgs/nixos/modules/hardware/coral.nix
vendored
Normal file
38
third_party/nixpkgs/nixos/modules/hardware/coral.nix
vendored
Normal file
|
@ -0,0 +1,38 @@
|
|||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
|
||||
let
|
||||
inherit (lib)
|
||||
mkEnableOption
|
||||
mkIf
|
||||
mkMerge
|
||||
;
|
||||
|
||||
cfg = config.hardware.coral;
|
||||
in
|
||||
|
||||
{
|
||||
options.hardware.coral = {
|
||||
usb.enable = mkEnableOption "Coral USB support";
|
||||
pcie.enable = mkEnableOption "Coral PCIe support";
|
||||
};
|
||||
|
||||
config = mkMerge [
|
||||
(mkIf (cfg.usb.enable || cfg.pcie.enable) {
|
||||
users.groups.coral = { };
|
||||
})
|
||||
(mkIf cfg.usb.enable {
|
||||
services.udev.packages = with pkgs; [ libedgetpu ];
|
||||
})
|
||||
(mkIf cfg.pcie.enable {
|
||||
boot.extraModulePackages = with config.boot.kernelPackages; [ gasket ];
|
||||
services.udev.extraRules = ''
|
||||
SUBSYSTEM=="apex",MODE="0660",GROUP="coral"
|
||||
'';
|
||||
})
|
||||
];
|
||||
}
|
|
@ -1,15 +0,0 @@
|
|||
{ config, lib, pkgs, ... }:
|
||||
let
|
||||
cfg = config.hardware.gkraken;
|
||||
in
|
||||
{
|
||||
options.hardware.gkraken = {
|
||||
enable = lib.mkEnableOption "gkraken's udev rules for NZXT AIO liquid coolers";
|
||||
};
|
||||
|
||||
config = lib.mkIf cfg.enable {
|
||||
services.udev.packages = with pkgs; [
|
||||
gkraken
|
||||
];
|
||||
};
|
||||
}
|
|
@ -1,12 +1,9 @@
|
|||
{ config, pkgs, lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
imcfg = config.i18n.inputMethod;
|
||||
in
|
||||
{
|
||||
config = mkIf (imcfg.enable && imcfg.type == "hime") {
|
||||
config = lib.mkIf (imcfg.enable && imcfg.type == "hime") {
|
||||
i18n.inputMethod.package = pkgs.hime;
|
||||
environment.variables = {
|
||||
GTK_IM_MODULE = "hime";
|
||||
|
|
|
@ -1,7 +1,4 @@
|
|||
{ config, pkgs, lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
imcfg = config.i18n.inputMethod;
|
||||
cfg = imcfg.ibus;
|
||||
|
@ -9,10 +6,10 @@ let
|
|||
ibusEngine = lib.types.mkOptionType {
|
||||
name = "ibus-engine";
|
||||
inherit (lib.types.package) descriptionClass merge;
|
||||
check = x: (lib.types.package.check x) && (attrByPath ["meta" "isIbusEngine"] false x);
|
||||
check = x: (lib.types.package.check x) && (lib.attrByPath ["meta" "isIbusEngine"] false x);
|
||||
};
|
||||
|
||||
impanel = optionalString (cfg.panel != null) "--panel=${cfg.panel}";
|
||||
impanel = lib.optionalString (cfg.panel != null) "--panel=${cfg.panel}";
|
||||
|
||||
ibusAutostart = pkgs.writeTextFile {
|
||||
name = "autostart-ibus-daemon";
|
||||
|
@ -29,32 +26,32 @@ let
|
|||
in
|
||||
{
|
||||
imports = [
|
||||
(mkRenamedOptionModule [ "programs" "ibus" "plugins" ] [ "i18n" "inputMethod" "ibus" "engines" ])
|
||||
(lib.mkRenamedOptionModule [ "programs" "ibus" "plugins" ] [ "i18n" "inputMethod" "ibus" "engines" ])
|
||||
];
|
||||
|
||||
options = {
|
||||
i18n.inputMethod.ibus = {
|
||||
engines = mkOption {
|
||||
type = with types; listOf ibusEngine;
|
||||
engines = lib.mkOption {
|
||||
type = with lib.types; listOf ibusEngine;
|
||||
default = [];
|
||||
example = literalExpression "with pkgs.ibus-engines; [ mozc hangul ]";
|
||||
example = lib.literalExpression "with pkgs.ibus-engines; [ mozc hangul ]";
|
||||
description =
|
||||
let
|
||||
enginesDrv = filterAttrs (const isDerivation) pkgs.ibus-engines;
|
||||
engines = concatStringsSep ", "
|
||||
(map (name: "`${name}`") (attrNames enginesDrv));
|
||||
enginesDrv = lib.filterAttrs (lib.const lib.isDerivation) pkgs.ibus-engines;
|
||||
engines = lib.concatStringsSep ", "
|
||||
(map (name: "`${name}`") (lib.attrNames enginesDrv));
|
||||
in "Enabled IBus engines. Available engines are: ${engines}.";
|
||||
};
|
||||
panel = mkOption {
|
||||
type = with types; nullOr path;
|
||||
panel = lib.mkOption {
|
||||
type = with lib.types; nullOr path;
|
||||
default = null;
|
||||
example = literalExpression ''"''${pkgs.plasma5Packages.plasma-desktop}/libexec/kimpanel-ibus-panel"'';
|
||||
example = lib.literalExpression ''"''${pkgs.plasma5Packages.plasma-desktop}/libexec/kimpanel-ibus-panel"'';
|
||||
description = "Replace the IBus panel with another panel.";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf (imcfg.enable && imcfg.type == "ibus") {
|
||||
config = lib.mkIf (imcfg.enable && imcfg.type == "ibus") {
|
||||
i18n.inputMethod.package = ibusPackage;
|
||||
|
||||
environment.systemPackages = [
|
||||
|
@ -76,7 +73,7 @@ in
|
|||
XMODIFIERS = "@im=ibus";
|
||||
};
|
||||
|
||||
xdg.portal.extraPortals = mkIf config.xdg.portal.enable [
|
||||
xdg.portal.extraPortals = lib.mkIf config.xdg.portal.enable [
|
||||
ibusPackage
|
||||
];
|
||||
};
|
||||
|
|
|
@ -1,11 +1,9 @@
|
|||
{ config, pkgs, lib, ... }:
|
||||
|
||||
with lib;
|
||||
let
|
||||
imcfg = config.i18n.inputMethod;
|
||||
in
|
||||
{
|
||||
config = mkIf (imcfg.enable && imcfg.type == "nabi") {
|
||||
config = lib.mkIf (imcfg.enable && imcfg.type == "nabi") {
|
||||
i18n.inputMethod.package = pkgs.nabi;
|
||||
|
||||
environment.variables = {
|
||||
|
|
|
@ -1,7 +1,4 @@
|
|||
{ config, pkgs, lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
imcfg = config.i18n.inputMethod;
|
||||
cfg = imcfg.uim;
|
||||
|
@ -10,8 +7,8 @@ in
|
|||
options = {
|
||||
|
||||
i18n.inputMethod.uim = {
|
||||
toolbar = mkOption {
|
||||
type = types.enum [ "gtk" "gtk3" "gtk-systray" "gtk3-systray" "qt5" ];
|
||||
toolbar = lib.mkOption {
|
||||
type = lib.types.enum [ "gtk" "gtk3" "gtk-systray" "gtk3-systray" "qt5" ];
|
||||
default = "gtk";
|
||||
example = "gtk-systray";
|
||||
description = ''
|
||||
|
@ -22,7 +19,7 @@ in
|
|||
|
||||
};
|
||||
|
||||
config = mkIf (imcfg.enable && imcfg.type == "uim") {
|
||||
config = lib.mkIf (imcfg.enable && imcfg.type == "uim") {
|
||||
i18n.inputMethod.package = pkgs.uim;
|
||||
|
||||
environment.variables = {
|
||||
|
|
48
third_party/nixpkgs/nixos/modules/image/file-options.nix
vendored
Normal file
48
third_party/nixpkgs/nixos/modules/image/file-options.nix
vendored
Normal file
|
@ -0,0 +1,48 @@
|
|||
{
|
||||
lib,
|
||||
config,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
{
|
||||
options.image = {
|
||||
baseName = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = "nixos-image-${config.system.nixos.label}-${pkgs.stdenv.hostPlatform.system}";
|
||||
description = ''
|
||||
Basename of the image filename without any extension (e.g. `image_1`).
|
||||
'';
|
||||
};
|
||||
|
||||
extension = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
description = ''
|
||||
Extension of the image filename (e.g. `raw`).
|
||||
'';
|
||||
};
|
||||
|
||||
# FIXME: this should be marked readOnly, when there are no
|
||||
# mkRenamedOptionModuleWith calls with `image.fileName` as
|
||||
# as a target left anymore (i.e. 24.11). We can't do it
|
||||
# before, as some source options where writable before.
|
||||
# Those should use image.baseName and image.extension instead.
|
||||
fileName = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = "${config.image.baseName}.${config.image.extension}";
|
||||
description = ''
|
||||
Filename of the image including all extensions (e.g `image_1.raw` or
|
||||
`image_1.raw.zst`).
|
||||
'';
|
||||
};
|
||||
|
||||
filePath = lib.mkOption {
|
||||
type = lib.types.str;
|
||||
default = config.image.fileName;
|
||||
description = ''
|
||||
Path of the image, relative to `$out` in `system.build.image`.
|
||||
While it defaults to `config.image.fileName`, it can be different for builders where
|
||||
the image is in sub directory, such as `iso`, `sd-card` or `kexec` images.
|
||||
'';
|
||||
};
|
||||
};
|
||||
}
|
70
third_party/nixpkgs/nixos/modules/image/images.nix
vendored
Normal file
70
third_party/nixpkgs/nixos/modules/image/images.nix
vendored
Normal file
|
@ -0,0 +1,70 @@
|
|||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
extendModules,
|
||||
...
|
||||
}:
|
||||
let
|
||||
inherit (lib) types;
|
||||
|
||||
imageModules = {
|
||||
azure = [ ../virtualisation/azure-image.nix ];
|
||||
digital-ocean = [ ../virtualisation/digital-ocean-image.nix ];
|
||||
google-compute = [ ../virtualisation/google-compute-image.nix ];
|
||||
hyperv = [ ../virtualisation/hyperv-image.nix ];
|
||||
linode = [ ../virtualisation/linode-image.nix ];
|
||||
lxc = [ ../virtualisation/lxc-container.nix ];
|
||||
lxc-metadata = [ ../virtualisation/lxc-image-metadata.nix ];
|
||||
oci = [ ../virtualisation/oci-image.nix ];
|
||||
proxmox = [ ../virtualisation/proxmox-image.nix ];
|
||||
kubevirt = [ ../virtualisation/kubevirt.nix ];
|
||||
vagrant-virtualbox = [ ../virtualisation/vagrant-virtualbox-image.nix ];
|
||||
virtualbox = [ ../virtualisation/virtualbox-image.nix ];
|
||||
vmware = [ ../virtualisation/vmware-image.nix ];
|
||||
};
|
||||
imageConfigs = lib.mapAttrs (
|
||||
name: modules:
|
||||
extendModules {
|
||||
inherit modules;
|
||||
}
|
||||
) config.image.modules;
|
||||
in
|
||||
{
|
||||
options = {
|
||||
system.build = {
|
||||
images = lib.mkOption {
|
||||
type = types.lazyAttrsOf types.raw;
|
||||
readOnly = true;
|
||||
description = ''
|
||||
Different target images generated for this NixOS configuration.
|
||||
'';
|
||||
};
|
||||
};
|
||||
image.modules = lib.mkOption {
|
||||
type = types.attrsOf (types.listOf types.deferredModule);
|
||||
description = ''
|
||||
image-specific NixOS Modules used for `system.build.images`.
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
config.image.modules = lib.mkIf (!config.system.build ? image) imageModules;
|
||||
config.system.build.images = lib.mkIf (!config.system.build ? image) (
|
||||
lib.mapAttrs (
|
||||
name: nixos:
|
||||
let
|
||||
inherit (nixos) config;
|
||||
inherit (config.image) filePath;
|
||||
builder =
|
||||
config.system.build.image
|
||||
or (throw "Module for `system.build.images.${name}` misses required `system.build.image` option.");
|
||||
in
|
||||
lib.recursiveUpdate builder {
|
||||
passthru = {
|
||||
inherit config filePath;
|
||||
};
|
||||
}
|
||||
) imageConfigs
|
||||
);
|
||||
}
|
|
@ -42,5 +42,11 @@
|
|||
done
|
||||
'';
|
||||
|
||||
environment.defaultPackages = with pkgs; [
|
||||
rsync
|
||||
];
|
||||
|
||||
programs.git.enable = lib.mkDefault true;
|
||||
|
||||
system.stateVersion = lib.mkDefault lib.trivial.release;
|
||||
}
|
||||
|
|
|
@ -49,10 +49,6 @@
|
|||
vim
|
||||
nano
|
||||
|
||||
# Include some version control tools.
|
||||
git
|
||||
rsync
|
||||
|
||||
# Firefox for reading the manual.
|
||||
firefox
|
||||
|
||||
|
|
|
@ -335,12 +335,14 @@ let
|
|||
set textmode=true
|
||||
terminal_output console
|
||||
}
|
||||
|
||||
${lib.optionalString (config.isoImage.grubTheme != null) ''
|
||||
hiddenentry 'GUI mode' --hotkey 'g' {
|
||||
$(find ${config.isoImage.grubTheme} -iname '*.pf2' -printf "loadfont (\$root)/EFI/BOOT/grub-theme/%P\n")
|
||||
set textmode=false
|
||||
terminal_output gfxterm
|
||||
}
|
||||
|
||||
''}
|
||||
|
||||
# If the parameter iso_path is set, append the findiso parameter to the kernel
|
||||
# line. We need this to allow the nixos iso to be booted from grub directly.
|
||||
|
|
|
@ -131,18 +131,30 @@ with lib;
|
|||
|
||||
boot.loader.timeout = 10;
|
||||
|
||||
boot.postBootCommands =
|
||||
''
|
||||
# After booting, register the contents of the Nix store
|
||||
# in the Nix database in the tmpfs.
|
||||
${config.nix.package}/bin/nix-store --load-db < /nix/store/nix-path-registration
|
||||
boot.postBootCommands = ''
|
||||
# After booting, register the contents of the Nix store
|
||||
# in the Nix database in the tmpfs.
|
||||
${config.nix.package}/bin/nix-store --load-db < /nix/store/nix-path-registration
|
||||
|
||||
# nixos-rebuild also requires a "system" profile and an
|
||||
# /etc/NIXOS tag.
|
||||
touch /etc/NIXOS
|
||||
${config.nix.package}/bin/nix-env -p /nix/var/nix/profiles/system --set /run/current-system
|
||||
'';
|
||||
# nixos-rebuild also requires a "system" profile and an
|
||||
# /etc/NIXOS tag.
|
||||
touch /etc/NIXOS
|
||||
${config.nix.package}/bin/nix-env -p /nix/var/nix/profiles/system --set /run/current-system
|
||||
|
||||
# Set password for user nixos if specified on cmdline
|
||||
# Allows using nixos-anywhere in headless environments
|
||||
for o in $(</proc/cmdline); do
|
||||
case "$o" in
|
||||
live.nixos.passwordHash=*)
|
||||
set -- $(IFS==; echo $o)
|
||||
${pkgs.gnugrep}/bin/grep -q "root::" /etc/shadow && ${pkgs.shadow}/bin/usermod -p "$2" root
|
||||
;;
|
||||
live.nixos.password=*)
|
||||
set -- $(IFS==; echo $o)
|
||||
${pkgs.gnugrep}/bin/grep -q "root::" /etc/shadow && echo "root:$2" | ${pkgs.shadow}/bin/chpasswd
|
||||
;;
|
||||
esac
|
||||
done
|
||||
'';
|
||||
};
|
||||
|
||||
}
|
||||
|
|
|
@ -17,6 +17,10 @@
|
|||
sdImage = {
|
||||
populateFirmwareCommands = let
|
||||
configTxt = pkgs.writeText "config.txt" ''
|
||||
# u-boot refuses to start (gets stuck at rainbow polygon) without this,
|
||||
# at least on Raspberry Pi 0.
|
||||
enable_uart=1
|
||||
|
||||
# Prevent the firmware from smashing the framebuffer setup done by the mainline kernel
|
||||
# when attempting to show low-voltage or overtemperature warnings.
|
||||
avoid_warnings=1
|
||||
|
@ -28,7 +32,7 @@
|
|||
kernel=u-boot-rpi1.bin
|
||||
'';
|
||||
in ''
|
||||
(cd ${pkgs.raspberrypifw}/share/raspberrypi/boot && cp bootcode.bin fixup*.dat start*.elf $NIX_BUILD_TOP/firmware/)
|
||||
(cd ${pkgs.raspberrypifw}/share/raspberrypi/boot && cp bootcode.bin fixup*.dat start*.elf *.dtb $NIX_BUILD_TOP/firmware/)
|
||||
cp ${pkgs.ubootRaspberryPiZero}/u-boot.bin firmware/u-boot-rpi0.bin
|
||||
cp ${pkgs.ubootRaspberryPi}/u-boot.bin firmware/u-boot-rpi1.bin
|
||||
cp ${configTxt} firmware/config.txt
|
||||
|
|
|
@ -1,13 +1,10 @@
|
|||
{ lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
{
|
||||
|
||||
options = {
|
||||
|
||||
assertions = mkOption {
|
||||
type = types.listOf types.unspecified;
|
||||
assertions = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.unspecified;
|
||||
internal = true;
|
||||
default = [];
|
||||
example = [ { assertion = false; message = "you can't enable this for that reason"; } ];
|
||||
|
@ -18,10 +15,10 @@ with lib;
|
|||
'';
|
||||
};
|
||||
|
||||
warnings = mkOption {
|
||||
warnings = lib.mkOption {
|
||||
internal = true;
|
||||
default = [];
|
||||
type = types.listOf types.str;
|
||||
type = lib.types.listOf lib.types.str;
|
||||
example = [ "The `foo' service is deprecated and will go away soon!" ];
|
||||
description = ''
|
||||
This option allows modules to show warnings to users during
|
||||
|
|
|
@ -1,11 +1,8 @@
|
|||
{ config, lib, pkgs, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
crashdump = config.boot.crashDump;
|
||||
|
||||
kernelParams = concatStringsSep " " crashdump.kernelParams;
|
||||
kernelParams = lib.concatStringsSep " " crashdump.kernelParams;
|
||||
|
||||
in
|
||||
###### interface
|
||||
|
@ -13,8 +10,8 @@ in
|
|||
options = {
|
||||
boot = {
|
||||
crashDump = {
|
||||
enable = mkOption {
|
||||
type = types.bool;
|
||||
enable = lib.mkOption {
|
||||
type = lib.types.bool;
|
||||
default = false;
|
||||
description = ''
|
||||
If enabled, NixOS will set up a kernel that will
|
||||
|
@ -24,17 +21,17 @@ in
|
|||
It also activates the NMI watchdog.
|
||||
'';
|
||||
};
|
||||
reservedMemory = mkOption {
|
||||
reservedMemory = lib.mkOption {
|
||||
default = "128M";
|
||||
type = types.str;
|
||||
type = lib.types.str;
|
||||
description = ''
|
||||
The amount of memory reserved for the crashdump kernel.
|
||||
If you choose a too high value, dmesg will mention
|
||||
"crashkernel reservation failed".
|
||||
'';
|
||||
};
|
||||
kernelParams = mkOption {
|
||||
type = types.listOf types.str;
|
||||
kernelParams = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default = [ "1" "boot.shell_on_fail" ];
|
||||
description = ''
|
||||
Parameters that will be passed to the kernel kexec-ed on crash.
|
||||
|
@ -46,7 +43,7 @@ in
|
|||
|
||||
###### implementation
|
||||
|
||||
config = mkIf crashdump.enable {
|
||||
config = lib.mkIf crashdump.enable {
|
||||
boot = {
|
||||
postBootCommands = ''
|
||||
echo "loading crashdump kernel...";
|
||||
|
|
|
@ -367,7 +367,13 @@ in
|
|||
})
|
||||
|
||||
(mkIf cfg.doc.enable {
|
||||
environment.pathsToLink = [ "/share/doc" ];
|
||||
environment.pathsToLink = [
|
||||
"/share/doc"
|
||||
|
||||
# Legacy paths used by gtk-doc & adjacent tools.
|
||||
"/share/gtk-doc"
|
||||
"/share/devhelp"
|
||||
];
|
||||
environment.extraOutputsToInstall = [ "doc" ] ++ optional cfg.dev.enable "devdoc";
|
||||
})
|
||||
|
||||
|
|
17
third_party/nixpkgs/nixos/modules/misc/label.nix
vendored
17
third_party/nixpkgs/nixos/modules/misc/label.nix
vendored
|
@ -1,7 +1,4 @@
|
|||
{ config, lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
cfg = config.system.nixos;
|
||||
in
|
||||
|
@ -10,8 +7,8 @@ in
|
|||
|
||||
options.system = {
|
||||
|
||||
nixos.label = mkOption {
|
||||
type = types.strMatching "[a-zA-Z0-9:_\\.-]*";
|
||||
nixos.label = lib.mkOption {
|
||||
type = lib.types.strMatching "[a-zA-Z0-9:_\\.-]*";
|
||||
description = ''
|
||||
NixOS version name to be used in the names of generated
|
||||
outputs and boot labels.
|
||||
|
@ -43,8 +40,8 @@ in
|
|||
'';
|
||||
};
|
||||
|
||||
nixos.tags = mkOption {
|
||||
type = types.listOf types.str;
|
||||
nixos.tags = lib.mkOption {
|
||||
type = lib.types.listOf lib.types.str;
|
||||
default = [];
|
||||
example = [ "with-xen" ];
|
||||
description = ''
|
||||
|
@ -68,9 +65,9 @@ in
|
|||
config = {
|
||||
# This is set here rather than up there so that changing it would
|
||||
# not rebuild the manual
|
||||
system.nixos.label = mkDefault (maybeEnv "NIXOS_LABEL"
|
||||
(concatStringsSep "-" ((sort (x: y: x < y) cfg.tags)
|
||||
++ [ (maybeEnv "NIXOS_LABEL_VERSION" cfg.version) ])));
|
||||
system.nixos.label = lib.mkDefault (lib.maybeEnv "NIXOS_LABEL"
|
||||
(lib.concatStringsSep "-" ((lib.sort (x: y: x < y) cfg.tags)
|
||||
++ [ (lib.maybeEnv "NIXOS_LABEL_VERSION" cfg.version) ])));
|
||||
};
|
||||
|
||||
}
|
||||
|
|
29
third_party/nixpkgs/nixos/modules/misc/meta.nix
vendored
29
third_party/nixpkgs/nixos/modules/misc/meta.nix
vendored
|
@ -1,28 +1,25 @@
|
|||
{ lib, ... }:
|
||||
|
||||
with lib;
|
||||
|
||||
let
|
||||
maintainer = mkOptionType {
|
||||
maintainer = lib.mkOptionType {
|
||||
name = "maintainer";
|
||||
check = email: elem email (attrValues lib.maintainers);
|
||||
merge = loc: defs: listToAttrs (singleton (nameValuePair (last defs).file (last defs).value));
|
||||
check = email: lib.elem email (lib.attrValues lib.maintainers);
|
||||
merge = loc: defs: lib.listToAttrs (lib.singleton (lib.nameValuePair (lib.last defs).file (lib.last defs).value));
|
||||
};
|
||||
|
||||
listOfMaintainers = types.listOf maintainer // {
|
||||
listOfMaintainers = lib.types.listOf maintainer // {
|
||||
# Returns list of
|
||||
# { "module-file" = [
|
||||
# "maintainer1 <first@nixos.org>"
|
||||
# "maintainer2 <second@nixos.org>" ];
|
||||
# }
|
||||
merge = loc: defs:
|
||||
zipAttrs
|
||||
(flatten (imap1 (n: def: imap1 (m: def':
|
||||
lib.zipAttrs
|
||||
(lib.flatten (lib.imap1 (n: def: lib.imap1 (m: def':
|
||||
maintainer.merge (loc ++ ["[${toString n}-${toString m}]"])
|
||||
[{ inherit (def) file; value = def'; }]) def.value) defs));
|
||||
};
|
||||
|
||||
docFile = types.path // {
|
||||
docFile = lib.types.path // {
|
||||
# Returns tuples of
|
||||
# { file = "module location"; value = <path/to/doc.xml>; }
|
||||
merge = loc: defs: defs;
|
||||
|
@ -33,18 +30,18 @@ in
|
|||
options = {
|
||||
meta = {
|
||||
|
||||
maintainers = mkOption {
|
||||
maintainers = lib.mkOption {
|
||||
type = listOfMaintainers;
|
||||
internal = true;
|
||||
default = [];
|
||||
example = literalExpression ''[ lib.maintainers.all ]'';
|
||||
example = lib.literalExpression ''[ lib.maintainers.all ]'';
|
||||
description = ''
|
||||
List of maintainers of each module. This option should be defined at
|
||||
most once per module.
|
||||
'';
|
||||
};
|
||||
|
||||
doc = mkOption {
|
||||
doc = lib.mkOption {
|
||||
type = docFile;
|
||||
internal = true;
|
||||
example = "./meta.chapter.md";
|
||||
|
@ -54,8 +51,8 @@ in
|
|||
'';
|
||||
};
|
||||
|
||||
buildDocsInSandbox = mkOption {
|
||||
type = types.bool // {
|
||||
buildDocsInSandbox = lib.mkOption {
|
||||
type = lib.types.bool // {
|
||||
merge = loc: defs: defs;
|
||||
};
|
||||
internal = true;
|
||||
|
@ -72,5 +69,5 @@ in
|
|||
};
|
||||
};
|
||||
|
||||
meta.maintainers = singleton lib.maintainers.pierron;
|
||||
meta.maintainers = lib.singleton lib.maintainers.pierron;
|
||||
}
|
||||
|
|
|
@ -36,14 +36,13 @@ let
|
|||
DOCUMENTATION_URL = optionalString isNixos "https://nixos.org/learn.html";
|
||||
SUPPORT_URL = optionalString isNixos "https://nixos.org/community.html";
|
||||
BUG_REPORT_URL = optionalString isNixos "https://github.com/NixOS/nixpkgs/issues";
|
||||
ANSI_COLOR = optionalString isNixos "1;34";
|
||||
ANSI_COLOR = optionalString isNixos "0;38;2;126;186;228";
|
||||
IMAGE_ID = optionalString (config.system.image.id != null) config.system.image.id;
|
||||
IMAGE_VERSION = optionalString (config.system.image.version != null) config.system.image.version;
|
||||
VARIANT = optionalString (cfg.variantName != null) cfg.variantName;
|
||||
VARIANT_ID = optionalString (cfg.variant_id != null) cfg.variant_id;
|
||||
DEFAULT_HOSTNAME = config.networking.fqdnOrHostName;
|
||||
SUPPORT_END = "2025-06-30";
|
||||
};
|
||||
DEFAULT_HOSTNAME = config.system.nixos.distroId;
|
||||
} // cfg.extraOSReleaseArgs;
|
||||
|
||||
initrdReleaseContents = (removeAttrs osReleaseContents [ "BUILD_ID" ]) // {
|
||||
PRETTY_NAME = "${osReleaseContents.PRETTY_NAME} (Initrd)";
|
||||
|
@ -143,6 +142,26 @@ in
|
|||
default = "NixOS";
|
||||
description = "The name of the operating system vendor";
|
||||
};
|
||||
|
||||
extraOSReleaseArgs = mkOption {
|
||||
internal = true;
|
||||
type = types.attrsOf types.str;
|
||||
default = { };
|
||||
description = "Additional attributes to be merged with the /etc/os-release generator.";
|
||||
example = {
|
||||
ANSI_COLOR = "1;31";
|
||||
};
|
||||
};
|
||||
|
||||
extraLSBReleaseArgs = mkOption {
|
||||
internal = true;
|
||||
type = types.attrsOf types.str;
|
||||
default = { };
|
||||
description = "Additional attributes to be merged with the /etc/lsb-release generator.";
|
||||
example = {
|
||||
LSB_VERSION = "1.0";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
image = {
|
||||
|
@ -237,13 +256,13 @@ in
|
|||
# https://www.freedesktop.org/software/systemd/man/os-release.html for the
|
||||
# format.
|
||||
environment.etc = {
|
||||
"lsb-release".text = attrsToText {
|
||||
"lsb-release".text = attrsToText ({
|
||||
LSB_VERSION = "${cfg.release} (${cfg.codeName})";
|
||||
DISTRIB_ID = "${cfg.distroId}";
|
||||
DISTRIB_RELEASE = cfg.release;
|
||||
DISTRIB_CODENAME = toLower cfg.codeName;
|
||||
DISTRIB_DESCRIPTION = "${cfg.distroName} ${cfg.release} (${cfg.codeName})";
|
||||
};
|
||||
} // cfg.extraLSBReleaseArgs);
|
||||
|
||||
"os-release".text = attrsToText osReleaseContents;
|
||||
};
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue