Project import generated by Copybara.

GitOrigin-RevId: c757e9bd77b16ca2e03c89bf8bc9ecb28e0c06ad
This commit is contained in:
Default email 2023-11-16 04:20:00 +00:00
parent 24297e50b4
commit 2c76a4cb41
6053 changed files with 192869 additions and 121585 deletions

View file

@ -11,9 +11,6 @@
# This also holds true for GitHub teams. Since almost none of our teams have write
# permissions, you need to list all members of the team with commit access individually.
# This file
/.github/CODEOWNERS @edolstra
# GitHub actions
/.github/workflows @NixOS/Security @Mic92 @zowoq
/.github/workflows/merge-staging @FRidh
@ -22,12 +19,12 @@
/.editorconfig @Mic92 @zowoq
# Libraries
/lib @edolstra @infinisil
/lib @infinisil
/lib/systems @alyssais @ericson2314 @amjoseph-nixpkgs
/lib/generators.nix @edolstra @Profpatsch
/lib/cli.nix @edolstra @Profpatsch
/lib/debug.nix @edolstra @Profpatsch
/lib/asserts.nix @edolstra @Profpatsch
/lib/generators.nix @infinisil @Profpatsch
/lib/cli.nix @infinisil @Profpatsch
/lib/debug.nix @infinisil @Profpatsch
/lib/asserts.nix @infinisil @Profpatsch
/lib/path.* @infinisil @fricklerhandwerk
/lib/fileset @infinisil
/doc/functions/fileset.section.md @infinisil
@ -48,6 +45,8 @@
/pkgs/build-support/setup-hooks/auto-patchelf.sh @layus
/pkgs/build-support/setup-hooks/auto-patchelf.py @layus
/pkgs/pkgs-lib @infinisil
## Format generators/serializers
/pkgs/pkgs-lib/formats/libconfig @ckiee
# pkgs/by-name
/pkgs/test/nixpkgs-check-by-name @infinisil
@ -59,7 +58,7 @@
/pkgs/build-support/writers @lassulus @Profpatsch
# Nixpkgs make-disk-image
/doc/builders/images/makediskimage.section.md @raitobezarius
/doc/build-helpers/images/makediskimage.section.md @raitobezarius
/nixos/lib/make-disk-image.nix @raitobezarius
# Nixpkgs documentation
@ -116,7 +115,6 @@
/maintainers/scripts/update-python-libraries @FRidh
/pkgs/development/interpreters/python @FRidh
/doc/languages-frameworks/python.section.md @FRidh @mweinelt
/pkgs/development/tools/poetry2nix @adisbladis
/pkgs/development/interpreters/python/hooks @FRidh @jonringer
# Haskell
@ -149,6 +147,8 @@
# C compilers
/pkgs/development/compilers/gcc @amjoseph-nixpkgs
/pkgs/development/compilers/llvm @RaitoBezarius
/pkgs/development/compilers/emscripten @raitobezarius
/doc/languages-frameworks/emscripten.section.md @raitobezarius
# Audio
/nixos/modules/services/audio/botamusique.nix @mweinelt
@ -216,7 +216,7 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
/nixos/tests/knot.nix @mweinelt
# Web servers
/doc/builders/packages/nginx.section.md @raitobezarius
/doc/packages/nginx.section.md @raitobezarius
/pkgs/servers/http/nginx/ @raitobezarius
/nixos/modules/services/web-servers/nginx/ @raitobezarius
@ -269,7 +269,7 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
# Docker tools
/pkgs/build-support/docker @roberth
/nixos/tests/docker-tools* @roberth
/doc/builders/images/dockertools.section.md @roberth
/doc/build-helpers/images/dockertools.section.md @roberth
# Blockchains
/pkgs/applications/blockchains @mmahut @RaghavSood

View file

@ -7,25 +7,81 @@ assignees: ''
---
Building this package twice does not produce the bit-by-bit identical result each time, making it harder to detect CI breaches. You can read more about this at https://reproducible-builds.org/ .
<!--
Hello dear reporter,
Fixing bit-by-bit reproducibility also has additional advantages, such as avoiding hard-to-reproduce bugs, making content-addressed storage more effective and reducing rebuilds in such systems.
Thank you for bringing attention to this issue. Your insights are valuable to
us, and we appreciate the time you took to document the problem.
I wanted to kindly point out that in this issue template, it would be beneficial
to replace the placeholder `<package>` with the actual, canonical name of the
package you're reporting the issue for. Doing so will provide better context and
facilitate quicker troubleshooting for anyone who reads this issue in the
future.
Best regards
-->
Building this package multiple times does not yield bit-by-bit identical
results, complicating the detection of Continuous Integration (CI) breaches. For
more information on this issue, visit
[reproducible-builds.org](https://reproducible-builds.org/).
Fixing bit-by-bit reproducibility also has additional advantages, such as
avoiding hard-to-reproduce bugs, making content-addressed storage more effective
and reducing rebuilds in such systems.
### Steps To Reproduce
```
nix-build '<nixpkgs>' -A ... --check --keep-failed
```
In the following steps, replace `<package>` with the canonical name of the
package.
You can use `diffoscope` to analyze the differences in the output of the two builds.
#### 1. Build the package
To view the build log of the build that produced the artifact in the binary cache:
This step will build the package. Specific arguments are passed to the command
to keep the build artifacts so we can compare them in case of differences.
Execute the following command:
```
nix-store --read-log $(nix-instantiate '<nixpkgs>' -A ...)
nix-build '<nixpkgs>' -A <package> && nix-build '<nixpkgs>' -A <package> --check --keep-failed
```
Or using the new command line style:
```
nix build nixpkgs#<package> && nix build nixpkgs#<package> --rebuild --keep-failed
```
#### 2. Compare the build artifacts
If the previous command completes successfully, no differences were found and
there's nothing to do; the builds are reproducible.
If it terminates with the error message `error: derivation '<X>' may not be
deterministic: output '<Y>' differs from '<Z>'`, use `diffoscope` to investigate
the discrepancies between the two build outputs. You may need to add the
`--exclude-directory-metadata recursive` option to ignore file and directory
metadata differences (*e.g. timestamps*).
```
nix run nixpkgs#diffoscopeMinimal -- --exclude-directory-metadata recursive <Y> <Z>
```
#### 3. Examine the build log
To examine the build log, use:
```
nix-store --read-log $(nix-instantiate '<nixpkgs>' -A <package>)
```
Or with the new command line style:
```
nix log $(nix path-info --derivation nixpkgs#<package>)
```
### Additional context
(please share the relevant fragment of the diffoscope output here,
and any additional analysis you may have done)
(please share the relevant fragment of the diffoscope output here, and any
additional analysis you may have done)

View file

@ -14,7 +14,9 @@ For new packages please briefly describe the package or provide a link to its ho
- [ ] aarch64-linux
- [ ] x86_64-darwin
- [ ] aarch64-darwin
- [ ] For non-Linux: Is `sandbox = true` set in `nix.conf`? (See [Nix manual](https://nixos.org/manual/nix/stable/command-ref/conf-file.html))
- For non-Linux: Is sandboxing enabled in `nix.conf`? (See [Nix manual](https://nixos.org/manual/nix/stable/command-ref/conf-file.html))
- [ ] `sandbox = relaxed`
- [ ] `sandbox = true`
- [ ] Tested, as applicable:
- [NixOS test(s)](https://nixos.org/manual/nixos/unstable/index.html#sec-nixos-tests) (look inside [nixos/tests](https://github.com/NixOS/nixpkgs/blob/master/nixos/tests))
- and/or [package tests](https://nixos.org/manual/nixpkgs/unstable/#sec-package-tests)

View file

@ -37,6 +37,11 @@
"6.topic: fetch":
- pkgs/build-support/fetch*/**/*
"6.topic: flakes":
- '**/flake.nix'
- lib/systems/flake-systems.nix
- nixos/modules/config/nix-flakes.nix
"6.topic: GNOME":
- doc/languages-frameworks/gnome.section.md
- nixos/modules/services/desktops/gnome/**/*

View file

@ -24,7 +24,7 @@ jobs:
with:
ref: ${{ github.event.pull_request.head.sha }}
- name: Create backport PRs
uses: korthout/backport-action@v1.3.1
uses: korthout/backport-action@v2.1.1
with:
# Config README: https://github.com/korthout/backport-action#backport-action
copy_labels_pattern: 'severity:\ssecurity'

View file

@ -322,6 +322,8 @@ All the review template samples provided in this section are generic and meant a
To get more information about how to review specific parts of Nixpkgs, refer to the documents linked to in the [overview section][overview].
If a pull request contains documentation changes that might require feedback from the documentation team, ping @NixOS/documentation-team on the pull request.
If you consider yourself to have enough knowledge and experience in a topic and would like to be a long-term reviewer for related submissions, please contact the current reviewers for that topic. They will give you information about the reviewing process. The main reviewers for a topic can be hard to find as there is no list, but checking past pull requests to see who reviewed, or git-blaming the code to see who committed to that topic, can give some hints.
Container system, boot system and library changes are some examples of the pull requests fitting this category.
@ -352,8 +354,8 @@ In a case a contributor definitively leaves the Nix community, they should creat
# Flow of merged pull requests
After a pull requests is merged, it eventually makes it to the [official Hydra CI](https://hydra.nixos.org/).
Hydra regularly evaluates and builds Nixpkgs, updating [the official channels](http://channels.nixos.org/) when specific Hydra jobs succeeded.
After a pull request is merged, it eventually makes it to the [official Hydra CI](https://hydra.nixos.org/).
Hydra regularly evaluates and builds Nixpkgs, updating [the official channels](https://channels.nixos.org/) when specific Hydra jobs succeeded.
See [Nix Channel Status](https://status.nixos.org/) for the current channels and their state.
Here's a brief overview of the main Git branches and what channels they're used for:
@ -465,7 +467,7 @@ Is the change [acceptable for releases][release-acceptable] and do you wish to h
- No: Use the `master` branch, do not backport the pull request.
- Yes: Can the change be implemented the same way on the `master` and release branches?
For example, a package's major version might differ between the `master` and release branches, such that separate security patches are required.
- Yes: Use the `master` branch and [backport the pull request](#backporting-changes).
- Yes: Use the `master` branch and [backport the pull request](#how-to-backport-pull-requests).
- No: Create separate pull requests to the `master` and `release-XX.YY` branches.
Furthermore, if the change causes a [mass rebuild][mass-rebuild], use the appropriate staging branch instead:
@ -512,34 +514,19 @@ To get a sense for what changes are considered mass rebuilds, see [previously me
- If you have commits `pkg-name: oh, forgot to insert whitespace`: squash commits in this case. Use `git rebase -i`.
- Format the commit messages in the following way:
- For consistency, there should not be a period at the end of the commit message's summary line (the first line of the commit message).
```
(pkg-name | nixos/<module>): (from -> to | init at version | refactor | etc)
(Motivation for change. Link to release notes. Additional information.)
```
For consistency, there should not be a period at the end of the commit message's summary line (the first line of the commit message).
Examples:
* nginx: init at 2.0.1
* firefox: 54.0.1 -> 55.0
https://www.mozilla.org/en-US/firefox/55.0/releasenotes/
* nixos/hydra: add bazBaz option
Dual baz behavior is needed to do foo.
* nixos/nginx: refactor config generation
The old config generation system used impure shell scripts and could break in specific circumstances (see #1234).
When adding yourself as maintainer, in the same pull request, make a separate
- When adding yourself as maintainer in the same pull request, make a separate
commit with the message `maintainers: add <handle>`.
Add the commit before those making changes to the package or module.
See [Nixpkgs Maintainers](./maintainers/README.md) for details.
- Make sure you read about any commit conventions specific to the area you're touching. See:
- [Commit conventions](./pkgs/README.md#commit-conventions) for changes to `pkgs`.
- [Commit conventions](./lib/README.md#commit-conventions) for changes to `lib`.
- [Commit conventions](./nixos/README.md#commit-conventions) for changes to `nixos`.
- [Commit conventions](./doc/README.md#commit-conventions) for changes to `doc`, the Nixpkgs manual.
### Writing good commit messages
In addition to writing properly formatted commit messages, it's important to include relevant information so other developers can later understand *why* a change was made. While this information usually can be found by digging code, mailing list/Discourse archives, pull request discussions or upstream changes, it may require a lot of work.
@ -565,7 +552,7 @@ Names of files and directories should be in lowercase, with dashes between words
- Do not use tab characters, i.e. configure your editor to use soft tabs. For instance, use `(setq-default indent-tabs-mode nil)` in Emacs. Everybody has different tab settings so it's asking for trouble.
- Use `lowerCamelCase` for variable names, not `UpperCamelCase`. Note, this rule does not apply to package attribute names, which instead follow the rules in [](#sec-package-naming).
- Use `lowerCamelCase` for variable names, not `UpperCamelCase`. Note, this rule does not apply to package attribute names, which instead follow the rules in [package naming](./pkgs/README.md#package-naming).
- Function calls with attribute set arguments are written as

View file

@ -114,3 +114,24 @@ pear
watermelon
: green fruit with red flesh
```
## Commit conventions
- Make sure you read about the [commit conventions](../CONTRIBUTING.md#commit-conventions) common to Nixpkgs as a whole.
- If creating a commit purely for documentation changes, format the commit message in the following way:
```
doc: (documentation summary)
(Motivation for change, relevant links, additional information.)
```
Examples:
* doc: update the kernel config documentation to use `nix-shell`
* doc: add information about `nix-update-script`
Closes #216321.
- If the commit contains more than just documentation changes, follow the commit message format relevant for the rest of the changes.

View file

@ -0,0 +1,28 @@
# Build helpers {#part-builders}
A build helper is a function that produces derivations.
:::{.warning}
This is not to be confused with the [`builder` argument of the Nix `derivation` primitive](https://nixos.org/manual/nix/unstable/language/derivations.html), which refers to the executable that produces the build result, or [remote builder](https://nixos.org/manual/nix/stable/advanced-topics/distributed-builds.html), which refers to a remote machine that could run such an executable.
:::
Such a function is usually designed to abstract over a typical workflow for a given programming language or framework.
This allows declaring a build recipe by setting a limited number of options relevant to the particular use case instead of using the `derivation` function directly.
[`stdenv.mkDerivation`](#part-stdenv) is the most widely used build helper, and serves as a basis for many others.
In addition, it offers various options to customize parts of the builds.
There is no uniform interface for build helpers.
[Trivial build helpers](#chap-trivial-builders) and [fetchers](#chap-pkgs-fetchers) have various input types for convenience.
[Language- or framework-specific build helpers](#chap-language-support) usually follow the style of `stdenv.mkDerivation`, which accepts an attribute set or a fixed-point function taking an attribute set.
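As an illustration of these differing interfaces, here is a minimal sketch (all names and contents are placeholders) contrasting a trivial build helper, which takes positional arguments, with `stdenv.mkDerivation`, which takes an attribute set:
```nix
{ pkgs ? import <nixpkgs> {} }:

{
  # Trivial build helper: positional arguments (name, then contents).
  note = pkgs.writeText "note.txt" ''
    built by a trivial build helper
  '';

  # stdenv.mkDerivation: a single attribute set describing the build.
  hello = pkgs.stdenv.mkDerivation {
    name = "hello-sketch";
    dontUnpack = true; # no src, so skip the unpack phase
    installPhase = ''
      runHook preInstall
      mkdir -p $out/bin
      printf '#!/bin/sh\necho hello\n' > $out/bin/hello
      chmod +x $out/bin/hello
      runHook postInstall
    '';
  };
}
```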
```{=include=} chapters
build-helpers/fetchers.chapter.md
build-helpers/trivial-build-helpers.chapter.md
build-helpers/testers.chapter.md
build-helpers/special.md
build-helpers/images.md
hooks/index.md
languages-frameworks/index.md
packages/index.md
```

View file

@ -1,13 +1,28 @@
# Fetchers {#chap-pkgs-fetchers}
Building software with Nix often requires downloading source code and other files from the internet.
`nixpkgs` provides *fetchers* for different protocols and services. Fetchers are functions that simplify downloading files.
To this end, Nixpkgs provides *fetchers*: functions to obtain remote sources via various protocols and services.
Nixpkgs fetchers differ from built-in fetchers such as [`builtins.fetchTarball`](https://nixos.org/manual/nix/stable/language/builtins.html#builtins-fetchTarball):
- A built-in fetcher will download and cache files at evaluation time and produce a [store path](https://nixos.org/manual/nix/stable/glossary#gloss-store-path).
A Nixpkgs fetcher will create a ([fixed-output](https://nixos.org/manual/nix/stable/glossary#gloss-fixed-output-derivation)) [derivation](https://nixos.org/manual/nix/stable/language/derivations), and files are downloaded at build time.
- Built-in fetchers will invalidate their cache after [`tarball-ttl`](https://nixos.org/manual/nix/stable/command-ref/conf-file#conf-tarball-ttl) expires, and will require network activity to check if the cache entry is up to date.
Nixpkgs fetchers only re-download if the specified hash changes or the store object is not otherwise available.
- Built-in fetchers do not use [substituters](https://nixos.org/manual/nix/stable/command-ref/conf-file#conf-substituters).
Derivations produced by Nixpkgs fetchers will use any configured binary cache transparently.
This significantly reduces the time needed to evaluate the entirety of Nixpkgs, and allows [Hydra](https://nixos.org/hydra) to retain and re-distribute sources used by Nixpkgs in the [public binary cache](https://cache.nixos.org).
For these reasons, built-in fetchers are not allowed in Nixpkgs source code.
The following table shows an overview of the differences:
| Fetchers | Download | Output | Cache | Re-download when |
|-|-|-|-|-|
| `builtins.fetch*` | evaluation time | store path | `/nix/store`, `~/.cache/nix` | `tarball-ttl` expires, cache miss in `~/.cache/nix`, output store object not in local store |
| `pkgs.fetch*` | build time | derivation | `/nix/store`, substituters | output store object not available |
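As a minimal sketch of these two behaviors (the URL is a placeholder, and the fake hashes must be replaced with real ones):
```nix
{ pkgs ? import <nixpkgs> {} }:

{
  # Built-in fetcher: downloads at evaluation time; the result is a store path.
  atEval = builtins.fetchTarball {
    url = "https://example.org/source.tar.gz"; # placeholder URL
    sha256 = pkgs.lib.fakeSha256; # replace with the real hash
  };

  # Nixpkgs fetcher: a fixed-output derivation, downloaded at build time
  # and transparently substitutable from binary caches.
  atBuild = pkgs.fetchurl {
    url = "https://example.org/source.tar.gz"; # placeholder URL
    hash = pkgs.lib.fakeHash; # replace with the real hash
  };
}
```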
## Caveats {#chap-pkgs-fetchers-caveats}
Fetchers create [fixed output derivations](https://nixos.org/manual/nix/stable/#fixed-output-drvs) from downloaded files.
Nix can reuse the downloaded files via the hash of the resulting derivation.
The fact that the hash belongs to the Nix derivation output and not the file itself can lead to confusion.
For example, consider the following fetcher:
@ -243,21 +258,21 @@ or
***
```
## `fetchFromBittorrent` {#fetchfrombittorrent}
## `fetchtorrent` {#fetchtorrent}
`fetchFromBittorrent` expects two arguments. `url` which can either be a Magnet URI (Magnet Link) such as `magnet:?xt=urn:btih:dd8255ecdc7ca55fb0bbf81323d87062db1f6d1c` or an HTTP URL pointing to a `.torrent` file. It can also take a `config` argument which will craft a `settings.json` configuration file and give it to `transmission`, the underlying program that is performing the fetch. The available config options for `transmission` can be found [here](https://github.com/transmission/transmission/blob/main/docs/Editing-Configuration-Files.md#options)
`fetchtorrent` expects two arguments. `url` which can either be a Magnet URI (Magnet Link) such as `magnet:?xt=urn:btih:dd8255ecdc7ca55fb0bbf81323d87062db1f6d1c` or an HTTP URL pointing to a `.torrent` file. It can also take a `config` argument which will craft a `settings.json` configuration file and give it to `transmission`, the underlying program that is performing the fetch. The available config options for `transmission` can be found [here](https://github.com/transmission/transmission/blob/main/docs/Editing-Configuration-Files.md#options).
```
{ fetchFromBittorrent }:
{ fetchtorrent }:
fetchFromBittorrent {
fetchtorrent {
config = { peer-limit-global = 100; };
url = "magnet:?xt=urn:btih:dd8255ecdc7ca55fb0bbf81323d87062db1f6d1c";
sha256 = "";
}
```
### Parameters {#fetchfrombittorrent-parameters}
### Parameters {#fetchtorrent-parameters}
- `url`: Magnet URI (Magnet Link) such as `magnet:?xt=urn:btih:dd8255ecdc7ca55fb0bbf81323d87062db1f6d1c` or an HTTP URL pointing to a `.torrent` file.

View file

@ -275,7 +275,7 @@ pullImage {
`nix-prefetch-docker` command can be used to get required image parameters:
```ShellSession
$ nix run nixpkgs.nix-prefetch-docker -c nix-prefetch-docker --image-name mysql --image-tag 5
$ nix run nixpkgs#nix-prefetch-docker -- --image-name mysql --image-tag 5
```
Since a given `imageName` may transparently refer to a manifest list of images which support multiple architectures and/or operating systems, you can supply the `--os` and `--arch` arguments to specify exactly which image you want. By default it will match the OS and architecture of the host the command is run on.

View file

@ -1,11 +1,10 @@
# Special builders {#chap-special}
# Special build helpers {#chap-special}
This chapter describes several special builders.
This chapter describes several special build helpers.
```{=include=} sections
special/fhs-environments.section.md
special/makesetuphook.section.md
special/mkshell.section.md
special/darwin-builder.section.md
special/vm-tools.section.md
```

View file

@ -1,6 +1,6 @@
# pkgs.makeSetupHook {#sec-pkgs.makeSetupHook}
`pkgs.makeSetupHook` is a builder that produces hooks that go in to `nativeBuildInputs`
`pkgs.makeSetupHook` is a build helper that produces hooks that go into `nativeBuildInputs`.
## Usage {#sec-pkgs.makeSetupHook-usage}

View file

@ -1,4 +1,4 @@
# Trivial builders {#chap-trivial-builders}
# Trivial build helpers {#chap-trivial-builders}
Nixpkgs provides a couple of functions that help with building derivations. The most important one, `stdenv.mkDerivation`, has already been documented above. The following functions wrap `stdenv.mkDerivation`, making it easier to use in certain cases.
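For example, a minimal sketch using `writeShellScriptBin`, one of the helpers documented in this chapter:
```nix
{ pkgs ? import <nixpkgs> {} }:

# Wraps stdenv.mkDerivation: produces a derivation whose only output
# is bin/greet containing the given script.
pkgs.writeShellScriptBin "greet" ''
  echo "Hello from a trivial build helper"
''
```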

View file

@ -1,12 +0,0 @@
# Builders {#part-builders}
```{=include=} chapters
builders/fetchers.chapter.md
builders/trivial-builders.chapter.md
builders/testers.chapter.md
builders/special.md
builders/images.md
hooks/index.md
languages-frameworks/index.md
builders/packages/index.md
```

View file

@ -23,6 +23,7 @@ let
{ name = "sources"; description = "source filtering functions"; }
{ name = "cli"; description = "command-line serialization functions"; }
{ name = "gvariant"; description = "GVariant formatted string serialization functions"; }
{ name = "customisation"; description = "Functions to customise (derivation-related) functions, derivations, or attribute sets"; }
];
};

View file

@ -6,11 +6,8 @@ The [`lib.fileset`](#sec-functions-library-fileset) library allows you to work w
A file set is a mathematical set of local files that can be added to the Nix store for use in Nix derivations.
File sets are easy and safe to use, providing obvious and composable semantics with good error messages to prevent mistakes.
These sections apply to the entire library.
See the [function reference](#sec-functions-library-fileset) for function-specific documentation.
The file set library is currently somewhat limited but is being expanded to include more functions over time.
## Implicit coercion from paths to file sets {#sec-fileset-path-coercion}
All functions accepting file sets as arguments can also accept [paths](https://nixos.org/manual/nix/stable/language/values.html#type-path) as arguments.
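A minimal sketch of this coercion, assuming a hypothetical project with a `./src` directory and a `./default.nix` file:
```nix
{ lib ? (import <nixpkgs> {}).lib }:

lib.fileset.toSource {
  root = ./.;
  # ./src and ./default.nix are plain paths; they are implicitly coerced
  # to the file sets containing exactly those files.
  fileset = lib.fileset.unions [
    ./src
    ./default.nix
  ];
}
```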

View file

@ -6,6 +6,6 @@ You can also specify a `runtimeDependencies` variable which lists dependencies t
In certain situations you may want to run the main command (`autoPatchelf`) of the setup hook on a file or a set of directories instead of unconditionally patching all outputs. This can be done by setting the `dontAutoPatchelf` environment variable to a non-empty value.
By default `autoPatchelf` will fail as soon as any ELF file requires a dependency which cannot be resolved via the given build inputs. In some situations you might prefer to just leave missing dependencies unpatched and continue to patch the rest. This can be achieved by setting the `autoPatchelfIgnoreMissingDeps` environment variable to a non-empty value. `autoPatchelfIgnoreMissingDeps` can be set to a list like `autoPatchelfIgnoreMissingDeps = [ "libcuda.so.1" "libcudart.so.1" ];` or to simply `[ "*" ]` to ignore all missing dependencies.
By default `autoPatchelf` will fail as soon as any ELF file requires a dependency which cannot be resolved via the given build inputs. In some situations you might prefer to just leave missing dependencies unpatched and continue to patch the rest. This can be achieved by setting the `autoPatchelfIgnoreMissingDeps` environment variable to a non-empty value. `autoPatchelfIgnoreMissingDeps` can be set to a list like `autoPatchelfIgnoreMissingDeps = [ "libcuda.so.1" "libcudart.so.1" ];` or to `[ "*" ]` to ignore all missing dependencies.
The `autoPatchelf` command also recognizes a `--no-recurse` command line flag, which prevents it from recursing into subdirectories.
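A hypothetical package combining these variables might look like the following sketch (the name, source, installed file, and ignored libraries are all placeholders):
```nix
{ stdenv, autoPatchelfHook, zlib }:

stdenv.mkDerivation {
  pname = "prebuilt-tool"; # placeholder
  version = "1.0";
  src = ./prebuilt-tool-1.0.tar.gz; # placeholder prebuilt binary tarball

  nativeBuildInputs = [ autoPatchelfHook ];
  # Dependencies that autoPatchelf may resolve ELF dependencies against.
  buildInputs = [ zlib ];

  # Leave these unresolved libraries unpatched instead of failing the build.
  autoPatchelfIgnoreMissingDeps = [ "libcuda.so.1" "libcudart.so.1" ];

  installPhase = ''
    runHook preInstall
    install -Dm755 tool $out/bin/tool
    runHook postInstall
  '';
}
```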

View file

@ -25,7 +25,6 @@ perl.section.md
pkg-config.section.md
postgresql-test-hook.section.md
python.section.md
qt-4.section.md
scons.section.md
tetex-tex-live.section.md
unzip.section.md

View file

@ -1,25 +1,83 @@
# Meson {#meson}
Overrides the configure phase to run meson to generate Ninja files. To run these files, you should accompany Meson with ninja. By default, `enableParallelBuilding` is enabled as Meson supports parallel building almost everywhere.
[Meson](https://mesonbuild.com/) is an open source meta build system meant to be
fast and user-friendly.
## Variables controlling Meson {#variables-controlling-meson}
In Nixpkgs, meson comes with a setup hook that overrides the configure, check,
and install phases.
### `mesonFlags` {#mesonflags}
Being a meta build system, meson needs an accompanying backend. In the context
of Nixpkgs, the typical companion backend is [Ninja](#ninja), which provides a
setup hook registering ninja-based build and install phases.
Controls the flags passed to meson.
## Variables controlling Meson {#meson-variables-controlling}
### `mesonBuildType` {#mesonbuildtype}
### Meson Exclusive Variables {#meson-exclusive-variables}
Which [`--buildtype`](https://mesonbuild.com/Builtin-options.html#core-options) to pass to Meson. We default to `plain`.
#### `mesonFlags` {#meson-flags}
### `mesonAutoFeatures` {#mesonautofeatures}
Controls the flags passed to `meson setup` during configure phase.
What value to set [`-Dauto_features=`](https://mesonbuild.com/Builtin-options.html#core-options) to. We default to `enabled`.
#### `mesonWrapMode` {#meson-wrap-mode}
### `mesonWrapMode` {#mesonwrapmode}
Which value is passed as
[`-Dwrap_mode=`](https://mesonbuild.com/Builtin-options.html#core-options)
to `meson setup` during configure phase. In Nixpkgs the default value is
`nodownload`, so that no subproject will be downloaded (since network access is
already disabled during deployment in Nixpkgs).
What value to set [`-Dwrap_mode=`](https://mesonbuild.com/Builtin-options.html#core-options) to. We default to `nodownload` as we disallow network access.
Note: Meson allows pre-population of subprojects that would otherwise be
downloaded.
### `dontUseMesonConfigure` {#dontusemesonconfigure}
#### `mesonBuildType` {#meson-build-type}
Disables using Mesons `configurePhase`.
Which value is passed as
[`--buildtype`](https://mesonbuild.com/Builtin-options.html#core-options) to
`meson setup` during configure phase. In Nixpkgs the default value is `plain`.
#### `mesonAutoFeatures` {#meson-auto-features}
Which value is passed as
[`-Dauto_features=`](https://mesonbuild.com/Builtin-options.html#core-options)
to `meson setup` during configure phase. In Nixpkgs the default value is
`enabled`, meaning that every feature declared as "auto" by the meson scripts
will be enabled.
#### `mesonCheckFlags` {#meson-check-flags}
Controls the flags passed to `meson test` during check phase.
#### `mesonInstallFlags` {#meson-install-flags}
Controls the flags passed to `meson install` during install phase.
#### `mesonInstallTags` {#meson-install-tags}
A list of installation tags passed to Meson's commandline option
[`--tags`](https://mesonbuild.com/Installing.html#installation-tags) during
install phase.
Note: `mesonInstallTags` should be a list of strings, which will be converted
to a comma-separated string that is passed to `--tags`.
Example: `mesonInstallTags = [ "emulator" "assembler" ];` will be converted to
`--tags emulator,assembler`.
#### `dontUseMesonConfigure` {#dont-use-meson-configure}
When set to true, don't use the predefined `mesonConfigurePhase`.
#### `dontUseMesonCheck` {#dont-use-meson-check}
When set to true, don't use the predefined `mesonCheckPhase`.
#### `dontUseMesonInstall` {#dont-use-meson-install}
When set to true, don't use the predefined `mesonInstallPhase`.
### Honored variables {#meson-honored-variables}
The following variables commonly used by `stdenv.mkDerivation` are honored by
the Meson setup hook.
- `prefixKey`
- `enableParallelBuilding`
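Putting these variables together, a hypothetical Meson-based package might be sketched as follows (the name, source, and flag values are illustrative, not a real package):
```nix
{ stdenv, fetchurl, meson, ninja, pkg-config }:

stdenv.mkDerivation (finalAttrs: {
  pname = "hello-meson"; # placeholder
  version = "0.1.0";
  src = fetchurl {
    url = "https://example.org/hello-meson-${finalAttrs.version}.tar.gz"; # placeholder
    hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # placeholder
  };

  nativeBuildInputs = [ meson ninja pkg-config ];

  mesonBuildType = "debugoptimized";         # passed as --buildtype
  mesonFlags = [ "-Dtests=true" ];           # extra flags for `meson setup`
  mesonCheckFlags = [ "--print-errorlogs" ]; # extra flags for `meson test`
  mesonInstallTags = [ "runtime" "devel" ];  # becomes --tags runtime,devel

  doCheck = true;
})
```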

View file

@ -1,3 +1,5 @@
# ninja {#ninja}
Overrides the build, install, and check phases to run ninja instead of make. You can disable this behavior with the `dontUseNinjaBuild`, `dontUseNinjaInstall`, and `dontUseNinjaCheck` variables, respectively. Parallel building is enabled by default in Ninja.
Note that if the [Meson setup hook](#meson) is also active, Ninja's install and check phases will be disabled in favor of Meson's.

View file

@ -1,3 +0,0 @@
# Qt 4 {#qt-4}
Sets the `QTDIR` environment variable to Qts path.

View file

@ -146,7 +146,7 @@ agdaPackages.mkDerivation {
### Building Agda packages {#building-agda-packages}
The default build phase for `agdaPackages.mkDerivation` simply runs `agda` on the `Everything.agda` file.
The default build phase for `agdaPackages.mkDerivation` runs `agda` on the `Everything.agda` file.
If something else is needed to build the package (e.g. `make`) then the `buildPhase` should be overridden.
Additionally, a `preBuild` or `configurePhase` can be used if there are steps that need to be done prior to checking the `Everything.agda` file.
`agda` and the Agda libraries contained in `buildInputs` are made available during the build phase.
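A sketch of such an override (the package name and build steps are hypothetical):
```nix
{ agdaPackages, standard-library }:

agdaPackages.mkDerivation {
  pname = "my-agda-lib"; # placeholder
  version = "0.1";
  src = ./.;

  buildInputs = [ standard-library ];

  # Replace the default build phase (running `agda` on Everything.agda)
  # with a Makefile-driven build.
  buildPhase = ''
    runHook preBuild
    make
    runHook postBuild
  '';

  meta.description = "Sketch of an Agda package with a custom build phase";
}
```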
@ -250,7 +250,7 @@ Usually, the maintainers will answer within a week or two with a new release.
Bumping the version of that reverse dependency should be a further commit on your PR.
In the rare case that a new release is not to be expected within an acceptable time,
simply mark the broken package as broken by setting `meta.broken = true;`.
mark the broken package as broken by setting `meta.broken = true;`.
This will exclude it from the build test.
It can be added later when it is fixed,
and does not hinder the advancement of the whole package set in the meantime.

View file

@ -44,7 +44,7 @@ There is also a `buildMix` helper, whose behavior is closer to that of `buildErl
## How to Install BEAM Packages {#how-to-install-beam-packages}
BEAM builders are not registered at the top level, simply because they are not relevant to the vast majority of Nix users.
BEAM builders are not registered at the top level, because they are not relevant to the vast majority of Nix users.
To use any of those builders into your environment, refer to them by their attribute path under `beamPackages`, e.g. `beamPackages.rebar3`:
::: {.example #ex-beam-ephemeral-shell}

View file

@ -8,10 +8,12 @@ It fetches its Dart dependencies automatically through `fetchDartDeps`, and (thr
If you are packaging a Flutter desktop application, use [`buildFlutterApplication`](#ssec-dart-flutter) instead.
`vendorHash`: is the hash of the output of the dependency fetcher derivation. To obtain it, simply set it to `lib.fakeHash` (or omit it) and run the build ([more details here](#sec-source-hashes)).
`vendorHash`: is the hash of the output of the dependency fetcher derivation. To obtain it, set it to `lib.fakeHash` (or omit it) and run the build ([more details here](#sec-source-hashes)).
If the upstream source is missing a `pubspec.lock` file, you'll have to vendor one and specify it using `pubspecLockFile`. If it is needed, one will be generated for you and printed when attempting to build the derivation.
The `depsListFile` must always be provided when packaging in Nixpkgs. It will be generated and printed if you attempt to build the derivation without one. Alternatively, `autoDepsList` may be set to `true`, but only outside of Nixpkgs, as it relies on import-from-derivation.
The `dart` commands run can be overridden through `pubGetScript` and `dartCompileCommand`, you can also add flags using `dartCompileFlags` or `dartJitFlags`.
Dart supports multiple [outputs types](https://dart.dev/tools/dart-compile#types-of-output), you can choose between them using `dartOutputType` (defaults to `exe`). If you want to override the binaries path or the source path they come from, you can use `dartEntryPoints`. Outputs that require a runtime will automatically be wrapped with the relevant runtime (`dartaotruntime` for `aot-snapshot`, `dart run` for `jit-snapshot` and `kernel`, `node` for `js`), this can be overridden through `dartRuntimeCommand`.
@ -31,6 +33,7 @@ buildDartApplication rec {
};
pubspecLockFile = ./pubspec.lock;
depsListFile = ./deps.json;
vendorHash = "sha256-Atm7zfnDambN/BmmUf4BG0yUz/y6xWzf0reDw3Ad41s=";
}
```
@ -39,9 +42,7 @@ buildDartApplication rec {
The function `buildFlutterApplication` builds Flutter applications.
The deps.json file must always be provided when packaging in Nixpkgs. It will be generated and printed if the derivation is attempted to be built without one. Alternatively, `autoDepsList` may be set to `true` when outside of Nixpkgs, as it relies on import-from-derivation.
A `pubspec.lock` file must be available. See the [Dart documentation](#ssec-dart-applications) for more details.
See the [Dart documentation](#ssec-dart-applications) for more details on required files and arguments.
```nix
{ flutter, fetchFromGitHub }:

View file

@ -323,7 +323,7 @@ $ nix-shell -p haskellPackages.dhall-nixpkgs nix-prefetch-git
```
:::{.note}
`nix-prefetch-git` has to be in `$PATH` for `dhall-to-nixpkgs` to work.
`nix-prefetch-git` is added to the `nix-shell -p` invocation above, because it has to be in `$PATH` for `dhall-to-nixpkgs` to work.
:::
The utility takes care of automatically detecting remote imports and converting

View file

@ -138,7 +138,9 @@ in buildDotnetModule rec {
src = ./.;
projectFile = "src/project.sln";
nugetDeps = ./deps.nix; # File generated with `nix-build -A package.passthru.fetch-deps`.
# File generated with `nix-build -A package.passthru.fetch-deps`.
# To run fetch-deps when this file does not yet exist, set nugetDeps to null
nugetDeps = ./deps.nix;
projectReferences = [ referencedProject ]; # `referencedProject` must contain `nupkg` in the folder structure.

View file

@ -2,168 +2,159 @@
[Emscripten](https://github.com/kripken/emscripten): An LLVM-to-JavaScript Compiler
This section of the manual covers how to use `emscripten` in nixpkgs.
If you want to work with `emcc`, `emconfigure` and `emmake` as you are used to from Ubuntu and similar distributions,
Minimal requirements:
* nix
* nixpkgs
Modes of use of `emscripten`:
* **Imperative usage** (on the command line):
If you want to work with `emcc`, `emconfigure` and `emmake` as you are used to from Ubuntu and similar distributions you can use these commands:
* `nix-env -f "<nixpkgs>" -iA emscripten`
* `nix-shell -p emscripten`
* **Declarative usage**:
This mode is far more power full since this makes use of `nix` for dependency management of emscripten libraries and targets by using the `mkDerivation` which is implemented by `pkgs.emscriptenStdenv` and `pkgs.buildEmscriptenPackage`. The source for the packages is in `pkgs/top-level/emscripten-packages.nix` and the abstraction behind it in `pkgs/development/em-modules/generic/default.nix`. From the root of the nixpkgs repository:
* build and install all packages:
* `nix-env -iA emscriptenPackages`
* dev-shell for zlib implementation hacking:
* `nix-shell -A emscriptenPackages.zlib`
## Imperative usage {#imperative-usage}
```console
nix-shell -p emscripten
```
A few things to note:
* `export EMCC_DEBUG=2` is nice for debugging
* `~/.emscripten`, the build artifact cache sometimes creates issues and needs to be removed from time to time
* The build artifact cache in `~/.emscripten` sometimes creates issues and needs to be removed from time to time
## Declarative usage {#declarative-usage}
## Examples {#declarative-usage}
Let's see two different examples from `pkgs/top-level/emscripten-packages.nix`:
* `pkgs.zlib.override`
* `pkgs.buildEmscriptenPackage`
Both are interesting concepts.
A special requirement of the `pkgs.buildEmscriptenPackage` is the `doCheck = true`.
This means each Emscripten package requires that a [`checkPhase`](#ssec-check-phase) is implemented.
A special requirement of the `pkgs.buildEmscriptenPackage` is the `doCheck = true` is a default meaning that each emscriptenPackage requires a `checkPhase` implemented.
* Use `export EMCC_DEBUG=2` from within a phase to get more detailed debug output about what is going wrong.
* The cache at `~/.emscripten` requires setting `HOME=$TMPDIR` in individual phases.
This makes compilation slower but also more deterministic.
* Use `export EMCC_DEBUG=2` from within a emscriptenPackage's `phase` to get more detailed debug output what is going wrong.
* ~/.emscripten cache is requiring us to set `HOME=$TMPDIR` in individual phases. This makes compilation slower but also makes it more deterministic.
::: {.example #usage-1-pkgs.zlib.override}
### Usage 1: pkgs.zlib.override {#usage-1-pkgs.zlib.override}
# Using `pkgs.zlib.override {}`
This example uses `zlib` from nixpkgs but instead of compiling **C** to **ELF** it compiles **C** to **JS** since we were using `pkgs.zlib.override` and changed stdenv to `pkgs.emscriptenStdenv`. A few adaptions and hacks were set in place to make it working. One advantage is that when `pkgs.zlib` is updated, it will automatically update this package as well. However, this can also be the downside...
This example uses `zlib` from Nixpkgs, but instead of compiling **C** to **ELF** it compiles **C** to **JavaScript**, since we are using `pkgs.zlib.override` and have changed `stdenv` to `pkgs.emscriptenStdenv`.
See the `zlib` example:
A few adaptations and hacks were put in place to make it work.
One advantage is that when `pkgs.zlib` is updated, it will automatically update this package as well.
zlib = (pkgs.zlib.override {
stdenv = pkgs.emscriptenStdenv;
}).overrideAttrs
(old: rec {
buildInputs = old.buildInputs ++ [ pkg-config ];
# we need to reset this setting!
env = (old.env or { }) // { NIX_CFLAGS_COMPILE = ""; };
configurePhase = ''
# FIXME: Some tests require writing at $HOME
HOME=$TMPDIR
runHook preConfigure
#export EMCC_DEBUG=2
emconfigure ./configure --prefix=$out --shared
```nix
(pkgs.zlib.override {
stdenv = pkgs.emscriptenStdenv;
}).overrideAttrs
(old: rec {
buildInputs = old.buildInputs ++ [ pkg-config ];
# we need to reset this setting!
env = (old.env or { }) // { NIX_CFLAGS_COMPILE = ""; };
configurePhase = ''
# FIXME: Some tests require writing at $HOME
HOME=$TMPDIR
runHook preConfigure
runHook postConfigure
'';
dontStrip = true;
outputs = [ "out" ];
buildPhase = ''
emmake make
'';
installPhase = ''
emmake make install
'';
checkPhase = ''
echo "================= testing zlib using node ================="
#export EMCC_DEBUG=2
emconfigure ./configure --prefix=$out --shared
echo "Compiling a custom test"
set -x
emcc -O2 -s EMULATE_FUNCTION_POINTER_CASTS=1 test/example.c -DZ_SOLO \
libz.so.${old.version} -I . -o example.js
runHook postConfigure
'';
dontStrip = true;
outputs = [ "out" ];
buildPhase = ''
emmake make
'';
installPhase = ''
emmake make install
'';
checkPhase = ''
echo "================= testing zlib using node ================="
echo "Using node to execute the test"
${pkgs.nodejs}/bin/node ./example.js
echo "Compiling a custom test"
set -x
emcc -O2 -s EMULATE_FUNCTION_POINTER_CASTS=1 test/example.c -DZ_SOLO \
libz.so.${old.version} -I . -o example.js
set +x
if [ $? -ne 0 ]; then
echo "test failed for some reason"
exit 1;
else
echo "it seems to work! very good."
fi
echo "================= /testing zlib using node ================="
'';
echo "Using node to execute the test"
${pkgs.nodejs}/bin/node ./example.js
postPatch = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
substituteInPlace configure \
--replace '/usr/bin/libtool' 'ar' \
--replace 'AR="libtool"' 'AR="ar"' \
--replace 'ARFLAGS="-o"' 'ARFLAGS="-r"'
'';
});
set +x
if [ $? -ne 0 ]; then
echo "test failed for some reason"
exit 1;
else
echo "it seems to work! very good."
fi
echo "================= /testing zlib using node ================="
'';
### Usage 2: pkgs.buildEmscriptenPackage {#usage-2-pkgs.buildemscriptenpackage}
postPatch = pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
substituteInPlace configure \
--replace '/usr/bin/libtool' 'ar' \
--replace 'AR="libtool"' 'AR="ar"' \
--replace 'ARFLAGS="-o"' 'ARFLAGS="-r"'
'';
})
```
This `xmlmirror` example features a emscriptenPackage which is defined completely from this context and no `pkgs.zlib.override` is used.
:::{.example #usage-2-pkgs.buildemscriptenpackage}
xmlmirror = pkgs.buildEmscriptenPackage rec {
name = "xmlmirror";
# Using `pkgs.buildEmscriptenPackage {}`
buildInputs = [ pkg-config autoconf automake libtool gnumake libxml2 nodejs openjdk json_c ];
nativeBuildInputs = [ pkg-config zlib ];
This `xmlmirror` example features an Emscripten package that is defined entirely in this context; no `pkgs.zlib.override` is used.
src = pkgs.fetchgit {
url = "https://gitlab.com/odfplugfest/xmlmirror.git";
rev = "4fd7e86f7c9526b8f4c1733e5c8b45175860a8fd";
hash = "sha256-i+QgY+5PYVg5pwhzcDnkfXAznBg3e8sWH2jZtixuWsk=";
};
```nix
pkgs.buildEmscriptenPackage rec {
name = "xmlmirror";
configurePhase = ''
rm -f fastXmlLint.js*
# a fix for ERROR:root:For asm.js, TOTAL_MEMORY must be a multiple of 16MB, was 234217728
# https://gitlab.com/odfplugfest/xmlmirror/issues/8
sed -e "s/TOTAL_MEMORY=234217728/TOTAL_MEMORY=268435456/g" -i Makefile.emEnv
# https://github.com/kripken/emscripten/issues/6344
# https://gitlab.com/odfplugfest/xmlmirror/issues/9
sed -e "s/\$(JSONC_LDFLAGS) \$(ZLIB_LDFLAGS) \$(LIBXML20_LDFLAGS)/\$(JSONC_LDFLAGS) \$(LIBXML20_LDFLAGS) \$(ZLIB_LDFLAGS) /g" -i Makefile.emEnv
# https://gitlab.com/odfplugfest/xmlmirror/issues/11
sed -e "s/-o fastXmlLint.js/-s EXTRA_EXPORTED_RUNTIME_METHODS='[\"ccall\", \"cwrap\"]' -o fastXmlLint.js/g" -i Makefile.emEnv
'';
buildInputs = [ pkg-config autoconf automake libtool gnumake libxml2 nodejs openjdk json_c ];
nativeBuildInputs = [ pkg-config zlib ];
buildPhase = ''
HOME=$TMPDIR
make -f Makefile.emEnv
'';
src = pkgs.fetchgit {
url = "https://gitlab.com/odfplugfest/xmlmirror.git";
rev = "4fd7e86f7c9526b8f4c1733e5c8b45175860a8fd";
hash = "sha256-i+QgY+5PYVg5pwhzcDnkfXAznBg3e8sWH2jZtixuWsk=";
};
outputs = [ "out" "doc" ];
configurePhase = ''
rm -f fastXmlLint.js*
# a fix for ERROR:root:For asm.js, TOTAL_MEMORY must be a multiple of 16MB, was 234217728
# https://gitlab.com/odfplugfest/xmlmirror/issues/8
sed -e "s/TOTAL_MEMORY=234217728/TOTAL_MEMORY=268435456/g" -i Makefile.emEnv
# https://github.com/kripken/emscripten/issues/6344
# https://gitlab.com/odfplugfest/xmlmirror/issues/9
sed -e "s/\$(JSONC_LDFLAGS) \$(ZLIB_LDFLAGS) \$(LIBXML20_LDFLAGS)/\$(JSONC_LDFLAGS) \$(LIBXML20_LDFLAGS) \$(ZLIB_LDFLAGS) /g" -i Makefile.emEnv
# https://gitlab.com/odfplugfest/xmlmirror/issues/11
sed -e "s/-o fastXmlLint.js/-s EXTRA_EXPORTED_RUNTIME_METHODS='[\"ccall\", \"cwrap\"]' -o fastXmlLint.js/g" -i Makefile.emEnv
'';
installPhase = ''
mkdir -p $out/share
mkdir -p $doc/share/${name}
buildPhase = ''
HOME=$TMPDIR
make -f Makefile.emEnv
'';
cp Demo* $out/share
cp -R codemirror-5.12 $out/share
cp fastXmlLint.js* $out/share
cp *.xsd $out/share
cp *.js $out/share
cp *.xhtml $out/share
cp *.html $out/share
cp *.json $out/share
cp *.rng $out/share
cp README.md $doc/share/${name}
'';
checkPhase = ''
outputs = [ "out" "doc" ];
'';
};
installPhase = ''
mkdir -p $out/share
mkdir -p $doc/share/${name}
### Declarative debugging {#declarative-debugging}
cp Demo* $out/share
cp -R codemirror-5.12 $out/share
cp fastXmlLint.js* $out/share
cp *.xsd $out/share
cp *.js $out/share
cp *.xhtml $out/share
cp *.html $out/share
cp *.json $out/share
cp *.rng $out/share
cp README.md $doc/share/${name}
'';
checkPhase = ''
'';
}
```
:::
## Debugging {#declarative-debugging}
Use `nix-shell -I nixpkgs=/some/dir/nixpkgs -A emscriptenPackages.libz` and from there you can go through the individual steps. This makes it easy to build a good unit test or list the files of the project.
@ -174,9 +165,3 @@ Use `nix-shell -I nixpkgs=/some/dir/nixpkgs -A emscriptenPackages.libz` and from
5. `configurePhase`
6. `buildPhase`
7. ... happy hacking...
## Summary {#summary}
Using this toolchain makes it easy to leverage `nix` from NixOS, MacOSX or even Windows (WSL+ubuntu+nix). This toolchain is reproducible, behaves like the rest of the packages from nixpkgs and contains a set of well working examples to learn and adapt from.
If in trouble, ask the maintainers.

View file

@ -18,9 +18,9 @@ In the following is an example expression using `buildGoModule`, the following a
To avoid updating this field when dependencies change, run `go mod vendor` in your source repo and set `vendorHash = null;`
To obtain the actual hash, set `vendorHash = lib.fakeSha256;` and run the build ([more details here](#sec-source-hashes)).
To obtain the actual hash, set `vendorHash = lib.fakeHash;` and run the build ([more details here](#sec-source-hashes)).
- `proxyVendor`: Fetches (go mod download) and proxies the vendor directory. This is useful if your code depends on C code and `go mod tidy` does not include the needed sources to build, or if any dependency has case-insensitive conflicts which will produce platform-dependent `vendorHash` checksums.
- `modPostBuild`: Shell commands to run after the build of the goModules executes `go mod vendor`, and before calculating fixed output derivation's `vendorHash` (or `vendorSha256`). Note that if you change this attribute, you need to update `vendorHash` (or `vendorSha256`) attribute.
- `modPostBuild`: Shell commands to run after the build of the goModules executes `go mod vendor`, and before calculating fixed output derivation's `vendorHash`. Note that if you change this attribute, you need to update `vendorHash` attribute.
```nix
pet = buildGoModule rec {

View file

@ -177,7 +177,7 @@ exactly one version. Those versions need to satisfy all the version constraints
given in the `.cabal` file of your package and all its dependencies.
The [Haskell builder in nixpkgs](#haskell-mkderivation) does no such thing.
It will simply take as input packages with names off the desired dependencies
It will take as input packages with names of the desired dependencies
and just check whether they fulfill the version bounds and fail if they don't
(by default, see `jailbreak` to circumvent this).
@ -780,7 +780,7 @@ there instead.
The top level `pkgs.haskell-language-server` attribute is just a convenience
wrapper to make it possible to install HLS for multiple GHC versions at the
same time. If you know, that you only use one GHC version, e.g., in a project
specific `nix-shell` you can simply use
specific `nix-shell` you can use
`pkgs.haskellPackages.haskell-language-server` or
`pkgs.haskell.packages.*.haskell-language-server` from the package set you use.
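For instance, a minimal sketch of a shell in which the compiler and HLS come from the same package set, so their GHC versions match:
```nix
{ pkgs ? import <nixpkgs> {} }:

pkgs.mkShell {
  packages = [
    pkgs.haskellPackages.ghc
    pkgs.haskellPackages.haskell-language-server
  ];
}
```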

View file

@ -13,7 +13,7 @@ If you find you are lacking inspiration for packaging JavaScript applications, the
### Github {#javascript-finding-examples-github}
- Searching Nix files for `mkYarnPackage`: <https://github.com/search?q=mkYarnPackage+language%3ANix&type=code>
- Searching just `flake.nix` files for `mkYarnPackage`: <https://github.com/search?q=mkYarnPackage+filename%3Aflake.nix&type=code>
- Searching just `flake.nix` files for `mkYarnPackage`: <https://github.com/search?q=mkYarnPackage+path%3A**%2Fflake.nix&type=code>
### Gitlab {#javascript-finding-examples-gitlab}
@ -209,6 +209,8 @@ In the default `installPhase` set by `buildNpmPackage`, it uses `npm pack --json
* `npmPackFlags`: Flags to pass to `npm pack`.
* `npmPruneFlags`: Flags to pass to `npm prune`. Defaults to the value of `npmInstallFlags`.
* `makeWrapperArgs`: Flags to pass to `makeWrapper`, added to executable calling the generated `.js` with `node` as an interpreter. These scripts are defined in `package.json`.
* `nodejs`: The `nodejs` package to build against, using the corresponding `npm` shipped with that version of `node`. Defaults to `pkgs.nodejs`.
* `npmDeps`: The dependencies used to build the npm package. Especially useful to avoid recomputing workspace dependencies (see the sketch below).
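A hypothetical `buildNpmPackage` call using some of these attributes (all names and hashes below are placeholders):
```nix
{ buildNpmPackage, fetchFromGitHub, nodejs_20 }:

buildNpmPackage {
  pname = "example-app"; # placeholder
  version = "1.0.0";

  src = fetchFromGitHub {
    owner = "example"; # placeholder
    repo = "example-app"; # placeholder
    rev = "v1.0.0";
    hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # placeholder
  };

  npmDepsHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # placeholder

  # Build against a specific Node.js, using the npm it ships with.
  nodejs = nodejs_20;

  npmPackFlags = [ "--ignore-scripts" ];
}
```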
#### prefetch-npm-deps {#javascript-buildNpmPackage-prefetch-npm-deps}

View file

@ -66,7 +66,7 @@ buildPhase = ''
To save some work of writing Nix expressions, there is a script that imports all
the packages distributed by Quicklisp into `imported.nix`. This works by parsing
its `releases.txt` and `systems.txt` files, which are published every couple of
months on [quicklisp.org](http://beta.quicklisp.org/dist/quicklisp.txt).
months on [quicklisp.org](https://beta.quicklisp.org/dist/quicklisp.txt).
The import process is implemented in the `import` directory as Common Lisp
code in the `org.lispbuilds.nix` ASDF system. To run the script, one can
@ -268,7 +268,7 @@ getting an environment variable for `ext:getenv`. This will load the
### Loading systems {#lisp-loading-systems}
There, you can simply use `asdf:load-system`. This works by setting the right
There, you can use `asdf:load-system`. This works by setting the right
values for the `CL_SOURCE_REGISTRY`/`ASDF_OUTPUT_TRANSLATIONS` environment
variables, so that systems are found in the Nix store and pre-compiled FASLs are
loaded.
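A minimal sketch, using the `alexandria` system as an example:
```nix
{ pkgs ? import <nixpkgs> {} }:

# A wrapped SBCL whose environment points CL_SOURCE_REGISTRY and
# ASDF_OUTPUT_TRANSLATIONS at the Nix store, so that
# (asdf:load-system "alexandria") works out of the box.
pkgs.sbcl.withPackages (ps: [ ps.alexandria ])
```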

View file

@ -134,11 +134,11 @@ The site proposes two types of packages, the `rockspec` and the `src.rock`
Luarocks-based packages are generated in [pkgs/development/lua-modules/generated-packages.nix](https://github.com/NixOS/nixpkgs/tree/master/pkgs/development/lua-modules/generated-packages.nix) from
the whitelist maintainers/scripts/luarocks-packages.csv and updated by running
the script
[maintainers/scripts/update-luarocks-packages](https://github.com/NixOS/nixpkgs/tree/master/maintainers/scripts/update-luarocks-packages):
the package `luarocks-packages-updater`:
```sh
./maintainers/scripts/update-luarocks-packages update
nix-shell -p luarocks-packages-updater --run luarocks-packages-updater
```
[luarocks2nix](https://github.com/nix-community/luarocks) is a tool capable of generating nix derivations from both rockspec and src.rock (and favors the src.rock).

View file

@ -53,7 +53,7 @@ After setting `maven.buildMavenPackage`, we then do standard Java `.jar` install
Maven defines default versions for its core plugins, e.g. `maven-compiler-plugin`. If your project does not override these versions, an upgrade of Maven will change the version of the used plugins, and therefore the derivation and hash.
When `maven` is upgraded, `mvnHash` for the derivation must be updated as well: otherwise, the project will simply be built on the derivation of old plugins, and fail because the requested plugins are missing.
When `maven` is upgraded, `mvnHash` for the derivation must be updated as well: otherwise, the project will be built on the derivation of old plugins, and fail because the requested plugins are missing.
This clearly prevents automatic upgrades of Maven: a manual effort must be made throughout nixpkgs by any maintainer wishing to push the upgrades.
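A sketch of a `maven.buildMavenPackage` call illustrating this relationship (the coordinates and hashes are placeholders):
```nix
{ maven, fetchFromGitHub }:

maven.buildMavenPackage rec {
  pname = "example-maven-app"; # placeholder
  version = "1.0";

  src = fetchFromGitHub {
    owner = "example"; # placeholder
    repo = "example-maven-app"; # placeholder
    rev = "v${version}";
    hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # placeholder
  };

  # Must be refreshed whenever `maven` (and therefore its default plugin
  # versions) is upgraded; otherwise the fixed-output dependency fetch
  # will lack the newly requested plugins.
  mvnHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # placeholder
}
```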

View file

@ -58,7 +58,7 @@ php.withExtensions ({ enabled, all }:
++ [ all.imagick ])
```
To build your list of extensions from the ground up, you can simply
To build your list of extensions from the ground up, you can
ignore `enabled`:
```nix
@ -140,7 +140,7 @@ Example of building `composer` with additional extensions:
### Overriding PHP packages {#ssec-php-user-guide-overriding-packages}
`php-packages.nix` forms a scope, allowing us to override the packages defined
within. For example, to apply a patch to a `mysqlnd` extension, you can simply
within. For example, to apply a patch to a `mysqlnd` extension, you can
pass an overlay-style function to `php`'s `packageOverrides` argument:
```nix
@ -191,7 +191,7 @@ using the `bin` attribute in `composer.json`, these binaries will be
automatically linked and made accessible in the derivation. In this context,
"binaries" refer to PHP scripts that are intended to be executable.
To use the helper effectively, simply add the `vendorHash` attribute, which
To use the helper effectively, add the `vendorHash` attribute, which
enables the wrapper to handle the heavy lifting.
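A minimal sketch of such a call (the package coordinates and hashes are placeholders):
```nix
{ php, fetchFromGitHub }:

php.buildComposerProject (finalAttrs: {
  pname = "example-php-tool"; # placeholder
  version = "1.0.0";

  src = fetchFromGitHub {
    owner = "example"; # placeholder
    repo = "example-php-tool"; # placeholder
    rev = finalAttrs.version;
    hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # placeholder
  };

  # Lets the helper fetch Composer dependencies as a fixed-output derivation.
  vendorHash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="; # placeholder
})
```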
Internally, the helper operates in three stages:

View file

@ -9,9 +9,10 @@
| python27 | python2, python | CPython 2.7 |
| python38 | | CPython 3.8 |
| python39 | | CPython 3.9 |
| python310 | python3 | CPython 3.10 |
| python311 | | CPython 3.11 |
| python310 | | CPython 3.10 |
| python311 | python3 | CPython 3.11 |
| python312 | | CPython 3.12 |
| python313 | | CPython 3.13 |
| pypy27 | pypy2, pypy | PyPy2.7 |
| pypy39 | pypy3 | PyPy 3.9 |
@ -63,12 +64,14 @@ sets are
* `pkgs.python39Packages`
* `pkgs.python310Packages`
* `pkgs.python311Packages`
* `pkgs.python312Packages`
* `pkgs.python313Packages`
* `pkgs.pypyPackages`
and the aliases
* `pkgs.python2Packages` pointing to `pkgs.python27Packages`
* `pkgs.python3Packages` pointing to `pkgs.python310Packages`
* `pkgs.python3Packages` pointing to `pkgs.python311Packages`
* `pkgs.pythonPackages` pointing to `pkgs.python2Packages`
#### `buildPythonPackage` function {#buildpythonpackage-function}
@ -141,7 +144,7 @@ buildPythonPackage rec {
The `buildPythonPackage` mainly does four things:
* In the [`buildPhase`](#build-phase), it calls `${python.pythonForBuild.interpreter} setup.py bdist_wheel` to
* In the [`buildPhase`](#build-phase), it calls `${python.pythonOnBuildForHost.interpreter} setup.py bdist_wheel` to
build a wheel binary zipfile.
* In the [`installPhase`](#ssec-install-phase), it installs the wheel file using `pip install *.whl`.
* In the [`postFixup`](#var-stdenv-postFixup) phase, the `wrapPythonPrograms` bash function is called to
@ -261,7 +264,7 @@ python3MyBlas = pkgs.python3.override {
```
This is particularly useful for numpy and scipy users who want to gain speed with other blas implementations.
Note that using simply `scipy = super.scipy.override { blas = super.pkgs.mkl; };` will likely result in
Note that using `scipy = super.scipy.override { blas = super.pkgs.mkl; };` will likely result in
compilation issues, because scipy dependencies need to use the same blas implementation as well.
#### `buildPythonApplication` function {#buildpythonapplication-function}
@ -277,16 +280,16 @@ the packages with the version of the interpreter. Because this is irrelevant for
applications, the prefix is omitted.
When packaging a Python application with [`buildPythonApplication`](#buildpythonapplication-function), it should be
called with `callPackage` and passed `python` or `pythonPackages` (possibly
called with `callPackage` and passed `python3` or `python3Packages` (possibly
specifying an interpreter version), like this:
```nix
{ lib
, python3
, python3Packages
, fetchPypi
}:
python3.pkgs.buildPythonApplication rec {
python3Packages.buildPythonApplication rec {
pname = "luigi";
version = "2.7.9";
pyproject = true;
@ -297,13 +300,13 @@ python3.pkgs.buildPythonApplication rec {
};
nativeBuildInputs = [
python3.pkgs.setuptools
python3.pkgs.wheel
python3Packages.setuptools
python3Packages.wheel
];
propagatedBuildInputs = with python3.pkgs; [
tornado
python-daemon
propagatedBuildInputs = [
python3Packages.tornado
python3Packages.python-daemon
];
meta = with lib; {
@ -319,7 +322,7 @@ luigi = callPackage ../applications/networking/cluster/luigi { };
```
Since the package is an application, a consumer doesn't need to care about
Python versions or modules, which is why they don't go in `pythonPackages`.
Python versions or modules, which is why they don't go in `python3Packages`.
#### `toPythonApplication` function {#topythonapplication-function}
@ -335,7 +338,7 @@ the attribute in `python-packages.nix`, and the `toPythonApplication` shall be
applied to the reference:
```nix
youtube-dl = with pythonPackages; toPythonApplication youtube-dl;
youtube-dl = with python3Packages; toPythonApplication youtube-dl;
```
#### `toPythonModule` function {#topythonmodule-function}
@ -364,8 +367,8 @@ Saving the following as `default.nix`
```nix
with import <nixpkgs> {};
python.buildEnv.override {
extraLibs = [ pythonPackages.pyramid ];
python3.buildEnv.override {
extraLibs = [ python3Packages.pyramid ];
ignoreCollisions = true;
}
```
@ -430,7 +433,7 @@ python3.withPackages (ps: [ ps.pyramid ])
Now, `ps` is set to `python3Packages`, matching the version of the interpreter.
As [`python.withPackages`](#python.withpackages-function) simply uses [`python.buildEnv`](#python.buildenv-function) under the hood, it also
As [`python.withPackages`](#python.withpackages-function) uses [`python.buildEnv`](#python.buildenv-function) under the hood, it also
supports the `env` attribute. The `shell.nix` file from the previous section can
thus be also written like this:
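For instance, a minimal sketch of such a `shell.nix`, assuming `pyramid` as the only package:

```nix
with import <nixpkgs> {};

(python3.withPackages (ps: [ ps.pyramid ])).env
```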
@ -495,9 +498,9 @@ Given a `default.nix`:
```nix
with import <nixpkgs> {};
pythonPackages.buildPythonPackage {
python3Packages.buildPythonPackage {
name = "myproject";
buildInputs = with pythonPackages; [ pyramid ];
buildInputs = with python3Packages; [ pyramid ];
src = ./.;
}
@ -509,7 +512,7 @@ the package would be built with `nix-build`.
Shortcut to setup environments with C headers/libraries and Python packages:
```shell
nix-shell -p pythonPackages.pyramid zlib libjpeg git
nix-shell -p python3Packages.pyramid zlib libjpeg git
```
::: {.note}
@ -524,7 +527,7 @@ There is a boolean value `lib.inNixShell` set to `true` if nix-shell is invoked.
Several versions of the Python interpreter are available on Nix, as well as a
high amount of packages. The attribute `python3` refers to the default
interpreter, which is currently CPython 3.10. The attribute `python` refers to
interpreter, which is currently CPython 3.11. The attribute `python` refers to
CPython 2.7 for backwards-compatibility. It is also possible to refer to
specific versions, e.g. `python311` refers to CPython 3.11, and `pypy` refers to
the default PyPy interpreter.
@ -542,7 +545,7 @@ however, are in separate sets, with one set per interpreter version.
The interpreters have several common attributes. One of these attributes is
`pkgs`, which is a package set of Python libraries for this specific
interpreter. E.g., the `toolz` package corresponding to the default interpreter
is `python.pkgs.toolz`, and the CPython 3.11 version is `python311.pkgs.toolz`.
is `python3.pkgs.toolz`, and the CPython 3.11 version is `python311.pkgs.toolz`.
The main package set contains aliases to these package sets, e.g.
`pythonPackages` refers to `python.pkgs` and `python311Packages` to
`python311.pkgs`.
@ -679,7 +682,7 @@ b = np.array([3,4])
print(f"The dot product of {a} and {b} is: {np.dot(a, b)}")
```
Then we simply execute it, without requiring any environment setup at all!
Then we execute it, without requiring any environment setup at all!
```sh
$ ./foo.py
@ -1681,7 +1684,7 @@ of such package using the feature is `pkgs/tools/X11/xpra/default.nix`.
As workaround install it as an extra `preInstall` step:
```shell
${python.pythonForBuild.interpreter} setup.py install_data --install-dir=$out --root=$out
${python.pythonOnBuildForHost.interpreter} setup.py install_data --install-dir=$out --root=$out
sed -i '/ = data\_files/d' setup.py
```
@ -1710,7 +1713,7 @@ This is an example of a `default.nix` for a `nix-shell`, which allows to consume
a virtual environment created by `venv`, and install Python modules through
`pip` the traditional way.
Create this `default.nix` file, together with a `requirements.txt` and simply
Create this `default.nix` file, together with a `requirements.txt` and
execute `nix-shell`.
```nix
@ -1834,7 +1837,7 @@ If you need to change a package's attribute(s) from `configuration.nix` you coul
};
```
`pythonPackages.twisted` is now globally overridden.
`python3Packages.twisted` is now globally overridden.
All packages and also all NixOS services that reference `twisted`
(such as `services.buildbot-worker`) now use the new definition.
Note that `python-super` refers to the old package set and `python-self`
@ -1844,7 +1847,7 @@ To modify only a Python package set instead of a whole Python derivation, use
this snippet:
```nix
myPythonPackages = pythonPackages.override {
myPythonPackages = python3Packages.override {
overrides = self: super: {
twisted = ...;
};
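    # As a hypothetical illustration, the `...` above could be an override such as:
    #   twisted = super.twisted.overridePythonAttrs (oldAttrs: { doCheck = false; });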
@ -2024,7 +2027,9 @@ The following rules are desired to be respected:
disabled individually. Try to avoid disabling the tests altogether. In any
case, when you disable tests, leave a comment explaining why.
* Commit names of Python libraries should reflect that they are Python
libraries, so write for example `pythonPackages.numpy: 1.11 -> 1.12`.
libraries, so write for example `python311Packages.numpy: 1.11 -> 1.12`.
It is highly recommended to specify the current default version to enable
automatic builds by ofborg.
* Attribute names in `python-packages.nix` as well as `pname`s should match the
library's name on PyPI, but be normalized according to [PEP
0503](https://www.python.org/dev/peps/pep-0503/#normalized-names). This means

View file

@ -94,7 +94,7 @@ $ bundle lock
$ bundix
```
If you already have a `Gemfile.lock`, you can simply run `bundix` and it will work the same.
If you already have a `Gemfile.lock`, you can run `bundix` and it will work the same.
To update the gems in your `Gemfile.lock`, you may use the `bundix -l` flag, which will create a new `Gemfile.lock` in case the `Gemfile` has a more recent modification time.
@ -251,7 +251,7 @@ source 'https://rubygems.org' do
end
```
If you want to package a specific version, you can use the standard Gemfile syntax for that, e.g. `gem 'mdl', '0.5.0'`, but if you want the latest stable version anyway, it's easier to update by simply running the `bundle lock` and `bundix` steps again.
If you want to package a specific version, you can use the standard Gemfile syntax for that, e.g. `gem 'mdl', '0.5.0'`, but if you want the latest stable version anyway, it's easier to update by running the `bundle lock` and `bundix` steps again.
Now you can also make a `default.nix` that looks like this:
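Since the surrounding example packages `mdl` with an executable, a minimal sketch using `bundlerEnv`'s sibling `bundlerApp` could look like this (the exact attributes are assumptions; adjust `pname` and `exes` to the gem being packaged):

```nix
{ bundlerApp }:

bundlerApp {
  pname = "mdl";
  gemdir = ./.;     # directory containing Gemfile, Gemfile.lock and gemset.nix
  exes = [ "mdl" ]; # executables to expose on PATH
}
```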

View file

@ -939,3 +939,68 @@ Fenix also has examples with `buildRustPackage`,
[crane](https://github.com/ipetkov/crane),
[naersk](https://github.com/nix-community/naersk),
and cross compilation in its [Examples](https://github.com/nix-community/fenix#examples) section.
## Using `git bisect` on the Rust compiler {#using-git-bisect-on-the-rust-compiler}
Sometimes an upgrade of the Rust compiler (`rustc`) will break a
downstream package. In these situations, being able to `git bisect`
the `rustc` version history to find the offending commit is quite
useful. Nixpkgs makes it easy to do this.
First, roll back your nixpkgs to a commit in which its `rustc` was
*the most recent version which doesn't have the problem.* You'll need
to do this because of `rustc`'s extremely aggressive
version-pinning.
Next, add the following overlay, updating the Rust version to the
one in your rolled-back nixpkgs, and replacing `/git/scratch/rust`
with the path into which you have `git clone`d the `rustc` git
repository:
```nix
(final: prev: /*lib.optionalAttrs prev.stdenv.targetPlatform.isAarch64*/ {
rust_1_72 =
lib.updateManyAttrsByPath [{
path = [ "packages" "stable" ];
update = old: old.overrideScope(final: prev: {
rustc = prev.rustc.overrideAttrs (_: {
src = lib.cleanSource /git/scratch/rust;
# do *not* put passthru.isReleaseTarball=true here
});
});
}]
prev.rust_1_72;
})
```
If the problem you're troubleshooting only manifests when
cross-compiling you can uncomment the `lib.optionalAttrs` in the
example above, and replace `isAarch64` with the target that is
having problems. This will speed up your bisect quite a bit, since
the host compiler won't need to be rebuilt.
Now, you can start a `git bisect` in the directory where you checked
out the `rustc` source code. It is recommended to select the
endpoint commits by searching backwards from `origin/master` for the
*commits which added the release notes for the versions in
question.* If you set the endpoints to commits on the release
branches (i.e. the release tags), git-bisect will often get confused
by the complex merge-commit structures it will need to traverse.
The command loop you'll want to use for bisecting looks like this:
```bash
git bisect {good,bad} # depending on result of last build
git submodule update --init
CARGO_NET_OFFLINE=false cargo vendor \
--sync ./src/tools/cargo/Cargo.toml \
--sync ./src/tools/rust-analyzer/Cargo.toml \
--sync ./compiler/rustc_codegen_cranelift/Cargo.toml \
--sync ./src/bootstrap/Cargo.toml
nix-build $NIXPKGS -A package-broken-by-rust-changes
```
The `git submodule update --init` and `cargo vendor` commands above
require network access, so they can't be performed from within the
`rustc` derivation, unfortunately.

View file

@ -32,7 +32,7 @@ look for the following directories:
(If not targeting macOS, replace `macosx` with the Xcode platform name.)
- On other platforms: `lib/swift/linux/x86_64`
(Where `linux` and `x86_64` are from lowercase `uname -sm`.)
- For convenience, Nixpkgs also adds simply `lib/swift` to the search path.
- For convenience, Nixpkgs also adds `lib/swift` to the search path.
This can save a bit of work packaging Swift modules, because many Nix builds
will produce output for just one target anyway.
@ -123,7 +123,7 @@ swiftpmFlags = [ "--disable-dead-strip" ];
The default `buildPhase` already passes `-j` for parallel building.
If these two customization options are insufficient, simply provide your own
If these two customization options are insufficient, provide your own
`buildPhase` that invokes `swift build`.
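For example, a minimal sketch of such a custom build phase (the exact flags are assumptions):

```nix
buildPhase = ''
  runHook preBuild
  # assumption: a plain release build; add flags as needed
  swift build -c release -j $NIX_BUILD_CORES
  runHook postBuild
'';
```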
### Running tests {#ssec-swiftpm-running-tests}

View file

@ -2,6 +2,46 @@
Since release 15.09 there is a new TeX Live packaging that lives entirely under attribute `texlive`.
## User's guide (experimental new interface) {#sec-language-texlive-user-guide-experimental}
Release 23.11 ships with a new interface that will eventually replace `texlive.combine`.
- For basic usage, use some of the prebuilt environments available at the top level, such as `texliveBasic`, `texliveSmall`. For the full list of prebuilt environments, inspect `texlive.schemes`.
- Packages cannot be used directly but must be assembled in an environment. To create or add packages to an environment, use
```nix
texliveSmall.withPackages (ps: with ps; [ collection-langkorean algorithms cm-super ])
```
The function `withPackages` can be called multiple times to add more packages.
- **Note.** Within Nixpkgs, packages should only use prebuilt environments as inputs, such as `texliveSmall` or `texliveInfraOnly`, and should not depend directly on `texlive`. Further dependencies should be added by calling `withPackages`. This is to ensure that there is a consistent and simple way to override the inputs.
- `texlive.withPackages` uses the same logic as `buildEnv`. Only parts of a package are installed in an environment: its 'runtime' files (`tex` output), binaries (`out` output), and support files (`tlpkg` output). Moreover, man and info pages are assembled into separate `man` and `info` outputs. To add only the TeX files of a package, or its documentation (`texdoc` output), just specify the outputs:
```nix
texlive.withPackages (ps: with ps; [
texdoc # recommended package to navigate the documentation
perlPackages.LaTeXML.tex # tex files of LaTeXML, omit binaries
cm-super
cm-super.texdoc # documentation of cm-super
])
```
- All packages distributed by TeX Live, which contains most of CTAN, are available and can be found under `texlive.pkgs`:
```ShellSession
$ nix repl
nix-repl> :l <nixpkgs>
nix-repl> texlive.pkgs.[TAB]
```
Note that the packages in `texlive.pkgs` are only provided for search purposes and must not be used directly.
- **Experimental and subject to change without notice:** to add the documentation for all packages in the environment, use
```nix
texliveSmall.__overrideTeXConfig { withDocs = true; }
```
This can be applied before or after calling `withPackages`.
The function currently supports the parameters `withDocs`, `withSources`, and `requireTeXPackages`.
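For example, a sketch combining both mechanisms, chaining `withPackages` and toggling documentation (the package choices are illustrative):

```nix
((texliveSmall.withPackages (ps: [ ps.cm-super ])).__overrideTeXConfig {
  withDocs = true;
}).withPackages (ps: [ ps.algorithms ])
```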
## User's guide {#sec-language-texlive-user-guide}
- For basic usage just pull `texlive.combined.scheme-basic` for an environment with basic LaTeX support.
@ -38,6 +78,24 @@ Since release 15.09 there is a new TeX Live packaging that lives entirely under
- Note that the wrapper assumes that the result has a chance to be useful. For example, the core executables should be present, as well as some core data files. The supported way of ensuring this is by including some scheme, for example `scheme-basic`, into the combination.
- TeX Live packages are also available under `texlive.pkgs` as derivations with outputs `out`, `tex`, `texdoc`, `texsource`, `tlpkg`, `man`, `info`. They cannot be installed outside of `texlive.combine` but are available for other uses. To repackage a font, for instance, use
```nix
stdenvNoCC.mkDerivation rec {
src = texlive.pkgs.iwona;
inherit (src) pname version;
installPhase = ''
runHook preInstall
install -Dm644 fonts/opentype/nowacki/iwona/*.otf -t $out/share/fonts/opentype
runHook postInstall
'';
}
```
See `biber`, `iwona` for complete examples.
## Custom packages {#sec-language-texlive-custom-packages}
You may find that you need to use an external TeX package. A derivation for such a package has to provide the contents of the "texmf" directory in its output and provide the appropriate `tlType` attribute (one of `"run"`, `"bin"`, `"doc"`, `"source"`). Dependencies on other TeX packages can be listed in the attribute `tlDeps`.
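As a minimal sketch, a hypothetical package providing a single `.sty` file could look like this (names and paths are illustrative):

```nix
mytex = stdenvNoCC.mkDerivation {
  pname = "mytex";  # hypothetical package name
  version = "0.1";
  src = ./mytex;    # contains mytex.sty
  dontBuild = true;
  installPhase = ''
    runHook preInstall
    install -Dm644 mytex.sty -t $out/tex/latex/mytex
    runHook postInstall
  '';
  passthru.tlType = "run";
};
```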

View file

@ -9,7 +9,7 @@ preface.chapter.md
using-nixpkgs.md
lib.md
stdenv.md
builders.md
build-helpers.md
development.md
contributing.md
```

View file

@ -1,10 +1,10 @@
# darwin.linux-builder {#sec-darwin-builder}
`darwin.linux-builder` provides a way to bootstrap a Linux builder on a macOS machine.
`darwin.linux-builder` provides a way to bootstrap a Linux remote builder on a macOS machine.
This requires macOS version 12.4 or later.
The builder runs on host port 31022 by default.
The remote builder runs on host port 31022 by default.
You can change it by overriding `virtualisation.darwin-builder.hostPort`.
See the [example](#sec-darwin-builder-example-flake).
@ -15,7 +15,7 @@ words, your `/etc/nix/nix.conf` should have something like:
extra-trusted-users = <your username goes here>
```
To launch the builder, run the following flake:
To launch the remote builder, run the following flake:
```ShellSession
$ nix run nixpkgs#darwin.linux-builder
@ -57,7 +57,7 @@ builders = ssh-ng://builder@linux-builder ${ARCH}-linux /etc/nix/builder_ed25519
builders-use-substitutes = true
```
To allow Nix to connect to a builder not running on port 22, you will also need to create a new file at `/etc/ssh/ssh_config.d/100-linux-builder.conf`:
To allow Nix to connect to a remote builder not running on port 22, you will also need to create a new file at `/etc/ssh/ssh_config.d/100-linux-builder.conf`:
```
Host linux-builder
@ -130,11 +130,11 @@ $ sudo launchctl kickstart -k system/org.nixos.nix-daemon
}
```
## Reconfiguring the builder {#sec-darwin-builder-reconfiguring}
## Reconfiguring the remote builder {#sec-darwin-builder-reconfiguring}
Initially you should not change the builder configuration else you will not be
able to use the binary cache. However, after you have the builder running locally
you may use it to build a modified builder with additional storage or memory.
Initially you should not change the remote builder configuration else you will not be
able to use the binary cache. However, after you have the remote builder running locally
you may use it to build a modified remote builder with additional storage or memory.
To do this, you just need to set the `virtualisation.darwin-builder.*` parameters as
in the example below and rebuild.

View file

@ -4,6 +4,7 @@ This chapter contains information about how to use and maintain the Nix expressi
```{=include=} sections
citrix.section.md
darwin-builder.section.md
dlib.section.md
eclipse.section.md
elm.section.md

View file

@ -11,7 +11,7 @@ Nix problems and constraints:
- The `steam.sh` script in `$HOME` cannot be patched, as it is checked and rewritten by steam.
- The steam binary cannot be patched, it's also checked.
The current approach to deploy Steam in NixOS is composing a FHS-compatible chroot environment, as documented [here](http://sandervanderburg.blogspot.nl/2013/09/composing-fhs-compatible-chroot.html). This allows us to have binaries in the expected paths without disrupting the system, and to avoid patching them to work in a non FHS environment.
The current approach to deploy Steam in NixOS is composing a FHS-compatible chroot environment, as documented [here](https://sandervanderburg.blogspot.com/2013/09/composing-fhs-compatible-chroot.html). This allows us to have binaries in the expected paths without disrupting the system, and to avoid patching them to work in a non FHS environment.
## How to play {#sec-steam-play}

View file

@ -34,7 +34,7 @@ $ nix repl
map (p: p.name) pkgs.rxvt-unicode.plugins
```
Alternatively, if your shell is bash or zsh and have completion enabled, simply type `nixpkgs.rxvt-unicode.plugins.<tab>`.
Alternatively, if your shell is bash or zsh and has completion enabled, type `nixpkgs.rxvt-unicode.plugins.<tab>`.
In addition to `plugins` the options `extraDeps` and `perlDeps` can be used to install extra packages. `extraDeps` can be used, for example, to provide `xsel` (a clipboard manager) to the clipboard plugin, without installing it globally:

View file

@ -101,25 +101,62 @@ genericBuild
### Building a `stdenv` package in `nix-shell` {#sec-building-stdenv-package-in-nix-shell}
To build a `stdenv` package in a [`nix-shell`](https://nixos.org/manual/nix/unstable/command-ref/nix-shell.html), use
To build a `stdenv` package in a [`nix-shell`](https://nixos.org/manual/nix/unstable/command-ref/nix-shell.html), enter a shell, find the [phases](#sec-stdenv-phases) you wish to build, then invoke `genericBuild` manually:
Go to an empty directory, invoke `nix-shell` with the desired package, and from inside the shell, set the output variables to a writable directory:
```bash
cd "$(mktemp -d)"
nix-shell '<nixpkgs>' -A some_package
eval "${unpackPhase:-unpackPhase}"
cd $sourceRoot
eval "${patchPhase:-patchPhase}"
eval "${configurePhase:-configurePhase}"
eval "${buildPhase:-buildPhase}"
export out=$(pwd)/out
```
Next, invoke the desired parts of the build.
First, run the phases that generate a working copy of the sources, which will change directory to the sources for you:
```bash
phases="${prePhases[*]:-} unpackPhase patchPhase" genericBuild
```
Then, run more phases up until the failure is reached.
For example, if the failure is in the build phase, the following phases would be required:
```bash
phases="${preConfigurePhases[*]:-} configurePhase ${preBuildPhases[*]:-} buildPhase" genericBuild
```
Re-run a single phase as many times as necessary to examine the failure like so:
```bash
phases="buildPhase" genericBuild
```
To modify a [phase](#sec-stdenv-phases), first print it with
```bash
echo "$buildPhase"
```
Or, if that is empty, for instance, if it is using a function:
```bash
type buildPhase
```
then change it in a text editor, and paste it back to the terminal.
::: {.note}
This method may have some inconsistencies in environment variables and behaviour compared to a normal build within the [Nix build sandbox](https://nixos.org/manual/nix/unstable/language/derivations#builder-execution).
The following is a non-exhaustive list of such differences:
- `TMP`, `TMPDIR`, and similar variables likely point to non-empty directories that the build might conflict with files in.
- Output store paths are not writable, so the variables for outputs need to be overridden to writable paths.
- Other environment variables may be inconsistent with a `nix-build` either due to `nix-shell`'s initialization script or due to the use of `nix-shell` without the `--pure` option.
If the build fails differently inside the shell than in the sandbox, consider using [`breakpointHook`](#breakpointhook) and invoking `nix-build` instead.
The [`--keep-failed`](https://nixos.org/manual/nix/unstable/command-ref/conf-file#opt--keep-failed) option for `nix-build` may also be useful to examine the build directory of a failed build.
:::
## Tools provided by `stdenv` {#sec-tools-of-stdenv}
The standard environment provides the following packages:
@ -282,7 +319,7 @@ let f(h, h + 1, i) = i + (if i <= 0 then h else h)
let f(h, h + 1, i) = i + h
```
This is where “sum-like” comes in from above: We can just sum all of the host offsets to get the host offset of the transitive dependency. The target offset of the transitive dependency is simply the host offset + 1, just as it was with the dependencies composed to make this transitive one; it can be ignored as it doesn’t add any new information.
This is where “sum-like” comes in from above: We can just sum all of the host offsets to get the host offset of the transitive dependency. The target offset of the transitive dependency is the host offset + 1, just as it was with the dependencies composed to make this transitive one; it can be ignored as it doesn’t add any new information.
Because of the bounds checks, the uncommon cases are `h = t` and `h + 2 = t`. In the former case, the motivation for `mapOffset` is that since its host and target platforms are the same, no transitive dependency of it should be able to “discover” an offset greater than its reduced target offsets. `mapOffset` effectively “squashes” all its transitive dependencies’ offsets so that none will ever be greater than the target offset of the original `h = t` package. In the other case, `h + 1` is skipped over between the host and target offsets. Instead of squashing the offsets, we need to “rip” them apart so no transitive dependency’s offset is that one.
@ -491,7 +528,7 @@ If the returned array contains exactly one object (e.g. `[{}]`), all values are
```
:::
### Recursive attributes in `mkDerivation` {#mkderivation-recursive-attributes}
### Fixed-point arguments of `mkDerivation` {#mkderivation-recursive-attributes}
If you pass a function to `mkDerivation`, it will receive as its argument the final arguments, including the overrides when reinvoked via `overrideAttrs`. For example:
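A minimal sketch of the pattern (hypothetical package, local source):

```nix
stdenv.mkDerivation (finalAttrs: {
  pname = "example"; # hypothetical
  version = "1.0";
  src = ./.;
  # finalAttrs.version is the *final* value, even after overrideAttrs:
  postInstall = ''
    mkdir -p $out
    echo ${finalAttrs.version} > $out/version
  '';
})
```

Calling `.overrideAttrs (old: { version = "2.0"; })` on the result records `2.0`, because `finalAttrs` then refers to the overridden attribute set.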
@ -612,7 +649,7 @@ Zip files are unpacked using `unzip`. However, `unzip` is not in the standard en
#### Directories in the Nix store {#directories-in-the-nix-store}
These are simply copied to the current directory. The hash part of the file name is stripped, e.g. `/nix/store/1wydxgby13cz...-my-sources` would be copied to `my-sources`.
These are copied to the current directory. The hash part of the file name is stripped, e.g. `/nix/store/1wydxgby13cz...-my-sources` would be copied to `my-sources`.
Additional file types can be supported by setting the `unpackCmd` variable (see below).
@ -751,7 +788,7 @@ Hook executed at the end of the configure phase.
### The build phase {#build-phase}
The build phase is responsible for actually building the package (e.g. compiling it). The default `buildPhase` simply calls `make` if a file named `Makefile`, `makefile` or `GNUmakefile` exists in the current directory (or the `makefile` is explicitly set); otherwise it does nothing.
The build phase is responsible for actually building the package (e.g. compiling it). The default `buildPhase` calls `make` if a file named `Makefile`, `makefile` or `GNUmakefile` exists in the current directory (or the `makefile` is explicitly set); otherwise it does nothing.
#### Variables controlling the build phase {#variables-controlling-the-build-phase}
@ -1280,7 +1317,7 @@ Nix itself considers a build-time dependency as merely something that should pre
In order to alleviate this burden, the setup hook mechanism was written, where any package can include a shell script that \[by convention rather than enforcement by Nix\], any downstream reverse-dependency will source as part of its build process. That allows the downstream dependency to merely specify its dependencies, and lets those dependencies effectively initialize themselves. No boilerplate mirroring the list of dependencies is needed.
The setup hook mechanism is a bit of a sledgehammer though: a powerful feature with a broad and indiscriminate area of effect. The combination of its power and implicit use may be expedient, but isn’t without costs. Nix itself is unchanged, but the spirit of added dependencies being effect-free is violated even if the latter isn’t. For example, if a derivation path is mentioned more than once, Nix itself doesn’t care and simply makes sure the dependency derivation is already built just the same—depending is just needing something to exist, and needing is idempotent. However, a dependency specified twice will have its setup hook run twice, and that could easily change the build environment (though a well-written setup hook will therefore strive to be idempotent so this is in fact not observable). More broadly, setup hooks are anti-modular in that multiple dependencies, whether the same or different, should not interfere and yet their setup hooks may well do so.
The setup hook mechanism is a bit of a sledgehammer though: a powerful feature with a broad and indiscriminate area of effect. The combination of its power and implicit use may be expedient, but isn’t without costs. Nix itself is unchanged, but the spirit of added dependencies being effect-free is violated even if the latter isn’t. For example, if a derivation path is mentioned more than once, Nix itself doesn’t care and makes sure the dependency derivation is already built just the same—depending is just needing something to exist, and needing is idempotent. However, a dependency specified twice will have its setup hook run twice, and that could easily change the build environment (though a well-written setup hook will therefore strive to be idempotent so this is in fact not observable). More broadly, setup hooks are anti-modular in that multiple dependencies, whether the same or different, should not interfere and yet their setup hooks may well do so.
The most typical use of the setup hook is actually to add other hooks which are then run (i.e. after all the setup hooks) on each dependency. For example, the C compiler wrapper’s setup hook feeds itself flags for each dependency that contains relevant libraries and headers. This is done by defining a bash function, and appending its name to one of `envBuildBuildHooks`, `envBuildHostHooks`, `envBuildTargetHooks`, `envHostHostHooks`, `envHostTargetHooks`, or `envTargetTargetHooks`. These 6 bash variables correspond to the 6 sorts of dependencies by platform (there’s 12 total but we ignore the propagated/non-propagated axis).
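A hedged sketch of this convention (hypothetical package; assumes `writeText` from Nixpkgs):

```nix
myLibrary = stdenv.mkDerivation {
  pname = "my-library"; # hypothetical
  version = "1.0";
  src = ./.;
  # Sourced by downstream builds that list this package as a dependency
  setupHook = writeText "my-library-hook.sh" ''
    addMyLibraryFlag() {
      # $1 is the dependency currently being processed
      export MY_LIBRARY_FOUND=1
    }
    envHostTargetHooks+=(addMyLibraryFlag)
  '';
};
```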

View file

@ -77,7 +77,7 @@ In Nixpkgs, we have multiple implementations of the BLAS/LAPACK numerical linear
The Nixpkgs attribute is `openblas` for ILP64 (integer width = 64 bits) and `openblasCompat` for LP64 (integer width = 32 bits). `openblasCompat` is the default.
- [LAPACK reference](http://www.netlib.org/lapack/) (also provides BLAS and CBLAS)
- [LAPACK reference](https://www.netlib.org/lapack/) (also provides BLAS and CBLAS)
The Nixpkgs attribute is `lapack-reference`.
@ -156,7 +156,7 @@ All programs that are built with [MPI](https://en.wikipedia.org/wiki/Message_Pas
- [MVAPICH](https://mvapich.cse.ohio-state.edu/), attribute name `mvapich`
To provide MPI enabled applications that use `MPICH`, instead of the default `Open MPI`, simply use the following overlay:
To provide MPI enabled applications that use `MPICH`, instead of the default `Open MPI`, use the following overlay:
```nix
self: super:
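{
  # assumption: point the default `mpi` attribute at MPICH,
  # so every package depending on `mpi` picks it up consistently
  mpi = self.mpich;
}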

View file

@ -74,3 +74,23 @@ path/tests/prop.sh
# Run the lib.fileset tests
fileset/tests.sh
```
## Commit conventions
- Make sure you read about the [commit conventions](../CONTRIBUTING.md#commit-conventions) common to Nixpkgs as a whole.
- Format the commit messages in the following way:
```
lib.(section): (init | add additional argument | refactor | etc)
(Motivation for change. Additional information.)
```
Examples:
* lib.getExe': check arguments
* lib.fileset: Add an additional argument in the design docs
Closes #264537

View file

@ -50,4 +50,33 @@ rec {
lib.generators.toPretty {} xs}, but is: ${
lib.generators.toPretty {} val}";
/* Specialized `assertMsg` for checking if every one of `vals` is one of the elements
of the list `xs`. Useful for checking lists of supported attributes.
Example:
let sslLibraries = [ "libressl" "bearssl" ];
in assertEachOneOf "sslLibraries" sslLibraries [ "openssl" "bearssl" ]
stderr> error: each element in sslLibraries must be one of [
stderr> "openssl"
stderr> "bearssl"
stderr> ], but is: [
stderr> "libressl"
stderr> "bearssl"
stderr> ]
Type:
assertEachOneOf :: String -> List ComparableVal -> List ComparableVal -> Bool
*/
assertEachOneOf =
# The name of the variable the user entered `vals` into, for inclusion in the error message
name:
# The list of values the user provided, to be compared against the values in `xs`
vals:
# The list of valid values
xs:
assertMsg
(lib.all (val: lib.elem val xs) vals)
"each element in ${name} must be one of ${
lib.generators.toPretty {} xs}, but is: ${
lib.generators.toPretty {} vals}";
}

View file

@ -13,16 +13,7 @@ rec {
scenarios (e.g. in ~/.config/nixpkgs/config.nix). For instance,
if you want to "patch" the derivation returned by a package
function in Nixpkgs to build another version than what the
function itself provides, you can do something like this:
mySed = overrideDerivation pkgs.gnused (oldAttrs: {
name = "sed-4.2.2-pre";
src = fetchurl {
url = ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2;
hash = "sha256-MxBJRcM2rYzQYwJ5XKxhXTQByvSg5jZc5cSHEZoB2IY=";
};
patches = [];
});
function itself provides.
For another application, see build-support/vm, where this
function is used to build arbitrary derivations inside a QEMU
@ -35,6 +26,19 @@ rec {
You should in general prefer `drv.overrideAttrs` over this function;
see the nixpkgs manual for more information on overriding.
Example:
mySed = overrideDerivation pkgs.gnused (oldAttrs: {
name = "sed-4.2.2-pre";
src = fetchurl {
url = ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2;
hash = "sha256-MxBJRcM2rYzQYwJ5XKxhXTQByvSg5jZc5cSHEZoB2IY=";
};
patches = [];
});
Type:
overrideDerivation :: Derivation -> ( Derivation -> AttrSet ) -> Derivation
*/
overrideDerivation = drv: f:
let
@ -55,6 +59,10 @@ rec {
injects `override` attribute which can be used to override arguments of
the function.
Please refer to documentation on [`<pkg>.overrideDerivation`](#sec-pkg-overrideDerivation) to learn about `overrideDerivation` and caveats
related to its use.
Example:
nix-repl> x = {a, b}: { result = a + b; }
nix-repl> y = lib.makeOverridable x { a = 1; b = 2; }
@ -65,23 +73,25 @@ rec {
nix-repl> y.override { a = 10; }
{ override = «lambda»; overrideDerivation = «lambda»; result = 12; }
Please refer to "Nixpkgs Contributors Guide" section
"<pkg>.overrideDerivation" to learn about `overrideDerivation` and caveats
related to its use.
Type:
makeOverridable :: (AttrSet -> a) -> AttrSet -> a
*/
makeOverridable = f: lib.setFunctionArgs
(origArgs: let
makeOverridable = f:
let
# Creates a functor with the same arguments as f
mirrorArgs = lib.mirrorFunctionArgs f;
in
mirrorArgs (origArgs:
let
result = f origArgs;
# Creates a functor with the same arguments as f
copyArgs = g: lib.setFunctionArgs g (lib.functionArgs f);
# Changes the original arguments with (potentially a function that returns) a set of new attributes
overrideWith = newArgs: origArgs // (if lib.isFunction newArgs then newArgs origArgs else newArgs);
# Re-call the function but with different arguments
overrideArgs = copyArgs (newArgs: makeOverridable f (overrideWith newArgs));
overrideArgs = mirrorArgs (newArgs: makeOverridable f (overrideWith newArgs));
# Change the result of the function call by applying g to it
overrideResult = g: makeOverridable (copyArgs (args: g (f args))) origArgs;
overrideResult = g: makeOverridable (mirrorArgs (args: g (f args))) origArgs;
in
if builtins.isAttrs result then
result // {
@ -95,8 +105,7 @@ rec {
lib.setFunctionArgs result (lib.functionArgs result) // {
override = overrideArgs;
}
else result)
(lib.functionArgs f);
else result);
/* Call the package function in the file `fn` with the required
@ -105,20 +114,29 @@ rec {
`autoArgs`. This function is intended to be partially
parameterised, e.g.,
```nix
callPackage = callPackageWith pkgs;
pkgs = {
libfoo = callPackage ./foo.nix { };
libbar = callPackage ./bar.nix { };
};
```
If the `libbar` function expects an argument named `libfoo`, it is
automatically passed as an argument. Overrides or missing
arguments can be supplied in `args`, e.g.
```nix
libbar = callPackage ./bar.nix {
libfoo = null;
enableX11 = true;
};
```
<!-- TODO: Apply "Example:" tag to the examples above -->
Type:
callPackageWith :: AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a
*/
callPackageWith = autoArgs: fn: args:
let
@ -129,7 +147,7 @@ rec {
# This includes automatic ones and ones passed explicitly
allArgs = builtins.intersectAttrs fargs autoArgs // args;
# A list of argument names that the function requires, but
# a list of argument names that the function requires, but
# wouldn't be passed to it
missingArgs = lib.attrNames
# Filter out arguments that have a default value
@ -176,7 +194,11 @@ rec {
/* Like callPackage, but for a function that returns an attribute
set of derivations. The override function is added to the
individual attributes. */
individual attributes.
Type:
callPackagesWith :: AttrSet -> ((AttrSet -> AttrSet) | Path) -> AttrSet -> AttrSet
*/
callPackagesWith = autoArgs: fn: args:
let
f = if lib.isFunction fn then fn else import fn;
@ -193,7 +215,11 @@ rec {
/* Add attributes to each output of a derivation without changing
the derivation itself and check a given condition when evaluating. */
the derivation itself and check a given condition when evaluating.
Type:
extendDerivation :: Bool -> Any -> Derivation -> Derivation
*/
extendDerivation = condition: passthru: drv:
let
outputs = drv.outputs or [ "out" ];
@ -227,7 +253,11 @@ rec {
/* Strip a derivation of all non-essential attributes, returning
only those needed by hydra-eval-jobs. Also strictly evaluate the
result to ensure that there are no thunks kept alive to prevent
garbage collection. */
garbage collection.
Type:
hydraJob :: (Derivation | Null) -> (Derivation | Null)
*/
hydraJob = drv:
let
outputs = drv.outputs or ["out"];
@ -265,7 +295,11 @@ rec {
called with the overridden packages. The package sets may be
hierarchical: the packages in the set are called with the scope
provided by `newScope` and the set provides a `newScope` attribute
which can form the parent scope for later package sets. */
which can form the parent scope for later package sets.
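Example:
(a sketch; attribute values would normally be derivations)
nix-repl> s = lib.makeScope pkgs.newScope (self: {
a = self.callPackage ({ }: "a") { };
b = self.callPackage ({ a }: "b-uses-" + a) { };
})
nix-repl> s.b
"b-uses-a"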
Type:
makeScope :: (AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a) -> (AttrSet -> AttrSet) -> AttrSet
*/
makeScope = newScope: f:
let self = f self // {
newScope = scope: newScope (self // scope);
@ -287,13 +321,48 @@ rec {
{ inherit otherSplices keep extra f; };
/* Like makeScope, but aims to support cross compilation. It's still ugly, but
hopefully it helps a little bit. */
hopefully it helps a little bit.
Type:
makeScopeWithSplicing' ::
{ splicePackages :: Splice -> AttrSet
, newScope :: AttrSet -> ((AttrSet -> a) | Path) -> AttrSet -> a
}
-> { otherSplices :: Splice, keep :: AttrSet -> AttrSet, extra :: AttrSet -> AttrSet }
-> AttrSet
Splice ::
{ pkgsBuildBuild :: AttrSet
, pkgsBuildHost :: AttrSet
, pkgsBuildTarget :: AttrSet
, pkgsHostHost :: AttrSet
, pkgsHostTarget :: AttrSet
, pkgsTargetTarget :: AttrSet
}
*/
makeScopeWithSplicing' =
{ splicePackages
, newScope
}:
{ otherSplices
# Attrs from `self` which won't be spliced.
# Avoid using keep, it's only used for a python hook workaround, added in PR #104201.
# ex: `keep = (self: { inherit (self) aAttr; })`
, keep ? (_self: {})
# Additional attrs to add to the set's `callPackage`.
# When the package is from a subset (but not a subset within a package, see #211340)
# within `spliced0` it will be spliced.
# When using a package outside the set but it's available from `pkgs`, use the package from `pkgs.__splicedPackages`.
# If the package is not available within the set or in `pkgs`, such as a package in a let binding, it will not be spliced
# ex:
# ```
# nix-repl> darwin.apple_sdk.frameworks.CoreFoundation
# «derivation ...CoreFoundation-11.0.0.drv»
# nix-repl> darwin.CoreFoundation
# error: attribute 'CoreFoundation' missing
# nix-repl> darwin.callPackage ({ CoreFoundation }: CoreFoundation) { }
# «derivation ...CoreFoundation-11.0.0.drv»
# ```
, extra ? (_spliced0: {})
, f
}:

View file

@ -74,7 +74,7 @@ let
importJSON importTOML warn warnIf warnIfNot throwIf throwIfNot checkListOfEnum
info showWarnings nixpkgsVersion version isInOldestRelease
mod compare splitByAndCompare
functionArgs setFunctionArgs isFunction toFunction
functionArgs setFunctionArgs isFunction toFunction mirrorFunctionArgs
toHexString toBaseDigits inPureEvalMode;
inherit (self.fixedPoints) fix fix' converge extends composeExtensions
composeManyExtensions makeExtensible makeExtensibleWithCustomName;
@ -92,7 +92,7 @@ let
concatMap flatten remove findSingle findFirst any all count
optional optionals toList range replicate partition zipListsWith zipLists
reverseList listDfs toposort sort naturalSort compareLists take
drop sublist last init crossLists unique intersectLists
drop sublist last init crossLists unique allUnique intersectLists
subtractLists mutuallyExclusive groupBy groupBy';
inherit (self.strings) concatStrings concatMapStrings concatImapStrings
intersperse concatStringsSep concatMapStringsSep

View file

@ -225,6 +225,9 @@ Arguments:
This use case makes little sense for files that are already in the store.
This should be a separate abstraction as e.g. `pkgs.drvLayout` instead, which could have a similar interface but be specific to derivations.
Additional capabilities could be supported that can't be done at evaluation time, such as renaming files, creating new directories, setting executable bits, etc.
- (+) An API for filtering/transforming Nix store paths could be much more powerful,
because it's not limited to just what is possible at evaluation time with `builtins.path`.
Operations such as moving and adding files would be supported.
### Single files
@ -235,11 +238,22 @@ Arguments:
And it would be unclear how the library should behave if the one file wouldn't be added to the store:
`toSource { root = ./file.nix; fileset = <empty>; }` has no reasonable result because returing an empty store path wouldn't match the file type, and there's no way to have an empty file store path, whatever that would mean.
### `fileFilter` takes a path
The `fileFilter` function takes a path, and not a file set, as its second argument.
- (-) Makes it harder to compose functions, since the file set type, the return value, can't be passed to the function itself like `fileFilter predicate fileset`
- (+) It's still possible to use `intersection` to filter on file sets: `intersection fileset (fileFilter predicate ./.)`
- (-) This does need an extra `./.` argument that's not obvious
- (+) This could always be `/.` or the project directory, `intersection` will make it lazy
- (+) In the future this will allow `fileFilter` to support a predicate property like `subpath` and/or `components` in a reproducible way.
This wouldn't be possible if it took a file set, because file sets don't have a predictable absolute path.
- (-) What about the base path?
- (+) That can change depending on which files are included, so if it's used for `fileFilter`
it would change the `subpath`/`components` value depending on which files are included.
- (+) If necessary, this restriction can be relaxed later, the opposite wouldn't be possible
## To update in the future
Here's a list of places in the library that need to be updated in the future:
- > The file set library is currently somewhat limited but is being expanded to include more functions over time.
in [the manual](../../doc/functions/fileset.section.md)
- If/Once a function to convert `lib.sources` values into file sets exists, the `_coerce` and `toSource` functions should be updated to mention that function in the error when such a value is passed
- If/Once a function exists that can optionally include a path depending on whether it exists, the error message for the path not existing in `_coerce` should mention the new function

View file

@ -3,19 +3,27 @@ let
inherit (import ./internal.nix { inherit lib; })
_coerce
_singleton
_coerceMany
_toSourceFilter
_fromSourceFilter
_unionMany
_fileFilter
_printFileset
_intersection
_difference
_mirrorStorePath
_fetchGitSubmodulesMinver
;
inherit (builtins)
isBool
isList
isPath
pathExists
seq
typeOf
nixVersion
;
inherit (lib.lists)
@ -30,6 +38,7 @@ let
inherit (lib.strings)
isStringLike
versionOlder
;
inherit (lib.filesystem)
@ -41,7 +50,9 @@ let
;
inherit (lib.trivial)
isFunction
pipe
inPureEvalMode
;
in {
@ -119,11 +130,10 @@ in {
Paths in [strings](https://nixos.org/manual/nix/stable/language/values.html#type-string), including Nix store paths, cannot be passed as `root`.
`root` has to be a directory.
<!-- Ignore the indentation here, this is a nixdoc rendering bug that needs to be fixed: https://github.com/nix-community/nixdoc/issues/75 -->
:::{.note}
Changing `root` only affects the directory structure of the resulting store path, it does not change which files are added to the store.
The only way to change which files get added to the store is by changing the `fileset` attribute.
:::
:::{.note}
Changing `root` only affects the directory structure of the resulting store path, it does not change which files are added to the store.
The only way to change which files get added to the store is by changing the `fileset` attribute.
:::
*/
root,
/*
@ -132,10 +142,9 @@ The only way to change which files get added to the store is by changing the `fi
This argument can also be a path,
which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
<!-- Ignore the indentation here, this is a nixdoc rendering bug that needs to be fixed: https://github.com/nix-community/nixdoc/issues/75 -->
:::{.note}
If a directory does not recursively contain any file, it is omitted from the store path contents.
:::
:::{.note}
If a directory does not recursively contain any file, it is omitted from the store path contents.
:::
*/
fileset,
@ -151,9 +160,14 @@ If a directory does not recursively contain any file, it is omitted from the sto
sourceFilter = _toSourceFilter fileset;
in
if ! isPath root then
if isStringLike root then
if root ? _isLibCleanSourceWith then
throw ''
lib.fileset.toSource: `root` ("${toString root}") is a string-like value, but it should be a path instead.
lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
Note that this only works for sources created from paths.''
else if isStringLike root then
throw ''
lib.fileset.toSource: `root` (${toString root}) is a string-like value, but it should be a path instead.
Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
else
throw ''
@ -162,13 +176,13 @@ If a directory does not recursively contain any file, it is omitted from the sto
# See also ../path/README.md
else if ! fileset._internalIsEmptyWithoutBase && rootFilesystemRoot != filesetFilesystemRoot then
throw ''
lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` ("${toString root}"):
`root`: root "${toString rootFilesystemRoot}"
`fileset`: root "${toString filesetFilesystemRoot}"
Different roots are not supported.''
lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` (${toString root}):
`root`: Filesystem root is "${toString rootFilesystemRoot}"
`fileset`: Filesystem root is "${toString filesetFilesystemRoot}"
Different filesystem roots are not supported.''
else if ! pathExists root then
throw ''
lib.fileset.toSource: `root` (${toString root}) does not exist.''
lib.fileset.toSource: `root` (${toString root}) is a path that does not exist.''
else if pathType root != "directory" then
throw ''
lib.fileset.toSource: `root` (${toString root}) is a file, but it should be a directory instead. Potential solutions:
@ -180,13 +194,82 @@ If a directory does not recursively contain any file, it is omitted from the sto
- Set `root` to ${toString fileset._internalBase} or any directory higher up. This changes the layout of the resulting store path.
- Set `fileset` to a file set that cannot contain files outside the `root` (${toString root}). This could change the files included in the result.''
else
builtins.seq sourceFilter
seq sourceFilter
cleanSourceWith {
name = "source";
src = root;
filter = sourceFilter;
};
/*
Create a file set with the same files as a `lib.sources`-based value.
This does not import any of the files into the store.
This can be used to gradually migrate from `lib.sources`-based filtering to `lib.fileset`.
A file set can be turned back into a source using [`toSource`](#function-library-lib.fileset.toSource).
:::{.note}
File sets cannot represent empty directories.
Turning the result of this function back into a source using `toSource` will therefore not preserve empty directories.
:::
Type:
fromSource :: SourceLike -> FileSet
Example:
# There's no cleanSource-like function for file sets yet,
# but we can just convert cleanSource to a file set and use it that way
toSource {
root = ./.;
fileset = fromSource (lib.sources.cleanSource ./.);
}
# Keeping a previous sourceByRegex (which could be migrated to `lib.fileset.unions`),
# but removing a subdirectory using file set functions
difference
(fromSource (lib.sources.sourceByRegex ./. [
"^README\.md$"
# This regex includes everything in ./doc
"^doc(/.*)?$"
]))
./doc/generated
# Use cleanSource, but limit it to only include ./Makefile and files under ./src
intersection
(fromSource (lib.sources.cleanSource ./.))
(unions [
./Makefile
./src
]);
*/
fromSource = source:
let
# This function uses `._isLibCleanSourceWith`, `.origSrc` and `.filter`,
# which are technically internal to lib.sources,
# but we'll allow this since both libraries are in the same code base
# and this function is a bridge between them.
isFiltered = source ? _isLibCleanSourceWith;
path = if isFiltered then source.origSrc else source;
in
# We can only support sources created from paths
if ! isPath path then
if isStringLike path then
throw ''
lib.fileset.fromSource: The source origin of the argument is a string-like value ("${toString path}"), but it should be a path instead.
Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.''
else
throw ''
lib.fileset.fromSource: The source origin of the argument is of type ${typeOf path}, but it should be a path instead.''
else if ! pathExists path then
throw ''
lib.fileset.fromSource: The source origin (${toString path}) of the argument does not exist.''
else if isFiltered then
_fromSourceFilter path source.filter
else
# If there's no filter, no need to run the expensive conversion, all subpaths will be included
_singleton path;
/*
The file set containing all files that are in either of two given file sets.
This is the same as [`unions`](#function-library-lib.fileset.unions),
@ -220,11 +303,11 @@ If a directory does not recursively contain any file, it is omitted from the sto
_unionMany
(_coerceMany "lib.fileset.union" [
{
context = "first argument";
context = "First argument";
value = fileset1;
}
{
context = "second argument";
context = "Second argument";
value = fileset2;
}
]);
@ -266,18 +349,79 @@ If a directory does not recursively contain any file, it is omitted from the sto
# which get [implicitly coerced to file sets](#sec-fileset-path-coercion).
filesets:
if ! isList filesets then
throw "lib.fileset.unions: Expected argument to be a list, but got a ${typeOf filesets}."
throw ''
lib.fileset.unions: Argument is of type ${typeOf filesets}, but it should be a list instead.''
else
pipe filesets [
# Annotate the elements with context, used by _coerceMany for better errors
(imap0 (i: el: {
context = "element ${toString i}";
context = "Element ${toString i}";
value = el;
}))
(_coerceMany "lib.fileset.unions")
_unionMany
];
/*
Filter a file set to only contain files matching some predicate.
Type:
fileFilter ::
({
name :: String,
type :: String,
...
} -> Bool)
-> Path
-> FileSet
Example:
# Include all regular `default.nix` files in the current directory
fileFilter (file: file.name == "default.nix") ./.
# Include all non-Nix files from the current directory
fileFilter (file: ! hasSuffix ".nix" file.name) ./.
# Include all files that start with a "." in the current directory
fileFilter (file: hasPrefix "." file.name) ./.
# Include all regular files (not symlinks or others) in the current directory
fileFilter (file: file.type == "regular") ./.
*/
fileFilter =
/*
The predicate function to call on all files contained in the given file set.
A file is included in the resulting file set if this function returns true for it.
This function is called with an attribute set containing these attributes:
- `name` (String): The name of the file
- `type` (String, one of `"regular"`, `"symlink"` or `"unknown"`): The type of the file.
This matches the result of calling [`builtins.readFileType`](https://nixos.org/manual/nix/stable/language/builtins.html#builtins-readFileType) on the file's path.
Other attributes may be added in the future.
*/
predicate:
# The path whose files to filter
path:
if ! isFunction predicate then
throw ''
lib.fileset.fileFilter: First argument is of type ${typeOf predicate}, but it should be a function instead.''
else if ! isPath path then
if path._type or "" == "fileset" then
throw ''
lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
If you need to filter files in a file set, use `intersection fileset (fileFilter pred ./.)` instead.''
else
throw ''
lib.fileset.fileFilter: Second argument is of type ${typeOf path}, but it should be a path instead.''
else if ! pathExists path then
throw ''
lib.fileset.fileFilter: Second argument (${toString path}) is a path that does not exist.''
else
_fileFilter predicate path;
/*
The file set containing all files that are in both of two given file sets.
See also [Intersection (set theory)](https://en.wikipedia.org/wiki/Intersection_(set_theory)).
@ -304,11 +448,11 @@ If a directory does not recursively contain any file, it is omitted from the sto
let
filesets = _coerceMany "lib.fileset.intersection" [
{
context = "first argument";
context = "First argument";
value = fileset1;
}
{
context = "second argument";
context = "Second argument";
value = fileset2;
}
];
@ -317,6 +461,58 @@ If a directory does not recursively contain any file, it is omitted from the sto
(elemAt filesets 0)
(elemAt filesets 1);
/*
The file set containing all files from the first file set that are not in the second file set.
See also [Difference (set theory)](https://en.wikipedia.org/wiki/Complement_(set_theory)#Relative_complement).
The given file sets are evaluated as lazily as possible,
with the first argument being evaluated first if needed.
Type:
difference :: FileSet -> FileSet -> FileSet
Example:
# Create a file set containing all files from the current directory,
# except ones under ./tests
difference ./. ./tests
let
# A set of Nix-related files
nixFiles = unions [ ./default.nix ./nix ./tests/default.nix ];
in
# Create a file set containing all files under ./tests, except ones in `nixFiles`,
# meaning only without ./tests/default.nix
difference ./tests nixFiles
*/
difference =
# The positive file set.
# The result can only contain files that are also in this file set.
#
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
positive:
# The negative file set.
# The result will never contain files that are also in this file set.
#
# This argument can also be a path,
# which gets [implicitly coerced to a file set](#sec-fileset-path-coercion).
negative:
let
filesets = _coerceMany "lib.fileset.difference" [
{
context = "First argument (positive set)";
value = positive;
}
{
context = "Second argument (negative set)";
value = negative;
}
];
in
_difference
(elemAt filesets 0)
(elemAt filesets 1);
/*
Incrementally evaluate and trace a file set in a pretty way.
This function is only intended for debugging purposes.
@ -352,7 +548,7 @@ If a directory does not recursively contain any file, it is omitted from the sto
let
# "fileset" would be a better name, but that would clash with the argument name,
# and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
actualFileset = _coerce "lib.fileset.trace: argument" fileset;
actualFileset = _coerce "lib.fileset.trace: Argument" fileset;
in
seq
(_printFileset actualFileset)
@ -399,11 +595,118 @@ If a directory does not recursively contain any file, it is omitted from the sto
let
# "fileset" would be a better name, but that would clash with the argument name,
# and we cannot change that because of https://github.com/nix-community/nixdoc/issues/76
actualFileset = _coerce "lib.fileset.traceVal: argument" fileset;
actualFileset = _coerce "lib.fileset.traceVal: Argument" fileset;
in
seq
(_printFileset actualFileset)
# We could also return the original fileset argument here,
# but that would then duplicate work for consumers of the fileset, because then they have to coerce it again
actualFileset;
/*
Create a file set containing all [Git-tracked files](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository) in a repository.
This function behaves like [`gitTrackedWith { }`](#function-library-lib.fileset.gitTrackedWith) - using the defaults.
Type:
gitTracked :: Path -> FileSet
Example:
# Include all files tracked by the Git repository in the current directory
gitTracked ./.
# Include only files tracked by the Git repository in the parent directory
# that are also in the current directory
intersection ./. (gitTracked ../.)
*/
gitTracked =
/*
The [path](https://nixos.org/manual/nix/stable/language/values#type-path) to the working directory of a local Git repository.
This directory must contain a `.git` file or subdirectory.
*/
path:
# See the gitTrackedWith implementation for more explanatory comments
let
fetchResult = builtins.fetchGit path;
in
if inPureEvalMode then
throw "lib.fileset.gitTracked: This function is currently not supported in pure evaluation mode, since it currently relies on `builtins.fetchGit`. See https://github.com/NixOS/nix/issues/9292."
else if ! isPath path then
throw "lib.fileset.gitTracked: Expected the argument to be a path, but it's a ${typeOf path} instead."
else if ! pathExists (path + "/.git") then
throw "lib.fileset.gitTracked: Expected the argument (${toString path}) to point to a local working tree of a Git repository, but it's not."
else
_mirrorStorePath path fetchResult.outPath;
/*
Create a file set containing all [Git-tracked files](https://git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository) in a repository.
The first argument allows configuration with an attribute set,
while the second argument is the path to the Git working tree.
If you don't need the configuration,
you can use [`gitTracked`](#function-library-lib.fileset.gitTracked) instead.
This is equivalent to the result of [`unions`](#function-library-lib.fileset.unions) on all files returned by [`git ls-files`](https://git-scm.com/docs/git-ls-files)
(which uses [`--cached`](https://git-scm.com/docs/git-ls-files#Documentation/git-ls-files.txt--c) by default).
:::{.warning}
Currently this function is based on [`builtins.fetchGit`](https://nixos.org/manual/nix/stable/language/builtins.html#builtins-fetchGit).
As such, this function causes all Git-tracked files to be unnecessarily added to the Nix store,
without being re-usable by [`toSource`](#function-library-lib.fileset.toSource).
This may change in the future.
:::
Type:
gitTrackedWith :: { recurseSubmodules :: Bool ? false } -> Path -> FileSet
Example:
# Include all files tracked by the Git repository in the current directory
# and any submodules under it
gitTracked { recurseSubmodules = true; } ./.
*/
gitTrackedWith =
{
/*
(optional, default: `false`) Whether to recurse into [Git submodules](https://git-scm.com/book/en/v2/Git-Tools-Submodules) to also include their tracked files.
If `true`, this is equivalent to passing the [--recurse-submodules](https://git-scm.com/docs/git-ls-files#Documentation/git-ls-files.txt---recurse-submodules) flag to `git ls-files`.
*/
recurseSubmodules ? false,
}:
/*
The [path](https://nixos.org/manual/nix/stable/language/values#type-path) to the working directory of a local Git repository.
This directory must contain a `.git` file or subdirectory.
*/
path:
let
# This imports the files unnecessarily, which currently can't be avoided
# because `builtins.fetchGit` is the only function exposing which files are tracked by Git.
# With the [lazy trees PR](https://github.com/NixOS/nix/pull/6530),
# the unnecessary import could be avoided.
# However a simpler alternative still would be [a builtins.gitLsFiles](https://github.com/NixOS/nix/issues/2944).
fetchResult = builtins.fetchGit {
url = path;
# This is the only `fetchGit` parameter that makes sense in this context.
# We can't just pass `submodules = recurseSubmodules` here because
# this would fail for Nix versions that don't support `submodules`.
${if recurseSubmodules then "submodules" else null} = true;
};
in
if inPureEvalMode then
throw "lib.fileset.gitTrackedWith: This function is currently not supported in pure evaluation mode, since it currently relies on `builtins.fetchGit`. See https://github.com/NixOS/nix/issues/9292."
else if ! isBool recurseSubmodules then
throw "lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it's a ${typeOf recurseSubmodules} instead."
else if recurseSubmodules && versionOlder nixVersion _fetchGitSubmodulesMinver then
throw "lib.fileset.gitTrackedWith: Setting the attribute `recurseSubmodules` to `true` is only supported for Nix version ${_fetchGitSubmodulesMinver} and after, but Nix version ${nixVersion} is used."
else if ! isPath path then
throw "lib.fileset.gitTrackedWith: Expected the second argument to be a path, but it's a ${typeOf path} instead."
# We can identify local working directories by checking for .git,
# see https://git-scm.com/docs/gitrepository-layout#_description.
# Note that `builtins.fetchGit` _does_ work for bare repositories (where there's no `.git`),
# even though `git ls-files` wouldn't return any files in that case.
else if ! pathExists (path + "/.git") then
throw "lib.fileset.gitTrackedWith: Expected the second argument (${toString path}) to point to a local working tree of a Git repository, but it's not."
else
_mirrorStorePath path fetchResult.outPath;
}

View file

@ -7,7 +7,6 @@ let
isString
pathExists
readDir
seq
split
trace
typeOf
@ -17,7 +16,6 @@ let
attrNames
attrValues
mapAttrs
setAttrByPath
zipAttrsWith
;
@ -28,7 +26,6 @@ let
inherit (lib.lists)
all
commonPrefix
drop
elemAt
filter
findFirst
@ -170,7 +167,12 @@ rec {
else
value
else if ! isPath value then
if isStringLike value then
if value ? _isLibCleanSourceWith then
throw ''
${context} is a `lib.sources`-based value, but it should be a file set or a path instead.
To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
Note that this only works for sources created from paths.''
else if isStringLike value then
throw ''
${context} ("${toString value}") is a string-like value, but it should be a file set or a path instead.
Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.''
@ -179,7 +181,7 @@ rec {
${context} is of type ${typeOf value}, but it should be a file set or a path instead.''
else if ! pathExists value then
throw ''
${context} (${toString value}) does not exist.''
${context} (${toString value}) is a path that does not exist.''
else
_singleton value;
@ -208,9 +210,9 @@ rec {
if firstWithBase != null && differentIndex != null then
throw ''
${functionContext}: Filesystem roots are not the same:
${(head list).context}: root "${toString firstBaseRoot}"
${(elemAt list differentIndex).context}: root "${toString (elemAt filesets differentIndex)._internalBaseRoot}"
Different roots are not supported.''
${(head list).context}: Filesystem root is "${toString firstBaseRoot}"
${(elemAt list differentIndex).context}: Filesystem root is "${toString (elemAt filesets differentIndex)._internalBaseRoot}"
Different filesystem roots are not supported.''
else
filesets;
@ -424,7 +426,7 @@ rec {
# Filter suited when there's some files
# This can't be used for when there's no files, because the base directory is always included
nonEmpty =
path: _:
path: type:
let
# Add a slash to the path string, turning "/foo" to "/foo/",
# making sure to not have any false prefix matches below.
@ -433,25 +435,37 @@ rec {
# meaning this function can never receive "/" as an argument
pathSlash = path + "/";
in
# Same as `hasPrefix pathSlash baseString`, but more efficient.
# With base /foo/bar we need to include /foo:
# hasPrefix "/foo/" "/foo/bar/"
if substring 0 (stringLength pathSlash) baseString == pathSlash then
true
# Same as `! hasPrefix baseString pathSlash`, but more efficient.
# With base /foo/bar we need to exclude /baz
# ! hasPrefix "/baz/" "/foo/bar/"
else if substring 0 baseLength pathSlash != baseString then
false
else
# Same as `removePrefix baseString path`, but more efficient.
# From the above code we know that hasPrefix baseString pathSlash holds, so this is safe.
# We don't use pathSlash here because we only needed the trailing slash for the prefix matching.
# With base /foo and path /foo/bar/baz this gives
# inTree (split "/" (removePrefix "/foo/" "/foo/bar/baz"))
# == inTree (split "/" "bar/baz")
# == inTree [ "bar" "baz" ]
inTree (split "/" (substring baseLength (-1) path));
(
# Same as `hasPrefix pathSlash baseString`, but more efficient.
# With base /foo/bar we need to include /foo:
# hasPrefix "/foo/" "/foo/bar/"
if substring 0 (stringLength pathSlash) baseString == pathSlash then
true
# Same as `! hasPrefix baseString pathSlash`, but more efficient.
# With base /foo/bar we need to exclude /baz
# ! hasPrefix "/baz/" "/foo/bar/"
else if substring 0 baseLength pathSlash != baseString then
false
else
# Same as `removePrefix baseString path`, but more efficient.
# From the above code we know that hasPrefix baseString pathSlash holds, so this is safe.
# We don't use pathSlash here because we only needed the trailing slash for the prefix matching.
# With base /foo and path /foo/bar/baz this gives
# inTree (split "/" (removePrefix "/foo/" "/foo/bar/baz"))
# == inTree (split "/" "bar/baz")
# == inTree [ "bar" "baz" ]
inTree (split "/" (substring baseLength (-1) path))
)
# This is a way to have an additional check in case the above is true, without any significant performance cost
&& (
# This relies on the fact that Nix only distinguishes path types "directory", "regular", "symlink" and "unknown",
# so everything except "unknown" is allowed; it seems reasonable to rely on that
type != "unknown"
|| throw ''
lib.fileset.toSource: `fileset` contains a file that cannot be added to the store: ${path}
This file is neither a regular file nor a symlink, the only file types supported by the Nix store.
Therefore the file set cannot be added to the Nix store as is. Make sure to not include that file to avoid this error.''
);
in
# Special case because the code below assumes that the _internalBase is always included in the result
# which shouldn't be done when we have no files at all in the base
@ -461,6 +475,59 @@ rec {
else
nonEmpty;
# Turn a builtins.filterSource-based source filter on a root path into a file set
# containing only files included by the filter.
# The filter is lazily called as necessary to determine whether paths are included
# Type: Path -> (String -> String -> Bool) -> fileset
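# Example (illustrative; ./src stands in for any local directory):
#   _fromSourceFilter ./src (pathString: type: type == "directory" || baseNameOf pathString == "default.nix")
#   => a file set containing only the default.nix files under ./src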
_fromSourceFilter = root: sourceFilter:
let
# During the recursion we need to track both:
# - The path value such that we can safely call `readDir` on it
# - The path string value such that we can correctly call the `filter` with it
#
# While we could just recurse with the path value,
# this would then require converting it to a path string for every path,
# which is a fairly expensive operation
# Create a file set from a directory entry
fromDirEntry = path: pathString: type:
# The filter needs to run on the path as a string
if ! sourceFilter pathString type then
null
else if type == "directory" then
fromDir path pathString
else
type;
# Create a file set from a directory
fromDir = path: pathString:
mapAttrs
# This looks a bit funny, but we need both the path-based and the path string-based values
(name: fromDirEntry (path + "/${name}") (pathString + "/${name}"))
# We need to readDir on the path value, because reading on a path string
# would be unspecified if there are multiple filesystem roots
(readDir path);
rootPathType = pathType root;
# We need to convert the path to a string to imitate what builtins.path calls the filter function with.
# We don't want to rely on `toString` for this though because it's not very well defined, see ../path/README.md
# So instead we use `lib.path.splitRoot` to safely deconstruct the path into its filesystem root and subpath
# We don't need the filesystem root though, because builtins.path doesn't expose that to the filter in any way.
# So we only need the components, which we then turn into a string as one would expect.
rootString = "/" + concatStringsSep "/" (components (splitRoot root).subpath);
in
if rootPathType == "directory" then
# We imitate builtins.path not calling the filter on the root path
_create root (fromDir root rootString)
else
# Direct files are always included by builtins.path without calling the filter
# But we need to lift up the base path to its parent to satisfy the base path invariant
_create (dirOf root)
{
${baseNameOf root} = rootPathType;
};
# Transforms the filesetTree of a file set to a shorter base path, e.g.
# _shortenTreeBase [ "foo" ] (_create /foo/bar null)
# => { bar = null; }
@ -638,4 +705,147 @@ rec {
else
# In all other cases it's the rhs
rhs;
# Compute the set difference between two file sets.
# The filesets must already be coerced and validated to be in the same filesystem root.
# Type: Fileset -> Fileset -> Fileset
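# For example (illustrative), subtracting the file set of ./foo/bar from the
# file set of ./foo yields all files under ./foo except those under ./foo/bar.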
_difference = positive: negative:
let
# The common base components prefix, e.g.
# (/foo/bar, /foo/bar/baz) -> /foo/bar
# (/foo/bar, /foo/baz) -> /foo
commonBaseComponentsLength =
# TODO: Have a `lib.lists.commonPrefixLength` function such that we don't need the list allocation from commonPrefix here
length (
commonPrefix
positive._internalBaseComponents
negative._internalBaseComponents
);
# We need filesetTree's with the same base to be able to compute the difference between them
# This here is the filesetTree from the negative file set, but for a base path that matches the positive file set.
# Examples:
# For `difference /foo /foo/bar`, `negativeTreeWithPositiveBase = { bar = "directory"; }`
# because under the base path of `/foo`, only `bar` from the negative file set is included
# For `difference /foo/bar /foo`, `negativeTreeWithPositiveBase = "directory"`
# because under the base path of `/foo/bar`, everything from the negative file set is included
# For `difference /foo /bar`, `negativeTreeWithPositiveBase = null`
# because under the base path of `/foo`, nothing from the negative file set is included
negativeTreeWithPositiveBase =
if commonBaseComponentsLength == length positive._internalBaseComponents then
# The common prefix is the same as the positive base path, so the second path is equal or longer.
# We need to _shorten_ the negative filesetTree to the same base path as the positive one
# E.g. for `difference /foo /foo/bar` the common prefix is /foo, equal to the positive file set's base
# So we need to shorten the base of the tree for the negative argument from /foo/bar to just /foo
_shortenTreeBase positive._internalBaseComponents negative
else if commonBaseComponentsLength == length negative._internalBaseComponents then
# The common prefix is the same as the negative base path, so the first path is longer.
# We need to lengthen the negative filesetTree to the same base path as the positive one.
# E.g. for `difference /foo/bar /foo` the common prefix is /foo, equal to the negative file set's base
# So we need to lengthen the base of the tree for the negative argument from /foo to /foo/bar
_lengthenTreeBase positive._internalBaseComponents negative
else
# The common prefix is neither the first nor the second path.
# This means there's no overlap between the two file sets,
# and nothing from the negative argument should get removed from the positive one
# E.g for `difference /foo /bar`, we remove nothing to get the same as `/foo`
null;
resultingTree =
_differenceTree
positive._internalBase
positive._internalTree
negativeTreeWithPositiveBase;
in
# If the first file set is empty, we can never have any files in the result
if positive._internalIsEmptyWithoutBase then
_emptyWithoutBase
# If the second file set is empty, nothing gets removed, so the result is just the first file set
else if negative._internalIsEmptyWithoutBase then
positive
else
# We use the positive file set base for the result,
# because only files from the positive side may be included,
# which is what base path is for
_create positive._internalBase resultingTree;
# Computes the set difference of two filesetTree's
# Type: Path -> filesetTree -> filesetTree
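# Example (illustrative):
#   _differenceTree /foo { bar = "regular"; baz = "regular"; } { bar = "regular"; }
#   => { bar = null; baz = "regular"; }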
_differenceTree = path: lhs: rhs:
# If the lhs doesn't have any files, or the right hand side includes all files
if lhs == null || isString rhs then
# The result will always be empty
null
# If the right hand side has no files
else if rhs == null then
# The result is always the left hand side, because nothing gets removed
lhs
else
# Otherwise we always have two attribute sets to recurse into
mapAttrs (name: lhsValue:
_differenceTree (path + "/${name}") lhsValue (rhs.${name} or null)
) (_directoryEntries path lhs);
# Filters all files in a path based on a predicate
# Type: ({ name, type, ... } -> Bool) -> Path -> FileSet
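# Example (illustrative; ./src stands in for any existing path):
#   _fileFilter (file: file.type == "regular") ./src
#   => a file set of all regular files under ./src, excluding symlinks and other file types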
_fileFilter = predicate: root:
let
# Check the predicate for a single file
# Type: String -> String -> filesetTree
fromFile = name: type:
if
predicate {
inherit name type;
# To ensure forwards compatibility with more arguments being added in the future,
# adding an attribute which can't be deconstructed :)
"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you're using `{ name, file }:`, use `{ name, file, ... }:` instead." = null;
}
then
type
else
null;
# Check the predicate for all files in a directory
# Type: Path -> filesetTree
fromDir = path:
mapAttrs (name: type:
if type == "directory" then
fromDir (path + "/${name}")
else
fromFile name type
) (readDir path);
rootType = pathType root;
in
if rootType == "directory" then
_create root (fromDir root)
else
# Single files are turned into a directory containing that file or nothing.
_create (dirOf root) {
${baseNameOf root} =
fromFile (baseNameOf root) rootType;
};
# Support for `builtins.fetchGit` with `submodules = true` was introduced in 2.4
# https://github.com/NixOS/nix/commit/55cefd41d63368d4286568e2956afd535cb44018
_fetchGitSubmodulesMinver = "2.4";
# Mirrors the contents of a Nix store path relative to a local path as a file set.
# Some notes:
# - The store path is read at evaluation time.
# - The store path must not include files that don't exist in the respective local path.
#
# Type: Path -> String -> FileSet
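# Example (illustrative): since "${./foo}" imports ./foo into the store,
#   _mirrorStorePath ./foo "${./foo}"
# yields a file set containing all files of ./foo.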
_mirrorStorePath = localPath: storePath:
let
recurse = focusedStorePath:
mapAttrs (name: type:
if type == "directory" then
recurse (focusedStorePath + "/${name}")
else
type
) (builtins.readDir focusedStorePath);
in
_create localPath
(recurse storePath);
}

View file

@ -1,5 +1,7 @@
#!/usr/bin/env bash
# shellcheck disable=SC2016
# shellcheck disable=SC2317
# shellcheck disable=SC2192
# Tests lib.fileset
# Run:
@ -41,15 +43,29 @@ crudeUnquoteJSON() {
cut -d \" -f2
}
prefixExpression='let
lib = import <nixpkgs/lib>;
internal = import <nixpkgs/lib/fileset/internal.nix> {
inherit lib;
};
in
with lib;
with internal;
with lib.fileset;'
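# Outputs the Nix expression prelude that all test expressions are evaluated with.
# Passing --simulate-pure-eval overrides lib.trivial.inPureEvalMode to fake pure evaluation mode.
# Usage: prefixExpression [--simulate-pure-eval]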
prefixExpression() {
echo 'let
lib =
(import <nixpkgs/lib>)
'
if [[ "${1:-}" == "--simulate-pure-eval" ]]; then
echo '
.extend (final: prev: {
trivial = prev.trivial // {
inPureEvalMode = true;
};
})'
fi
echo '
;
internal = import <nixpkgs/lib/fileset/internal.nix> {
inherit lib;
};
in
with lib;
with internal;
with lib.fileset;'
}
# Check that two nix expression successfully evaluate to the same value.
# The expressions have `lib.fileset` in scope.
@ -58,7 +74,7 @@ expectEqual() {
local actualExpr=$1
local expectedExpr=$2
if actualResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/actualStderr \
--expr "$prefixExpression ($actualExpr)"); then
--expr "$(prefixExpression) ($actualExpr)"); then
actualExitCode=$?
else
actualExitCode=$?
@ -66,7 +82,7 @@ expectEqual() {
actualStderr=$(< "$tmp"/actualStderr)
if expectedResult=$(nix-instantiate --eval --strict --show-trace 2>"$tmp"/expectedStderr \
--expr "$prefixExpression ($expectedExpr)"); then
--expr "$(prefixExpression) ($expectedExpr)"); then
expectedExitCode=$?
else
expectedExitCode=$?
@ -93,8 +109,9 @@ expectEqual() {
# Usage: expectStorePath NIX
expectStorePath() {
local expr=$1
if ! result=$(nix-instantiate --eval --strict --json --read-write-mode --show-trace \
--expr "$prefixExpression ($expr)"); then
if ! result=$(nix-instantiate --eval --strict --json --read-write-mode --show-trace 2>"$tmp"/stderr \
--expr "$(prefixExpression) ($expr)"); then
cat "$tmp/stderr" >&2
die "$expr failed to evaluate, but it was expected to succeed"
fi
# This is safe because we assume to get back a store path in a string
@ -106,10 +123,16 @@ expectStorePath() {
# The expression has `lib.fileset` in scope.
# Usage: expectFailure NIX REGEX
expectFailure() {
if [[ "$1" == "--simulate-pure-eval" ]]; then
maybePure="--simulate-pure-eval"
shift
else
maybePure=""
fi
local expr=$1
local expectedErrorRegex=$2
if result=$(nix-instantiate --eval --strict --read-write-mode --show-trace 2>"$tmp/stderr" \
--expr "$prefixExpression $expr"); then
--expr "$(prefixExpression $maybePure) $expr"); then
die "$expr evaluated successfully to $result, but it was expected to fail"
fi
stderr=$(<"$tmp/stderr")
@ -126,12 +149,12 @@ expectTrace() {
local expectedTrace=$2
nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTrace \
--expr "$prefixExpression trace ($expr)" || true
--expr "$(prefixExpression) trace ($expr)" || true
actualTrace=$(sed -n 's/^trace: //p' "$tmp/stderrTrace")
nix-instantiate --eval --show-trace >/dev/null 2>"$tmp"/stderrTraceVal \
--expr "$prefixExpression traceVal ($expr)" || true
--expr "$(prefixExpression) traceVal ($expr)" || true
actualTraceVal=$(sed -n 's/^trace: //p' "$tmp/stderrTraceVal")
@ -224,23 +247,17 @@ withFileMonitor() {
fi
}
# Check whether a file set includes/excludes declared paths as expected, usage:
# Create the tree structure declared in the tree variable, usage:
#
# tree=(
# [a/b]=1  # Declare that file a/b should exist and expect it to be included in the store path
# [c/a]=0  # Declare that file c/a should exist and expect it to be excluded in the store path
# [c/d/]=0 # Declare that directory c/d/ should exist and expect it to be excluded in the store path
# [a/b]=   # Declare that file a/b should exist
# [c/a]=   # Declare that file c/a should exist
# [c/d/]=  # Declare that directory c/d/ should exist
# )
# checkFileset './a' # Pass the fileset as the argument
# createTree
declare -A tree
checkFileset() {
# New subshell so that we can have a separate trap handler, see `trap` below
local fileset=$1
# Process the tree into separate arrays for included paths, excluded paths and excluded files.
local -a included=()
local -a excluded=()
local -a excludedFiles=()
createTree() {
# Track which paths need to be created
local -a dirsToCreate=()
local -a filesToCreate=()
@ -248,24 +265,9 @@ checkFileset() {
# If keys end with a `/` we treat them as directories, otherwise files
if [[ "$p" =~ /$ ]]; then
dirsToCreate+=("$p")
isFile=
else
filesToCreate+=("$p")
isFile=1
fi
case "${tree[$p]}" in
1)
included+=("$p")
;;
0)
excluded+=("$p")
if [[ -n "$isFile" ]]; then
excludedFiles+=("$p")
fi
;;
*)
die "Unsupported tree value: ${tree[$p]}"
esac
done
# Create all the necessary paths.
@ -280,6 +282,43 @@ checkFileset() {
mkdir -p "${parentsToCreate[@]}"
touch "${filesToCreate[@]}"
fi
}
# Check whether a file set includes/excludes declared paths as expected, usage:
#
# tree=(
# [a/b]=1  # Declare that file a/b should exist and expect it to be included in the store path
# [c/a]=0  # Declare that file c/a should exist and expect it to be excluded in the store path
# [c/d/]=0 # Declare that directory c/d/ should exist and expect it to be excluded in the store path
# )
# checkFileset './a' # Pass the fileset as the argument
checkFileset() {
# New subshell so that we can have a separate trap handler, see `trap` below
local fileset=$1
# Create the tree
createTree
# Process the tree into separate arrays for included paths, excluded paths and excluded files.
local -a included=()
local -a excluded=()
local -a excludedFiles=()
for p in "${!tree[@]}"; do
case "${tree[$p]}" in
1)
included+=("$p")
;;
0)
excluded+=("$p")
# If keys end with a `/` we treat them as directories, otherwise files
if [[ ! "$p" =~ /$ ]]; then
excludedFiles+=("$p")
fi
;;
*)
die "Unsupported tree value: ${tree[$p]}"
esac
done
expression="toSource { root = ./.; fileset = $fileset; }"
@ -318,9 +357,13 @@ checkFileset() {
#### Error messages #####
# Absolute paths in strings cannot be passed as `root`
expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \("/nix/store/foobar"\) is a string-like value, but it should be a path instead.
expectFailure 'toSource { root = "/nix/store/foobar"; fileset = ./.; }' 'lib.fileset.toSource: `root` \(/nix/store/foobar\) is a string-like value, but it should be a path instead.
\s*Paths in strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
expectFailure 'toSource { root = cleanSourceWith { src = ./.; }; fileset = ./.; }' 'lib.fileset.toSource: `root` is a `lib.sources`-based value, but it should be a path instead.
\s*To use a `lib.sources`-based value, convert it to a file set using `lib.fileset.fromSource` and pass it as `fileset`.
\s*Note that this only works for sources created from paths.'
# Only paths are accepted as `root`
expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `root` is of type int, but it should be a path instead.'
@ -328,21 +371,21 @@ expectFailure 'toSource { root = 10; fileset = ./.; }' 'lib.fileset.toSource: `r
mkdir -p {foo,bar}/mock-root
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
toSource { root = ./foo/mock-root; fileset = ./bar/mock-root; }
' 'lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` \("'"$work"'/foo/mock-root"\):
\s*`root`: root "'"$work"'/foo/mock-root"
\s*`fileset`: root "'"$work"'/bar/mock-root"
\s*Different roots are not supported.'
rm -rf *
' 'lib.fileset.toSource: Filesystem roots are not the same for `fileset` and `root` \('"$work"'/foo/mock-root\):
\s*`root`: Filesystem root is "'"$work"'/foo/mock-root"
\s*`fileset`: Filesystem root is "'"$work"'/bar/mock-root"
\s*Different filesystem roots are not supported.'
rm -rf -- *
# `root` needs to exist
expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) does not exist.'
expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a path that does not exist.'
# `root` needs to be a file
touch a
expectFailure 'toSource { root = ./a; fileset = ./a; }' 'lib.fileset.toSource: `root` \('"$work"'/a\) is a file, but it should be a directory instead. Potential solutions:
\s*- If you want to import the file into the store _without_ a containing directory, use string interpolation or `builtins.path` instead of this function.
\s*- If you want to import the file into the store _with_ a containing directory, set `root` to the containing directory, such as '"$work"', and set `fileset` to the file path.'
rm -rf *
rm -rf -- *
# The fileset argument should be evaluated, even if the directory is empty
expectFailure 'toSource { root = ./.; fileset = abort "This should be evaluated"; }' 'evaluation aborted with the following error message: '\''This should be evaluated'\'
@ -352,15 +395,25 @@ mkdir a
expectFailure 'toSource { root = ./a; fileset = ./.; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
\s*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
\s*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
rm -rf *
rm -rf -- *
# non-regular and non-symlink files cannot be added to the Nix store
mkfifo a
expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` contains a file that cannot be added to the store: '"$work"'/a
\s*This file is neither a regular file nor a symlink, the only file types supported by the Nix store.
\s*Therefore the file set cannot be added to the Nix store as is. Make sure to not include that file to avoid this error.'
rm -rf -- *
# Path coercion only works for paths
expectFailure 'toSource { root = ./.; fileset = 10; }' 'lib.fileset.toSource: `fileset` is of type int, but it should be a file set or a path instead.'
expectFailure 'toSource { root = ./.; fileset = "/some/path"; }' 'lib.fileset.toSource: `fileset` \("/some/path"\) is a string-like value, but it should be a file set or a path instead.
\s*Paths represented as strings are not supported by `lib.fileset`, use `lib.sources` or derivations instead.'
expectFailure 'toSource { root = ./.; fileset = cleanSourceWith { src = ./.; }; }' 'lib.fileset.toSource: `fileset` is a `lib.sources`-based value, but it should be a file set or a path instead.
\s*To convert a `lib.sources`-based value to a file set you can use `lib.fileset.fromSource`.
\s*Note that this only works for sources created from paths.'
# Path coercion errors for non-existent paths
expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) does not exist.'
expectFailure 'toSource { root = ./.; fileset = ./a; }' 'lib.fileset.toSource: `fileset` \('"$work"'/a\) is a path that does not exist.'
# File sets cannot be evaluated directly
expectFailure 'union ./. ./.' 'lib.fileset: Directly evaluating a file set is not supported.
@ -483,26 +536,26 @@ mkdir -p {foo,bar}/mock-root
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
toSource { root = ./.; fileset = union ./foo/mock-root ./bar/mock-root; }
' 'lib.fileset.union: Filesystem roots are not the same:
\s*first argument: root "'"$work"'/foo/mock-root"
\s*second argument: root "'"$work"'/bar/mock-root"
\s*Different roots are not supported.'
\s*First argument: Filesystem root is "'"$work"'/foo/mock-root"
\s*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
\s*Different filesystem roots are not supported.'
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
toSource { root = ./.; fileset = unions [ ./foo/mock-root ./bar/mock-root ]; }
' 'lib.fileset.unions: Filesystem roots are not the same:
\s*element 0: root "'"$work"'/foo/mock-root"
\s*element 1: root "'"$work"'/bar/mock-root"
\s*Different roots are not supported.'
rm -rf *
\s*Element 0: Filesystem root is "'"$work"'/foo/mock-root"
\s*Element 1: Filesystem root is "'"$work"'/bar/mock-root"
\s*Different filesystem roots are not supported.'
rm -rf -- *
# Coercion errors show the correct context
expectFailure 'toSource { root = ./.; fileset = union ./a ./.; }' 'lib.fileset.union: first argument \('"$work"'/a\) does not exist.'
expectFailure 'toSource { root = ./.; fileset = union ./. ./b; }' 'lib.fileset.union: second argument \('"$work"'/b\) does not exist.'
expectFailure 'toSource { root = ./.; fileset = unions [ ./a ./. ]; }' 'lib.fileset.unions: element 0 \('"$work"'/a\) does not exist.'
expectFailure 'toSource { root = ./.; fileset = unions [ ./. ./b ]; }' 'lib.fileset.unions: element 1 \('"$work"'/b\) does not exist.'
expectFailure 'toSource { root = ./.; fileset = union ./a ./.; }' 'lib.fileset.union: First argument \('"$work"'/a\) is a path that does not exist.'
expectFailure 'toSource { root = ./.; fileset = union ./. ./b; }' 'lib.fileset.union: Second argument \('"$work"'/b\) is a path that does not exist.'
expectFailure 'toSource { root = ./.; fileset = unions [ ./a ./. ]; }' 'lib.fileset.unions: Element 0 \('"$work"'/a\) is a path that does not exist.'
expectFailure 'toSource { root = ./.; fileset = unions [ ./. ./b ]; }' 'lib.fileset.unions: Element 1 \('"$work"'/b\) is a path that does not exist.'
# unions needs a list
expectFailure 'toSource { root = ./.; fileset = unions null; }' 'lib.fileset.unions: Expected argument to be a list, but got a null.'
expectFailure 'toSource { root = ./.; fileset = unions null; }' 'lib.fileset.unions: Argument is of type null, but it should be a list instead.'
# The tree of later arguments should not be evaluated if a former argument already includes all files
tree=()
@ -596,14 +649,14 @@ mkdir -p {foo,bar}/mock-root
expectFailure 'with ((import <nixpkgs/lib>).extend (import <nixpkgs/lib/fileset/mock-splitRoot.nix>)).fileset;
toSource { root = ./.; fileset = intersection ./foo/mock-root ./bar/mock-root; }
' 'lib.fileset.intersection: Filesystem roots are not the same:
\s*first argument: root "'"$work"'/foo/mock-root"
\s*second argument: root "'"$work"'/bar/mock-root"
\s*Different roots are not supported.'
\s*First argument: Filesystem root is "'"$work"'/foo/mock-root"
\s*Second argument: Filesystem root is "'"$work"'/bar/mock-root"
\s*Different filesystem roots are not supported.'
rm -rf -- *
# Coercion errors show the correct context
expectFailure 'toSource { root = ./.; fileset = intersection ./a ./.; }' 'lib.fileset.intersection: first argument \('"$work"'/a\) does not exist.'
expectFailure 'toSource { root = ./.; fileset = intersection ./. ./b; }' 'lib.fileset.intersection: second argument \('"$work"'/b\) does not exist.'
expectFailure 'toSource { root = ./.; fileset = intersection ./a ./.; }' 'lib.fileset.intersection: First argument \('"$work"'/a\) is a path that does not exist.'
expectFailure 'toSource { root = ./.; fileset = intersection ./. ./b; }' 'lib.fileset.intersection: Second argument \('"$work"'/b\) is a path that does not exist.'
# The tree of later arguments should not be evaluated if a former argument already excludes all files
tree=(
@ -677,6 +730,191 @@ tree=(
)
checkFileset 'intersection (unions [ ./a/b ./c/d ./c/e ]) (unions [ ./a ./c/d/f ./c/e ])'
## Difference
# Subtracting something from itself results in nothing
tree=(
[a]=0
)
checkFileset 'difference ./. ./.'
# The tree of the second argument should not be evaluated if not needed
checkFileset 'difference _emptyWithoutBase (_create ./. (abort "This should not be used!"))'
checkFileset 'difference (_create ./. null) (_create ./. (abort "This should not be used!"))'
# Subtracting nothing gives the same thing back
tree=(
[a]=1
)
checkFileset 'difference ./. _emptyWithoutBase'
checkFileset 'difference ./. (_create ./. null)'
# Subtracting doesn't influence the base path
mkdir a b
touch {a,b}/x
expectEqual 'toSource { root = ./a; fileset = difference ./a ./b; }' 'toSource { root = ./a; fileset = ./a; }'
rm -rf -- *
# Also not the other way around
mkdir a
expectFailure 'toSource { root = ./a; fileset = difference ./. ./a; }' 'lib.fileset.toSource: `fileset` could contain files in '"$work"', which is not under the `root` \('"$work"'/a\). Potential solutions:
\s*- Set `root` to '"$work"' or any directory higher up. This changes the layout of the resulting store path.
\s*- Set `fileset` to a file set that cannot contain files outside the `root` \('"$work"'/a\). This could change the files included in the result.'
rm -rf -- *
# Difference actually works
# We test all combinations of ./., ./a, ./a/x and ./b
tree=(
[a/x]=0
[a/y]=0
[b]=0
[c]=0
)
checkFileset 'difference ./. ./.'
checkFileset 'difference ./a ./.'
checkFileset 'difference ./a/x ./.'
checkFileset 'difference ./b ./.'
checkFileset 'difference ./a ./a'
checkFileset 'difference ./a/x ./a'
checkFileset 'difference ./a/x ./a/x'
checkFileset 'difference ./b ./b'
tree=(
[a/x]=0
[a/y]=0
[b]=1
[c]=1
)
checkFileset 'difference ./. ./a'
tree=(
[a/x]=1
[a/y]=1
[b]=0
[c]=0
)
checkFileset 'difference ./a ./b'
tree=(
[a/x]=1
[a/y]=0
[b]=0
[c]=0
)
checkFileset 'difference ./a/x ./b'
tree=(
[a/x]=0
[a/y]=1
[b]=0
[c]=0
)
checkFileset 'difference ./a ./a/x'
tree=(
[a/x]=0
[a/y]=0
[b]=1
[c]=0
)
checkFileset 'difference ./b ./a'
checkFileset 'difference ./b ./a/x'
tree=(
[a/x]=0
[a/y]=1
[b]=1
[c]=1
)
checkFileset 'difference ./. ./a/x'
tree=(
[a/x]=1
[a/y]=1
[b]=0
[c]=1
)
checkFileset 'difference ./. ./b'
## File filter
# The first argument needs to be a function
expectFailure 'fileFilter null (abort "this is not needed")' 'lib.fileset.fileFilter: First argument is of type null, but it should be a function instead.'
# The second argument needs to be an existing path
expectFailure 'fileFilter (file: abort "this is not needed") _emptyWithoutBase' 'lib.fileset.fileFilter: Second argument is a file set, but it should be a path instead.
\s*If you need to filter files in a file set, use `intersection fileset \(fileFilter pred \./\.\)` instead.'
expectFailure 'fileFilter (file: abort "this is not needed") null' 'lib.fileset.fileFilter: Second argument is of type null, but it should be a path instead.'
expectFailure 'fileFilter (file: abort "this is not needed") ./a' 'lib.fileset.fileFilter: Second argument \('"$work"'/a\) is a path that does not exist.'
# The predicate is not called when there's no files
tree=()
checkFileset 'fileFilter (file: abort "this is not needed") ./.'
# The predicate must be able to handle extra attributes
touch a
expectFailure 'toSource { root = ./.; fileset = fileFilter ({ name, type }: true) ./.; }' 'called with unexpected argument '\''"lib.fileset.fileFilter: The predicate function passed as the first argument must be able to handle extra attributes for future compatibility. If you'\''re using `\{ name, type \}:`, use `\{ name, type, ... \}:` instead."'\'
rm -rf -- *
# .name is the name, and it works correctly, even recursively
tree=(
[a]=1
[b]=0
[c/a]=1
[c/b]=0
[d/c/a]=1
[d/c/b]=0
)
checkFileset 'fileFilter (file: file.name == "a") ./.'
tree=(
[a]=0
[b]=1
[c/a]=0
[c/b]=1
[d/c/a]=0
[d/c/b]=1
)
checkFileset 'fileFilter (file: file.name != "a") ./.'
# `.type` is the file type
mkdir d
touch d/a
ln -s d/b d/b
mkfifo d/c
expectEqual \
'toSource { root = ./.; fileset = fileFilter (file: file.type == "regular") ./.; }' \
'toSource { root = ./.; fileset = ./d/a; }'
expectEqual \
'toSource { root = ./.; fileset = fileFilter (file: file.type == "symlink") ./.; }' \
'toSource { root = ./.; fileset = ./d/b; }'
expectEqual \
'toSource { root = ./.; fileset = fileFilter (file: file.type == "unknown") ./.; }' \
'toSource { root = ./.; fileset = ./d/c; }'
expectEqual \
'toSource { root = ./.; fileset = fileFilter (file: file.type != "regular") ./.; }' \
'toSource { root = ./.; fileset = union ./d/b ./d/c; }'
expectEqual \
'toSource { root = ./.; fileset = fileFilter (file: file.type != "symlink") ./.; }' \
'toSource { root = ./.; fileset = union ./d/a ./d/c; }'
expectEqual \
'toSource { root = ./.; fileset = fileFilter (file: file.type != "unknown") ./.; }' \
'toSource { root = ./.; fileset = union ./d/a ./d/b; }'
rm -rf -- *
# It's lazy
tree=(
[b]=1
[c/a]=1
)
# Note that union evaluates the first argument first if necessary; that's why we can use ./c/a here
checkFileset 'union ./c/a (fileFilter (file: assert file.name != "a"; true) ./.)'
# but here we need to use ./c
checkFileset 'union (fileFilter (file: assert file.name != "a"; true) ./.) ./c'
# Make sure single files are filtered correctly
tree=(
[a]=1
[b]=0
)
checkFileset 'fileFilter (file: assert file.name == "a"; true) ./a'
tree=(
[a]=0
[b]=0
)
checkFileset 'fileFilter (file: assert file.name == "a"; false) ./a'
## Tracing
@ -823,6 +1061,390 @@ touch 0 "${filesToCreate[@]}"
expectTrace 'unions (mapAttrsToList (n: _: ./. + "/${n}") (removeAttrs (builtins.readDir ./.) [ "0" ]))' "$expectedTrace"
rm -rf -- *
## lib.fileset.fromSource
# Check error messages
expectFailure 'fromSource null' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
expectFailure 'fromSource (lib.cleanSource "")' 'lib.fileset.fromSource: The source origin of the argument is a string-like value \(""\), but it should be a path instead.
\s*Sources created from paths in strings cannot be turned into file sets, use `lib.sources` or derivations instead.'
expectFailure 'fromSource (lib.cleanSource null)' 'lib.fileset.fromSource: The source origin of the argument is of type null, but it should be a path instead.'
# fromSource on a path works and is the same as coercing that path
mkdir a
touch a/b c
expectEqual 'trace (fromSource ./.) null' 'trace ./. null'
rm -rf -- *
# Check that converting to a file set doesn't read the included files
mkdir a
touch a/b
run() {
expectEqual "trace (fromSource (lib.cleanSourceWith { src = ./a; })) null" "builtins.trace \"$work/a (all files in directory)\" null"
rm a/b
}
withFileMonitor run a/b
rm -rf -- *
# Check that converting to a file set doesn't read entries for directories that are filtered out
mkdir -p a/b
touch a/b/c
run() {
expectEqual "trace (fromSource (lib.cleanSourceWith {
src = ./a;
filter = pathString: type: false;
})) null" "builtins.trace \"(empty)\" null"
rm a/b/c
rmdir a/b
}
withFileMonitor run a/b
rm -rf -- *
# The filter is not needed on empty directories
expectEqual 'trace (fromSource (lib.cleanSourceWith {
src = ./.;
filter = abort "filter should not be needed";
})) null' 'trace _emptyWithoutBase null'
# Single files also work
touch a b
expectEqual 'trace (fromSource (cleanSourceWith { src = ./a; })) null' 'trace ./a null'
rm -rf -- *
# For a tree assigning each subpath true/false,
# check whether a source filter with those results includes the same files
# as a file set created using fromSource. Usage:
#
# tree=(
# [a]=1 # ./a is a file and the filter should return true for it
# [b/]=0 # ./b is a directory and the filter should return false for it
# )
# checkSource
checkSource() {
createTree
# Serialise the tree as JSON (there's only minimal savings with jq,
# and we don't need to handle escapes)
{
echo "{"
first=1
for p in "${!tree[@]}"; do
if [[ -z "$first" ]]; then
echo ","
else
first=
fi
echo "\"$p\":"
case "${tree[$p]}" in
1)
echo "true"
;;
0)
echo "false"
;;
*)
die "Unsupported tree value: ${tree[$p]}"
esac
done
echo "}"
} > "$tmp/tree.json"
# An expression to create a source value with a filter matching the tree
sourceExpr='
let
tree = importJSON '"$tmp"'/tree.json;
in
cleanSourceWith {
src = ./.;
filter =
pathString: type:
let
stripped = removePrefix (toString ./. + "/") pathString;
key = stripped + optionalString (type == "directory") "/";
in
tree.${key} or
(throw "tree key ${key} missing");
}
'
filesetExpr='
toSource {
root = ./.;
fileset = fromSource ('"$sourceExpr"');
}
'
# Turn both into store paths
sourceStorePath=$(expectStorePath "$sourceExpr")
filesetStorePath=$(expectStorePath "$filesetExpr")
# Loop through each path in the tree
while IFS= read -r -d $'\0' subpath; do
if [[ ! -e "$sourceStorePath"/"$subpath" ]]; then
# If it's not in the source store path, it's also not in the file set store path
if [[ -e "$filesetStorePath"/"$subpath" ]]; then
die "The store path $sourceStorePath created by $expr doesn't contain $subpath, but the corresponding store path $filesetStorePath created via fromSource does contain $subpath"
fi
elif [[ -z "$(find "$sourceStorePath"/"$subpath" -type f)" ]]; then
# If it's an empty directory in the source store path, it shouldn't be in the file set store path
if [[ -e "$filesetStorePath"/"$subpath" ]]; then
die "The store path $sourceStorePath created by $expr contains the path $subpath without any files, but the corresponding store path $filesetStorePath created via fromSource didn't omit it"
fi
else
# If it's a non-empty directory or a file, it should be in the file set store path
if [[ ! -e "$filesetStorePath"/"$subpath" ]]; then
die "The store path $sourceStorePath created by $expr contains the non-empty path $subpath, but the corresponding store path $filesetStorePath created via fromSource doesn't include it"
fi
fi
done < <(find . -mindepth 1 -print0)
rm -rf -- *
}
# Check whether the filter is evaluated correctly
tree=(
[a]=
[b/]=
[b/c]=
[b/d]=
[e/]=
[e/e/]=
)
# We fill out the above tree values with all possible combinations of 0 and 1
# Then check whether a filter based on those return values gets turned into the corresponding file set
for i in $(seq 0 $((2 ** ${#tree[@]} - 1 ))); do
for p in "${!tree[@]}"; do
tree[$p]=$(( i % 2 ))
(( i /= 2 )) || true
done
checkSource
done
# The filter is called with the same arguments in the same order
mkdir a e
touch a/b a/c d e
expectEqual '
trace (fromSource (cleanSourceWith {
src = ./.;
filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
})) null
' '
builtins.seq (cleanSourceWith {
src = ./.;
filter = pathString: type: builtins.trace "${pathString} ${toString type}" true;
}).outPath
builtins.trace "'"$work"' (all files in directory)"
null
'
rm -rf -- *
# Test that if a directory is not included, the filter isn't called on its contents
mkdir a b
touch a/c b/d
expectEqual 'trace (fromSource (cleanSourceWith {
src = ./.;
filter = pathString: type:
if pathString == toString ./a then
false
else if pathString == toString ./b then
true
else if pathString == toString ./b/d then
true
else
abort "This filter should not be called with path ${pathString}";
})) null' 'trace (_create ./. { b = "directory"; }) null'
rm -rf -- *
# The filter is called lazily:
# If a later operation, say an intersection, removes a part of the tree, the filter won't run on it
mkdir a d
touch a/{b,c} d/e
expectEqual 'trace (intersection ./a (fromSource (lib.cleanSourceWith {
src = ./.;
filter = pathString: type:
if pathString == toString ./a || pathString == toString ./a/b then
true
else if pathString == toString ./a/c then
false
else
abort "filter should not be called on ${pathString}";
}))) null' 'trace ./a/b null'
rm -rf -- *
## lib.fileset.gitTracked/gitTrackedWith
# The first/second argument has to be a path
expectFailure 'gitTracked null' 'lib.fileset.gitTracked: Expected the argument to be a path, but it'\''s a null instead.'
expectFailure 'gitTrackedWith {} null' 'lib.fileset.gitTrackedWith: Expected the second argument to be a path, but it'\''s a null instead.'
# The path has to contain a .git directory
expectFailure 'gitTracked ./.' 'lib.fileset.gitTracked: Expected the argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'
expectFailure 'gitTrackedWith {} ./.' 'lib.fileset.gitTrackedWith: Expected the second argument \('"$work"'\) to point to a local working tree of a Git repository, but it'\''s not.'
# recurseSubmodules has to be a boolean
expectFailure 'gitTrackedWith { recurseSubmodules = null; } ./.' 'lib.fileset.gitTrackedWith: Expected the attribute `recurseSubmodules` of the first argument to be a boolean, but it'\''s a null instead.'
# recurseSubmodules = true is not supported on all Nix versions
if [[ "$(nix-instantiate --eval --expr "$(prefixExpression) (versionAtLeast builtins.nixVersion _fetchGitSubmodulesMinver)")" == true ]]; then
fetchGitSupportsSubmodules=1
else
fetchGitSupportsSubmodules=
expectFailure 'gitTrackedWith { recurseSubmodules = true; } ./.' 'lib.fileset.gitTrackedWith: Setting the attribute `recurseSubmodules` to `true` is only supported for Nix version 2.4 and after, but Nix version [0-9.]+ is used.'
fi
# Checks that `gitTrackedWith` contains the same files as `git ls-files`
# for the current working directory.
# If --recurse-submodules is passed, the flag is passed through to `git ls-files`
# and as `recurseSubmodules` to `gitTrackedWith`
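# Usage: checkGitTrackedWith [--recurse-submodules]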
checkGitTrackedWith() {
if [[ "${1:-}" == "--recurse-submodules" ]]; then
gitLsFlags="--recurse-submodules"
gitTrackedArg="{ recurseSubmodules = true; }"
else
gitLsFlags=""
gitTrackedArg="{ }"
fi
# All files listed by `git ls-files`
expectedFiles=()
while IFS= read -r -d $'\0' file; do
# If there are submodules but --recurse-submodules isn't passed,
# `git ls-files` lists them as empty directories,
# we need to filter that out since we only want to check/count files
if [[ -f "$file" ]]; then
expectedFiles+=("$file")
fi
done < <(git ls-files -z $gitLsFlags)
storePath=$(expectStorePath 'toSource { root = ./.; fileset = gitTrackedWith '"$gitTrackedArg"' ./.; }')
# Check that each expected file is also in the store path with the same content
for expectedFile in "${expectedFiles[@]}"; do
if [[ ! -e "$storePath"/"$expectedFile" ]]; then
die "Expected file $expectedFile to exist in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
fi
if ! diff "$expectedFile" "$storePath"/"$expectedFile"; then
die "Expected file $expectedFile to have the same contents as in $storePath, but it doesn't.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
fi
done
# This is a cheap way to verify the inverse: that all files in the store path are also expected
# We just count the number of files in both and verify they're the same
actualFileCount=$(find "$storePath" -type f -printf . | wc -c)
if [[ "${#expectedFiles[@]}" != "$actualFileCount" ]]; then
die "Expected ${#expectedFiles[@]} files in $storePath, but got $actualFileCount.\nGit status:\n$(git status)\nStore path contents:\n$(find "$storePath")"
fi
}
# Runs checkGitTrackedWith with and without --recurse-submodules
# Allows testing both variants together
checkGitTracked() {
checkGitTrackedWith
if [[ -n "$fetchGitSupportsSubmodules" ]]; then
checkGitTrackedWith --recurse-submodules
fi
}
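# Create a fresh Git repository with repo-local user config and an initial empty commit.
# Usage: createGitRepo PATH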
createGitRepo() {
git init -q "$1"
# Only repo-local config
git -C "$1" config user.name "Nixpkgs"
git -C "$1" config user.email "nixpkgs@nixos.org"
# Get at least a HEAD commit, needed for older Nix versions
git -C "$1" commit -q --allow-empty -m "Empty commit"
}
# Check the error message for pure eval mode
createGitRepo .
expectFailure --simulate-pure-eval 'toSource { root = ./.; fileset = gitTracked ./.; }' 'lib.fileset.gitTracked: This function is currently not supported in pure evaluation mode, since it currently relies on `builtins.fetchGit`. See https://github.com/NixOS/nix/issues/9292.'
expectFailure --simulate-pure-eval 'toSource { root = ./.; fileset = gitTrackedWith {} ./.; }' 'lib.fileset.gitTrackedWith: This function is currently not supported in pure evaluation mode, since it currently relies on `builtins.fetchGit`. See https://github.com/NixOS/nix/issues/9292.'
rm -rf -- *
# Go through all stages of Git files
# See https://www.git-scm.com/book/en/v2/Git-Basics-Recording-Changes-to-the-Repository
# Empty repository
createGitRepo .
checkGitTracked
# Untracked file
echo a > a
checkGitTracked
# Staged file
git add a
checkGitTracked
# Committed file
git commit -q -m "Added a"
checkGitTracked
# Edited file
echo b > a
checkGitTracked
# Removed file
git rm -f -q a
checkGitTracked
rm -rf -- *
# gitignored file
createGitRepo .
echo a > .gitignore
touch a
git add -A
checkGitTracked
# Add it regardless (needs -f)
git add -f a
checkGitTracked
rm -rf -- *
# Directory
createGitRepo .
mkdir -p d1/d2/d3
touch d1/d2/d3/a
git add d1
checkGitTracked
rm -rf -- *
# Submodules
createGitRepo .
createGitRepo sub
# Untracked submodule
git -C sub commit -q --allow-empty -m "Empty commit"
checkGitTracked
# Tracked submodule
git submodule add ./sub sub >/dev/null
checkGitTracked
# Untracked file
echo a > sub/a
checkGitTracked
# Staged file
git -C sub add a
checkGitTracked
# Committed file
git -C sub commit -q -m "Add a"
checkGitTracked
# Changed file
echo b > sub/b
checkGitTracked
# Removed file
git -C sub rm -f -q a
checkGitTracked
rm -rf -- *
# TODO: Once we have combinators and a property testing library, derive property tests from https://en.wikipedia.org/wiki/Algebra_of_sets
echo >&2 tests ok

View file

@ -45,7 +45,7 @@ rec {
}
```
This is where `fix` comes in, it contains the syntactic that's not in `f` anymore.
This is where `fix` comes in, it contains the syntactic recursion that's not in `f` anymore.
```nix
nix-repl> fix = f:

View file

@ -516,17 +516,17 @@ in mkLicense lset) ({
generaluser = {
fullName = "GeneralUser GS License v2.0";
url = "http://www.schristiancollins.com/generaluser.php"; # license included in sources
url = "https://www.schristiancollins.com/generaluser.php"; # license included in sources
};
gfl = {
fullName = "GUST Font License";
url = "http://www.gust.org.pl/fonts/licenses/GUST-FONT-LICENSE.txt";
url = "https://www.gust.org.pl/projects/e-foundry/licenses/GUST-FONT-LICENSE.txt";
};
gfsl = {
fullName = "GUST Font Source License";
url = "http://www.gust.org.pl/fonts/licenses/GUST-FONT-SOURCE-LICENSE.txt";
url = "https://www.gust.org.pl/projects/e-foundry/licenses/GUST-FONT-SOURCE-LICENSE.txt";
};
gpl1Only = {
@ -613,7 +613,7 @@ in mkLicense lset) ({
info-zip = {
spdxId = "Info-ZIP";
fullName = "Info-ZIP License";
url = "http://www.info-zip.org/pub/infozip/license.html";
url = "https://infozip.sourceforge.net/license.html";
};
inria-compcert = {
@ -877,6 +877,21 @@ in mkLicense lset) ({
fullName = "Non-Profit Open Software License 3.0";
};
nvidiaCuda = {
shortName = "CUDA EULA";
fullName = "CUDA Toolkit End User License Agreement (EULA)";
url = "https://docs.nvidia.com/cuda/eula/index.html#cuda-toolkit-supplement-license-agreement";
free = false;
};
nvidiaCudaRedist = {
shortName = "CUDA EULA";
fullName = "CUDA Toolkit End User License Agreement (EULA)";
url = "https://docs.nvidia.com/cuda/eula/index.html#cuda-toolkit-supplement-license-agreement";
free = false;
redistributable = true;
};
obsidian = {
fullName = "Obsidian End User Agreement";
url = "https://obsidian.md/eula";
@ -1167,7 +1182,7 @@ in mkLicense lset) ({
xfig = {
fullName = "xfig";
url = "http://mcj.sourceforge.net/authors.html#xfig"; # https is broken
url = "https://mcj.sourceforge.net/authors.html#xfig";
};
zlib = {

View file

@ -821,6 +821,19 @@ rec {
*/
unique = foldl' (acc: e: if elem e acc then acc else acc ++ [ e ]) [];
/* Check if list contains only unique elements. O(n^2) complexity.
Type: allUnique :: [a] -> bool
Example:
allUnique [ 3 2 3 4 ]
=> false
allUnique [ 3 2 4 1 ]
=> true
*/
allUnique = list: (length (unique list) == length list);
/* Intersects list 'e' and another list. O(nm) complexity.
Example:

View file

@ -162,5 +162,12 @@ rec {
getExe' pkgs.imagemagick "convert"
=> "/nix/store/5rs48jamq7k6sal98ymj9l4k2bnwq515-imagemagick-7.1.1-15/bin/convert"
*/
getExe' = x: y: "${lib.getBin x}/bin/${y}";
getExe' = x: y:
assert lib.assertMsg (lib.isDerivation x)
"lib.meta.getExe': The first argument is of type ${builtins.typeOf x}, but it should be a derivation instead.";
assert lib.assertMsg (lib.isString y)
"lib.meta.getExe': The second argument is of type ${builtins.typeOf y}, but it should be a string instead.";
assert lib.assertMsg (builtins.length (lib.splitString "/" y) == 1)
"lib.meta.getExe': The second argument \"${y}\" is a nested path with a \"/\" character, but it should just be the name of the executable instead.";
"${lib.getBin x}/bin/${y}";
}

View file

@ -144,6 +144,20 @@ rec {
*/
concatLines = concatMapStrings (s: s + "\n");
/*
Replicate a string n times,
and concatenate the parts into a new string.
Type: replicate :: int -> string -> string
Example:
replicate 3 "v"
=> "vvv"
replicate 5 "hello"
=> "hellohellohellohellohello"
*/
replicate = n: s: concatStrings (lib.lists.replicate n s);
/* Construct a Unix-style, colon-separated search path consisting of
the given `subDir` appended to each of the given paths.

View file

@ -43,6 +43,10 @@ rec {
elaborate = args': let
args = if lib.isString args' then { system = args'; }
else args';
# TODO: deprecate args.rustc in favour of args.rust after 23.05 is EOL.
rust = assert !(args ? rust && args ? rustc); args.rust or args.rustc or {};
final = {
# Prefer to parse `config` as it is strictly more informative.
parsed = parse.mkSystemFromString (if args ? config then args.config else args.system);
@ -159,9 +163,101 @@ rec {
({
linux-kernel = args.linux-kernel or {};
gcc = args.gcc or {};
rustc = args.rustc or {};
} // platforms.select final)
linux-kernel gcc rustc;
linux-kernel gcc;
# TODO: remove after 23.05 is EOL, with an error pointing to the rust.* attrs.
rustc = args.rustc or {};
rust = rust // {
# Once args.rustc.platform.target-family is deprecated and
# removed, there will no longer be any need to modify any
# values from args.rust.platform, so we can drop all the
# "args ? rust" etc. checks, and merge args.rust.platform in
# /after/.
platform = rust.platform or {} // {
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_arch
arch =
/**/ if rust ? platform then rust.platform.arch
else if final.isAarch32 then "arm"
else if final.isMips64 then "mips64" # never add "el" suffix
else if final.isPower64 then "powerpc64" # never add "le" suffix
else final.parsed.cpu.name;
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_os
os =
/**/ if rust ? platform then rust.platform.os or "none"
else if final.isDarwin then "macos"
else final.parsed.kernel.name;
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_family
target-family =
/**/ if args ? rust.platform.target-family then args.rust.platform.target-family
else if args ? rustc.platform.target-family
then
(
# Since https://github.com/rust-lang/rust/pull/84072
# `target-family` is a list instead of single value.
let
f = args.rustc.platform.target-family;
in
if builtins.isList f then f else [ f ]
)
else lib.optional final.isUnix "unix"
++ lib.optional final.isWindows "windows";
# https://doc.rust-lang.org/reference/conditional-compilation.html#target_vendor
vendor = let
inherit (final.parsed) vendor;
in rust.platform.vendor or {
"w64" = "pc";
}.${vendor.name} or vendor.name;
};
# The name of the rust target, even if it is custom. Adjustments are
# because rust has slightly different naming conventions than we do.
rustcTarget = let
inherit (final.parsed) cpu kernel abi;
cpu_ = rust.platform.arch or {
"armv7a" = "armv7";
"armv7l" = "armv7";
"armv6l" = "arm";
"armv5tel" = "armv5te";
"riscv64" = "riscv64gc";
}.${cpu.name} or cpu.name;
vendor_ = final.rust.platform.vendor;
in rust.config
or "${cpu_}-${vendor_}-${kernel.name}${lib.optionalString (abi.name != "unknown") "-${abi.name}"}";
# The name of the rust target if it is standard, or the json file
# containing the custom target spec.
rustcTargetSpec =
/**/ if rust ? platform
then builtins.toFile (final.rust.rustcTarget + ".json") (builtins.toJSON rust.platform)
else final.rust.rustcTarget;
# The name of the rust target if it is standard, or the
# basename of the file containing the custom target spec,
# without the .json extension.
#
# This is the name used by Cargo for target subdirectories.
cargoShortTarget =
lib.removeSuffix ".json" (baseNameOf "${final.rust.rustcTargetSpec}");
# When used as part of an environment variable name, triples are
# uppercased and have all hyphens replaced by underscores:
#
# https://github.com/rust-lang/cargo/pull/9169
# https://github.com/rust-lang/cargo/issues/8285#issuecomment-634202431
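# For example, "x86_64-unknown-linux-gnu" becomes "X86_64_UNKNOWN_LINUX_GNU".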
cargoEnvVarTarget =
lib.strings.replaceStrings ["-"] ["_"]
(lib.strings.toUpper final.rust.cargoShortTarget);
# True if the target is no_std
# https://github.com/rust-lang/rust/blob/2e44c17c12cec45b6a682b1e53a04ac5b5fcc9d2/src/bootstrap/config.rs#L415-L421
isNoStdTarget =
builtins.any (t: lib.hasInfix t final.rust.rustcTarget) ["-none" "nvptx" "switch" "-uefi"];
};
linuxArch =
if final.isAarch32 then "arm"

View file

@ -115,6 +115,7 @@ rec {
};
gnu64 = { config = "x86_64-unknown-linux-gnu"; };
gnu64_simplekernel = gnu64 // platforms.pc_simplekernel; # see test/cross/default.nix
gnu32 = { config = "i686-unknown-linux-gnu"; };
musl64 = { config = "x86_64-unknown-linux-musl"; };

View file

@ -100,6 +100,32 @@ rec {
];
};
# given two patterns, return a pattern which is their logical AND.
# Since a pattern is a list-of-disjuncts, this needs to
# distribute the AND over the disjuncts: the result is the pairwise
# conjunction of every disjunct from one pattern with every disjunct from the other.
patternLogicalAnd = pat1_: pat2_:
let
# patterns can be either a list or a (bare) singleton; turn
# them into singletons for uniform handling
pat1 = lib.toList pat1_;
pat2 = lib.toList pat2_;
in
lib.concatMap (attr1:
map (attr2:
lib.recursiveUpdateUntil
(path: subattr1: subattr2:
if (builtins.intersectAttrs subattr1 subattr2) == {} || subattr1 == subattr2
then true
else throw ''
pattern conflict at path ${toString path}:
${builtins.toJSON subattr1}
${builtins.toJSON subattr2}
'')
attr1
attr2
)
pat2)
pat1;
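# Example (illustrative, with simplified flat patterns):
#   patternLogicalAnd { bits = 64; } [ { isLinux = true; } { isDarwin = true; } ]
#   => [ { bits = 64; isLinux = true; } { bits = 64; isDarwin = true; } ]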
matchAnyAttrs = patterns:
if builtins.isList patterns then attrs: any (pattern: matchAttrs pattern attrs) patterns
else matchAttrs patterns;

View file

@ -29,6 +29,15 @@ let
assert type.check value;
setType type.name ({ inherit name; } // value));
# gnu-config will ignore the portion of a triple matching the
# regex `e?abi.*$` when determining the validity of a triple. In
# other words, `i386-linuxabichickenlips` is a valid triple.
removeAbiSuffix = x:
let match = builtins.match "(.*)e?abi.*" x;
in if match==null
then x
else lib.elemAt match 0;
in
rec {
@ -466,7 +475,7 @@ rec {
else vendors.unknown;
kernel = if hasPrefix "darwin" args.kernel then getKernel "darwin"
else if hasPrefix "netbsd" args.kernel then getKernel "netbsd"
else getKernel args.kernel;
else getKernel (removeAbiSuffix args.kernel);
abi =
/**/ if args ? abi then getAbi args.abi
else if isLinux parsed || isWindows parsed then

View file

@@ -64,8 +64,14 @@ expectSuccess "pathType $PWD/directory" '"directory"'
expectSuccess "pathType $PWD/regular" '"regular"'
expectSuccess "pathType $PWD/symlink" '"symlink"'
expectSuccess "pathType $PWD/fifo" '"unknown"'
# Different errors depending on whether the builtins.readFilePath primop is available or not
expectFailure "pathType $PWD/non-existent" "error: (evaluation aborted with the following error message: 'lib.filesystem.pathType: Path $PWD/non-existent does not exist.'|getting status of '$PWD/non-existent': No such file or directory)"
# Only check error message when a Nixpkgs-specified error is thrown,
# which is only the case when `readFileType` is not available
# and the fallback implementation needs to be used.
if [[ "$(nix-instantiate --eval --expr 'builtins ? readFileType')" == false ]]; then
expectFailure "pathType $PWD/non-existent" \
"error: evaluation aborted with the following error message: 'lib.filesystem.pathType: Path $PWD/non-existent does not exist.'"
fi
expectSuccess "pathIsDirectory /." "true"
expectSuccess "pathIsDirectory $PWD/directory" "true"

View file

@@ -191,6 +191,11 @@ runTests {
expected = "a\nb\nc\n";
};
testReplicateString = {
expr = strings.replicate 5 "hello";
expected = "hellohellohellohellohello";
};
testSplitStringsSimple = {
expr = strings.splitString "." "a.b.c.d";
expected = [ "a" "b" "c" "d" ];
@@ -721,6 +726,15 @@ runTests {
expected = 7;
};
testAllUnique_true = {
expr = allUnique [ 3 2 4 1 ];
expected = true;
};
testAllUnique_false = {
expr = allUnique [ 3 2 3 4 ];
expected = false;
};
# ATTRSETS
testConcatMapAttrs = {
@@ -1906,4 +1920,32 @@ runTests {
expr = (with types; either int (listOf (either bool str))).description;
expected = "signed integer or list of (boolean or string)";
};
# Meta
testGetExe'Output = {
expr = getExe' {
type = "derivation";
out = "somelonghash";
bin = "somelonghash";
} "executable";
expected = "somelonghash/bin/executable";
};
testGetExeOutput = {
expr = getExe {
type = "derivation";
out = "somelonghash";
bin = "somelonghash";
meta.mainProgram = "mainProgram";
};
expected = "somelonghash/bin/mainProgram";
};
testGetExe'FailureFirstArg = testingThrow (
getExe' "not a derivation" "executable"
);
testGetExe'FailureSecondArg = testingThrow (
getExe' { type = "derivation"; } "dir/executable"
);
}

View file

@@ -25,11 +25,13 @@ let
];
nativeBuildInputs = [
nix
pkgs.gitMinimal
] ++ lib.optional pkgs.stdenv.isLinux pkgs.inotify-tools;
strictDeps = true;
} ''
datadir="${nix}/share"
export TEST_ROOT=$(pwd)/test-tmp
export HOME=$(mktemp -d)
export NIX_BUILD_HOOK=
export NIX_CONF_DIR=$TEST_ROOT/etc
export NIX_LOCALSTATE_DIR=$TEST_ROOT/var

View file

@@ -448,6 +448,40 @@ rec {
isFunction = f: builtins.isFunction f ||
(f ? __functor && isFunction (f.__functor f));
/*
`mirrorFunctionArgs f g` creates a new function `g'` with the same behavior as `g` (`g' x == g x`)
but with its function arguments mirroring `f` (`lib.functionArgs g' == lib.functionArgs f`).
Type:
mirrorFunctionArgs :: (a -> b) -> (a -> c) -> (a -> c)
Example:
addab = {a, b}: a + b
addab { a = 2; b = 4; }
=> 6
lib.functionArgs addab
=> { a = false; b = false; }
addab1 = attrs: addab attrs + 1
addab1 { a = 2; b = 4; }
=> 7
lib.functionArgs addab1
=> { }
addab1' = lib.mirrorFunctionArgs addab addab1
addab1' { a = 2; b = 4; }
=> 7
lib.functionArgs addab1'
=> { a = false; b = false; }
*/
mirrorFunctionArgs =
# Function to provide the argument metadata
f:
let
fArgs = functionArgs f;
in
# Function to set the argument metadata to
g:
setFunctionArgs g fArgs;
/*
Turns any non-callable values into constant functions.
Returns callable values as is.

View file

@@ -371,6 +371,15 @@
githubId = 124545;
name = "Anthony Cowley";
};
acuteenvy = {
matrix = "@acuteenvy:matrix.org";
github = "acuteenvy";
githubId = 126529524;
name = "Lena";
keys = [{
fingerprint = "CE85 54F7 B9BC AC0D D648 5661 AB5F C04C 3C94 443F";
}];
};
adamcstephens = {
email = "happy.plan4249@valkor.net";
matrix = "@adam:valkor.net";
@@ -446,6 +455,13 @@
githubId = 25236206;
name = "Adrian Dole";
};
adriangl = {
email = "adrian@lauterer.it";
matrix = "@adriangl:pvv.ntnu.no";
github = "adrlau";
githubId = 25004152;
name = "Adrian Gunnar Lauterer";
};
AdsonCicilioti = {
name = "Adson Cicilioti";
email = "adson.cicilioti@live.com";
@@ -533,6 +549,12 @@
githubId = 732652;
name = "Andreas Herrmann";
};
ahoneybun = {
email = "aaron@system76.com";
github = "ahoneybun";
githubId = 4884946;
name = "Aaron Honeycutt";
};
ahrzb = {
email = "ahrzb5@gmail.com";
github = "ahrzb";
@@ -1274,6 +1296,9 @@
github = "antonmosich";
githubId = 27223336;
name = "Anton Mosich";
keys = [ {
fingerprint = "F401 287C 324F 0A1C B321 657B 9B96 97B8 FB18 7D14";
} ];
};
antono = {
email = "self@antono.info";
@@ -1383,6 +1408,12 @@
githubId = 59743220;
name = "Vinícius Müller";
};
arcuru = {
email = "patrick@jackson.dev";
github = "arcuru";
githubId = 160646;
name = "Patrick Jackson";
};
ardumont = {
email = "eniotna.t@gmail.com";
github = "ardumont";
@@ -1395,6 +1426,12 @@
githubId = 58516559;
name = "Alexander Rezvov";
};
argrat = {
email = "n.bertazzo@protonmail.com";
github = "argrat";
githubId = 98821629;
name = "Nicolò Bertazzo";
};
arian-d = {
email = "arianxdehghani@gmail.com";
github = "arian-d";
@@ -1762,12 +1799,6 @@
githubId = 1217745;
name = "Aldwin Vlasblom";
};
aveltras = {
email = "romain.viallard@outlook.fr";
github = "aveltras";
githubId = 790607;
name = "Romain Viallard";
};
averelld = {
email = "averell+nixos@rxd4.com";
github = "averelld";
@@ -2608,12 +2639,6 @@
githubId = 200617;
name = "Ben Sima";
};
bstrik = {
email = "dutchman55@gmx.com";
github = "bstrik";
githubId = 7716744;
name = "Berno Strik";
};
btlvr = {
email = "btlvr@protonmail.com";
github = "btlvr";
@@ -2766,6 +2791,12 @@
githubId = 7435854;
name = "Victor Calvert";
};
camelpunch = {
email = "me@andrewbruce.net";
github = "camelpunch";
githubId = 141733;
name = "Andrew Bruce";
};
cameronfyfe = {
email = "cameron.j.fyfe@gmail.com";
github = "cameronfyfe";
@@ -3045,6 +3076,9 @@
email = "chayleaf-nix@pavluk.org";
github = "chayleaf";
githubId = 9590981;
keys = [{
fingerprint = "4314 3701 154D 9E5F 7051 7ECF 7817 1AD4 6227 E68E";
}];
matrix = "@chayleaf:matrix.pavluk.org";
name = "Anna Pavlyuk";
};
@@ -3054,6 +3088,12 @@
githubId = 1689801;
name = "Mikhail Chekan";
};
chen = {
email = "i@cuichen.cc";
github = "cu1ch3n";
githubId = 80438676;
name = "Chen Cui";
};
ChengCat = {
email = "yu@cheng.cat";
github = "ChengCat";
@@ -3667,6 +3707,12 @@
githubId = 1222362;
name = "Matías Lang";
};
criyle = {
email = "i+nixos@goj.ac";
name = "Yang Gao";
githubId = 6821729;
github = "criyle";
};
CRTified = {
email = "carl.schneider+nixos@rub.de";
matrix = "@schnecfk:ruhr-uni-bochum.de";
@@ -3677,6 +3723,15 @@
fingerprint = "2017 E152 BB81 5C16 955C E612 45BC C1E2 709B 1788";
}];
};
Cryolitia = {
name = "Beiyan Cryolitia";
email = "Cryolitia@gmail.com";
github = "Cryolitia";
githubId = 23723294;
keys = [{
fingerprint = "1C3C 6547 538D 7152 310C 0EEA 84DD 0C01 30A5 4DF7";
}];
};
cryptix = {
email = "cryptix@riseup.net";
github = "cryptix";
@@ -3880,12 +3935,25 @@
githubId = 50051176;
name = "Daniel Rolls";
};
danielsidhion = {
email = "nixpkgs@sidhion.com";
github = "DanielSidhion";
githubId = 160084;
name = "Daniel Sidhion";
};
daniyalsuri6 = {
email = "daniyal.suri@gmail.com";
github = "daniyalsuri6";
githubId = 107034852;
name = "Daniyal Suri";
};
dannixon = {
email = "dan@dan-nixon.com";
github = "DanNixon";
githubId = 4037377;
name = "Dan Nixon";
matrix = "@dannixon:matrix.org";
};
dansbandit = {
github = "dansbandit";
githubId = 4530687;
@@ -4174,6 +4242,12 @@
githubId = 12224254;
name = "Delta";
};
delta231 = {
email = "swstkbaranwal@gmail.com";
github = "Delta456";
githubId = 28479139;
name = "Swastik Baranwal";
};
deltadelta = {
email = "contact@libellules.eu";
name = "Dara Ly";
@@ -4192,6 +4266,12 @@
githubId = 5503422;
name = "Dmitriy Demin";
};
demine = {
email = "riches_tweaks0o@icloud.com";
github = "demine0";
githubId = 51992962;
name = "Nikita Demin";
};
demize = {
email = "johannes@kyriasis.com";
github = "kyrias";
@@ -4510,6 +4590,12 @@
githubId = 1708810;
name = "Daniel Vianna";
};
dmytrokyrychuk = {
email = "dmytro@kyrych.uk";
github = "dmytrokyrychuk";
githubId = 699961;
name = "Dmytro Kyrychuk";
};
dnr = {
email = "dnr@dnr.im";
github = "dnr";
@@ -5278,6 +5364,13 @@
fingerprint = "F178 B4B4 6165 6D1B 7C15 B55D 4029 3358 C7B9 326B";
}];
};
ericthemagician = {
email = "eric@ericyen.com";
matrix = "@eric:jupiterbroadcasting.com";
github = "EricTheMagician";
githubId = 323436;
name = "Eric Yen";
};
erikarvstedt = {
email = "erik.arvstedt@gmail.com";
matrix = "@erikarvstedt:matrix.org";
@@ -5938,6 +6031,11 @@
githubId = 119691;
name = "Michael Gough";
};
franciscod = {
github = "franciscod";
githubId = 726447;
name = "Francisco Demartino";
};
franzmondlichtmann = {
name = "Franz Schroepf";
email = "franz-schroepf@t-online.de";
@@ -6014,6 +6112,10 @@
github = "frogamic";
githubId = 10263813;
name = "Dominic Shelton";
matrix = "@frogamic:beeper.com";
keys = [{
fingerprint = "779A 7CA8 D51C C53A 9C51 43F7 AAE0 70F0 67EC 00A5";
}];
};
frontsideair = {
email = "photonia@gmail.com";
@@ -6080,7 +6182,7 @@
};
fugi = {
email = "me@fugi.dev";
github = "FugiMuffi";
github = "fugidev";
githubId = 21362942;
name = "Fugi";
};
@@ -6189,6 +6291,16 @@
githubId = 45048741;
name = "Alwanga Oyango";
};
galaxy = {
email = "galaxy@dmc.chat";
matrix = "@galaxy:mozilla.org";
name = "The Galaxy";
github = "ga1aksy";
githubId = 148551648;
keys = [{
fingerprint = "48CA 3873 9E9F CA8E 76A0 835A E3DE CF85 4212 E1EA";
}];
};
gal_bolle = {
email = "florent.becker@ens-lyon.org";
github = "FlorentBecker";
@@ -6437,6 +6549,12 @@
githubId = 1713676;
name = "Luis G. Torres";
};
giomf = {
email = "giomf@mailbox.org";
github = "giomf";
githubId = 35076723;
name = "Guillaume Fournier";
};
giorgiga = {
email = "giorgio.gallo@bitnic.it";
github = "giorgiga";
@@ -6621,6 +6739,12 @@
githubId = 4656860;
name = "Gaute Ravndal";
};
gray-heron = {
email = "ave+nix@cezar.info";
github = "gray-heron";
githubId = 7032646;
name = "Cezary Siwek";
};
graysonhead = {
email = "grayson@graysonhead.net";
github = "graysonhead";
@@ -7218,6 +7342,7 @@
};
hubble = {
name = "Hubble the Wolverine";
email = "hubblethewolverine@gmail.com";
matrix = "@hubofeverything:bark.lgbt";
github = "the-furry-hubofeverything";
githubId = 53921912;
@@ -7392,6 +7517,13 @@
githubId = 1550265;
name = "Dominic Steinitz";
};
iFreilicht = {
github = "iFreilicht";
githubId = 9742635;
matrix = "@ifreilicht:matrix.org";
email = "nixpkgs@mail.felix-uhl.de";
name = "Felix Uhl";
};
ifurther = {
github = "ifurther";
githubId = 55025025;
@@ -7421,6 +7553,12 @@
githubId = 25505957;
name = "Ilian";
};
iliayar = {
email = "iliayar3@gmail.com";
github = "iliayar";
githubId = 17529355;
name = "Ilya Yaroshevskiy";
};
ilikeavocadoes = {
email = "ilikeavocadoes@hush.com";
github = "ilikeavocadoes";
@@ -7589,6 +7727,12 @@
githubId = 88038050;
name = "Souvik Sen";
};
iogamaster = {
email = "iogamastercode+nixpkgs@gmail.com";
name = "IogaMaster";
github = "iogamaster";
githubId = 67164465;
};
ionutnechita = {
email = "ionut_n2001@yahoo.com";
github = "ionutnechita";
@@ -7833,6 +7977,12 @@
githubId = 2212681;
name = "Jakub Grzgorz Sokołowski";
};
jakuzure = {
email = "shin@posteo.jp";
github = "jakuzure";
githubId = 11823547;
name = "jakuzure";
};
jali-clarke = {
email = "jinnah.ali-clarke@outlook.com";
name = "Jinnah Ali-Clarke";
@@ -7893,6 +8043,12 @@
githubId = 488556;
name = "Javier Aguirre";
};
javimerino = {
email = "merino.jav@gmail.com";
name = "Javi Merino";
github = "JaviMerino";
githubId = 44926;
};
jayesh-bhoot = {
name = "Jayesh Bhoot";
email = "jb@jayeshbhoot.com";
@@ -8152,6 +8308,15 @@
githubId = 18501;
name = "Julien Langlois";
};
jfly = {
name = "Jeremy Fleischman";
email = "jeremyfleischman@gmail.com";
github = "jfly";
githubId = 277474;
keys = [{
fingerprint = "F1F1 3395 8E8E 9CC4 D9FC 9647 1931 9CD8 416A 642B";
}];
};
jfrankenau = {
email = "johannes@frankenau.net";
github = "jfrankenau";
@@ -8289,6 +8454,12 @@
githubId = 3081095;
name = "Jürgen Keck";
};
jl178 = {
email = "jeredlittle1996@gmail.com";
github = "jl178";
githubId = 72664723;
name = "Jered Little";
};
jlamur = {
email = "contact@juleslamur.fr";
github = "jlamur";
@@ -9737,6 +9908,11 @@
}];
name = "Joseph LaFreniere";
};
lagoja = {
github = "Lagoja";
githubId = 750845;
name = "John Lago";
};
laikq = {
email = "gwen@quasebarth.de";
github = "laikq";
@@ -10388,6 +10564,12 @@
githubId = 2487922;
name = "Lars Jellema";
};
ludat = {
email = "lucas6246@gmail.com";
github = "ludat";
githubId = 4952044;
name = "Lucas David Traverso";
};
ludo = {
email = "ludo@gnu.org";
github = "civodul";
@@ -10922,6 +11104,12 @@
githubId = 29855073;
name = "Michael Colicchia";
};
massimogengarelli = {
email = "massimo.gengarelli@gmail.com";
github = "massix";
githubId = 585424;
name = "Massimo Gengarelli";
};
matejc = {
email = "cotman.matej@gmail.com";
github = "matejc";
@@ -11043,6 +11231,12 @@
githubId = 11810057;
name = "Matt Snider";
};
matusf = {
email = "matus.ferech@gmail.com";
github = "matusf";
githubId = 18228995;
name = "Matúš Ferech";
};
maurer = {
email = "matthew.r.maurer+nix@gmail.com";
github = "maurer";
@@ -11554,6 +11748,12 @@
githubId = 34864484;
name = "Mikael Fangel";
};
mikecm = {
email = "mikecmcleod@gmail.com";
github = "MaxwellDupre";
githubId = 14096356;
name = "Michael McLeod";
};
mikefaille = {
email = "michael@faille.io";
github = "mikefaille";
@@ -11666,6 +11866,13 @@
githubId = 149558;
name = "Merlin Gaillard";
};
mirkolenz = {
name = "Mirko Lenz";
email = "mirko@mirkolenz.com";
matrix = "@mlenz:matrix.org";
github = "mirkolenz";
githubId = 5160954;
};
mirrexagon = {
email = "mirrexagon@mirrexagon.com";
github = "mirrexagon";
@@ -12033,12 +12240,30 @@
github = "MrTarantoga";
githubId = 53876219;
};
mrtnvgr = {
name = "Egor Martynov";
github = "mrtnvgr";
githubId = 48406064;
keys = [{
fingerprint = "6FAD DB43 D5A5 FE52 6835 0943 5B33 79E9 81EF 48B1";
}];
};
mrVanDalo = {
email = "contact@ingolf-wagner.de";
github = "mrVanDalo";
githubId = 839693;
name = "Ingolf Wanger";
};
msanft = {
email = "moritz.sanft@outlook.de";
matrix = "@msanft:matrix.org";
name = "Moritz Sanft";
github = "msanft";
githubId = 58110325;
keys = [{
fingerprint = "3CAC 1D21 3D97 88FF 149A E116 BB8B 30F5 A024 C31C";
}];
};
mschristiansen = {
email = "mikkel@rheosystems.com";
github = "mschristiansen";
@@ -12269,6 +12494,11 @@
fingerprint = "9E6A 25F2 C1F2 9D76 ED00 1932 1261 173A 01E1 0298";
}];
};
nadir-ishiguro = {
github = "nadir-ishiguro";
githubId = 23151917;
name = "nadir-ishiguro";
};
nadrieril = {
email = "nadrieril@gmail.com";
github = "Nadrieril";
@@ -12312,6 +12542,11 @@
githubId = 6709831;
name = "Jake Hill";
};
nasageek = {
github = "NasaGeek";
githubId = 474937;
name = "Chris Roberts";
};
nasirhm = {
email = "nasirhussainm14@gmail.com";
github = "nasirhm";
@@ -12702,13 +12937,6 @@
fingerprint = "9B1A 7906 5D2F 2B80 6C8A 5A1C 7D2A CDAF 4653 CF28";
}];
};
ninjatrappeur = {
email = "felix@alternativebit.fr";
matrix = "@ninjatrappeur:matrix.org";
github = "NinjaTrappeur";
githubId = 1219785;
name = "Félix Baylac-Jacqué";
};
nintron = {
email = "nintron@sent.com";
github = "Nintron27";
@@ -12748,6 +12976,11 @@
githubId = 66913205;
name = "Rick Sanchez";
};
nix-julia = {
name = "nix-julia";
github = "nix-julia";
githubId = 149073815;
};
nixy = {
email = "nixy@nixy.moe";
github = "nixy";
@@ -13251,6 +13484,15 @@
githubId = 75299;
name = "Malcolm Matalka";
};
orhun = {
email = "orhunparmaksiz@gmail.com";
github = "orhun";
githubId = 24392180;
name = "Orhun Parmaksız";
keys = [{
fingerprint = "165E 0FF7 C48C 226E 1EC3 63A7 F834 2482 4B3E 4B90";
}];
};
orichter = {
email = "richter-oliver@gmx.net";
github = "ORichterSec";
@@ -13495,12 +13737,6 @@
githubId = 6931743;
name = "pasqui23";
};
patricksjackson = {
email = "patrick@jackson.dev";
github = "patricksjackson";
githubId = 160646;
name = "Patrick Jackson";
};
patryk27 = {
email = "pwychowaniec@pm.me";
github = "Patryk27";
@@ -13522,6 +13758,11 @@
githubId = 15645854;
name = "Brad Christensen";
};
paumr = {
github = "paumr";
name = "Michael Bergmeister";
githubId = 53442728;
};
paveloom = {
email = "paveloom@riseup.net";
github = "paveloom";
@@ -13583,6 +13824,7 @@
pbsds = {
name = "Peder Bergebakken Sundt";
email = "pbsds@hotmail.com";
matrix = "@pederbs:pvv.ntnu.no";
github = "pbsds";
githubId = 140964;
};
@@ -13634,6 +13876,12 @@
githubId = 152312;
name = "Periklis Tsirakidis";
};
perstark = {
email = "perstark.se@gmail.com";
github = "perstarkse";
githubId = 63069986;
name = "Per Stark";
};
petercommand = {
email = "petercommand@gmail.com";
github = "petercommand";
@@ -13751,6 +13999,12 @@
githubId = 9267430;
name = "Philipp Mildenberger";
};
philiptaron = {
email = "philip.taron@gmail.com";
github = "philiptaron";
githubId = 43863;
name = "Philip Taron";
};
phip1611 = {
email = "phip1611@gmail.com";
github = "phip1611";
@@ -13787,6 +14041,13 @@
githubId = 627831;
name = "Hoang Xuan Phu";
};
picnoir = {
email = "felix@alternativebit.fr";
matrix = "@picnoir:alternativebit.fr";
github = "picnoir";
githubId = 1219785;
name = "Félix Baylac-Jacqué";
};
piegames = {
name = "piegames";
email = "nix@piegames.de";
@@ -13902,6 +14163,12 @@
githubId = 610615;
name = "Chih-Mao Chen";
};
pks = {
email = "ps@pks.im";
github = "pks-t";
githubId = 4056630;
name = "Patrick Steinhardt";
};
plabadens = {
name = "Pierre Labadens";
email = "labadens.pierre+nixpkgs@gmail.com";
@@ -13938,12 +14205,25 @@
githubId = 7839004;
name = "Dmitriy Pleshevskiy";
};
pluiedev = {
email = "hi@pluie.me";
github = "pluiedev";
githubId = 22406910;
name = "Leah Amelia Chen";
};
plumps = {
email = "maks.bronsky@web.de";
github = "plumps";
githubId = 13000278;
name = "Maksim Bronsky";
};
plusgut = {
name = "Carlo Jeske";
email = "carlo.jeske+nixpkgs@webentwickler2-0.de";
github = "plusgut";
githubId = 277935;
matrix = "@plusgut5:matrix.org";
};
PlushBeaver = {
name = "Dmitry Kozlyuk";
email = "dmitry.kozliuk+nixpkgs@gmail.com";
@@ -14436,7 +14716,7 @@
};
quantenzitrone = {
email = "quantenzitrone@protonmail.com";
github = "Quantenzitrone";
github = "quantenzitrone";
githubId = 74491719;
matrix = "@quantenzitrone:matrix.org";
name = "quantenzitrone";
@@ -14630,6 +14910,12 @@
githubId = 145816;
name = "David McKay";
};
rayslash = {
email = "stevemathewjoy@tutanota.com";
github = "rayslash";
githubId = 45141270;
name = "Steve Mathew Joy";
};
razvan = {
email = "razvan.panda@gmail.com";
github = "freeman42x";
@@ -14804,6 +15090,12 @@
githubId = 165283;
name = "Alexey Kutepov";
};
rexxDigital = {
email = "joellarssonpriv@gmail.com";
github = "rexxDigital";
githubId = 44014925;
name = "Rexx Larsson";
};
rgnns = {
email = "jglievano@gmail.com";
github = "rgnns";
@@ -16042,6 +16334,12 @@
fingerprint = "AB63 4CD9 3322 BD42 6231 F764 C404 1EA6 B326 33DE";
}];
};
shivaraj-bh = {
email = "sbh69840@gmail.com";
name = "Shivaraj B H";
github = "shivaraj-bh";
githubId = 23645788;
};
shlevy = {
email = "shea@shealevy.com";
github = "shlevy";
@@ -16260,11 +16558,10 @@
githubId = 158321;
name = "Stewart Mackenzie";
};
skeidel = {
email = "svenkeidel@gmail.com";
github = "svenkeidel";
githubId = 266500;
name = "Sven Keidel";
skovati = {
github = "skovati";
githubId = 49844593;
name = "skovati";
};
skykanin = {
github = "skykanin";
@@ -16386,6 +16683,16 @@
github = "SnO2WMaN";
githubId = 15155608;
};
snowflake = {
email = "snowflake@pissmail.com";
name = "Snowflake";
github = "snf1k";
githubId = 149651684;
matrix = "@snowflake:mozilla.org";
keys = [{
fingerprint = "8223 7B6F 2FF4 8F16 B652 6CA3 934F 9E5F 9701 2C0B";
}];
};
snpschaaf = {
email = "philipe.schaaf@secunet.com";
name = "Philippe Schaaf";
@@ -17017,6 +17324,12 @@
githubId = 7075751;
name = "Patrick Hilhorst";
};
sysedwinistrator = {
email = "edwin.mowen@gmail.com";
github = "sysedwinistrator";
githubId = 71331875;
name = "Edwin Mackenzie-Owen";
};
szczyp = {
email = "qb@szczyp.com";
github = "Szczyp";
@@ -17142,6 +17455,12 @@
githubId = 1901799;
name = "Nathan van Doorn";
};
taranarmo = {
email = "taranarmo@gmail.com";
github = "taranarmo";
githubId = 11619234;
name = "Sergey Volkov";
};
tari = {
email = "peter@taricorp.net";
github = "tari";
@@ -17784,6 +18103,12 @@
githubId = 858790;
name = "Tobias Mayer";
};
tochiaha = {
email = "tochiahan@proton.me";
github = "Tochiaha";
githubId = 74688871;
name = "Tochukwu Ahanonu";
};
tokudan = {
email = "git@danielfrank.net";
github = "tokudan";
@@ -17829,6 +18154,10 @@
githubId = 13155277;
name = "Tom Houle";
};
tomkoid = {
email = "tomaszierl@outlook.com";
name = "Tomkoid";
};
tomodachi94 = {
email = "tomodachi94+nixpkgs@protonmail.com";
matrix = "@tomodachi94:matrix.org";
@@ -17984,6 +18313,12 @@
githubId = 15064765;
name = "tshaynik";
};
tsowell = {
email = "tom@ldtlb.com";
github = "tsowell";
githubId = 4044033;
name = "Thomas Sowell";
};
ttuegel = {
email = "ttuegel@mailbox.org";
github = "ttuegel";
@@ -18608,6 +18943,12 @@
githubId = 7038383;
name = "Vojta Káně";
};
volfyd = {
email = "lb.nix@lisbethmail.com";
github = "volfyd";
githubId = 3578382;
name = "Leif Huhn";
};
volhovm = {
email = "volhovm.cs@gmail.com";
github = "volhovm";
@@ -18713,6 +19054,13 @@
fingerprint = "47F7 009E 3AE3 1DA7 988E 12E1 8C9B 0A8F C0C0 D862";
}];
};
wamirez = {
email = "wamirez@protonmail.com";
matrix = "@wamirez:matrix.org";
github = "wamirez";
githubId = 24505474;
name = "Daniel Ramirez";
};
wamserma = {
name = "Markus S. Wamser";
email = "github-dev@mail2013.wamser.eu";
@@ -18828,6 +19176,12 @@
fingerprint = "640B EDDE 9734 310A BFA3 B257 52ED AE6A 3995 AFAB";
}];
};
whiteley = {
email = "mattwhiteley@gmail.com";
github = "whiteley";
githubId = 2215;
name = "Matt Whiteley";
};
WhittlesJr = {
email = "alex.joseph.whitt@gmail.com";
github = "WhittlesJr";
@@ -18941,11 +19295,11 @@
githubId = 168610;
name = "Ricardo M. Correia";
};
wjlroe = {
email = "willroe@gmail.com";
github = "wjlroe";
githubId = 43315;
name = "William Roe";
wladmis = {
email = "dev@wladmis.org";
github = "wladmis";
githubId = 5000261;
name = "Wladmis";
};
wldhx = {
email = "wldhx+nixpkgs@wldhx.me";
@@ -19125,11 +19479,11 @@
name = "Uli Baum";
};
xfix = {
email = "konrad@borowski.pw";
email = "kamila@borowska.pw";
matrix = "@xfix:matrix.org";
github = "xfix";
githubId = 1297598;
name = "Konrad Borowski";
name = "Kamila Borowska";
};
xfnw = {
email = "xfnw+nixos@riseup.net";
@@ -19231,6 +19585,12 @@
github = "yanganto";
githubId = 10803111;
};
yannip = {
email = "yPapandreou7@gmail.com";
github = "YanniPapandreou";
githubId = 15948162;
name = "Yanni Papandreou";
};
yarny = {
github = "Yarny0";
githubId = 41838844;
@@ -19275,6 +19635,13 @@
fingerprint = "FD0A C425 9EF5 4084 F99F 9B47 2ACC 9749 7C68 FAD4";
}];
};
YellowOnion = {
name = "Daniel Hill";
email = "daniel@gluo.nz";
github = "YellowOnion";
githubId = 364160;
matrix = "@woobilicious:matrix.org";
};
yesbox = {
email = "jesper.geertsen.jonsson@gmail.com";
github = "yesbox";
@@ -19336,6 +19703,11 @@
github = "ymeister";
githubId = 47071325;
};
ymstnt = {
name = "YMSTNT";
github = "ymstnt";
githubId = 21342713;
};
yoavlavi = {
email = "yoav@yoavlavi.com";
github = "yoav-lavi";
@@ -19415,6 +19787,12 @@
fingerprint = "85F8 E850 F8F2 F823 F934 535B EC50 6589 9AEA AF4C";
}];
};
yunfachi = {
email = "yunfachi@gmail.com";
github = "yunfachi";
githubId = 73419713;
name = "Yunfachi";
};
yureien = {
email = "contact@sohamsen.me";
github = "Yureien";
@@ -19655,6 +20033,12 @@
github = "zmitchell";
githubId = 10246891;
};
znaniye = {
email = "zn4niye@proton.me";
github = "znaniye";
githubId = 134703788;
name = "Samuel Silva";
};
znewman01 = {
email = "znewman01@gmail.com";
github = "znewman01";

View file

@@ -187,7 +187,7 @@ getBuildReports opt = runReq defaultHttpConfig do
getEvalBuilds :: HydraSlownessWorkaroundFlag -> Int -> Req (Seq Build)
getEvalBuilds NoHydraSlownessWorkaround id =
hydraJSONQuery (responseTimeout 900000000) ["eval", showT id, "builds"]
hydraJSONQuery mempty ["eval", showT id, "builds"]
getEvalBuilds HydraSlownessWorkaround id = do
Eval{builds} <- hydraJSONQuery mempty [ "eval", showT id ]
forM builds $ \buildId -> do
@@ -195,14 +195,15 @@ getEvalBuilds HydraSlownessWorkaround id = do
hydraJSONQuery mempty [ "build", showT buildId ]
hydraQuery :: HttpResponse a => Proxy a -> Option 'Https -> [Text] -> Req (HttpResponseBody a)
hydraQuery responseType option query =
responseBody
<$> req
GET
(foldl' (/:) (https "hydra.nixos.org") query)
NoReqBody
responseType
(header "User-Agent" "hydra-report.hs/v1 (nixpkgs;maintainers/scripts/haskell) pls fix https://github.com/NixOS/nixos-org-configurations/issues/270" <> option)
hydraQuery responseType option query = do
let customHeaderOpt =
header
"User-Agent"
"hydra-report.hs/v1 (nixpkgs;maintainers/scripts/haskell) pls fix https://github.com/NixOS/nixos-org-configurations/issues/270"
customTimeoutOpt = responseTimeout 900_000_000 -- 15 minutes
opts = customHeaderOpt <> customTimeoutOpt <> option
url = foldl' (/:) (https "hydra.nixos.org") query
responseBody <$> req GET url NoReqBody responseType opts
hydraJSONQuery :: FromJSON a => Option 'Https -> [Text] -> Req a
hydraJSONQuery = hydraQuery jsonResponse
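-- Illustrative sketch (the eval id below is hypothetical): both call
-- sites above go through this shared plumbing, e.g.
--
--   builds <- hydraJSONQuery mempty ["eval", showT 1800000, "builds"]
--
-- and so pick up the custom User-Agent header and the 15-minute
-- response timeout by default.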

View file

@@ -39,5 +39,5 @@ fi
package_list="$(nix-build -A haskell.package-list)/nixos-hackage-packages.csv"
username=$(grep "^username:" "$CABAL_DIR/config" | sed "s/^username: //")
password_command=$(grep "^password-command:" "$CABAL_DIR/config" | sed "s/^password-command: //")
curl -u "$username:$($password_command | head -n1)" --digest -H "Content-type: text/csv" -T "$package_list" http://hackage.haskell.org/distro/NixOS/packages.csv
curl -u "$username:$($password_command | head -n1)" --digest -H "Content-type: text/csv" -T "$package_list" https://hackage.haskell.org/distro/NixOS/packages.csv
echo
