Project import generated by Copybara.

GitOrigin-RevId: 40f79f003b6377bd2f4ed4027dde1f8f922995dd
Default email 2022-12-17 11:02:37 +01:00
parent 46c0e2713a
commit a66bca1520
2580 changed files with 65067 additions and 58608 deletions

View file

@ -78,6 +78,8 @@
/nixos/doc/manual/man-nixos-option.xml @nbp
/nixos/modules/installer/tools/nixos-option.sh @nbp
/nixos/modules/system @dasJ
/nixos/modules/system/activation/bootspec.nix @grahamc @cole-h @raitobezarius
/nixos/modules/system/activation/bootspec.cue @grahamc @cole-h @raitobezarius
# NixOS integration test driver
/nixos/lib/test-driver @tfc
@ -146,6 +148,11 @@
# Browsers
/pkgs/applications/networking/browsers/firefox @mweinelt
# Certificate Authorities
pkgs/data/misc/cacert/ @ajs124 @lukegb @mweinelt
pkgs/development/libraries/nss/ @ajs124 @lukegb @mweinelt
pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
# Jetbrains
/pkgs/applications/editors/jetbrains @edwtjo
@ -219,10 +226,10 @@
/pkgs/top-level/emacs-packages.nix @adisbladis
# Neovim
/pkgs/applications/editors/neovim @jonringer @teto
/pkgs/applications/editors/neovim @figsoda @jonringer @teto
# VimPlugins
/pkgs/applications/editors/vim/plugins @jonringer
/pkgs/applications/editors/vim/plugins @figsoda @jonringer
# VsCode Extensions
/pkgs/applications/editors/vscode/extensions @jonringer
@ -284,11 +291,8 @@
# Matrix
/pkgs/servers/heisenbridge @piegamesde
/pkgs/servers/matrix-conduit @piegamesde
/pkgs/servers/matrix-synapse/matrix-appservice-irc @piegamesde
/nixos/modules/services/misc/heisenbridge.nix @piegamesde
/nixos/modules/services/misc/matrix-appservice-irc.nix @piegamesde
/nixos/modules/services/misc/matrix-conduit.nix @piegamesde
/nixos/tests/matrix-appservice-irc.nix @piegamesde
/nixos/tests/matrix-conduit.nix @piegamesde
# Dotnet

View file

@ -2,6 +2,8 @@
,*
.*.swp
.*.swo
.\#*
\#*\#
.idea/
.vscode/
outputs/

View file

@ -1,3 +1,14 @@
ajs124 <git@ajs124.de> <ajs124@users.noreply.github.com>
Anderson Torres <torres.anderson.85@protonmail.com>
Daniel Løvbrøtte Olsen <me@dandellion.xyz> <daniel.olsen99@gmail.com>
Fabian Affolter <mail@fabian-affolter.ch> <fabian@affolter-engineering.ch>
Janne Heß <janne@hess.ooo> <dasJ@users.noreply.github.com>
Jörg Thalheim <joerg@thalheim.io> <Mic92@users.noreply.github.com>
Martin Weinelt <hexa@darmstadt.ccc.de> <mweinelt@users.noreply.github.com>
R. RyanTM <ryantm-bot@ryantm.com>
Sandro <sandro.jaeckel@gmail.com>
Robert Hensing <robert@roberthensing.nl> <roberth@users.noreply.github.com>
Sandro Jäckel <sandro.jaeckel@gmail.com>
Sandro Jäckel <sandro.jaeckel@gmail.com> <sandro.jaeckel@sap.com>
superherointj <5861043+superherointj@users.noreply.github.com>
Vladimír Čunát <v@cunat.cz> <vcunat@gmail.com>
Vladimír Čunát <v@cunat.cz> <vladimir.cunat@nic.cz>

View file

@ -53,6 +53,10 @@ In addition to writing properly formatted commit messages, it's important to inc
Package version upgrades usually allow for simpler commit messages, including attribute name, old and new version, as well as a reference to the relevant release notes/changelog. Every once in a while a package upgrade requires more extensive changes, and that subsequently warrants a more verbose message.
Pull requests should not be squash-merged, in order to keep complete commit messages and GPG signatures intact, and must not be squash-merged when the change doesn't make sense as a single commit.
This means that, when addressing review comments in order to keep the pull request in an always mergeable status, you will sometimes need to rewrite your branch's history and then force-push it with `git push --force-with-lease`.
Useful git commands that can help a lot with this are `git commit --patch --amend` and `git rebase --interactive @~3`. For more details consult the git man pages.
## Rebasing between branches (i.e. from master to staging)
From time to time, changes between branches must be rebased, for example, if the

View file

@ -14,7 +14,7 @@ For example, consider the following fetcher:
```nix
fetchurl {
url = "http://www.example.org/hello-1.0.tar.gz";
sha256 = "0v6r3wwnsk5pdjr188nip3pjgn1jrn5pc5ajpcfy6had6b3v4dwm";
hash = "sha256-lTeyxzJNQeMdu1IVdovNMtgn77jRIhSybLdMbTkf2Ww=";
};
```
@ -23,17 +23,17 @@ A common mistake is to update a fetchers URL, or a version parameter, without
```nix
fetchurl {
url = "http://www.example.org/hello-1.1.tar.gz";
sha256 = "0v6r3wwnsk5pdjr188nip3pjgn1jrn5pc5ajpcfy6had6b3v4dwm";
hash = "sha256-lTeyxzJNQeMdu1IVdovNMtgn77jRIhSybLdMbTkf2Ww=";
};
```
**This will reuse the old contents**.
Remember to invalidate the hash argument, in this case by setting the `sha256` attribute to an empty string.
Remember to invalidate the hash argument, in this case by setting the `hash` attribute to an empty string.
```nix
fetchurl {
url = "http://www.example.org/hello-1.1.tar.gz";
sha256 = "";
hash = "";
};
```
@ -42,14 +42,14 @@ Use the resulting error message to determine the correct hash.
```
error: hash mismatch in fixed-output derivation '/path/to/my.drv':
specified: sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=
got: sha256-RApQUm78dswhBLC/rfU9y0u6pSAzHceIJqgmetRD24E=
got: sha256-lTeyxzJNQeMdu1IVdovNMtgn77jRIhSybLdMbTkf2Ww=
```
A similar problem arises while testing changes to a fetcher's implementation. If the output of the derivation already exists in the Nix store, test failures can go undetected. The [`invalidateFetcherByDrvHash`](#tester-invalidateFetcherByDrvHash) function helps prevent reusing cached derivations.
## `fetchurl` and `fetchzip` {#fetchurl}
Two basic fetchers are `fetchurl` and `fetchzip`. Both of these have two required arguments, a URL and a hash. The hash is typically `sha256`, although many more hash algorithms are supported. Nixpkgs contributors are currently recommended to use `sha256`. This hash will be used by Nix to identify your source. A typical usage of `fetchurl` is provided below.
Two basic fetchers are `fetchurl` and `fetchzip`. Both of these have two required arguments: a URL and a hash. The hash is typically given via the `hash` attribute, although attributes for many other hash algorithms are supported. Nixpkgs contributors are currently recommended to use `hash`. This hash will be used by Nix to identify your source. A typical usage of `fetchurl` is provided below.
```nix
{ stdenv, fetchurl }:
@ -58,7 +58,7 @@ stdenv.mkDerivation {
name = "hello";
src = fetchurl {
url = "http://www.example.org/hello.tar.gz";
sha256 = "1111111111111111111111111111111111111111111111111111";
hash = "sha256-BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB=";
};
}
```
@ -76,18 +76,18 @@ The main difference between `fetchurl` and `fetchzip` is in how they store the c
- `includes`: Include only files matching these patterns (applies after the above arguments).
- `revert`: Revert the patch.
Note that because the checksum is computed after applying these effects, using or modifying these arguments will have no effect unless the `sha256` argument is changed as well.
Note that because the checksum is computed after applying these effects, using or modifying these arguments will have no effect unless the `hash` argument is changed as well.
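As a minimal sketch (hypothetical URL and file name), restricting and reverting a patch could look like this; note the empty `hash`, which has to be re-determined whenever these arguments change:
```nix
fetchpatch {
  name = "revert-change-to-foo.patch";
  url = "https://example.org/some-change.patch";
  # keep only the hunks touching this file, then apply the patch in reverse
  includes = [ "src/foo.c" ];
  revert = true;
  hash = "";  # rebuild once and copy the hash from the mismatch error
}
```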
Most other fetchers return a directory rather than a single file.
## `fetchsvn` {#fetchsvn}
Used with Subversion. Expects `url` to a Subversion directory, `rev`, and `sha256`.
Used with Subversion. Expects a `url` pointing to a Subversion directory, a `rev`, and a `hash`.
## `fetchgit` {#fetchgit}
Used with Git. Expects `url` to a Git repo, `rev`, and `sha256`. `rev` in this case can be full the git commit id (SHA1 hash) or a tag name like `refs/tags/v1.0`.
Used with Git. Expects a `url` pointing to a Git repo, a `rev`, and a `hash`. `rev` in this case can be the full Git commit id (SHA1 hash) or a tag name like `refs/tags/v1.0`.
Additionally, the following optional arguments can be given: `fetchSubmodules = true` makes `fetchgit` also fetch the submodules of a repository. If `deepClone` is set to true, the entire repository is cloned as opposed to just creating a shallow clone. `deepClone = true` also implies `leaveDotGit = true` which means that the `.git` directory of the clone won't be removed after checkout.
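For instance, a minimal sketch (hypothetical repository URL) of a clone that also fetches submodules:
```nix
fetchgit {
  url = "https://example.org/some-project.git";
  rev = "refs/tags/v1.0";
  fetchSubmodules = true;
  hash = "";  # fill in from the hash-mismatch error of the first build
}
```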
@ -104,32 +104,32 @@ stdenv.mkDerivation {
"directory/to/be/included"
"another/directory"
];
sha256 = "0000000000000000000000000000000000000000000000000000";
hash = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
};
}
```
## `fetchfossil` {#fetchfossil}
Used with Fossil. Expects `url` to a Fossil archive, `rev`, and `sha256`.
Used with Fossil. Expects a `url` pointing to a Fossil archive, a `rev`, and a `hash`.
## `fetchcvs` {#fetchcvs}
Used with CVS. Expects `cvsRoot`, `tag`, and `sha256`.
Used with CVS. Expects a `cvsRoot`, a `tag`, and a `hash`.
## `fetchhg` {#fetchhg}
Used with Mercurial. Expects `url`, `rev`, and `sha256`.
Used with Mercurial. Expects a `url`, a `rev`, and a `hash`.
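As a hedged sketch (hypothetical repository), a `fetchhg` call looks much like its Git counterpart:
```nix
fetchhg {
  url = "https://example.org/hg/some-project";
  rev = "1a2b3c4d5e6f";  # Mercurial changeset id or tag
  hash = "";             # fill in after the first build reports the real hash
}
```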
A number of fetcher functions wrap part of `fetchurl` and `fetchzip`. They are mainly convenience functions intended for commonly used destinations of source code in Nixpkgs. These wrapper fetchers are listed below.
## `fetchFromGitea` {#fetchfromgitea}
`fetchFromGitea` expects five arguments. `domain` is the gitea server name. `owner` is a string corresponding to the Gitea user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every Gitea HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g `v1.0`) that will be downloaded from Git. Finally, `sha256` corresponds to the hash of the extracted directory. Again, other hash algorithms are also available but `sha256` is currently preferred.
`fetchFromGitea` expects five arguments. `domain` is the Gitea server name. `owner` is a string corresponding to the Gitea user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every Gitea HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g. `v1.0`) that will be downloaded from Git. Finally, `hash` corresponds to the hash of the extracted directory. Again, other hash attributes are also available, but `hash` is currently preferred.
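A minimal sketch (all values hypothetical) of those five arguments:
```nix
fetchFromGitea {
  domain = "gitea.example.org";
  owner = "some-owner";
  repo = "some-repo";
  rev = "v1.0";
  hash = "";  # copy from the hash-mismatch error of the first build
}
```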
## `fetchFromGitHub` {#fetchfromgithub}
`fetchFromGitHub` expects four arguments. `owner` is a string corresponding to the GitHub user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every GitHub HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g `v1.0`) that will be downloaded from Git. Finally, `sha256` corresponds to the hash of the extracted directory. Again, other hash algorithms are also available, but `sha256` is currently preferred.
`fetchFromGitHub` expects four arguments. `owner` is a string corresponding to the GitHub user or organization that controls this repository. `repo` corresponds to the name of the software repository. These are located at the top of every GitHub HTML page as `owner`/`repo`. `rev` corresponds to the Git commit hash or tag (e.g. `v1.0`) that will be downloaded from Git. Finally, `hash` corresponds to the hash of the extracted directory. Again, other hash attributes are also available, but `hash` is currently preferred.
`fetchFromGitHub` uses `fetchzip` to download the source archive generated by GitHub for the specified revision. If `leaveDotGit`, `deepClone` or `fetchSubmodules` are set to `true`, `fetchFromGitHub` will use `fetchgit` instead. Refer to its section for documentation of these options.
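A minimal sketch, reusing the `NixOS/nix` revision used elsewhere in the Nixpkgs documentation; only the hash still has to be determined (for example with `nix-prefetch-github`):
```nix
fetchFromGitHub {
  owner = "NixOS";
  repo = "nix";
  rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";  # always a full commit hash or tag
  hash = "";  # e.g. obtained via nix-prefetch-github
}
```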
@ -156,7 +156,7 @@ This is used with repo.or.cz repositories. The arguments expected are very simil
## `fetchFromSourcehut` {#fetchfromsourcehut}
This is used with sourcehut repositories. Similar to `fetchFromGitHub` above,
it expects `owner`, `repo`, `rev` and `sha256`, but don't forget the tilde (~)
it expects `owner`, `repo`, `rev` and `hash`, but don't forget the tilde (~)
in front of the username! Expected arguments also include `vc` ("git" (default)
or "hg"), `domain` and `fetchSubmodules`.

View file

@ -35,7 +35,7 @@ appimageTools.wrapType2 { # or wrapType1
name = "patchwork";
src = fetchurl {
url = "https://github.com/ssbc/patchwork/releases/download/v3.11.4/Patchwork-3.11.4-linux-x86_64.AppImage";
sha256 = "1blsprpkvm0ws9b96gb36f0rbf8f5jgmw4x6dsb1kswr4ysf591s";
hash = "sha256-OqTitCeZ6xmWbqYTXp8sDrmVgTNjPZNW0hzUPW++mq4=";
};
extraPkgs = pkgs: with pkgs; [ ];
}

View file

@ -62,6 +62,8 @@ The above example will build a Docker image `redis/latest` from the given base i
- `config` is used to specify the configuration of the containers that will be started off the built image in Docker. The available options are listed in the [Docker Image Specification v1.2.0](https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions).
- `architecture` is _optional_ and used to specify the image architecture; this is useful for multi-architecture builds that don't need cross compiling. If not specified, it will default to `hostPlatform`.
- `diskSize` is used to specify the disk size of the VM used to build the image in megabytes. By default it's 1024 MiB.
- `buildVMMemorySize` is used to specify the memory size of the VM to build the image in megabytes. By default it's 512 MiB.
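As a hedged sketch (other arguments elided), the two size knobs can be set like this:
```nix
dockerTools.buildImage {
  name = "redis";
  tag = "latest";
  diskSize = 4096;           # disk size of the build VM, in megabytes
  buildVMMemorySize = 1024;  # memory size of the build VM, in megabytes
  # ... fromImage, contents, config, etc.
}
```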
@ -141,6 +143,8 @@ Create a Docker image with many of the store paths being on their own layer to i
`config` _optional_
`architecture` is _optional_ and used to specify the image architecture; this is useful for multi-architecture builds that don't need cross compiling. If not specified, it will default to `hostPlatform`.
: Run-time configuration of the container. A full list of the options is available in the [Docker Image Specification v1.2.0](https://github.com/moby/moby/blob/master/image/spec/v1.2.md#image-json-field-descriptions).
*Default:* `{}`
@ -245,10 +249,10 @@ Its parameters are described in the example below:
pullImage {
imageName = "nixos/nix";
imageDigest =
"sha256:20d9485b25ecfd89204e843a962c1bd70e9cc6858d65d7f5fadc340246e2116b";
"sha256:473a2b527958665554806aea24d0131bacec46d23af09fef4598eeab331850fa";
finalImageName = "nix";
finalImageTag = "1.11";
sha256 = "0mqjy3zq2v6rrhizgb9nvhczl87lcfphq9601wcprdika2jz7qh8";
finalImageTag = "2.11.1";
sha256 = "sha256-qvhj+Hlmviz+KEBVmsyPIzTB3QlVAFzwAY1zDPIBGxc=";
os = "linux";
arch = "x86_64";
}

View file

@ -103,7 +103,7 @@ let
owner = "Someone";
repo = "AwesomeMod";
rev = "...";
sha256 = "...";
hash = "...";
};
# Path to be installed in the unpacked source (default: ".")
modRoot = "contents/under/this/path/will/be/installed";

View file

@ -43,11 +43,11 @@ packageOverrides = pkgs: {
name = "myplugin1-1.0";
srcFeature = fetchurl {
url = "http://…/features/myplugin1.jar";
sha256 = "123…";
hash = "sha256-123…";
};
srcPlugin = fetchurl {
url = "http://…/plugins/myplugin1.jar";
sha256 = "123…";
hash = "sha256-123…";
};
});
(plugins.buildEclipseUpdateSite {
@ -55,7 +55,7 @@ packageOverrides = pkgs: {
src = fetchurl {
stripRoot = false;
url = "http://…/myplugin2.zip";
sha256 = "123…";
hash = "sha256-123…";
};
});
];

View file

@ -12,7 +12,7 @@ The `wrapFirefox` function allows to pass policies, preferences and extensions t
(fetchFirefoxAddon {
name = "ublock"; # Has to be unique!
url = "https://addons.mozilla.org/firefox/downloads/file/3679754/ublock_origin-1.31.0-an+fx.xpi";
sha256 = "1h768ljlh3pi23l27qp961v1hd0nbj2vasgy11bmcrlqp40zgvnr";
hash = "sha256-2e73AbmYZlZXCP5ptYVcFjQYdjDp4iPoEPEOSCVF5sA=";
})
];

View file

@ -73,7 +73,7 @@ stdenv.mkDerivation {
name = "exemplary-weechat-script";
src = fetchurl {
url = "https://scripts.tld/your-scripts.tar.gz";
sha256 = "...";
hash = "...";
};
passthru.scripts = [ "foo.py" "bar.lua" ];
installPhase = ''

View file

@ -147,7 +147,7 @@ tests.fetchgit = testers.invalidateFetcherByDrvHash fetchgit {
name = "nix-source";
url = "https://github.com/NixOS/nix";
rev = "9d9dbe6ed05854e03811c361a3380e09183f4f4a";
sha256 = "sha256-7DszvbCNTjpzGRmpIVAWXk20P0/XTrWZ79KSOGLrUWY=";
hash = "sha256-7DszvbCNTjpzGRmpIVAWXk20P0/XTrWZ79KSOGLrUWY=";
};
```

View file

@ -426,9 +426,10 @@ In the file `pkgs/top-level/all-packages.nix` you can find fetch helpers, these
```nix
src = fetchgit {
url = "git@github.com:NixOS/nix.git"
url = "git://github.com/NixOS/nix.git";
rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
sha256 = "1cw5fszffl5pkpa6s6wjnkiv6lm5k618s32sp60kvmvpy7a2v9kg";
hash = "sha256-7D4m+saJjbSFP5hOwpQq2FGR2rr+psQMTcyb1ZvtXsQ=";
}
```
@ -438,7 +439,7 @@ In the file `pkgs/top-level/all-packages.nix` you can find fetch helpers, these
src = fetchgit {
url = "https://github.com/NixOS/nix.git";
rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
sha256 = "1cw5fszffl5pkpa6s6wjnkiv6lm5k618s32sp60kvmvpy7a2v9kg";
hash = "sha256-7D4m+saJjbSFP5hOwpQq2FGR2rr+psQMTcyb1ZvtXsQ=";
}
```
@ -449,14 +450,14 @@ In the file `pkgs/top-level/all-packages.nix` you can find fetch helpers, these
owner = "NixOS";
repo = "nix";
rev = "1f795f9f44607cc5bec70d1300150bfefcef2aae";
sha256 = "1i2yxndxb6yc9l6c99pypbd92lfq5aac4klq7y2v93c9qvx2cgpc";
hash = "ha256-7D4m+saJjbSFP5hOwpQq2FGR2rr+psQMTcyb1ZvtXsQ=;
}
```
When fetching from GitHub, commits must always be referenced by their full commit hash. This is because GitHub shares commit hashes among all forks and returns `404 Not Found` when a short commit hash is ambiguous. It already happens for some short, 6-character commit hashes in `nixpkgs`.
It is a practical vector for a denial-of-service attack by pushing large amounts of auto generated commits into forks and was already [demonstrated against GitHub Actions Beta](https://blog.teddykatz.com/2019/11/12/github-actions-dos.html).
Find the value to put as `sha256` by running `nix-shell -p nix-prefetch-github --run "nix-prefetch-github --rev 1f795f9f44607cc5bec70d1300150bfefcef2aae NixOS nix"`.
Find the value to put as `hash` by running `nix-shell -p nix-prefetch-github --run "nix-prefetch-github --rev 1f795f9f44607cc5bec70d1300150bfefcef2aae NixOS nix"`.
## Obtaining source hash {#sec-source-hashes}
@ -486,12 +487,12 @@ Preferred source hash type is sha256. There are several ways to get it.
- `lib.fakeHash`
- `lib.fakeSha256`
- `lib.fakeSha512`
in the package expression, attempt the build, and extract the correct hash from the error message (see the sketch below).
::: {.warning}
You must use one of these four fake hashes and not some arbitrarily-chosen hash.
See [](#sec-source-hashes-security).
:::
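A minimal sketch of this workflow, reusing the example URL from above: set a fake hash, attempt the build, and copy the correct hash from the resulting mismatch error.
```nix
src = fetchurl {
  url = "http://www.example.org/hello-1.0.tar.gz";
  hash = lib.fakeHash;  # replace with the value reported by the failed build
};
```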
@ -519,7 +520,7 @@ patches = [
(fetchpatch {
name = "fix-check-for-using-shared-freetype-lib.patch";
url = "http://git.ghostscript.com/?p=ghostpdl.git;a=patch;h=8f5d285";
sha256 = "1f0k043rng7f0rfl9hhb89qzvvksqmkrikmm38p61yfx51l325xr";
hash = "sha256-uRcxaCjd+WAuGrXOmGfFeu79cUILwkRdBu48mwcBE7g=";
})
];
```
@ -669,3 +670,18 @@ stdenv.mkDerivation {
...
}
```
### Import From Derivation {#ssec-import-from-derivation}
Import From Derivation (IFD) is disallowed in Nixpkgs for performance reasons:
[Hydra] evaluates the entire package set, and sequential builds during evaluation would increase evaluation times to the point of being impractical.
[Hydra]: https://github.com/NixOS/hydra
Import From Derivation can be worked around in some cases by committing generated intermediate files to version control and reading those instead.
<!-- TODO: remove the following and link to Nix manual once https://github.com/NixOS/nix/pull/7332 is merged -->
See also [NixOS Wiki: Import From Derivation].
[NixOS Wiki: Import From Derivation]: https://nixos.wiki/wiki/Import_From_Derivation

View file

@ -76,7 +76,7 @@ Security fixes are submitted in the same way as other changes and thus the same
(fetchpatch {
name = "CVE-2019-11068.patch";
url = "https://gitlab.gnome.org/GNOME/libxslt/commit/e03553605b45c88f0b4b2980adfbbb8f6fca2fd6.patch";
sha256 = "0pkpb4837km15zgg6h57bncp66d5lwrlvkr73h0lanywq7zrwhj8";
hash = "sha256-SEKe/8HcW0UBHCfPTTOnpRlzmV2nQPPeL6HOMxBZd14=";
})
```

View file

@ -17,6 +17,8 @@ with pkgs; stdenv.mkDerivation {
mkdir -p $out
ln -s ${locationsXml} $out/locations.xml
docgen asserts 'Assert functions'
docgen attrsets 'Attribute-set functions'
docgen strings 'String manipulation functions'
docgen trivial 'Miscellaneous functions'
docgen lists 'List manipulation functions'

View file

@ -2,11 +2,14 @@
<xsl:stylesheet
xmlns:xsl="http://www.w3.org/1999/XSL/Transform"
version="1.0">
<xsl:param name="section.autolabel" select="1" />
<xsl:param name="section.label.includes.component.label" select="1" />
<xsl:param name="chapter.autolabel" select="0" />
<xsl:param name="part.autolabel" select="0" />
<xsl:param name="preface.autolabel" select="0" />
<xsl:param name="reference.autolabel" select="0" />
<xsl:param name="section.autolabel" select="0" />
<xsl:param name="html.stylesheet" select="'style.css overrides.css highlightjs/mono-blue.css'" />
<xsl:param name="html.script" select="'./highlightjs/highlight.pack.js ./highlightjs/loader.js'" />
<xsl:param name="xref.with.number.and.title" select="1" />
<xsl:param name="xref.with.number.and.title" select="0" />
<xsl:param name="use.id.as.filename" select="1" />
<xsl:param name="toc.section.depth" select="0" />
<xsl:param name="admon.style" select="''" />

View file

@ -8,14 +8,14 @@
Nixpkgs provides a standard library at <varname>pkgs.lib</varname>, or through <code>import &lt;nixpkgs/lib&gt;</code>.
</para>
<xi:include href="./library/asserts.xml" />
<xi:include href="./library/attrsets.xml" />
<!-- These docs are generated via nixdoc. To add another generated
library function file to this list, the file
`lib-function-docs.nix` must also be updated. -->
<xi:include href="./library/generated/asserts.xml" />
<xi:include href="./library/generated/attrsets.xml" />
<xi:include href="./library/generated/strings.xml" />
<xi:include href="./library/generated/trivial.xml" />

View file

View file

@ -1,112 +0,0 @@
<section xmlns="http://docbook.org/ns/docbook"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns:xi="http://www.w3.org/2001/XInclude"
xml:id="sec-functions-library-asserts">
<title>Assert functions</title>
<section xml:id="function-library-lib.asserts.assertMsg">
<title><function>lib.asserts.assertMsg</function></title>
<subtitle><literal>assertMsg :: Bool -> String -> Bool</literal>
</subtitle>
<xi:include href="./locations.xml" xpointer="lib.asserts.assertMsg" />
<para>
Print a trace message if <literal>pred</literal> is false.
</para>
<para>
Intended to be used to augment asserts with helpful error messages.
</para>
<variablelist>
<varlistentry>
<term>
<varname>pred</varname>
</term>
<listitem>
<para>
Condition under which the <varname>msg</varname> should <emphasis>not</emphasis> be printed.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>msg</varname>
</term>
<listitem>
<para>
Message to print.
</para>
</listitem>
</varlistentry>
</variablelist>
<example xml:id="function-library-lib.asserts.assertMsg-example-false">
<title>Printing when the predicate is false</title>
<programlisting><![CDATA[
assert lib.asserts.assertMsg ("foo" == "bar") "foo is not bar, silly"
stderr> trace: foo is not bar, silly
stderr> assert failed
]]></programlisting>
</example>
</section>
<section xml:id="function-library-lib.asserts.assertOneOf">
<title><function>lib.asserts.assertOneOf</function></title>
<subtitle><literal>assertOneOf :: String -> String ->
StringList -> Bool</literal>
</subtitle>
<xi:include href="./locations.xml" xpointer="lib.asserts.assertOneOf" />
<para>
Specialized <function>asserts.assertMsg</function> for checking if <varname>val</varname> is one of the elements of <varname>xs</varname>. Useful for checking enums.
</para>
<variablelist>
<varlistentry>
<term>
<varname>name</varname>
</term>
<listitem>
<para>
The name of the variable the user entered <varname>val</varname> into, for inclusion in the error message.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>val</varname>
</term>
<listitem>
<para>
The value of what the user provided, to be compared against the values in <varname>xs</varname>.
</para>
</listitem>
</varlistentry>
<varlistentry>
<term>
<varname>xs</varname>
</term>
<listitem>
<para>
The list of valid values.
</para>
</listitem>
</varlistentry>
</variablelist>
<example xml:id="function-library-lib.asserts.assertOneOf-example">
<title>Ensuring a user provided a possible value</title>
<programlisting><![CDATA[
let sslLibrary = "bearssl";
in lib.asserts.assertOneOf "sslLibrary" sslLibrary [ "openssl" "libressl" ];
=> false
stderr> trace: sslLibrary must be one of "openssl", "libressl", but is: "bearssl"
]]></programlisting>
</example>
</section>
</section>

File diff suppressed because it is too large

View file

@ -52,7 +52,7 @@ agda.withPackages (p: [
repo = "agda-stdlib";
owner = "agda";
rev = "v1.5";
sha256 = "16fcb7ssj6kj687a042afaa2gq48rc8abihpm14k684ncihb2k4w";
hash = "sha256-nEyxYGSWIDNJqBfGpRDLiOAnlHJKEKAOMnIaqfVZzJk=";
};
}))
])
@ -83,7 +83,7 @@ agda.withPackages (p: [
owner = "owner";
version = "...";
rev = "...";
sha256 = "...";
hash = "...";
};
})
])

View file

@ -115,7 +115,7 @@ If there are git depencencies.
owner = "elixir-libraries";
repo = "prometheus.ex";
rev = "a4e9beb3c1c479d14b352fd9d6dd7b1f6d7deee5";
sha256 = "1v0q4bi7sb253i8q016l7gwlv5562wk5zy3l2sa446csvsacnpjk";
hash = "sha256-U17LlN6aGUKUFnT4XyYXppRN+TvUBIBRHEUsfeIiGOw=";
};
# you can re-use the same beamDeps argument as generated
beamDeps = with final; [ prometheus ];
@ -124,7 +124,7 @@ If there are git depencencies.
};
```
You will need to run the build process once to fix the sha256 to correspond to your new git src.
You will need to run the build process once to update the hash so that it corresponds to your new git src.
###### FOD {#fixed-output-derivation}
@ -138,13 +138,13 @@ Practical steps
mixFodDeps = fetchMixDeps {
pname = "mix-deps-${pname}";
inherit src version;
sha256 = lib.fakeSha256;
hash = lib.fakeHash;
};
```
The first build will complain about the sha256 value, you can replace with the suggested value after that.
The first build will complain about the hash value; you can replace it with the suggested value after that.
Note that if after you've replaced the value, nix suggests another sha256, then mix is not fetching the dependencies reproducibly. An FOD will not work in that case and you will have to use mix2nix.
Note that if, after you've replaced the value, nix suggests another hash, then mix is not fetching the dependencies reproducibly. An FOD will not work in that case and you will have to use mix2nix.
##### mixRelease - example {#mix-release-example}
@ -170,7 +170,7 @@ let
pname = "mix-deps-${pname}";
inherit src version;
# nix will complain and tell you the right value to replace this with
sha256 = lib.fakeSha256;
hash = lib.fakeHash;
# if you have build time environment variables add them here
MY_ENV_VAR="my_value";
};
@ -282,7 +282,7 @@ mkShell {
### Using an overlay
If you need to use an overlay to change some attributes of a derivation, e.g. if you need a bugfix from a version that is not yet available in nixpkgs, you can override attributes such as `version` (and the corresponding `sha256`) and then use this overlay in your development environment:
If you need to use an overlay to change some attributes of a derivation, e.g. if you need a bugfix from a version that is not yet available in nixpkgs, you can override attributes such as `version` (and the corresponding `hash`) and then use this overlay in your development environment:
#### `shell.nix`
@ -291,7 +291,7 @@ let
elixir_1_13_1_overlay = (self: super: {
elixir_1_13 = super.elixir_1_13.override {
version = "1.13.1";
sha256 = "0z0b1w2vvw4vsnb99779c2jgn9bgslg7b1pmd9vlbv02nza9qj5p";
sha256 = "sha256-t0ic1LcC7EV3avWGdR7VbyX7pGDpnJSW1ZvwvQUPC3w=";
};
});
pkgs = import <nixpkgs> { overlays = [ elixir_1_13_1_overlay ]; };

View file

@ -24,7 +24,7 @@ The recommended way of defining a derivation for a Coq library, is to use the `c
* if it is a string of the form `"#N"`, and the domain is github, then it tries to download the current head of the pull request `#N` from github,
* `defaultVersion` (optional). Coq libraries may be compatible with some specific versions of Coq only. The `defaultVersion` attribute is used when no `version` is provided (or if `version = null`) to select the version of the library to use by default, depending on the context. This selection will mainly depend on a `coq` version number but also possibly on other package versions (e.g. `mathcomp`). If its value ends up being `null`, the package is marked for removal in the end-user `coqPackages` attribute set.
* `release` (optional, defaults to `{}`), lists all the known releases of the library and for each of them provides an attribute set with at least a `sha256` attribute (you may put the empty string `""` in order to automatically insert a fake sha256; this will trigger an error which will allow you to find the correct sha256); each attribute set of the list of releases also takes optional overloading arguments for the fetcher as below (i.e. `domain`, `owner`, `repo`, `rev` assuming the default fetcher is used) and optional overrides for the result of the fetcher (i.e. `version` and `src`).
* `fetcher` (optional, defaults to a generic fetching mechanism supporting github or gitlab based infrastructures), is a function that takes at least an `owner`, a `repo`, a `rev`, and a `sha256` and returns an attribute set with a `version` and `src`.
* `fetcher` (optional, defaults to a generic fetching mechanism supporting github or gitlab based infrastructures), is a function that takes at least an `owner`, a `repo`, a `rev`, and a `hash` and returns an attribute set with a `version` and `src`.
* `repo` (optional, defaults to the value of `pname`),
* `owner` (optional, defaults to `"coq-community"`).
* `domain` (optional, defaults to `"github.com"`), domains including the strings `"github"` or `"gitlab"` in their names are automatically supported, otherwise, one must change the `fetcher` argument to support them (cf `pkgs/development/coq-modules/heq/default.nix` for an example),

View file

@ -27,7 +27,7 @@ crystal.buildCrystalPackage rec {
owner = "mint-lang";
repo = "mint";
rev = version;
sha256 = "0vxbx38c390rd2ysvbwgh89v2232sh5rbsp3nk9wzb70jybpslvl";
hash = "sha256-dFN9l5fgrM/TtOPqlQvUYgixE4KPr629aBmkwdDoq28=";
};
# Insert the path to your shards.nix file here
@ -62,7 +62,7 @@ crystal.buildCrystalPackage rec {
owner = "mint-lang";
repo = "mint";
rev = version;
sha256 = "0vxbx38c390rd2ysvbwgh89v2232sh5rbsp3nk9wzb70jybpslvl";
hash = "sha256-dFN9l5fgrM/TtOPqlQvUYgixE4KPr629aBmkwdDoq28=";
};
shardsFile = ./shards.nix;

View file

@ -32,3 +32,22 @@ mypkg = let
}});
in callPackage { inherit cudaPackages; };
```
The CUDA NVCC compiler requires flags to determine which hardware you
want to target in terms of SASS (real hardware) or PTX (JIT kernels).
Nixpkgs tries to provide sensible real-architecture defaults based on the
CUDA toolkit version, with PTX support for future hardware. Experienced
users may optimize this configuration for a variety of reasons, such as
reducing binary size and compile time, supporting legacy hardware, or
optimizing for specific hardware.
You may provide capabilities to add support or reduce binary size through
`config` using `cudaCapabilities = [ "6.0" "7.0" ];` and
`cudaForwardCompat = true;` if you want PTX support for future hardware.
Please consult [GPUs supported](https://en.wikipedia.org/wiki/CUDA#GPUs_supported)
for your specific card(s).
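For example, a hedged sketch of such a configuration (assuming a sufficiently recent Nixpkgs) when importing the package set:
```nix
import <nixpkgs> {
  config = {
    allowUnfree = true;                   # CUDA packages are unfree
    cudaSupport = true;
    cudaCapabilities = [ "6.0" "7.0" ];   # SASS targets to build for
    cudaForwardCompat = true;             # also emit PTX for future hardware
  };
}
```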
Library maintainers should consult [NVCC Docs](https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/)
and release notes for their software package.

View file

@ -0,0 +1,93 @@
# Cue (Cuelang) {#cuelang}
[Cuelang](https://cuelang.org/) is a language to:
- describe schemas and validate backward-compatibility
- generate code and schemas in various formats (e.g. JSON Schema, OpenAPI)
- do configuration akin to [Dhall Lang](https://dhall-lang.org/)
- perform data validation
## Cuelang schema quick start
Cuelang schemas are similar to JSON; here is a quick cheatsheet:
- Default types include: `null`, `string`, `bool`, `bytes`, `number`, `int`, `float`, and lists as `[...T]` where `T` is a type.
- All structures, defined by: `myStructName: { <fields> }` are **open** -- they accept fields which are not specified.
- Closed structures can be built by doing `myStructName: close({ <fields> })` -- they are strict in what they accept.
- `#X` are **definitions**, referenced definitions are **recursively closed**, i.e. all its children structures are **closed**.
- `&` operator is the [unification operator](https://cuelang.org/docs/references/spec/#unification) (similar to a type-level merging operator), `|` is the [disjunction operator](https://cuelang.org/docs/references/spec/#disjunction) (similar to a type-level union operator).
- Values **are** types, i.e. `myStruct: { a: 3 }` is a valid type definition that only allows `3` as value.
- Read <https://cuelang.org/docs/concepts/logic/> to learn more about the semantics.
- Read <https://cuelang.org/docs/references/spec/> to learn about the language specification.
## `writeCueValidator`
Nixpkgs provides a `pkgs.writeCueValidator` helper, which will write a validation script based on the provided Cuelang schema.
Here is an example:
```nix
pkgs.writeCueValidator
(pkgs.writeText "schema.cue" ''
#Def1: {
field1: string
}
'')
{ document = "#Def1"; }
```
- The first parameter is the Cue schema file.
- The second parameter is an options parameter; currently, only `document` can be passed.
`document`: match your input data against this fragment of structure or definition, e.g. you may use the same schema file but different documents based on the data you are validating.
Another example, given the following `validator.nix`:
```nix
{ pkgs ? import <nixpkgs> {} }:
let
genericValidator = version:
pkgs.writeCueValidator
(pkgs.writeText "schema.cue" ''
#Version1: {
field1: string
}
#Version2: #Version1 & {
field1: "unused"
}''
)
{ document = "#Version${toString version}"; };
in
{
validateV1 = genericValidator 1;
validateV2 = genericValidator 2;
}
```
The result is a script that will validate the file you pass as the first argument against the schema you provided to `writeCueValidator`.
The file can be in any format that `cue vet` supports, e.g. YAML or JSON.
Here is an example file, named `example.json`, containing the following JSON:
```json
{ "field1": "abc" }
```
You can run the resulting script (named `validate`) as follows:
```console
$ nix-build validator.nix
$ ./result example.json
$ ./result-2 example.json
field1: conflicting values "unused" and "abc":
./example.json:1:13
../../../../../../nix/store/v64dzx3vr3glpk0cq4hzmh450lrwh6sg-schema.cue:5:11
$ sed -i 's/"abc"/3/' example.json
$ ./result example.json
field1: conflicting values 3 and string (mismatched types int and string):
./example.json:1:13
../../../../../../nix/store/v64dzx3vr3glpk0cq4hzmh450lrwh6sg-schema.cue:5:11
```
**Known limitations**
* The script will enforce **concrete** values and will not accept lossy transformations (strictness). You can add these options if you need them.

View file

@ -91,7 +91,7 @@ buildDhallPackage {
let
nixpkgs = builtins.fetchTarball {
url = "https://github.com/NixOS/nixpkgs/archive/94b2848559b12a8ed1fe433084686b2a81123c99.tar.gz";
sha256 = "1pbl4c2dsaz2lximgd31m96jwbps6apn3anx8cvvhk1gl9rkg107";
sha256 = "sha256-B4Q3c6IvTLg3Q92qYa8y+i4uTaphtFdjp+Ir3QQjdN0=";
};
dhallOverlay = self: super: {
@ -295,7 +295,7 @@ terms of `buildDhallPackage` that accepts the following arguments:
* `document`: Set to `true` to generate documentation for the package
Additionally, `buildDhallGitHubPackage` accepts the same arguments as
`fetchFromGitHub`, such as `sha256` or `fetchSubmodules`.
`fetchFromGitHub`, such as `hash` or `fetchSubmodules`.
## `dhall-to-nixpkgs` {#ssec-dhall-dhall-to-nixpkgs}
@ -316,7 +316,7 @@ $ dhall-to-nixpkgs github https://github.com/Gabriel439/dhall-semver.git
repo = "dhall-semver";
rev = "2d44ae605302ce5dc6c657a1216887fbb96392a4";
fetchSubmodules = false;
sha256 = "0y8shvp8srzbjjpmnsvz9c12ciihnx1szs0yzyi9ashmrjvd0jcz";
hash = "sha256-n0nQtswVapWi/x7or0O3MEYmAkt/a1uvlOtnje6GGnk=";
directory = "";
file = "package.dhall";
source = false;

View file

@ -121,7 +121,7 @@ This `xmlmirror` example features a emscriptenPackage which is defined completel
src = pkgs.fetchgit {
url = "https://gitlab.com/odfplugfest/xmlmirror.git";
rev = "4fd7e86f7c9526b8f4c1733e5c8b45175860a8fd";
sha256 = "1jasdqnbdnb83wbcnyrp32f36w3xwhwp0wq8lwwmhqagxrij1r4b";
hash = "sha256-i+QgY+5PYVg5pwhzcDnkfXAznBg3e8sWH2jZtixuWsk=";
};
configurePhase = ''

View file

@ -11,7 +11,13 @@ The function `buildGoModule` builds Go programs managed with Go modules. It buil
The following is an example expression using `buildGoModule`; these arguments are of special significance to the function:
- `vendorHash`: is the hash of the output of the intermediate fetcher derivation. `vendorHash` can also take `null` as an input. When `null` is used as a value, rather than fetching the dependencies and vendoring them, we use the vendoring included within the source repo. If you'd like to not have to update this field on dependency changes, run `go mod vendor` in your source repo and set `vendorHash = null;`
- `vendorHash`: is the hash of the output of the intermediate fetcher derivation.
`vendorHash` can also be set to `null`.
In that case, rather than fetching the dependencies and vendoring them, the dependencies vendored in the source repo will be used.
To avoid updating this field when dependencies change, run `go mod vendor` in your source repo and set `vendorHash = null;`
To obtain the actual hash, set `vendorHash = lib.fakeSha256;` and run the build ([more details here](#sec-source-hashes)).
- `proxyVendor`: Fetches (`go mod download`) and proxies the vendor directory. This is useful if your code depends on C code and `go mod tidy` does not include the needed sources to build, or if any dependency has case-insensitive conflicts which will produce platform-dependent `vendorHash` checksums.
```nix
@ -23,7 +29,7 @@ pet = buildGoModule rec {
owner = "knqyf263";
repo = "pet";
rev = "v${version}";
sha256 = "0m2fzpqxk7hrbxsgqplkg7h2p7gv6s1miymv3gvw0cz039skag0s";
hash = "sha256-Gjw1dRrgM8D3G7v6WIM2+50r4HmTXvx0Xxme2fH9TlQ=";
};
vendorHash = "sha256-ciBIR+a1oaYH+H1PcC8cD8ncfJczk1IiJ8iYNM+R6aA=";
@ -59,7 +65,7 @@ deis = buildGoPackage rec {
owner = "deis";
repo = "deis";
rev = "v${version}";
sha256 = "1qv9lxqx7m18029lj8cw3k7jngvxs4iciwrypdy0gd2nnghc68sw";
hash = "sha256-XCPD4LNWtAd8uz7zyCLRfT8rzxycIUmTACjU03GnaeM=";
};
goDeps = ./deps.nix;
@ -76,11 +82,11 @@ The `goDeps` attribute can be imported from a separate `nix` file that defines w
goPackagePath = "gopkg.in/yaml.v2";
fetch = {
# `fetch type` that needs to be used to get package source.
# If `git` is used there should be `url`, `rev` and `sha256` defined next to it.
# If `git` is used there should be `url`, `rev` and `hash` defined next to it.
type = "git";
url = "https://gopkg.in/yaml.v2";
rev = "a83829b6f1293c91addabc89d0571c246397bbf4";
sha256 = "1m4dsmk90sbi17571h6pld44zxz7jc4lrnl4f27dpd1l8g5xvjhh";
hash = "sha256-EMrdy0M0tNuOcITaTAmT5/dPSKPXwHDKCXFpkGbVjdQ=";
};
}
{
@ -89,7 +95,7 @@ The `goDeps` attribute can be imported from a separate `nix` file that defines w
type = "git";
url = "https://github.com/docopt/docopt-go";
rev = "784ddc588536785e7299f7272f39101f7faccc3f";
sha256 = "0wwz48jl9fvl1iknvn9dqr4gfy1qs03gxaikrxxp9gry6773v3sj";
hash = "sha256-Uo89zjE+v3R7zzOq/gbQOHj3SMYt2W1nDHS7RCUin3M=";
};
}
]

View file

@ -90,7 +90,7 @@ build-idris-package {
owner = "Heather";
repo = "Idris.Yaml";
rev = "5afa51ffc839844862b8316faba3bafa15656db4";
sha256 = "1g4pi0swmg214kndj85hj50ccmckni7piprsxfdzdfhg87s0avw7";
hash = "sha256-h28F9EEPuvab6zrfeE+0k1XGQJGwINnsJEG8yjWIl7w=";
};
meta = with lib; {

View file

@ -13,6 +13,7 @@
<xi:include href="coq.section.xml" />
<xi:include href="crystal.section.xml" />
<xi:include href="cuda.section.xml" />
<xi:include href="cuelang.section.xml" />
<xi:include href="dhall.section.xml" />
<xi:include href="dotnet.section.xml" />
<xi:include href="emscripten.section.xml" />

View file

@ -242,7 +242,7 @@ If the downloaded files contain the `package.json` and `yarn.lock` files they ca
```nix
offlineCache = fetchYarnDeps {
yarnLock = src + "/yarn.lock";
sha256 = "....";
hash = "....";
};
```
@ -330,13 +330,16 @@ mkYarnPackage rec {
- The `echo 9` steps comes from this answer: <https://stackoverflow.com/a/49139496>
- Exporting the headers in `npm_config_nodedir` comes from this issue: <https://github.com/nodejs/node-gyp/issues/1191#issuecomment-301243919>
## Outside of nixpkgs {#javascript-outside-nixpkgs}
## Outside Nixpkgs {#javascript-outside-nixpkgs}
There are some other options available that can't be used inside nixpkgs. Those other options are written in Nix. Importing them in nixpkgs will require moving the source code into nixpkgs. Using [Import From Derivation](https://nixos.wiki/wiki/Import_From_Derivation) is not allowed in Hydra at present. If you are packaging something outside nixpkgs, those can be considered
There are some other tools available, which are written in the Nix language.
These can't be used inside Nixpkgs because they require [Import From Derivation](#ssec-import-from-derivation), which is not allowed in Nixpkgs.
If you are packaging something outside Nixpkgs, consider the following:
### npmlock2nix {#javascript-npmlock2nix}
[npmlock2nix](https://github.com/nix-community/npmlock2nix) aims at building node_modules without code generation. It hasn't reached v1 yet, the API might be subject to change.
[npmlock2nix](https://github.com/nix-community/npmlock2nix) aims at building `node_modules` without code generation. It hasn't reached v1 yet; the API might be subject to change.
#### Pitfalls {#javascript-npmlock2nix-pitfalls}
@ -344,7 +347,7 @@ There are some [problems with npm v7](https://github.com/tweag/npmlock2nix/issue
### nix-npm-buildpackage {#javascript-nix-npm-buildpackage}
[nix-npm-buildpackage](https://github.com/serokell/nix-npm-buildpackage) aims at building node_modules without code generation. It hasn't reached v1 yet, the API might change. It supports both package-lock.json and yarn.lock.
[nix-npm-buildpackage](https://github.com/serokell/nix-npm-buildpackage) aims at building `node_modules` without code generation. It hasn't reached v1 yet; the API might change. It supports both `package-lock.json` and `yarn.lock`.
#### Pitfalls {#javascript-nix-npm-buildpackage-pitfalls}

View file

@ -183,7 +183,7 @@ luaposix = buildLuarocksPackage {
src = fetchurl {
url = "https://raw.githubusercontent.com/rocks-moonscript-org/moonrocks-mirror/master/luaposix-34.0.4-1.src.rock";
sha256 = "0yrm5cn2iyd0zjd4liyj27srphvy0gjrjx572swar6zqr4dwjqp2";
hash = "sha256-4mLJG8n4m6y4Fqd0meUDfsOb9RHSR0qa/KD5KCwrNXs=";
};
disabled = (luaOlder "5.1") || (luaAtLeast "5.4");
propagatedBuildInputs = [ bit32 lua std_normalize ];

View file

@ -25,7 +25,7 @@ nimPackages.buildNimPackage rec {
src = fetchurl {
url = "https://git.sr.ht/~ehmry/hottext/archive/v${version}.tar.gz";
sha256 = "sha256-hIUofi81zowSMbt1lUsxCnVzfJGN3FEiTtN8CEFpwzY=";
hash = "sha256-hIUofi81zowSMbt1lUsxCnVzfJGN3FEiTtN8CEFpwzY=";
};
buildInputs = with nimPackages; [
@ -65,7 +65,7 @@ buildNimPackage rec {
version = "2.0.4";
src = fetchNimble {
inherit pname version;
hash = "sha256-Vtcj8goI4zZPQs2TbFoBFlcR5UqDtOldaXSH/+/xULk=";
hash = "sha256-qDtVSnf+7rTq36WAxgsUZ8XoUk4sKwHyt8EJcY5WP+o=";
};
propagatedBuildInputs = [ SDL2 ];
}

View file

@ -79,7 +79,7 @@ buildDunePackage rec {
owner = "inhabitedtype";
repo = pname;
rev = version;
sha256 = "1hmrkdcdlkwy7rxhngf3cv3sa61cznnd9p5lmqhx20664gx2ibrh";
hash = "sha256-MK8o+iPGANEhrrTc1Kz9LBilx2bDPQt7Pp5P2libucI=";
};
checkInputs = [ alcotest ppx_let ];
@ -110,7 +110,7 @@ buildDunePackage rec {
src = fetchurl {
url = "https://github.com/flowtype/ocaml-${pname}/releases/download/v${version}/${pname}-v${version}.tbz";
sha256 = "09ygcxxd5warkdzz17rgpidrd0pg14cy2svvnvy1hna080lzg7vp";
hash = "sha256-d5/3KUBAWRj8tntr4RkJ74KWW7wvn/B/m1nx0npnzyc=";
};
meta = with lib; {

View file

@ -39,7 +39,7 @@ ClassC3 = buildPerlPackage rec {
version = "0.21";
src = fetchurl {
url = "mirror://cpan/authors/id/F/FL/FLORA/${pname}-${version}.tar.gz";
sha256 = "1bl8z095y4js66pwxnm7s853pi9czala4sqc743fdlnk27kq94gz";
hash = "sha256-/5GE5xHT0uYGOQxroqj6LMU7CtKn2s6vMVoSXxL4iK4=";
};
};
```
@ -78,7 +78,7 @@ buildPerlPackage rec {
src = fetchurl {
url = "mirror://cpan/authors/id/P/PM/PMQS/${pname}-${version}.tar.gz";
sha256 = "07xf50riarb60l1h6m2dqmql8q5dij619712fsgw7ach04d8g3z1";
hash = "sha256-4Y+HGgGQqcOfdiKcFIyMrWBEccVNVAMDBWZlFTMorh8=";
};
preConfigure = ''
@ -96,7 +96,7 @@ ClassC3Componentised = buildPerlPackage rec {
version = "1.0004";
src = fetchurl {
url = "mirror://cpan/authors/id/A/AS/ASH/${pname}-${version}.tar.gz";
sha256 = "0xql73jkcdbq4q9m0b0rnca6nrlvf5hyzy8is0crdk65bynvs8q1";
hash = "sha256-ASO9rV/FzJYZ0BH572Fxm2ZrFLMZLFATJng1NuU4FHc=";
};
propagatedBuildInputs = [
ClassC3 ClassInspector TestException MROCompat
@ -111,11 +111,11 @@ On Darwin, if a script has too many `-Idir` flags in its first line (its “sheb
ImageExifTool = buildPerlPackage {
pname = "Image-ExifTool";
version = "11.50";
version = "12.50";
src = fetchurl {
url = "https://www.sno.phy.queensu.ca/~phil/exiftool/${pname}-${version}.tar.gz";
sha256 = "0d8v48y94z8maxkmw1rv7v9m0jg2dc8xbp581njb6yhr7abwqdv3";
url = "https://exiftool.org/${pname}-${version}.tar.gz";
hash = "sha256-vOhB/FwQMC8PPvdnjDvxRpU6jAZcC6GMQfc0AH4uwKg=";
};
buildInputs = lib.optional stdenv.isDarwin shortenPerlShebang;
@ -146,7 +146,7 @@ $ nix-generate-from-cpan XML::Simple
version = "2.22";
src = fetchurl {
url = "mirror://cpan/authors/id/G/GR/GRANTM/XML-Simple-2.22.tar.gz";
sha256 = "b9450ef22ea9644ae5d6ada086dc4300fa105be050a2030ebd4efd28c198eb49";
hash = "sha256-uUUO8i6pZErl1q2ghtxDAPoQW+BQogMOvU79KMGY60k=";
};
propagatedBuildInputs = [ XMLNamespaceSupport XMLSAX XMLSAXExpat ];
meta = {

View file

@ -356,7 +356,7 @@ buildPythonPackage rec {
src = fetchPypi {
inherit pname version;
sha256 = "08fdd5ef7c96480ad11c12d472de21acd32359996f69a5259299b540feba4560";
hash = "sha256-CP3V73yWSArRHBLUct4hrNMjWZlvaaUlkpm1QP66RWA=";
};
doCheck = false;
@ -401,7 +401,7 @@ with import <nixpkgs> {};
src = python39.pkgs.fetchPypi {
inherit pname version;
sha256 = "08fdd5ef7c96480ad11c12d472de21acd32359996f69a5259299b540feba4560";
hash = "sha256-CP3V73yWSArRHBLUct4hrNMjWZlvaaUlkpm1QP66RWA=";
};
doCheck = false;
@ -450,7 +450,7 @@ buildPythonPackage rec {
src = fetchPypi {
inherit pname version;
sha256 = "14b2ef766d4c9652ab813182e866f493475e65e558bed0822e38bf07bba1a278";
hash = "sha256-FLLvdm1MllKrgTGC6Gb0k0deZeVYvtCCLji/B7uhong=";
};
checkInputs = [ pytest ];
@ -484,7 +484,7 @@ buildPythonPackage rec {
src = fetchPypi {
inherit pname version;
sha256 = "16a0fa97hym9ysdk3rmqz32xdjqmy4w34ld3rm3jf5viqjx65lxk";
hash = "sha256-s9NiusRxFydHzaNRMjjxFcvWxfi45jGb9ql6eJJyQJk=";
};
buildInputs = [ pkgs.libxml2 pkgs.libxslt ];
@ -517,7 +517,7 @@ buildPythonPackage rec {
src = fetchPypi {
inherit pname version;
sha256 = "f6bbb6afa93085409ab24885a1a3cdb8909f095a142f4d49e346f2bd1b789074";
hash = "sha256-9ru2r6kwhUCaskiFoaPNuJCfCVoUL01J40byvRt4kHQ=";
};
buildInputs = [ pkgs.fftw pkgs.fftwFloat pkgs.fftwLongDouble];
@ -855,7 +855,7 @@ buildPythonPackage rec {
src = fetchPypi {
inherit pname version;
sha256 = "08fdd5ef7c96480ad11c12d472de21acd32359996f69a5259299b540feba4560";
hash = "sha256-CP3V73yWSArRHBLUct4hrNMjWZlvaaUlkpm1QP66RWA=";
};
meta = with lib; {
@ -988,7 +988,7 @@ buildPythonPackage rec {
src = fetchPypi {
inherit pname version;
sha256 = "cf8436dc59d8695346fcd3ab296de46425ecab00d64096cebe79fb51ecb2eb93";
hash = "sha256-z4Q23FnYaVNG/NOrKW3kZCXsqwDWQJbOvnn7Ueyy65M=";
};
postPatch = ''
@ -1098,7 +1098,7 @@ with import <nixpkgs> {};
src = super.fetchPypi {
pname = "pandas";
inherit version;
sha256 = "08blshqj9zj1wyjhhw3kl2vas75vhhicvv72flvf1z3jvapgw295";
hash = "sha256-JQn+rtpy/OA2deLszSKEuxyttqBzcAil50H+JDHUdCE=";
};
});
};
@ -1158,7 +1158,7 @@ python3.pkgs.buildPythonApplication rec {
src = python3.pkgs.fetchPypi {
inherit pname version;
sha256 = "035w8gqql36zlan0xjrzz9j4lh9hs0qrsgnbyw07qs7lnkvbdv9x";
hash = "sha256-Pe229rT0aHwA98s+nTHQMEFKZPo/yw6sot8MivFDvAw=";
};
propagatedBuildInputs = with python3.pkgs; [ tornado python-daemon ];
@ -1674,9 +1674,9 @@ If you need to change a package's attribute(s) from `configuration.nix` you coul
packageOverrides = python-self: python-super: {
twisted = python-super.twisted.overridePythonAttrs (oldAttrs: {
src = super.fetchPypi {
pname = "twisted";
pname = "Twisted";
version = "19.10.0";
sha256 = "7394ba7f272ae722a74f3d969dcf599bc4ef093bc392038748a490f1724a515d";
hash = "sha256-c5S6fycq5yKnTz2Wnc9Zm8TvCTvDkgOHSKSQ8XJKUV0=";
extension = "tar.bz2";
};
});
@ -1712,9 +1712,9 @@ self: super: {
packageOverrides = python-self: python-super: {
twisted = python-super.twisted.overrideAttrs (oldAttrs: {
src = super.fetchPypi {
pname = "twisted";
pname = "Twisted";
version = "19.10.0";
sha256 = "7394ba7f272ae722a74f3d969dcf599bc4ef093bc392038748a490f1724a515d";
hash = "sha256-c5S6fycq5yKnTz2Wnc9Zm8TvCTvDkgOHSKSQ8XJKUV0=";
extension = "tar.bz2";
};
});

View file

@ -30,10 +30,10 @@ rustPlatform.buildRustPackage rec {
owner = "BurntSushi";
repo = pname;
rev = version;
sha256 = "1hqps7l5qrjh9f914r5i6kmcz6f1yb951nv4lby0cjnp5l253kps";
hash = "sha256-+s5RBC3XSgb8omTbUNLywZnP6jSxZBKSS1BmXOjRF8M=";
};
cargoSha256 = "03wf9r2csi6jpa7v5sw5lpxkrk4wfzwmzx7k3991q3bdjzcwnnwp";
cargoHash = "sha256-jtBw4ahSl88L0iuCXxQgZVm1EcboWRJMNtjxLVTtzts=";
meta = with lib; {
description = "A fast line-oriented regex search tool, similar to ag and ack";
@ -97,10 +97,10 @@ rustPlatform.buildRustPackage rec {
src = fetchCrate {
inherit pname version;
sha256 = "1mqaynrqaas82f5957lx31x80v74zwmwmjxxlbywajb61vh00d38";
sha256 = "sha256-aDQA4A5mScX9or3Lyiv/5GyAehidnpKKE0grhbP1Ctc=";
};
cargoHash = "sha256-JmBZcDVYJaK1cK05cxx5BrnGWp4t8ca6FLUbvIot67s=";
cargoHash = "sha256-tbrTbutUs5aPSV+yE0IBUZAAytgmZV7Eqxia7g+9zRs=";
cargoDepsName = pname;
# ...
@ -331,6 +331,20 @@ rustPlatform.buildRustPackage {
}
```
#### Using `cargo-nextest` {#using-cargo-nextest}
Tests can be run with [cargo-nextest](https://github.com/nextest-rs/nextest)
by setting `useNextest = true`. The same options still apply, but nextest
accepts a different set of arguments and the settings might need to be
adapted to be compatible with cargo-nextest.
```nix
rustPlatform.buildRustPackage {
/* ... */
useNextest = true;
}
```
#### Setting `test-threads` {#setting-test-threads}
`buildRustPackage` will use parallel test threads by default,
@ -403,8 +417,8 @@ cargoDeps = rustPlatform.fetchCargoTarball {
```
The `src` attribute is required, as well as a hash specified through
one of the `sha256` or `hash` attributes. The following optional
attributes can also be used:
the `hash` attribute. The following optional attributes can
also be used:
* `name`: the name that is used for the dependencies tarball. If
`name` is not specified, then the name `cargo-deps` will be used.
@ -474,6 +488,9 @@ you of the correct hash.
flags can be passed to the tests using `checkFlags` and
`checkFlagsArray`. By default, tests are run in parallel. This can
be disabled by setting `dontUseCargoParallelTests`.
* `cargoNextestHook`: run tests using
[cargo-nextest](https://github.com/nextest-rs/nextest). The same
options for `cargoCheckHook` also apply to `cargoNextestHook`.
* `cargoInstallHook`: install binaries and static/shared libraries
that were built using `cargoBuildHook`.
* `bindgenHook`: for crates which use `bindgen` as a build dependency, lets
@ -514,7 +531,7 @@ buildPythonPackage rec {
cargoDeps = rustPlatform.fetchCargoTarball {
inherit src sourceRoot;
name = "${pname}-${version}";
hash = "sha256-BoHIN/519Top1NUBjpB/oEMqi86Omt3zTQcXFWqrek0=";
hash = "sha256-miW//pnOmww2i6SOGbkrAIdc/JMDT4FJLqdMFojZeoY=";
};
sourceRoot = "source/bindings/python";
@ -551,7 +568,7 @@ buildPythonPackage rec {
src = fetchPypi {
inherit pname version;
sha256 = "1i1mx5y9hkyfi9jrrkcw804hmkcglxi6rmf7vin7jfnbr2bf4q64";
hash = "sha256-xGDilsjLOnls3MfVbGKnj80KCUCczZxlis5PmHzpNcQ=";
};
cargoDeps = rustPlatform.fetchCargoTarball {
@ -591,7 +608,7 @@ buildPythonPackage rec {
owner = "Qiskit";
repo = "retworkx";
rev = version;
sha256 = "11n30ldg3y3y6qxg3hbj837pnbwjkqw3nxq6frds647mmmprrd20";
hash = "sha256-11n30ldg3y3y6qxg3hbj837pnbwjkqw3nxq6frds647mmmprrd20=";
};
cargoDeps = rustPlatform.fetchCargoTarball {
@ -665,7 +682,7 @@ let src = fetchFromGitHub {
repo = "nixpkgs-mozilla";
# commit from: 2019-05-15
rev = "9f35c4b09fd44a77227e79ff0c1b4b6a69dff533";
sha256 = "18h0nvh55b5an4gmlgfbvwbyqj91bklf1zymis6lbdh75571qaz0";
hash = "sha256-18h0nvh55b5an4gmlgfbvwbyqj91bklf1zymis6lbdh75571qaz0=";
};
in
with import "${src.out}/rust-overlay.nix" pkgs pkgs;
@ -756,7 +773,7 @@ rustPlatform.buildRustPackage rec {
owner = "BurntSushi";
repo = "ripgrep";
rev = version;
sha256 = "1hqps7l5qrjh9f914r5i6kmcz6f1yb951nv4lby0cjnp5l253kps";
hash = "sha256-1hqps7l5qrjh9f914r5i6kmcz6f1yb951nv4lby0cjnp5l253kps=";
};
cargoSha256 = "03wf9r2csi6jpa7v5sw5lpxkrk4wfzwmzx7k3991q3bdjzcwnnwp";

View file

@ -55,11 +55,11 @@ let
srcs = [
(fetchurl {
url = "http://mirrors.ctan.org/macros/latex/contrib/foiltex/foiltex.dtx";
sha256 = "07frz0krpz7kkcwlayrwrj2a2pixmv0icbngyw92srp9fp23cqpz";
hash = "sha256-/2I2xHXpZi0S988uFsGuPV6hhMw8e0U5m/P8myf42R0=";
})
(fetchurl {
url = "http://mirrors.ctan.org/macros/latex/contrib/foiltex/foiltex.ins";
sha256 = "09wkyidxk3n3zvqxfs61wlypmbhi1pxmjdi1kns9n2ky8ykbff99";
hash = "sha256-KTm3pkd+Cpu0nSE2WfsNEa56PeXBaNfx/sOO2Vv0kyc=";
})
];

View file

@ -10,12 +10,21 @@ At the moment we support two different methods for managing plugins:
- Vim packages (*recommended*)
- vim-plug (vim only)
Right now two Vim packages are available: `vim`, which has most features that require extra
dependencies disabled, and `vim-full`, which has them configurable and enabled by default.
::: {.note}
`vim_configurable` is a deprecated alias for `vim-full` and refers to the fact that its
build-time features are configurable. It has nothing to do with user configuration,
and both the `vim` and `vim-full` packages can be customized as explained in the next section.
:::
## Custom configuration {#custom-configuration}
Adding custom .vimrc lines can be done using the following code:
```nix
vim_configurable.customize {
vim-full.customize {
# `name` optionally specifies the name of the executable and package
name = "vim-with-plugins";
@ -62,7 +71,7 @@ neovim-qt.override {
To store your plugins in Vim packages (the native Vim plugin manager, see `:help packages`) the following example can be used:
```nix
vim_configurable.customize {
vim-full.customize {
vimrcConfig.packages.myVimPackage = with pkgs.vimPlugins; {
# loaded on launch
start = [ youcompleteme fugitive ];
@ -101,7 +110,7 @@ The resulting package can be added to `packageOverrides` in `~/.nixpkgs/config.n
```nix
{
packageOverrides = pkgs: with pkgs; {
myVim = vim_configurable.customize {
myVim = vim-full.customize {
# `name` specifies the name of the executable and package
name = "vim-with-plugins";
# add here code from the example section
@ -131,7 +140,7 @@ let
owner = "dkprice";
repo = "vim-easygrep";
rev = "d0c36a77cc63c22648e792796b1815b44164653a";
sha256 = "0y2p5mz0d5fhg6n68lhfhl8p4mlwkb82q337c22djs4w5zyzggbc";
hash = "sha256-bL33/S+caNmEYGcMLNCanFZyEYUOUmSsedCVBn4tV3g=";
};
};
in
@ -190,7 +199,7 @@ To use [vim-plug](https://github.com/junegunn/vim-plug) to manage your Vim
plugins the following example can be used:
```nix
vim_configurable.customize {
vim-full.customize {
vimrcConfig.packages.myVimPackage = with pkgs.vimPlugins; {
# loaded on launch
plug.plugins = [ youcompleteme fugitive phpCompletion elm-vim ];

View file

@ -61,7 +61,7 @@ stdenv.mkDerivation {
builder = ./builder.sh;
src = fetchurl {
url = "http://ftp.nluug.nl/gnu/binutils/binutils-2.16.1.tar.bz2";
sha256 = "1ian3kwh2vg6hr3ymrv48s04gijs539vzrq62xr76bxbhbwnz2np";
hash = "sha256-14pv+YKrL3NyFwbnv9MoWsZHgEZk5+pHhuZtAfkcVsU=";
};
inherit noSysDirs;
configureFlags = [ "--target=arm-linux" ];
@ -85,7 +85,7 @@ stdenv.mkDerivation {
builder = ./builder.sh;
src = fetchurl {
url = "http://www.kernel.org/pub/linux/kernel/v2.6/linux-2.6.13.1.tar.bz2";
sha256 = "12qxmc827fjhaz53kjy7vyrzsaqcg78amiqsb3qm20z26w705lma";
hash = "sha256-qtICDjfiA1HxWBrHqtB5DCv9s9/HyznKV1C6IxCrHYs=";
};
}
---
@ -151,8 +151,8 @@ stdenv.mkDerivation {
name = "gcc-4.0.2-arm";
builder = ./builder.sh;
src = fetchurl {
url = ftp://ftp.nluug.nl/pub/gnu/gcc/gcc-4.0.2/gcc-core-4.0.2.tar.bz2;
sha256 = "02fxh0asflm8825w23l2jq1wvs7hbnam0jayrivg7zdv2ifnc0rc";
url = "ftp://ftp.nluug.nl/pub/gnu/gcc/gcc-4.0.2/gcc-core-4.0.2.tar.bz2";
hash = "sha256-LANmXRS7/fN2zF5JUJVd8OjNA5aCDsGLQKhSpxWA3Qk=";
};
# !!! apply only if noSysDirs is set
patches = [./no-sys-dirs.patch ./gcc-inhibit.patch];

View file

@ -11,58 +11,12 @@ meta = with lib; {
'';
homepage = "https://www.gnu.org/software/hello/manual/";
license = licenses.gpl3Plus;
maintainers = [ maintainers.eelco ];
maintainers = with maintainers; [ eelco ];
platforms = platforms.all;
};
```
Meta-attributes are not passed to the builder of the package. Thus, a change to a meta-attribute doesn't trigger a recompilation of the package. The value of a meta-attribute must be a string.
The meta-attributes of a package can be queried from the command-line using `nix-env`:
```ShellSession
$ nix-env -qa hello --json
{
"hello": {
"meta": {
"description": "A program that produces a familiar, friendly greeting",
"homepage": "https://www.gnu.org/software/hello/manual/",
"license": {
"fullName": "GNU General Public License version 3 or later",
"shortName": "GPLv3+",
"url": "http://www.fsf.org/licensing/licenses/gpl.html"
},
"longDescription": "GNU Hello is a program that prints \"Hello, world!\" when you run it.\nIt is fully customizable.\n",
"maintainers": [
"Ludovic Court\u00e8s <ludo@gnu.org>"
],
"platforms": [
"i686-linux",
"x86_64-linux",
"armv5tel-linux",
"armv7l-linux",
"mips32-linux",
"x86_64-darwin",
"i686-cygwin",
"i686-freebsd13",
"x86_64-freebsd13",
"i686-openbsd",
"x86_64-openbsd"
],
"position": "/home/user/dev/nixpkgs/pkgs/applications/misc/hello/default.nix:14"
},
"name": "hello-2.9",
"system": "x86_64-linux"
}
}
```
`nix-env` knows about the `description` field specifically:
```ShellSession
$ nix-env -qa hello --description
hello-2.3 A program that produces a familiar, friendly greeting
```
Meta-attributes are not passed to the builder of the package. Thus, a change to a meta-attribute doesn't trigger a recompilation of the package.
## Standard meta-attributes {#sec-standard-meta-attributes}

View file

@ -11,7 +11,7 @@ stdenv.mkDerivation {
name = "libfoo-1.2.3";
src = fetchurl {
url = "http://example.org/libfoo-1.2.3.tar.bz2";
sha256 = "0x2g1jqygyr5wiwg4ma1nd7w4ydpy82z9gkcv8vh2v8dn3y58v5m";
hash = "sha256-tWxU/LANbQE32my+9AXyt3nCT7NBVfJ45CX757EMT3Q=";
};
}
```
@ -24,7 +24,7 @@ stdenv.mkDerivation rec {
version = "1.2.3";
src = fetchurl {
url = "http://example.org/libfoo-source-${version}.tar.bz2";
sha256 = "0x2g1jqygyr5wiwg4ma1nd7w4ydpy82z9gkcv8vh2v8dn3y58v5m";
hash = "sha256-tWxU/LANbQE32my+9AXyt3nCT7NBVfJ45CX757EMT3Q=";
};
}
```

View file

@ -74,8 +74,8 @@ Example usage:
mySed = pkgs.gnused.overrideDerivation (oldAttrs: {
name = "sed-4.2.2-pre";
src = fetchurl {
url = ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2;
sha256 = "11nq06d131y4wmf3drm0yk502d2xc6n5qy82cg88rb9nqd2lj41k";
url = "ftp://alpha.gnu.org/gnu/sed/sed-4.2.2-pre.tar.bz2";
hash = "sha256-MxBJRcM2rYzQYwJ5XKxhXTQByvSg5jZc5cSHEZoB2IY=";
};
patches = [];
});

View file

@ -16,11 +16,15 @@ rec {
assertMsg :: Bool -> String -> Bool
*/
# TODO(Profpatsch): add tests that check stderr
assertMsg = pred: msg:
assertMsg =
# Predicate that needs to succeed, otherwise `msg` is thrown
pred:
# Message to throw in case `pred` fails
msg:
pred || builtins.throw msg;
/* Specialized `assertMsg` for checking if val is one of the elements
of a list. Useful for checking enums.
/* Specialized `assertMsg` for checking if `val` is one of the elements
of the list `xs`. Useful for checking enums.
Example:
let sslLibrary = "libressl";
@ -33,7 +37,14 @@ rec {
Type:
assertOneOf :: String -> ComparableVal -> List ComparableVal -> Bool
*/
assertOneOf = name: val: xs: assertMsg
assertOneOf =
# The name of the variable the user entered `val` into, for inclusion in the error message
name:
# The value of what the user provided, to be compared against the values in `xs`
val:
# The list of valid values
xs:
assertMsg
(lib.elem val xs)
"${name} must be one of ${
lib.generators.toPretty {} xs}, but is: ${

View file

@ -20,13 +20,22 @@ rec {
=> 3
attrByPath ["z" "z"] 6 x
=> 6
Type:
attrByPath :: [String] -> Any -> AttrSet -> Any
*/
attrByPath = attrPath: default: e:
attrByPath =
# A list of strings representing the attribute path to return from `set`
attrPath:
# Default value if `attrPath` does not resolve to an existing value
default:
# The nested attribute set to select values from
set:
let attr = head attrPath;
in
if attrPath == [] then e
else if e ? ${attr}
then attrByPath (tail attrPath) default e.${attr}
if attrPath == [] then set
else if set ? ${attr}
then attrByPath (tail attrPath) default set.${attr}
else default;
/* Return if an attribute from nested attribute set exists.
@ -38,8 +47,14 @@ rec {
hasAttrByPath ["z" "z"] x
=> false
Type:
hasAttrByPath :: [String] -> AttrSet -> Bool
*/
hasAttrByPath = attrPath: e:
hasAttrByPath =
# A list of strings representing the attribute path to check from `set`
attrPath:
# The nested attribute set to check
e:
let attr = head attrPath;
in
if attrPath == [] then true
@ -48,13 +63,20 @@ rec {
else false;
/* Return nested attribute set in which an attribute is set.
/* Create a new attribute set with `value` set at the nested attribute location specified in `attrPath`.
Example:
setAttrByPath ["a" "b"] 3
=> { a = { b = 3; }; }
Type:
setAttrByPath :: [String] -> Any -> AttrSet
*/
setAttrByPath = attrPath: value:
setAttrByPath =
# A list of strings representing the attribute path to set
attrPath:
# The value to set at the location described by `attrPath`
value:
let
len = length attrPath;
atDepth = n:
@ -63,8 +85,8 @@ rec {
else { ${elemAt attrPath n} = atDepth (n + 1); };
in atDepth 0;
/* Like `attrByPath' without a default value. If it doesn't find the
path it will throw.
/* Like `attrByPath', but without a default value. If it doesn't find the
path it will throw an error.
Example:
x = { a = { b = 3; }; }
@ -72,10 +94,17 @@ rec {
=> 3
getAttrFromPath ["z" "z"] x
=> error: cannot find attribute `z.z'
Type:
getAttrFromPath :: [String] -> AttrSet -> Value
*/
getAttrFromPath = attrPath:
getAttrFromPath =
# A list of strings representing the attribute path to get from `set`
attrPath:
# The nested attribute set to find the value in.
set:
let errorMsg = "cannot find attribute `" + concatStringsSep "." attrPath + "'";
in attrByPath attrPath (abort errorMsg);
in attrByPath attrPath (abort errorMsg) set;
/* Map each attribute in the given set and merge them into a new attribute set.
@ -101,19 +130,23 @@ rec {
Takes a list of updates to apply and an attribute set to apply them to,
and returns the attribute set with the updates applied. Updates are
represented as { path = ...; update = ...; } values, where `path` is a
represented as `{ path = ...; update = ...; }` values, where `path` is a
list of strings representing the attribute path that should be updated,
and `update` is a function that takes the old value at that attribute path
as an argument and returns the new
value it should be.
Properties:
- Updates to deeper attribute paths are applied before updates to more
shallow attribute paths
- Multiple updates to the same attribute path are applied in the order
they appear in the update list
- If any but the last `path` element leads into a value that is not an
attribute set, an error is thrown
- If there is an update for an attribute path that doesn't exist,
accessing the argument in the update function causes an error, but
intermediate attribute sets are implicitly created as needed
@ -134,6 +167,9 @@ rec {
}
] { a.b.c = 0; }
=> { a = { b = { d = 1; }; }; x = { y = "xy"; }; }
Type:
updateManyAttrsByPath :: [AttrSet] -> AttrSet -> AttrSet
*/
updateManyAttrsByPath = let
# When recursing into attributes, instead of updating the `path` of each
@ -199,8 +235,15 @@ rec {
Example:
attrVals ["a" "b" "c"] as
=> [as.a as.b as.c]
Type:
attrVals :: [String] -> AttrSet -> [Any]
*/
attrVals = nameList: set: map (x: set.${x}) nameList;
attrVals =
# The list of attributes to fetch from `set`. Each attribute name must exist on the attribute set
nameList:
# The set to get attribute values from
set: map (x: set.${x}) nameList;
/* Return the values of all attributes in the given set, sorted by
@ -209,6 +252,8 @@ rec {
Example:
attrValues {c = 3; a = 1; b = 2;}
=> [1 2 3]
Type:
attrValues :: AttrSet -> [Any]
*/
attrValues = builtins.attrValues or (attrs: attrVals (attrNames attrs) attrs);
@ -219,8 +264,15 @@ rec {
Example:
getAttrs [ "a" "b" ] { a = 1; b = 2; c = 3; }
=> { a = 1; b = 2; }
Type:
getAttrs :: [String] -> AttrSet -> AttrSet
*/
getAttrs = names: attrs: genAttrs names (name: attrs.${name});
getAttrs =
# A list of attribute names to get out of `set`
names:
# The set to get the named attributes from
attrs: genAttrs names (name: attrs.${name});
/* Collect each attribute named `attr' from a list of attribute
sets. Sets that don't contain the named attribute are ignored.
@ -228,6 +280,9 @@ rec {
Example:
catAttrs "a" [{a = 1;} {b = 0;} {a = 2;}]
=> [1 2]
Type:
catAttrs :: String -> [AttrSet] -> [Any]
*/
catAttrs = builtins.catAttrs or
(attr: l: concatLists (map (s: if s ? ${attr} then [s.${attr}] else []) l));
@ -239,8 +294,15 @@ rec {
Example:
filterAttrs (n: v: n == "foo") { foo = 1; bar = 2; }
=> { foo = 1; }
Type:
filterAttrs :: (String -> Any -> Bool) -> AttrSet -> AttrSet
*/
filterAttrs = pred: set:
filterAttrs =
# Predicate taking an attribute name and an attribute value, which returns `true` to include the attribute, or `false` to exclude the attribute.
pred:
# The attribute set to filter
set:
listToAttrs (concatMap (name: let v = set.${name}; in if pred name v then [(nameValuePair name v)] else []) (attrNames set));
@ -250,8 +312,15 @@ rec {
Example:
filterAttrsRecursive (n: v: v != null) { foo = { bar = null; }; }
=> { foo = {}; }
Type:
filterAttrsRecursive :: (String -> Any -> Bool) -> AttrSet -> AttrSet
*/
filterAttrsRecursive = pred: set:
filterAttrsRecursive =
# Predicate taking an attribute name and an attribute value, which returns `true` to include the attribute, or `false` to exclude the attribute.
pred:
# The attribute set to filter
set:
listToAttrs (
concatMap (name:
let v = set.${name}; in
@ -269,23 +338,28 @@ rec {
Example:
foldAttrs (item: acc: [item] ++ acc) [] [{ a = 2; } { a = 3; }]
=> { a = [ 2 3 ]; }
Type:
foldAttrs :: (Any -> Any -> Any) -> Any -> [AttrSet] -> Any
*/
foldAttrs = op: nul:
foldAttrs =
# A function that, given a value and a collector, combines the two.
op:
# The starting value.
nul:
# A list of attribute sets to fold together by key.
list_of_attrs:
foldr (n: a:
foldr (name: o:
o // { ${name} = op n.${name} (a.${name} or nul); }
) a (attrNames n)
) {};
) {} list_of_attrs;
/* Recursively collect sets that verify a given predicate named `pred'
from the set `attrs'. The recursion is stopped when the predicate is
verified.
Type:
collect ::
(AttrSet -> Bool) -> AttrSet -> [x]
Example:
collect isList { a = { b = ["b"]; }; c = [1]; }
=> [["b"] [1]]
@ -293,8 +367,15 @@ rec {
collect (x: x ? outPath)
{ a = { outPath = "a/"; }; b = { outPath = "b/"; }; }
=> [{ outPath = "a/"; } { outPath = "b/"; }]
Type:
collect :: (AttrSet -> Bool) -> AttrSet -> [x]
*/
collect = pred: attrs:
collect =
# Given an attribute's value, determine if recursion should stop.
pred:
# The attribute set to recursively collect.
attrs:
if pred attrs then
[ attrs ]
else if isAttrs attrs then
@ -312,8 +393,12 @@ rec {
{ a = 2; b = 10; }
{ a = 2; b = 20; }
]
Type:
cartesianProductOfSets :: AttrSet -> [AttrSet]
*/
cartesianProductOfSets = attrsOfLists:
cartesianProductOfSets =
# Attribute set with attributes that are lists of values
attrsOfLists:
foldl' (listOfAttrs: attrName:
concatMap (attrs:
map (listValue: attrs // { ${attrName} = listValue; }) attrsOfLists.${attrName}
@ -321,25 +406,32 @@ rec {
) [{}] (attrNames attrsOfLists);
/* Utility function that creates a {name, value} pair as expected by
builtins.listToAttrs.
/* Utility function that creates a `{name, value}` pair as expected by `builtins.listToAttrs`.
Example:
nameValuePair "some" 6
=> { name = "some"; value = 6; }
Type:
nameValuePair :: String -> Any -> AttrSet
*/
nameValuePair = name: value: { inherit name value; };
nameValuePair =
# Attribute name
name:
# Attribute value
value:
{ inherit name value; };
/* Apply a function to each element in an attribute set. The
function takes two arguments --- the attribute name and its value
--- and returns the new value for the attribute. The result is a
new attribute set.
/* Apply a function to each element in an attribute set, creating a new attribute set.
Example:
mapAttrs (name: value: name + "-" + value)
{ x = "foo"; y = "bar"; }
=> { x = "x-foo"; y = "y-bar"; }
Type:
mapAttrs :: (String -> Any -> Any) -> AttrSet -> AttrSet
*/
mapAttrs = builtins.mapAttrs or
(f: set:
@ -354,24 +446,35 @@ rec {
mapAttrs' (name: value: nameValuePair ("foo_" + name) ("bar-" + value))
{ x = "a"; y = "b"; }
=> { foo_x = "bar-a"; foo_y = "bar-b"; }
Type:
mapAttrs' :: (String -> Any -> { name = String; value = Any }) -> AttrSet -> AttrSet
*/
mapAttrs' = f: set:
mapAttrs' =
# A function, given an attribute's name and value, returns a new `nameValuePair`.
f:
# Attribute set to map over.
set:
listToAttrs (map (attr: f attr set.${attr}) (attrNames set));
/* Call a function for each attribute in the given set and return
the result in a list.
Type:
mapAttrsToList ::
(String -> a -> b) -> AttrSet -> [b]
Example:
mapAttrsToList (name: value: name + value)
{ x = "a"; y = "b"; }
=> [ "xa" "yb" ]
Type:
mapAttrsToList :: (String -> a -> b) -> AttrSet -> [b]
*/
mapAttrsToList = f: attrs:
mapAttrsToList =
# A function, given an attribute's name and value, returns a new value.
f:
# Attribute set to map over.
attrs:
map (name: f name attrs.${name}) (attrNames attrs);
@ -379,16 +482,20 @@ rec {
attribute sets. Also, the first argument of the argument
function is a *list* of the names of the containing attributes.
Type:
mapAttrsRecursive ::
([String] -> a -> b) -> AttrSet -> AttrSet
Example:
mapAttrsRecursive (path: value: concatStringsSep "-" (path ++ [value]))
{ n = { a = "A"; m = { b = "B"; c = "C"; }; }; d = "D"; }
=> { n = { a = "n-a-A"; m = { b = "n-m-b-B"; c = "n-m-c-C"; }; }; d = "d-D"; }
Type:
mapAttrsRecursive :: ([String] -> a -> b) -> AttrSet -> AttrSet
*/
mapAttrsRecursive = mapAttrsRecursiveCond (as: true);
mapAttrsRecursive =
# A function, given a list of attribute names and a value, returns a new value.
f:
# Set to recursively map over.
set:
mapAttrsRecursiveCond (as: true) f set;
/* Like `mapAttrsRecursive', but it takes an additional predicate
@ -397,10 +504,6 @@ rec {
recurse, but does apply the map function. If it returns true, it
does recurse, and does not apply the map function.
Type:
mapAttrsRecursiveCond ::
(AttrSet -> Bool) -> ([String] -> a -> b) -> AttrSet -> AttrSet
Example:
# To prevent recursing into derivations (which are attribute
# sets with the attribute "type" equal to "derivation"):
@ -408,8 +511,17 @@ rec {
(as: !(as ? "type" && as.type == "derivation"))
(x: ... do something ...)
attrs
Type:
mapAttrsRecursiveCond :: (AttrSet -> Bool) -> ([String] -> a -> b) -> AttrSet -> AttrSet
*/
mapAttrsRecursiveCond = cond: f: set:
mapAttrsRecursiveCond =
# A function that, given the attribute set the recursion is currently at, determines whether to recurse deeper into that attribute set.
cond:
# A function, given a list of attribute names and a value, returns a new value.
f:
# Attribute set to recursively map over.
set:
let
recurse = path:
let
@ -428,13 +540,20 @@ rec {
Example:
genAttrs [ "foo" "bar" ] (name: "x_" + name)
=> { foo = "x_foo"; bar = "x_bar"; }
Type:
genAttrs :: [ String ] -> (String -> Any) -> AttrSet
*/
genAttrs = names: f:
genAttrs =
# Names of values in the resulting attribute set.
names:
# A function, given the name of the attribute, returns the attribute's value.
f:
listToAttrs (map (n: nameValuePair n (f n)) names);
/* Check whether the argument is a derivation. Any set with
{ type = "derivation"; } counts as a derivation.
`{ type = "derivation"; }` counts as a derivation.
Example:
nixpkgs = import <nixpkgs> {}
@ -442,25 +561,36 @@ rec {
=> true
isDerivation "foobar"
=> false
*/
isDerivation = x: x.type or null == "derivation";
/* Converts a store path to a fake derivation. */
toDerivation = path:
let
path' = builtins.storePath path;
res =
{ type = "derivation";
name = sanitizeDerivationName (builtins.substring 33 (-1) (baseNameOf path'));
outPath = path';
outputs = [ "out" ];
out = res;
outputName = "out";
};
Type:
isDerivation :: Any -> Bool
*/
isDerivation =
# Value to check.
value: value.type or null == "derivation";
/* Converts a store path to a fake derivation.
Type:
toDerivation :: Path -> Derivation
*/
toDerivation =
# A store path to convert to a derivation.
path:
let
path' = builtins.storePath path;
res =
{ type = "derivation";
name = sanitizeDerivationName (builtins.substring 33 (-1) (baseNameOf path'));
outPath = path';
outputs = [ "out" ];
out = res;
outputName = "out";
};
in res;
/* If `cond' is true, return the attribute set `as',
/* If `cond` is true, return the attribute set `as`,
otherwise an empty attribute set.
Example:
@ -468,47 +598,82 @@ rec {
=> { my = "set"; }
optionalAttrs (false) { my = "set"; }
=> { }
Type:
optionalAttrs :: Bool -> AttrSet
*/
optionalAttrs = cond: as: if cond then as else {};
optionalAttrs =
# Condition under which the `as` attribute set is returned.
cond:
# The attribute set to return if `cond` is `true`.
as:
if cond then as else {};
/* Merge sets of attributes and use the function f to merge attributes
/* Merge sets of attributes and use the function `f` to merge attributes
values.
Example:
zipAttrsWithNames ["a"] (name: vs: vs) [{a = "x";} {a = "y"; b = "z";}]
=> { a = ["x" "y"]; }
Type:
zipAttrsWithNames :: [ String ] -> (String -> [ Any ] -> Any) -> [ AttrSet ] -> AttrSet
*/
zipAttrsWithNames = names: f: sets:
zipAttrsWithNames =
# List of attribute names to zip.
names:
# A function, accepts an attribute name, all the values, and returns a combined value.
f:
# List of values from the list of attribute sets.
sets:
listToAttrs (map (name: {
inherit name;
value = f name (catAttrs name sets);
}) names);
/* Implementation note: Common names appear multiple times in the list of
/* Merge sets of attributes and use the function f to merge attribute values.
Like `lib.attrsets.zipAttrsWithNames`, where all key names are passed for `names`.
Implementation note: Common names appear multiple times in the list of
names, hopefully this does not affect the system because the maximal
laziness avoids computing the same expression twice and listToAttrs does
laziness avoids computing the same expression twice and `listToAttrs` does
not care about duplicated attribute names.
Example:
zipAttrsWith (name: values: values) [{a = "x";} {a = "y"; b = "z";}]
=> { a = ["x" "y"]; b = ["z"] }
Type:
zipAttrsWith :: (String -> [ Any ] -> Any) -> [ AttrSet ] -> AttrSet
*/
zipAttrsWith =
builtins.zipAttrsWith or (f: sets: zipAttrsWithNames (concatMap attrNames sets) f sets);
/* Like `zipAttrsWith' with `(name: values: values)' as the function.
Example:
zipAttrs [{a = "x";} {a = "y"; b = "z";}]
=> { a = ["x" "y"]; b = ["z"] }
/* Merge sets of attributes and combine each attribute value into a list.
Like `lib.attrsets.zipAttrsWith' with `(name: values: values)' as the function.
Example:
zipAttrs [{a = "x";} {a = "y"; b = "z";}]
=> { a = ["x" "y"]; b = ["z"] }
Type:
zipAttrs :: [ AttrSet ] -> AttrSet
*/
zipAttrs = zipAttrsWith (name: values: values);
zipAttrs =
# List of attribute sets to zip together.
sets:
zipAttrsWith (name: values: values) sets;
/* Does the same as the update operator '//' except that attributes are
merged until the given predicate is verified. The predicate should
accept 3 arguments which are the path to reach the attribute, a part of
the first attribute set and a part of the second attribute set. When
the predicate is verified, the value of the first attribute set is
the predicate is satisfied, the value of the first attribute set is
replaced by the value of the second attribute set.
Example:
@ -524,15 +689,23 @@ rec {
baz = 4;
}
returns: {
=> {
foo.bar = 1; # 'foo.*' from the second set
foo.quz = 2; #
bar = 3; # 'bar' from the first set
baz = 4; # 'baz' from the second set
}
*/
recursiveUpdateUntil = pred: lhs: rhs:
Type:
recursiveUpdateUntil :: ( [ String ] -> AttrSet -> AttrSet -> Bool ) -> AttrSet -> AttrSet -> AttrSet
*/
recursiveUpdateUntil =
# Predicate, taking the path to the current attribute as a list of strings for attribute names, and the two values at that path from the original arguments.
pred:
# Left attribute set of the merge.
lhs:
# Right attribute set of the merge.
rhs:
let f = attrPath:
zipAttrsWith (n: values:
let here = attrPath ++ [n]; in
@ -544,6 +717,7 @@ rec {
);
in f [] [rhs lhs];
/* A recursive variant of the update operator //. The recursion
stops when one of the attribute values is not an attribute set,
in which case the right hand side value takes precedence over the
@ -562,16 +736,32 @@ rec {
boot.loader.grub.device = "";
}
*/
recursiveUpdate = recursiveUpdateUntil (path: lhs: rhs: !(isAttrs lhs && isAttrs rhs));
Type:
recursiveUpdate :: AttrSet -> AttrSet -> AttrSet
*/
recursiveUpdate =
# Left attribute set of the merge.
lhs:
# Right attribute set of the merge.
rhs:
recursiveUpdateUntil (path: lhs: rhs: !(isAttrs lhs && isAttrs rhs)) lhs rhs;
/* Returns true if the pattern is contained in the set. False otherwise.
Example:
matchAttrs { cpu = {}; } { cpu = { bits = 64; }; }
=> true
*/
matchAttrs = pattern: attrs: assert isAttrs pattern;
Type:
matchAttrs :: AttrSet -> AttrSet -> Bool
*/
matchAttrs =
# Attribute set structure to match
pattern:
# Attribute set to find patterns in
attrs:
assert isAttrs pattern;
all id (attrValues (zipAttrsWithNames (attrNames pattern) (n: values:
let pat = head values; val = elemAt values 1; in
if length values == 1 then false
@ -579,6 +769,7 @@ rec {
else pat == val
) [pattern attrs]));
/* Override only the attributes that are already present in the old set
useful for deep-overriding.
@ -589,10 +780,18 @@ rec {
=> { b = 2; }
overrideExisting { a = 3; b = 2; } { a = 1; }
=> { a = 1; b = 2; }
Type:
overrideExisting :: AttrSet -> AttrSet -> AttrSet
*/
overrideExisting = old: new:
overrideExisting =
# Original attribute set
old:
# Attribute set with attributes to override in `old`.
new:
mapAttrs (name: value: new.${name} or value) old;
/* Turns a list of strings into a human-readable description of those
strings represented as an attribute path. The result of this function is
not intended to be machine-readable.
@ -602,44 +801,120 @@ rec {
=> "foo.\"10\".bar"
showAttrPath []
=> "<root attribute path>"
Type:
showAttrPath :: [String] -> String
*/
showAttrPath = path:
showAttrPath =
# Attribute path to render to a string
path:
if path == [] then "<root attribute path>"
else concatMapStringsSep "." escapeNixIdentifier path;
/* Get a package output.
If no output is found, fall back to `.out` and then to the default.
Example:
getOutput "dev" pkgs.openssl
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev"
Type:
getOutput :: String -> Derivation -> String
*/
getOutput = output: pkg:
if ! pkg ? outputSpecified || ! pkg.outputSpecified
then pkg.${output} or pkg.out or pkg
else pkg;
/* Get a package's `bin` output.
If the output does not exist, fall back to `.out` and then to the default.
Example:
getBin pkgs.openssl
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r"
Type:
getBin :: Derivation -> String
*/
getBin = getOutput "bin";
/* Get a package's `lib` output.
If the output does not exist, fall back to `.out` and then to the default.
Example:
getLib pkgs.openssl
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-lib"
Type:
getLib :: Derivation -> String
*/
getLib = getOutput "lib";
/* Get a package's `dev` output.
If the output does not exist, fall back to `.out` and then to the default.
Example:
getDev pkgs.openssl
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-dev"
Type:
getDev :: Derivation -> String
*/
getDev = getOutput "dev";
/* Get a package's `man` output.
If the output does not exist, fall back to `.out` and then to the default.
Example:
getMan pkgs.openssl
=> "/nix/store/9rz8gxhzf8sw4kf2j2f1grr49w8zx5vj-openssl-1.0.1r-man"
Type:
getMan :: Derivation -> String
*/
getMan = getOutput "man";
/* Pick the outputs of packages to place in buildInputs */
chooseDevOutputs = builtins.map getDev;
/* Pick the outputs of packages to place in `buildInputs` */
chooseDevOutputs =
# List of packages to pick `dev` outputs from
drvs:
builtins.map getDev drvs;
/* Make various Nix tools consider the contents of the resulting
attribute set when looking for what to build, find, etc.
This function only affects a single attribute set; it does not
apply itself recursively for nested attribute sets.
Example:
{ pkgs ? import <nixpkgs> {} }:
{
myTools = pkgs.lib.recurseIntoAttrs {
inherit (pkgs) hello figlet;
};
}
Type:
recurseIntoAttrs :: AttrSet -> AttrSet
*/
recurseIntoAttrs =
attrs: attrs // { recurseForDerivations = true; };
# An attribute set to scan for derivations.
attrs:
attrs // { recurseForDerivations = true; };
/* Undo the effect of recurseIntoAttrs.
Type:
dontRecurseIntoAttrs :: AttrSet -> AttrSet
*/
dontRecurseIntoAttrs =
attrs: attrs // { recurseForDerivations = false; };
# An attribute set to not scan for derivations.
attrs:
attrs // { recurseForDerivations = false; };
/* `unionOfDisjoint x y` is equal to `x // y // z` where the
attrnames in `z` are the intersection of the attrnames in `x` and
@ -655,9 +930,9 @@ rec {
in
(x // y) // mask;
/*** deprecated stuff ***/
# deprecated
zipWithNames = zipAttrsWithNames;
# deprecated
zip = builtins.trace
"lib.zip is deprecated, use lib.zipAttrsWith instead" zipAttrsWith;
}

View file

@ -101,6 +101,7 @@ let
upperChars toLower toUpper addContextFrom splitString
removePrefix removeSuffix versionOlder versionAtLeast
getName getVersion
mesonOption mesonBool mesonEnable
nameFromURL enableFeature enableFeatureAs withFeature
withFeatureAs fixedWidthString fixedWidthNumber isStorePath
toInt toIntBase10 readPathsFromFile fileContents;

View file

@ -278,8 +278,11 @@ rec {
mapAny 0;
/* Pretty print a value, akin to `builtins.trace`.
* Should probably be a builtin as well.
*/
* Should probably be a builtin as well.
* The pretty-printed string should be suitable for rendering default values
* in the NixOS manual. In particular, it should be as close to a valid Nix expression
* as possible.
*/
toPretty = {
/* If this option is true, attrsets like { __pretty = fn; val = ; }
will use fn to convert val to a pretty printed representation.
@ -294,20 +297,25 @@ rec {
introSpace = if multiline then "\n${indent} " else " ";
outroSpace = if multiline then "\n${indent}" else " ";
in if isInt v then toString v
else if isFloat v then "~${toString v}"
# toString loses precision on floats, so we use toJSON instead. This isn't perfect
# as the resulting string may not parse back as a float (e.g. 42, 1e-06), but for
# pretty-printing purposes this is acceptable.
else if isFloat v then builtins.toJSON v
else if isString v then
let
# Separate a string into its lines
newlineSplits = filter (v: ! isList v) (builtins.split "\n" v);
# For a '' string terminated by a \n, which happens when the closing '' is on a new line
multilineResult = "''" + introSpace + concatStringsSep introSpace (lib.init newlineSplits) + outroSpace + "''";
# For a '' string not terminated by a \n, which happens when the closing '' is not on a new line
multilineResult' = "''" + introSpace + concatStringsSep introSpace newlineSplits + "''";
# For single lines, replace all newlines with their escaped representation
singlelineResult = "\"" + libStr.escape [ "\"" ] (concatStringsSep "\\n" newlineSplits) + "\"";
in if multiline && length newlineSplits > 1 then
if lib.last newlineSplits == "" then multilineResult else multilineResult'
else singlelineResult
lines = filter (v: ! isList v) (builtins.split "\n" v);
escapeSingleline = libStr.escape [ "\\" "\"" "\${" ];
escapeMultiline = libStr.replaceStrings [ "\${" "''" ] [ "''\${" "'''" ];
singlelineResult = "\"" + concatStringsSep "\\n" (map escapeSingleline lines) + "\"";
multilineResult = let
escapedLines = map escapeMultiline lines;
# The last line gets a special treatment: if it's empty, '' is on its own line at the "outer"
# indentation level. Otherwise, '' is appended to the last line.
lastLine = lib.last escapedLines;
in "''" + introSpace + concatStringsSep introSpace (lib.init escapedLines)
+ (if lastLine == "" then outroSpace else introSpace + lastLine) + "''";
in
if multiline && length lines > 1 then multilineResult else singlelineResult
else if true == v then "true"
else if false == v then "false"
else if null == v then "null"
@ -326,11 +334,11 @@ rec {
else "<function, args: {${showFnas}}>"
else if isAttrs v then
# apply pretty values if allowed
if attrNames v == [ "__pretty" "val" ] && allowPrettyValues
if allowPrettyValues && v ? __pretty && v ? val
then v.__pretty v.val
else if v == {} then "{ }"
else if v ? type && v.type == "derivation" then
"<derivation ${v.drvPath or "???"}>"
"<derivation ${v.name or "???"}>"
else "{" + introSpace
+ libStr.concatStringsSep introSpace (libAttr.mapAttrsToList
(name: value:

View file

@ -284,7 +284,18 @@ rec {
if config._module.check && config._module.freeformType == null && merged.unmatchedDefns != [] then
let
firstDef = head merged.unmatchedDefns;
baseMsg = "The option `${showOption (prefix ++ firstDef.prefix)}' does not exist. Definition values:${showDefs [ firstDef ]}";
baseMsg =
let
optText = showOption (prefix ++ firstDef.prefix);
defText =
builtins.addErrorContext
"while evaluating the error message for definitions for `${optText}', which is an option that does not exist"
(builtins.addErrorContext
"while evaluating a definition from `${firstDef.file}'"
( showDefs [ firstDef ])
);
in
"The option `${optText}' does not exist. Definition values:${defText}";
in
if attrNames options == [ "_module" ]
then
@ -833,7 +844,7 @@ rec {
filterOverrides' = defs:
let
getPrio = def: if def.value._type or "" == "override" then def.value.priority else defaultPriority;
getPrio = def: if def.value._type or "" == "override" then def.value.priority else defaultOverridePriority;
highestPrio = foldl' (prio: def: min (getPrio def) prio) 9999 defs;
strip = def: if def.value._type or "" == "override" then def // { value = def.value.content; } else def;
in {
@ -842,7 +853,7 @@ rec {
};
/* Sort a list of properties. The sort priority of a property is
1000 by default, but can be overridden by wrapping the property
defaultOrderPriority by default, but can be overridden by wrapping the property
using mkOrder. */
sortProperties = defs:
let
@ -851,7 +862,7 @@ rec {
then def // { value = def.value.content; inherit (def.value) priority; }
else def;
defs' = map strip defs;
compare = a: b: (a.priority or 1000) < (b.priority or 1000);
compare = a: b: (a.priority or defaultOrderPriority) < (b.priority or defaultOrderPriority);
in sort compare defs';
# This calls substSubModules, whose entire purpose is only to ensure that
@ -887,10 +898,13 @@ rec {
mkOptionDefault = mkOverride 1500; # priority of option defaults
mkDefault = mkOverride 1000; # used in config sections of non-user modules to set a default
defaultOverridePriority = 100;
mkImageMediaOverride = mkOverride 60; # image media profiles can be derived by inclusion into host config, hence needing to override host config, but do allow user to mkForce
mkForce = mkOverride 50;
mkVMOverride = mkOverride 10; # used by nixos-rebuild build-vm
defaultPriority = lib.warnIf (lib.isInOldestRelease 2305) "lib.modules.defaultPriority is deprecated, please use lib.modules.defaultOverridePriority instead." defaultOverridePriority;
mkFixStrictness = lib.warn "lib.mkFixStrictness has no effect and will be removed. It returns its argument unmodified, so you can just remove any calls." id;
mkOrder = priority: content:
@ -899,11 +913,9 @@ rec {
};
mkBefore = mkOrder 500;
defaultOrderPriority = 1000;
mkAfter = mkOrder 1500;
# The default priority for things that don't have a priority specified.
defaultPriority = 100;
# Convenient property used to transfer all definitions and their
# properties from one option to another. This property is useful for
# renaming options, and also for including properties from another module
@ -930,10 +942,10 @@ rec {
# Similar to mkAliasAndWrapDefinitions but copies over the priority from the
# option as well.
#
# If a priority is not set, it assumes a priority of defaultPriority.
# If a priority is not set, it assumes a priority of defaultOverridePriority.
mkAliasAndWrapDefsWithPriority = wrap: option:
let
prio = option.highestPrio or defaultPriority;
prio = option.highestPrio or defaultOverridePriority;
defsWithPrio = map (mkOverride prio) option.definitions;
in mkAliasIfDef option (wrap (mkMerge defsWithPrio));
@ -1115,7 +1127,7 @@ rec {
# to definitions.
mkDerivedConfig = opt: f:
mkOverride
(opt.highestPrio or defaultPriority)
(opt.highestPrio or defaultOverridePriority)
(f opt.value);
doRename = { from, to, visible, warn, use, withPriority ? true }:

View file

@ -218,7 +218,7 @@ rec {
# the set generated with filterOptionSets.
optionAttrSetToDocList = optionAttrSetToDocList' [];
optionAttrSetToDocList' = prefix: options:
optionAttrSetToDocList' = _: options:
concatMap (opt:
let
docOption = rec {
@ -234,9 +234,8 @@ rec {
readOnly = opt.readOnly or false;
type = opt.type.description or "unspecified";
}
// optionalAttrs (opt ? example) { example = scrubOptionValue opt.example; }
// optionalAttrs (opt ? default) { default = scrubOptionValue opt.default; }
// optionalAttrs (opt ? defaultText) { default = opt.defaultText; }
// optionalAttrs (opt ? example) { example = renderOptionValue opt.example; }
// optionalAttrs (opt ? default) { default = renderOptionValue (opt.defaultText or opt.default); }
// optionalAttrs (opt ? relatedPackages && opt.relatedPackages != null) { inherit (opt) relatedPackages; };
subOptions =
@ -256,6 +255,9 @@ rec {
efficient: the XML representation of derivations is very large
(on the order of megabytes) and is not actually used by the
manual generator.
This function was made obsolete by renderOptionValue and is kept for
compatibility with out-of-tree code.
*/
scrubOptionValue = x:
if isDerivation x then
@ -265,6 +267,17 @@ rec {
else x;
/* Ensures that the given option value (default or example) is a `_type`d string
by rendering Nix values to `literalExpression`s.
*/
renderOptionValue = v:
if v ? _type && v ? text then v
else literalExpression (lib.generators.toPretty {
multiline = true;
allowPrettyValues = true;
} v);
/* For use in the `defaultText` and `example` option attributes. Causes the
given string to be rendered verbatim in the documentation as Nix code. This
is necessary for complex values, e.g. functions, or values that depend on

View file

@ -328,7 +328,7 @@ rec {
escape ["(" ")"] "(foo)"
=> "\\(foo\\)"
*/
escape = list: replaceChars list (map (c: "\\${c}") list);
escape = list: replaceStrings list (map (c: "\\${c}") list);
/* Escape occurence of the element of `list` in `string` by
converting to its ASCII value and prefixing it with \\x.
@ -341,7 +341,7 @@ rec {
=> "foo\\x20bar"
*/
escapeC = list: replaceChars list (map (c: "\\x${ toLower (lib.toHexString (charToInt c))}") list);
escapeC = list: replaceStrings list (map (c: "\\x${ toLower (lib.toHexString (charToInt c))}") list);
/* Quote string to be used safely within the Bourne shell.
@ -471,19 +471,8 @@ rec {
["\"" "'" "<" ">" "&"]
["&quot;" "&apos;" "&lt;" "&gt;" "&amp;"];
# Obsolete - use replaceStrings instead.
replaceChars = builtins.replaceStrings or (
del: new: s:
let
substList = lib.zipLists del new;
subst = c:
let found = lib.findFirst (sub: sub.fst == c) null substList; in
if found == null then
c
else
found.snd;
in
stringAsChars subst s);
# warning added 12-12-2022
replaceChars = lib.warn "replaceChars is a deprecated alias of replaceStrings, replace usages of it with replaceStrings." builtins.replaceStrings;
# Case conversion utilities.
lowerChars = stringToCharacters "abcdefghijklmnopqrstuvwxyz";
@ -497,7 +486,7 @@ rec {
toLower "HOME"
=> "home"
*/
toLower = replaceChars upperChars lowerChars;
toLower = replaceStrings upperChars lowerChars;
/* Converts an ASCII string to upper-case.
@ -507,10 +496,10 @@ rec {
toUpper "home"
=> "HOME"
*/
toUpper = replaceChars lowerChars upperChars;
toUpper = replaceStrings lowerChars upperChars;
/* Appends string context from another string. This is an implementation
detail of Nix.
detail of Nix and should be used carefully.
Strings in Nix carry an invisible `context` which is a list of strings
representing store paths. If the string is later used in a derivation
@ -533,13 +522,11 @@ rec {
splitString "/" "/usr/local/bin"
=> [ "" "usr" "local" "bin" ]
*/
splitString = _sep: _s:
splitString = sep: s:
let
sep = builtins.unsafeDiscardStringContext _sep;
s = builtins.unsafeDiscardStringContext _s;
splits = builtins.filter builtins.isString (builtins.split (escapeRegex sep) s);
splits = builtins.filter builtins.isString (builtins.split (escapeRegex (toString sep)) (toString s));
in
map (v: addContextFrom _sep (addContextFrom _s v)) splits;
map (addContextFrom s) splits;
/* Return a string without the specified prefix, if the prefix matches.
@ -661,6 +648,61 @@ rec {
name = head (splitString sep filename);
in assert name != filename; name;
/* Create a -D<feature>=<value> string that can be passed to typical Meson
invocations.
Type: mesonOption :: string -> string -> string
@param feature The feature to be set
@param value The desired value
Example:
mesonOption "engine" "opengl"
=> "-Dengine=opengl"
*/
mesonOption = feature: value:
assert (lib.isString feature);
assert (lib.isString value);
"-D${feature}=${value}";
/* Create a -D<condition>={true,false} string that can be passed to typical
Meson invocations.
Type: mesonBool :: string -> bool -> string
@param condition The condition to be made true or false
@param flag The controlling flag of the condition
Example:
mesonBool "hardened" true
=> "-Dhardened=true"
mesonBool "static" false
=> "-Dstatic=false"
*/
mesonBool = condition: flag:
assert (lib.isString condition);
assert (lib.isBool flag);
mesonOption condition (lib.boolToString flag);
/* Create a -D<feature>={enabled,disabled} string that can be passed to
typical Meson invocations.
Type: mesonEnable :: string -> bool -> string
@param feature The feature to be enabled or disabled
@param flag The controlling flag
Example:
mesonEnable "docs" true
=> "-Ddocs=enabled"
mesonEnable "savage" false
=> "-Dsavage=disabled"
*/
mesonEnable = feature: flag:
assert (lib.isString feature);
assert (lib.isBool flag);
mesonOption feature (if flag then "enabled" else "disabled");
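A minimal sketch of how these new Meson helpers might be used together in a package expression (the package name, URL, hash placeholder, and the `withDocs` flag are all illustrative, not taken from this change):
```nix
{ lib, stdenv, fetchurl, meson, ninja, withDocs ? true }:

stdenv.mkDerivation {
  pname = "example-app";
  version = "1.0";
  src = fetchurl {
    url = "https://example.org/example-app-1.0.tar.gz";
    hash = lib.fakeHash; # placeholder, not a real hash
  };
  nativeBuildInputs = [ meson ninja ];
  # Each helper renders one -D<name>=<value> flag for Meson.
  mesonFlags = [
    (lib.mesonOption "engine" "opengl") # -Dengine=opengl
    (lib.mesonBool "hardened" true)     # -Dhardened=true
    (lib.mesonEnable "docs" withDocs)   # -Ddocs=enabled or -Ddocs=disabled
  ];
}
```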
/* Create an --{enable,disable}-<feat> string that can be passed to
standard GNU Autoconf scripts.
@ -807,9 +849,9 @@ rec {
*/
toInt = str:
let
# RegEx: Match any leading whitespace, then any digits, and finally match any trailing
# whitespace.
strippedInput = match "[[:space:]]*([[:digit:]]+)[[:space:]]*" str;
# RegEx: Match any leading whitespace, possibly a '-', one or more digits,
# and finally match any trailing whitespace.
strippedInput = match "[[:space:]]*(-?[[:digit:]]+)[[:space:]]*" str;
# RegEx: Match a leading '0' then one or more digits.
isLeadingZero = match "0[[:digit:]]+" (head strippedInput) == [];
@ -858,9 +900,10 @@ rec {
*/
toIntBase10 = str:
let
# RegEx: Match any leading whitespace, then match any zero padding, capture any remaining
# digits after that, and finally match any trailing whitespace.
strippedInput = match "[[:space:]]*0*([[:digit:]]+)[[:space:]]*" str;
# RegEx: Match any leading whitespace, then match any zero padding,
# capture possibly a '-' followed by one or more digits,
# and finally match any trailing whitespace.
strippedInput = match "[[:space:]]*0*(-?[[:digit:]]+)[[:space:]]*" str;
# RegEx: Match at least one '0'.
isZero = match "0+" (head strippedInput) == [];

View file

@ -339,6 +339,8 @@ runTests {
(0 == toInt " 0")
(0 == toInt "0 ")
(0 == toInt " 0 ")
(-1 == toInt "-1")
(-1 == toInt " -1 ")
];
testToIntFails = testAllTrue [
@ -383,6 +385,8 @@ runTests {
(0 == toIntBase10 " 000000")
(0 == toIntBase10 "000000 ")
(0 == toIntBase10 " 000000 ")
(-1 == toIntBase10 "-1")
(-1 == toIntBase10 " -1 ")
];
testToIntBase10Fails = testAllTrue [
@ -727,7 +731,7 @@ runTests {
float = 0.1337;
bool = true;
emptystring = "";
string = ''fno"rd'';
string = "fn\${o}\"r\\d";
newlinestring = "\n";
path = /. + "/foo";
null_ = null;
@ -735,16 +739,16 @@ runTests {
functionArgs = { arg ? 4, foo }: arg;
list = [ 3 4 function [ false ] ];
emptylist = [];
attrs = { foo = null; "foo bar" = "baz"; };
attrs = { foo = null; "foo b/ar" = "baz"; };
emptyattrs = {};
drv = deriv;
};
expected = rec {
int = "42";
float = "~0.133700";
float = "0.1337";
bool = "true";
emptystring = ''""'';
string = ''"fno\"rd"'';
string = ''"fn\''${o}\"r\\d"'';
newlinestring = "\"\\n\"";
path = "/foo";
null_ = "null";
@ -752,9 +756,9 @@ runTests {
functionArgs = "<function, args: {arg?, foo}>";
list = "[ 3 4 ${function} [ false ] ]";
emptylist = "[ ]";
attrs = "{ foo = null; \"foo bar\" = \"baz\"; }";
attrs = "{ foo = null; \"foo b/ar\" = \"baz\"; }";
emptyattrs = "{ }";
drv = "<derivation ${deriv.drvPath}>";
drv = "<derivation ${deriv.name}>";
};
};
@ -799,8 +803,8 @@ runTests {
newlinestring = "\n";
multilinestring = ''
hello
there
test
''${there}
te'''st
'';
multilinestring' = ''
hello
@ -827,8 +831,8 @@ runTests {
multilinestring = ''
'''
hello
there
test
'''''${there}
te''''st
''''';
multilinestring' = ''
'''

View file

@ -64,6 +64,9 @@ checkConfigOutput '^"one two"$' config.result ./shorthand-meta.nix
# Check boolean option.
checkConfigOutput '^false$' config.enable ./declare-enable.nix
checkConfigError 'The option .* does not exist. Definition values:\n\s*- In .*: true' config.enable ./define-enable.nix
checkConfigError 'The option .* does not exist. Definition values:\n\s*- In .*' config.enable ./define-enable-throw.nix
checkConfigError 'while evaluating a definition from `.*/define-enable-abort.nix' config.enable ./define-enable-abort.nix
checkConfigError 'while evaluating the error message for definitions for .enable., which is an option that does not exist' config.enable ./define-enable-abort.nix
checkConfigOutput '^1$' config.bare-submodule.nested ./declare-bare-submodule.nix ./declare-bare-submodule-nested-option.nix
checkConfigOutput '^2$' config.bare-submodule.deep ./declare-bare-submodule.nix ./declare-bare-submodule-deep-option.nix

View file

@ -0,0 +1,3 @@
{
config.enable = abort "oops";
}

View file

@ -0,0 +1,3 @@
{
config.enable = throw "oops";
}

View file

@ -1677,6 +1677,15 @@
githubId = 214787;
name = "Herwig Hochleitner";
};
benediktbroich = {
name = "Benedikt Broich";
email = "b.broich@posteo.de";
github = "BenediktBroich";
githubId = 32903896;
keys = [{
fingerprint = "CB5C 7B3C 3E6F 2A59 A583 A90A 8A60 0376 7BE9 5976";
}];
};
benesim = {
name = "Benjamin Isbarn";
email = "benjamin.isbarn@gmail.com";
@ -3646,6 +3655,15 @@
githubId = 126339;
name = "Domen Kozar";
};
DomesticMoth = {
name = "Andrew";
email = "silkmoth@protonmail.com";
github = "DomesticMoth";
githubId = 91414737;
keys = [{
fingerprint = "7D6B AE0A A98A FDE9 3396 E721 F87E 15B8 3AA7 3087";
}];
};
dominikh = {
email = "dominik@honnef.co";
github = "dominikh";
@ -3695,6 +3713,12 @@
fingerprint = "4749 0887 CF3B 85A1 6355 C671 78C7 DD40 DF23 FB16";
}];
};
DPDmancul = {
name = "Davide Peressoni";
email = "davide.peressoni@tuta.io";
matrix = "@dpd-:matrix.org";
githubId = 3186857;
};
dpercy = {
email = "dpercy@dpercy.dev";
github = "dpercy";
@ -3832,12 +3856,6 @@
githubId = 15128988;
name = "Maksim Dzabraev";
};
e-user = {
email = "nixos@sodosopa.io";
github = "outergod";
githubId = 93086;
name = "Alexander Kahl";
};
eadwu = {
email = "edmund.wu@protonmail.com";
github = "eadwu";
@ -5170,6 +5188,12 @@
fingerprint = "C006 B8A0 0618 F3B6 E0E4 2ECD 5D47 2848 30FA A4FA";
}];
};
gotcha = {
email = "gotcha@bubblenet.be";
github = "gotcha";
githubId = 105204;
name = "Godefroid Chapelle";
};
govanify = {
name = "Gauvain 'GovanifY' Roussel-Tarbouriech";
email = "gauvain@govanify.com";
@ -5761,6 +5785,15 @@
githubId = 15371828;
name = "Hugo Lageneste";
};
huyngo = {
email = "huyngo@disroot.org";
github = "Huy-Ngo";
name = "Ngô Ngc Đc Huy";
githubId = 19296926;
keys = [{
fingerprint = "DF12 23B1 A9FD C5BE 3DA5 B6F7 904A F1C7 CDF6 95C3";
}];
};
hypersw = {
email = "baltic@hypersw.net";
github = "hypersw";
@ -6542,6 +6575,12 @@
githubId = 6445082;
name = "Joseph Lukasik";
};
jhh = {
email = "jeff@j3ff.io";
github = "jhh";
githubId = 14412;
name = "Jeff Hutchison";
};
jhhuh = {
email = "jhhuh.note@gmail.com";
github = "jhhuh";
@ -9412,6 +9451,15 @@
githubId = 249317;
name = "montag451";
};
montchr = {
name = "Chris Montgomery";
email = "chris@cdom.io";
github = "montchr";
githubId = 1757914;
keys = [{
fingerprint = "6460 4147 C434 F65E C306 A21F 135E EDD0 F719 34F3";
}];
};
moosingin3space = {
email = "moosingin3space@gmail.com";
github = "moosingin3space";
@ -10848,6 +10896,16 @@
fingerprint = "5D69 CF04 B7BC 2BC1 A567 9267 00BC F29B 3208 0700";
}];
};
phdcybersec = {
name = "Léo Lavaur";
email = "phdcybersec@pm.me";
github = "phdcybersec";
githubId = 82591009;
keys = [{
fingerprint = "7756 E88F 3C6A 47A5 C5F0 CDFB AB54 6777 F93E 20BF";
}];
};
phfroidmont = {
name = "Paul-Henri Froidmont";
email = "nix.contact-j9dw4d@froidmont.org";
@ -10955,6 +11013,12 @@
fingerprint = "D03B 218C AE77 1F77 D7F9 20D9 823A 6154 4264 08D3";
}];
};
piperswe = {
email = "contact@piperswe.me";
github = "piperswe";
githubId = 1830959;
name = "Piper McCorkle";
};
pjbarnoy = {
email = "pjbarnoy@gmail.com";
github = "pjbarnoy";
@ -11955,6 +12019,12 @@
githubId = 852967;
name = "Russell O'Connor";
};
rodrgz = {
email = "rodrgz@proton.me";
github = "rodrgz";
githubId = 53882428;
name = "Erik Rodriguez";
};
roelvandijk = {
email = "roel@lambdacube.nl";
github = "roelvandijk";
@ -14072,6 +14142,12 @@
githubId = 8794235;
name = "Tom Siewert";
};
tonyshkurenko = {
email = "support@twingate.com";
github = "tonyshkurenko";
githubId = 8597964;
name = "Anton Shkurenko";
};
toonn = {
email = "nixpkgs@toonn.io";
matrix = "@toonn:matrix.org";
@ -14343,6 +14419,7 @@
};
urandom = {
email = "colin@urandom.co.uk";
matrix = "@urandom0:matrix.org";
github = "urandom2";
githubId = 2526260;
keys = [{
@ -15996,4 +16073,10 @@
github = "wuyoli";
githubId = 104238274;
};
ziguana = {
name = "Zig Uana";
email = "git@ziguana.dev";
github = "ziguana";
githubId = 45833444;
};
}

View file

@ -2,7 +2,7 @@
}:
with nixpkgs;
let
pyEnv = python3.withPackages(ps: [ ps.GitPython ]);
pyEnv = python3.withPackages(ps: [ ps.gitpython ]);
in
mkShell {
packages = [

View file

@ -506,6 +506,18 @@ with lib.maintainers; {
enableFeatureFreezePing = true;
};
node = {
members = [
lilyinstarlight
marsam
winter
yuka
];
scope = "Maintain Node.js runtimes and build tooling.";
shortName = "Node.js";
enableFeatureFreezePing = true;
};
numtide = {
members = [
mic92
@ -731,6 +743,7 @@ with lib.maintainers; {
vim = {
members = [
figsoda
jonringer
softinio
teto

View file

@ -159,6 +159,40 @@ environment.variables.VK_ICD_FILENAMES =
"/run/opengl-driver/share/vulkan/icd.d/radeon_icd.x86_64.json";
```
## VA-API {#sec-gpu-accel-va-api}
[VA-API (Video Acceleration API)](https://www.intel.com/content/www/us/en/developer/articles/technical/linuxmedia-vaapi.html)
is an open-source library and API specification, which provides access to
graphics hardware acceleration capabilities for video processing.
VA-API drivers are loaded by `libva`. The version in nixpkgs is built to search
the opengl driver path, so drivers can be installed in
[](#opt-hardware.opengl.extraPackages).
VA-API can be tested using:
```ShellSession
$ nix-shell -p libva-utils --run vainfo
```
### Intel {#sec-gpu-accel-va-api-intel}
Modern Intel GPUs use the iHD driver, which can be installed with:
```nix
hardware.opengl.extraPackages = [
intel-media-driver
];
```
Older Intel GPUs use the i965 driver, which can be installed with:
```nix
hardware.opengl.extraPackages = [
vaapiIntel
];
```
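Putting it together, a minimal NixOS configuration that enables VA-API with the modern Intel driver could look roughly like this (the driver choice and the `LIBVA_DRIVER_NAME` hint are illustrative; pick the driver matching your GPU generation):
```nix
{ pkgs, ... }: {
  hardware.opengl = {
    enable = true;
    extraPackages = with pkgs; [ intel-media-driver ];
  };
  # Some applications use this variable to pick the VA-API driver explicitly.
  environment.sessionVariables.LIBVA_DRIVER_NAME = "iHD";
}
```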
## Common issues {#sec-gpu-accel-common-issues}
### User permissions {#sec-gpu-accel-common-issues-permissions}

View file

@ -17,6 +17,16 @@ you may want to use one of the unversioned `pkgs.linuxPackages_*` aliases
such as `pkgs.linuxPackages_latest`, that are kept up to date with new
versions.
Please note that the current convention in NixOS is to only keep actively
maintained kernel versions on both unstable and the currently supported stable
release(s) of NixOS. This means that a non-longterm kernel will be removed after it's
abandoned by the kernel developers, even on stable NixOS versions. If you
pin your kernel onto a non-longterm version, expect your evaluation to fail as
soon as the version is out of maintenance.
Longterm versions of kernels will be removed before the next stable NixOS that will
exceed the maintenance period of the kernel version.
The default Linux kernel configuration should be fine for most users.
You can see the configuration of your current kernel with the following
command:
@ -138,3 +148,26 @@ $ cd linux-*
$ make -C $dev/lib/modules/*/build M=$(pwd)/drivers/net/ethernet/mellanox modules
# insmod ./drivers/net/ethernet/mellanox/mlx5/core/mlx5_core.ko
```
## ZFS {#sec-linux-zfs}
It's a common issue that the latest stable version of ZFS doesn't support the latest
available Linux kernel. It is recommended to use the latest available LTS that's compatible
with ZFS. Usually this is the default kernel provided by nixpkgs (i.e. `pkgs.linuxPackages`).
Alternatively, it's possible to pin the system to the latest available kernel
version *that is supported by ZFS* like this:
```nix
{
boot.kernelPackages = pkgs.zfs.latestCompatibleLinuxPackages;
}
```
Please note that the version this attribute points to isn't monotonic because the latest kernel
version only refers to kernel versions supported by the Linux developers. In other words,
the latest kernel version that ZFS is compatible with may decrease over time.
An example: the latest version ZFS is compatible with is 5.19 which is a non-longterm version. When 5.19
is out of maintenance, the latest supported kernel version is 5.15 because it's longterm and the versions
5.16, 5.17 and 5.18 are already out of maintenance because they're non-longterm.

View file

@ -50,7 +50,7 @@ networking.wireless.networks = {
echelon = {
pskRaw = "dca6d6ed41f4ab5a984c9f55f6f66d4efdc720ebf66959810f4329bb391c5435";
};
}
};
```
or you can use it to directly generate the `wpa_supplicant.conf`:

View file

@ -102,11 +102,14 @@ let
'';
manualXsltprocOptions = toString [
"--param section.autolabel 1"
"--param section.label.includes.component.label 1"
"--param chapter.autolabel 0"
"--param part.autolabel 0"
"--param preface.autolabel 0"
"--param reference.autolabel 0"
"--param section.autolabel 0"
"--stringparam html.stylesheet 'style.css overrides.css highlightjs/mono-blue.css'"
"--stringparam html.script './highlightjs/highlight.pack.js ./highlightjs/loader.js'"
"--param xref.with.number.and.title 1"
"--param xref.with.number.and.title 0"
"--param toc.section.depth 0"
"--param generate.consistent.ids 1"
"--stringparam admon.style ''"

View file

@ -0,0 +1,36 @@
# Experimental feature: Bootspec {#sec-experimental-bootspec}
Bootspec is an experimental feature, introduced in the [RFC-0125 proposal](https://github.com/NixOS/rfcs/pull/125), that standardizes bootloader support and enables advanced boot workflows such as SecureBoot.
The reference implementation can be found [in this pull request](https://github.com/NixOS/nixpkgs/pull/172237).
You can enable the creation of bootspec documents through [`boot.bootspec.enable = true`](options.html#opt-boot.bootspec.enable), which will prompt a warning until [RFC-0125](https://github.com/NixOS/rfcs/pull/125) is officially merged.
## Schema {#sec-experimental-bootspec-schema}
The bootspec schema is versioned and validated against [a CUE schema file](https://cuelang.org/), which should be considered the source of truth for your applications.
You will find the current version [here](../../../modules/system/activation/bootspec.cue).
## Extensions mechanism {#sec-experimental-bootspec-extensions}
Bootspec cannot account for all use cases.
For this purpose, Bootspec offers a generic extension facility [`boot.bootspec.extensions`](options.html#opt-boot.bootspec.extensions) which can be used to inject any data needed for your use case.
An example for SecureBoot is to get the Nix store path to `/etc/os-release` in order to bake it into a unified kernel image:
```nix
{ config, lib, ... }: {
boot.bootspec.extensions = {
"org.secureboot.osRelease" = config.environment.etc."os-release".source;
};
}
```
To reduce incompatibility and prevent names from clashing between applications, it is **highly recommended** to use a unique namespace for your extensions.
## External bootloaders {#sec-experimental-bootspec-external-bootloaders}
It is possible to enable your own bootloader through [`boot.loader.external.installHook`](options.html#opt-boot.loader.external.installHook) which can wrap an existing bootloader.
Currently, there is no good way to compose existing bootloaders to enrich their features (e.g. SecureBoot); it is necessary to reimplement or reuse existing parts.
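As a rough sketch (the exact hook contract is simplified here and the script body is hypothetical), wiring up a custom bootloader could look like this:
```nix
{ pkgs, ... }: {
  boot.loader.external = {
    enable = true;
    # Run when the system is activated; the hook is expected to read the
    # generations' bootspec documents and install the bootloader accordingly.
    installHook = pkgs.writeShellScript "install-my-bootloader" ''
      echo "installing custom bootloader" >&2
      # ... invoke your bootloader's installer here ...
    '';
  };
}
```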

View file

@ -12,6 +12,7 @@
<xi:include href="../from_md/development/sources.chapter.xml" />
<xi:include href="../from_md/development/writing-modules.chapter.xml" />
<xi:include href="../from_md/development/building-parts.chapter.xml" />
<xi:include href="../from_md/development/bootspec.chapter.xml" />
<xi:include href="../from_md/development/what-happens-during-a-system-switch.chapter.xml" />
<xi:include href="../from_md/development/writing-documentation.chapter.xml" />
<xi:include href="../from_md/development/nixos-tests.chapter.xml" />

View file

@ -59,17 +59,35 @@ config = {
## Setting Priorities {#sec-option-definitions-setting-priorities .unnumbered}
A module can override the definitions of an option in other modules by
setting a *priority*. All option definitions that do not have the lowest
setting an *override priority*. All option definitions that do not have the lowest
priority value are discarded. By default, option definitions have
priority 1000. You can specify an explicit priority by using
`mkOverride`, e.g.
priority 100 and option defaults have priority 1500.
You can specify an explicit priority by using `mkOverride`, e.g.
```nix
services.openssh.enable = mkOverride 10 false;
```
This definition causes all other definitions with priorities above 10 to
be discarded. The function `mkForce` is equal to `mkOverride 50`.
be discarded. The function `mkForce` is equal to `mkOverride 50`, and
`mkDefault` is equal to `mkOverride 1000`.
## Ordering Definitions {#sec-option-definitions-ordering .unnumbered}
It is also possible to influence the order in which the definitions for an option are
merged by setting an *order priority* with `mkOrder`. The default order priority is 1000.
The functions `mkBefore` and `mkAfter` are equal to `mkOrder 500` and `mkOrder 1500`, respectively.
As an example,
```nix
hardware.firmware = mkBefore [ myFirmware ];
```
This definition ensures that `myFirmware` comes before other unordered
definitions in the final list value of `hardware.firmware`.
Note that this is different from [override priorities](#sec-option-definitions-setting-priorities):
setting an order does not affect whether the definition is included or not.
## Merging Configurations {#sec-option-definitions-merging .unnumbered}

View file

@ -177,6 +177,48 @@ environment.variables.AMD_VULKAN_ICD = &quot;RADV&quot;;
# Or
environment.variables.VK_ICD_FILENAMES =
&quot;/run/opengl-driver/share/vulkan/icd.d/radeon_icd.x86_64.json&quot;;
</programlisting>
</section>
</section>
<section xml:id="sec-gpu-accel-va-api">
<title>VA-API</title>
<para>
<link xlink:href="https://www.intel.com/content/www/us/en/developer/articles/technical/linuxmedia-vaapi.html">VA-API
(Video Acceleration API)</link> is an open-source library and API
specification, which provides access to graphics hardware
acceleration capabilities for video processing.
</para>
<para>
VA-API drivers are loaded by <literal>libva</literal>. The version
in nixpkgs is built to search the opengl driver path, so drivers
can be installed in
<xref linkend="opt-hardware.opengl.extraPackages" />.
</para>
<para>
VA-API can be tested using:
</para>
<programlisting>
$ nix-shell -p libva-utils --run vainfo
</programlisting>
<section xml:id="sec-gpu-accel-va-api-intel">
<title>Intel</title>
<para>
Modern Intel GPUs use the iHD driver, which can be installed
with:
</para>
<programlisting language="bash">
hardware.opengl.extraPackages = [
intel-media-driver
];
</programlisting>
<para>
Older Intel GPUs use the i965 driver, which can be installed
with:
</para>
<programlisting language="bash">
hardware.opengl.extraPackages = [
vaapiIntel
];
</programlisting>
</section>
</section>

View file

@ -21,6 +21,19 @@ boot.kernelPackages = pkgs.linuxKernel.packages.linux_3_10;
<literal>pkgs.linuxPackages_latest</literal>, that are kept up to
date with new versions.
</para>
<para>
Please note that the current convention in NixOS is to only keep
actively maintained kernel versions on both unstable and the
currently supported stable release(s) of NixOS. This means that a
non-longterm kernel will be removed after it’s abandoned by the
kernel developers, even on stable NixOS versions. If you pin your
kernel to a non-longterm version, expect your evaluation to fail
as soon as the version is out of maintenance.
</para>
<para>
Longterm kernel versions will be removed before the next stable
NixOS release whose support period would outlast the maintenance
period of that kernel version.
</para>
<para>
The default Linux kernel configuration should be fine for most
users. You can see the configuration of your current kernel with the
@ -154,4 +167,38 @@ $ make -C $dev/lib/modules/*/build M=$(pwd)/drivers/net/ethernet/mellanox module
# insmod ./drivers/net/ethernet/mellanox/mlx5/core/mlx5_core.ko
</programlisting>
</section>
<section xml:id="sec-linux-zfs">
<title>ZFS</title>
<para>
It’s a common issue that the latest stable version of ZFS doesn’t
support the latest available Linux kernel. It is recommended to
use the latest available LTS that’s compatible with ZFS. Usually
this is the default kernel provided by nixpkgs (i.e.
<literal>pkgs.linuxPackages</literal>).
</para>
<para>
Alternatively, it’s possible to pin the system to the latest
available kernel version <emphasis>that is supported by
ZFS</emphasis> like this:
</para>
<programlisting language="bash">
{
boot.kernelPackages = pkgs.zfs.latestCompatibleLinuxPackages;
}
</programlisting>
<para>
Please note that the version this attribute points to isn’t
monotonic because the latest kernel version only refers to kernel
versions supported by the Linux developers. In other words, the
latest kernel version that ZFS is compatible with may decrease
over time.
</para>
<para>
An example: the latest version ZFS is compatible with is 5.19
which is a non-longterm version. When 5.19 is out of maintenance,
the latest supported kernel version is 5.15 because it’s longterm
and the versions 5.16, 5.17 and 5.18 are already out of
maintenance because they’re non-longterm.
</para>
</section>
</chapter>

View file

@ -54,7 +54,7 @@ networking.wireless.networks = {
echelon = {
pskRaw = &quot;dca6d6ed41f4ab5a984c9f55f6f66d4efdc720ebf66959810f4329bb391c5435&quot;;
};
}
};
</programlisting>
<para>
or you can use it to directly generate the

View file

@ -0,0 +1,73 @@
<chapter xmlns="http://docbook.org/ns/docbook" xmlns:xlink="http://www.w3.org/1999/xlink" xml:id="sec-experimental-bootspec">
<title>Experimental feature: Bootspec</title>
<para>
Bootspec is an experimental feature, introduced in the
<link xlink:href="https://github.com/NixOS/rfcs/pull/125">RFC-0125
proposal</link>; the reference implementation can be found in
<link xlink:href="https://github.com/NixOS/nixpkgs/pull/172237">this pull request</link>.
Its goal is to standardize bootloader support and enable advanced
boot workflows such as SecureBoot, and potentially more.
</para>
<para>
You can enable the creation of bootspec documents through
<link xlink:href="options.html#opt-boot.bootspec.enable"><literal>boot.bootspec.enable = true</literal></link>,
which will prompt a warning until
<link xlink:href="https://github.com/NixOS/rfcs/pull/125">RFC-0125</link>
is officially merged.
</para>
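<para>
A minimal sketch of enabling it in a configuration:
</para>
<programlisting language="bash">
{
  boot.bootspec.enable = true;
}
</programlisting>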
<section xml:id="sec-experimental-bootspec-schema">
<title>Schema</title>
<para>
The bootspec schema is versioned and validated against
<link xlink:href="https://cuelang.org/">a CUE schema file</link>
which should be considered the source of truth for your
applications.
</para>
<para>
You will find the current version
<link xlink:href="../../../modules/system/activation/bootspec.cue">here</link>.
</para>
</section>
<section xml:id="sec-experimental-bootspec-extensions">
<title>Extensions mechanism</title>
<para>
Bootspec cannot account for all use cases.
</para>
<para>
For this purpose, Bootspec offers a generic extension facility
<link xlink:href="options.html#opt-boot.bootspec.extensions"><literal>boot.bootspec.extensions</literal></link>
which can be used to inject any data needed for your use cases.
</para>
<para>
An example for SecureBoot is to get the Nix store path to
<literal>/etc/os-release</literal> in order to bake it into a
unified kernel image:
</para>
<programlisting language="bash">
{ config, lib, ... }: {
boot.bootspec.extensions = {
&quot;org.secureboot.osRelease&quot; = config.environment.etc.&quot;os-release&quot;.source;
};
}
</programlisting>
<para>
To reduce incompatibility and prevent names from clashing between
applications, it is <emphasis role="strong">highly
recommended</emphasis> to use a unique namespace for your
extensions.
</para>
</section>
<section xml:id="sec-experimental-bootspec-external-bootloaders">
<title>External bootloaders</title>
<para>
It is possible to enable your own bootloader through
<link xlink:href="options.html#opt-boot.loader.external.installHook"><literal>boot.loader.external.installHook</literal></link>
which can wrap an existing bootloader.
</para>
<para>
Currently, there is no good story for composing existing
bootloaders to enrich their features, e.g. for SecureBoot; it will
be necessary to reimplement or reuse existing parts.
</para>
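<para>
A minimal sketch, assuming the accompanying enable switch and a
hypothetical installer script; the arguments the hook receives are
not shown here:
</para>
<programlisting language="bash">
{ pkgs, ... }: {
  boot.loader.external = {
    enable = true;
    # Hypothetical installer; wrap or call your own bootloader here.
    installHook = pkgs.writeShellScript &quot;install-bootloader&quot; ''
      exec my-bootloader-install
    '';
  };
}
</programlisting>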
</section>
</chapter>

View file

@ -66,11 +66,11 @@ config = {
<title>Setting Priorities</title>
<para>
A module can override the definitions of an option in other
modules by setting a <emphasis>priority</emphasis>. All option
definitions that do not have the lowest priority value are
discarded. By default, option definitions have priority 1000. You
can specify an explicit priority by using
<literal>mkOverride</literal>, e.g.
modules by setting an <emphasis>override priority</emphasis>. All
option definitions that do not have the lowest priority value are
discarded. By default, option definitions have priority 100 and
option defaults have priority 1500. You can specify an explicit
priority by using <literal>mkOverride</literal>, e.g.
</para>
<programlisting language="bash">
services.openssh.enable = mkOverride 10 false;
@ -78,7 +78,35 @@ services.openssh.enable = mkOverride 10 false;
<para>
This definition causes all other definitions with priorities above
10 to be discarded. The function <literal>mkForce</literal> is
equal to <literal>mkOverride 50</literal>.
equal to <literal>mkOverride 50</literal>, and
<literal>mkDefault</literal> is equal to
<literal>mkOverride 1000</literal>.
</para>
</section>
<section xml:id="sec-option-definitions-ordering">
<title>Ordering Definitions</title>
<para>
It is also possible to influence the order in which the
definitions for an option are merged by setting an <emphasis>order
priority</emphasis> with <literal>mkOrder</literal>. The default
order priority is 1000. The functions <literal>mkBefore</literal>
and <literal>mkAfter</literal> are equal to
<literal>mkOrder 500</literal> and
<literal>mkOrder 1500</literal>, respectively. As an example,
</para>
<programlisting language="bash">
hardware.firmware = mkBefore [ myFirmware ];
</programlisting>
<para>
This definition ensures that <literal>myFirmware</literal> comes
before other unordered definitions in the final list value of
<literal>hardware.firmware</literal>.
</para>
<para>
Note that this is different from
<link linkend="sec-option-definitions-setting-priorities">override
priorities</link>: setting an order does not affect whether the
definition is included or not.
</para>
</section>
<section xml:id="sec-option-definitions-merging">

View file

@ -561,6 +561,14 @@
<link xlink:href="options.html#opt-services.prometheus.exporters.smartctl.enable">services.prometheus.exporters.smartctl</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://docs.twingate.com/docs/linux">twingate</link>,
a high performance, easy to use zero trust solution that
enables access to private resources from any device with
better security than a VPN.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="sec-release-21.11-incompatibilities">

View file

@ -2,8 +2,8 @@
<title>Release 22.11 (“Raccoon”, 2022.11/30)</title>
<para>
The NixOS release team is happy to announce a new version of NixOS
22.11. NixOS is both a Linux distribution, and a set of packages
usable on other Linux systems and macOS.
22.11. NixOS is a Linux distribution, whose set of packages can also
be used on other Linux systems and macOS.
</para>
<para>
This release is supported until the end of June 2023, handing over
@ -1124,9 +1124,9 @@ services.github-runner.serviceOverrides.SupplementaryGroups = [
<listitem>
<para>
Previously, the options
<xref linkend="opt-services.grafana.provision.datasources" />
<link linkend="opt-services.grafana.provision.datasources">services.grafana.provision.datasources</link>
and
<xref linkend="opt-services.grafana.provision.dashboards" />
<link linkend="opt-services.grafana.provision.dashboards">services.grafana.provision.dashboards</link>
expected lists of datasources or dashboards for the
<link xlink:href="https://grafana.com/docs/grafana/latest/administration/provisioning/">declarative
provisioning</link>.
@ -1139,14 +1139,14 @@ services.github-runner.serviceOverrides.SupplementaryGroups = [
<para>
<emphasis role="strong">datasources</emphasis>, please
rename your declarations to
<xref linkend="opt-services.grafana.provision.datasources.settings.datasources" />.
<link linkend="opt-services.grafana.provision.datasources.settings.datasources">services.grafana.provision.datasources.settings.datasources</link>.
</para>
</listitem>
<listitem>
<para>
<emphasis role="strong">dashboards</emphasis>, please
rename your declarations to
<xref linkend="opt-services.grafana.provision.dashboards.settings.providers" />.
<link linkend="opt-services.grafana.provision.dashboards.settings.providers">services.grafana.provision.dashboards.settings.providers</link>.
</para>
</listitem>
</itemizedlist>
@ -1159,9 +1159,9 @@ services.github-runner.serviceOverrides.SupplementaryGroups = [
It’s possible to declare the
<literal>apiVersion</literal> of your dashboards and
datasources by
<xref linkend="opt-services.grafana.provision.datasources.settings.apiVersion" />
<link linkend="opt-services.grafana.provision.datasources.settings.apiVersion">services.grafana.provision.datasources.settings.apiVersion</link>
(or
<xref linkend="opt-services.grafana.provision.dashboards.settings.apiVersion" />).
<link linkend="opt-services.grafana.provision.dashboards.settings.apiVersion">services.grafana.provision.dashboards.settings.apiVersion</link>).
</para>
</listitem>
<listitem>
@ -1169,9 +1169,9 @@ services.github-runner.serviceOverrides.SupplementaryGroups = [
Instead of declaring datasources and dashboards in
pure Nix, it’s also possible to specify configuration
files (or directories) with YAML instead using
<xref linkend="opt-services.grafana.provision.datasources.path" />
<link linkend="opt-services.grafana.provision.datasources.path">services.grafana.provision.datasources.path</link>
(or
<xref linkend="opt-services.grafana.provision.dashboards.path" />.
<link linkend="opt-services.grafana.provision.dashboards.path">services.grafana.provision.dashboards.path</link>.
This is useful when having provisioning files from
non-NixOS Grafana instances that you also want to
deploy to NixOS.
@ -1186,9 +1186,9 @@ services.github-runner.serviceOverrides.SupplementaryGroups = [
</listitem>
<listitem>
<para>
<xref linkend="opt-services.grafana.provision.notifiers" />
<link linkend="opt-services.grafana.provision.notifiers">services.grafana.provision.notifiers</link>
is not affected by this change because this feature is
deprecated by Grafana and will probably removed in
deprecated by Grafana and will probably be removed in
Grafana 10. It’s recommended to use
<literal>services.grafana.provision.alerting.contactPoints</literal>
instead.

View file

@ -22,10 +22,34 @@
</section>
<section xml:id="sec-release-23.05-new-services">
<title>New Services</title>
<itemizedlist spacing="compact">
<itemizedlist>
<listitem>
<para>
Create the first release note entry in this section!
<link xlink:href="https://github.com/akinomyoga/ble.sh">blesh</link>,
a line editor written in pure bash. Available as
<link linkend="opt-programs.bash.blesh.enable">programs.bash.blesh</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://github.com/junegunn/fzf">fzf</link>,
a command-line fuzzy finder. Available as
<link linkend="opt-programs.fzf.fuzzyCompletion">programs.fzf</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://github.com/ellie/atuin">atuin</link>,
a sync server for shell history. Available as
<link linkend="opt-services.atuin.enable">services.atuin</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://v2raya.org">v2rayA</link>, a Linux
web GUI client of Project V which supports V2Ray, Xray, SS,
SSR, Trojan and Pingtunnel. Available as
<link xlink:href="options.html#opt-services.v2raya.enable">services.v2raya</link>.
</para>
</listitem>
</itemizedlist>
@ -44,6 +68,14 @@
instead.
</para>
</listitem>
<listitem>
<para>
<literal>borgbackup</literal> module now has an option for
inhibiting system sleep while backups are running, defaulting
to off (not inhibiting sleep), available as
<link linkend="opt-services.borgbackup.jobs._name_.inhibitsSleep"><literal>services.borgbackup.jobs.&lt;name&gt;.inhibitsSleep</literal></link>.
</para>
</listitem>
<listitem>
<para>
The EC2 image module no longer fetches instance metadata in
@ -68,6 +100,36 @@
deprecation</link>.
</para>
</listitem>
<listitem>
<para>
The
<link linkend="opt-services.snapserver.openFirewall">services.snapserver.openFirewall</link>
module option default value has been changed from
<literal>true</literal> to <literal>false</literal>. You will
need to explicitly set this option to
<literal>true</literal>, or configure your firewall.
</para>
</listitem>
<listitem>
<para>
The
<link linkend="opt-services.tmate-ssh-server.openFirewall">services.tmate-ssh-server.openFirewall</link>
module option default value has been changed from
<literal>true</literal> to <literal>false</literal>. You will
need to explicitly set this option to
<literal>true</literal>, or configure your firewall.
</para>
</listitem>
<listitem>
<para>
The
<link linkend="opt-services.unifi-video.openFirewall">services.unifi-video.openFirewall</link>
module option default value has been changed from
<literal>true</literal> to <literal>false</literal>. You will
need to explicitly set this option to
<literal>true</literal>, or configure your firewall.
</para>
</listitem>
<listitem>
<para>
The EC2 image module previously detected and automatically
@ -88,6 +150,26 @@
relying on this should provide their own implementation.
</para>
</listitem>
<listitem>
<para>
Qt 5.12 and 5.14 have been removed, as the corresponding
branches have been EOL upstream for a long time. This affected
under 10 packages in nixpkgs, largely unmaintained upstream as
well; however, out-of-tree package expressions may need to be
updated manually.
</para>
</listitem>
<listitem>
<para>
In <literal>mastodon</literal> it is now necessary to specify the
location of the file containing the <literal>PostgreSQL</literal>
database password. The default value of the
<literal>services.mastodon.database.passwordFile</literal>
parameter has been changed from
<literal>/var/lib/mastodon/secrets/db-password</literal> to
<literal>null</literal>.
</para>
</listitem>
<listitem>
<para>
The <literal>nix.readOnlyStore</literal> option has been
@ -100,6 +182,17 @@
<section xml:id="sec-release-23.05-notable-changes">
<title>Other Notable Changes</title>
<itemizedlist>
<listitem>
<para>
<literal>vim_configurable</literal> has been renamed to
<literal>vim-full</literal> to avoid confusion:
<literal>vim-full</literal>’s build-time features are
configurable, but both <literal>vim</literal> and
<literal>vim-full</literal> are
<emphasis>customizable</emphasis> (in the sense of user
configuration, like vimrc).
</para>
</listitem>
<listitem>
<para>
The module for the application firewall
@ -108,6 +201,45 @@
<link linkend="opt-services.opensnitch.rules">services.opensnitch.rules</link>
</para>
</listitem>
<listitem>
<para>
<literal>services.mastodon</literal> gained a tootctl wrapper
named <literal>mastodon-tootctl</literal>, similar to
<literal>nextcloud-occ</literal>, which can be executed by any
user; it switches to the configured mastodon user with sudo and
sources the required environment variables.
</para>
</listitem>
<listitem>
<para>
The <literal>dnsmasq</literal> service now takes configuration
via the <literal>services.dnsmasq.settings</literal> attribute
set. The option
<literal>services.dnsmasq.extraConfig</literal> will be
deprecated when NixOS 22.11 reaches end of life.
</para>
</listitem>
<listitem>
<para>
To reduce closure size, the
<literal>nixos/modules/profiles/minimal.nix</literal> profile no
longer installs documentation and manuals. It also disables the
<literal>logrotate</literal> and <literal>udisks2</literal>
services.
</para>
</listitem>
<listitem>
<para>
The minimal ISO image now uses the
<literal>nixos/modules/profiles/minimal.nix</literal> profile.
</para>
</listitem>
<listitem>
<para>
<literal>mastodon</literal> now supports connection to a
remote <literal>PostgreSQL</literal> database.
</para>
</listitem>
<listitem>
<para>
A new <literal>virtualisation.rosetta</literal> module was
@ -121,6 +253,35 @@
<link xlink:href="https://search.nixos.org/packages?channel=unstable&amp;show=utm&amp;from=0&amp;size=1&amp;sort=relevance&amp;type=packages&amp;query=utm">package</link>.
</para>
</listitem>
<listitem>
<para>
The new option <literal>users.motdFile</literal> allows
configuring a Message Of The Day that can be updated
dynamically.
</para>
</listitem>
<listitem>
<para>
Resilio sync secret keys can now be provided using a secrets
file at runtime, preventing these secrets from ending up in
the Nix store.
</para>
</listitem>
<listitem>
<para>
The <literal>services.fwupd</literal> module now allows
arbitrary daemon settings to be configured in a structured
manner
(<link linkend="opt-services.fwupd.daemonSettings"><literal>services.fwupd.daemonSettings</literal></link>).
</para>
</listitem>
<listitem>
<para>
The <literal>unifi-poller</literal> package and corresponding
NixOS module have been renamed to <literal>unpoller</literal>
to match upstream.
</para>
</listitem>
</itemizedlist>
</section>
</section>

View file

@ -164,6 +164,8 @@ In addition to numerous new and upgraded packages, this release has the followin
- [smartctl_exporter](https://github.com/prometheus-community/smartctl_exporter), a Prometheus exporter for [S.M.A.R.T.](https://en.wikipedia.org/wiki/S.M.A.R.T.) data. Available as [services.prometheus.exporters.smartctl](options.html#opt-services.prometheus.exporters.smartctl.enable).
- [twingate](https://docs.twingate.com/docs/linux), a high-performance, easy-to-use zero-trust solution that enables access to private resources from any device with better security than a VPN.
## Backward Incompatibilities {#sec-release-21.11-incompatibilities}
- The NixOS VM test framework, `pkgs.nixosTest`/`make-test-python.nix` (`pkgs.testers.nixosTest` since 22.05), now requires detaching commands such as `succeed("foo &")` and `succeed("foo | xclip -i")` to close stdout.
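A hedged sketch of adapting a test script accordingly (`my-daemon` is a placeholder); redirecting stdout, e.g. to `/dev/null` or stderr, lets the test driver consider the detaching command finished:

```nix
{
  testScript = ''
    # Detach a background process without keeping stdout open.
    machine.succeed("my-daemon >/dev/null 2>&1 &")
  '';
}
```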

View file

@ -1,6 +1,6 @@
# Release 22.11 (“Raccoon”, 2022.11/30) {#sec-release-22.11}
The NixOS release team is happy to announce a new version of NixOS 22.11. NixOS is both a Linux distribution, and a set of packages usable on other Linux systems and macOS.
The NixOS release team is happy to announce a new version of NixOS 22.11. NixOS is a Linux distribution, whose set of packages can also be used on other Linux systems and macOS.
This release is supported until the end of June 2023, handing over to NixOS 23.05.
@ -340,32 +340,32 @@ In addition to numerous new and upgraded packages, this release includes the fol
Alternatively you can also set all your values from `extraOptions` to
`systemd.services.grafana.environment`, make sure you don't forget to add
the `GF_` prefix though!
- Previously, the options [](#opt-services.grafana.provision.datasources) and
[](#opt-services.grafana.provision.dashboards) expected lists of datasources
- Previously, the options [services.grafana.provision.datasources](#opt-services.grafana.provision.datasources) and
[services.grafana.provision.dashboards](#opt-services.grafana.provision.dashboards) expected lists of datasources
or dashboards for the [declarative provisioning](https://grafana.com/docs/grafana/latest/administration/provisioning/).
To declare lists of
- **datasources**, please rename your declarations to [](#opt-services.grafana.provision.datasources.settings.datasources).
- **dashboards**, please rename your declarations to [](#opt-services.grafana.provision.dashboards.settings.providers).
- **datasources**, please rename your declarations to [services.grafana.provision.datasources.settings.datasources](#opt-services.grafana.provision.datasources.settings.datasources).
- **dashboards**, please rename your declarations to [services.grafana.provision.dashboards.settings.providers](#opt-services.grafana.provision.dashboards.settings.providers).
This change was made to support more features for that:
- It's possible to declare the `apiVersion` of your dashboards and datasources
by [](#opt-services.grafana.provision.datasources.settings.apiVersion) (or
[](#opt-services.grafana.provision.dashboards.settings.apiVersion)).
by [services.grafana.provision.datasources.settings.apiVersion](#opt-services.grafana.provision.datasources.settings.apiVersion) (or
[services.grafana.provision.dashboards.settings.apiVersion](#opt-services.grafana.provision.dashboards.settings.apiVersion)).
- Instead of declaring datasources and dashboards in pure Nix, it's also possible
to specify configuration files (or directories) with YAML instead using
[](#opt-services.grafana.provision.datasources.path) (or
[](#opt-services.grafana.provision.dashboards.path). This is useful when having
[services.grafana.provision.datasources.path](#opt-services.grafana.provision.datasources.path) (or
[services.grafana.provision.dashboards.path](#opt-services.grafana.provision.dashboards.path). This is useful when having
provisioning files from non-NixOS Grafana instances that you also want to
deploy to NixOS.
__Note:__ secrets from these files will be leaked into the store unless you use a
[**file**-provider or env-var](https://grafana.com/docs/grafana/latest/setup-grafana/configure-grafana/#file-provider) for secrets!
- [](#opt-services.grafana.provision.notifiers) is not affected by this change because
this feature is deprecated by Grafana and will probably removed in Grafana 10.
- [services.grafana.provision.notifiers](#opt-services.grafana.provision.notifiers) is not affected by this change because
this feature is deprecated by Grafana and will probably be removed in Grafana 10.
It's recommended to use `services.grafana.provision.alerting.contactPoints` instead.
- The `services.grafana.provision.alerting` option was added. It includes suboptions for every alerting-related objects (with the exception of `notifiers`), which means it's now possible to configure modern Grafana alerting declaratively.

View file

@ -14,7 +14,13 @@ In addition to numerous new and upgraded packages, this release has the followin
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
- Create the first release note entry in this section!
- [blesh](https://github.com/akinomyoga/ble.sh), a line editor written in pure bash. Available as [programs.bash.blesh](#opt-programs.bash.blesh.enable).
- [fzf](https://github.com/junegunn/fzf), a command-line fuzzy finder. Available as [programs.fzf](#opt-programs.fzf.fuzzyCompletion).
- [atuin](https://github.com/ellie/atuin), a sync server for shell history. Available as [services.atuin](#opt-services.atuin.enable).
- [v2rayA](https://v2raya.org), a Linux web GUI client of Project V which supports V2Ray, Xray, SS, SSR, Trojan and Pingtunnel. Available as [services.v2raya](options.html#opt-services.v2raya.enable).
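The services above are enabled through the options linked in each entry, for example (a minimal sketch):

```nix
{
  programs.fzf.fuzzyCompletion = true;
  services.atuin.enable = true;
}
```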
## Backward Incompatibilities {#sec-release-23.05-incompatibilities}
@ -22,21 +28,56 @@ In addition to numerous new and upgraded packages, this release has the followin
- `carnix` and `cratesIO` has been removed due to being unmaintained, use alternatives such as [naersk](https://github.com/nix-community/naersk) and [crate2nix](https://github.com/kolloch/crate2nix) instead.
- `borgbackup` module now has an option for inhibiting system sleep while backups are running, defaulting to off (not inhibiting sleep), available as [`services.borgbackup.jobs.<name>.inhibitsSleep`](#opt-services.borgbackup.jobs._name_.inhibitsSleep).
- The EC2 image module no longer fetches instance metadata in stage-1. This results in a significantly smaller initramfs, since network drivers no longer need to be included, and faster boots, since metadata fetching can happen in parallel with startup of other services.
This breaks services which rely on metadata being present by the time stage-2 is entered. Anything which reads EC2 metadata from `/etc/ec2-metadata` should now have an `after` dependency on `fetch-ec2-metadata.service`
- `services.sourcehut.dispatch` and the corresponding package (`sourcehut.dispatchsrht`) have been removed due to [upstream deprecation](https://sourcehut.org/blog/2022-08-01-dispatch-deprecation-plans/).
- The [services.snapserver.openFirewall](#opt-services.snapserver.openFirewall) module option default value has been changed from `true` to `false`. You will need to explicitly set this option to `true`, or configure your firewall.
- The [services.tmate-ssh-server.openFirewall](#opt-services.tmate-ssh-server.openFirewall) module option default value has been changed from `true` to `false`. You will need to explicitly set this option to `true`, or configure your firewall.
- The [services.unifi-video.openFirewall](#opt-services.unifi-video.openFirewall) module option default value has been changed from `true` to `false`. You will need to explicitly set this option to `true`, or configure your firewall.
- The EC2 image module previously detected and automatically mounted ext3-formatted instance store devices and partitions in stage-1 (initramfs), storing `/tmp` on the first discovered device. This behaviour, which only catered to very specific use cases and could not be disabled, has been removed. Users relying on this should provide their own implementation, and probably use ext4 and perform the mount in stage-2.
- The EC2 image module previously detected and activated swap-formatted instance store devices and partitions in stage-1 (initramfs). This behaviour has been removed. Users relying on this should provide their own implementation.
- Qt 5.12 and 5.14 have been removed, as the corresponding branches have been EOL upstream for a long time. This affected under 10 packages in nixpkgs, largely unmaintained upstream as well; however, out-of-tree package expressions may need to be updated manually.
- In `mastodon` it is now necessary to specify the location of the file containing the `PostgreSQL` database password. The default value of the `services.mastodon.database.passwordFile` parameter has been changed from `/var/lib/mastodon/secrets/db-password` to `null`.
- The `nix.readOnlyStore` option has been renamed to `boot.readOnlyNixStore` to clarify that it configures the NixOS boot process, not the Nix daemon.
## Other Notable Changes {#sec-release-23.05-notable-changes}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
- `vim_configurable` has been renamed to `vim-full` to avoid confusion: `vim-full`'s build-time features are configurable, but both `vim` and `vim-full` are *customizable* (in the sense of user configuration, like vimrc).
- The module for the application firewall `opensnitch` got the ability to configure rules. Available as [services.opensnitch.rules](#opt-services.opensnitch.rules)
- `services.mastodon` gained a tootctl wrapper named `mastodon-tootctl`, similar to `nextcloud-occ`, which can be executed by any user; it switches to the configured mastodon user with sudo and sources the required environment variables.
- The `dnsmasq` service now takes configuration via the
`services.dnsmasq.settings` attribute set. The option
`services.dnsmasq.extraConfig` will be deprecated when NixOS 22.11 reaches
end of life. A minimal sketch of the new format follows after this list.
- To reduce closure size, the `nixos/modules/profiles/minimal.nix` profile no longer installs documentation and manuals. It also disables the `logrotate` and `udisks2` services.
- The minimal ISO image now uses the `nixos/modules/profiles/minimal.nix` profile.
- `mastodon` now supports connection to a remote `PostgreSQL` database.
- A new `virtualisation.rosetta` module was added to allow running `x86_64` binaries through [Rosetta](https://developer.apple.com/documentation/apple-silicon/about-the-rosetta-translation-environment) inside virtualised NixOS guests on Apple silicon. This feature works by default with the [UTM](https://docs.getutm.app/) virtualisation [package](https://search.nixos.org/packages?channel=unstable&show=utm&from=0&size=1&sort=relevance&type=packages&query=utm).
- The new option `users.motdFile` allows configuring a Message Of The Day that can be updated dynamically.
- Resilio sync secret keys can now be provided using a secrets file at runtime, preventing these secrets from ending up in the Nix store.
- The `services.fwupd` module now allows arbitrary daemon settings to be configured in a structured manner ([`services.fwupd.daemonSettings`](#opt-services.fwupd.daemonSettings)).
- The `unifi-poller` package and corresponding NixOS module have been renamed to `unpoller` to match upstream.
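As referenced in the `dnsmasq` entry above, a minimal sketch of the structured settings format; the concrete dnsmasq options shown are illustrative assumptions:

```nix
{
  services.dnsmasq = {
    enable = true;
    # Keys map to plain dnsmasq options; list values repeat the option.
    settings = {
      domain-needed = true;
      server = [ "9.9.9.9" "149.112.112.112" ];
    };
  };
}
```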

View file

@ -26,7 +26,7 @@
# If you include more than one option list into a document, you need to
# provide different ids.
, variablelistId ? "configuration-variable-list"
# Strig to prefix to the option XML/HTML id attributes.
# String to prefix to the option XML/HTML id attributes.
, optionIdPrefix ? "opt-"
, revision ? "" # Specify revision for the options
# a set of options the docs we are generating will be merged into, as if by recursiveUpdate.
@ -45,28 +45,11 @@
}:
let
# Make a value safe for JSON. Functions are replaced by the string "<function>",
# derivations are replaced with an attrset
# { _type = "derivation"; name = <name of that derivation>; }.
# We need to handle derivations specially because consumers want to know about them,
# but we can't easily use the type,name subset of keys (since type is often used as
# a module option and might cause confusion). Use _type,name instead to the same
# effect, since _type is already used by the module system.
substSpecial = x:
if lib.isDerivation x then { _type = "derivation"; name = x.name; }
else if builtins.isAttrs x then lib.mapAttrs (name: substSpecial) x
else if builtins.isList x then map substSpecial x
else if lib.isFunction x then "<function>"
else x;
rawOpts = lib.optionAttrSetToDocList options;
transformedOpts = map transformOptions rawOpts;
filteredOpts = lib.filter (opt: opt.visible && !opt.internal) transformedOpts;
optionsList = lib.flip map filteredOpts
(opt: opt
// lib.optionalAttrs (opt ? example) { example = substSpecial opt.example; }
// lib.optionalAttrs (opt ? default) { default = substSpecial opt.default; }
// lib.optionalAttrs (opt ? type) { type = substSpecial opt.type; }
// lib.optionalAttrs (opt ? relatedPackages && opt.relatedPackages != []) { relatedPackages = genRelatedPackages opt.relatedPackages opt.name; }
);
@ -111,14 +94,16 @@ in rec {
inherit optionsNix;
optionsAsciiDoc = pkgs.runCommand "options.adoc" {} ''
${pkgs.python3Minimal}/bin/python ${./generateAsciiDoc.py} \
< ${optionsJSON}/share/doc/nixos/options.json \
${pkgs.python3Minimal}/bin/python ${./generateDoc.py} \
--format asciidoc \
${optionsJSON}/share/doc/nixos/options.json \
> $out
'';
optionsCommonMark = pkgs.runCommand "options.md" {} ''
${pkgs.python3Minimal}/bin/python ${./generateCommonMark.py} \
< ${optionsJSON}/share/doc/nixos/options.json \
${pkgs.python3Minimal}/bin/python ${./generateDoc.py} \
--format commonmark \
${optionsJSON}/share/doc/nixos/options.json \
> $out
'';

View file

@ -1,37 +0,0 @@
import json
import sys
options = json.load(sys.stdin)
# TODO: declarations: link to github
for (name, value) in options.items():
print(f'== {name}')
print()
print(value['description'])
print()
print('[discrete]')
print('=== details')
print()
print(f'Type:: {value["type"]}')
if 'default' in value:
print('Default::')
print('+')
print('----')
print(json.dumps(value['default'], ensure_ascii=False, separators=(',', ':')))
print('----')
print()
else:
print('No Default:: {blank}')
if value['readOnly']:
print('Read Only:: {blank}')
else:
print()
if 'example' in value:
print('Example::')
print('+')
print('----')
print(json.dumps(value['example'], ensure_ascii=False, separators=(',', ':')))
print('----')
print()
else:
print('No Example:: {blank}')
print()

View file

@ -1,27 +0,0 @@
import json
import sys
options = json.load(sys.stdin)
for (name, value) in options.items():
print('##', name.replace('<', '&lt;').replace('>', '&gt;'))
print(value['description'])
print()
if 'type' in value:
print('*_Type_*:')
print(value['type'])
print()
print()
if 'default' in value:
print('*_Default_*')
print('```')
print(json.dumps(value['default'], ensure_ascii=False, separators=(',', ':')))
print('```')
print()
print()
if 'example' in value:
print('*_Example_*')
print('```')
print(json.dumps(value['example'], ensure_ascii=False, separators=(',', ':')))
print('```')
print()
print()

View file

@ -0,0 +1,108 @@
import argparse
import json
import sys
formats = ['commonmark', 'asciidoc']
parser = argparse.ArgumentParser(
description = 'Generate documentation for a set of JSON-formatted NixOS options'
)
parser.add_argument(
'nix_options_path',
help = 'a path to a JSON file containing the NixOS options'
)
parser.add_argument(
'-f',
'--format',
choices = formats,
required = True,
help = f'the documentation format to generate'
)
args = parser.parse_args()
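# Example invocation (paths are illustrative):
#   python3 generateDoc.py --format commonmark options.json > options.md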
# Pretty-print certain Nix types, like literal expressions.
def render_types(obj):
if '_type' not in obj: return obj
_type = obj['_type']
if _type == 'literalExpression' or _type == 'literalDocBook':
return obj['text']
if _type == 'derivation':
return obj['name']
raise Exception(f'Unexpected type `{_type}` in {json.dumps(obj)}')
def generate_commonmark(options):
for (name, value) in options.items():
print('##', name.replace('<', '&lt;').replace('>', '&gt;'))
print(value['description'])
print()
if 'type' in value:
print('*_Type_*')
print ('```')
print(value['type'])
print ('```')
print()
print()
if 'default' in value:
print('*_Default_*')
print('```')
print(json.dumps(value['default'], ensure_ascii=False, separators=(',', ':')))
print('```')
print()
print()
if 'example' in value:
print('*_Example_*')
print('```')
print(json.dumps(value['example'], ensure_ascii=False, separators=(',', ':')))
print('```')
print()
print()
# TODO: declarations: link to github
def generate_asciidoc(options):
for (name, value) in options.items():
print(f'== {name}')
print()
print(value['description'])
print()
print('[discrete]')
print('=== details')
print()
print(f'Type:: {value["type"]}')
if 'default' in value:
print('Default::')
print('+')
print('----')
print(json.dumps(value['default'], ensure_ascii=False, separators=(',', ':')))
print('----')
print()
else:
print('No Default:: {blank}')
if value['readOnly']:
print('Read Only:: {blank}')
else:
print()
if 'example' in value:
print('Example::')
print('+')
print('----')
print(json.dumps(value['example'], ensure_ascii=False, separators=(',', ':')))
print('----')
print()
else:
print('No Example:: {blank}')
print()
with open(args.nix_options_path) as nix_options_json:
options = json.load(nix_options_json, object_hook=render_types)
if args.format == 'commonmark':
generate_commonmark(options)
elif args.format == 'asciidoc':
generate_asciidoc(options)
else:
raise Exception(f'Unsupported documentation format `--format {args.format}`')

View file

@ -138,82 +138,6 @@
</xsl:template>
<xsl:template match="string[contains(@value, '&#010;')]" mode="top">
<programlisting>
<xsl:text>''&#010;</xsl:text>
<xsl:value-of select='str:replace(str:replace(@value, "&apos;&apos;", "&apos;&apos;&apos;"), "${", "&apos;&apos;${")' />
<xsl:text>''</xsl:text>
</programlisting>
</xsl:template>
<xsl:template match="*" mode="top">
<literal><xsl:apply-templates select="." /></literal>
</xsl:template>
<xsl:template match="null">
<xsl:text>null</xsl:text>
</xsl:template>
<xsl:template match="string">
<xsl:choose>
<xsl:when test="(contains(@value, '&quot;') or contains(@value, '\')) and not(contains(@value, '&#010;'))">
<xsl:text>''</xsl:text><xsl:value-of select='str:replace(str:replace(@value, "&apos;&apos;", "&apos;&apos;&apos;"), "${", "&apos;&apos;${")' /><xsl:text>''</xsl:text>
</xsl:when>
<xsl:otherwise>
<xsl:text>"</xsl:text><xsl:value-of select="str:replace(str:replace(str:replace(str:replace(@value, '\', '\\'), '&quot;', '\&quot;'), '&#010;', '\n'), '${', '\${')" /><xsl:text>"</xsl:text>
</xsl:otherwise>
</xsl:choose>
</xsl:template>
<xsl:template match="int">
<xsl:value-of select="@value" />
</xsl:template>
<xsl:template match="bool[@value = 'true']">
<xsl:text>true</xsl:text>
</xsl:template>
<xsl:template match="bool[@value = 'false']">
<xsl:text>false</xsl:text>
</xsl:template>
<xsl:template match="list">
[
<xsl:for-each select="*">
<xsl:apply-templates select="." />
<xsl:text> </xsl:text>
</xsl:for-each>
]
</xsl:template>
<xsl:template match="attrs[attr[@name = '_type' and string[@value = 'literalExpression']]]">
<xsl:value-of select="attr[@name = 'text']/string/@value" />
</xsl:template>
<xsl:template match="attrs">
{
<xsl:for-each select="attr">
<xsl:value-of select="@name" />
<xsl:text> = </xsl:text>
<xsl:apply-templates select="*" /><xsl:text>; </xsl:text>
</xsl:for-each>
}
</xsl:template>
<xsl:template match="attrs[attr[@name = '_type' and string[@value = 'derivation']]]">
<replaceable>(build of <xsl:value-of select="attr[@name = 'name']/string/@value" />)</replaceable>
</xsl:template>
<xsl:template match="attr[@name = 'declarations' or @name = 'definitions']">
<simplelist>
<!--
@ -275,10 +199,4 @@
</simplelist>
</xsl:template>
<xsl:template match="function">
<xsl:text>λ</xsl:text>
</xsl:template>
</xsl:stylesheet>

View file

@ -8,9 +8,9 @@ let
systemd = cfg.package;
in rec {
shellEscape = s: (replaceChars [ "\\" ] [ "\\\\" ] s);
shellEscape = s: (replaceStrings [ "\\" ] [ "\\\\" ] s);
mkPathSafeName = lib.replaceChars ["@" ":" "\\" "[" "]"] ["-" "-" "-" "" ""];
mkPathSafeName = lib.replaceStrings ["@" ":" "\\" "[" "]"] ["-" "-" "-" "" ""];
# a type for options that take a unit name
unitNameType = types.strMatching "[a-zA-Z0-9@%:_.\\-]+[.](service|socket|device|mount|automount|swap|target|path|timer|scope|slice)";
@ -258,7 +258,7 @@ in rec {
makeJobScript = name: text:
let
scriptName = replaceChars [ "\\" "@" ] [ "-" "_" ] (shellEscape name);
scriptName = replaceStrings [ "\\" "@" ] [ "-" "_" ] (shellEscape name);
out = (pkgs.writeShellScriptBin scriptName ''
set -e
${text}

View file

@ -48,7 +48,7 @@ rec {
trim = s: removeSuffix "/" (removePrefix "/" s);
normalizedPath = strings.normalizePath s;
in
replaceChars ["/"] ["-"]
replaceStrings ["/"] ["-"]
(replacePrefix "." (strings.escapeC ["."] ".")
(strings.escapeC (stringToCharacters " !\"#$%&'()*+,;<=>=@[\\]^`{|}~-")
(if normalizedPath == "/" then normalizedPath else trim normalizedPath)));
@ -67,7 +67,7 @@ rec {
else if builtins.isInt arg || builtins.isFloat arg then toString arg
else throw "escapeSystemdExecArg only allows strings, paths and numbers";
in
replaceChars [ "%" "$" ] [ "%%" "$$" ] (builtins.toJSON s);
replaceStrings [ "%" "$" ] [ "%%" "$$" ] (builtins.toJSON s);
# Quotes a list of arguments into a single string for use in a Exec*
# line.
@ -112,7 +112,7 @@ rec {
else if isAttrs item then
map (name:
let
escapedName = ''"${replaceChars [''"'' "\\"] [''\"'' "\\\\"] name}"'';
escapedName = ''"${replaceStrings [''"'' "\\"] [''\"'' "\\\\"] name}"'';
in
recurse (prefix + "." + escapedName) item.${name}) (attrNames item)
else if isList item then

View file

@ -94,9 +94,4 @@ with lib;
# Before changing this value read the documentation for this option
# (e.g. man configuration.nix or on https://nixos.org/nixos/options.html).
system.stateVersion = "21.05"; # Did you read the comment?
# As this is intended as a standalone image, undo some of the minimal profile stuff
documentation.enable = true;
documentation.nixos.enable = true;
environment.noXlibs = false;
}

View file

@ -26,9 +26,4 @@ with lib;
# Network
networking.useDHCP = false;
networking.interfaces.eth0.useDHCP = true;
# As this is intended as a standalone image, undo some of the minimal profile stuff
documentation.enable = true;
documentation.nixos.enable = true;
environment.noXlibs = false;
}

View file

@ -82,8 +82,8 @@ in {
kerberos = mkOption {
type = types.package;
default = pkgs.krb5Full;
defaultText = literalExpression "pkgs.krb5Full";
default = pkgs.krb5;
defaultText = literalExpression "pkgs.krb5";
example = literalExpression "pkgs.heimdal";
description = lib.mdDoc ''
The Kerberos implementation that will be present in

View file

@ -30,9 +30,11 @@ with lib;
beam = super.beam_nox;
cairo = super.cairo.override { x11Support = false; };
dbus = super.dbus.override { x11Support = false; };
ffmpeg_4 = super.ffmpeg_4.override { sdlSupport = false; vdpauSupport = false; };
ffmpeg_5 = super.ffmpeg_5.override { sdlSupport = false; vdpauSupport = false; };
ffmpeg_4 = super.ffmpeg_4-headless;
ffmpeg_5 = super.ffmpeg_5-headless;
gobject-introspection = super.gobject-introspection.override { x11Support = false; };
imagemagick = super.imagemagick.override { libX11Support = false; libXtSupport = false; };
imagemagickBig = super.imagemagickBig.override { libX11Support = false; libXtSupport = false; };
libva = super.libva-minimal;
networkmanager-fortisslvpn = super.networkmanager-fortisslvpn.override { withGnome = false; };
networkmanager-iodine = super.networkmanager-iodine.override { withGnome = false; };
@ -41,7 +43,10 @@ with lib;
networkmanager-openvpn = super.networkmanager-openvpn.override { withGnome = false; };
networkmanager-sstp = super.networkmanager-vpnc.override { withGnome = false; };
networkmanager-vpnc = super.networkmanager-vpnc.override { withGnome = false; };
pinentry = super.pinentry.override { enabledFlavors = [ "curses" "tty" "emacs" ]; withLibsecret = false; };
qemu = super.qemu.override { gtkSupport = false; spiceSupport = false; sdlSupport = false; };
qrencode = super.qrencode.overrideAttrs (_: { doCheck = false; });
zbar = super.zbar.override { enableVideo = false; withXorg = false; };
}));
};
}

View file

@ -160,7 +160,7 @@ let
config = rec {
device = mkIf options.label.isDefined
"/dev/disk/by-label/${config.label}";
deviceName = lib.replaceChars ["\\"] [""] (escapeSystemdPath config.device);
deviceName = lib.replaceStrings ["\\"] [""] (escapeSystemdPath config.device);
realDevice = if config.randomEncryption.enable then "/dev/mapper/${deviceName}" else config.device;
};

View file

@ -61,6 +61,17 @@ in
Defaults to `false` to respect its opt-in nature.
'';
};
xdgOpenUsePortal = mkOption {
type = types.bool;
default = false;
description = lib.mdDoc ''
Sets environment variable `NIXOS_XDG_OPEN_USE_PORTAL` to `1`.
This will make `xdg-open` use the portal to open programs, which resolves bugs involving
programs opening inside FHS envs or with unexpected env vars set from wrappers.
See [#160923](https://github.com/NixOS/nixpkgs/issues/160923) for more info.
'';
};
};
config =
@ -95,6 +106,7 @@ in
sessionVariables = {
GTK_USE_PORTAL = mkIf cfg.gtkUsePortal "1";
NIXOS_XDG_OPEN_USE_PORTAL = mkIf cfg.xdgOpenUsePortal "1";
XDG_DESKTOP_PORTAL_DIR = "${joinedPortals}/share/xdg-desktop-portal/portals";
};
};

View file

@ -132,6 +132,8 @@ in
options zram num_devices=${toString cfg.numDevices}
'';
boot.kernelParams = ["zram.num_devices=${toString cfg.numDevices}"];
services.udev.extraRules = ''
KERNEL=="zram[0-9]*", ENV{SYSTEMD_WANTS}="zram-init-%k.service", TAG+="systemd"
'';
@ -178,9 +180,9 @@ in
serviceConfig = {
Type = "oneshot";
RemainAfterExit = true;
ExecStartPre = "${modprobe} -r zram";
ExecStart = "${modprobe} zram";
ExecStop = "${modprobe} -r zram";
ExecStartPre = "-${modprobe} -r zram";
ExecStart = "-${modprobe} zram";
ExecStop = "-${modprobe} -r zram";
};
restartTriggers = [
cfg.numDevices

View file

@ -6,6 +6,12 @@
with lib;
let
# This is copied into the installer image, so it's important that it is filtered
# to avoid including a large .git directory.
# We also want the source name to be normalised to "source" to avoid depending on the
# location of nixpkgs.
# In the future we might want to expose the ISO image from the flake and use
# `self.outPath` directly instead.
nixpkgs = lib.cleanSource pkgs.path;
# We need a copy of the Nix expressions for Nixpkgs and NixOS on the
@ -31,7 +37,14 @@ let
in
{
nix.registry.nixpkgs.flake.outPath = builtins.path { name = "source"; path = pkgs.path; };
# Pin the nixpkgs flake in the installer to our cleaned up nixpkgs source.
# FIXME: this might be surprising and is really only needed for offline installations,
# see discussion in https://github.com/NixOS/nixpkgs/pull/204178#issuecomment-1336289021
nix.registry.nixpkgs.to = {
type = "path";
path = nixpkgs;
};
# Provide the NixOS/Nixpkgs sources in /etc/nixos. This is required
# for nixos-install.
boot.postBootCommands = mkAfter

View file

@ -1,14 +1,17 @@
# This module defines a small NixOS installation CD. It does not
# contain any graphical stuff.
{ ... }:
{ lib, ... }:
{
imports =
[ ./installation-cd-base.nix
];
imports = [
../../profiles/minimal.nix
./installation-cd-base.nix
];
isoImage.edition = "minimal";
documentation.man.enable = lib.mkOverride 500 true;
fonts.fontconfig.enable = false;
fonts.fontconfig.enable = lib.mkForce false;
isoImage.edition = lib.mkForce "minimal";
}

View file

@ -1,10 +1,12 @@
# This module defines a small netboot environment.
{ ... }:
{ lib, ... }:
{
imports =
[ ./netboot-base.nix
../../profiles/minimal.nix
];
imports = [
./netboot-base.nix
../../profiles/minimal.nix
];
documentation.man.enable = lib.mkOverride 500 true;
}

View file

@ -1,7 +1,7 @@
{
x86_64-linux = "/nix/store/xdlpraypxdimjyfrr4k06narrv8nmfgh-nix-2.11.1";
i686-linux = "/nix/store/acghbpn3aaj2q64mz3ljipsgf9d9qxlp-nix-2.11.1";
aarch64-linux = "/nix/store/0lrf6danhdqjsrhala134ak8vn0b9ghj-nix-2.11.1";
x86_64-darwin = "/nix/store/60sx4c6xflgqk11gvijwzlsczbxgxgwh-nix-2.11.1";
aarch64-darwin = "/nix/store/dmk5m3nlqp1awaqrp1f06qhhkh3l102n-nix-2.11.1";
x86_64-linux = "/nix/store/h88w1442c7hzkbw8sgpcsbqp4lhz6l5p-nix-2.12.0";
i686-linux = "/nix/store/j23527l1c3hfx17nssc0v53sq6c741zs-nix-2.12.0";
aarch64-linux = "/nix/store/zgzmdymyh934y3r4vqh8z337ba4cwsjb-nix-2.12.0";
x86_64-darwin = "/nix/store/wnlrzllazdyg1nrw9na497p4w0m7i7mm-nix-2.12.0";
aarch64-darwin = "/nix/store/7n5yamgzg5dpp5vb6ipdqgfh6cf30wmn-nix-2.12.0";
}

View file

@ -48,10 +48,15 @@ let
};
scrubDerivations = namePrefix: pkgSet: mapAttrs
(name: value:
let wholeName = "${namePrefix}.${name}"; in
if isAttrs value then
let
wholeName = "${namePrefix}.${name}";
guard = lib.warn "Attempt to evaluate package ${wholeName} in option documentation; this is not supported and will eventually be an error. Use `mkPackageOption` or `literalExpression` instead.";
in if isAttrs value then
scrubDerivations wholeName value
// (optionalAttrs (isDerivation value) { outPath = "\${${wholeName}}"; })
// optionalAttrs (isDerivation value) {
outPath = guard "\${${wholeName}}";
drvPath = guard drvPath;
}
else value
)
pkgSet;

File diff suppressed because it is too large

View file

@ -20,7 +20,13 @@
pkgs.mkpasswd # for generating password files
# Some text editors.
pkgs.vim
(pkgs.vim.customize {
name = "vim";
vimrcConfig.packages.default = {
start = [ pkgs.vimPlugins.vim-nix ];
};
vimrcConfig.customRC = "syntax on";
})
# Some networking tools.
pkgs.fuse
@ -36,6 +42,7 @@
pkgs.smartmontools # for diagnosing hard disks
pkgs.pciutils
pkgs.usbutils
pkgs.nvme-cli
# Tools to create / manipulate filesystems.
pkgs.ntfsprogs # for resizing NTFS partitions

View file

@ -10,10 +10,20 @@ with lib;
documentation.enable = mkDefault false;
documentation.doc.enable = mkDefault false;
documentation.info.enable = mkDefault false;
documentation.man.enable = mkDefault false;
documentation.nixos.enable = mkDefault false;
programs.command-not-found.enable = mkDefault false;
services.logrotate.enable = mkDefault false;
services.udisks2.enable = mkDefault false;
xdg.autostart.enable = mkDefault false;
xdg.icons.enable = mkDefault false;
xdg.mime.enable = mkDefault false;

View file

@ -0,0 +1,16 @@
{ lib, config, pkgs, ... }:
with lib;
let
cfg = config.programs.bash.blesh;
in {
options = {
programs.bash.blesh.enable = mkEnableOption (mdDoc "blesh");
};
config = mkIf cfg.enable {
programs.bash.interactiveShellInit = mkBefore ''
source ${pkgs.blesh}/share/blesh/ble.sh
'';
};
meta.maintainers = with maintainers; [ laalsaas ];
}

View file

@ -5,6 +5,8 @@ with lib;
let
cfg = config.programs.firefox;
nmh = cfg.nativeMessagingHosts;
policyFormat = pkgs.formats.json { };
organisationInfo = ''
@ -15,15 +17,15 @@ let
given control of your browser, unless of course they also control your
NixOS configuration.
'';
in {
in
{
options.programs.firefox = {
enable = mkEnableOption (mdDoc "the Firefox web browser");
package = mkOption {
description = mdDoc "Firefox package to use.";
type = types.package;
default = pkgs.firefox;
description = mdDoc "Firefox package to use.";
defaultText = literalExpression "pkgs.firefox";
relatedPackages = [
"firefox"
@ -31,12 +33,12 @@ in {
"firefox-bin"
"firefox-devedition-bin"
"firefox-esr"
"firefox-esr-wayland"
"firefox-wayland"
];
};
policies = mkOption {
type = policyFormat.type;
default = { };
description = mdDoc ''
Group policies to install.
@ -48,43 +50,97 @@ in {
${organisationInfo}
'';
type = policyFormat.type;
default = {};
};
preferences = mkOption {
type = with types; attrsOf (oneOf [ bool int string ]);
default = { };
description = mdDoc ''
Preferences to set from `about://config`.
Preferences to set from `about:config`.
Some of these might be able to be configured more ergonomically
using policies.
${organisationInfo}
'';
type = with types; attrsOf (oneOf [ bool int string ]);
default = {};
};
preferencesStatus = mkOption {
type = types.enum [ "default" "locked" "user" "clear" ];
default = "locked";
description = mdDoc ''
The status of `firefox.preferences`.
`status` can assume the following values:
- `"default"`: Preferences appear as default.
- `"locked"`: Preferences appear as default and can't be changed.
- `"user"`: Preferences appear as changed.
- `"clear"`: Value has no effect. Resets to factory defaults on each startup.
'';
};
autoConfig = mkOption {
type = types.lines;
default = "";
description = mdDoc ''
AutoConfig files can be used to set and lock preferences that are not covered
by the policies.json for Mac and Linux. This method can be used to automatically
change user preferences or prevent the end user from modifying specific
preferences by locking them. More info can be found at https://support.mozilla.org/en-US/kb/customizing-firefox-using-autoconfig.
'';
};
nativeMessagingHosts = mapAttrs (_: v: mkEnableOption (mdDoc v)) {
browserpass = "Browserpass support";
bukubrow = "Bukubrow support";
ff2mpv = "ff2mpv support";
fxCast = "fx_cast support";
gsconnect = "GSConnect support";
jabref = "JabRef support";
passff = "PassFF support";
tridactyl = "Tridactyl support";
ugetIntegrator = "Uget Integrator support";
};
};
config = mkIf cfg.enable {
environment.systemPackages = [ cfg.package ];
environment.systemPackages = [
(cfg.package.override {
extraPrefs = cfg.autoConfig;
extraNativeMessagingHosts = with pkgs; optionals nmh.ff2mpv [
ff2mpv
] ++ optionals nmh.gsconnect [
gnomeExtensions.gsconnect
] ++ optionals nmh.jabref [
jabref
] ++ optionals nmh.passff [
passff-host
];
})
];
environment.etc."firefox/policies/policies.json".source =
let policiesJSON =
policyFormat.generate
"firefox-policies.json"
{ inherit (cfg) policies; };
in mkIf (cfg.policies != {}) "${policiesJSON}";
nixpkgs.config.firefox = {
enableBrowserpass = nmh.browserpass;
enableBukubrow = nmh.bukubrow;
enableTridactylNative = nmh.tridactyl;
enableUgetIntegrator = nmh.ugetIntegrator;
enableFXCastBridge = nmh.fxCast;
};
environment.etc =
let
policiesJSON = policyFormat.generate "firefox-policies.json" { inherit (cfg) policies; };
in
mkIf (cfg.policies != { }) {
"firefox/policies/policies.json".source = "${policiesJSON}";
};
# Preferences are converted into a policy
programs.firefox.policies =
mkIf (cfg.preferences != {})
{
Preferences = (mapAttrs (name: value: {
Value = value;
Status = "locked";
}) cfg.preferences);
};
programs.firefox.policies = mkIf (cfg.preferences != { }) {
Preferences = (mapAttrs
(_: value: { Value = value; Status = cfg.preferencesStatus; })
cfg.preferences);
};
};
meta.maintainers = with maintainers; [ danth ];

Some files were not shown because too many files have changed in this diff