Project import generated by Copybara.

GitOrigin-RevId: 4bb072f0a8b267613c127684e099a70e1f6ff106
This commit is contained in:
Default email 2023-03-27 12:17:25 -07:00
parent 410b979fe2
commit eaf6957cd3
1093 changed files with 370190 additions and 21055 deletions

View file

@ -12,7 +12,7 @@ jobs:
tf-providers:
permissions:
contents: write # for peter-evans/create-pull-request to create branch
pull-requests: write # for peter-evans/create-pull-request to create a PR, for peter-evans/create-or-update-comment to create or update comment
pull-requests: write # for peter-evans/create-pull-request to create a PR
if: github.repository_owner == 'NixOS' && github.ref == 'refs/heads/master' # ensure workflow_dispatch only runs on master
runs-on: ubuntu-latest
steps:
@ -36,6 +36,12 @@ jobs:
--argstr keep-going true \
--argstr max-workers 2 \
--argstr path terraform-providers
- name: get failed updates
run: |
echo 'FAILED<<EOF' >> $GITHUB_ENV
git ls-files --others >> $GITHUB_ENV
echo 'EOF' >> $GITHUB_ENV
# cleanup logs of failed updates so they aren't included in the PR
- name: clean repo
run: |
git clean -f
@ -47,10 +53,16 @@ jobs:
https://github.com/NixOS/nixpkgs/actions/runs/${{ github.run_id }}
These providers failed to update:
```
${{ env.FAILED }}
```
Check that all providers build with:
```
@ofborg build terraform.full
```
If there are more than ten commits in the PR, `ofborg` will not build it automatically and you will need to use the above command.
branch: terraform-providers-update
delete-branch: false
title: ${{ steps.setup.outputs.title }}

View file

@ -50,6 +50,11 @@ package. `cargoSha256` is used for traditional Nix SHA-256 hashes,
such as the one in the example above. `cargoHash` should instead be
used for [SRI](https://www.w3.org/TR/SRI/) hashes. For example:
Exception: If the application has cargo `git` dependencies, the `cargoHash`/`cargoSha256`
approach will not work, and you will need to copy the `Cargo.lock` file of the application
to nixpkgs and continue with the next section for specifying the options of the `cargoLock`
section.
```nix
cargoHash = "sha256-l1vL2ZdtDRxSGvP0X/l3nMw8+6WF67KPutJEzUROjg8=";
```
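For the exception noted above (cargo `git` dependencies), a minimal sketch of the `cargoLock` alternative might look like the following; the crate name, version, and hash are illustrative placeholders:
```nix
cargoLock = {
  lockFile = ./Cargo.lock;
  # Git dependencies carry no checksum in Cargo.lock, so their hashes must be
  # pinned here; the crate name/version and hash below are placeholders.
  outputHashes = {
    "some-git-crate-0.1.0" = "sha256-AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=";
  };
};
```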

View file

@ -176,7 +176,7 @@ rec {
# Only show the error for the first missing argument
error = errorForArg (lib.head missingArgs);
in if missingArgs == [] then makeOverridable f allArgs else throw error;
in if missingArgs == [] then makeOverridable f allArgs else abort error;
/* Like callPackage, but for a function that returns an attribute

View file

@ -1835,6 +1835,12 @@
fingerprint = "E9A3 7864 2165 28CE 507C CA82 72EA BF75 C331 CD25";
}];
};
benkuhn = {
email = "ben@ben-kuhn.com";
github = "ben-kuhn";
githubId = 16821405;
name = "Ben Kuhn";
};
benley = {
email = "benley@gmail.com";
github = "benley";
@ -2094,15 +2100,6 @@
githubId = 68566724;
name = "bootstrap-prime";
};
boppyt = {
email = "boppy@nwcpz.com";
github = "boppyt";
githubId = 71049646;
name = "Zack A";
keys = [{
fingerprint = "E8D7 5C19 9F65 269B 439D F77B 6310 C97D E31D 1545";
}];
};
borisbabic = {
email = "boris.ivan.babic@gmail.com";
github = "borisbabic";
@ -7433,6 +7430,13 @@
githubId = 7673602;
name = "Jonathan Ringer";
};
jopejoe1 = {
email = "johannes@joens.email";
matrix = "@jopejoe1:matrix.org";
github = "jopejoe1";
githubId = 34899572;
name = "Johannes Jöns";
};
jordanisaacs = {
name = "Jordan Isaacs";
email = "nix@jdisaacs.com";
@ -9354,6 +9358,12 @@
githubId = 458783;
name = "Martin Gammelsæter";
};
martinramm = {
email = "martin-ramm@gmx.de";
github = "MartinRamm";
githubId = 31626748;
name = "Martin Ramm";
};
marzipankaiser = {
email = "nixos@gaisseml.de";
github = "marzipankaiser";
@ -10320,6 +10330,12 @@
githubId = 3073833;
name = "Massimo Redaelli";
};
mrene = {
email = "mathieu.rene@gmail.com";
github = "mrene";
githubId = 254443;
name = "Mathieu Rene";
};
mrfreezeex = {
email = "arthur@cri.epita.fr";
github = "MrFreezeex";
@ -13604,6 +13620,16 @@
githubId = 17243347;
name = "Sebastian Sellmeier";
};
sefidel = {
name = "sefidel";
email = "contact@sefidel.net";
matrix = "@sef:exotic.sh";
github = "sefidel";
githubId = 71049646;
keys = [{
fingerprint = "8BDF DFB5 6842 2393 82A0 441B 9238 BC70 9E05 516A";
}];
};
sei40kr = {
name = "Seong Yong-ju";
email = "sei40kr@gmail.com";
@ -14178,6 +14204,16 @@
githubId = 6277322;
name = "Wei Tang";
};
soywod = {
name = "Clément DOUIN";
email = "clement.douin@posteo.net";
matrix = "@soywod:matrix.org";
github = "soywod";
githubId = 10437171;
keys = [{
fingerprint = "75F0 AB7C FE01 D077 AEE6 CAFD 353E 4A18 EE0F AB72";
}];
};
spacefrogg = {
email = "spacefrogg-nixos@meterriblecrew.net";
github = "spacefrogg";
@ -15151,6 +15187,12 @@
githubId = 1391883;
name = "Tom Hall";
};
thubrecht = {
email = "tom@hubrecht.ovh";
github = "Tom-Hubrecht";
githubId = 26650391;
name = "Tom Hubrecht";
};
Thunderbottom = {
email = "chinmaydpai@gmail.com";
github = "Thunderbottom";
@ -16153,12 +16195,6 @@
github = "wegank";
githubId = 9713184;
};
weihua = {
email = "luwh364@gmail.com";
github = "weihua-lu";
githubId = 9002575;
name = "Weihua Lu";
};
welteki = {
email = "welteki@pm.me";
github = "welteki";

View file

@ -0,0 +1,4 @@
#!/usr/bin/env nix-shell
#!nix-shell -I nixpkgs=. -i bash -p "import ./maintainers/scripts/convert-to-import-cargo-lock" nix-prefetch-git
convert-to-import-cargo-lock "$@"

View file

@ -0,0 +1 @@
/target

View file

@ -0,0 +1,106 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "anyhow"
version = "1.0.69"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "224afbd727c3d6e4b90103ece64b8d1b67fbb1973b1046c2281eed3f3803f800"
[[package]]
name = "basic-toml"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e819b667739967cd44d308b8c7b71305d8bb0729ac44a248aa08f33d01950b4"
dependencies = [
"serde",
]
[[package]]
name = "convert-to-import-cargo-lock"
version = "0.1.0"
dependencies = [
"anyhow",
"basic-toml",
"serde",
"serde_json",
]
[[package]]
name = "itoa"
version = "1.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
[[package]]
name = "proc-macro2"
version = "1.0.51"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
dependencies = [
"proc-macro2",
]
[[package]]
name = "ryu"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
[[package]]
name = "serde"
version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.152"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "serde_json"
version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cad406b69c91885b5107daf2c29572f6c8cdb3c66826821e286c533490c0bc76"
dependencies = [
"itoa",
"ryu",
"serde",
]
[[package]]
name = "syn"
version = "1.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"

View file

@ -0,0 +1,12 @@
[package]
name = "convert-to-import-cargo-lock"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
anyhow = { version = "1.0.69" }
basic-toml = "0.1.1"
serde = { version = "1.0.152", features = ["derive"] }
serde_json = "1.0.93"

View file

@ -0,0 +1,16 @@
with import ../../../. { };
rustPlatform.buildRustPackage {
name = "convert-to-import-cargo-lock";
src = lib.cleanSourceWith {
src = ./.;
filter = name: type:
let
name' = builtins.baseNameOf name;
in
name' != "default.nix" && name' != "target";
};
cargoLock.lockFile = ./Cargo.lock;
}

View file

@ -0,0 +1,5 @@
with import ../../../. { };
mkShell {
packages = [ rustc cargo clippy rustfmt ] ++ lib.optional stdenv.isDarwin libiconv;
}

View file

@ -0,0 +1,241 @@
#![warn(clippy::pedantic)]
#![allow(clippy::too_many_lines)]
use anyhow::anyhow;
use serde::Deserialize;
use std::{collections::HashMap, env, fs, path::PathBuf, process::Command};
#[derive(Deserialize)]
struct CargoLock<'a> {
#[serde(rename = "package", borrow)]
packages: Vec<Package<'a>>,
metadata: Option<HashMap<&'a str, &'a str>>,
}
#[derive(Deserialize)]
struct Package<'a> {
name: &'a str,
version: &'a str,
source: Option<&'a str>,
checksum: Option<&'a str>,
}
#[derive(Deserialize)]
struct PrefetchOutput {
sha256: String,
}
fn main() -> anyhow::Result<()> {
let mut hashes = HashMap::new();
let attr_count = env::args().len() - 1;
for (i, attr) in env::args().skip(1).enumerate() {
println!("converting {attr} ({}/{attr_count})", i + 1);
convert(&attr, &mut hashes)?;
}
Ok(())
}
fn convert(attr: &str, hashes: &mut HashMap<String, String>) -> anyhow::Result<()> {
let package_path = nix_eval(format!("{attr}.meta.position"))?
.and_then(|p| p.split_once(':').map(|(f, _)| PathBuf::from(f)));
if package_path.is_none() {
eprintln!("can't automatically convert {attr}: doesn't exist");
return Ok(());
}
let package_path = package_path.unwrap();
if package_path.with_file_name("Cargo.lock").exists() {
eprintln!("skipping {attr}: already has a vendored Cargo.lock");
return Ok(());
}
let mut src = PathBuf::from(
String::from_utf8(
Command::new("nix-build")
.arg("-A")
.arg(format!("{attr}.src"))
.output()?
.stdout,
)?
.trim(),
);
if !src.exists() {
eprintln!("can't automatically convert {attr}: src doesn't exist (bad attr?)");
return Ok(());
} else if !src.metadata()?.is_dir() {
eprintln!("can't automatically convert {attr}: src isn't a directory");
return Ok(());
}
if let Some(mut source_root) = nix_eval(format!("{attr}.sourceRoot"))?.map(PathBuf::from) {
source_root = source_root.components().skip(1).collect();
src.push(source_root);
}
let cargo_lock_path = src.join("Cargo.lock");
if !cargo_lock_path.exists() {
eprintln!("can't automatically convert {attr}: src doesn't contain Cargo.lock");
return Ok(());
}
let cargo_lock_content = fs::read_to_string(cargo_lock_path)?;
let cargo_lock: CargoLock = basic_toml::from_str(&cargo_lock_content)?;
let mut git_dependencies = Vec::new();
for package in cargo_lock.packages.iter().filter(|p| {
p.source.is_some()
&& p.checksum
.or_else(|| {
cargo_lock
.metadata
.as_ref()?
.get(
format!("checksum {} {} ({})", p.name, p.version, p.source.unwrap())
.as_str(),
)
.copied()
})
.is_none()
}) {
let (typ, original_url) = package
.source
.unwrap()
.split_once('+')
.expect("dependency should have well-formed source url");
if let Some(hash) = hashes.get(original_url) {
continue;
}
assert_eq!(
typ, "git",
"packages without checksums should be git dependencies"
);
let (mut url, rev) = original_url
.split_once('#')
.expect("git dependency should have commit");
// TODO: improve
if let Some((u, _)) = url.split_once('?') {
url = u;
}
let prefetch_output: PrefetchOutput = serde_json::from_slice(
&Command::new("nix-prefetch-git")
.args(["--url", url, "--rev", rev, "--quiet", "--fetch-submodules"])
.output()?
.stdout,
)?;
let output_hash = String::from_utf8(
Command::new("nix")
.args([
"--extra-experimental-features",
"nix-command",
"hash",
"to-sri",
"--type",
"sha256",
&prefetch_output.sha256,
])
.output()?
.stdout,
)?;
let hash = output_hash.trim().to_string();
git_dependencies.push((
format!("{}-{}", package.name, package.version),
output_hash.trim().to_string().clone(),
));
hashes.insert(original_url.to_string(), hash);
}
fs::write(
package_path.with_file_name("Cargo.lock"),
cargo_lock_content,
)?;
let mut package_lines: Vec<_> = fs::read_to_string(&package_path)?
.lines()
.map(String::from)
.collect();
let (cargo_deps_line_index, cargo_deps_line) = package_lines
.iter_mut()
.enumerate()
.find(|(_, l)| {
l.trim_start().starts_with("cargoHash") || l.trim_start().starts_with("cargoSha256")
})
.expect("package should contain cargoHash/cargoSha256");
let spaces = " ".repeat(cargo_deps_line.len() - cargo_deps_line.trim_start().len());
if git_dependencies.is_empty() {
*cargo_deps_line = format!("{spaces}cargoLock.lockFile = ./Cargo.lock;");
} else {
*cargo_deps_line = format!("{spaces}cargoLock = {{");
let mut index_iter = cargo_deps_line_index + 1..;
package_lines.insert(
index_iter.next().unwrap(),
format!("{spaces} lockFile = ./Cargo.lock;"),
);
package_lines.insert(
index_iter.next().unwrap(),
format!("{spaces} outputHashes = {{"),
);
for ((dep, hash), index) in git_dependencies.drain(..).zip(&mut index_iter) {
package_lines.insert(index, format!("{spaces} {dep:?} = {hash:?};"));
}
package_lines.insert(index_iter.next().unwrap(), format!("{spaces} }};"));
package_lines.insert(index_iter.next().unwrap(), format!("{spaces}}};"));
}
if package_lines.last().map(String::as_str) != Some("") {
package_lines.push(String::new());
}
fs::write(package_path, package_lines.join("\n"))?;
Ok(())
}
fn nix_eval(attr: impl AsRef<str>) -> anyhow::Result<Option<String>> {
let output = String::from_utf8(
Command::new("nix-instantiate")
.args(["--eval", "-A", attr.as_ref()])
.output()?
.stdout,
)?;
let trimmed = output.trim();
if trimmed.is_empty() || trimmed == "null" {
Ok(None)
} else {
Ok(Some(
trimmed
.strip_prefix('"')
.and_then(|p| p.strip_suffix('"'))
.ok_or_else(|| anyhow!("couldn't parse nix-instantiate output: {output:?}"))?
.to_string(),
))
}
}

View file

@ -221,6 +221,7 @@ with lib.maintainers; {
docs = {
members = [
asymmetric
ryantm
];
scope = "Maintain nixpkgs/NixOS documentation and tools for building it.";
@ -424,11 +425,14 @@ with lib.maintainers; {
llvm = {
members = [
ericson2314
sternenseemann
lovek323
dtzWill
ericson2314
lovek323
primeos
qyliss
raitobezarius
rrbutani
sternenseemann
];
scope = "Maintain LLVM package sets and related packages";
shortName = "LLVM";

View file

@ -12,12 +12,18 @@ In addition to numerous new and upgraded packages, this release has the followin
- default linux: 5.15 -\> 6.1, all supported kernels available
- systemd has been updated to v253.1, see [the pull request](https://github.com/NixOS/nixpkgs/pull/216826) for more info.
It's recommended to use `nixos-rebuild boot` and `reboot` rather than `nixos-rebuild switch`, since in some rare cases
switching a live system might fail.
- Cinnamon has been updated to 5.6, see [the pull request](https://github.com/NixOS/nixpkgs/pull/201328#issue-1449910204) for what has changed.
- KDE Plasma has been updated to v5.27, see [the release notes](https://kde.org/announcements/plasma/5/5.27.0/) for what has changed.
- `nixos-rebuild` now supports an extra `--specialisation` option that can be used to change specialisation for `switch` and `test` commands.
- `libxcrypt`, the library providing the `crypt(3)` password hashing function, is now built without support for algorithms not flagged [`strong`](https://github.com/besser82/libxcrypt/blob/v4.4.33/lib/hashes.conf#L48). This affects the availability of password hashing algorithms used for system login (`login(1)`, `passwd(1)`), but also Apache2 Basic-Auth, Samba, OpenLDAP, Dovecot, and [many other packages](https://github.com/search?q=repo%3ANixOS%2Fnixpkgs%20libxcrypt&type=code).
## New Services {#sec-release-23.05-new-services}
<!-- To avoid merge conflicts, consider adding your item at an arbitrary place in the list instead. -->
@ -45,6 +51,8 @@ In addition to numerous new and upgraded packages, this release has the followin
- [opensearch](https://opensearch.org), a search server alternative to Elasticsearch. Available as [services.opensearch](options.html#opt-services.opensearch.enable).
- [authelia](https://www.authelia.com/), an open-source authentication and authorization server. Available under [services.authelia](options.html#opt-services.authelia.enable).
- [goeland](https://github.com/slurdge/goeland), an alternative to rss2email written in golang with many filters. Available as [services.goeland](#opt-services.goeland.enable).
- [alertmanager-irc-relay](https://github.com/google/alertmanager-irc-relay), a Prometheus Alertmanager IRC Relay. Available as [services.prometheus.alertmanagerIrcRelay](options.html#opt-services.prometheus.alertmanagerIrcRelay.enable).
@ -77,7 +85,7 @@ In addition to numerous new and upgraded packages, this release has the followin
- [nimdow](https://github.com/avahe-kellenberger/nimdow), a window manager written in Nim, inspired by dwm.
- [woodpecker-agent](https://woodpecker-ci.org/), a simple CI engine with great extensibility. Available as [services.woodpecker-agent](#opt-services.woodpecker-agent.enable).
- [woodpecker-agents](https://woodpecker-ci.org/), a simple CI engine with great extensibility. Available as [services.woodpecker-agents](#opt-services.woodpecker-agents.agents._name_.enable).
- [woodpecker-server](https://woodpecker-ci.org/), a simple CI engine with great extensibility. Available as [services.woodpecker-server](#opt-services.woodpecker-server.enable).
@ -154,6 +162,8 @@ In addition to numerous new and upgraded packages, this release has the followin
- Nebula now runs as a system user and group created for each nebula network, using the `CAP_NET_ADMIN` ambient capability on launch rather than starting as root. Ensure that any files each Nebula instance needs to access are owned by the correct user and group, by default `nebula-${networkName}`.
- The `i18n.inputMethod.fcitx` option has been replaced with `i18n.inputMethod.fcitx5` because fcitx 4 `pkgs.fcitx` has been removed.
- In `mastodon` it is now necessary to specify the location of the file containing the `PostgreSQL` database password. The default value of the `services.mastodon.database.passwordFile` parameter has been changed from `/var/lib/mastodon/secrets/db-password` to `null`.
- The `--target-host` and `--build-host` options of `nixos-rebuild` no longer treat the `localhost` value specially. To build on or deploy to the local machine, omit the relevant flag.
@ -179,12 +189,16 @@ In addition to numerous new and upgraded packages, this release has the followin
- conntrack helper autodetection has been removed from kernels 6.0 and up upstream, and an assertion was added to ensure things don't silently stop working. Migrate your configuration to assign helpers explicitly or use an older LTS kernel branch as a temporary workaround.
- The `services.pipewire.config` options have been removed, as they have basically never worked correctly. All behavior defined by the default configuration can be overridden with drop-in files as necessary - see [below](#sec-release-23.05-migration-pipewire) for details.
- The catch-all `hardware.video.hidpi.enable` option was removed. Users on high density displays may want to:
- Set `services.xserver.upscaleDefaultCursor` to upscale the default X11 cursor for higher resolutions
- Adjust settings under `fonts.fontconfig` according to preference
- Adjust `console.font` according to preference, though the kernel will generally choose a reasonably sized font
- `services.pipewire.media-session` and the `pipewire-media-session` package have been removed, as they are no longer supported upstream. Users are encouraged to use `services.pipewire.wireplumber` instead.
- The `baget` package and module were removed due to being unmaintained.
## Other Notable Changes {#sec-release-23.05-notable-changes}
@ -245,6 +259,8 @@ In addition to numerous new and upgraded packages, this release has the followin
If undesired, the old behavior can be restored by overriding the builders with
`{ installDocumentation = false; }`.
- The new option `networking.nftables.checkRuleset` controls whether the ruleset is checked for syntax errors during build. It is `true` by default. Because the check runs in a sandboxed environment, it might fail; to work around this, the ruleset file can be edited using the `networking.nftables.preCheckRuleset` option (see the sketch after this list).
- `mastodon` now supports connection to a remote `PostgreSQL` database.
- `nextcloud` has an option to enable SSE-C in S3.
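As referenced above, a minimal sketch of the new nftables check options; the `sed` preprocessing step is a hypothetical example of adjusting the ruleset so the sandboxed check can pass:
```nix
networking.nftables = {
  enable = true;
  # Validate the ruleset syntax at build time (the default).
  checkRuleset = true;
  # Hypothetical preprocessing step: rewrite a group that only exists on the
  # target machine so the check can succeed inside the build sandbox.
  preCheckRuleset = ''
    sed 's/skgid mygroup/skgid nogroup/g' -i ruleset.conf
  '';
};
```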
@ -335,3 +351,87 @@ In addition to numerous new and upgraded packages, this release has the followin
- The option `services.prometheus.exporters.pihole.interval` does not exist anymore and has been removed.
- `k3s` can now be configured with an EnvironmentFile for its systemd service, allowing secrets to be provided without ending up in the Nix Store.
- `boot.initrd.luks.devices.<name>` has a new `tryEmptyPassphrase` option. This is useful for OEMs who need to install an encrypted disk whose passphrase will be set later (see the sketch below).
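A minimal sketch of the new option; the device name and UUID are placeholders:
```nix
boot.initrd.luks.devices."cryptroot" = {
  device = "/dev/disk/by-uuid/00000000-0000-0000-0000-000000000000"; # placeholder UUID
  # Try unlocking with an empty passphrase first; useful for OEM installs where
  # the real passphrase is set by the end user later.
  tryEmptyPassphrase = true;
};
```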
## Detailed migration information {#sec-release-23.05-migration}
### Pipewire configuration overrides {#sec-release-23.05-migration-pipewire}
#### Why this change? {#sec-release-23.05-migration-pipewire-why}
The Pipewire config semantics don't really match the NixOS module semantics, so it's extremely awkward to override the default config, especially when lists are involved. Vendoring the configuration files in nixpkgs also creates unnecessary maintenance overhead.
Also, upstream added a lot of accommodations to allow doing most of the things you'd want to do with a config edit in better ways.
#### Migrating your configuration {#sec-release-23.05-migration-pipewire-how}
Compare your settings to [the defaults](https://gitlab.freedesktop.org/pipewire/pipewire/-/tree/master/src/daemon) and determine where your configuration differs from them.
Then, create a drop-in JSON file in `/etc/pipewire/<config file name>.d/99-custom.conf` (the actual filename can be anything) and migrate your changes to it according to the following sections.
Repeat for every file you've modified, changing the directory name accordingly.
#### Things you can just copy over {#sec-release-23.05-migration-pipewire-simple}
If you are:
- setting properties via `*.properties`
- loading a new module to `context.modules`
- creating new objects with `context.objects`
- declaring SPA libraries with `context.spa-libs`
- running custom commands with `context.exec`
- adding new rules with `*.rules`
- running custom PulseAudio commands with `pulse.cmd`
Simply move the definitions into the drop-in.
Note that the use of `context.exec` is not recommended and other methods of running your thing are likely a better option.
```json
{
"context.properties": {
"your.property.name": "your.property.value"
},
"context.modules": [
{ "name": "libpipewire-module-my-cool-thing" }
],
"context.objects": [
{ "factory": { ... } }
],
"alsa.rules": [
{ "matches: { ... }, "actions": { ... } }
]
}
```
#### Removing a module from `context.modules` {#sec-release-23.05-migration-pipewire-removing-modules}
Look for an option to disable it via `context.properties` (`"module.x11.bell": "false"` is likely the most common use case here).
If one is not available, proceed to [Nuclear option](#sec-release-23.05-migration-pipewire-nuclear).
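For instance, on NixOS such a drop-in could be generated declaratively; a minimal sketch assuming the `environment.etc` mechanism and the `module.x11.bell` property mentioned above (the file name is arbitrary):
```nix
# Drop-in placed under /etc/pipewire/pipewire.conf.d/; the file name is arbitrary.
environment.etc."pipewire/pipewire.conf.d/99-disable-x11-bell.conf".text = builtins.toJSON {
  "context.properties" = {
    "module.x11.bell" = "false";
  };
};
```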
#### Modifying a module's parameters in `context.modules` {#sec-release-23.05-migration-pipewire-modifying-modules}
For most modules (e.g. `libpipewire-module-rt`) it's enough to load the module again with the new arguments, e.g.:
```json
{
"context.modules": [
{
"name": "libpipewire-module-rt",
"args": {
"rt.prio": 90
}
}
]
}
```
Note that `module-rt` specifically will generally use the highest values available by default, so setting limits on the `pipewire` systemd service is preferable to reloading.
If reloading the module is not an option, proceed to [Nuclear option](#sec-release-23.05-migration-pipewire-nuclear).
#### Nuclear option {#sec-release-23.05-migration-pipewire-nuclear}
If all else fails, you can still manually copy the contents of the default configuration file
from `${pkgs.pipewire.lib}/share/pipewire` to `/etc/pipewire` and edit it to fully override the default.
However, this should be done only as a last resort. Please talk to the Pipewire maintainers if you ever need to do this.

View file

@ -539,7 +539,9 @@ in {
###### implementation
config = {
config = let
cryptSchemeIdPatternGroup = "(${lib.concatStringsSep "|" pkgs.libxcrypt.enabledCryptSchemeIds})";
in {
users.users = {
root = {
@ -601,15 +603,16 @@ in {
text = ''
users=()
while IFS=: read -r user hash tail; do
if [[ "$hash" = "$"* && ! "$hash" =~ ^\$(y|gy|7|2b|2y|2a|6)\$ ]]; then
if [[ "$hash" = "$"* && ! "$hash" =~ ^\''$${cryptSchemeIdPatternGroup}\$ ]]; then
users+=("$user")
fi
done </etc/shadow
if (( "''${#users[@]}" )); then
echo "
WARNING: The following user accounts rely on password hashes that will
be removed in NixOS 23.05. They should be renewed as soon as possible."
WARNING: The following user accounts rely on password hashing algorithms
that have been removed. They need to be renewed as soon as possible, as
they do prevent their users from logging in."
printf ' - %s\n' "''${users[@]}"
fi
'';
@ -699,7 +702,20 @@ in {
users.groups.${user.name} = {};
'';
}
]
] ++ (map (shell: {
assertion = (user.shell == pkgs.${shell}) -> (config.programs.${shell}.enable == true);
message = ''
users.users.${user.name}.shell is set to ${shell}, but
programs.${shell}.enable is not true. This will cause the ${shell}
shell to lack the basic nix directories in its PATH and might make
logging in as that user impossible. You can fix it with:
programs.${shell}.enable = true;
'';
}) [
"fish"
"xonsh"
"zsh"
])
));
warnings =
@ -716,7 +732,7 @@ in {
let
sep = "\\$";
base64 = "[a-zA-Z0-9./]+";
id = "[a-z0-9-]+";
id = cryptSchemeIdPatternGroup;
value = "[a-zA-Z0-9/+.-]+";
options = "${id}(=${value})?(,${id}=${value})*";
scheme = "${id}(${sep}${options})?";

View file

@ -69,21 +69,50 @@ in
package = mkOption {
type = types.package;
internal = true;
default = cfg.mesaPackage;
description = lib.mdDoc ''
The package that provides the OpenGL implementation.
The default is Mesa's drivers, which should cover all OpenGL-capable
hardware. If you want to use another Mesa version, adjust
{option}`mesaPackage`.
'';
};
package32 = mkOption {
type = types.package;
internal = true;
default = cfg.mesaPackage32;
description = lib.mdDoc ''
The package that provides the 32-bit OpenGL implementation on
64-bit systems. Used when {option}`driSupport32Bit` is
set.
Same as {option}`package` but for the 32-bit OpenGL implementation on
64-bit systems. Used when {option}`driSupport32Bit` is set.
'';
};
mesaPackage = mkOption {
type = types.package;
default = pkgs.mesa_23;
defaultText = literalExpression "pkgs.mesa_23";
example = literalExpression "pkgs.mesa_22";
description = lib.mdDoc ''
The Mesa driver package used for rendering support on the system.
You should only need to adjust this if you require a newer Mesa
version for your hardware or because you need to patch a bug.
'';
apply = mesa: mesa.drivers or (throw "`mesa` package must have a `drivers` output.");
};
mesaPackage32 = mkOption {
type = types.package;
default = pkgs.pkgsi686Linux.mesa_23;
defaultText = literalExpression "pkgs.pkgsi686Linux.mesa_23";
example = literalExpression "pkgs.pkgsi686Linux.mesa_22";
description = lib.mdDoc ''
Same as {option}`mesaPackage` but for the 32-bit Mesa on 64-bit
systems. Used when {option}`driSupport32Bit` is set.
'';
apply = mesa: mesa.drivers or (throw "`mesa` package must have a `drivers` output.");
};
extraPackages = mkOption {
type = types.listOf types.package;
default = [];
@ -97,7 +126,6 @@ in
:::
'';
};
extraPackages32 = mkOption {
type = types.listOf types.package;
default = [];
@ -153,9 +181,6 @@ in
environment.sessionVariables.LD_LIBRARY_PATH = mkIf cfg.setLdLibraryPath
([ "/run/opengl-driver/lib" ] ++ optional cfg.driSupport32Bit "/run/opengl-driver-32/lib");
hardware.opengl.package = mkDefault pkgs.mesa.drivers;
hardware.opengl.package32 = mkDefault pkgs.pkgsi686Linux.mesa.drivers;
boot.extraModulePackages = optional (elem "virtualbox" videoDrivers) kernelPackages.virtualboxGuestAdditions;
};
}

View file

@ -9,7 +9,7 @@ than there are keys on the keyboard.
The following input methods are available in NixOS:
- IBus: The intelligent input bus.
- Fcitx: A customizable lightweight input method.
- Fcitx5: The next generation of fcitx, addons (including engines, dictionaries, skins) can be added using `i18n.inputMethod.fcitx5.addons`.
- Nabi: A Korean input method based on XIM.
- Uim: The universal input method, a library with an XIM bridge.
- Hime: An extremely easy-to-use input method framework.
@ -67,38 +67,40 @@ application in the Nix store. The `glib` packages must
match exactly. If they do not, uninstalling and reinstalling the
application is a likely fix.
## Fcitx {#module-services-input-methods-fcitx}
## Fcitx5 {#module-services-input-methods-fcitx}
Fcitx is an input method framework with extension support. It has three
Fcitx5 is an input method framework with extension support. It has three
built-in input method engines: Pinyin, QuWei, and table-based input methods.
The following snippet can be used to configure Fcitx:
```
i18n.inputMethod = {
enabled = "fcitx";
fcitx.engines = with pkgs.fcitx-engines; [ mozc hangul m17n ];
enabled = "fcitx5";
fcitx5.addons = with pkgs; [ fcitx5-mozc fcitx5-hangul fcitx5-m17n ];
};
```
`i18n.inputMethod.fcitx.engines` is optional and can be
used to add extra Fcitx engines.
`i18n.inputMethod.fcitx5.addons` is optional and can be
used to add extra Fcitx5 addons.
Available extra Fcitx engines are:
Available extra Fcitx5 addons are:
- Anthy (`fcitx-engines.anthy`): Anthy is a system for
- Anthy (`fcitx5-anthy`): Anthy is a system for
Japanese input. It converts Hiragana text to mixed Kana-Kanji text.
- Chewing (`fcitx-engines.chewing`): Chewing is an
- Chewing (`fcitx5-chewing`): Chewing is an
intelligent Zhuyin input method. It is one of the most popular input
methods among Traditional Chinese Unix users.
- Hangul (`fcitx-engines.hangul`): Korean input method.
- Unikey (`fcitx-engines.unikey`): Vietnamese input method.
- m17n (`fcitx-engines.m17n`): m17n is an input method that
- Hangul (`fcitx5-hangul`): Korean input method.
- Unikey (`fcitx5-unikey`): Vietnamese input method.
- m17n (`fcitx5-m17n`): m17n is an input method that
uses input methods and corresponding icons in the m17n database.
- mozc (`fcitx-engines.mozc`): A Japanese input method from
- mozc (`fcitx5-mozc`): A Japanese input method from
Google.
- table-others (`fcitx-engines.table-others`): Various
- table-others (`fcitx5-table-other`): Various
table-based input methods.
- chinese-addons (`fcitx5-chinese-addons`): Various Chinese input methods.
- rime (`fcitx5-rime`): RIME support for Fcitx5.
## Nabi {#module-services-input-methods-nabi}

View file

@ -29,9 +29,9 @@ in
options.i18n = {
inputMethod = {
enabled = mkOption {
type = types.nullOr (types.enum [ "ibus" "fcitx" "fcitx5" "nabi" "uim" "hime" "kime" ]);
type = types.nullOr (types.enum [ "ibus" "fcitx5" "nabi" "uim" "hime" "kime" ]);
default = null;
example = "fcitx";
example = "fcitx5";
description = lib.mdDoc ''
Select the enabled input method. Input methods are software for entering symbols that are not available on standard input devices.
@ -40,7 +40,6 @@ in
Currently the following input methods are available in NixOS:
- ibus: The intelligent input bus, extra input engines can be added using `i18n.inputMethod.ibus.engines`.
- fcitx: A customizable lightweight input method, extra input engines can be added using `i18n.inputMethod.fcitx.engines`.
- fcitx5: The next generation of fcitx, addons (including engines, dictionaries, skins) can be added using `i18n.inputMethod.fcitx5.addons`.
- nabi: A Korean input method based on XIM. Nabi doesn't support Qt 5.
- uim: The universal input method, a library with an XIM bridge. uim mainly supports Chinese, Japanese and Korean.

View file

@ -1,46 +0,0 @@
{ config, pkgs, lib, ... }:
with lib;
let
cfg = config.i18n.inputMethod.fcitx;
fcitxPackage = pkgs.fcitx.override { plugins = cfg.engines; };
fcitxEngine = types.package // {
name = "fcitx-engine";
check = x: (lib.types.package.check x) && (attrByPath ["meta" "isFcitxEngine"] false x);
};
in
{
options = {
i18n.inputMethod.fcitx = {
engines = mkOption {
type = with types; listOf fcitxEngine;
default = [];
example = literalExpression "with pkgs.fcitx-engines; [ mozc hangul ]";
description =
let
enginesDrv = filterAttrs (const isDerivation) pkgs.fcitx-engines;
engines = concatStringsSep ", "
(map (name: "`${name}`") (attrNames enginesDrv));
in
lib.mdDoc "Enabled Fcitx engines. Available engines are: ${engines}.";
};
};
};
config = mkIf (config.i18n.inputMethod.enabled == "fcitx") {
i18n.inputMethod.package = fcitxPackage;
environment.variables = {
GTK_IM_MODULE = "fcitx";
QT_IM_MODULE = "fcitx";
XMODIFIERS = "@im=fcitx";
};
services.xserver.displayManager.sessionCommands = "${fcitxPackage}/bin/fcitx";
};
# uses attributes of the linked package
meta.buildDocsInSandbox = false;
}

View file

@ -21,6 +21,9 @@ with lib;
# ISO naming.
isoImage.isoName = "${config.isoImage.isoBaseName}-${config.system.nixos.label}-${pkgs.stdenv.hostPlatform.system}.iso";
# BIOS booting
isoImage.makeBiosBootable = true;
# EFI booting
isoImage.makeEfiBootable = true;

View file

@ -535,10 +535,17 @@ in
'';
};
isoImage.makeBiosBootable = mkOption {
default = false;
description = lib.mdDoc ''
Whether the ISO image should be a BIOS-bootable disk.
'';
};
isoImage.makeEfiBootable = mkOption {
default = false;
description = lib.mdDoc ''
Whether the ISO image should be an efi-bootable volume.
Whether the ISO image should be an EFI-bootable volume.
'';
};
@ -693,7 +700,7 @@ in
boot.loader.grub.enable = false;
environment.systemPackages = [ grubPkgs.grub2 grubPkgs.grub2_efi ]
++ optional canx86BiosBoot pkgs.syslinux
++ optional (config.isoImage.makeBiosBootable && canx86BiosBoot) pkgs.syslinux
;
# In stage 1 of the boot, mount the CD as the root FS by label so
@ -744,7 +751,7 @@ in
{ source = pkgs.writeText "version" config.system.nixos.label;
target = "/version.txt";
}
] ++ optionals canx86BiosBoot [
] ++ optionals (config.isoImage.makeBiosBootable && canx86BiosBoot) [
{ source = config.isoImage.splashImage;
target = "/isolinux/background.png";
}
@ -771,7 +778,7 @@ in
{ source = config.isoImage.efiSplashImage;
target = "/EFI/boot/efi-background.png";
}
] ++ optionals (config.boot.loader.grub.memtest86.enable && canx86BiosBoot) [
] ++ optionals (config.boot.loader.grub.memtest86.enable && config.isoImage.makeBiosBootable && canx86BiosBoot) [
{ source = "${pkgs.memtest86plus}/memtest.bin";
target = "/boot/memtest.bin";
}
@ -786,10 +793,10 @@ in
# Create the ISO image.
system.build.isoImage = pkgs.callPackage ../../../lib/make-iso9660-image.nix ({
inherit (config.isoImage) isoName compressImage volumeID contents;
bootable = canx86BiosBoot;
bootable = config.isoImage.makeBiosBootable && canx86BiosBoot;
bootImage = "/isolinux/isolinux.bin";
syslinux = if canx86BiosBoot then pkgs.syslinux else null;
} // optionalAttrs (config.isoImage.makeUsbBootable && canx86BiosBoot) {
syslinux = if config.isoImage.makeBiosBootable && canx86BiosBoot then pkgs.syslinux else null;
} // optionalAttrs (config.isoImage.makeUsbBootable && config.isoImage.makeBiosBootable && canx86BiosBoot) {
usbBootable = true;
isohybridMbrImage = "${pkgs.syslinux}/share/syslinux/isohdpfx.bin";
} // optionalAttrs config.isoImage.makeEfiBootable {

View file

@ -103,7 +103,6 @@
./hardware/xone.nix
./hardware/xpadneo.nix
./i18n/input-method/default.nix
./i18n/input-method/fcitx.nix
./i18n/input-method/fcitx5.nix
./i18n/input-method/hime.nix
./i18n/input-method/ibus.nix
@ -377,7 +376,7 @@
./services/continuous-integration/jenkins/default.nix
./services/continuous-integration/jenkins/job-builder.nix
./services/continuous-integration/jenkins/slave.nix
./services/continuous-integration/woodpecker/agent.nix
./services/continuous-integration/woodpecker/agents.nix
./services/continuous-integration/woodpecker/server.nix
./services/databases/aerospike.nix
./services/databases/cassandra.nix
@ -431,7 +430,6 @@
./services/desktops/gvfs.nix
./services/desktops/malcontent.nix
./services/desktops/neard.nix
./services/desktops/pipewire/pipewire-media-session.nix
./services/desktops/pipewire/pipewire.nix
./services/desktops/pipewire/wireplumber.nix
./services/desktops/profile-sync-daemon.nix
@ -1212,6 +1210,7 @@
./services/web-apps/zabbix.nix
./services/web-servers/agate.nix
./services/web-servers/apache-httpd/default.nix
./services/web-servers/authelia.nix
./services/web-servers/caddy/default.nix
./services/web-servers/darkhttpd.nix
./services/web-servers/fcgiwrap.nix

View file

@ -240,7 +240,7 @@ in
[
./known_hosts
(writeText "github.keys" '''
github.com ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ==
github.com ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk=
github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg=
github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl
''')

View file

@ -33,7 +33,7 @@ in {
security.wrappers.dumpcap = {
source = "${wireshark}/bin/dumpcap";
capabilities = "cap_net_raw+p";
capabilities = "cap_net_raw,cap_net_admin+eip";
owner = "root";
group = "wireshark";
permissions = "u+rx,g+x";

View file

@ -107,6 +107,8 @@ with lib;
(mkRemovedOptionModule [ "services" "riak" ] "The corresponding package was removed from nixpkgs.")
(mkRemovedOptionModule [ "services" "cryptpad" ] "The corresponding package was removed from nixpkgs.")
(mkRemovedOptionModule [ "i18n" "inputMethod" "fcitx" ] "The fcitx module has been removed. Plesae use fcitx5 instead")
# Do NOT add any option renames here, see top of the file
];
}

View file

@ -620,7 +620,7 @@ let
optionalString config.services.homed.enable ''
password sufficient ${config.systemd.package}/lib/security/pam_systemd_home.so
'' + ''
password sufficient pam_unix.so nullok sha512
password sufficient pam_unix.so nullok yescrypt
'' +
optionalString config.security.pam.enableEcryptfs ''
password optional ${pkgs.ecryptfs}/lib/security/pam_ecryptfs.so

View file

@ -1,99 +0,0 @@
{ config
, lib
, pkgs
, ...
}:
let
cfg = config.services.woodpecker-agent;
in
{
meta.maintainers = [ lib.maintainers.janik ];
options = {
services.woodpecker-agent = {
enable = lib.mkEnableOption (lib.mdDoc "the Woodpecker-Agent, Agents execute tasks generated by a Server, every install will need one server and at least one agent");
package = lib.mkPackageOptionMD pkgs "woodpecker-agent" { };
environment = lib.mkOption {
default = { };
type = lib.types.attrsOf lib.types.str;
example = lib.literalExpression ''
{
WOODPECKER_SERVER = "localhost:9000";
WOODPECKER_BACKEND = "docker";
DOCKER_HOST = "unix:///run/podman/podman.sock";
}
'';
description = lib.mdDoc "woodpecker-agent config envrionment variables, for other options read the [documentation](https://woodpecker-ci.org/docs/administration/agent-config)";
};
extraGroups = lib.mkOption {
default = null;
type = lib.types.nullOr (lib.types.listOf lib.types.str);
example = [ "podman" ];
description = lib.mdDoc ''
Additional groups for the systemd service.
'';
};
environmentFile = lib.mkOption {
type = lib.types.nullOr lib.types.path;
default = null;
example = "/root/woodpecker-agent.env";
description = lib.mdDoc ''
File to load environment variables
from. This is helpful for specifying secrets.
Example content of environmentFile:
```
WOODPECKER_AGENT_SECRET=your-shared-secret-goes-here
```
'';
};
};
};
config = lib.mkIf cfg.enable {
systemd.services = {
woodpecker-agent = {
description = "Woodpecker-Agent Service";
wantedBy = [ "multi-user.target" ];
after = [ "network-online.target" ];
wants = [ "network-online.target" ];
serviceConfig = {
DynamicUser = true;
SupplementaryGroups = lib.optionals (cfg.extraGroups != null) cfg.extraGroups;
EnvironmentFile = lib.optional (cfg.environmentFile != null) cfg.environmentFile;
ExecStart = "${cfg.package}/bin/woodpecker-agent";
Restart = "on-failure";
RestartSec = 15;
CapabilityBoundingSet = "";
# Security
NoNewPrivileges = true;
# Sandboxing
ProtectSystem = "strict";
PrivateTmp = true;
PrivateDevices = true;
PrivateUsers = true;
ProtectHostname = true;
ProtectClock = true;
ProtectKernelTunables = true;
ProtectKernelModules = true;
ProtectKernelLogs = true;
ProtectControlGroups = true;
RestrictAddressFamilies = [ "AF_UNIX AF_INET AF_INET6" ];
LockPersonality = true;
MemoryDenyWriteExecute = true;
RestrictRealtime = true;
RestrictSUIDSGID = true;
PrivateMounts = true;
# System Call Filtering
SystemCallArchitectures = "native";
SystemCallFilter = "~@clock @privileged @cpu-emulation @debug @keyring @module @mount @obsolete @raw-io @reboot @setuid @swap";
};
inherit (cfg) environment;
};
};
};
}

View file

@ -0,0 +1,144 @@
{ config
, lib
, pkgs
, ...
}:
let
cfg = config.services.woodpecker-agents;
agentModule = lib.types.submodule {
options = {
enable = lib.mkEnableOption (lib.mdDoc "this Woodpecker-Agent. Agents execute tasks generated by a Server, every install will need one server and at least one agent");
package = lib.mkPackageOptionMD pkgs "woodpecker-agent" { };
environment = lib.mkOption {
default = { };
type = lib.types.attrsOf lib.types.str;
example = lib.literalExpression ''
{
WOODPECKER_SERVER = "localhost:9000";
WOODPECKER_BACKEND = "docker";
DOCKER_HOST = "unix:///run/podman/podman.sock";
}
'';
description = lib.mdDoc "woodpecker-agent config envrionment variables, for other options read the [documentation](https://woodpecker-ci.org/docs/administration/agent-config)";
};
extraGroups = lib.mkOption {
type = lib.types.listOf lib.types.str;
default = [ ];
example = [ "podman" ];
description = lib.mdDoc ''
Additional groups for the systemd service.
'';
};
environmentFile = lib.mkOption {
type = lib.types.listOf lib.types.path;
default = [ ];
example = [ "/var/secrets/woodpecker-agent.env" ];
description = lib.mdDoc ''
Files to load environment variables
from. This is helpful for specifying secrets.
Example content of environmentFile:
```
WOODPECKER_AGENT_SECRET=your-shared-secret-goes-here
```
'';
};
};
};
mkAgentService = name: agentCfg: {
name = "woodpecker-agent-${name}";
value = {
description = "Woodpecker-Agent Service - ${name}";
wantedBy = [ "multi-user.target" ];
after = [ "network-online.target" ];
wants = [ "network-online.target" ];
serviceConfig = {
DynamicUser = true;
SupplementaryGroups = agentCfg.extraGroups;
EnvironmentFile = agentCfg.environmentFile;
ExecStart = lib.getExe agentCfg.package;
Restart = "on-failure";
RestartSec = 15;
CapabilityBoundingSet = "";
NoNewPrivileges = true;
ProtectSystem = "strict";
PrivateTmp = true;
PrivateDevices = true;
PrivateUsers = true;
ProtectHostname = true;
ProtectClock = true;
ProtectKernelTunables = true;
ProtectKernelModules = true;
ProtectKernelLogs = true;
ProtectControlGroups = true;
RestrictAddressFamilies = [ "AF_UNIX AF_INET AF_INET6" ];
LockPersonality = true;
MemoryDenyWriteExecute = true;
RestrictRealtime = true;
RestrictSUIDSGID = true;
PrivateMounts = true;
SystemCallArchitectures = "native";
SystemCallFilter = "~@clock @privileged @cpu-emulation @debug @keyring @module @mount @obsolete @raw-io @reboot @setuid @swap";
BindReadOnlyPaths = [
"-/etc/resolv.conf"
"-/etc/nsswitch.conf"
"-/etc/ssl/certs"
"-/etc/static/ssl/certs"
"-/etc/hosts"
"-/etc/localtime"
];
};
inherit (agentCfg) environment;
};
};
in
{
meta.maintainers = with lib.maintainers; [ janik ambroisie ];
options = {
services.woodpecker-agents = {
agents = lib.mkOption {
default = { };
type = lib.types.attrsOf agentModule;
example = {
docker = {
environment = {
WOODPECKER_SERVER = "localhost:9000";
WOODPECKER_BACKEND = "docker";
DOCKER_HOST = "unix:///run/podman/podman.sock";
};
extraGroups = [ "docker" ];
environmentFile = "/run/secrets/woodpecker/agent-secret.txt";
};
exec = {
environment = {
WOODPECKER_SERVER = "localhost:9000";
WOODPECKER_BACKEND = "exec";
};
environmentFile = "/run/secrets/woodpecker/agent-secret.txt";
};
};
description = lib.mdDoc "woodpecker-agents configurations";
};
};
};
config = {
systemd.services =
let
mkServices = lib.mapAttrs' mkAgentService;
enabledAgents = lib.filterAttrs (_: agent: agent.enable) cfg.agents;
in
mkServices enabledAgents;
};
}

View file

@ -8,7 +8,7 @@ let
cfg = config.services.woodpecker-server;
in
{
meta.maintainers = [ lib.maintainers.janik ];
meta.maintainers = with lib.maintainers; [ janik ambroisie ];
options = {

View file

@ -1,54 +0,0 @@
{
"context.properties": {
"log.level": 0
},
"context.spa-libs": {
"audio.convert.*": "audioconvert/libspa-audioconvert",
"support.*": "support/libspa-support"
},
"context.modules": [
{
"name": "libpipewire-module-rt",
"args": {},
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-client-device"
},
{
"name": "libpipewire-module-adapter"
},
{
"name": "libpipewire-module-metadata"
},
{
"name": "libpipewire-module-session-manager"
}
],
"filter.properties": {},
"stream.properties": {},
"alsa.properties": {},
"alsa.rules": [
{
"matches": [
{
"application.process.binary": "resolve"
}
],
"actions": {
"update-props": {
"alsa.buffer-bytes": 131072
}
}
}
]
}

View file

@ -1,31 +0,0 @@
{
"context.properties": {
"log.level": 0
},
"context.spa-libs": {
"audio.convert.*": "audioconvert/libspa-audioconvert",
"support.*": "support/libspa-support"
},
"context.modules": [
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-client-device"
},
{
"name": "libpipewire-module-adapter"
},
{
"name": "libpipewire-module-metadata"
},
{
"name": "libpipewire-module-session-manager"
}
],
"filter.properties": {},
"stream.properties": {}
}

View file

@ -1,28 +0,0 @@
{
"context.properties": {
"log.level": 0
},
"context.spa-libs": {
"audio.convert.*": "audioconvert/libspa-audioconvert",
"support.*": "support/libspa-support"
},
"context.modules": [
{
"name": "libpipewire-module-rt",
"args": {},
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-adapter"
}
]
}

View file

@ -1,75 +0,0 @@
{
"context.properties": {
"log.level": 0
},
"context.spa-libs": {
"support.*": "support/libspa-support"
},
"context.modules": [
{
"name": "libpipewire-module-rt",
"args": {},
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-metadata"
}
],
"jack.properties": {},
"jack.rules": [
{
"matches": [
{}
],
"actions": {
"update-props": {}
}
},
{
"matches": [
{
"application.process.binary": "jack_bufsize"
}
],
"actions": {
"update-props": {
"jack.global-buffer-size": true
}
}
},
{
"matches": [
{
"application.process.binary": "qsynth"
}
],
"actions": {
"update-props": {
"node.pause-on-idle": false,
"node.passive": true
}
}
},
{
"matches": [
{
"client.name": "Mixxx"
}
],
"actions": {
"update-props": {
"jack.merge-monitor": false
}
}
}
]
}

View file

@ -1,120 +0,0 @@
{
"context.properties": {
"link.max-buffers": 16,
"core.daemon": true,
"core.name": "pipewire-0",
"settings.check-quantum": true,
"settings.check-rate": true,
"vm.overrides": {
"default.clock.min-quantum": 1024
}
},
"context.spa-libs": {
"audio.convert.*": "audioconvert/libspa-audioconvert",
"api.alsa.*": "alsa/libspa-alsa",
"support.*": "support/libspa-support"
},
"context.modules": [
{
"name": "libpipewire-module-rt",
"args": {
"nice.level": -11
},
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-profiler"
},
{
"name": "libpipewire-module-metadata"
},
{
"name": "libpipewire-module-spa-node-factory"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-access",
"args": {}
},
{
"name": "libpipewire-module-adapter"
},
{
"name": "libpipewire-module-link-factory"
}
],
"context.objects": [
{
"factory": "metadata",
"args": {
"metadata.name": "default"
}
},
{
"factory": "spa-node-factory",
"args": {
"factory.name": "support.node.driver",
"node.name": "Dummy-Driver",
"node.group": "pipewire.dummy",
"priority.driver": 20000
}
},
{
"factory": "spa-node-factory",
"args": {
"factory.name": "support.node.driver",
"node.name": "Freewheel-Driver",
"priority.driver": 19000,
"node.group": "pipewire.freewheel",
"node.freewheel": true
}
},
{
"factory": "adapter",
"args": {
"factory.name": "api.alsa.pcm.source",
"node.name": "system",
"node.description": "system",
"media.class": "Audio/Source",
"api.alsa.path": "hw:0",
"node.suspend-on-idle": true,
"resample.disable": true,
"channelmix.disable": true,
"adapter.auto-port-config": {
"mode": "dsp",
"monitor": false,
"control": false,
"position": "unknown"
}
}
},
{
"factory": "adapter",
"args": {
"factory.name": "api.alsa.pcm.sink",
"node.name": "system",
"node.description": "system",
"media.class": "Audio/Sink",
"api.alsa.path": "hw:0",
"node.suspend-on-idle": true,
"resample.disable": true,
"channelmix.disable": true,
"adapter.auto-port-config": {
"mode": "dsp",
"monitor": false,
"control": false,
"position": "unknown"
}
}
}
],
"context.exec": []
}

View file

@ -1,38 +0,0 @@
{
"context.properties": {},
"context.modules": [
{
"name": "libpipewire-module-rt",
"args": {
"nice.level": -11
},
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-adapter"
},
{
"name": "libpipewire-module-rtp-source",
"args": {
"sap.ip": "239.255.255.255",
"sap.port": 9875,
"sess.latency.msec": 10,
"local.ifname": "eth0",
"stream.props": {
"media.class": "Audio/Source",
"node.virtual": false,
"device.api": "aes67"
}
}
}
]
}

View file

@ -1,38 +0,0 @@
{
"context.properties": {},
"context.spa-libs": {
"audio.convert.*": "audioconvert/libspa-audioconvert",
"support.*": "support/libspa-support"
},
"context.modules": [
{
"name": "libpipewire-module-rt",
"args": {
"nice.level": -11
},
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-adapter"
},
{
"name": "libpipewire-module-avb",
"args": {}
}
],
"context.exec": [],
"stream.properties": {},
"avb.properties": {
"ifname": "enp3s0",
"vm.overrides": {}
}
}

View file

@ -1,106 +0,0 @@
{
"context.properties": {},
"context.spa-libs": {
"audio.convert.*": "audioconvert/libspa-audioconvert",
"support.*": "support/libspa-support"
},
"context.modules": [
{
"name": "libpipewire-module-rt",
"args": {
"nice.level": -11
},
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-adapter"
},
{
"name": "libpipewire-module-metadata"
},
{
"name": "libpipewire-module-protocol-pulse",
"args": {}
}
],
"context.exec": [],
"pulse.cmd": [
{
"cmd": "load-module",
"args": "module-always-sink",
"flags": []
}
],
"stream.properties": {},
"pulse.properties": {
"server.address": [
"unix:native"
],
"vm.overrides": {
"pulse.min.quantum": "1024/48000"
}
},
"pulse.rules": [
{
"matches": [
{}
],
"actions": {
"update-props": {}
}
},
{
"matches": [
{
"application.process.binary": "teams"
},
{
"application.process.binary": "teams-insiders"
},
{
"application.process.binary": "skypeforlinux"
}
],
"actions": {
"quirks": [
"force-s16-info"
]
}
},
{
"matches": [
{
"application.process.binary": "firefox"
}
],
"actions": {
"quirks": [
"remove-capture-dont-move"
]
}
},
{
"matches": [
{
"application.name": "~speech-dispatcher.*"
}
],
"actions": {
"update-props": {
"pulse.min.req": "512/48000",
"pulse.min.quantum": "512/48000",
"pulse.idle.timeout": 5
}
}
}
]
}

View file

@ -1,110 +0,0 @@
{
"context.properties": {
"link.max-buffers": 16,
"core.daemon": true,
"core.name": "pipewire-0",
"vm.overrides": {
"default.clock.min-quantum": 1024
},
"module.x11.bell": true
},
"context.spa-libs": {
"audio.convert.*": "audioconvert/libspa-audioconvert",
"avb.*": "avb/libspa-avb",
"api.alsa.*": "alsa/libspa-alsa",
"api.v4l2.*": "v4l2/libspa-v4l2",
"api.libcamera.*": "libcamera/libspa-libcamera",
"api.bluez5.*": "bluez5/libspa-bluez5",
"api.vulkan.*": "vulkan/libspa-vulkan",
"api.jack.*": "jack/libspa-jack",
"support.*": "support/libspa-support"
},
"context.modules": [
{
"name": "libpipewire-module-rt",
"args": {
"nice.level": -11
},
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-profiler"
},
{
"name": "libpipewire-module-metadata"
},
{
"name": "libpipewire-module-spa-device-factory"
},
{
"name": "libpipewire-module-spa-node-factory"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-client-device"
},
{
"name": "libpipewire-module-portal",
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-access",
"args": {}
},
{
"name": "libpipewire-module-adapter"
},
{
"name": "libpipewire-module-link-factory"
},
{
"name": "libpipewire-module-session-manager"
},
{
"name": "libpipewire-module-x11-bell",
"args": {},
"flags": [
"ifexists",
"nofail"
],
"condition": [
{
"module.x11.bell": true
}
]
}
],
"context.objects": [
{
"factory": "spa-node-factory",
"args": {
"factory.name": "support.node.driver",
"node.name": "Dummy-Driver",
"node.group": "pipewire.dummy",
"priority.driver": 20000
}
},
{
"factory": "spa-node-factory",
"args": {
"factory.name": "support.node.driver",
"node.name": "Freewheel-Driver",
"priority.driver": 19000,
"node.group": "pipewire.freewheel",
"node.freewheel": true
}
}
],
"context.exec": []
}

View file

@ -1,34 +0,0 @@
{
"properties": {},
"rules": [
{
"matches": [
{
"device.name": "~alsa_card.*"
}
],
"actions": {
"update-props": {
"api.alsa.use-acp": true,
"api.acp.auto-profile": false,
"api.acp.auto-port": false
}
}
},
{
"matches": [
{
"node.name": "~alsa_input.*"
},
{
"node.name": "~alsa_output.*"
}
],
"actions": {
"update-props": {
"node.pause-on-idle": false
}
}
}
]
}

View file

@ -1,36 +0,0 @@
{
"properties": {},
"rules": [
{
"matches": [
{
"device.name": "~bluez_card.*"
}
],
"actions": {
"update-props": {
"bluez5.auto-connect": [
"hfp_hf",
"hsp_hs",
"a2dp_sink"
]
}
}
},
{
"matches": [
{
"node.name": "~bluez_input.*"
},
{
"node.name": "~bluez_output.*"
}
],
"actions": {
"update-props": {
"node.pause-on-idle": false
}
}
}
]
}

View file

@ -1,68 +0,0 @@
{
"context.properties": {},
"context.spa-libs": {
"api.bluez5.*": "bluez5/libspa-bluez5",
"api.alsa.*": "alsa/libspa-alsa",
"api.v4l2.*": "v4l2/libspa-v4l2",
"api.libcamera.*": "libcamera/libspa-libcamera"
},
"context.modules": [
{
"name": "libpipewire-module-rtkit",
"args": {},
"flags": [
"ifexists",
"nofail"
]
},
{
"name": "libpipewire-module-protocol-native"
},
{
"name": "libpipewire-module-client-node"
},
{
"name": "libpipewire-module-client-device"
},
{
"name": "libpipewire-module-adapter"
},
{
"name": "libpipewire-module-metadata"
},
{
"name": "libpipewire-module-session-manager"
}
],
"session.modules": {
"default": [
"flatpak",
"portal",
"v4l2",
"suspend-node",
"policy-node"
],
"with-audio": [
"metadata",
"default-nodes",
"default-profile",
"default-routes",
"alsa-seq",
"alsa-monitor"
],
"with-alsa": [
"with-audio"
],
"with-jack": [
"with-audio"
],
"with-pulseaudio": [
"with-audio",
"bluez5",
"bluez5-autoswitch",
"logind",
"restore-stream",
"streams-follow-default"
]
}
}

View file

@ -1,30 +0,0 @@
{
"properties": {},
"rules": [
{
"matches": [
{
"device.name": "~v4l2_device.*"
}
],
"actions": {
"update-props": {}
}
},
{
"matches": [
{
"node.name": "~v4l2_input.*"
},
{
"node.name": "~v4l2_output.*"
}
],
"actions": {
"update-props": {
"node.pause-on-idle": false
}
}
}
]
}

View file

@ -1,141 +0,0 @@
# pipewire example session manager.
{ config, lib, pkgs, ... }:
with lib;
let
json = pkgs.formats.json {};
cfg = config.services.pipewire.media-session;
enable32BitAlsaPlugins = cfg.alsa.support32Bit
&& pkgs.stdenv.isx86_64
&& pkgs.pkgsi686Linux.pipewire != null;
# Use upstream config files passed through spa-json-dump as the base
# Patched here as necessary for them to work with this module
defaults = {
alsa-monitor = lib.importJSON ./media-session/alsa-monitor.conf.json;
bluez-monitor = lib.importJSON ./media-session/bluez-monitor.conf.json;
media-session = lib.importJSON ./media-session/media-session.conf.json;
v4l2-monitor = lib.importJSON ./media-session/v4l2-monitor.conf.json;
};
configs = {
alsa-monitor = recursiveUpdate defaults.alsa-monitor cfg.config.alsa-monitor;
bluez-monitor = recursiveUpdate defaults.bluez-monitor cfg.config.bluez-monitor;
media-session = recursiveUpdate defaults.media-session cfg.config.media-session;
v4l2-monitor = recursiveUpdate defaults.v4l2-monitor cfg.config.v4l2-monitor;
};
in {
meta = {
maintainers = teams.freedesktop.members;
# uses attributes of the linked package
buildDocsInSandbox = false;
};
###### interface
options = {
services.pipewire.media-session = {
enable = mkOption {
type = types.bool;
default = false;
description = lib.mdDoc "Whether to enable the deprecated example Pipewire session manager";
};
package = mkOption {
type = types.package;
default = pkgs.pipewire-media-session;
defaultText = literalExpression "pkgs.pipewire-media-session";
description = lib.mdDoc ''
The pipewire-media-session derivation to use.
'';
};
config = {
media-session = mkOption {
type = json.type;
description = lib.mdDoc ''
Configuration for the media session core. For details see
https://gitlab.freedesktop.org/pipewire/media-session/-/blob/${cfg.package.version}/src/daemon/media-session.d/media-session.conf
'';
default = defaults.media-session;
};
alsa-monitor = mkOption {
type = json.type;
description = lib.mdDoc ''
Configuration for the alsa monitor. For details see
https://gitlab.freedesktop.org/pipewire/media-session/-/blob/${cfg.package.version}/src/daemon/media-session.d/alsa-monitor.conf
'';
default = defaults.alsa-monitor;
};
bluez-monitor = mkOption {
type = json.type;
description = lib.mdDoc ''
Configuration for the bluez5 monitor. For details see
https://gitlab.freedesktop.org/pipewire/media-session/-/blob/${cfg.package.version}/src/daemon/media-session.d/bluez-monitor.conf
'';
default = defaults.bluez-monitor;
};
v4l2-monitor = mkOption {
type = json.type;
description = lib.mdDoc ''
Configuration for the V4L2 monitor. For details see
https://gitlab.freedesktop.org/pipewire/media-session/-/blob/${cfg.package.version}/src/daemon/media-session.d/v4l2-monitor.conf
'';
default = defaults.v4l2-monitor;
};
};
};
};
###### implementation
config = mkIf cfg.enable {
environment.systemPackages = [ cfg.package ];
systemd.packages = [ cfg.package ];
# Enable either system or user units.
systemd.services.pipewire-media-session.enable = config.services.pipewire.systemWide;
systemd.user.services.pipewire-media-session.enable = !config.services.pipewire.systemWide;
systemd.services.pipewire-media-session.wantedBy = [ "pipewire.service" ];
systemd.user.services.pipewire-media-session.wantedBy = [ "pipewire.service" ];
environment.etc."pipewire/media-session.d/media-session.conf" = {
source = json.generate "media-session.conf" configs.media-session;
};
environment.etc."pipewire/media-session.d/v4l2-monitor.conf" = {
source = json.generate "v4l2-monitor.conf" configs.v4l2-monitor;
};
environment.etc."pipewire/media-session.d/with-audio" =
mkIf config.services.pipewire.audio.enable {
text = "";
};
environment.etc."pipewire/media-session.d/with-alsa" =
mkIf config.services.pipewire.alsa.enable {
text = "";
};
environment.etc."pipewire/media-session.d/alsa-monitor.conf" =
mkIf config.services.pipewire.alsa.enable {
source = json.generate "alsa-monitor.conf" configs.alsa-monitor;
};
environment.etc."pipewire/media-session.d/with-pulseaudio" =
mkIf config.services.pipewire.pulse.enable {
text = "";
};
environment.etc."pipewire/media-session.d/bluez-monitor.conf" =
mkIf config.services.pipewire.pulse.enable {
source = json.generate "bluez-monitor.conf" configs.bluez-monitor;
};
environment.etc."pipewire/media-session.d/with-jack" =
mkIf config.services.pipewire.jack.enable {
text = "";
};
};
}

View file

@ -4,7 +4,6 @@
with lib;
let
json = pkgs.formats.json {};
cfg = config.services.pipewire;
enable32BitAlsaPlugins = cfg.alsa.support32Bit
&& pkgs.stdenv.isx86_64
@ -18,34 +17,8 @@ let
mkdir -p "$out/lib"
ln -s "${cfg.package.jack}/lib" "$out/lib/pipewire"
'';
# Use upstream config files passed through spa-json-dump as the base
# Patched here as necessary for them to work with this module
defaults = {
client = lib.importJSON ./daemon/client.conf.json;
client-rt = lib.importJSON ./daemon/client-rt.conf.json;
jack = lib.importJSON ./daemon/jack.conf.json;
minimal = lib.importJSON ./daemon/minimal.conf.json;
pipewire = lib.importJSON ./daemon/pipewire.conf.json;
pipewire-pulse = lib.importJSON ./daemon/pipewire-pulse.conf.json;
};
useSessionManager = cfg.wireplumber.enable || cfg.media-session.enable;
configs = {
client = recursiveUpdate defaults.client cfg.config.client;
client-rt = recursiveUpdate defaults.client-rt cfg.config.client-rt;
jack = recursiveUpdate defaults.jack cfg.config.jack;
pipewire = recursiveUpdate (if useSessionManager then defaults.pipewire else defaults.minimal) cfg.config.pipewire;
pipewire-pulse = recursiveUpdate defaults.pipewire-pulse cfg.config.pipewire-pulse;
};
in {
meta = {
maintainers = teams.freedesktop.members ++ [ lib.maintainers.k900 ];
# uses attributes of the linked package
buildDocsInSandbox = false;
};
meta.maintainers = teams.freedesktop.members ++ [ lib.maintainers.k900 ];
###### interface
options = {
@ -69,53 +42,6 @@ in {
'';
};
config = {
client = mkOption {
type = json.type;
default = {};
description = lib.mdDoc ''
Configuration for pipewire clients. For details see
https://gitlab.freedesktop.org/pipewire/pipewire/-/blob/${cfg.package.version}/src/daemon/client.conf.in
'';
};
client-rt = mkOption {
type = json.type;
default = {};
description = lib.mdDoc ''
Configuration for realtime pipewire clients. For details see
https://gitlab.freedesktop.org/pipewire/pipewire/-/blob/${cfg.package.version}/src/daemon/client-rt.conf.in
'';
};
jack = mkOption {
type = json.type;
default = {};
description = lib.mdDoc ''
Configuration for the pipewire daemon's jack module. For details see
https://gitlab.freedesktop.org/pipewire/pipewire/-/blob/${cfg.package.version}/src/daemon/jack.conf.in
'';
};
pipewire = mkOption {
type = json.type;
default = {};
description = lib.mdDoc ''
Configuration for the pipewire daemon. For details see
https://gitlab.freedesktop.org/pipewire/pipewire/-/blob/${cfg.package.version}/src/daemon/pipewire.conf.in
'';
};
pipewire-pulse = mkOption {
type = json.type;
default = {};
description = lib.mdDoc ''
Configuration for the pipewire-pulse daemon. For details see
https://gitlab.freedesktop.org/pipewire/pipewire/-/blob/${cfg.package.version}/src/daemon/pipewire-pulse.conf.in
'';
};
};
audio = {
enable = lib.mkOption {
type = lib.types.bool;
@ -153,10 +79,20 @@ in {
https://github.com/PipeWire/pipewire/blob/master/NEWS
'';
};
};
};
imports = [
(lib.mkRemovedOptionModule ["services" "pipewire" "config"] ''
Overriding default Pipewire configuration through NixOS options never worked correctly and is no longer supported.
Please create drop-in files in /etc/pipewire/pipewire.conf.d/ to make the desired setting changes instead.
'')
(lib.mkRemovedOptionModule ["services" "pipewire" "media-session"] ''
pipewire-media-session is no longer supported upstream and has been removed.
Please switch to `services.pipewire.wireplumber` instead.
'')
];
###### implementation
config = mkIf cfg.enable {
@ -222,22 +158,6 @@ in {
source = "${cfg.package}/share/alsa/alsa.conf.d/99-pipewire-default.conf";
};
environment.etc."pipewire/client.conf" = {
source = json.generate "client.conf" configs.client;
};
environment.etc."pipewire/client-rt.conf" = {
source = json.generate "client-rt.conf" configs.client-rt;
};
environment.etc."pipewire/jack.conf" = {
source = json.generate "jack.conf" configs.jack;
};
environment.etc."pipewire/pipewire.conf" = {
source = json.generate "pipewire.conf" configs.pipewire;
};
environment.etc."pipewire/pipewire-pulse.conf" = mkIf cfg.pulse.enable {
source = json.generate "pipewire-pulse.conf" configs.pipewire-pulse;
};
environment.sessionVariables.LD_LIBRARY_PATH =
lib.mkIf cfg.jack.enable [ "${cfg.package.jack}/lib" ];
@ -256,12 +176,5 @@ in {
};
groups.pipewire.gid = config.ids.gids.pipewire;
};
# https://gitlab.freedesktop.org/pipewire/pipewire/-/issues/464#note_723554
systemd.services.pipewire.environment."PIPEWIRE_LINK_PASSIVE" = "1";
systemd.user.services.pipewire.environment."PIPEWIRE_LINK_PASSIVE" = "1";
# pipewire-pulse default config expects pactl to be in PATH
systemd.user.services.pipewire-pulse.path = lib.mkIf cfg.pulse.enable [ pkgs.pulseaudio ];
};
}
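
The removed-option notices above point at drop-in files under /etc/pipewire/pipewire.conf.d/ as the replacement for the old services.pipewire.config.* options. A minimal sketch of shipping such a drop-in from NixOS configuration (the file name 92-custom.conf and the default.clock.rate setting are illustrative assumptions, not part of this change):

```nix
{ ... }:
{
  # Hypothetical drop-in: PipeWire merges *.conf files from pipewire.conf.d
  # over its packaged defaults, which is what replaces the removed
  # services.pipewire.config.* options.
  environment.etc."pipewire/pipewire.conf.d/92-custom.conf".text = builtins.toJSON {
    "context.properties" = {
      "default.clock.rate" = 48000;
    };
  };
}
```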

View file

@ -28,10 +28,6 @@ in
config = lib.mkIf cfg.enable {
assertions = [
{
assertion = !config.services.pipewire.media-session.enable;
message = "WirePlumber and pipewire-media-session can't be enabled at the same time.";
}
{
assertion = !config.hardware.bluetooth.hsphfpd.enable;
message = "Using Wireplumber conflicts with hsphfpd, as it provides the same functionality. `hardware.bluetooth.hsphfpd.enable` needs be set to false";

View file

@ -86,6 +86,7 @@ let
mkService = name: keyboard: nameValuePair (mkName name) {
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "notify";
ExecStart = ''
${getExe cfg.package} \
--cfg ${mkConfig name keyboard} \
@ -123,8 +124,7 @@ let
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectProc = "invisible";
RestrictAddressFamilies =
if (keyboard.port == null) then "none" else [ "AF_INET" ];
RestrictAddressFamilies = [ "AF_UNIX" ] ++ optional (keyboard.port != null) "AF_INET";
RestrictNamespaces = true;
RestrictRealtime = true;
SystemCallArchitectures = [ "native" ];

View file

@ -60,7 +60,7 @@ in {
'')
(mkRemovedOptionModule [ "services" "matrix-synapse" "create_local_database" ] ''
Database configuration must be done manually. An exemplary setup is demonstrated in
<nixpkgs/nixos/tests/matrix-synapse.nix>
<nixpkgs/nixos/tests/matrix/synapse.nix>
'')
(mkRemovedOptionModule [ "services" "matrix-synapse" "web_client" ] "")
(mkRemovedOptionModule [ "services" "matrix-synapse" "room_invite_state_types" ] ''
@ -711,7 +711,7 @@ in {
If you
- try to deploy a fresh synapse, you need to configure the database yourself. An example
for this can be found in <nixpkgs/nixos/tests/matrix-synapse.nix>
for this can be found in <nixpkgs/nixos/tests/matrix/synapse.nix>
- update your existing matrix-synapse instance, you simply need to add `services.postgresql.enable = true`
to your configuration.
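
A minimal sketch of the second case described above, for an existing local matrix-synapse instance; fresh deployments still have to create the database and user themselves:

```nix
{ ... }:
{
  # Sketch based on the message above: enable a local PostgreSQL instance next to
  # an existing matrix-synapse deployment. Fresh installs still need the database
  # and user created manually (see the referenced test for one example).
  services.postgresql.enable = true;
  services.matrix-synapse.enable = true;
}
```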

View file

@ -176,7 +176,11 @@ in
"/ip4/0.0.0.0/tcp/4001"
"/ip6/::/tcp/4001"
"/ip4/0.0.0.0/udp/4001/quic"
"/ip4/0.0.0.0/udp/4001/quic-v1"
"/ip4/0.0.0.0/udp/4001/quic-v1/webtransport"
"/ip6/::/udp/4001/quic"
"/ip6/::/udp/4001/quic-v1"
"/ip6/::/udp/4001/quic-v1/webtransport"
];
description = lib.mdDoc "Where Kubo listens for incoming p2p connections";
};
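
The new defaults add QUIC v1 and WebTransport listeners alongside the existing TCP and legacy QUIC ones. A hedged sketch of overriding the list, assuming the option lives at services.kubo.settings.Addresses.Swarm (the full path is not shown in this hunk):

```nix
{ ... }:
{
  # Hypothetical override (option path assumed, not part of the hunk above):
  # keep only the TCP and QUIC v1 listeners, dropping legacy QUIC and WebTransport.
  services.kubo.settings.Addresses.Swarm = [
    "/ip4/0.0.0.0/tcp/4001"
    "/ip6/::/tcp/4001"
    "/ip4/0.0.0.0/udp/4001/quic-v1"
    "/ip6/::/udp/4001/quic-v1"
  ];
}
```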

View file

@ -184,6 +184,22 @@ in
.tar.gz files.
'';
};
environmentFile = mkOption {
type = types.nullOr types.path;
default = null;
description = lib.mdDoc ''
Environment file (see {manpage}`systemd.exec(5)`
"EnvironmentFile=" section for the syntax) which sets config options
for mattermost (see [the mattermost documentation](https://docs.mattermost.com/configure/configuration-settings.html#environment-variables)).
Settings defined in the environment file will overwrite settings
set via nix or via the {option}`services.mattermost.extraConfig`
option.
Useful for setting config options without their value ending up in the
(world-readable) nix store, e.g. for a database password.
'';
};
localDatabaseCreate = mkOption {
type = types.bool;
@ -321,6 +337,7 @@ in
Restart = "always";
RestartSec = "10";
LimitNOFILE = "49152";
EnvironmentFile = cfg.environmentFile;
};
unitConfig.JoinsNamespaceOf = mkIf cfg.localDatabaseCreate "postgresql.service";
};
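
A minimal sketch of using the new environmentFile option; the path and variable below are illustrative, mirroring the MM_* naming Mattermost uses for environment overrides:

```nix
{ ... }:
{
  # Sketch: keep the secret in a root-readable file outside the Nix store so it
  # never ends up world-readable. The file might contain, e.g.:
  #   MM_SQLSETTINGS_DATASOURCE=postgres://mattermost:secret@localhost:5432/mattermost
  services.mattermost.environmentFile = "/var/lib/secrets/mattermost.env";
}
```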

View file

@ -1112,7 +1112,7 @@ in {
${optionalString (cfg.nginx.recommendedHttpHeaders) ''
add_header X-Content-Type-Options nosniff;
add_header X-XSS-Protection "1; mode=block";
add_header X-Robots-Tag none;
add_header X-Robots-Tag "noindex, nofollow";
add_header X-Download-Options noopen;
add_header X-Permitted-Cross-Domain-Policies none;
add_header X-Frame-Options sameorigin;

View file

@ -0,0 +1,401 @@
{ lib
, pkgs
, config
, ...
}:
let
cfg = config.services.authelia;
format = pkgs.formats.yaml { };
configFile = format.generate "config.yml" cfg.settings;
autheliaOpts = with lib; { name, ... }: {
options = {
enable = mkEnableOption (mdDoc "Authelia instance");
name = mkOption {
type = types.str;
default = name;
description = mdDoc ''
Name is used as a suffix for the service name, user, and group.
By default it takes the value you use for `<instance>` in:
{option}`services.authelia.<instance>`
'';
};
package = mkOption {
default = pkgs.authelia;
type = types.package;
defaultText = literalExpression "pkgs.authelia";
description = mdDoc "Authelia derivation to use.";
};
user = mkOption {
default = "authelia-${name}";
type = types.str;
description = mdDoc "The name of the user for this authelia instance.";
};
group = mkOption {
default = "authelia-${name}";
type = types.str;
description = mdDoc "The name of the group for this authelia instance.";
};
secrets = mkOption {
description = mdDoc ''
It is recommended you keep your secrets separate from the configuration.
It's especially important to keep the raw secrets out of your nix configuration,
as the values will be preserved in your nix store.
This attribute allows you to configure the location of secret files to be loaded at runtime.
https://www.authelia.com/configuration/methods/secrets/
'';
default = { };
type = types.submodule {
options = {
manual = mkOption {
default = false;
example = true;
description = mdDoc ''
Configuring authelia's secret files via the secrets attribute set
is intended to be convenient and to help catch cases where a required
value is missing.
If you want to provide these values yourself and bypass the validation, set this option to true.
'';
type = types.bool;
};
# required
jwtSecretFile = mkOption {
type = types.nullOr types.path;
default = null;
description = mdDoc ''
Path to your JWT secret used during identity verification.
'';
};
oidcIssuerPrivateKeyFile = mkOption {
type = types.nullOr types.path;
default = null;
description = mdDoc ''
Path to your private key file used to encrypt OIDC JWTs.
'';
};
oidcHmacSecretFile = mkOption {
type = types.nullOr types.path;
default = null;
description = mdDoc ''
Path to your HMAC secret used to sign OIDC JWTs.
'';
};
sessionSecretFile = mkOption {
type = types.nullOr types.path;
default = null;
description = mdDoc ''
Path to your session secret. Only used when redis is used as session storage.
'';
};
# required
storageEncryptionKeyFile = mkOption {
type = types.nullOr types.path;
default = null;
description = mdDoc ''
Path to your storage encryption key.
'';
};
};
};
};
environmentVariables = mkOption {
type = types.attrsOf types.str;
description = mdDoc ''
Additional environment variables to provide to authelia.
If you are providing secrets please consider the options under {option}`services.authelia.<instance>.secrets`
or make sure you use the `_FILE` suffix.
If you provide the raw secret rather than the location of a secret file that secret will be preserved in the nix store.
For more details: https://www.authelia.com/configuration/methods/secrets/
'';
default = { };
};
settings = mkOption {
description = mdDoc ''
Your Authelia config.yml as a Nix attribute set.
There are several values that are defined and documented in nix such as `default_2fa_method`,
but additional items can also be included.
https://github.com/authelia/authelia/blob/master/config.template.yml
'';
default = { };
example = ''
{
theme = "light";
default_2fa_method = "totp";
log.level = "debug";
server.disable_healthcheck = true;
}
'';
type = types.submodule {
freeformType = format.type;
options = {
theme = mkOption {
type = types.enum [ "light" "dark" "grey" "auto" ];
default = "light";
example = "dark";
description = mdDoc "The theme to display.";
};
default_2fa_method = mkOption {
type = types.enum [ "" "totp" "webauthn" "mobile_push" ];
default = "";
example = "webauthn";
description = mdDoc ''
Default 2FA method for new users and fallback for preferred but disabled methods.
'';
};
server = {
host = mkOption {
type = types.str;
default = "localhost";
example = "0.0.0.0";
description = mdDoc "The address to listen on.";
};
port = mkOption {
type = types.port;
default = 9091;
description = mdDoc "The port to listen on.";
};
};
log = {
level = mkOption {
type = types.enum [ "info" "debug" "trace" ];
default = "debug";
example = "info";
description = mdDoc "Level of verbosity for logs: info, debug, trace.";
};
format = mkOption {
type = types.enum [ "json" "text" ];
default = "json";
example = "text";
description = mdDoc "Format the logs are written as.";
};
file_path = mkOption {
type = types.nullOr types.path;
default = null;
example = "/var/log/authelia/authelia.log";
description = mdDoc "File path where the logs will be written. If not set logs are written to stdout.";
};
keep_stdout = mkOption {
type = types.bool;
default = false;
example = true;
description = mdDoc "Whether to also log to stdout when a `file_path` is defined.";
};
};
telemetry = {
metrics = {
enabled = mkOption {
type = types.bool;
default = false;
example = true;
description = mdDoc "Enable Metrics.";
};
address = mkOption {
type = types.str;
default = "tcp://127.0.0.1:9959";
example = "tcp://0.0.0.0:8888";
description = mdDoc "The address to listen on for metrics. This should be on a different port to the main `server.port` value.";
};
};
};
};
};
};
settingsFiles = mkOption {
type = types.listOf types.path;
default = [ ];
example = [ "/etc/authelia/config.yml" "/etc/authelia/access-control.yml" "/etc/authelia/config/" ];
description = mdDoc ''
Here you can provide authelia with configuration files or directories.
It is possible to give authelia multiple files and use the nix generated configuration
file set via {option}`services.authelia.<instance>.settings`.
'';
};
};
};
in
{
options.services.authelia.instances = with lib; mkOption {
default = { };
type = types.attrsOf (types.submodule autheliaOpts);
description = mdDoc ''
Multi-domain protection currently requires multiple instances of Authelia.
If you don't require multiple instances of Authelia you can define just the one.
https://www.authelia.com/roadmap/active/multi-domain-protection/
'';
example = ''
{
main = {
enable = true;
secrets.storageEncryptionKeyFile = "/etc/authelia/storageEncryptionKeyFile";
secrets.jwtSecretFile = "/etc/authelia/jwtSecretFile";
settings = {
theme = "light";
default_2fa_method = "totp";
log.level = "debug";
server.disable_healthcheck = true;
};
};
preprod = {
enable = false;
secrets.storageEncryptionKeyFile = "/mnt/pre-prod/authelia/storageEncryptionKeyFile";
secrets.jwtSecretFile = "/mnt/pre-prod/jwtSecretFile";
settings = {
theme = "dark";
default_2fa_method = "webauthn";
server.host = "0.0.0.0";
};
};
test.enable = true;
test.secrets.manual = true;
test.settings.theme = "grey";
test.settings.server.disable_healthcheck = true;
test.settingsFiles = [ "/mnt/test/authelia" "/mnt/test-authelia.conf" ];
};
}
'';
};
config =
let
mkInstanceServiceConfig = instance:
let
execCommand = "${instance.package}/bin/authelia";
configFile = format.generate "config.yml" instance.settings;
configArg = "--config ${builtins.concatStringsSep "," (lib.concatLists [[configFile] instance.settingsFiles])}";
in
{
description = "Authelia authentication and authorization server";
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
environment =
(lib.filterAttrs (_: v: v != null) {
AUTHELIA_JWT_SECRET_FILE = instance.secrets.jwtSecretFile;
AUTHELIA_STORAGE_ENCRYPTION_KEY_FILE = instance.secrets.storageEncryptionKeyFile;
AUTHELIA_SESSION_SECRET_FILE = instance.secrets.sessionSecretFile;
AUTHELIA_IDENTITY_PROVIDERS_OIDC_ISSUER_PRIVATE_KEY_FILE = instance.secrets.oidcIssuerPrivateKeyFile;
AUTHELIA_IDENTITY_PROVIDERS_OIDC_HMAC_SECRET_FILE = instance.secrets.oidcHmacSecretFile;
})
// instance.environmentVariables;
preStart = "${execCommand} ${configArg} validate-config";
serviceConfig = {
User = instance.user;
Group = instance.group;
ExecStart = "${execCommand} ${configArg}";
Restart = "always";
RestartSec = "5s";
StateDirectory = "authelia-${instance.name}";
StateDirectoryMode = "0700";
# Security options:
AmbientCapabilities = "";
CapabilityBoundingSet = "";
DeviceAllow = "";
LockPersonality = true;
MemoryDenyWriteExecute = true;
NoNewPrivileges = true;
PrivateTmp = true;
PrivateDevices = true;
PrivateUsers = true;
ProtectClock = true;
ProtectControlGroups = true;
ProtectHome = "read-only";
ProtectHostname = true;
ProtectKernelLogs = true;
ProtectKernelModules = true;
ProtectKernelTunables = true;
ProtectProc = "noaccess";
ProtectSystem = "strict";
RestrictAddressFamilies = [ "AF_INET" "AF_INET6" ];
RestrictNamespaces = true;
RestrictRealtime = true;
RestrictSUIDSGID = true;
SystemCallArchitectures = "native";
SystemCallErrorNumber = "EPERM";
SystemCallFilter = [
"@system-service"
"~@cpu-emulation"
"~@debug"
"~@keyring"
"~@memlock"
"~@obsolete"
"~@privileged"
"~@setuid"
];
};
};
mkInstanceUsersConfig = instance: {
groups."authelia-${instance.name}" =
lib.mkIf (instance.group == "authelia-${instance.name}") {
name = "authelia-${instance.name}";
};
users."authelia-${instance.name}" =
lib.mkIf (instance.user == "authelia-${instance.name}") {
name = "authelia-${instance.name}";
isSystemUser = true;
group = instance.group;
};
};
instances = lib.attrValues cfg.instances;
in
{
assertions = lib.flatten (lib.flip lib.mapAttrsToList cfg.instances (name: instance:
[
{
assertion = instance.secrets.manual || (instance.secrets.jwtSecretFile != null && instance.secrets.storageEncryptionKeyFile != null);
message = ''
Authelia requires a JWT Secret and a Storage Encryption Key to work.
Either set them like so:
services.authelia.${name}.secrets.jwtSecretFile = /my/path/to/jwtsecret;
services.authelia.${name}.secrets.storageEncryptionKeyFile = /my/path/to/encryptionkey;
Or set services.authelia.${name}.secrets.manual = true and provide them yourself via
environmentVariables or settingsFiles.
Do not include raw secrets in nix settings.
'';
}
]
));
systemd.services = lib.mkMerge
(map
(instance: lib.mkIf instance.enable {
"authelia-${instance.name}" = mkInstanceServiceConfig instance;
})
instances);
users = lib.mkMerge
(map
(instance: lib.mkIf instance.enable (mkInstanceUsersConfig instance))
instances);
};
}
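
A minimal sketch of a single instance using the options defined above; the secret paths are illustrative and expected to be provisioned outside the Nix store, since the assertion requires both files unless secrets.manual is set:

```nix
{ ... }:
{
  # Minimal sketch of one Authelia instance. The secret file paths are
  # placeholders (e.g. provisioned by a secrets manager); both are required
  # by the module's assertion unless secrets.manual = true.
  services.authelia.instances.main = {
    enable = true;
    secrets.jwtSecretFile = "/run/secrets/authelia-jwt";
    secrets.storageEncryptionKeyFile = "/run/secrets/authelia-storage-key";
    settings = {
      theme = "dark";
      log.level = "info";
      server.port = 9091;
    };
  };
}
```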

View file

@ -323,7 +323,7 @@ in
account sufficient pam_unix.so
password requisite pam_unix.so nullok sha512
password requisite pam_unix.so nullok yescrypt
session optional pam_keyinit.so revoke
session include login

View file

@ -302,7 +302,7 @@ in
account sufficient pam_unix.so
password requisite pam_unix.so nullok sha512
password requisite pam_unix.so nullok yescrypt
session optional pam_keyinit.so revoke
session include login

View file

@ -21,7 +21,7 @@ in
# loaders.cache based on that and set the environment variable
# GDK_PIXBUF_MODULE_FILE to point to it.
config = lib.mkIf (cfg.modulePackages != []) {
environment.variables = {
environment.sessionVariables = {
GDK_PIXBUF_MODULE_FILE = "${loadersCache}";
};
};

View file

@ -1,23 +1,63 @@
{ config, lib, pkgs, ... }:
{ config, pkgs, lib, ... }:
with lib;
let
cfg = config.services.xserver.windowManager.qtile;
pyEnv = pkgs.python3.withPackages (p: [ (cfg.package.unwrapped or cfg.package) ] ++ (cfg.extraPackages p));
in
{
options.services.xserver.windowManager.qtile = {
enable = mkEnableOption (lib.mdDoc "qtile");
package = mkPackageOptionMD pkgs "qtile" { };
package = mkPackageOptionMD pkgs "qtile-unwrapped" { };
configFile = mkOption {
type = with types; nullOr path;
default = null;
example = literalExpression "./your_config.py";
description = lib.mdDoc ''
Path to the qtile configuration file.
If null, $XDG_CONFIG_HOME/qtile/config.py will be used.
'';
};
backend = mkOption {
type = types.enum [ "x11" "wayland" ];
default = "x11";
description = lib.mdDoc ''
Backend to use in qtile:
`x11` or `wayland`.
'';
};
extraPackages = mkOption {
type = types.functionTo (types.listOf types.package);
default = _: [];
defaultText = literalExpression ''
python3Packages: with python3Packages; [];
'';
description = lib.mdDoc ''
Extra Python packages available to Qtile.
An example would be to include `python3Packages.qtile-extras`
for additional unofficial widgets.
'';
example = literalExpression ''
python3Packages: with python3Packages; [
qtile-extras
];
'';
};
};
config = mkIf cfg.enable {
services.xserver.windowManager.session = [{
name = "qtile";
start = ''
${cfg.package}/bin/qtile start &
${pyEnv}/bin/qtile start -b ${cfg.backend} \
${optionalString (cfg.configFile != null)
"--config \"${cfg.configFile}\""} &
waitPID=$!
'';
}];
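
A short sketch combining the new options (the config file path is illustrative):

```nix
{ ... }:
{
  # Sketch: Wayland backend, a pinned config file (path is a placeholder), and
  # qtile-extras added to the generated Python environment via extraPackages.
  services.xserver.windowManager.qtile = {
    enable = true;
    backend = "wayland";
    configFile = ./config.py;
    extraPackages = python3Packages: with python3Packages; [ qtile-extras ];
  };
}
```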

View file

@ -158,6 +158,20 @@ let
wait_target "header" ${dev.header} || die "${dev.header} is unavailable"
''}
try_empty_passphrase() {
${if dev.tryEmptyPassphrase then ''
echo "Trying empty passphrase!"
echo "" | ${csopen}
cs_status=$?
if [ $cs_status -eq 0 ]; then
return 0
else
return 1
fi
'' else "return 1"}
}
do_open_passphrase() {
local passphrase
@ -212,13 +226,27 @@ let
${csopen} --key-file=${dev.keyFile} \
${optionalString (dev.keyFileSize != null) "--keyfile-size=${toString dev.keyFileSize}"} \
${optionalString (dev.keyFileOffset != null) "--keyfile-offset=${toString dev.keyFileOffset}"}
cs_status=$?
if [ $cs_status -ne 0 ]; then
echo "Key File ${dev.keyFile} failed!"
if ! try_empty_passphrase; then
${if dev.fallbackToPassword then "echo" else "die"} "${dev.keyFile} is unavailable"
echo " - failing back to interactive password prompt"
do_open_passphrase
fi
fi
else
${if dev.fallbackToPassword then "echo" else "die"} "${dev.keyFile} is unavailable"
echo " - failing back to interactive password prompt"
do_open_passphrase
# If the key file never shows up we should also try the empty passphrase
if ! try_empty_passphrase; then
${if dev.fallbackToPassword then "echo" else "die"} "${dev.keyFile} is unavailable"
echo " - failing back to interactive password prompt"
do_open_passphrase
fi
fi
'' else ''
do_open_passphrase
if ! try_empty_passphrase; then
do_open_passphrase
fi
''}
}
@ -476,6 +504,7 @@ let
preLVM = filterAttrs (n: v: v.preLVM) luks.devices;
postLVM = filterAttrs (n: v: !v.preLVM) luks.devices;
stage1Crypttab = pkgs.writeText "initrd-crypttab" (lib.concatStringsSep "\n" (lib.mapAttrsToList (n: v: let
opts = v.crypttabExtraOpts
++ optional v.allowDiscards "discard"
@ -483,6 +512,8 @@ let
++ optional (v.header != null) "header=${v.header}"
++ optional (v.keyFileOffset != null) "keyfile-offset=${toString v.keyFileOffset}"
++ optional (v.keyFileSize != null) "keyfile-size=${toString v.keyFileSize}"
++ optional (v.keyFileTimeout != null) "keyfile-timeout=${builtins.toString v.keyFileTimeout}s"
++ optional (v.tryEmptyPassphrase) "try-empty-password=true"
;
in "${n} ${v.device} ${if v.keyFile == null then "-" else v.keyFile} ${lib.concatStringsSep "," opts}") luks.devices));
@ -594,6 +625,25 @@ in
'';
};
tryEmptyPassphrase = mkOption {
default = false;
type = types.bool;
description = lib.mdDoc ''
If the keyFile fails, try an empty passphrase first before
prompting for a password.
'';
};
keyFileTimeout = mkOption {
default = null;
example = 5;
type = types.nullOr types.int;
description = lib.mdDoc ''
The amount of time in seconds for a keyFile to appear before
timing out and trying passwords.
'';
};
keyFileSize = mkOption {
default = null;
example = 4096;
@ -889,6 +939,10 @@ in
message = "boot.initrd.luks.devices.<name>.bypassWorkqueues is not supported for kernels older than 5.9";
}
{ assertion = !config.boot.initrd.systemd.enable -> all (x: x.keyFileTimeout == null) (attrValues luks.devices);
message = "boot.initrd.luks.devices.<name>.keyFileTimeout is only supported for systemd initrd";
}
{ assertion = config.boot.initrd.systemd.enable -> all (dev: !dev.fallbackToPassword) (attrValues luks.devices);
message = "boot.initrd.luks.devices.<name>.fallbackToPassword is implied by systemd stage 1.";
}
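
A sketch of the new options used together; the device UUID and key file path are placeholders:

```nix
{ ... }:
{
  # Sketch: wait up to 5 seconds for the key file (keyFileTimeout requires the
  # systemd initrd, per the assertion above), then try an empty passphrase
  # before falling back to an interactive prompt.
  boot.initrd.systemd.enable = true;
  boot.initrd.luks.devices.cryptroot = {
    device = "/dev/disk/by-uuid/0000-placeholder";
    keyFile = "/etc/cryptroot.key";
    keyFileTimeout = 5;
    tryEmptyPassphrase = true;
  };
}
```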

View file

@ -79,6 +79,8 @@ let
# Filesystems.
"systemd-fsck@.service"
"systemd-fsck-root.service"
"systemd-growfs@.service"
"systemd-growfs-root.service"
"systemd-remount-fs.service"
"systemd-pstore.service"
"local-fs.target"

View file

@ -56,6 +56,7 @@ let
"systemd-ask-password-console.path"
"systemd-ask-password-console.service"
"systemd-fsck@.service"
"systemd-growfs@.service"
"systemd-halt.service"
"systemd-hibernate-resume@.service"
"systemd-journald-audit.socket"
@ -371,6 +372,7 @@ in {
managerEnvironment.PATH = "/bin:/sbin";
contents = {
"/tmp/.keep".text = "systemd requires the /tmp mount point in the initrd cpio archive";
"/init".source = "${cfg.package}/lib/systemd/systemd";
"/etc/systemd/system".source = stage1Units;

View file

@ -142,6 +142,7 @@ in
defaultNetwork.settings = lib.mkOption {
type = json.type;
default = { };
example = lib.literalExpression "{ dns_enabled = true; }";
description = lib.mdDoc ''
Settings for podman's default network.
'';
@ -149,7 +150,7 @@ in
};
config = lib.mkIf cfg.enable (lib.mkMerge [
config = lib.mkIf cfg.enable
{
environment.systemPackages = [ cfg.package ]
++ lib.optional cfg.dockerCompat dockerCompat;
@ -235,6 +236,5 @@ in
'';
}
];
}
]);
};
}

View file

@ -85,6 +85,7 @@ in {
atop = handleTest ./atop.nix {};
atuin = handleTest ./atuin.nix {};
auth-mysql = handleTest ./auth-mysql.nix {};
authelia = handleTest ./authelia.nix {};
avahi = handleTest ./avahi.nix {};
avahi-with-resolved = handleTest ./avahi.nix { networkd = true; };
babeld = handleTest ./babeld.nix {};
@ -217,10 +218,12 @@ in {
extra-python-packages = handleTest ./extra-python-packages.nix {};
evcc = handleTest ./evcc.nix {};
fancontrol = handleTest ./fancontrol.nix {};
fcitx = handleTest ./fcitx {};
fcitx5 = handleTest ./fcitx5 {};
fenics = handleTest ./fenics.nix {};
ferm = handleTest ./ferm.nix {};
firefox = handleTest ./firefox.nix { firefoxPackage = pkgs.firefox; };
firefox-beta = handleTest ./firefox.nix { firefoxPackage = pkgs.firefox-beta; };
firefox-devedition = handleTest ./firefox.nix { firefoxPackage = pkgs.firefox-devedition; };
firefox-esr = handleTest ./firefox.nix { firefoxPackage = pkgs.firefox-esr; }; # used in `tested` job
firefox-esr-102 = handleTest ./firefox.nix { firefoxPackage = pkgs.firefox-esr-102; };
firejail = handleTest ./firejail.nix {};
@ -311,6 +314,7 @@ in {
influxdb = handleTest ./influxdb.nix {};
initrd-network-openvpn = handleTest ./initrd-network-openvpn {};
initrd-network-ssh = handleTest ./initrd-network-ssh {};
initrd-luks-empty-passphrase = handleTest ./initrd-luks-empty-passphrase.nix {};
initrdNetwork = handleTest ./initrd-network.nix {};
initrd-secrets = handleTest ./initrd-secrets.nix {};
initrd-secrets-changing = handleTest ./initrd-secrets-changing.nix {};
@ -662,6 +666,7 @@ in {
systemd-initrd-btrfs-raid = handleTest ./systemd-initrd-btrfs-raid.nix {};
systemd-initrd-luks-fido2 = handleTest ./systemd-initrd-luks-fido2.nix {};
systemd-initrd-luks-keyfile = handleTest ./systemd-initrd-luks-keyfile.nix {};
systemd-initrd-luks-empty-passphrase = handleTest ./initrd-luks-empty-passphrase.nix { systemdStage1 = true; };
systemd-initrd-luks-password = handleTest ./systemd-initrd-luks-password.nix {};
systemd-initrd-luks-tpm2 = handleTest ./systemd-initrd-luks-tpm2.nix {};
systemd-initrd-modprobe = handleTest ./systemd-initrd-modprobe.nix {};

View file

@ -0,0 +1,169 @@
# Test Authelia as an auth server for Traefik as a reverse proxy of a local web service
import ./make-test-python.nix ({ lib, pkgs, ... }: {
name = "authelia";
meta.maintainers = with lib.maintainers; [ jk ];
nodes = {
authelia = { config, pkgs, lib, ... }: {
services.authelia.instances.testing = {
enable = true;
secrets.storageEncryptionKeyFile = "/etc/authelia/storageEncryptionKeyFile";
secrets.jwtSecretFile = "/etc/authelia/jwtSecretFile";
settings = {
authentication_backend.file.path = "/etc/authelia/users_database.yml";
access_control.default_policy = "one_factor";
session.domain = "example.com";
storage.local.path = "/tmp/db.sqlite3";
notifier.filesystem.filename = "/tmp/notifications.txt";
};
};
# These should not be set from nix but through other means to not leak the secret!
# This is purely for testing purposes!
environment.etc."authelia/storageEncryptionKeyFile" = {
mode = "0400";
user = "authelia-testing";
text = "you_must_generate_a_random_string_of_more_than_twenty_chars_and_configure_this";
};
environment.etc."authelia/jwtSecretFile" = {
mode = "0400";
user = "authelia-testing";
text = "a_very_important_secret";
};
environment.etc."authelia/users_database.yml" = {
mode = "0400";
user = "authelia-testing";
text = ''
users:
bob:
disabled: false
displayname: bob
# password of password
password: $argon2id$v=19$m=65536,t=3,p=4$2ohUAfh9yetl+utr4tLcCQ$AsXx0VlwjvNnCsa70u4HKZvFkC8Gwajr2pHGKcND/xs
email: bob@jim.com
groups:
- admin
- dev
'';
};
services.traefik = {
enable = true;
dynamicConfigOptions = {
tls.certificates =
let
certDir = pkgs.runCommand "selfSignedCerts" { buildInputs = [ pkgs.openssl ]; } ''
openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -nodes -subj '/CN=example.com/CN=auth.example.com/CN=static.example.com' -days 36500
mkdir -p $out
cp key.pem cert.pem $out
'';
in
[{
certFile = "${certDir}/cert.pem";
keyFile = "${certDir}/key.pem";
}];
http.middlewares.authelia.forwardAuth = {
address = "http://localhost:9091/api/verify?rd=https%3A%2F%2Fauth.example.com%2F";
trustForwardHeader = true;
authResponseHeaders = [
"Remote-User"
"Remote-Groups"
"Remote-Email"
"Remote-Name"
];
};
http.middlewares.authelia-basic.forwardAuth = {
address = "http://localhost:9091/api/verify?auth=basic";
trustForwardHeader = true;
authResponseHeaders = [
"Remote-User"
"Remote-Groups"
"Remote-Email"
"Remote-Name"
];
};
http.routers.simplehttp = {
rule = "Host(`static.example.com`)";
tls = true;
entryPoints = "web";
service = "simplehttp";
};
http.routers.simplehttp-basic-auth = {
rule = "Host(`static-basic-auth.example.com`)";
tls = true;
entryPoints = "web";
service = "simplehttp";
middlewares = [ "authelia-basic@file" ];
};
http.services.simplehttp = {
loadBalancer.servers = [{
url = "http://localhost:8000";
}];
};
http.routers.authelia = {
rule = "Host(`auth.example.com`)";
tls = true;
entryPoints = "web";
service = "authelia@file";
};
http.services.authelia = {
loadBalancer.servers = [{
url = "http://localhost:9091";
}];
};
};
staticConfigOptions = {
global = {
checkNewVersion = false;
sendAnonymousUsage = false;
};
entryPoints.web.address = ":443";
};
};
systemd.services.simplehttp =
let fakeWebPageDir = pkgs.writeTextDir "index.html" "hello"; in
{
script = "${pkgs.python3}/bin/python -m http.server --directory ${fakeWebPageDir} 8000";
serviceConfig.Type = "simple";
wantedBy = [ "multi-user.target" ];
};
};
};
testScript = ''
start_all()
authelia.wait_for_unit("simplehttp.service")
authelia.wait_for_unit("traefik.service")
authelia.wait_for_unit("authelia-testing.service")
authelia.wait_for_open_port(443)
authelia.wait_for_unit("multi-user.target")
with subtest("Check for authelia"):
# expect the login page
assert "Login - Authelia", "could not reach authelia" in \
authelia.succeed("curl --insecure -sSf -H Host:auth.example.com https://authelia:443/")
with subtest("Check contacting basic http server via traefik with https works"):
assert "hello", "could not reach raw static site" in \
authelia.succeed("curl --insecure -sSf -H Host:static.example.com https://authelia:443/")
with subtest("Test traefik and authelia"):
with subtest("No details fail"):
authelia.fail("curl --insecure -sSf -H Host:static-basic-auth.example.com https://authelia:443/")
with subtest("Incorrect details fail"):
authelia.fail("curl --insecure -sSf -u 'bob:wordpass' -H Host:static-basic-auth.example.com https://authelia:443/")
authelia.fail("curl --insecure -sSf -u 'alice:password' -H Host:static-basic-auth.example.com https://authelia:443/")
with subtest("Correct details pass"):
assert "hello", "could not reach authed static site with valid credentials" in \
authelia.succeed("curl --insecure -sSf -u 'bob:password' -H Host:static-basic-auth.example.com https://authelia:443/")
'';
})

View file

@ -145,7 +145,7 @@ in {
client2.succeed("[ $(consul kv get testkey) == 42 ]")
def rolling_reboot_test(proper_rolling_procedure=True):
def rolling_restart_test(proper_rolling_procedure=True):
"""
Tests that the cluster can tolerate failures of any single server,
following the recommended rolling upgrade procedure from
@ -158,7 +158,13 @@ in {
"""
for server in servers:
server.crash()
server.block()
server.systemctl("stop consul")
# Make sure the stopped peer is recognized as being down
client1.wait_until_succeeds(
f"[ $(consul members | grep {server.name} | grep -o -E 'failed|left' | wc -l) == 1 ]"
)
# For each client, wait until they have connection again
# using `kv get -recurse` before issuing commands.
@ -170,8 +176,8 @@ in {
client2.succeed("[ $(consul kv get testkey) == 43 ]")
client2.succeed("consul kv delete testkey")
# Restart crashed machine.
server.start()
server.unblock()
server.systemctl("start consul")
if proper_rolling_procedure:
# Wait for recovery.
@ -197,10 +203,14 @@ in {
"""
for server in servers:
server.crash()
server.block()
server.systemctl("stop --no-block consul")
for server in servers:
server.start()
# --no-block is async, so ensure it has been stopped by now
server.wait_until_fails("systemctl is-active --quiet consul")
server.unblock()
server.systemctl("start consul")
# Wait for recovery.
wait_for_healthy_servers()
@ -217,13 +227,13 @@ in {
# Run the tests.
print("rolling_reboot_test()")
rolling_reboot_test()
print("rolling_restart_test()")
rolling_restart_test()
print("all_servers_crash_simultaneously_test()")
all_servers_crash_simultaneously_test()
print("rolling_reboot_test(proper_rolling_procedure=False)")
rolling_reboot_test(proper_rolling_procedure=False)
print("rolling_restart_test(proper_rolling_procedure=False)")
rolling_restart_test(proper_rolling_procedure=False)
'';
})

View file

@ -1,12 +0,0 @@
[Hotkey]
SwitchKey=Disabled
IMSwitchHotkey=ALT_SHIFT
TimeInterval=240
[Program]
DelayStart=5
[Output]
[Appearance]

View file

@ -1,4 +0,0 @@
[Profile]
IMName=zhengma-large
EnabledIMList=fcitx-keyboard-us:True,zhengma-large:True,m17n_sa_harvard-kyoto:True
PreeditStringInClientWindow=False

View file

@ -0,0 +1,11 @@
[Hotkey]
EnumerateSkipFirst=False
[Hotkey/TriggerKeys]
0=Control+space
[Hotkey/EnumerateForwardKeys]
0=Alt+Shift_L
[Hotkey/EnumerateBackwardKeys]
0=Alt+Shift_R

View file

@ -1,64 +1,48 @@
import ../make-test-python.nix (
import ../make-test-python.nix ({ pkgs, ... }:
# copy_from_host works only for store paths
rec {
name = "fcitx5";
nodes.machine = { pkgs, ... }:
{
pkgs, ...
}:
# copy_from_host works only for store paths
rec {
name = "fcitx";
meta.broken = true; # takes hours to time out since October 2021
nodes.machine =
{
pkgs,
...
}:
{
imports = [
../common/user-account.nix
];
imports = [
../common/user-account.nix
];
environment.systemPackages = [
# To avoid clashing with xfce4-terminal
pkgs.alacritty
];
environment.systemPackages = [
# To avoid clashing with xfce4-terminal
pkgs.alacritty
];
services.xserver = {
enable = true;
displayManager = {
lightdm.enable = true;
autoLogin = {
enable = true;
user = "alice";
};
};
services.xserver =
{
enable = true;
desktopManager.xfce.enable = true;
};
displayManager = {
lightdm.enable = true;
autoLogin = {
enable = true;
user = "alice";
};
};
i18n.inputMethod = {
enabled = "fcitx5";
fcitx5.addons = [
pkgs.fcitx5-m17n
pkgs.fcitx5-chinese-addons
];
};
};
desktopManager.xfce.enable = true;
};
i18n = {
inputMethod = {
enabled = "fcitx";
fcitx.engines = [
pkgs.fcitx-engines.m17n
pkgs.fcitx-engines.table-extra
];
};
};
}
;
testScript = { nodes, ... }:
let
user = nodes.machine.config.users.users.alice;
userName = user.name;
userHome = user.home;
xauth = "${userHome}/.Xauthority";
fcitx_confdir = "${userHome}/.config/fcitx";
in
''
testScript = { nodes, ... }:
let
user = nodes.machine.users.users.alice;
xauth = "${user.home}/.Xauthority";
fcitx_confdir = "${user.home}/.config/fcitx5";
in
''
# We need config files before login session
# So copy first thing
@ -75,13 +59,13 @@ import ../make-test-python.nix (
start_all()
machine.wait_for_file("${xauth}")
machine.wait_for_file("${xauth}}")
machine.succeed("xauth merge ${xauth}")
machine.sleep(5)
machine.succeed("su - ${userName} -c 'alacritty&'")
machine.succeed("su - ${userName} -c 'fcitx&'")
machine.succeed("su - ${user.name} -c 'alacritty&'")
machine.succeed("su - ${user.name} -c 'fcitx5&'")
machine.sleep(10)
### Type on terminal
@ -109,8 +93,10 @@ import ../make-test-python.nix (
machine.send_key("ctrl-spc")
machine.sleep(1)
### Default zhengma, enter 一下
machine.send_chars("a2")
### Default wubi, enter 一下
machine.send_chars("gggh")
machine.sleep(1)
machine.send_key("\n")
machine.sleep(1)
### Switch to Harvard Kyoto
@ -134,9 +120,8 @@ import ../make-test-python.nix (
machine.screenshot("terminal_chars")
### Verify that file contents are as expected
file_content = machine.succeed("cat ${userHome}/fcitx_test.out")
file_content = machine.succeed("cat ${user.home}/fcitx_test.out")
assert file_content == "\n"
''
;
}
)
;
})

View file

@ -0,0 +1,15 @@
[Groups/0]
Name=NixOS_test
Default Layout=us
DefaultIM=wbx
[Groups/0/Items/0]
Name=wbx
Layout=us
[Groups/0/Items/1]
Name=m17n_sa_harvard-kyoto
Layout=us
[GroupOrder]
0=NixOS_test

View file

@ -0,0 +1,97 @@
{ system ? builtins.currentSystem
, config ? {}
, pkgs ? import ../.. {inherit system config; }
, systemdStage1 ? false }:
import ./make-test-python.nix ({ lib, pkgs, ... }: let
keyfile = pkgs.writeText "luks-keyfile" ''
MIGHAoGBAJ4rGTSo/ldyjQypd0kuS7k2OSsmQYzMH6TNj3nQ/vIUjDn7fqa3slt2
gV6EK3TmTbGc4tzC1v4SWx2m+2Bjdtn4Fs4wiBwn1lbRdC6i5ZYCqasTWIntWn+6
FllUkMD5oqjOR/YcboxG8Z3B5sJuvTP9llsF+gnuveWih9dpbBr7AgEC
'';
in {
name = "initrd-luks-empty-passphrase";
nodes.machine = { pkgs, ... }: {
virtualisation = {
emptyDiskImages = [ 512 ];
useBootLoader = true;
useEFIBoot = true;
};
boot.loader.systemd-boot.enable = true;
boot.initrd.systemd = lib.mkIf systemdStage1 {
enable = true;
emergencyAccess = true;
};
environment.systemPackages = with pkgs; [ cryptsetup ];
specialisation.boot-luks-wrong-keyfile.configuration = {
boot.initrd.luks.devices = lib.mkVMOverride {
cryptroot = {
device = "/dev/vdc";
keyFile = "/etc/cryptroot.key";
tryEmptyPassphrase = true;
fallbackToPassword = !systemdStage1;
};
};
virtualisation.bootDevice = "/dev/mapper/cryptroot";
boot.initrd.secrets."/etc/cryptroot.key" = keyfile;
};
specialisation.boot-luks-missing-keyfile.configuration = {
boot.initrd.luks.devices = lib.mkVMOverride {
cryptroot = {
device = "/dev/vdc";
keyFile = "/etc/cryptroot.key";
tryEmptyPassphrase = true;
fallbackToPassword = !systemdStage1;
};
};
virtualisation.bootDevice = "/dev/mapper/cryptroot";
};
};
testScript = ''
# Format the volume with an empty passphrase so boot tries the keyfile first and then falls back to the empty passphrase
def grub_select_boot_luks_wrong_key_file():
"""
Selects "boot-luks" from the GRUB menu
to trigger a login request.
"""
machine.send_monitor_command("sendkey down")
machine.send_monitor_command("sendkey down")
machine.send_monitor_command("sendkey ret")
def grub_select_boot_luks_missing_key_file():
"""
Selects "boot-luks" from the GRUB menu
to trigger a login request.
"""
machine.send_monitor_command("sendkey down")
machine.send_monitor_command("sendkey ret")
# Create encrypted volume
machine.wait_for_unit("multi-user.target")
machine.succeed("echo "" | cryptsetup luksFormat /dev/vdc --batch-mode")
machine.succeed("bootctl set-default nixos-generation-1-specialisation-boot-luks-wrong-keyfile.conf")
machine.succeed("sync")
machine.crash()
# Check if rootfs is on /dev/mapper/cryptroot
machine.wait_for_unit("multi-user.target")
assert "/dev/mapper/cryptroot on / type ext4" in machine.succeed("mount")
# Choose boot-luks-missing-keyfile specialisation
machine.succeed("bootctl set-default nixos-generation-1-specialisation-boot-luks-missing-keyfile.conf")
machine.succeed("sync")
machine.crash()
# Check if rootfs is on /dev/mapper/cryptroot
machine.wait_for_unit("multi-user.target")
assert "/dev/mapper/cryptroot on / type ext4" in machine.succeed("mount")
'';
})

View file

@ -50,6 +50,13 @@ in
mutableConfig = false;
extraConfig.SupportSettings.HelpLink = "https://search.nixos.org";
};
environmentFile = makeMattermost {
mutableConfig = false;
extraConfig.SupportSettings.AboutLink = "https://example.org";
environmentFile = pkgs.writeText "mattermost-env" ''
MM_SUPPORTSETTINGS_ABOUTLINK=https://nixos.org
'';
};
};
testScript = let
@ -69,6 +76,7 @@ in
rm -f $mattermostConfig
echo "$newConfig" > "$mattermostConfig"
'';
in
''
start_all()
@ -120,5 +128,13 @@ in
# Our edits should be ignored on restart
immutable.succeed("${expectConfig ''.AboutLink == "https://nixos.org" and .HelpLink == "https://search.nixos.org"''}")
## Environment File node tests ##
environmentFile.wait_for_unit("mattermost.service")
environmentFile.wait_for_open_port(8065)
# Settings in the environment file should override settings set otherwise
environmentFile.succeed("${expectConfig ''.AboutLink == "https://nixos.org"''}")
'';
})

View file

@ -8,7 +8,7 @@ expected_lines = {
"auth sufficient pam_rootok.so",
"auth sufficient pam_unix.so likeauth try_first_pass",
"password sufficient @@pam_krb5@@/lib/security/pam_krb5.so use_first_pass",
"password sufficient pam_unix.so nullok sha512",
"password sufficient pam_unix.so nullok yescrypt",
"session optional @@pam_krb5@@/lib/security/pam_krb5.so",
"session required pam_env.so conffile=/etc/pam/environment readenv=0",
"session required pam_unix.so",

View file

@ -12,6 +12,7 @@ let
then pkgs.zfsUnstable.latestCompatibleLinuxPackages
else pkgs.linuxPackages
, enableUnstable ? false
, enableSystemdStage1 ? false
, extraTest ? ""
}:
makeTest {
@ -36,6 +37,7 @@ let
boot.kernelPackages = kernelPackage;
boot.supportedFilesystems = [ "zfs" ];
boot.zfs.enableUnstable = enableUnstable;
boot.initrd.systemd.enable = enableSystemdStage1;
environment.systemPackages = [ pkgs.parted ];
@ -176,6 +178,11 @@ in {
enableUnstable = true;
};
unstableWithSystemdStage1 = makeZfsTest "unstable" {
enableUnstable = true;
enableSystemdStage1 = true;
};
installer = (import ./installer.nix { }).zfsroot;
expand-partitions = makeTest {

View file

@ -26,13 +26,13 @@ assert withQt -> wrapQtAppsHook != null;
stdenv.mkDerivation rec {
pname = "carla";
version = "2.5.3";
version = "2.5.4";
src = fetchFromGitHub {
owner = "falkTX";
repo = pname;
rev = "v${version}";
hash = "sha256-J0C3GLdlLMkm3LHl6l3OI2rA73A6z5MMcNJ1I1T0pbI=";
hash = "sha256-St0+avF9/UzQj8T1eZq5HSmxnaK9+BXSuufyX0NJYbU=";
};
nativeBuildInputs = [

View file

@ -4,11 +4,11 @@
stdenv.mkDerivation rec {
pname = "ebumeter";
version = "0.4.2";
version = "0.5.1";
src = fetchurl {
url = "https://kokkinizita.linuxaudio.org/linuxaudio/downloads/${pname}-${version}.tar.bz2";
sha256 = "1wm9j1phmpicrp7jdsvdbc3mghdd92l61yl9qbps0brq2ljjyd5s";
url = "https://kokkinizita.linuxaudio.org/linuxaudio/downloads/${pname}-${version}.tar.xz";
hash = "sha256-U2ZpNfvy+X1RdA9Q4gvFYzAxlgc6kYjJpQ/0sEX0A4I=";
};
buildInputs = [

File diff suppressed because it is too large

View file

@ -42,10 +42,11 @@ stdenv.mkDerivation rec {
})
];
cargoDeps = rustPlatform.fetchCargoTarball {
inherit src;
name = "${pname}-${version}";
sha256 = "0y34b5rnr75h7dxbx93mafrmwsh187wq5js7fmkb1m1yyybj1v1x";
cargoDeps = rustPlatform.importCargoLock {
lockFile = ./Cargo.lock;
outputHashes = {
"gettext-rs-0.4.2" = "sha256-wyZ1bf0oFcQo8gEi2GEalRUoKMoJYHysu79qcfjd4Ng=";
};
};
nativeBuildInputs = [

View file

@ -1,4 +1,4 @@
{ mkDerivation, lib, fetchFromGitHub, pkg-config, sconsPackages, qtbase, lash, libjack2, jack ? libjack2, alsa-lib
{ mkDerivation, lib, fetchFromGitHub, pkg-config, scons, qtbase, lash, libjack2, jack ? libjack2, alsa-lib
, fetchpatch
}:
@ -22,7 +22,7 @@ mkDerivation rec {
})
];
nativeBuildInputs = [ sconsPackages.scons_latest pkg-config ];
nativeBuildInputs = [ scons pkg-config ];
buildInputs = [
qtbase
lash

View file

@ -2,7 +2,7 @@
, stdenv
, fetchFromGitHub
, pkg-config
, sconsPackages
, scons
, rubberband
, boost
, libjack2
@ -25,7 +25,7 @@ stdenv.mkDerivation rec {
nativeBuildInputs = [
pkg-config
rubberband
sconsPackages.scons_latest
scons
];
buildInputs = [ libsamplerate libsndfile liblo libjack2 boost ];
prefixKey = "PREFIX=";

View file

@ -8,6 +8,7 @@
, alsa-lib
, libpulseaudio
, fftw
, fftwFloat
, json_c
, libjack2
, jackSupport ? true
@ -33,6 +34,7 @@ stdenv.mkDerivation rec {
alsa-lib
libpulseaudio
fftw
fftwFloat
json_c
] ++ lib.optional jackSupport libjack2;

View file

@ -5,11 +5,11 @@
stdenv.mkDerivation rec {
pname = "lsp-plugins";
version = "1.2.5";
version = "1.2.6";
src = fetchurl {
url = "https://github.com/sadko4u/${pname}/releases/download/${version}/${pname}-src-${version}.tar.gz";
sha256 = "sha256-YYrt+FbpY7iEui0aw4Ce94BW1SHDk0OH8gFSzkW2fkw=";
sha256 = "sha256-lNrIsXW3ZNKMFwsl5qowWqK/ZaCaQUAlrSscnsOxvVg=";
};
outputs = [ "out" "dev" "doc" ];
@ -19,6 +19,8 @@ stdenv.mkDerivation rec {
makeFlags = [
"PREFIX=${placeholder "out"}"
"ETCDIR=${placeholder "out"}/etc"
"SHAREDDIR=${placeholder "out"}/share"
];
env.NIX_CFLAGS_COMPILE = "-DLSP_NO_EXPERIMENTAL";
@ -34,138 +36,58 @@ stdenv.mkDerivation rec {
meta = with lib;
{ description = "Collection of open-source audio plugins";
longDescription = ''
Compatible with follwing formats:
Compatible with the following formats:
- CLAP - set of plugins for Clever Audio Plugins API
- LADSPA - set of plugins for Linux Audio Developer's Simple Plugin API
- LV2 - set of plugins and UIs for Linux Audio Developer's Simple Plugin API (LADSPA) version 2
- LinuxVST - set of plugins and UIs for Steinberg's VST 2.4 format ported on GNU/Linux Platform
- JACK - Standalone versions for JACK Audio connection Kit with UI
Contains the following plugins:
Contains the following plugins (https://lsp-plug.in/?page=plugins)
- Limiter Mono - Begrenzer Mono
- Limiter Stereo - Begrenzer Stereo
- Dynamic Processor LeftRight - Dynamikprozessor LeftRight
- Dynamic Processor MidSide - Dynamikprozessor MidSide
- Dynamic Processor Mono - Dynamikprozessor Mono
- Dynamic Processor Stereo - Dynamikprozessor Stereo
- Expander LeftRight - Expander LeftRight
- Expander MidSide - Expander MidSide
- Expander Mono - Expander Mono
- Expander Stereo - Expander Stereo
- Crossover LeftRight x8 - Frequenzweiche LeftRight x8
- Crossover MidSide x8 - Frequenzweiche MidSide x8
- Crossover Mono x8 - Frequenzweiche Mono x8
- Crossover Stereo x8 - Frequenzweiche Stereo x8
- Gate LeftRight - Gate LeftRight
- Gate MidSide - Gate MidSide
- Gate Mono - Gate Mono
- Gate Stereo - Gate Stereo
- Graphic Equalizer x16 LeftRight - Grafischer Entzerrer x16 LeftRight
- Graphic Equalizer x16 MidSide - Grafischer Entzerrer x16 MidSide
- Graphic Equalizer x16 Mono - Grafischer Entzerrer x16 Mono
- Graphic Equalizer x16 Stereo - Grafischer Entzerrer x16 Stereo
- Graphic Equalizer x32 LeftRight - Grafischer Entzerrer x32 LeftRight
- Graphic Equalizer x32 MidSide - Grafischer Entzerrer x32 MidSide
- Graphic Equalizer x32 Mono - Grafischer Entzerrer x32 Mono
- Graphic Equalizer x32 Stereo - Grafischer Entzerrer x32 Stereo
- Impulse Responses Mono - Impulsantworten Mono
- Impulse Responses Stereo - Impulsantworten Stereo
- Impulse Reverb Mono - Impulsnachhall Mono
- Impulse Reverb Stereo - Impulsnachhall Stereo
- Sampler Mono - Klangerzeuger Mono
- Sampler Stereo - Klangerzeuger Stereo
- Compressor LeftRight - Kompressor LeftRight
- Compressor MidSide - Kompressor MidSide
- Compressor Mono - Kompressor Mono
- Compressor Stereo - Kompressor Stereo
- Artistic Delay Mono - Künstlerische Verzögerung
- Artistic Delay Stereo - Künstlerische Verzögerung
- Latency Meter - Latenzmessgerät
- Loudness Compensator Mono - Lautstärke Kompensator Mono
- Loudness Compensator Stereo - Lautstärke Kompensator Stereo
- Multiband Expander LeftRight x8 - Multi-band Expander LeftRight x8
- Multiband Expander MidSide x8 - Multi-band Expander MidSide x8
- Multiband Expander Mono x8 - Multi-band Expander Mono x8
- Multiband Expander Stereo x8 - Multi-band Expander Stereo x8
- Multiband Gate LeftRight x8 - Multi-band Gate LeftRight x8
- Multiband Gate MidSide x8 - Multi-band Gate MidSide x8
- Multiband Gate Mono x8 - Multi-band Gate Mono x8
- Multiband Gate Stereo x8 - Multi-band Gate Stereo x8
- Multiband Compressor LeftRight x8 - Multi-band Kompressor LeftRight x8
- Multiband Compressor MidSide x8 - Multi-band Kompressor MidSide x8
- Multiband Compressor Mono x8 - Multi-band Kompressor Mono x8
- Multiband Compressor Stereo x8 - Multi-band Kompressor Stereo x8
- Oscilloscope x1 - Oscilloscope x1
- Oscilloscope x2 - Oscilloscope x2
- Oscilloscope x4 - Oscilloscope x4
- Oscillator Mono - Oszillator Mono
- Parametric Equalizer x16 LeftRight - Parametrischer Entzerrer x16 LeftRight
- Parametric Equalizer x16 MidSide - Parametrischer Entzerrer x16 MidSide
- Parametric Equalizer x16 Mono - Parametrischer Entzerrer x16 Mono
- Parametric Equalizer x16 Stereo - Parametrischer Entzerrer x16 Stereo
- Parametric Equalizer x32 LeftRight - Parametrischer Entzerrer x32 LeftRight
- Parametric Equalizer x32 MidSide - Parametrischer Entzerrer x32 MidSide
- Parametric Equalizer x32 Mono - Parametrischer Entzerrer x32 Mono
- Parametric Equalizer x32 Stereo - Parametrischer Entzerrer x32 Stereo
- Phase Detector - Phasendetektor
- Profiler Mono - Profiler Mono
- Profiler Stereo - Profiler Stereo
- Room Builder Mono - Raumbaumeister Mono
- Room Builder Stereo - Raumbaumeister Stereo
- Multi-Sampler x12 DirectOut - Schlagzeug x12 Direktausgabe
- Multi-Sampler x12 Stereo - Schlagzeug x12 Stereo
- Multi-Sampler x24 DirectOut - Schlagzeug x24 Direktausgabe
- Multi-Sampler x24 Stereo - Schlagzeug x24 Stereo
- Multi-Sampler x48 DirectOut - Schlagzeug x48 Direktausgabe
- Multi-Sampler x48 Stereo - Schlagzeug x48 Stereo
- Sidechain Multiband Expander LeftRight x8 - Sidechain Multi-band Expander LeftRight x8
- Sidechain Multiband Expander MidSide x8 - Sidechain Multi-band Expander MidSide x8
- Sidechain Multiband Expander Mono x8 - Sidechain Multi-band Expander Mono x8
- Sidechain Multiband Expander Stereo x8 - Sidechain Multi-band Expander Stereo x8
- Sidechain Multiband Gate LeftRight x8 - Sidechain Multi-band Gate LeftRight x8
- Sidechain Multiband Gate MidSide x8 - Sidechain Multi-band Gate MidSide x8
- Sidechain Multiband Gate Mono x8 - Sidechain Multi-band Gate Mono x8
- Sidechain Multiband Gate Stereo x8 - Sidechain Multi-band Gate Stereo x8
- Sidechain Multiband Compressor LeftRight x8 - Sidechain Multi-band Kompressor LeftRight x8
- Sidechain Multiband Compressor MidSide x8 - Sidechain Multi-band Kompressor MidSide x8
- Sidechain Multiband Compressor Mono x8 - Sidechain Multi-band Kompressor Mono x8
- Sidechain Multiband Compressor Stereo x8 - Sidechain Multi-band Kompressor Stereo x8
- Sidechain Limiter Mono - Sidechain-Begrenzer Mono
- Sidechain Limiter Stereo - Sidechain-Begrenzer Stereo
- Sidechain Dynamic Processor LeftRight - Sidechain-Dynamikprozessor LeftRight
- Sidechain Dynamic Processor MidSide - Sidechain-Dynamikprozessor MidSide
- Sidechain Dynamic Processor Mono - Sidechain-Dynamikprozessor Mono
- Sidechain Dynamic Processor Stereo - Sidechain-Dynamikprozessor Stereo
- Sidechain Expander LeftRight - Sidechain-Expander LeftRight
- Sidechain Expander MidSide - Sidechain-Expander MidSide
- Sidechain Expander Mono - Sidechain-Expander Mono
- Sidechain Expander Stereo - Sidechain-Expander Stereo
- Sidechain Gate LeftRight - Sidechain-Gate LeftRight
- Sidechain Gate MidSide - Sidechain-Gate MidSide
- Sidechain Gate Mono - Sidechain-Gate Mono
- Sidechain Gate Stereo - Sidechain-Gate Stereo
- Sidechain Compressor LeftRight - Sidechain-Kompressor LeftRight
- Sidechain Compressor MidSide - Sidechain-Kompressor MidSide
- Sidechain Compressor Mono - Sidechain-Kompressor Mono
- Sidechain Compressor Stereo - Sidechain-Kompressor Stereo
- Slapback Delay Mono - Slapback-Delay Mono
- Slapback Delay Stereo - Slapback-Delay Stereo
- Spectrum Analyzer x1 - Spektrumanalysator x1
- Spectrum Analyzer x12 - Spektrumanalysator x12
- Spectrum Analyzer x16 - Spektrumanalysator x16
- Spectrum Analyzer x2 - Spektrumanalysator x2
- Spectrum Analyzer x4 - Spektrumanalysator x4
- Spectrum Analyzer x8 - Spektrumanalysator x8
- Surge Filter Mono - Sprungfilter Mono
- Surge Filter Stereo - Sprungfilter Stereo
- Trigger MIDI Mono - Triggersensor MIDI Mono
- Trigger MIDI Stereo - Triggersensor MIDI Stereo
- Trigger Mono - Triggersensor Mono
- Trigger Stereo - Triggersensor Stereo
- Delay Compensator Mono - Verzögerungsausgleicher Mono
- Delay Compensator Stereo - Verzögerungsausgleicher Stereo
- Delay Compensator x2 Stereo - Verzögerungsausgleicher x2 Stereo
Equalizers:
- Graphic Equalizer
- Parametric Equalizer
Dynamic Processing:
- Compressor
- Dynamic Processor
- Expander
- Gate
- Limiter
Multiband Dynamic Processing:
- Multiband Compressor
- Multiband Dynamic Processor
- Multiband Expander
- Multiband Gate
Convolution / Reverb processing:
- Impulse Responses
- Impulse Reverb
- Room Builder
Delay Effects:
- Artistic Delay
- Compensation Delay
- Slap-back Delay
Analyzers:
- Oscilloscope
- Phase Detector
- Spectrum Analyzer
Multiband Processing:
- Crossover
Samplers:
- Multisampler
- Sampler
Generators / Oscillators:
- Noise Generator
- Oscillator
Utility Plugins:
- A/B Test Plugin
- Latency Meter
- Loudness Compensator
- Mixer
- Profiler
- Surge Filter
- Trigger
'';
homepage = "https://lsp-plug.in";
maintainers = with maintainers; [ magnetophon ];

View file

@@ -35,6 +35,6 @@ rustPlatform.buildRustPackage rec {
homepage = "https://github.com/mrene/minidsp-rs";
license = licenses.asl20;
platforms = platforms.linux ++ platforms.darwin;
maintainers = [maintainers.adamcstephens];
maintainers = [maintainers.adamcstephens maintainers.mrene];
};
}

View file

@@ -15,14 +15,14 @@
stdenv.mkDerivation rec {
pname = "mmlgui";
version = "unstable-2022-10-13";
version = "unstable-2023-03-19";
src = fetchFromGitHub {
owner = "superctr";
repo = "mmlgui";
rev = "6b2687504644b481db403f032f463e38bbbb1dca";
rev = "59ac28c0008e227c03799cce85b77f96241159b1";
fetchSubmodules = true;
sha256 = "OkYVjdvi8ls01DaIiDEPFXFCUh6g9AEeSlbDFfdPKeo=";
sha256 = "0CHRUizhg/WOWhDOsFqRiGu/m/U7xt5du8Uvnl7kxpU=";
};
postPatch = ''

View file

@@ -15,7 +15,7 @@
, musepackSupport ? true, libmpc, libmpcdec, taglib
, vorbisSupport ? true, libvorbis
, speexSupport ? true, speex
, ffmpegSupport ? true, ffmpeg
, ffmpegSupport ? true, ffmpeg_4
, sndfileSupport ? true, libsndfile
, wavpackSupport ? true, wavpack
# Misc
@@ -56,7 +56,7 @@ stdenv.mkDerivation rec {
++ lib.optionals musepackSupport [ libmpc libmpcdec taglib ]
++ lib.optional vorbisSupport libvorbis
++ lib.optional speexSupport speex
++ lib.optional ffmpegSupport ffmpeg
++ lib.optional ffmpegSupport ffmpeg_4
++ lib.optional sndfileSupport libsndfile
++ lib.optional wavpackSupport wavpack
# Misc

View file

@@ -32,13 +32,13 @@
stdenv.mkDerivation rec {
pname = "musikcube";
version = "0.99.5";
version = "0.99.6";
src = fetchFromGitHub {
owner = "clangen";
repo = pname;
rev = version;
sha256 = "sha256-SbWL36GRIJPSvxZyj6sebJxTkSPsUcsKyC3TmcIq2O0";
sha256 = "sha256-D25P254iaOsS0TyAKAiarDP37D4U9Dw7mdvUin/Qblc=";
};
outputs = [ "out" "dev" ];

View file

@@ -1,4 +1,4 @@
{ lib, stdenv, fetchFromGitHub, cmake, eigen, ffmpeg }:
{ lib, stdenv, fetchFromGitHub, cmake, eigen, ffmpeg_4 }:
stdenv.mkDerivation {
pname = "musly";
version = "unstable-2017-04-26";
@@ -9,7 +9,7 @@ stdenv.mkDerivation {
sha256 = "1q42wvdwy2pac7bhfraqqj2czw7w2m33ms3ifjl8phm7d87i8825";
};
nativeBuildInputs = [ cmake ];
buildInputs = [ eigen ffmpeg ];
buildInputs = [ eigen ffmpeg_4 ];
fixupPhase = lib.optionalString stdenv.isDarwin ''
install_name_tool -change libmusly.dylib $out/lib/libmusly.dylib $out/bin/musly
install_name_tool -change libmusly_resample.dylib $out/lib/libmusly_resample.dylib $out/bin/musly

File diff suppressed because it is too large

View file

@@ -30,9 +30,11 @@ stdenv.mkDerivation rec {
hash = "sha256-A3mvf6TZ3+aiWA6rg9G5NMaDKvO0VQzwIM1t0MaTpTc=";
};
cargoDeps = rustPlatform.fetchCargoTarball {
inherit src;
hash = "sha256-Y7rZTbg0zd/eoo6E8TmV8JJPs1N0bLlBjvB6W07Kelg=";
cargoDeps = rustPlatform.importCargoLock {
lockFile = ./Cargo.lock;
outputHashes = {
"netease-cloud-music-api-1.0.2" = "sha256-7Yp2ZBg5wHnDPtdPLwZQnqcSlVuGCrXpV5M/dp/IaOE=";
};
};
nativeBuildInputs = [

View file

@@ -2,21 +2,21 @@
buildGoModule rec {
pname = "NoiseTorch";
version = "0.12.0";
version = "0.12.2";
src = fetchFromGitHub {
owner = "noisetorch";
repo = "NoiseTorch";
rev = "v${version}";
sha256 = "sha256-A6cX1ck47/ZIn9cnV/Ow4CxVFfOX5J0K0Q+B70jCFdQ=";
fetchSubmodules = true;
sha256 = "sha256-gOPSMPH99Upi/30OnAdwSb7SaMV0i/uHB051cclfz6A=";
};
vendorHash = null;
doCheck = false;
ldflags = [ "-s" "-w" "-X main.version=${version}" "-X main.distribution=nix" ];
ldflags = [ "-s" "-w" "-X main.version=${version}" "-X main.distribution=nixpkgs" ];
subPackages = [ "." ];
@@ -32,9 +32,6 @@ buildGoModule rec {
'';
meta = with lib; {
insecure = true;
knownVulnerabilities =
lib.optional (lib.versionOlder version "0.12") "https://github.com/noisetorch/NoiseTorch/releases/tag/v0.12.0";
description = "Virtual microphone device with noise supression for PulseAudio";
homepage = "https://github.com/noisetorch/NoiseTorch";
license = licenses.gpl3Plus;

View file

@@ -1,4 +1,4 @@
{lib, stdenv, fetchurl, sconsPackages, boost, ladspaH, pkg-config }:
{lib, stdenv, fetchurl, scons, boost, ladspaH, pkg-config }:
stdenv.mkDerivation {
version = "0.2-2";
@@ -9,7 +9,7 @@ stdenv.mkDerivation {
sha256 = "16064vvl2w5lz4xi3lyjk4xx7fphwsxc14ajykvndiz170q32s6i";
};
nativeBuildInputs = [ pkg-config sconsPackages.scons_latest ];
nativeBuildInputs = [ pkg-config scons ];
buildInputs = [ boost ladspaH ];
patchPhase = ''
@@ -17,6 +17,7 @@ stdenv.mkDerivation {
sed -i -e '4d' SConstruct
sed -i 's@Options@Variables@g' SConstruct
sed -i "s@-fomit-frame-pointer -ffast-math -mfpmath=sse@-I ${boost.dev}/include@g" SConstruct
sed -i "s@env.has_key('cxx')@'cxx' in env@g" SConstruct
sed -i "s@ladspa.h@${ladspaH}/include/ladspa.h@g" filters.cpp
sed -i "s@LADSPA_HINT_SAMPLE_RATE, 0, 0.5@LADSPA_HINT_SAMPLE_RATE, 0.0001, 0.5@g" filters.cpp
sed -i "s/= check/= detail::filter_base<internal_type, checked>::check/" nova/source/dsp/filter.hpp

File diff suppressed because it is too large

View file

@@ -25,7 +25,15 @@ rustPlatform.buildRustPackage rec {
hash = "sha256-ZKhHN0ruLb6ZVKkrKv/YawRsVop6SP1QF/nrtkmA8P8=";
};
cargoSha256 = "sha256-TDxoRWQAzrgPElEEDNYkk3XX2i+LnNLMuY/J3pb3Xlk=";
cargoLock = {
lockFile = ./Cargo.lock;
outputHashes = {
"cubeb-0.10.1" = "sha256-PRQL8dq5BAsodbVlm5SnuzUDLg9/UY3BmoumcmWF+aY=";
"druid-0.7.0" = "sha256-fnsm+KGsuePLRRjTecJ0GBQEySSeDIth13AX/aAigqU=";
"druid-enums-0.1.0" = "sha256-4fo0ywoK+m4OuqYlbNbJS2BZK/VBFqeAYEFNGnGUVmM=";
"piet-0.5.0" = "sha256-hCg8vABnLAO8egFwMtRSpRdzH6auETrICoUfuBZVzz8=";
};
};
# specify the subdirectory of the binary crate to build from the workspace
buildAndTestSubdir = "psst-gui";

View file

@@ -16,6 +16,10 @@ mkDerivation rec {
buildInputs = [ alsa-lib fluidsynth libjack2 qtbase qttools qtx11extras ];
enableParallelBuilding = true;
# Missing install depends:
# lrelease error: Parse error at src/translations/qsynth_ru.ts:1503:33: Premature end of document.
# make: *** [Makefile:107: src/translations/qsynth_ru.qm] Error 1
enableParallelInstalling = false;
meta = with lib; {
description = "Fluidsynth GUI";

View file

@@ -5,11 +5,11 @@
stdenv.mkDerivation rec {
pname = "snd";
version = "23.1";
version = "23.2";
src = fetchurl {
url = "mirror://sourceforge/snd/snd-${version}.tar.gz";
sha256 = "sha256-zNXA/HFUCTxKzeKde/XVK239dInnxkPBmxXrlicl1fI=";
sha256 = "sha256-MLBFK34RgpEoK2reA+Ik35pY5YuzetDU1Wz9yAPQhEc=";
};
nativeBuildInputs = [ pkg-config ];

View file

@@ -1,4 +1,4 @@
{ lib, stdenv, fetchurl, alsa-lib, boost, bzip2, fftw, fftwFloat, libfishsound
{ lib, stdenv, fetchurl, fetchpatch2, alsa-lib, boost, bzip2, fftw, fftwFloat, libfishsound
, libid3tag, liblo, libmad, liboggz, libpulseaudio, libsamplerate
, libsndfile, lrdf, opusfile, portaudio, rubberband, serd, sord, capnproto
, wrapQtAppsHook, pkg-config
@@ -14,6 +14,16 @@ stdenv.mkDerivation rec {
sha256 = "0k45k9fawcm4s5yy05x00pgww7j8m7k2cxcc7g0fn9vqy7vcbq9h";
};
patches = [
(fetchpatch2 {
url = "https://github.com/sonic-visualiser/svcore/commit/5a7b517e43b7f0b3f03b7fc3145102cf4e5b0ffc.patch";
stripLen = 1;
extraPrefix = "svcore/";
sha256 = "sha256-DOCdQqCihkR0g/6m90DbJxw00QTpyVmFzCxagrVWKiI=";
})
./match-vamp.patch
];
buildInputs =
[ alsa-lib boost bzip2 fftw fftwFloat libfishsound libid3tag liblo
libmad liboggz libpulseaudio libsamplerate libsndfile lrdf opusfile
@@ -36,7 +46,5 @@ stdenv.mkDerivation rec {
license = licenses.gpl2Plus;
maintainers = [ maintainers.vandenoever ];
platforms = platforms.linux;
# undefined reference to `std::__throw_bad_array_new_length()@GLIBCXX_3.4.29'
broken = true; # at 2022-09-30
};
}

View file

@@ -0,0 +1,11 @@
--- a/match/src/FullDTW.h
+++ b/match/src/FullDTW.h
@@ -83,7 +83,7 @@
* against the best-matching subsequence of s1; otherwise it is
* against the whole of s1.
*/
- std::vector<size_t> align(const featureseq_t &s1,
+ std::vector<std::size_t> align(const featureseq_t &s1,
const featureseq_t &s2);
private:

View file

@@ -5,10 +5,10 @@
let
pname = "sonixd";
version = "0.15.4";
version = "0.15.5";
src = fetchurl {
url = "https://github.com/jeffvli/sonixd/releases/download/v${version}/Sonixd-${version}-linux-x86_64.AppImage";
sha256 = "sha256-n4n16S8ktPiVc0iyjVNNIyo9oEIBwGIuzj0xgm/ETeo=";
sha256 = "sha256-j8B+o/CJ5SsZPMNbugyP3T9Kb+xuxlVxH02loxlwwDg=";
};
appimageContents = appimageTools.extractType2 { inherit pname version src; };
in

View file

@@ -1,4 +1,4 @@
{ lib, stdenv, fetchFromGitHub, autoreconfHook, intltool, pkg-config, ffmpeg, wxGTK32, gtk3, wrapGAppsHook }:
{ lib, stdenv, fetchFromGitHub, autoreconfHook, intltool, pkg-config, ffmpeg_4, wxGTK32, gtk3, wrapGAppsHook }:
stdenv.mkDerivation rec {
pname = "spek";
@@ -13,7 +13,7 @@ stdenv.mkDerivation rec {
nativeBuildInputs = [ autoreconfHook intltool pkg-config wrapGAppsHook ];
buildInputs = [ ffmpeg wxGTK32 gtk3 ];
buildInputs = [ ffmpeg_4 wxGTK32 gtk3 ];
meta = with lib; {
description = "Analyse your audio files by showing their spectrogram";

View file

@@ -25,13 +25,13 @@
stdenv.mkDerivation rec {
pname = "tauon";
version = "7.6.0";
version = "7.6.2";
src = fetchFromGitHub {
owner = "Taiko2k";
repo = "TauonMusicBox";
rev = "v${version}";
hash = "sha256-oQ3mcDrWWIT/2fu4MBw+0GjxWGFw1aLSTvmaKSDsdz4=";
hash = "sha256-x/tHCDplC45XEaBaf0aQ0w8AS1SorXtYilJoiOcBDtM=";
};
postUnpack = ''

View file

@@ -30,7 +30,7 @@
, expat
, libid3tag
, libopus
, ffmpeg
, ffmpeg_4
, soundtouch
, pcre
, portaudio
@@ -110,7 +110,7 @@ stdenv.mkDerivation rec {
buildInputs = [
alsa-lib
expat
ffmpeg
ffmpeg_4
file
flac
glib

View file

@@ -5,11 +5,11 @@
mkDerivation rec {
pname = "vmpk";
version = "0.8.7";
version = "0.8.8";
src = fetchurl {
url = "mirror://sourceforge/${pname}/${version}/${pname}-${version}.tar.bz2";
sha256 = "sha256-0y1XS+I3bmNrJ65LT0LyTd8aSLXVlVZFFDZwgxVDLGk=";
sha256 = "sha256-+NjTcszb1KXGynIcCf4IEDvN4f8pgXtR1TksxGR5ZHQ=";
};
nativeBuildInputs = [ cmake pkg-config qttools docbook-xsl-nons ];

View file

@@ -58,7 +58,7 @@ stdenv.mkDerivation rec {
homepage = "https://www.bitcoinunlimited.info/";
maintainers = with maintainers; [ DmitryTsygankov ];
license = licenses.mit;
broken = stdenv.isDarwin;
broken = true;
platforms = platforms.unix;
};
}

View file

@@ -54,5 +54,6 @@ stdenv.mkDerivation rec {
license = licenses.mit;
maintainers = with maintainers; [ edwtjo offline ];
platforms = platforms.unix;
broken = true;
};
}

Some files were not shown because too many files have changed in this diff