Compare commits
No commits in common. "f0381076a4c7dec6a85787855cde06593ab67d9b" and "18cc5d7cd9045f683a7f217ebf0286432334809b" have entirely different histories.
f0381076a4 ... 18cc5d7cd9
11608 changed files with 40851 additions and 30856 deletions
3  third_party/nixpkgs/.git-blame-ignore-revs  (vendored)
@@ -203,6 +203,3 @@ ce21e97a1f20dee15da85c084f9d1148d84f853b
# sqlc: format with nixfmt
2bdec131b2bb2c8563f4556d741d34ccb77409e2

# treewide: migrate packages to pkgs/by-name, take 1
571c71e6f73af34a229414f51585738894211408
@@ -1,27 +0,0 @@
---
name: Module requests
about: For NixOS modules that you would like to see
title: 'Module request: MODULENAME'
labels: '9.needs: module (new)'
assignees: ''

---

### Description

<!-- Describe what the module should accomplish: -->

### Notify maintainers

<!-- If applicable, tag the maintainers of the package that corresponds to the module. If the search.nixos.org result shows no maintainers, tag the person that last updated the package. -->

-----

Note for maintainers: Please tag this issue in your PR.

---

Add a :+1: [reaction] to [issues you find important].

[reaction]: https://github.blog/2016-03-10-add-reactions-to-pull-requests-issues-and-comments/
[issues you find important]: https://github.com/NixOS/nixpkgs/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc
24  third_party/nixpkgs/ci/OWNERS  (vendored)
@@ -206,12 +206,6 @@ pkgs/data/misc/cacert/ @ajs124 @lukegb @mweinelt
pkgs/development/libraries/nss/ @ajs124 @lukegb @mweinelt
pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt

# Java
/doc/languages-frameworks/java.section.md @NixOS/java
/doc/languages-frameworks/gradle.section.md @NixOS/java
/doc/languages-frameworks/maven.section.md @NixOS/java
/pkgs/top-level/java-packages.nix @NixOS/java

# Jetbrains
/pkgs/applications/editors/jetbrains @edwtjo

@@ -235,7 +229,7 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt
/pkgs/servers/sql/postgresql @NixOS/postgres
/nixos/modules/services/databases/postgresql.md @NixOS/postgres
/nixos/modules/services/databases/postgresql.nix @NixOS/postgres
/nixos/tests/postgresql @NixOS/postgres
/nixos/tests/postgresql.nix @NixOS/postgres

# Hardened profile & related modules
/nixos/modules/profiles/hardened.nix @joachifm

@@ -254,13 +248,13 @@ pkgs/development/python-modules/buildcatrust/ @ajs124 @lukegb @mweinelt

# Network Time Daemons
/pkgs/by-name/ch/chrony @thoughtpolice
/pkgs/by-name/nt/ntp @thoughtpolice
/pkgs/by-name/op/openntpd @thoughtpolice
/pkgs/tools/networking/ntp @thoughtpolice
/pkgs/tools/networking/openntpd @thoughtpolice
/nixos/modules/services/networking/ntp @thoughtpolice

# Network
/pkgs/by-name/ke/kea @mweinelt
/pkgs/by-name/ba/babeld @mweinelt
/pkgs/tools/networking/kea/default.nix @mweinelt
/pkgs/tools/networking/babeld/default.nix @mweinelt
/nixos/modules/services/networking/babeld.nix @mweinelt
/nixos/modules/services/networking/kea.nix @mweinelt
/nixos/modules/services/networking/knot.nix @mweinelt

@@ -413,12 +407,12 @@ pkgs/by-name/lx/lxc* @adamcstephens

# GNU Tar & Zip
/pkgs/tools/archivers/gnutar @RossComputerGuy
/pkgs/by-name/zi/zip @RossComputerGuy
/pkgs/tools/archivers/zip @RossComputerGuy

# SELinux
/pkgs/by-name/ch/checkpolicy @RossComputerGuy
/pkgs/by-name/li/libselinux @RossComputerGuy
/pkgs/by-name/li/libsepol @RossComputerGuy
/pkgs/os-specific/linux/checkpolicy @RossComputerGuy
/pkgs/os-specific/linux/libselinux @RossComputerGuy
/pkgs/os-specific/linux/libsepol @RossComputerGuy

# installShellFiles
/pkgs/by-name/in/installShellFiles/* @Ericson2314
@@ -1,4 +0,0 @@
# `aws-c-common` {#aws-c-common}

This hook exposes its own [CMake](#cmake) modules by setting [`CMAKE_MODULE_PATH`](https://cmake.org/cmake/help/latest/variable/CMAKE_MODULE_PATH.html) through [the `cmakeFlags` variable](#cmake-flags)
to the nonstandard `$out/lib/cmake` directory, as a workaround for [an upstream bug](https://github.com/awslabs/aws-c-common/issues/844).
1  third_party/nixpkgs/doc/hooks/index.md  (vendored)
@@ -8,7 +8,6 @@ The stdenv built-in hooks are documented in [](#ssec-setup-hooks).
autoconf.section.md
automake.section.md
autopatchelf.section.md
aws-c-common.section.md
bmake.section.md
breakpoint.section.md
cernlib.section.md
@@ -2,301 +2,242 @@

## Darwin (macOS) {#sec-darwin}

The Darwin `stdenv` differs from most other ones in Nixpkgs in a few key ways.
These differences reflect the default assumptions for building software on that platform.
In many cases, you can ignore these differences because the software you are packaging is already written with them in mind.
When you do that, write your derivation as normal. You don’t have to include any Darwin-specific special cases.
The easiest way to know whether your derivation requires special handling for Darwin is to write it as if it doesn’t and see if it works.
If it does, you’re done; skip the rest of this.
Some common issues when packaging software for Darwin:

- Darwin uses Clang by default instead of GCC. Packages that refer to `$CC` or `cc` should just work in most cases.
  Some packages may hardcode `gcc` or `g++`. You can usually fix that by setting `makeFlags = [ "CC=cc" "CXX=c++" ]`, as in the sketch after this list.
  If that does not work, you will have to patch the build scripts yourself to use the correct compiler for Darwin.
- Darwin needs an SDK to build software.
  The SDK provides a default set of frameworks and libraries to build software, most of which are specific to Darwin.
  There are multiple versions of the SDK packaged in Nixpkgs, but one is included by default in the `stdenv`.
  Usually, you don’t have to change or pick a different SDK. When in doubt, use the default.
- The SDK used by your build can be found using the `DEVELOPER_DIR` environment variable.
  There are also versions of this variable available when cross-compiling, depending on the SDK’s role.
  The `SDKROOT` variable is also set with the path to the SDK’s libraries and frameworks.
  `SDKROOT` is always a sub-folder of `DEVELOPER_DIR`.
- Darwin includes a platform-specific tool called `xcrun` to help builds locate binaries they need.
  A version of `xcrun` is part of the `stdenv` on Darwin.
  If your package invokes `xcrun` via an absolute path (such as `/usr/bin/xcrun`), you will need to patch the build scripts to use `xcrun` instead.
- The Darwin `stdenv` uses clang instead of gcc. When referring to the compiler, `$CC` or `cc` will work in both cases. Some builds hardcode gcc/g++ in their build scripts; that can usually be fixed with something like `makeFlags = [ "CC=cc" ];` or by patching the build scripts.
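
A minimal sketch of the first bullet (hypothetical package, not part of this diff), pointing a Makefile that hardcodes `gcc`/`g++` at the `stdenv` compilers:

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3"; # hypothetical package, following the examples below
  # ...
  # Force the build to use the stdenv compilers instead of hardcoded gcc/g++.
  makeFlags = [
    "CC=cc"
    "CXX=c++"
  ];
}
```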

To reiterate: you usually don’t have to worry about this stuff.
Start with writing your derivation as if everything is already set up for you (because in most cases it already is).
If you run into issues or failures, continue reading below for how to deal with the most common issues you may encounter.

### Darwin Issue Troubleshooting {#sec-darwin-troubleshooting}

#### Package requires a non-default SDK or fails to build due to missing frameworks or symbols {#sec-darwin-troubleshooting-using-sdks}

In some cases, you may have to use a non-default SDK.
This can happen when a package requires APIs that are not present in the default SDK.
For example, Metal Performance Shaders were added in macOS 12.
If the default SDK is 11.3, then a package that requires Metal Performance Shaders will fail to build due to missing frameworks and symbols.

To use a non-default SDK, add it to your derivation’s `buildInputs`.
It is not necessary to override the SDK in the `stdenv` nor is it necessary to override the SDK used by your dependencies.
If your derivation needs a non-default SDK at build time (e.g., for a `depsBuildBuild` compiler), see the cross-compilation documentation for which input you should use.

When determining whether to use a non-default SDK, consider the following:

- Try building your derivation with the default SDK. If it works, you’re done.
- If the package specifies a specific version, use that. See below for how to map Xcode version to SDK version.
- If the package’s documentation indicates it supports optional features on newer SDKs, consider using the SDK that enables those features.
  If you’re not sure, use the default SDK.

Note: It is possible to have multiple, different SDK versions in your inputs.
When that happens, the one with the highest version is always used.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  buildInputs = [ apple-sdk_14 ];
}
```

#### What is a “deployment target” (or minimum version)? {#sec-darwin-troubleshooting-using-deployment-targets}

The “deployment target” refers to the minimum version of macOS that is expected to run an application.
In most cases, the default is fine, and you don’t have to do anything else.
If you’re not sure, don’t do anything, and that will probably be fine.

Some packages require setting a non-default deployment target (or minimum version) to gain access to certain APIs.
You do that using the `darwinMinVersionHook`, which takes the deployment target version as a parameter.
There are primarily two ways to determine the deployment target.

- The upstream documentation will specify a deployment target or minimum version. Use that.
- The build will fail because an API requires a certain version. Use that.
- In all other cases, you probably don’t need to specify a minimum version. The default is usually good enough.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3"; # Upstream specifies the minimum supported version as 12.5.
  buildInputs = [ (darwinMinVersionHook "12.5") ];
}
```

Note: It is possible to have multiple, different instances of `darwinMinVersionHook` in your inputs.
When that happens, the one with the highest version is always used.

#### Picking an SDK version {#sec-darwin-troubleshooting-picking-sdk-version}

The following is a list of Xcode versions, the SDK version in Nixpkgs, and the attribute to use to add it.
Check your package’s documentation (platform support or installation instructions) to find which Xcode or SDK version to use.
Generally, only the last SDK release for a major version is packaged (each _x_ in 10._x_ until 10.15 is considered a major version).

| Xcode version      | SDK version                                        | Nixpkgs attribute |
|--------------------|----------------------------------------------------|-------------------|
| Varies by platform | 10.12.2 (x86_64-darwin)<br/>11.3 (aarch64-darwin)  | `apple-sdk`       |
| 8.0–8.3.3          | 10.12.2                                            | `apple-sdk_10_12` |
| 9.0–9.4.1          | 10.13.2                                            | `apple-sdk_10_13` |
| 10.0–10.3          | 10.14.6                                            | `apple-sdk_10_14` |
| 11.0–11.7          | 10.15.6                                            | `apple-sdk_10_15` |
| 12.0–12.5.1        | 11.3                                               | `apple-sdk_11`    |
| 13.0–13.4.1        | 12.3                                               | `apple-sdk_12`    |
| 14.0–14.3.1        | 13.3                                               | `apple-sdk_13`    |
| 15.0–15.4          | 14.4                                               | `apple-sdk_14`    |
| 16.0               | 15.0                                               | `apple-sdk_15`    |

#### Darwin Default SDK versions {#sec-darwin-troubleshooting-darwin-defaults}

The current default versions of the deployment target (minimum version) and SDK are indicated by Darwin-specific attributes on the platform. Because of the ways that minimum version and SDK can be changed that are not visible to Nix, they should be treated as lower bounds.
If you need to parameterize over a specific version, create a function that takes the version as a parameter instead of relying on these attributes.

- `darwinMinVersion` defaults to 10.12 on x86_64-darwin and 11.0 on aarch64-darwin.
  It sets the default deployment target.
- `darwinSdkVersion` defaults to 10.12 on x86_64-darwin and 11.0 on aarch64-darwin.
  Only the major version determines the SDK version, resulting in the 10.12.2 and 11.3 SDKs being used on these platforms respectively.
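
These platform attributes can still be read from Nix when you only need a lower-bound check. A minimal sketch (not from this diff; the CMake flag is a hypothetical example):

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  # Treat the platform attribute as a lower bound: only pass the extra flag
  # when the SDK is known to be at least 11.0.
  cmakeFlags = lib.optionals (lib.versionAtLeast stdenv.hostPlatform.darwinSdkVersion "11.0") [
    "-DUSE_METAL=ON" # hypothetical flag
  ];
}
```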

#### `xcrun` cannot find a binary {#sec-darwin-troubleshooting-xcrun}

`xcrun` searches `PATH` and the SDK’s toolchain for binaries to run.
If it cannot find a required binary, it will fail. When that happens, add the package for that binary to your derivation’s `nativeBuildInputs` (or `nativeCheckInputs` if the failure is happening when running tests).

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  nativeBuildInputs = [ bison ];
  buildCommand = ''
    xcrun bison foo.y # produces foo.tab.c
    # ...
  '';
}
```

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  buildPhase = ''
    $CC -o hello hello.c
  '';
}
```

#### Package requires `xcodebuild` {#sec-darwin-troubleshooting-xcodebuild}

- On Darwin, libraries are linked using absolute paths; libraries are resolved by their `install_name` at link time. Sometimes packages won’t set this correctly, causing the library lookups to fail at runtime. This can be fixed by adding extra linker flags or by running `install_name_tool -id` during the `fixupPhase`.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  makeFlags = lib.optional stdenv.hostPlatform.isDarwin "LDFLAGS=-Wl,-install_name,$(out)/lib/libfoo.dylib";
}
```

The xcbuild package provides an `xcodebuild` command for packages that really depend on Xcode.
This replacement is not 100% compatible and may run into some issues, but it is able to build many packages.
To use `xcodebuild`, add `xcbuildHook` to your package’s `nativeBuildInputs`.
It will provide a `buildPhase` for your derivation.
You can use `xcbuildFlags` to specify flags to `xcodebuild` such as the required schema.
If a schema has spaces in its name, you must set `__structuredAttrs` to `true`.
See MoltenVK for an example of setting up xcbuild.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  xcbuildFlags = [
    "-configuration"
    "Release"
    "-project"
    "libfoo-project.xcodeproj"
    "-scheme"
    "libfoo Package (macOS only)"
  ];
  __structuredAttrs = true;
}
```

- Even if the libraries are linked using absolute paths and resolved via their `install_name` correctly, tests can sometimes fail to run binaries. This happens because the `checkPhase` runs before the libraries are installed.
  This can usually be solved by running the tests after the `installPhase` or alternatively by using `DYLD_LIBRARY_PATH`. More information about this variable can be found in the *dyld(1)* manpage.

```
dyld: Library not loaded: /nix/store/7hnmbscpayxzxrixrgxvvlifzlxdsdir-jq-1.5-lib/lib/libjq.1.dylib
  Referenced from: /private/tmp/nix-build-jq-1.5.drv-0/jq-1.5/tests/../jq
  Reason: image not found
./tests/jqtest: line 5: 75779 Abort trap: 6
```

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  doInstallCheck = true;
  installCheckTarget = "check";
}
```

##### Fixing absolute paths to `xcodebuild`, `xcrun`, and `PlistBuddy` {#sec-darwin-troubleshooting-xcodebuild-absolute-paths}

Many build systems hardcode the absolute paths to `xcodebuild`, `xcrun`, and `PlistBuddy` as `/usr/bin/xcodebuild`, `/usr/bin/xcrun`, and `/usr/libexec/PlistBuddy` respectively.
These paths will need to be replaced with relative paths and the xcbuild package if `xcodebuild` or `PlistBuddy` are used.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  postPatch = ''
    substituteInPlace Makefile \
      --replace-fail '/usr/bin/xcodebuild' 'xcodebuild' \
      --replace-fail '/usr/bin/xcrun' 'xcrun' \
      --replace-fail '/usr/libexec/PlistBuddy' 'PlistBuddy'
  '';
}
```

#### How to use libiconv on Darwin {#sec-darwin-troubleshooting-libiconv}

- Some packages assume Xcode is available and use `xcrun` to resolve build tools like `clang`, etc. The Darwin stdenv includes `xcrun`, and it will return the path to any binary available in a build.

The libiconv package is included in the SDK by default along with libresolv and libsbuf.
You do not need to do anything to use these packages. They are available automatically.
If your derivation needs the `iconv` binary, add the `libiconv` package to your `nativeBuildInputs` (or `nativeCheckInputs` for tests), as in the sketch below.
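
A minimal sketch of that (hypothetical package, not part of this diff):

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  # Provides the `iconv` command-line tool at build time; linking against the
  # libiconv library itself needs no extra inputs because it is part of the SDK.
  nativeBuildInputs = [ libiconv ];
}
```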

The package `xcbuild` can be used to build projects that really depend on Xcode. However, this replacement is not 100% compatible with Xcode and can occasionally cause issues.

#### Library install name issues {#sec-darwin-troubleshooting-install-name}

Libraries on Darwin are usually linked with absolute paths.
This is determined by something called an “install name”, which is resolved at link time.
Sometimes packages will not set this correctly, causing binaries linking to it not to find their libraries at runtime.
This can be fixed by adding extra linker flags or by using `install_name_tool` to set it in `fixupPhase`.

Note: Some packages may hardcode an absolute path to `xcrun`, `xcodebuild`, or `xcode-select`. Those paths should be removed or replaced.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  prePatch = ''
    substituteInPlace Makefile \
      --replace-fail /usr/bin/xcrun xcrun
    # or: --replace-fail /usr/bin/xcrun '${lib.getExe' buildPackages.xcbuild "xcrun"}'
  '';
}
```

##### Setting the install name via linker flags {#sec-darwin-troubleshooting-install-name-linker-flags}

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  makeFlags = lib.optional stdenv.hostPlatform.isDarwin "LDFLAGS=-Wl,-install_name,$(out)/lib/libfoo.dylib";
}
```

- Multiple SDKs are available for use in nixpkgs. Each platform has a default SDK (10.12.2 for x86_64-darwin and 11.3 for aarch64-darwin), which is available as the `apple-sdk` package.

The SDK provides the necessary headers and text-based stubs to link common frameworks and libraries (such as libSystem, which is effectively Darwin’s libc). Projects will sometimes indicate which SDK to use by the Xcode version. As a rule of thumb, subtract one from the Xcode version to get the available SDK in nixpkgs.

The `DEVELOPER_DIR` variable in the build environment has the path to the SDK in the build environment. The `SDKROOT` variable there contains a sysroot with the framework, header, and library paths. You can reference an SDK’s sysroot from Nix using the `sdkroot` attribute on the SDK package. Note that it is preferable to use `SDKROOT` because the latter will be resolved to the highest SDK version of any available to your derivation.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  env.PACKAGE_SPECIFIC_SDK_VAR = apple-sdk_10_15.sdkroot;
  # or
  buildInputs = [ apple-sdk_10_15 ];
  postPatch = ''
    export PACKAGE_SPECIFIC_SDK_VAR=$SDKROOT
  '';
}
```

The following is a list of Xcode versions, the SDK version in nixpkgs, and the attribute to use to add it. Generally, only the last SDK release for a major version is packaged (each _x_ in 10._x_ until 10.15 is considered a major version).

| Xcode version      | SDK version                                        | nixpkgs attribute |
|--------------------|----------------------------------------------------|-------------------|
| Varies by platform | 10.12.2 (x86_64-darwin)<br/>11.3 (aarch64-darwin)  | `apple-sdk`       |
| 8.0–8.3.3          | 10.12.2                                            | `apple-sdk_10_12` |
| 9.0–9.4.1          | 10.13.2                                            | `apple-sdk_10_13` |
| 10.0–10.3          | 10.14.6                                            | `apple-sdk_10_14` |
| 11.0–11.7          | 10.15.6                                            | `apple-sdk_10_15` |
| 12.0–12.5.1        | 11.3                                               | `apple-sdk_11`    |
| 13.0–13.4.1        | 12.3                                               | `apple-sdk_12`    |
| 14.0–14.3.1        | 13.3                                               | `apple-sdk_13`    |
| 15.0–15.4          | 14.4                                               | `apple-sdk_14`    |
| 16.0               | 15.0                                               | `apple-sdk_15`    |

To use a non-default SDK, add it to your build inputs.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  buildInputs = [ apple-sdk_15 ]; # Uses the 15.0 SDK instead of the default SDK for the platform.
}
```

If your derivation has multiple SDKs in its inputs (e.g., because they have been propagated by its dependencies), it will use the highest SDK version available.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3"; # Upstream specifies that it needs Xcode 12 to build, so use the 11.3 SDK.
  # ...
  buildInputs = [ apple-sdk_11 ];
  nativeBuildInputs = [ swift ]; # Propagates the 13.3 SDK, so the 13.3 SDK package will be used instead of the 11.3 SDK.
}
```

- When a package indicates a minimum supported version, also called the deployment target, you can set it in your derivation using `darwinMinVersionHook`. If you need to set a minimum version higher than the default SDK, you should also add the corresponding SDK to your `buildInputs`.

The deployment target controls how Darwin handles availability and access to some APIs. In most cases, if a deployment target is newer than the first availability of an API, that API will be linked directly. Otherwise, the API will be weakly linked and checked at runtime.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3"; # Upstream specifies the minimum supported version as 12.5.
  buildInputs = [ (darwinMinVersionHook "12.5") ];
}
```

If your derivation has multiple versions of this hook in its inputs (e.g., because it has been propagated by one of your dependencies), it will use the highest deployment target available.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3"; # Upstream specifies the minimum supported version as 10.15.
  buildInputs = [ qt6.qtbase (darwinMinVersionHook "10.15") ];
}
# Qt 6 specifies a minimum version of 12.0, so the minimum version resolves to 12.0.
```

- You should rely on the default SDK when possible. If a package specifies a required SDK version, use that version (e.g., libuv requires 11.0, so it should use `apple-sdk_11`). When a package supports multiple SDKs, determine which SDK package to use based on the following rules of thumb:

  - If a package supports multiple SDK versions, use the lowest SDK version supported by the package (but no lower than the default SDK). That ensures maximal platform compatibility for the package.

  - If a package specifies a range of supported SDK versions _and_ a minimum supported version, assume the package is using availability checks to support the indicated minimum version. Add the highest supported SDK and a `darwinMinVersionHook` set to the minimum version supported by the upstream package.

Warning: Avoid using newer SDKs than an upstream package supports. When a binary is linked on Darwin, the SDK version used to build it is recorded in the binary. Runtime behavior can vary based on the SDK version, which may work fine but can also result in unexpected behavior or crashes when building with an unsupported SDK.

```nix
stdenv.mkDerivation {
  name = "foo-1.2.3";
  # ...
  buildInputs = [ apple-sdk_15 (darwinMinVersionHook "10.15") ]; # Upstream builds with the 15.0 SDK but supports 10.15.
}
```

- Libraries that require a minimum version can propagate an appropriate SDK and `darwinMinVersionHook`. Derivations using that library will automatically use an appropriate SDK and minimum version. Even if the library builds with a newer SDK, it should propagate the minimum supported SDK. Derivations that need a newer SDK can add it to their `buildInputs`.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  buildInputs = [ apple-sdk_15 ]; # Upstream builds with the 15.0 SDK but supports 10.15.
  propagatedBuildInputs = [ apple-sdk_10_15 (darwinMinVersionHook "10.15") ];
}

stdenv.mkDerivation {
  name = "bar-1.2.3";
  # ...
  buildInputs = [ libfoo ]; # Builds with the 10.15 SDK
}

stdenv.mkDerivation {
  name = "baz-1.2.3";
  # ...
  buildInputs = [ apple-sdk_12 libfoo ]; # Builds with the 12.3 SDK
}
```

##### Setting the install name using `install_name_tool` {#sec-darwin-troubleshooting-install-name-install_name_tool}

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  postFixup = ''
    # `-id <install_name>` takes the install name. The last parameter is the path to the library.
    ${stdenv.cc.targetPrefix}install_name_tool -id "$out/lib/libfoo.dylib" "$out/lib/libfoo.dylib"
  '';
}
```

Even if libraries are linked using absolute paths and resolved via their install name correctly, tests in `checkPhase` can sometimes fail to run binaries because they are linked against libraries that have not yet been installed.
This can usually be solved by running the tests after the `installPhase` or by using `DYLD_LIBRARY_PATH` (see {manpage}`dyld(1)` for more on setting `DYLD_LIBRARY_PATH`).

- Many SDK libraries and frameworks use text-based stubs to link against system libraries and frameworks, but several are built from source (typically corresponding to the source releases for the latest release of macOS). Several of these are propagated to your package automatically. They can be accessed via the `darwin` package set along with others that are not propagated by default.
  - libiconv
  - libresolv
  - libsbuf

##### Setting the install name using `fixDarwinDylibNames` hook {#sec-darwin-troubleshooting-install-name-fixDarwinDylibNames}

If your package has numerous dylibs that need to be fixed, you can update them all by adding the `fixDarwinDylibNames` hook to your `nativeBuildInputs`, though it is preferable to fix the issue in the package’s build.
This hook will scan your package’s outputs for dylibs and correct their install names.
Note that if any binaries in your outputs linked those dylibs, you may need to use `install_name_tool` to replace references to them with the correct paths.
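
A minimal sketch of using the hook (hypothetical package, not part of this diff):

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  # Rewrites the install names of all dylibs found in the outputs during fixup.
  nativeBuildInputs = [ fixDarwinDylibNames ];
}
```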

Other common libraries are available in Darwin-specific versions with modifications from Apple. Note that these packages may be made the default on Darwin in the future.

- ICU (compatible with the top-level icu package, but it also provides `libicucore.B.dylib` with an ABI compatible with the Darwin system version)
- libpcap (compatible with the top-level libpcap, but it includes Darwin-specific extensions)

#### Propagating an SDK (advanced, compilers-only) {#sec-darwin-troubleshooting-propagating-sdks}

The SDK is a package, and it can be propagated.
`darwinMinVersionHook` with a version specified can also be propagated.
However, most packages should *not* do this.
The exception is compilers.
When you propagate an SDK, it becomes part of your derivation’s public API, and changing the SDK or removing it can be a breaking change.
That is why propagating it is only recommended for compilers.

- The legacy SDKs packages are still available in the `darwin` package set under their existing names, but all packages in these SDKs (frameworks, libraries, etc.) are stub packages for evaluation compatibility.

When authoring a compiler derivation, propagate the SDK only for the ways you expect users to use your compiler.
Depending on your expected use cases, you may have to do one or all of these (a minimal sketch of the first option follows at the end of this section):

- Put it in `depsTargetTargetPropagated` when your compiler is expected to be added to `nativeBuildInputs`.
  That will ensure the SDK is effectively part of the target derivation’s `buildInputs`.
- If your compiler uses a hook, put it in the hook’s `depsTargetTargetPropagated` instead.
  The effect should be the same as the above.
- If your package uses the builder pattern, update your builder to add the SDK to the derivation’s `buildInputs`.

In most cases, a derivation can be updated by deleting all of its SDK inputs (frameworks, libraries, etc.). If you had to override the SDK, see below for how to do that using the new SDK pattern. If your derivation depends on the layout of the old frameworks or other internal details, you have more work to do.

When a package depended on the location of frameworks, references to those framework packages can usually be replaced with `${apple-sdk.sdkroot}/System` or `$SDKROOT/System`. For example, if you substituted `${darwin.apple_sdk.frameworks.OpenGL}/Library/Frameworks/OpenGL.framework` in your derivation, you should replace it with `${apple-sdk.sdkroot}/System/Library/Frameworks/OpenGL.framework` or `$SDKROOT/System/Library/Frameworks`. The latter is preferred because it supports using the SDK that is resolved when multiple SDKs are propagated (see above).

If you’re not sure whether to propagate an SDK, don’t.
If your package is a compiler or language, and you’re not sure, ask @NixOS/darwin-maintainers for help deciding.

Note: the new SDK pattern uses the name `apple-sdk` to better align with nixpkgs naming conventions. The old SDK pattern uses `apple_sdk`.
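
As an illustration of the first option, a minimal sketch of a compiler derivation propagating the default SDK to its users (the package name is hypothetical; only `depsTargetTargetPropagated` and `apple-sdk` come from the text above):

```nix
stdenv.mkDerivation {
  name = "my-cc-wrapper-1.0"; # hypothetical compiler wrapper
  # ...
  # Users are expected to put this compiler in `nativeBuildInputs`;
  # propagating the SDK here makes it effectively part of their `buildInputs`.
  depsTargetTargetPropagated = [ apple-sdk ];
}
```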

### Dealing with `darwin.apple_sdk.frameworks` {#sec-darwin-legacy-frameworks}

You may see references to `darwin.apple_sdk.frameworks`.
This is the legacy SDK pattern, and it is being phased out.
All packages in `darwin.apple_sdk`, `darwin.apple_sdk_11_0`, and `darwin.apple_sdk_12_3` are stubs that do nothing.
If your derivation references them, you can delete them. The default SDK should be enough to build your package.

- There are two legacy patterns that are being phased out. These patterns were used in the past to change the SDK version. They have been reimplemented to use the `apple-sdk` packages.
  - `pkgs.darwin.apple_sdk_11_0.callPackage` - this pattern was used to provide frameworks from the 11.0 SDK. It now adds the `apple-sdk_11` package to your derivation’s build inputs.
  - `overrideSDK` - this stdenv adapter would try to replace the frameworks used by your derivation and its transitive dependencies. It now adds the `apple-sdk_11` package for `11.0` or the `apple-sdk_12` package for `12.3`. If `darwinMinVersion` is specified, it will add `darwinMinVersionHook` with the specified minimum version. No other SDK versions are supported.

Note: the new SDK pattern uses the name `apple-sdk` to better align with Nixpkgs naming conventions.
The legacy SDK pattern uses `apple_sdk`.
You always know you are using the old SDK pattern if the name is `apple_sdk`.

- Darwin supports cross-compilation between Darwin platforms. Cross-compilation from Linux is not currently supported but may be supported in the future. To cross-compile to Darwin, you can set `crossSystem` or use one of the Darwin systems in `pkgsCross`. The `darwinMinVersionHook` and the SDKs support cross-compilation. If you need to specify a different SDK version for a `depsBuildBuild` compiler, add it to your `nativeBuildInputs`.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  depsBuildBuild = [ buildPackages.stdenv.cc ];
  nativeBuildInputs = [ apple-sdk_12 ];
  buildInputs = [ apple-sdk_13 ];
  depsTargetTargetPropagated = [ apple-sdk_14 ];
}
# The build-build clang will use the 12.3 SDK while the package build itself will use the 13.3 SDK.
# Derivations that add this package as an input will have the 14.4 SDK propagated to them.
```

Some derivations may depend on the location of frameworks in those old packages.
To update your derivation to find them in the new SDK, use `$SDKROOT` instead in `preConfigure`.
For example, if you substitute `${darwin.apple_sdk.frameworks.OpenGL}/Library/Frameworks/OpenGL.framework` in `postPatch`, replace it with `$SDKROOT/System/Library/Frameworks/OpenGL.framework` in `preConfigure`.
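
A minimal sketch of that substitution (the file name `CMakeLists.txt` is a placeholder, not from this diff):

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  preConfigure = ''
    # $SDKROOT is resolved by the shell at build time to the SDK chosen for this derivation.
    substituteInPlace CMakeLists.txt \
      --replace-fail "/System/Library/Frameworks/OpenGL.framework" \
        "$SDKROOT/System/Library/Frameworks/OpenGL.framework"
  '';
}
```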

Note that if your derivation is changing a system path (such as `/System/Library/Frameworks/OpenGL.framework`), you may be able to remove the path.
Compilers and binutils targeting Darwin look for system paths in the SDK sysroot.
Some of them (such as Zig or `bindgen` for Rust) depend on it.

The different target SDK and hooks are mangled based on role:

- `DEVELOPER_DIR_FOR_BUILD` and `MACOSX_DEPLOYMENT_TARGET_FOR_BUILD` for the build platform;
- `DEVELOPER_DIR` and `MACOSX_DEPLOYMENT_TARGET` for the host platform; and
- `DEVELOPER_DIR_FOR_TARGET` and `MACOSX_DEPLOYMENT_TARGET_FOR_TARGET` for the target platform.

In static compilation situations, it is possible for the build and host platform to be the same platform but have different SDKs with the same version (one dynamic and one static). cc-wrapper takes care of handling this distinction.

#### Updating legacy SDK overrides {#sec-darwin-legacy-frameworks-overrides}

The legacy SDK provided two ways of overriding the default SDK.
These are both being phased out along with the legacy SDKs.
They have been updated to set up the new SDK for you, but you should replace them with doing that directly.

- `pkgs.darwin.apple_sdk_11_0.callPackage` - this pattern was used to provide frameworks from the 11.0 SDK.
  It now adds the `apple-sdk_11` package to your derivation’s build inputs.
- `overrideSDK` - this stdenv adapter would try to replace the frameworks used by your derivation and its transitive dependencies.
  It now adds the `apple-sdk_11` package for `11.0` or the `apple-sdk_12` package for `12.3`.
  If `darwinMinVersion` is specified, it will add `darwinMinVersionHook` with the specified minimum version.
  No other SDK versions are supported.

- The current default versions of the deployment target (minimum version) and SDK are indicated by Darwin-specific attributes on the platform. Because of the ways that minimum version and SDK can be changed that are not visible to Nix, they should be treated as lower bounds. If you need to parameterize over a specific version, create a function that takes the version as a parameter instead of relying on these attributes.
  - `darwinMinVersion` defaults to 10.12 on x86_64-darwin and 11.0 on aarch64-darwin. It sets the default `MACOSX_DEPLOYMENT_TARGET`.
  - `darwinSdkVersion` defaults to 10.12 on x86_64-darwin and 11.0 on aarch64-darwin. Only the major version determines the SDK version, resulting in the 10.12.2 and 11.3 SDKs being used on these platforms respectively.

### Darwin Cross-Compilation {#sec-darwin-legacy-cross-compilation}

Darwin supports cross-compilation between Darwin platforms.
Cross-compilation from Linux is not currently supported but may be supported in the future.
To cross-compile to Darwin, you can set `crossSystem` or use one of the Darwin systems in `pkgsCross`.
The `darwinMinVersionHook` and the SDKs support cross-compilation.
If you need to specify a different SDK version for a `depsBuildBuild` compiler, add it to your `nativeBuildInputs`.

```nix
stdenv.mkDerivation {
  name = "libfoo-1.2.3";
  # ...
  depsBuildBuild = [ buildPackages.stdenv.cc ];
  nativeBuildInputs = [ apple-sdk_12 ];
  buildInputs = [ apple-sdk_13 ];
  depsTargetTargetPropagated = [ apple-sdk_14 ];
}
# The build-build `clang` will use the 12.3 SDK while the package build itself will use the 13.3 SDK.
# Derivations that add this package as an input will have the 14.4 SDK propagated to them.
```

The different target SDK and hooks are mangled based on role:

- `DEVELOPER_DIR_FOR_BUILD` and `MACOSX_DEPLOYMENT_TARGET_FOR_BUILD` for the build platform;
- `DEVELOPER_DIR` and `MACOSX_DEPLOYMENT_TARGET` for the host platform; and
- `DEVELOPER_DIR_FOR_TARGET` and `MACOSX_DEPLOYMENT_TARGET_FOR_TARGET` for the target platform.

In static compilation situations, it is possible for the build and host platform to be the same platform but have different SDKs with the same version (one dynamic and one static).
cc-wrapper and bintools-wrapper take care of handling this distinction.
2  third_party/nixpkgs/lib/minver.nix  (vendored)
@@ -1,2 +1,2 @@
# Expose the minimum required version for evaluating Nixpkgs
"2.3.17"
"2.3"
51  third_party/nixpkgs/maintainers/README.md  (vendored)
@@ -175,54 +175,3 @@ for further information.

# nixpkgs-merge-bot

To streamline autoupdates, leverage the nixpkgs-merge-bot by commenting `@NixOS/nixpkgs-merge-bot merge` if the package resides in pkgs-by-name and the commenter is among the package maintainers. The bot ensures that all ofborg checks, except for darwin, are successfully completed before merging the pull request. Should the checks still be underway, the bot patiently waits for ofborg to finish before attempting the merge again.

# Guidelines for Committers

When merging pull requests, care must be taken to reduce impact to the `master` branch. If a commit breaks evaluation, it will affect Ofborg evaluation results in other pull requests and block Hydra CI, thus introducing chaos to our workflow.

One approach to avoid merging such problematic changes is to wait for successful Ofborg evaluation. Additionally, using tools like [nixpkgs-review](https://github.com/Mic92/nixpkgs-review) can help spot issues early, before Ofborg finishes evaluation.

## Breaking changes

In general, breaking changes to the `master` and `staging` branches are permitted, as long as they are documented in the release notes. Though restrictions might apply towards the end of a NixOS release cycle, due to our feature freeze mechanism. This is to avoid large-scale breakages shortly before and during a Zero Hydra Failures (ZHF) campaign. These restrictions also intend to decrease the likelihood of a delayed NixOS release. The feature freeze period is documented in the announcement of each release schedule.

> These are some example changes and whether they are considered a breaking change during a freeze period:
>
> - `foo: 1.2.3 -> 1.2.4` - Assuming this package follows semantic versioning and none of its dependent packages fail to build because of this change, it can be safely merged. Otherwise, if it can be confirmed that there is no major change in its functionality or API, but only adding new features or fixing bugs, it can also be merged.
> - `unmaintained-software: drop` - If this PR removes a leaf package or the removal doesn't otherwise break other packages, it can be merged.
> - `cool-tool: rename from fancy-tool` - As long as this PR replaces all references to the old attribute name with the new name and adds an alias, it can be merged.
> - `libpopular: 4.3.2 -> 5.0.0` - If this PR would trigger many rebuilds and/or target `staging`, it should probably be delayed until after the freeze period is over. Alternatively, if this PR is for a popular package and doesn't cause many rebuilds, it should also be delayed to reduce risk of breakage. If a PR includes important changes, such as security fixes, it should be brought up to release managers.
> - `nixos/transmission: refactor` - If this PR adjusts the type, default value or effect of options in the NixOS module, so that users must rewrite their configuration to keep the current behavior unchanged, it should not be merged, as we don't have enough time to collect user feedback and avoid possible breakage. However, it should be accepted if the current behavior is considered broken and is fixed by the PR.
@@ -3536,12 +3536,6 @@
    github = "scaredmushroom";
    githubId = 45340040;
  };
  caperren = {
    name = "Corwin Perren";
    email = "caperren@gmail.com";
    github = "caperren";
    githubId = 4566591;
  };
  CaptainJawZ = {
    email = "CaptainJawZ@outlook.com";
    name = "Danilo Reyes";

@@ -5132,11 +5126,6 @@
    githubId = 130508846;
    name = "Elliot Cameron";
  };
  deadbaed = {
    name = "Philippe Loctaux";
    github = "deadbaed";
    githubId = 8809909;
  };
  dearrude = {
    name = "Ebrahim Nejati";
    email = "dearrude@tfwno.gf";

@@ -12549,12 +12538,6 @@
    githubId = 40217331;
    name = "LizeLive";
  };
  llakala = {
    email = "elevenaka11@gmail.com";
    github = "llakala";
    githubId = 78693624;
    name = "llakala";
  };
  lluchs = {
    email = "lukas.werling@gmail.com";
    github = "lluchs";

@@ -13515,12 +13498,6 @@
    githubId = 322214;
    name = "Mathnerd314";
  };
  mathstlouis = {
    email = "matfino+gh@gmail.com";
    github = "mathstlouis";
    githubId = 35696151;
    name = "mathstlouis";
  };
  matklad = {
    email = "aleksey.kladov@gmail.com";
    github = "matklad";

@@ -18619,16 +18596,6 @@
      { fingerprint = "C0A7 A9BB 115B C857 4D75 EA99 BBB7 A680 1DF1 E03F"; }
    ];
  };
  ritiek = {
    name = "Ritiek Malhotra";
    email = "ritiekmalhotra123@gmail.com";
    matrix = "@ritiek:matrix.org";
    github = "ritiek";
    githubId = 20314742;
    keys = [
      { fingerprint = "66FF 6099 7B04 845F F4C0 CB4F EB6F C9F9 FC96 4257"; }
    ];
  };
  rixed = {
    email = "rixed-github@happyleptic.org";
    github = "rixed";

@@ -18671,12 +18638,6 @@
    githubId = 82817;
    name = "Robert Kreuzer";
  };
  rksm = {
    email = "robert@kra.hn";
    github = "rksm";
    githubId = 467450;
    name = "Robert Krahn";
  };
  rlupton20 = {
    email = "richard.lupton@gmail.com";
    github = "rlupton20";

@@ -18771,13 +18732,6 @@
    githubId = 521306;
    name = "Rob Glossop";
  };
  robinkrahl = {
    email = "nix@ireas.org";
    github = "robinkrahl";
    githubId = 165115;
    keys = [ { fingerprint = "EC7E F0F9 B681 4C24 6236 3842 B755 6972 702A FD45"; } ];
    name = "Robin Krahl";
  };
  roblabla = {
    email = "robinlambertz+dev@gmail.com";
    github = "roblabla";

@@ -21807,12 +21761,6 @@
    githubId = 57180880;
    name = "Ansh Tyagi";
  };
  therealgramdalf = {
    email = "gramdalftech@gmail.com";
    github = "TheRealGramdalf";
    githubId = 79593869;
    name = "Gramdalf";
  };
  therealr5 = {
    email = "rouven@rfive.de";
    github = "therealr5";

@@ -21943,12 +21891,6 @@
    githubId = 1391883;
    name = "Tom Hall";
  };
  thtrf = {
    email = "thtrf@proton.me";
    github = "thtrf";
    githubId = 82712122;
    name = "thtrf";
  };
  Thunderbottom = {
    email = "chinmaydpai@gmail.com";
    github = "Thunderbottom";
@@ -4,7 +4,7 @@
# - pkgs/development/lua-modules/updater/updater.py

# format:
# $ nix run nixpkgs#ruff maintainers/scripts/pluginupdate.py
# $ nix run nixpkgs#black maintainers/scripts/pluginupdate.py
# type-check:
# $ nix run nixpkgs#python3.pkgs.mypy maintainers/scripts/pluginupdate.py
# linted:

@@ -142,7 +142,7 @@ class Repo:
        return loaded

    def prefetch(self, ref: Optional[str]) -> str:
        log.info("Prefetching %s", self.uri)
        print("Prefetching %s", self.uri)
        loaded = self._prefetch(ref)
        return loaded["sha256"]

@@ -195,7 +195,7 @@ class RepoGitHub(Repo):
            xml = req.read()

            # Filter out illegal XML characters
            illegal_xml_regex = re.compile(b"[\x00-\x08\x0b-\x0c\x0e-\x1f\x7f]")
            illegal_xml_regex = re.compile(b"[\x00-\x08\x0B-\x0C\x0E-\x1F\x7F]")
            xml = illegal_xml_regex.sub(b"", xml)

            root = ET.fromstring(xml)

@@ -256,7 +256,13 @@ class PluginDesc:

    @property
    def name(self):
        return self.alias or self.repo.name
        if self.alias is None:
            return self.repo.name
        else:
            return self.alias

    def __lt__(self, other):
        return self.repo.name < other.repo.name

    @staticmethod
    def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> "PluginDesc":

@@ -264,12 +270,7 @@ class PluginDesc:
        branch = row["branch"]
        repo = make_repo(row["repo"], branch.strip())
        repo.token = config.github_token
        return PluginDesc(
            repo,
            branch.strip(),
            # alias is usually an empty string
            row["alias"] if row["alias"] else None,
        )
        return PluginDesc(repo, branch.strip(), row["alias"])

    @staticmethod
    def load_from_string(config: FetchConfig, line: str) -> "PluginDesc":

@@ -327,11 +328,12 @@ def load_plugins_from_csv(
    return plugins


def run_nix_expr(expr, nixpkgs: str, **args):
    """
    '''
    :param expr nix expression to fetch current plugins
    :param nixpkgs Path towards a nixpkgs checkout
    """
    '''
    with CleanEnvironment(nixpkgs) as nix_path:
        cmd = [
            "nix",

@@ -380,14 +382,16 @@ class Editor:
        fetch_config = FetchConfig(args.proc, args.github_token)
        editor = self
        for plugin_line in args.add_plugins:
            log.debug("using plugin_line %s", plugin_line)
            log.debug("using plugin_line", plugin_line)
            pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
            log.debug("loaded as pdesc %s", pdesc)
            log.debug("loaded as pdesc", pdesc)
            append = [pdesc]
            editor.rewrite_input(
                fetch_config, args.input_file, editor.deprecated, append=append
            )
            plugin, _ = prefetch_plugin(pdesc)
            plugin, _ = prefetch_plugin(
                pdesc,
            )
            autocommit = not args.no_commit
            if autocommit:
                commit(

@@ -402,9 +406,9 @@ class Editor:
    # Expects arguments generated by 'update' subparser
    def update(self, args):
        """CSV spec"""
        print("the update member function should be overridden in subclasses")
        print("the update member function should be overriden in subclasses")

    def get_current_plugins(self, nixpkgs: str) -> List[Plugin]:
    def get_current_plugins(self, nixpkgs) -> List[Plugin]:
        """To fill the cache"""
        data = run_nix_expr(self.get_plugins, nixpkgs)
        plugins = []

@@ -436,7 +440,6 @@ class Editor:

        plugins, redirects = check_results(results)

        plugins = sorted(plugins, key=lambda v: v[1].normalized_name)
        self.generate_nix(plugins, outfile)

        return redirects

@@ -556,7 +559,6 @@ class Editor:
        parser = self.create_parser()
        args = parser.parse_args()
        command = args.command or "update"
        logging.basicConfig()
        log.setLevel(LOG_LEVELS[args.debug])
        log.info("Chose to run command: %s", command)
        self.nixpkgs = args.nixpkgs

@@ -589,24 +591,25 @@ def prefetch_plugin(
    p: PluginDesc,
    cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Optional[Repo]]:
    repo, branch, alias = p.repo, p.branch, p.alias
    name = alias or p.repo.name
    commit = None
    log.info(f"Fetching last commit for plugin {p.name} from {p.repo.uri}@{p.branch}")
    commit, date = p.repo.latest_commit()

    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
    commit, date = repo.latest_commit()
    cached_plugin = cache[commit] if cache else None
    if cached_plugin is not None:
        log.debug(f"Cache hit for {p.name}!")
        cached_plugin.name = p.name
        log.debug("Cache hit !")
        cached_plugin.name = name
        cached_plugin.date = date
        return cached_plugin, p.repo.redirect
        return cached_plugin, repo.redirect

    has_submodules = p.repo.has_submodules()
    log.debug(f"prefetch {p.name}")
    sha256 = p.repo.prefetch(commit)
    has_submodules = repo.has_submodules()
    log.debug(f"prefetch {name}")
    sha256 = repo.prefetch(commit)

    return (
        Plugin(p.name, commit, has_submodules, sha256, date=date),
        p.repo.redirect,
        Plugin(name, commit, has_submodules, sha256, date=date),
        repo.redirect,
    )

@@ -621,7 +624,7 @@ def print_download_error(plugin: PluginDesc, ex: Exception):

def check_results(
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]],
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
    """ """
    failures: List[Tuple[PluginDesc, Exception]] = []

@@ -639,9 +642,10 @@ def check_results(

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if len(failures) == 0:
        print()
        return plugins, redirects
    else:
        log.error(f", {len(failures)} plugin(s) could not be downloaded:\n")
        print(f", {len(failures)} plugin(s) could not be downloaded:\n")

        for plugin, exception in failures:
            print_download_error(plugin, exception)

@@ -734,7 +738,10 @@ def rewrite_input(
    append: List[PluginDesc] = [],
):
    log.info("Rewriting input file %s", input_file)
    plugins = load_plugins_from_csv(config, input_file)
    plugins = load_plugins_from_csv(
        config,
        input_file,
    )

    plugins.extend(append)

@@ -746,25 +753,15 @@ def rewrite_input(
            deprecations = json.load(f)
        # TODO parallelize this step
        for pdesc, new_repo in redirects.items():
            log.info("Resolving deprecated plugin %s -> %s", pdesc.name, new_repo.name)
            log.info("Rewriting input file %s", input_file)
            new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)

            old_plugin, _ = prefetch_plugin(pdesc)
            new_plugin, _ = prefetch_plugin(new_pdesc)

            if old_plugin.normalized_name != new_plugin.normalized_name:
                deprecations[old_plugin.normalized_name] = {
                    "new": new_plugin.normalized_name,
                    "date": cur_date_iso,
                }

            # remove plugin from index file, so we won't add it to deprecations again
            for i, plugin in enumerate(plugins):
                if plugin.name == pdesc.name:
                    plugins.pop(i)
                    break
            plugins.append(new_pdesc)

        with open(deprecated, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)
            f.write("\n")

@@ -775,7 +772,7 @@ def rewrite_input(
        fieldnames = ["repo", "branch", "alias"]
        writer = csv.DictWriter(f, fieldnames, dialect="unix", quoting=csv.QUOTE_NONE)
        writer.writeheader()
        for plugin in sorted(plugins, key=lambda x: x.name):
        for plugin in sorted(plugins):
            writer.writerow(asdict(plugin))

@@ -795,11 +792,9 @@ def update_plugins(editor: Editor, args):

    log.info("Start updating plugins")
    if args.proc > 1 and args.github_token == None:
        log.warning(
            "You have enabled parallel updates but haven't set a github token.\n"
            "You may be hit with `HTTP Error 429: too many requests` as a consequence."
            "Either set --proc=1 or --github-token=YOUR_TOKEN. "
        )
        log.warning("You have enabled parallel updates but haven't set a github token.\n"
                    "You may be hit with `HTTP Error 429: too many requests` as a consequence."
                    "Either set --proc=1 or --github-token=YOUR_TOKEN. ")

    fetch_config = FetchConfig(args.proc, args.github_token)
    update = editor.get_update(args.input_file, args.outfile, fetch_config)

@@ -815,9 +810,11 @@ def update_plugins(editor: Editor, args):
    if autocommit:
        try:
            repo = git.Repo(os.getcwd())
            updated = datetime.now(tz=UTC).strftime("%Y-%m-%d")
            updated = datetime.now(tz=UTC).strftime('%Y-%m-%d')
            print(args.outfile)
            commit(repo, f"{editor.attr_path}: update on {updated}", [args.outfile])
            commit(repo,
                   f"{editor.attr_path}: update on {updated}", [args.outfile]
            )
        except git.InvalidGitRepositoryError as e:
            print(f"Not in a git repository: {e}", file=sys.stderr)
            sys.exit(1)
13  third_party/nixpkgs/maintainers/team-list.nix  (vendored)
@@ -497,19 +497,6 @@ with lib.maintainers;
    shortName = "Input-Output Global employees";
  };

  java = {
    githubTeams = [ "java" ];
    members = [
      chayleaf
      fliegendewurst
      infinidoge
      tomodachi94
    ];
    shortName = "Java";
    scope = "Maintainers of the Nixpkgs Java ecosystem (JDK, JVM, Java, Gradle, Maven, Ant, and adjacent projects)";
    enableFeatureFreezePing = true;
  };

  jitsi = {
    members = [
      cleeyv
@@ -109,8 +109,6 @@
- [Firefly-iii Data Importer](https://github.com/firefly-iii/data-importer), a data importer for Firefly-III. Available as [services.firefly-iii-data-importer](options.html#opt-services.firefly-iii-data-importer.enable).

- [Dashy](https://dashy.to), an open source, highly customizable, easy to use, privacy-respecting dashboard app. Available as [services.dashy](options.html#opt-services.dashy).

- [QGroundControl], a ground station support and configuration manager for the PX4 and APM Flight Stacks. Available as [programs.qgroundcontrol](options.html#opt-programs.qgroundcontrol.enable).

- [Eintopf](https://eintopf.info), a community event and calendar web application. Available as [services.eintopf](options.html#opt-services.eintopf.enable).

@@ -197,8 +195,6 @@
## Backward Incompatibilities {#sec-release-24.11-incompatibilities}

- Nixpkgs now requires Nix 2.3.17 or newer to allow for zstd compressed binary artifacts.

- The `sound` options have been removed or renamed, as they had a lot of unintended side effects. See [below](#sec-release-24.11-migration-sound) for details.

- The NVIDIA driver no longer defaults to the proprietary kernel module with versions >= 560. You will need to manually set `hardware.nvidia.open` to select the proprietary or open modules.
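For the NVIDIA note above, the migration is a one-line setting; a minimal sketch (choose true for the open kernel module, false to keep the proprietary one):

  {
    hardware.nvidia.open = false;
  }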
@@ -11,12 +11,14 @@ in
      type = lib.types.bool;
      default = false;
      description = ''
        Enables udev rules for Nitrokey devices.
        Enables udev rules for Nitrokey devices. By default grants access
        to users in the "nitrokey" group. You may want to install the
        nitrokey-app package, depending on your device and needs.
      '';
    };
  };

  config = lib.mkIf cfg.enable {
    services.udev.packages = [ pkgs.nitrokey-udev-rules ];
    services.udev.packages = [ pkgs.libnitrokey ];
  };
}
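A usage sketch matching the reworded description above (assuming the option path is hardware.nitrokey.enable; the user name is hypothetical):

  { pkgs, ... }:
  {
    hardware.nitrokey.enable = true;
    # access is granted to members of the "nitrokey" group
    users.users.alice.extraGroups = [ "nitrokey" ];
    # optional, depending on your device and needs
    environment.systemPackages = [ pkgs.nitrokey-app ];
  }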
@@ -12,19 +12,14 @@ in
  options.programs.localsend = {
    enable = lib.mkEnableOption "localsend, an open source cross-platform alternative to AirDrop";

    package = lib.mkPackageOption pkgs "localsend" { };

    openFirewall =
      lib.mkEnableOption "opening the firewall port ${toString firewallPort} for receiving files"
      // {
        default = true;
      };
    openFirewall = lib.mkEnableOption "opening the firewall port ${toString firewallPort} for receiving files" // {
      default = true;
    };
  };

  config = lib.mkIf cfg.enable {
    environment.systemPackages = [ cfg.package ];
    environment.systemPackages = [ pkgs.localsend ];
    networking.firewall.allowedTCPPorts = lib.optionals cfg.openFirewall [ firewallPort ];
    networking.firewall.allowedUDPPorts = lib.optionals cfg.openFirewall [ firewallPort ];
  };

  meta.maintainers = with lib.maintainers; [ pandapip1 ];
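A usage sketch of the options above; openFirewall already defaults to true, so it only needs to be set when opting out:

  {
    programs.localsend = {
      enable = true;
      openFirewall = true;  # set to false to keep the TCP/UDP port (53317) closed
    };
  }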
@@ -101,12 +101,7 @@ in {
      };
    };

    systemd = {
      packages = [ cfg.package ];
      tmpfiles.rules = [
        "d /etc/openvpn3/configs 0750 openvpn openvpn - -"
      ];
    };
    systemd.packages = [ cfg.package ];
  };

  meta.maintainers = with lib.maintainers; [ shamilton progrm_jarvis ];
@@ -183,6 +183,7 @@ let
  certToConfig = cert: data: let
    acmeServer = data.server;
    useDns = data.dnsProvider != null;
    useDnsOrS3 = useDns || data.s3Bucket != null;
    destPath = "/var/lib/acme/${cert}";
    selfsignedDeps = lib.optionals (cfg.preliminarySelfsigned) [ "acme-selfsigned-${cert}.service" ];

@@ -366,11 +367,13 @@ let
        "/var/lib/acme/.lego/${cert}/${certDir}:/tmp/certificates"
      ];

      EnvironmentFile = lib.mkIf (data.environmentFile != null) data.environmentFile;
      EnvironmentFile = lib.mkIf useDnsOrS3 data.environmentFile;

      Environment = lib.mapAttrsToList (k: v: ''"${k}=%d/${k}"'') data.credentialFiles;
      Environment = lib.mkIf useDnsOrS3
        (lib.mapAttrsToList (k: v: ''"${k}=%d/${k}"'') data.credentialFiles);

      LoadCredential = lib.mapAttrsToList (k: v: "${k}:${v}") data.credentialFiles;
      LoadCredential = lib.mkIf useDnsOrS3
        (lib.mapAttrsToList (k: v: "${k}:${v}") data.credentialFiles);

      # Run as root (Prefixed with +)
      ExecStartPost = "+" + (pkgs.writeShellScript "acme-postrun" ''
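The useDnsOrS3 change above affects how per-certificate credentials are wired into the lego unit. A usage sketch (provider name, variable name, and paths are placeholders, not taken from this diff):

  {
    security.acme.certs."example.org" = {
      dnsProvider = "cloudflare";
      # exposed to the service via systemd LoadCredential, as in the hunk above
      credentialFiles."CLOUDFLARE_DNS_API_TOKEN_FILE" = "/run/secrets/cf-api-token";
      # or a single environmentFile instead of credentialFiles
      # environmentFile = "/run/secrets/acme.env";
    };
  }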
@@ -95,7 +95,7 @@ in
      after = ["network.target"];
      wantedBy = ["multi-user.target"];
      script = ''
        exec ${fhsEnvExecutable} --dir ${cfg.dataDir} ${allowRemoteGuiRpcFlag}
        ${fhsEnvExecutable} --dir ${cfg.dataDir} ${allowRemoteGuiRpcFlag}
      '';
      serviceConfig = {
        User = "boinc";
@@ -55,12 +55,7 @@ in
      default = "";
    };
    passwordFile = lib.mkOption {
      type = lib.types.path;
      example = lib.literalExpression ''
        pkgs.writeText "roundcube-postgres-passwd.txt" '''
          hostname:port:database:username:password
        '''
      '';
      type = lib.types.str;
      description = ''
        Password file for the postgresql connection.
        Must be formatted according to PostgreSQL .pgpass standard (see https://www.postgresql.org/docs/current/libpq-pgpass.html)
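With passwordFile now a path in .pgpass format, a value can be generated at build time as in the option's own example. A sketch, assuming the option lives at services.roundcube.database.passwordFile; note that a file created this way ends up world-readable in the Nix store:

  { pkgs, ... }:
  {
    services.roundcube.database.passwordFile = pkgs.writeText "roundcube-postgres-passwd.txt" ''
      localhost:5432:roundcube:roundcube:secret
    '';
  }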
@@ -70,7 +70,7 @@ in
        LoadCredential = "bot-password-file:${cfg.botPasswordFile}";
        RestartSec = "10s";
        StateDirectory = "hebbot";
        WorkingDirectory = "/var/lib/hebbot";
        WorkingDirectory = "hebbot";
      };
    };
  };
@@ -275,7 +275,7 @@ in
    systemd.services.guix-daemon = {
      environment = serviceEnv;
      script = ''
        exec ${lib.getExe' package "guix-daemon"} \
        ${lib.getExe' package "guix-daemon"} \
          --build-users-group=${cfg.group} \
          ${lib.optionalString (cfg.substituters.urls != [ ])
            "--substitute-urls='${lib.concatStringsSep " " cfg.substituters.urls}'"} \

@@ -384,7 +384,7 @@ in
        }
      '';
      script = ''
        exec ${lib.getExe' package "guix"} publish \
        ${lib.getExe' package "guix"} publish \
          --user=${cfg.publish.user} --port=${builtins.toString cfg.publish.port} \
          ${lib.escapeShellArgs cfg.publish.extraArgs}
      '';

@@ -440,10 +440,12 @@ in
      description = "Guix garbage collection";
      startAt = cfg.gc.dates;
      script = ''
        exec ${lib.getExe' package "guix"} gc ${lib.escapeShellArgs cfg.gc.extraArgs}
        ${lib.getExe' package "guix"} gc ${lib.escapeShellArgs cfg.gc.extraArgs}
      '';

      serviceConfig = {
        Type = "oneshot";

        PrivateDevices = true;
        PrivateNetwork = true;
        ProtectControlGroups = true;
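A configuration sketch exercising the publish and gc options referenced above; the enable toggles are assumed and the port, schedule, and gc flag are illustrative:

  {
    services.guix = {
      enable = true;
      publish = {
        enable = true;       # assumed toggle; user/port/extraArgs appear in the hunk above
        port = 8181;
      };
      gc = {
        enable = true;       # assumed toggle
        dates = "weekly";    # feeds startAt above
        extraArgs = [ "--free-space=10G" ];
      };
    };
  }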
@@ -1,6 +1,7 @@
{ config, pkgs, lib, ... }:
let
  cfg = config.services.nzbget;
  pkg = pkgs.nzbget;
  stateDir = "/var/lib/nzbget";
  configFile = "${stateDir}/nzbget.conf";
  configOpts = lib.concatStringsSep " " (lib.mapAttrsToList (name: value: "-o ${name}=${lib.escapeShellArg (toStr value)}") cfg.settings);

@@ -23,8 +24,6 @@ in
    services.nzbget = {
      enable = lib.mkEnableOption "NZBGet, for downloading files from news servers";

      package = lib.mkPackageOption pkgs "nzbget" { };

      user = lib.mkOption {
        type = lib.types.str;
        default = "nzbget";

@@ -65,8 +64,8 @@ in
      InfoTarget = "screen";
      DetailTarget = "screen";
      # required paths
      ConfigTemplate = "${cfg.package}/share/nzbget/nzbget.conf";
      WebDir = "${cfg.package}/share/nzbget/webui";
      ConfigTemplate = "${pkg}/share/nzbget/nzbget.conf";
      WebDir = "${pkg}/share/nzbget/webui";
      # nixos handles package updates
      UpdateCheck = "none";
    };

@@ -82,7 +81,7 @@ in

    preStart = ''
      if [ ! -f ${configFile} ]; then
        ${pkgs.coreutils}/bin/install -m 0700 ${cfg.package}/share/nzbget/nzbget.conf ${configFile}
        ${pkgs.coreutils}/bin/install -m 0700 ${pkg}/share/nzbget/nzbget.conf ${configFile}
      fi
    '';

@@ -93,8 +92,8 @@ in
      Group = cfg.group;
      UMask = "0002";
      Restart = "on-failure";
      ExecStart = "${cfg.package}/bin/nzbget --server --configfile ${stateDir}/nzbget.conf ${configOpts}";
      ExecStop = "${cfg.package}/bin/nzbget --quit";
      ExecStart = "${pkg}/bin/nzbget --server --configfile ${stateDir}/nzbget.conf ${configOpts}";
      ExecStop = "${pkg}/bin/nzbget --quit";
    };
  };
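A usage sketch of the module above; settings entries become -o Name=Value flags via configOpts, and the keys shown are ordinary NZBGet options used here only as examples:

  {
    services.nzbget = {
      enable = true;
      settings = {
        MainDir = "/var/lib/nzbget";
        ControlPort = 6789;
      };
    };
  }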
@@ -12,8 +12,6 @@ in {
      on how to set up a reverse proxy
    '';

    package = lib.mkPackageOption pkgs "ombi" { };

    dataDir = lib.mkOption {
      type = lib.types.str;
      default = "/var/lib/ombi";

@@ -60,7 +58,7 @@ in {
      Type = "simple";
      User = cfg.user;
      Group = cfg.group;
      ExecStart = "${lib.getExe cfg.package} --storage '${cfg.dataDir}' --host 'http://*:${toString cfg.port}'";
      ExecStart = "${pkgs.ombi}/bin/Ombi --storage '${cfg.dataDir}' --host 'http://*:${toString cfg.port}'";
      Restart = "on-failure";
    };
  };
@@ -1,31 +1,37 @@
{ config, lib, pkgs, ... }:

with lib;

let

  cfg = config.services.teamviewer;

in

{

  ###### interface

  options = {
    services.teamviewer = {
      enable = lib.mkEnableOption "TeamViewer daemon & system package";
      package = lib.mkPackageOption pkgs "teamviewer" { };
    };

    services.teamviewer.enable = mkEnableOption "TeamViewer daemon";

  };

  config = lib.mkIf (cfg.enable) {
    environment.systemPackages = [ cfg.package ];
  ###### implementation

    services.dbus.packages = [ cfg.package ];
  config = mkIf (cfg.enable) {

    environment.systemPackages = [ pkgs.teamviewer ];

    services.dbus.packages = [ pkgs.teamviewer ];

    systemd.services.teamviewerd = {
      description = "TeamViewer remote control daemon";

      wantedBy = [ "multi-user.target" ];
      wants = [ "network-online.target" ];
      after = [
        "network-online.target"
        "network.target"
        "dbus.service"
      ];
      after = [ "network-online.target" "network.target" "dbus.service" ];
      requires = [ "dbus.service" ];
      preStart = "mkdir -pv /var/lib/teamviewer /var/log/teamviewer";

@@ -33,11 +39,12 @@ in
      startLimitBurst = 10;
      serviceConfig = {
        Type = "simple";
        ExecStart = "${cfg.package}/bin/teamviewerd -f";
        ExecStart = "${pkgs.teamviewer}/bin/teamviewerd -f";
        PIDFile = "/run/teamviewerd.pid";
        ExecReload = "${pkgs.coreutils}/bin/kill -HUP $MAINPID";
        Restart = "on-abort";
      };
    };
  };

}
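A usage sketch of the rewritten module above, with the new package option left at its default:

  {
    services.teamviewer.enable = true;
    # services.teamviewer.package defaults to pkgs.teamviewer and only needs
    # to be set when pinning or patching the package
  }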
@@ -526,7 +526,6 @@
      ExecStartPre = "-rm /var/cache/frigate/*.mp4";
      ExecStart = "${cfg.package.python.interpreter} -m frigate";
      Restart = "on-failure";
      SyslogIdentifier = "frigate";

      User = "frigate";
      Group = "frigate";
@ -1,173 +0,0 @@
|
|||
{
|
||||
config,
|
||||
lib,
|
||||
pkgs,
|
||||
...
|
||||
}:
|
||||
let
|
||||
inherit (lib.types) package str;
|
||||
inherit (lib)
|
||||
mkIf
|
||||
mkOption
|
||||
mkEnableOption
|
||||
mkPackageOption
|
||||
;
|
||||
cfg = config.services.dashy;
|
||||
in
|
||||
{
|
||||
options.services.dashy = {
|
||||
enable = mkEnableOption ''
|
||||
Dashy, a highly customizable, easy to use, privacy-respecting dashboard app.
|
||||
|
||||
Note that this builds a static web app as opposed to running a full node server, unlike the default docker image.
|
||||
|
||||
Writing config changes to disk through the UI, triggering a rebuild through the UI and application status checks are
|
||||
unavailable without the node server; Everything else will work fine.
|
||||
|
||||
See the deployment docs for [building from source](https://dashy.to/docs/deployment#build-from-source), [hosting with a CDN](https://dashy.to/docs/deployment#hosting-with-cdn) and [CDN cloud deploy](https://dashy.to/docs/deployment#cdn--cloud-deploy) for more information.
|
||||
'';
|
||||
|
||||
virtualHost = {
|
||||
enableNginx = mkEnableOption "a virtualhost to serve dashy through nginx";
|
||||
|
||||
domain = mkOption {
|
||||
description = ''
|
||||
Domain to use for the virtual host.
|
||||
|
||||
This can be used to change nginx options like
|
||||
```nix
|
||||
services.nginx.virtualHosts."$\{config.services.dashy.virtualHost.domain}".listen = [ ... ]
|
||||
```
|
||||
or
|
||||
```nix
|
||||
services.nginx.virtualHosts."example.com".listen = [ ... ]
|
||||
```
|
||||
'';
|
||||
type = str;
|
||||
};
|
||||
};
|
||||
|
||||
package = mkPackageOption pkgs "dashy-ui" { };
|
||||
|
||||
finalDrv = mkOption {
|
||||
readOnly = true;
|
||||
default =
|
||||
if cfg.settings != { } then cfg.package.override { inherit (cfg) settings; } else cfg.package;
|
||||
defaultText = ''
|
||||
if cfg.settings != {}
|
||||
then cfg.package.override {inherit (cfg) settings;}
|
||||
else cfg.package;
|
||||
'';
|
||||
type = package;
|
||||
description = ''
|
||||
Final derivation containing the fully built static files
|
||||
'';
|
||||
};
|
||||
|
||||
settings = mkOption {
|
||||
default = { };
|
||||
description = ''
|
||||
Settings serialized into `user-data/conf.yml` before build.
|
||||
If left empty, the default configuration shipped with the package will be used instead.
|
||||
|
||||
Note that the full configuration will be written to the nix store as world readable, which may include secrets such as [password hashes](https://dashy.to/docs/configuring#appconfigauthusers-optional).
|
||||
|
||||
To add files such as icons or backgrounds, you can reference them in line such as
|
||||
```nix
|
||||
icon = "$\{./icon.png}";
|
||||
```
|
||||
This will add the file to the nix store upon build, referencing it by file path as expected by Dashy.
|
||||
'';
|
||||
example = ''
|
||||
{
|
||||
appConfig = {
|
||||
cssThemes = [
|
||||
"example-theme-1"
|
||||
"example-theme-2"
|
||||
];
|
||||
enableFontAwesome = true;
|
||||
fontAwesomeKey = "e9076c7025";
|
||||
theme = "thebe";
|
||||
};
|
||||
pageInfo = {
|
||||
description = "My Awesome Dashboard";
|
||||
navLinks = [
|
||||
{
|
||||
path = "/";
|
||||
title = "Home";
|
||||
}
|
||||
{
|
||||
path = "https://example.com";
|
||||
title = "Example 1";
|
||||
}
|
||||
{
|
||||
path = "https://example.com";
|
||||
title = "Example 2";
|
||||
}
|
||||
];
|
||||
title = "Dashy";
|
||||
};
|
||||
sections = [
|
||||
{
|
||||
displayData = {
|
||||
collapsed = true;
|
||||
cols = 2;
|
||||
customStyles = "border: 2px dashed red;";
|
||||
itemSize = "large";
|
||||
};
|
||||
items = [
|
||||
{
|
||||
backgroundColor = "#0079ff";
|
||||
color = "#00ffc9";
|
||||
description = "Source code and documentation on GitHub";
|
||||
icon = "fab fa-github";
|
||||
target = "sametab";
|
||||
title = "Source";
|
||||
url = "https://github.com/Lissy93/dashy";
|
||||
}
|
||||
{
|
||||
description = "View currently open issues, or raise a new one";
|
||||
icon = "fas fa-bug";
|
||||
title = "Issues";
|
||||
url = "https://github.com/Lissy93/dashy/issues";
|
||||
}
|
||||
{
|
||||
description = "Live Demo #1";
|
||||
icon = "fas fa-rocket";
|
||||
target = "iframe";
|
||||
title = "Demo 1";
|
||||
url = "https://dashy-demo-1.as93.net";
|
||||
}
|
||||
{
|
||||
description = "Live Demo #2";
|
||||
icon = "favicon";
|
||||
target = "newtab";
|
||||
title = "Demo 2";
|
||||
url = "https://dashy-demo-2.as93.net";
|
||||
}
|
||||
];
|
||||
name = "Getting Started";
|
||||
}
|
||||
];
|
||||
}
|
||||
'';
|
||||
inherit (pkgs.formats.json { }) type;
|
||||
};
|
||||
};
|
||||
|
||||
config = mkIf cfg.enable {
|
||||
services.nginx = mkIf cfg.virtualHost.enableNginx {
|
||||
enable = true;
|
||||
virtualHosts."${cfg.virtualHost.domain}" = {
|
||||
locations."/" = {
|
||||
root = cfg.finalDrv;
|
||||
tryFiles = "$uri /index.html ";
|
||||
};
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
meta.maintainers = [
|
||||
lib.maintainers.therealgramdalf
|
||||
];
|
||||
}
|
|
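A usage sketch of the services.dashy module whose removal is shown above, based on the options it declares (domain and settings values are placeholders):

  {
    services.dashy = {
      enable = true;
      virtualHost = {
        enableNginx = true;
        domain = "dashboard.example.com";
      };
      # serialized to user-data/conf.yml before the static site is built
      settings.pageInfo.title = "Dashy";
    };
  }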
@@ -116,7 +116,7 @@
      description = ''
        Configuration for Immich.
        See <https://immich.app/docs/install/config-file/> or navigate to
        <https://my.immich.app/admin/system-settings> for
        <https://your-immich-domain/admin/system-settings> for
        options and defaults.
        Setting it to `null` allows configuring Immich in the web interface.
      '';
@@ -270,7 +270,7 @@
    let
      postgresEnv =
        if isPostgresUnixSocket then
          { DB_URL = "postgresql:///${cfg.database.name}?host=${cfg.database.host}"; }
          { DB_URL = "socket://${cfg.database.host}?dbname=${cfg.database.name}"; }
        else
          {
            DB_HOSTNAME = cfg.database.host;
@@ -317,11 +317,6 @@
      after = [ "network.target" ];
      wantedBy = [ "multi-user.target" ];
      inherit (cfg) environment;
      path = [
        # gzip and pg_dumpall are used by the backup service
        pkgs.gzip
        config.services.postgresql.package
      ];

      serviceConfig = commonServiceConfig // {
        ExecStart = lib.getExe cfg.package;
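Following the settings description above, a minimal sketch; leaving settings = null instead keeps configuration in the web interface. The key shown comes from the upstream config-file format and is only an example:

  {
    services.immich = {
      enable = true;
      settings.server.externalDomain = "https://photos.example.com";
    };
  }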
@@ -48,7 +48,7 @@
    ] (
      opt: options.services.nextcloud.config.${opt} // {
        default = config.services.nextcloud.config.${opt};
        defaultText = lib.literalExpression "config.services.nextcloud.config.${opt}";
        defaultText = "config.services.nextcloud.config.${opt}";
      }
    )
  );
13
third_party/nixpkgs/nixos/tests/all-tests.nix
vendored

@@ -775,10 +775,13 @@ in {
  peering-manager = handleTest ./web-apps/peering-manager.nix {};
  peertube = handleTestOn ["x86_64-linux"] ./web-apps/peertube.nix {};
  peroxide = handleTest ./peroxide.nix {};
  pg_anonymizer = handleTest ./pg_anonymizer.nix {};
  pgadmin4 = handleTest ./pgadmin4.nix {};
  pgbouncer = handleTest ./pgbouncer.nix {};
  pghero = runTest ./pghero.nix;
  pgjwt = handleTest ./pgjwt.nix {};
  pgmanage = handleTest ./pgmanage.nix {};
  pgvecto-rs = handleTest ./pgvecto-rs.nix {};
  phosh = handleTest ./phosh.nix {};
  photonvision = handleTest ./photonvision.nix {};
  photoprism = handleTest ./photoprism.nix {};

@@ -811,7 +814,13 @@ in {
  postfix = handleTest ./postfix.nix {};
  postfix-raise-smtpd-tls-security-level = handleTest ./postfix-raise-smtpd-tls-security-level.nix {};
  postfixadmin = handleTest ./postfixadmin.nix {};
  postgresql = handleTest ./postgresql {};
  postgis = handleTest ./postgis.nix {};
  apache_datasketches = handleTest ./apache_datasketches.nix {};
  postgresql = handleTest ./postgresql.nix {};
  postgresql-jit = handleTest ./postgresql-jit.nix {};
  postgresql-wal-receiver = handleTest ./postgresql-wal-receiver.nix {};
  postgresql-tls-client-cert = handleTest ./postgresql-tls-client-cert.nix {};
  postgresql-wal2json = handleTest ./postgresql-wal2json.nix {};
  powerdns = handleTest ./powerdns.nix {};
  powerdns-admin = handleTest ./powerdns-admin.nix {};
  power-profiles-daemon = handleTest ./power-profiles-daemon.nix {};

@@ -1038,6 +1047,7 @@ in {
  tiddlywiki = handleTest ./tiddlywiki.nix {};
  tigervnc = handleTest ./tigervnc.nix {};
  tika = runTest ./tika.nix;
  timescaledb = handleTest ./timescaledb.nix {};
  timezone = handleTest ./timezone.nix {};
  timidity = handleTestOn ["aarch64-linux" "x86_64-linux"] ./timidity {};
  tinc = handleTest ./tinc {};

@@ -1057,6 +1067,7 @@ in {
  trezord = handleTest ./trezord.nix {};
  trickster = handleTest ./trickster.nix {};
  trilium-server = handleTestOn ["x86_64-linux"] ./trilium-server.nix {};
  tsja = handleTest ./tsja.nix {};
  tsm-client-gui = handleTest ./tsm-client-gui.nix {};
  ttyd = handleTest ./web-servers/ttyd.nix {};
  txredisapi = handleTest ./txredisapi.nix {};
29
third_party/nixpkgs/nixos/tests/apache_datasketches.nix
vendored
Normal file

@@ -0,0 +1,29 @@
import ./make-test-python.nix ({ pkgs, ...} : {
  name = "postgis";
  meta = with pkgs.lib.maintainers; {
    maintainers = [ lsix ]; # TODO: Who's the maintener now?
  };

  nodes = {
    master =
      { pkgs, ... }:

      {
        services.postgresql = let mypg = pkgs.postgresql_15; in {
          enable = true;
          package = mypg;
          extraPlugins = with mypg.pkgs; [
            apache_datasketches
          ];
        };
      };
  };

  testScript = ''
    start_all()
    master.wait_for_unit("postgresql")
    master.sleep(10) # Hopefully this is long enough!!
    master.succeed("sudo -u postgres psql -c 'CREATE EXTENSION datasketches;'")
    master.succeed("sudo -u postgres psql -c 'SELECT hll_sketch_to_string(hll_sketch_build(1));'")
  '';
})
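The new test above relies on a fixed master.sleep(10). A sketch of a more robust variant of its testScript, polling with the test driver's wait_until_succeeds (used elsewhere in this diff) instead of sleeping:

  testScript = ''
    start_all()
    master.wait_for_unit("postgresql")
    # poll until the extension can actually be created instead of sleeping a fixed 10s
    master.wait_until_succeeds("sudo -u postgres psql -c 'CREATE EXTENSION IF NOT EXISTS datasketches;'")
    master.succeed("sudo -u postgres psql -c 'SELECT hll_sketch_to_string(hll_sketch_build(1));'")
  '';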
162
third_party/nixpkgs/nixos/tests/frr.nix
vendored
162
third_party/nixpkgs/nixos/tests/frr.nix
vendored
|
@ -5,11 +5,10 @@
|
|||
#
|
||||
# All interfaces are in OSPF Area 0.
|
||||
|
||||
import ./make-test-python.nix (
|
||||
{ pkgs, ... }:
|
||||
import ./make-test-python.nix ({ pkgs, ... }:
|
||||
let
|
||||
|
||||
ifAddr = node: iface: (pkgs.lib.head node.networking.interfaces.${iface}.ipv4.addresses).address;
|
||||
ifAddr = node: iface: (pkgs.lib.head node.config.networking.interfaces.${iface}.ipv4.addresses).address;
|
||||
|
||||
ospfConf1 = ''
|
||||
router ospf
|
||||
|
@ -26,94 +25,87 @@ import ./make-test-python.nix (
|
|||
'';
|
||||
|
||||
in
|
||||
{
|
||||
name = "frr";
|
||||
{
|
||||
name = "frr";
|
||||
|
||||
meta = with pkgs.lib.maintainers; {
|
||||
maintainers = [ ];
|
||||
};
|
||||
meta = with pkgs.lib.maintainers; {
|
||||
maintainers = [ ];
|
||||
};
|
||||
|
||||
nodes = {
|
||||
nodes = {
|
||||
|
||||
client =
|
||||
client =
|
||||
{ nodes, ... }:
|
||||
{
|
||||
virtualisation.vlans = [ 1 ];
|
||||
services.frr = {
|
||||
config = ''
|
||||
ip route 192.168.0.0/16 ${ifAddr nodes.router1 "eth1"}
|
||||
'';
|
||||
};
|
||||
};
|
||||
|
||||
router1 =
|
||||
{ ... }:
|
||||
{
|
||||
virtualisation.vlans = [ 1 2 ];
|
||||
boot.kernel.sysctl."net.ipv4.ip_forward" = "1";
|
||||
networking.firewall.extraCommands = "iptables -A nixos-fw -i eth2 -p ospfigp -j ACCEPT";
|
||||
services.frr = {
|
||||
ospfd.enable = true;
|
||||
config = ospfConf1;
|
||||
};
|
||||
|
||||
specialisation.ospf.configuration = {
|
||||
services.frr.config = ospfConf2;
|
||||
};
|
||||
};
|
||||
|
||||
router2 =
|
||||
{ ... }:
|
||||
{
|
||||
virtualisation.vlans = [ 3 2 ];
|
||||
boot.kernel.sysctl."net.ipv4.ip_forward" = "1";
|
||||
networking.firewall.extraCommands = "iptables -A nixos-fw -i eth2 -p ospfigp -j ACCEPT";
|
||||
services.frr = {
|
||||
ospfd.enable = true;
|
||||
config = ospfConf2;
|
||||
};
|
||||
};
|
||||
|
||||
server =
|
||||
{ nodes, ... }:
|
||||
{
|
||||
virtualisation.vlans = [ 3 ];
|
||||
services.frr = {
|
||||
config = ''
|
||||
ip route 192.168.0.0/16 ${ifAddr nodes.router2 "eth1"}
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
testScript =
|
||||
{ nodes, ... }:
|
||||
{
|
||||
virtualisation.vlans = [ 1 ];
|
||||
services.frr = {
|
||||
config = ''
|
||||
ip route 192.168.0.0/16 ${ifAddr nodes.router1 "eth1"}
|
||||
'';
|
||||
};
|
||||
};
|
||||
''
|
||||
start_all()
|
||||
|
||||
router1 =
|
||||
{ ... }:
|
||||
{
|
||||
virtualisation.vlans = [
|
||||
1
|
||||
2
|
||||
];
|
||||
boot.kernel.sysctl."net.ipv4.ip_forward" = "1";
|
||||
networking.firewall.extraCommands = "iptables -A nixos-fw -i eth2 -p ospfigp -j ACCEPT";
|
||||
services.frr = {
|
||||
ospfd.enable = true;
|
||||
config = ospfConf1;
|
||||
};
|
||||
# Wait for the networking to start on all machines
|
||||
for machine in client, router1, router2, server:
|
||||
machine.wait_for_unit("network.target")
|
||||
|
||||
specialisation.ospf.configuration = {
|
||||
services.frr.config = ospfConf2;
|
||||
};
|
||||
};
|
||||
with subtest("Wait for FRR"):
|
||||
for gw in client, router1, router2, server:
|
||||
gw.wait_for_unit("frr")
|
||||
|
||||
router2 =
|
||||
{ ... }:
|
||||
{
|
||||
virtualisation.vlans = [
|
||||
3
|
||||
2
|
||||
];
|
||||
boot.kernel.sysctl."net.ipv4.ip_forward" = "1";
|
||||
networking.firewall.extraCommands = "iptables -A nixos-fw -i eth2 -p ospfigp -j ACCEPT";
|
||||
services.frr = {
|
||||
ospfd.enable = true;
|
||||
config = ospfConf2;
|
||||
};
|
||||
};
|
||||
router1.succeed("${nodes.router1.config.system.build.toplevel}/specialisation/ospf/bin/switch-to-configuration test >&2")
|
||||
|
||||
server =
|
||||
{ nodes, ... }:
|
||||
{
|
||||
virtualisation.vlans = [ 3 ];
|
||||
services.frr = {
|
||||
config = ''
|
||||
ip route 192.168.0.0/16 ${ifAddr nodes.router2 "eth1"}
|
||||
'';
|
||||
};
|
||||
};
|
||||
};
|
||||
with subtest("Wait for OSPF to form adjacencies"):
|
||||
for gw in router1, router2:
|
||||
gw.wait_until_succeeds("vtysh -c 'show ip ospf neighbor' | grep Full")
|
||||
gw.wait_until_succeeds("vtysh -c 'show ip route' | grep '^O>'")
|
||||
|
||||
testScript =
|
||||
{ nodes, ... }:
|
||||
''
|
||||
start_all()
|
||||
|
||||
# Wait for the networking to start on all machines
|
||||
for machine in client, router1, router2, server:
|
||||
machine.wait_for_unit("network.target")
|
||||
|
||||
with subtest("Wait for FRR"):
|
||||
for gw in client, router1, router2, server:
|
||||
gw.wait_for_unit("frr")
|
||||
|
||||
router1.succeed("${nodes.router1.system.build.toplevel}/specialisation/ospf/bin/switch-to-configuration test >&2")
|
||||
|
||||
with subtest("Wait for OSPF to form adjacencies"):
|
||||
for gw in router1, router2:
|
||||
gw.wait_until_succeeds("vtysh -c 'show ip ospf neighbor' | grep Full")
|
||||
gw.wait_until_succeeds("vtysh -c 'show ip route' | grep '^O>'")
|
||||
|
||||
with subtest("Test ICMP"):
|
||||
client.wait_until_succeeds("ping -4 -c 3 server >&2")
|
||||
'';
|
||||
}
|
||||
)
|
||||
with subtest("Test ICMP"):
|
||||
client.wait_until_succeeds("ping -4 -c 3 server >&2")
|
||||
'';
|
||||
})
|
||||
|
|
|
@@ -16,7 +16,6 @@ import ./make-test-python.nix (
      machine.wait_for_open_port(53317)
      machine.wait_for_window("LocalSend", 10)
      machine.succeed("netstat --listening --program --tcp | grep -P 'tcp.*53317.*localsend'")
      machine.succeed("netstat --listening --program --udp | grep -P 'udp.*53317.*localsend'")
    '';
  }
)
94
third_party/nixpkgs/nixos/tests/pg_anonymizer.nix
vendored
Normal file
94
third_party/nixpkgs/nixos/tests/pg_anonymizer.nix
vendored
Normal file
|
@ -0,0 +1,94 @@
|
|||
import ./make-test-python.nix ({ pkgs, lib, ... }: {
|
||||
name = "pg_anonymizer";
|
||||
meta.maintainers = lib.teams.flyingcircus.members;
|
||||
|
||||
nodes.machine = { pkgs, ... }: {
|
||||
environment.systemPackages = [ pkgs.pg-dump-anon ];
|
||||
services.postgresql = {
|
||||
enable = true;
|
||||
extraPlugins = ps: [ ps.anonymizer ];
|
||||
settings.shared_preload_libraries = [ "anon" ];
|
||||
};
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
start_all()
|
||||
machine.wait_for_unit("multi-user.target")
|
||||
machine.wait_for_unit("postgresql.service")
|
||||
|
||||
with subtest("Setup"):
|
||||
machine.succeed("sudo -u postgres psql --command 'create database demo'")
|
||||
machine.succeed(
|
||||
"sudo -u postgres psql -d demo -f ${pkgs.writeText "init.sql" ''
|
||||
create extension anon cascade;
|
||||
select anon.init();
|
||||
create table player(id serial, name text, points int);
|
||||
insert into player(id,name,points) values (1,'Foo', 23);
|
||||
insert into player(id,name,points) values (2,'Bar',42);
|
||||
security label for anon on column player.name is 'MASKED WITH FUNCTION anon.fake_last_name();';
|
||||
security label for anon on column player.points is 'MASKED WITH VALUE NULL';
|
||||
''}"
|
||||
)
|
||||
|
||||
def get_player_table_contents():
|
||||
return [
|
||||
x.split(',') for x in machine.succeed("sudo -u postgres psql -d demo --csv --command 'select * from player'").splitlines()[1:]
|
||||
]
|
||||
|
||||
def check_anonymized_row(row, id, original_name):
|
||||
assert row[0] == id, f"Expected first row to have ID {id}, but got {row[0]}"
|
||||
assert row[1] != original_name, f"Expected first row to have a name other than {original_name}"
|
||||
assert not bool(row[2]), "Expected points to be NULL in first row"
|
||||
|
||||
def find_xsv_in_dump(dump, sep=','):
|
||||
"""
|
||||
Expecting to find a CSV (for pg_dump_anon) or TSV (for pg_dump) structure, looking like
|
||||
|
||||
COPY public.player ...
|
||||
1,Shields,
|
||||
2,Salazar,
|
||||
\.
|
||||
|
||||
in the given dump (the commas are tabs in case of pg_dump).
|
||||
Extract the CSV lines and split by `sep`.
|
||||
"""
|
||||
|
||||
try:
|
||||
from itertools import dropwhile, takewhile
|
||||
return [x.split(sep) for x in list(takewhile(
|
||||
lambda x: x != "\\.",
|
||||
dropwhile(
|
||||
lambda x: not x.startswith("COPY public.player"),
|
||||
dump.splitlines()
|
||||
)
|
||||
))[1:]]
|
||||
except:
|
||||
print(f"Dump to process: {dump}")
|
||||
raise
|
||||
|
||||
def check_original_data(output):
|
||||
assert output[0] == ['1','Foo','23'], f"Expected first row from player table to be 1,Foo,23; got {output[0]}"
|
||||
assert output[1] == ['2','Bar','42'], f"Expected first row from player table to be 2,Bar,42; got {output[1]}"
|
||||
|
||||
def check_anonymized_rows(output):
|
||||
check_anonymized_row(output[0], '1', 'Foo')
|
||||
check_anonymized_row(output[1], '2', 'Bar')
|
||||
|
||||
with subtest("Check initial state"):
|
||||
check_original_data(get_player_table_contents())
|
||||
|
||||
with subtest("Anonymous dumps"):
|
||||
check_original_data(find_xsv_in_dump(
|
||||
machine.succeed("sudo -u postgres pg_dump demo"),
|
||||
sep='\t'
|
||||
))
|
||||
check_anonymized_rows(find_xsv_in_dump(
|
||||
machine.succeed("sudo -u postgres pg_dump_anon -U postgres -h /run/postgresql -d demo"),
|
||||
sep=','
|
||||
))
|
||||
|
||||
with subtest("Anonymize"):
|
||||
machine.succeed("sudo -u postgres psql -d demo --command 'select anon.anonymize_database();'")
|
||||
check_anonymized_rows(get_player_table_contents())
|
||||
'';
|
||||
})
|
35
third_party/nixpkgs/nixos/tests/pgjwt.nix
vendored
Normal file

@@ -0,0 +1,35 @@
import ./make-test-python.nix ({ pkgs, lib, ...}:

with pkgs; {
  name = "pgjwt";
  meta = with lib.maintainers; {
    maintainers = [ spinus willibutz ];
  };

  nodes = {
    master = { ... }:
    {
      services.postgresql = {
        enable = true;
        extraPlugins = ps: with ps; [ pgjwt pgtap ];
      };
    };
  };

  testScript = { nodes, ... }:
    let
      sqlSU = "${nodes.master.services.postgresql.superUser}";
      pgProve = "${pkgs.perlPackages.TAPParserSourceHandlerpgTAP}";
      inherit (nodes.master.services.postgresql.package.pkgs) pgjwt;
    in
    ''
      start_all()
      master.wait_for_unit("postgresql")
      master.succeed(
        "${pkgs.gnused}/bin/sed -e '12 i CREATE EXTENSION pgcrypto;\\nCREATE EXTENSION pgtap;\\nSET search_path TO tap,public;' ${pgjwt.src}/test.sql > /tmp/test.sql"
      )
      master.succeed(
        "${pkgs.sudo}/bin/sudo -u ${sqlSU} PGOPTIONS=--search_path=tap,public ${pgProve}/bin/pg_prove -d postgres -v -f /tmp/test.sql"
      )
    '';
})
76
third_party/nixpkgs/nixos/tests/pgvecto-rs.nix
vendored
Normal file
76
third_party/nixpkgs/nixos/tests/pgvecto-rs.nix
vendored
Normal file
|
@ -0,0 +1,76 @@
|
|||
# mostly copied from ./timescaledb.nix which was copied from ./postgresql.nix
|
||||
# as it seemed unapproriate to test additional extensions for postgresql there.
|
||||
|
||||
{ system ? builtins.currentSystem
|
||||
, config ? { }
|
||||
, pkgs ? import ../.. { inherit system config; }
|
||||
}:
|
||||
|
||||
with import ../lib/testing-python.nix { inherit system pkgs; };
|
||||
with pkgs.lib;
|
||||
|
||||
let
|
||||
postgresql-versions = import ../../pkgs/servers/sql/postgresql pkgs;
|
||||
# Test cases from https://docs.pgvecto.rs/use-cases/hybrid-search.html
|
||||
test-sql = pkgs.writeText "postgresql-test" ''
|
||||
CREATE EXTENSION vectors;
|
||||
|
||||
CREATE TABLE items (
|
||||
id bigserial PRIMARY KEY,
|
||||
content text NOT NULL,
|
||||
embedding vectors.vector(3) NOT NULL -- 3 dimensions
|
||||
);
|
||||
|
||||
INSERT INTO items (content, embedding) VALUES
|
||||
('a fat cat sat on a mat and ate a fat rat', '[1, 2, 3]'),
|
||||
('a fat dog sat on a mat and ate a fat rat', '[4, 5, 6]'),
|
||||
('a thin cat sat on a mat and ate a thin rat', '[7, 8, 9]'),
|
||||
('a thin dog sat on a mat and ate a thin rat', '[10, 11, 12]');
|
||||
'';
|
||||
make-postgresql-test = postgresql-name: postgresql-package: makeTest {
|
||||
name = postgresql-name;
|
||||
meta = with pkgs.lib.maintainers; {
|
||||
maintainers = [ diogotcorreia ];
|
||||
};
|
||||
|
||||
nodes.machine = { ... }:
|
||||
{
|
||||
services.postgresql = {
|
||||
enable = true;
|
||||
package = postgresql-package;
|
||||
extraPlugins = ps: with ps; [
|
||||
pgvecto-rs
|
||||
];
|
||||
settings.shared_preload_libraries = "vectors";
|
||||
};
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
def check_count(statement, lines):
|
||||
return 'test $(sudo -u postgres psql postgres -tAc "{}"|wc -l) -eq {}'.format(
|
||||
statement, lines
|
||||
)
|
||||
|
||||
|
||||
machine.start()
|
||||
machine.wait_for_unit("postgresql")
|
||||
|
||||
with subtest("Postgresql with extension vectors is available just after unit start"):
|
||||
machine.succeed(check_count("SELECT * FROM pg_available_extensions WHERE name = 'vectors' AND default_version = '${postgresql-package.pkgs.pgvecto-rs.version}';", 1))
|
||||
|
||||
machine.succeed("sudo -u postgres psql -f ${test-sql}")
|
||||
|
||||
machine.succeed(check_count("SELECT content, embedding FROM items WHERE to_tsvector('english', content) @@ 'cat & rat'::tsquery;", 2))
|
||||
|
||||
machine.shutdown()
|
||||
'';
|
||||
|
||||
};
|
||||
applicablePostgresqlVersions = filterAttrs (_: value: versionAtLeast value.version "14") postgresql-versions;
|
||||
in
|
||||
mapAttrs'
|
||||
(name: package: {
|
||||
inherit name;
|
||||
value = make-postgresql-test name package;
|
||||
})
|
||||
applicablePostgresqlVersions
|
38
third_party/nixpkgs/nixos/tests/postgis.nix
vendored
Normal file
38
third_party/nixpkgs/nixos/tests/postgis.nix
vendored
Normal file
|
@ -0,0 +1,38 @@
|
|||
import ./make-test-python.nix ({ pkgs, ...} : {
|
||||
name = "postgis";
|
||||
meta = with pkgs.lib.maintainers; {
|
||||
maintainers = [ lsix ];
|
||||
};
|
||||
|
||||
nodes = {
|
||||
master =
|
||||
{ pkgs, ... }:
|
||||
|
||||
{
|
||||
services.postgresql = {
|
||||
enable = true;
|
||||
package = pkgs.postgresql;
|
||||
extraPlugins = ps: with ps; [
|
||||
postgis
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
start_all()
|
||||
master.wait_for_unit("postgresql")
|
||||
master.sleep(10) # Hopefully this is long enough!!
|
||||
master.succeed("sudo -u postgres psql -c 'CREATE EXTENSION postgis;'")
|
||||
master.succeed("sudo -u postgres psql -c 'CREATE EXTENSION postgis_raster;'")
|
||||
master.succeed("sudo -u postgres psql -c 'CREATE EXTENSION postgis_topology;'")
|
||||
master.succeed("sudo -u postgres psql -c 'select postgis_version();'")
|
||||
master.succeed("[ \"$(sudo -u postgres psql --no-psqlrc --tuples-only -c 'select postgis_version();')\" = \" ${
|
||||
pkgs.lib.versions.major pkgs.postgis.version
|
||||
}.${
|
||||
pkgs.lib.versions.minor pkgs.postgis.version
|
||||
} USE_GEOS=1 USE_PROJ=1 USE_STATS=1\" ]")
|
||||
# st_makepoint goes through c code
|
||||
master.succeed("sudo -u postgres psql --no-psqlrc --tuples-only -c 'select st_makepoint(1, 1)'")
|
||||
'';
|
||||
})
|
55
third_party/nixpkgs/nixos/tests/postgresql-jit.nix
vendored
Normal file
55
third_party/nixpkgs/nixos/tests/postgresql-jit.nix
vendored
Normal file
|
@ -0,0 +1,55 @@
|
|||
{ system ? builtins.currentSystem
|
||||
, config ? {}
|
||||
, pkgs ? import ../.. { inherit system config; }
|
||||
, package ? null
|
||||
}:
|
||||
|
||||
with import ../lib/testing-python.nix { inherit system pkgs; };
|
||||
|
||||
let
|
||||
inherit (pkgs) lib;
|
||||
packages = builtins.attrNames (import ../../pkgs/servers/sql/postgresql pkgs);
|
||||
|
||||
mkJitTestFromName = name:
|
||||
mkJitTest pkgs.${name};
|
||||
|
||||
mkJitTest = package: makeTest {
|
||||
name = package.name;
|
||||
meta.maintainers = with lib.maintainers; [ ma27 ];
|
||||
nodes.machine = { pkgs, lib, ... }: {
|
||||
services.postgresql = {
|
||||
inherit package;
|
||||
enable = true;
|
||||
enableJIT = true;
|
||||
initialScript = pkgs.writeText "init.sql" ''
|
||||
create table demo (id int);
|
||||
insert into demo (id) select generate_series(1, 5);
|
||||
'';
|
||||
};
|
||||
};
|
||||
testScript = ''
|
||||
machine.start()
|
||||
machine.wait_for_unit("postgresql.service")
|
||||
|
||||
with subtest("JIT is enabled"):
|
||||
machine.succeed("sudo -u postgres psql <<<'show jit;' | grep 'on'")
|
||||
|
||||
with subtest("Test JIT works fine"):
|
||||
output = machine.succeed(
|
||||
"cat ${pkgs.writeText "test.sql" ''
|
||||
set jit_above_cost = 1;
|
||||
EXPLAIN ANALYZE SELECT CONCAT('jit result = ', SUM(id)) FROM demo;
|
||||
SELECT CONCAT('jit result = ', SUM(id)) from demo;
|
||||
''} | sudo -u postgres psql"
|
||||
)
|
||||
assert "JIT:" in output
|
||||
assert "jit result = 15" in output
|
||||
|
||||
machine.shutdown()
|
||||
'';
|
||||
};
|
||||
in
|
||||
if package == null then
|
||||
lib.genAttrs packages mkJitTestFromName
|
||||
else
|
||||
mkJitTest package
|
141
third_party/nixpkgs/nixos/tests/postgresql-tls-client-cert.nix
vendored
Normal file
141
third_party/nixpkgs/nixos/tests/postgresql-tls-client-cert.nix
vendored
Normal file
|
@ -0,0 +1,141 @@
|
|||
{ system ? builtins.currentSystem
|
||||
, config ? { }
|
||||
, pkgs ? import ../.. { inherit system config; }
|
||||
, package ? null
|
||||
}:
|
||||
|
||||
with import ../lib/testing-python.nix { inherit system pkgs; };
|
||||
|
||||
let
|
||||
lib = pkgs.lib;
|
||||
|
||||
# Makes a test for a PostgreSQL package, given by name and looked up from `pkgs`.
|
||||
makeTestAttribute = name:
|
||||
{
|
||||
inherit name;
|
||||
value = makePostgresqlTlsClientCertTest pkgs."${name}";
|
||||
};
|
||||
|
||||
makePostgresqlTlsClientCertTest = pkg:
|
||||
let
|
||||
runWithOpenSSL = file: cmd: pkgs.runCommand file
|
||||
{
|
||||
buildInputs = [ pkgs.openssl ];
|
||||
}
|
||||
cmd;
|
||||
caKey = runWithOpenSSL "ca.key" "openssl ecparam -name prime256v1 -genkey -noout -out $out";
|
||||
caCert = runWithOpenSSL
|
||||
"ca.crt"
|
||||
''
|
||||
openssl req -new -x509 -sha256 -key ${caKey} -out $out -subj "/CN=test.example" -days 36500
|
||||
'';
|
||||
serverKey =
|
||||
runWithOpenSSL "server.key" "openssl ecparam -name prime256v1 -genkey -noout -out $out";
|
||||
serverKeyPath = "/var/lib/postgresql";
|
||||
serverCert =
|
||||
runWithOpenSSL "server.crt" ''
|
||||
openssl req -new -sha256 -key ${serverKey} -out server.csr -subj "/CN=db.test.example"
|
||||
openssl x509 -req -in server.csr -CA ${caCert} -CAkey ${caKey} \
|
||||
-CAcreateserial -out $out -days 36500 -sha256
|
||||
'';
|
||||
clientKey =
|
||||
runWithOpenSSL "client.key" "openssl ecparam -name prime256v1 -genkey -noout -out $out";
|
||||
clientCert =
|
||||
runWithOpenSSL "client.crt" ''
|
||||
openssl req -new -sha256 -key ${clientKey} -out client.csr -subj "/CN=test"
|
||||
openssl x509 -req -in client.csr -CA ${caCert} -CAkey ${caKey} \
|
||||
-CAcreateserial -out $out -days 36500 -sha256
|
||||
'';
|
||||
clientKeyPath = "/root";
|
||||
|
||||
in
|
||||
makeTest {
|
||||
name = "postgresql-tls-client-cert-${pkg.name}";
|
||||
meta.maintainers = with lib.maintainers; [ erictapen ];
|
||||
|
||||
nodes.server = { ... }: {
|
||||
system.activationScripts = {
|
||||
keyPlacement.text = ''
|
||||
mkdir -p '${serverKeyPath}'
|
||||
cp '${serverKey}' '${serverKeyPath}/server.key'
|
||||
chown postgres:postgres '${serverKeyPath}/server.key'
|
||||
chmod 600 '${serverKeyPath}/server.key'
|
||||
'';
|
||||
};
|
||||
services.postgresql = {
|
||||
package = pkg;
|
||||
enable = true;
|
||||
enableTCPIP = true;
|
||||
ensureUsers = [
|
||||
{
|
||||
name = "test";
|
||||
ensureDBOwnership = true;
|
||||
}
|
||||
];
|
||||
ensureDatabases = [ "test" ];
|
||||
settings = {
|
||||
ssl = "on";
|
||||
ssl_ca_file = toString caCert;
|
||||
ssl_cert_file = toString serverCert;
|
||||
ssl_key_file = "${serverKeyPath}/server.key";
|
||||
};
|
||||
authentication = ''
|
||||
hostssl test test ::/0 cert clientcert=verify-full
|
||||
'';
|
||||
};
|
||||
networking = {
|
||||
interfaces.eth1 = {
|
||||
ipv6.addresses = [
|
||||
{ address = "fc00::1"; prefixLength = 120; }
|
||||
];
|
||||
};
|
||||
firewall.allowedTCPPorts = [ 5432 ];
|
||||
};
|
||||
};
|
||||
|
||||
nodes.client = { ... }: {
|
||||
system.activationScripts = {
|
||||
keyPlacement.text = ''
|
||||
mkdir -p '${clientKeyPath}'
|
||||
cp '${clientKey}' '${clientKeyPath}/client.key'
|
||||
chown root:root '${clientKeyPath}/client.key'
|
||||
chmod 600 '${clientKeyPath}/client.key'
|
||||
'';
|
||||
};
|
||||
environment = {
|
||||
variables = {
|
||||
PGHOST = "db.test.example";
|
||||
PGPORT = "5432";
|
||||
PGDATABASE = "test";
|
||||
PGUSER = "test";
|
||||
PGSSLMODE = "verify-full";
|
||||
PGSSLCERT = clientCert;
|
||||
PGSSLKEY = "${clientKeyPath}/client.key";
|
||||
PGSSLROOTCERT = caCert;
|
||||
};
|
||||
systemPackages = [ pkg ];
|
||||
};
|
||||
networking = {
|
||||
interfaces.eth1 = {
|
||||
ipv6.addresses = [
|
||||
{ address = "fc00::2"; prefixLength = 120; }
|
||||
];
|
||||
};
|
||||
hosts = { "fc00::1" = [ "db.test.example" ]; };
|
||||
};
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
server.wait_for_unit("multi-user.target")
|
||||
client.wait_for_unit("multi-user.target")
|
||||
client.succeed("psql -c \"SELECT 1;\"")
|
||||
'';
|
||||
};
|
||||
|
||||
in
|
||||
if package == null then
|
||||
# all-tests.nix: Maps the generic function over all attributes of PostgreSQL packages
|
||||
builtins.listToAttrs (map makeTestAttribute (builtins.attrNames (import ../../pkgs/servers/sql/postgresql pkgs)))
|
||||
else
|
||||
# Called directly from <package>.tests
|
||||
makePostgresqlTlsClientCertTest package
|
124
third_party/nixpkgs/nixos/tests/postgresql-wal-receiver.nix
vendored
Normal file
124
third_party/nixpkgs/nixos/tests/postgresql-wal-receiver.nix
vendored
Normal file
|
@ -0,0 +1,124 @@
|
|||
{ system ? builtins.currentSystem,
|
||||
config ? {},
|
||||
pkgs ? import ../.. { inherit system config; },
|
||||
package ? null
|
||||
}:
|
||||
|
||||
with import ../lib/testing-python.nix { inherit system pkgs; };
|
||||
|
||||
let
|
||||
lib = pkgs.lib;
|
||||
|
||||
# Makes a test for a PostgreSQL package, given by name and looked up from `pkgs`.
|
||||
makeTestAttribute = name:
|
||||
{
|
||||
inherit name;
|
||||
value = makePostgresqlWalReceiverTest pkgs."${name}";
|
||||
};
|
||||
|
||||
makePostgresqlWalReceiverTest = pkg:
|
||||
let
|
||||
postgresqlDataDir = "/var/lib/postgresql/${pkg.psqlSchema}";
|
||||
replicationUser = "wal_receiver_user";
|
||||
replicationSlot = "wal_receiver_slot";
|
||||
replicationConn = "postgresql://${replicationUser}@localhost";
|
||||
baseBackupDir = "/var/cache/wals/pg_basebackup";
|
||||
walBackupDir = "/var/cache/wals/pg_wal";
|
||||
|
||||
recoveryFile = pkgs.writeTextDir "recovery.signal" "";
|
||||
|
||||
in makeTest {
|
||||
name = "postgresql-wal-receiver-${pkg.name}";
|
||||
meta.maintainers = with lib.maintainers; [ pacien ];
|
||||
|
||||
nodes.machine = { ... }: {
|
||||
systemd.tmpfiles.rules = [
|
||||
"d /var/cache/wals 0750 postgres postgres - -"
|
||||
];
|
||||
|
||||
services.postgresql = {
|
||||
package = pkg;
|
||||
enable = true;
|
||||
settings = {
|
||||
max_replication_slots = 10;
|
||||
max_wal_senders = 10;
|
||||
recovery_end_command = "touch recovery.done";
|
||||
restore_command = "cp ${walBackupDir}/%f %p";
|
||||
wal_level = "archive"; # alias for replica on pg >= 9.6
|
||||
};
|
||||
authentication = ''
|
||||
host replication ${replicationUser} all trust
|
||||
'';
|
||||
initialScript = pkgs.writeText "init.sql" ''
|
||||
create user ${replicationUser} replication;
|
||||
select * from pg_create_physical_replication_slot('${replicationSlot}');
|
||||
'';
|
||||
};
|
||||
|
||||
services.postgresqlWalReceiver.receivers.main = {
|
||||
postgresqlPackage = pkg;
|
||||
connection = replicationConn;
|
||||
slot = replicationSlot;
|
||||
directory = walBackupDir;
|
||||
};
|
||||
# This is only to speedup test, it isn't time racing. Service is set to autorestart always,
|
||||
# default 60sec is fine for real system, but is too much for a test
|
||||
systemd.services.postgresql-wal-receiver-main.serviceConfig.RestartSec = lib.mkForce 5;
|
||||
systemd.services.postgresql.serviceConfig.ReadWritePaths = [ "/var/cache/wals" ];
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
# make an initial base backup
|
||||
machine.wait_for_unit("postgresql")
|
||||
machine.wait_for_unit("postgresql-wal-receiver-main")
|
||||
# WAL receiver healthchecks PG every 5 seconds, so let's be sure they have connected each other
|
||||
# required only for 9.4
|
||||
machine.sleep(5)
|
||||
machine.succeed(
|
||||
"${pkg}/bin/pg_basebackup --dbname=${replicationConn} --pgdata=${baseBackupDir}"
|
||||
)
|
||||
|
||||
# create a dummy table with 100 records
|
||||
machine.succeed(
|
||||
"sudo -u postgres psql --command='create table dummy as select * from generate_series(1, 100) as val;'"
|
||||
)
|
||||
|
||||
# stop postgres and destroy data
|
||||
machine.systemctl("stop postgresql")
|
||||
machine.systemctl("stop postgresql-wal-receiver-main")
|
||||
machine.succeed("rm -r ${postgresqlDataDir}/{base,global,pg_*}")
|
||||
|
||||
# restore the base backup
|
||||
machine.succeed(
|
||||
"cp -r ${baseBackupDir}/* ${postgresqlDataDir} && chown postgres:postgres -R ${postgresqlDataDir}"
|
||||
)
|
||||
|
||||
# prepare WAL and recovery
|
||||
machine.succeed("chmod a+rX -R ${walBackupDir}")
|
||||
machine.execute(
|
||||
"for part in ${walBackupDir}/*.partial; do mv $part ''${part%%.*}; done"
|
||||
) # make use of partial segments too
|
||||
machine.succeed(
|
||||
"cp ${recoveryFile}/* ${postgresqlDataDir}/ && chmod 666 ${postgresqlDataDir}/recovery*"
|
||||
)
|
||||
|
||||
# replay WAL
|
||||
machine.systemctl("start postgresql")
|
||||
machine.wait_for_file("${postgresqlDataDir}/recovery.done")
|
||||
machine.systemctl("restart postgresql")
|
||||
machine.wait_for_unit("postgresql")
|
||||
|
||||
# check that our records have been restored
|
||||
machine.succeed(
|
||||
"test $(sudo -u postgres psql --pset='pager=off' --tuples-only --command='select count(distinct val) from dummy;') -eq 100"
|
||||
)
|
||||
'';
|
||||
};
|
||||
|
||||
in
|
||||
if package == null then
|
||||
# all-tests.nix: Maps the generic function over all attributes of PostgreSQL packages
|
||||
builtins.listToAttrs (map makeTestAttribute (builtins.attrNames (import ../../pkgs/servers/sql/postgresql pkgs)))
|
||||
else
|
||||
# Called directly from <package>.tests
|
||||
makePostgresqlWalReceiverTest package
|
60
third_party/nixpkgs/nixos/tests/postgresql-wal2json.nix
vendored
Normal file
60
third_party/nixpkgs/nixos/tests/postgresql-wal2json.nix
vendored
Normal file
|
@ -0,0 +1,60 @@
|
|||
{
|
||||
system ? builtins.currentSystem,
|
||||
config ? { },
|
||||
pkgs ? import ../.. { inherit system config; },
|
||||
postgresql ? null,
|
||||
}:
|
||||
|
||||
let
|
||||
makeTest = import ./make-test-python.nix;
|
||||
# Makes a test for a PostgreSQL package, given by name and looked up from `pkgs`.
|
||||
makeTestAttribute = name: {
|
||||
inherit name;
|
||||
value = makePostgresqlWal2jsonTest pkgs."${name}";
|
||||
};
|
||||
|
||||
makePostgresqlWal2jsonTest =
|
||||
postgresqlPackage:
|
||||
makeTest {
|
||||
name = "postgresql-wal2json-${postgresqlPackage.name}";
|
||||
meta.maintainers = with pkgs.lib.maintainers; [ euank ];
|
||||
|
||||
nodes.machine = {
|
||||
services.postgresql = {
|
||||
package = postgresqlPackage;
|
||||
enable = true;
|
||||
extraPlugins = with postgresqlPackage.pkgs; [ wal2json ];
|
||||
settings = {
|
||||
wal_level = "logical";
|
||||
max_replication_slots = "10";
|
||||
max_wal_senders = "10";
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
testScript = ''
|
||||
machine.wait_for_unit("postgresql")
|
||||
machine.succeed(
|
||||
"sudo -u postgres psql -qAt -f ${./postgresql/wal2json/example2.sql} postgres > /tmp/example2.out"
|
||||
)
|
||||
machine.succeed(
|
||||
"diff ${./postgresql/wal2json/example2.out} /tmp/example2.out"
|
||||
)
|
||||
machine.succeed(
|
||||
"sudo -u postgres psql -qAt -f ${./postgresql/wal2json/example3.sql} postgres > /tmp/example3.out"
|
||||
)
|
||||
machine.succeed(
|
||||
"diff ${./postgresql/wal2json/example3.out} /tmp/example3.out"
|
||||
)
|
||||
'';
|
||||
};
|
||||
|
||||
in
|
||||
# By default, create one test per postgresql version
|
||||
if postgresql == null then
|
||||
builtins.listToAttrs (
|
||||
map makeTestAttribute (builtins.attrNames (import ../../pkgs/servers/sql/postgresql pkgs))
|
||||
)
|
||||
# but if postgresql is set, we're being made as a passthru test for a specific postgres + wal2json version, just run one
|
||||
else
|
||||
makePostgresqlWal2jsonTest postgresql
|
226
third_party/nixpkgs/nixos/tests/postgresql.nix
vendored
Normal file
226
third_party/nixpkgs/nixos/tests/postgresql.nix
vendored
Normal file
|
@ -0,0 +1,226 @@
|
|||
{ system ? builtins.currentSystem,
|
||||
config ? {},
|
||||
pkgs ? import ../.. { inherit system config; }
|
||||
}:
|
||||
|
||||
with import ../lib/testing-python.nix { inherit system pkgs; };
|
||||
with pkgs.lib;
|
||||
|
||||
let
|
||||
postgresql-versions = import ../../pkgs/servers/sql/postgresql pkgs;
|
||||
test-sql = pkgs.writeText "postgresql-test" ''
|
||||
CREATE EXTENSION pgcrypto; -- just to check if lib loading works
|
||||
CREATE TABLE sth (
|
||||
id int
|
||||
);
|
||||
INSERT INTO sth (id) VALUES (1);
|
||||
INSERT INTO sth (id) VALUES (1);
|
||||
INSERT INTO sth (id) VALUES (1);
|
||||
INSERT INTO sth (id) VALUES (1);
|
||||
INSERT INTO sth (id) VALUES (1);
|
||||
CREATE TABLE xmltest ( doc xml );
|
||||
INSERT INTO xmltest (doc) VALUES ('<test>ok</test>'); -- check if libxml2 enabled
|
||||
'';
|
||||
make-postgresql-test = postgresql-name: postgresql-package: backup-all: makeTest {
|
||||
name = postgresql-name;
|
||||
meta = with pkgs.lib.maintainers; {
|
||||
maintainers = [ zagy ];
|
||||
};
|
||||
|
||||
nodes.machine = {...}:
|
||||
{
|
||||
services.postgresql = {
|
||||
enable = true;
|
||||
package = postgresql-package;
|
||||
};
|
||||
|
||||
services.postgresqlBackup = {
|
||||
enable = true;
|
||||
databases = optional (!backup-all) "postgres";
|
||||
};
|
||||
};
|
||||
|
||||
testScript = let
|
||||
backupName = if backup-all then "all" else "postgres";
|
||||
backupService = if backup-all then "postgresqlBackup" else "postgresqlBackup-postgres";
|
||||
backupFileBase = "/var/backup/postgresql/${backupName}";
|
||||
in ''
|
||||
def check_count(statement, lines):
|
||||
return 'test $(sudo -u postgres psql postgres -tAc "{}"|wc -l) -eq {}'.format(
|
||||
statement, lines
|
||||
)
|
||||
|
||||
|
||||
machine.start()
|
||||
machine.wait_for_unit("postgresql")
|
||||
|
||||
with subtest("Postgresql is available just after unit start"):
|
||||
machine.succeed(
|
||||
"cat ${test-sql} | sudo -u postgres psql"
|
||||
)
|
||||
|
||||
with subtest("Postgresql survives restart (bug #1735)"):
|
||||
machine.shutdown()
|
||||
import time
|
||||
time.sleep(2)
|
||||
machine.start()
|
||||
machine.wait_for_unit("postgresql")
|
||||
|
||||
machine.fail(check_count("SELECT * FROM sth;", 3))
|
||||
machine.succeed(check_count("SELECT * FROM sth;", 5))
|
||||
machine.fail(check_count("SELECT * FROM sth;", 4))
|
||||
machine.succeed(check_count("SELECT xpath('/test/text()', doc) FROM xmltest;", 1))
|
||||
|
||||
with subtest("Backup service works"):
|
||||
machine.succeed(
|
||||
"systemctl start ${backupService}.service",
|
||||
"zcat ${backupFileBase}.sql.gz | grep '<test>ok</test>'",
|
||||
"ls -hal /var/backup/postgresql/ >/dev/console",
|
||||
"stat -c '%a' ${backupFileBase}.sql.gz | grep 600",
|
||||
)
|
||||
with subtest("Backup service removes prev files"):
|
||||
machine.succeed(
|
||||
# Create dummy prev files.
|
||||
"touch ${backupFileBase}.prev.sql{,.gz,.zstd}",
|
||||
"chown postgres:postgres ${backupFileBase}.prev.sql{,.gz,.zstd}",
|
||||
|
||||
# Run backup.
|
||||
"systemctl start ${backupService}.service",
|
||||
"ls -hal /var/backup/postgresql/ >/dev/console",
|
||||
|
||||
# Since nothing has changed in the database, the cur and prev files
|
||||
# should match.
|
||||
"zcat ${backupFileBase}.sql.gz | grep '<test>ok</test>'",
|
||||
"cmp ${backupFileBase}.sql.gz ${backupFileBase}.prev.sql.gz",
|
||||
|
||||
# The prev files with unused suffix should be removed.
|
||||
"[ ! -f '${backupFileBase}.prev.sql' ]",
|
||||
"[ ! -f '${backupFileBase}.prev.sql.zstd' ]",
|
||||
|
||||
# Both cur and prev file should only be accessible by the postgres user.
|
||||
"stat -c '%a' ${backupFileBase}.sql.gz | grep 600",
|
||||
"stat -c '%a' '${backupFileBase}.prev.sql.gz' | grep 600",
|
||||
)
|
||||
with subtest("Backup service fails gracefully"):
|
||||
        # Sabotage the backup process
        machine.succeed("rm /run/postgresql/.s.PGSQL.5432")
        machine.fail(
            "systemctl start ${backupService}.service",
        )
        machine.succeed(
            "ls -hal /var/backup/postgresql/ >/dev/console",
            "zcat ${backupFileBase}.prev.sql.gz | grep '<test>ok</test>'",
            "stat ${backupFileBase}.in-progress.sql.gz",
        )
        # In a previous version, the second run would overwrite prev.sql.gz,
        # so we test a second run as well.
        machine.fail(
            "systemctl start ${backupService}.service",
        )
        machine.succeed(
            "stat ${backupFileBase}.in-progress.sql.gz",
            "zcat ${backupFileBase}.prev.sql.gz | grep '<test>ok</test>'",
        )

    with subtest("Initdb works"):
        machine.succeed("sudo -u postgres initdb -D /tmp/testpostgres2")

    machine.log(machine.execute("systemd-analyze security postgresql.service | grep -v ✓")[1])

    machine.shutdown()
  '';
  };

  mk-ensure-clauses-test = postgresql-name: postgresql-package: makeTest {
    name = postgresql-name;
    meta = with pkgs.lib.maintainers; {
      maintainers = [ zagy ];
    };

    nodes.machine = {...}:
      {
        services.postgresql = {
          enable = true;
          package = postgresql-package;
          ensureUsers = [
            {
              name = "all-clauses";
              ensureClauses = {
                superuser = true;
                createdb = true;
                createrole = true;
                "inherit" = true;
                login = true;
                replication = true;
                bypassrls = true;
              };
            }
            {
              name = "default-clauses";
            }
          ];
        };
      };

    testScript = let
      getClausesQuery = user: pkgs.lib.concatStringsSep " "
        [
          "SELECT row_to_json(row)"
          "FROM ("
          "SELECT"
          "rolsuper,"
          "rolinherit,"
          "rolcreaterole,"
          "rolcreatedb,"
          "rolcanlogin,"
          "rolreplication,"
          "rolbypassrls"
          "FROM pg_roles"
          "WHERE rolname = '${user}'"
          ") row;"
        ];
    in ''
      import json
      machine.start()
      machine.wait_for_unit("postgresql")

      with subtest("All user permissions are set according to the ensureClauses attr"):
          clauses = json.loads(
              machine.succeed(
                  "sudo -u postgres psql -tc \"${getClausesQuery "all-clauses"}\""
              )
          )
          print(clauses)
          assert clauses['rolsuper'], 'expected user with clauses to have superuser clause'
          assert clauses['rolinherit'], 'expected user with clauses to have inherit clause'
          assert clauses['rolcreaterole'], 'expected user with clauses to have create role clause'
          assert clauses['rolcreatedb'], 'expected user with clauses to have create db clause'
          assert clauses['rolcanlogin'], 'expected user with clauses to have login clause'
          assert clauses['rolreplication'], 'expected user with clauses to have replication clause'
          assert clauses['rolbypassrls'], 'expected user with clauses to have bypassrls clause'

      with subtest("All user permissions default when ensureClauses is not provided"):
          clauses = json.loads(
              machine.succeed(
                  "sudo -u postgres psql -tc \"${getClausesQuery "default-clauses"}\""
              )
          )
          assert not clauses['rolsuper'], 'expected user with no clauses set to have default superuser clause'
          assert clauses['rolinherit'], 'expected user with no clauses set to have default inherit clause'
          assert not clauses['rolcreaterole'], 'expected user with no clauses set to have default create role clause'
          assert not clauses['rolcreatedb'], 'expected user with no clauses set to have default create db clause'
          assert clauses['rolcanlogin'], 'expected user with no clauses set to have default login clause'
          assert not clauses['rolreplication'], 'expected user with no clauses set to have default replication clause'
          assert not clauses['rolbypassrls'], 'expected user with no clauses set to have default bypassrls clause'

      machine.shutdown()
    '';
  };
in
concatMapAttrs (name: package: {
  ${name} = make-postgresql-test name package false;
  ${name + "-backup-all"} = make-postgresql-test "${name + "-backup-all"}" package true;
  ${name + "-clauses"} = mk-ensure-clauses-test name package;
}) postgresql-versions
@ -1,116 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  makeTestFor =
    package:
    makeTest {
      name = "postgresql_anonymizer-${package.name}";
      meta.maintainers = lib.teams.flyingcircus.members;

      nodes.machine =
        { pkgs, ... }:
        {
          environment.systemPackages = [ pkgs.pg-dump-anon ];
          services.postgresql = {
            inherit package;
            enable = true;
            extraPlugins = ps: [ ps.anonymizer ];
            settings.shared_preload_libraries = [ "anon" ];
          };
        };

      testScript = ''
        start_all()
        machine.wait_for_unit("multi-user.target")
        machine.wait_for_unit("postgresql.service")

        with subtest("Setup"):
            machine.succeed("sudo -u postgres psql --command 'create database demo'")
            machine.succeed(
                "sudo -u postgres psql -d demo -f ${pkgs.writeText "init.sql" ''
                  create extension anon cascade;
                  select anon.init();
                  create table player(id serial, name text, points int);
                  insert into player(id,name,points) values (1,'Foo', 23);
                  insert into player(id,name,points) values (2,'Bar',42);
                  security label for anon on column player.name is 'MASKED WITH FUNCTION anon.fake_last_name();';
                  security label for anon on column player.points is 'MASKED WITH VALUE NULL';
                ''}"
            )

        def get_player_table_contents():
            return [
                x.split(',') for x in machine.succeed("sudo -u postgres psql -d demo --csv --command 'select * from player'").splitlines()[1:]
            ]

        def check_anonymized_row(row, id, original_name):
            assert row[0] == id, f"Expected first row to have ID {id}, but got {row[0]}"
            assert row[1] != original_name, f"Expected first row to have a name other than {original_name}"
            assert not bool(row[2]), "Expected points to be NULL in first row"

        def find_xsv_in_dump(dump, sep=','):
            """
            Expecting to find a CSV (for pg_dump_anon) or TSV (for pg_dump) structure, looking like

            COPY public.player ...
            1,Shields,
            2,Salazar,
            \.

            in the given dump (the commas are tabs in case of pg_dump).
            Extract the CSV lines and split by `sep`.
            """

            try:
                from itertools import dropwhile, takewhile
                return [x.split(sep) for x in list(takewhile(
                    lambda x: x != "\\.",
                    dropwhile(
                        lambda x: not x.startswith("COPY public.player"),
                        dump.splitlines()
                    )
                ))[1:]]
            except:
                print(f"Dump to process: {dump}")
                raise

        def check_original_data(output):
            assert output[0] == ['1','Foo','23'], f"Expected first row from player table to be 1,Foo,23; got {output[0]}"
            assert output[1] == ['2','Bar','42'], f"Expected first row from player table to be 2,Bar,42; got {output[1]}"

        def check_anonymized_rows(output):
            check_anonymized_row(output[0], '1', 'Foo')
            check_anonymized_row(output[1], '2', 'Bar')

        with subtest("Check initial state"):
            check_original_data(get_player_table_contents())

        with subtest("Anonymous dumps"):
            check_original_data(find_xsv_in_dump(
                machine.succeed("sudo -u postgres pg_dump demo"),
                sep='\t'
            ))
            check_anonymized_rows(find_xsv_in_dump(
                machine.succeed("sudo -u postgres pg_dump_anon -U postgres -h /run/postgresql -d demo"),
                sep=','
            ))

        with subtest("Anonymize"):
            machine.succeed("sudo -u postgres psql -d demo --command 'select anon.anonymize_database();'")
            check_anonymized_rows(get_player_table_contents())
      '';
    };
in
lib.recurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) (
    lib.filterAttrs (_: p: !p.pkgs.anonymizer.meta.broken) pkgs.postgresqlVersions
  )
  // {
    passthru.override = p: makeTestFor p;
  }
)
@ -1,26 +0,0 @@
{
  system ? builtins.currentSystem,
  config ? { },
  pkgs ? import ../../.. { inherit system config; },
}:

with import ../../lib/testing-python.nix { inherit system pkgs; };

let
  importWithArgs = path: import path { inherit pkgs makeTest; };
in
{
  # postgresql
  postgresql = importWithArgs ./postgresql.nix;
  postgresql-jit = importWithArgs ./postgresql-jit.nix;
  postgresql-wal-receiver = importWithArgs ./postgresql-wal-receiver.nix;
  postgresql-tls-client-cert = importWithArgs ./postgresql-tls-client-cert.nix;

  # extensions
  anonymizer = importWithArgs ./anonymizer.nix;
  pgjwt = importWithArgs ./pgjwt.nix;
  pgvecto-rs = importWithArgs ./pgvecto-rs.nix;
  timescaledb = importWithArgs ./timescaledb.nix;
  tsja = importWithArgs ./tsja.nix;
  wal2json = importWithArgs ./wal2json.nix;
}
@ -1,57 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  makeTestFor =
    package:
    makeTest {
      name = "pgjwt-${package.name}";
      meta = with lib.maintainers; {
        maintainers = [
          spinus
          willibutz
        ];
      };

      nodes.master =
        { ... }:
        {
          services.postgresql = {
            inherit package;
            enable = true;
            extraPlugins =
              ps: with ps; [
                pgjwt
                pgtap
              ];
          };
        };

      testScript =
        { nodes, ... }:
        let
          sqlSU = "${nodes.master.services.postgresql.superUser}";
          pgProve = "${pkgs.perlPackages.TAPParserSourceHandlerpgTAP}";
          inherit (nodes.master.services.postgresql.package.pkgs) pgjwt;
        in
        ''
          start_all()
          master.wait_for_unit("postgresql")
          master.succeed(
              "${pkgs.sudo}/bin/sudo -u ${sqlSU} ${pgProve}/bin/pg_prove -d postgres -v -f ${pgjwt.src}/test.sql"
          )
        '';
    };
in
lib.recurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) (
    lib.filterAttrs (_: p: !p.pkgs.pgjwt.meta.broken) pkgs.postgresqlVersions
  )
  // {
    passthru.override = p: makeTestFor p;
  }
)
@ -1,81 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  # Test cases from https://docs.pgvecto.rs/use-cases/hybrid-search.html
  test-sql = pkgs.writeText "postgresql-test" ''
    CREATE EXTENSION vectors;

    CREATE TABLE items (
      id bigserial PRIMARY KEY,
      content text NOT NULL,
      embedding vectors.vector(3) NOT NULL -- 3 dimensions
    );

    INSERT INTO items (content, embedding) VALUES
      ('a fat cat sat on a mat and ate a fat rat', '[1, 2, 3]'),
      ('a fat dog sat on a mat and ate a fat rat', '[4, 5, 6]'),
      ('a thin cat sat on a mat and ate a thin rat', '[7, 8, 9]'),
      ('a thin dog sat on a mat and ate a thin rat', '[10, 11, 12]');
  '';

  makeTestFor =
    postgresqlPackage:
    makeTest {
      name = "pgvecto-rs-${postgresqlPackage.name}";
      meta = with lib.maintainers; {
        maintainers = [ diogotcorreia ];
      };

      nodes.machine =
        { ... }:
        {
          services.postgresql = {
            enable = true;
            package = postgresqlPackage;
            extraPlugins =
              ps: with ps; [
                pgvecto-rs
              ];
            settings.shared_preload_libraries = "vectors";
          };
        };

      testScript =
        { nodes, ... }:
        let
          inherit (nodes.machine.services.postgresql.package.pkgs) pgvecto-rs;
        in
        ''
          def check_count(statement, lines):
              return 'test $(sudo -u postgres psql postgres -tAc "{}"|wc -l) -eq {}'.format(
                  statement, lines
              )


          machine.start()
          machine.wait_for_unit("postgresql")

          with subtest("Postgresql with extension vectors is available just after unit start"):
              machine.succeed(check_count("SELECT * FROM pg_available_extensions WHERE name = 'vectors' AND default_version = '${pgvecto-rs.version}';", 1))

          machine.succeed("sudo -u postgres psql -f ${test-sql}")

          machine.succeed(check_count("SELECT content, embedding FROM items WHERE to_tsvector('english', content) @@ 'cat & rat'::tsquery;", 2))

          machine.shutdown()
        '';
    };
in
lib.recurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) (
    lib.filterAttrs (_: p: !p.pkgs.pgvecto-rs.meta.broken) pkgs.postgresqlVersions
  )
  // {
    passthru.override = p: makeTestFor p;
  }
)
@ -1,58 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  makeTestFor =
    package:
    makeTest {
      name = "postgresql-jit-${package.name}";
      meta.maintainers = with lib.maintainers; [ ma27 ];

      nodes.machine =
        { pkgs, ... }:
        {
          services.postgresql = {
            inherit package;
            enable = true;
            enableJIT = true;
            initialScript = pkgs.writeText "init.sql" ''
              create table demo (id int);
              insert into demo (id) select generate_series(1, 5);
            '';
          };
        };

      testScript = ''
        machine.start()
        machine.wait_for_unit("postgresql.service")

        with subtest("JIT is enabled"):
            machine.succeed("sudo -u postgres psql <<<'show jit;' | grep 'on'")

        with subtest("Test JIT works fine"):
            output = machine.succeed(
                "cat ${pkgs.writeText "test.sql" ''
                  set jit_above_cost = 1;
                  EXPLAIN ANALYZE SELECT CONCAT('jit result = ', SUM(id)) FROM demo;
                  SELECT CONCAT('jit result = ', SUM(id)) from demo;
                ''} | sudo -u postgres psql"
            )
            assert "JIT:" in output
            assert "jit result = 15" in output

        machine.shutdown()
      '';
    };
in
lib.recurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) (
    lib.filterAttrs (n: _: lib.hasSuffix "_jit" n) pkgs.postgresqlVersions
  )
  // {
    passthru.override = p: makeTestFor p;
  }
)
@ -1,135 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  runWithOpenSSL =
    file: cmd:
    pkgs.runCommand file {
      buildInputs = [ pkgs.openssl ];
    } cmd;
  caKey = runWithOpenSSL "ca.key" "openssl ecparam -name prime256v1 -genkey -noout -out $out";
  caCert = runWithOpenSSL "ca.crt" ''
    openssl req -new -x509 -sha256 -key ${caKey} -out $out -subj "/CN=test.example" -days 36500
  '';
  serverKey = runWithOpenSSL "server.key" "openssl ecparam -name prime256v1 -genkey -noout -out $out";
  serverKeyPath = "/var/lib/postgresql";
  serverCert = runWithOpenSSL "server.crt" ''
    openssl req -new -sha256 -key ${serverKey} -out server.csr -subj "/CN=db.test.example"
    openssl x509 -req -in server.csr -CA ${caCert} -CAkey ${caKey} \
      -CAcreateserial -out $out -days 36500 -sha256
  '';
  clientKey = runWithOpenSSL "client.key" "openssl ecparam -name prime256v1 -genkey -noout -out $out";
  clientCert = runWithOpenSSL "client.crt" ''
    openssl req -new -sha256 -key ${clientKey} -out client.csr -subj "/CN=test"
    openssl x509 -req -in client.csr -CA ${caCert} -CAkey ${caKey} \
      -CAcreateserial -out $out -days 36500 -sha256
  '';
  clientKeyPath = "/root";

  makeTestFor =
    package:
    makeTest {
      name = "postgresql-tls-client-cert-${package.name}";
      meta.maintainers = with lib.maintainers; [ erictapen ];

      nodes.server =
        { ... }:
        {
          system.activationScripts = {
            keyPlacement.text = ''
              mkdir -p '${serverKeyPath}'
              cp '${serverKey}' '${serverKeyPath}/server.key'
              chown postgres:postgres '${serverKeyPath}/server.key'
              chmod 600 '${serverKeyPath}/server.key'
            '';
          };
          services.postgresql = {
            inherit package;
            enable = true;
            enableTCPIP = true;
            ensureUsers = [
              {
                name = "test";
                ensureDBOwnership = true;
              }
            ];
            ensureDatabases = [ "test" ];
            settings = {
              ssl = "on";
              ssl_ca_file = toString caCert;
              ssl_cert_file = toString serverCert;
              ssl_key_file = "${serverKeyPath}/server.key";
            };
            authentication = ''
              hostssl test test ::/0 cert clientcert=verify-full
            '';
          };
          networking = {
            interfaces.eth1 = {
              ipv6.addresses = [
                {
                  address = "fc00::1";
                  prefixLength = 120;
                }
              ];
            };
            firewall.allowedTCPPorts = [ 5432 ];
          };
        };

      nodes.client =
        { ... }:
        {
          system.activationScripts = {
            keyPlacement.text = ''
              mkdir -p '${clientKeyPath}'
              cp '${clientKey}' '${clientKeyPath}/client.key'
              chown root:root '${clientKeyPath}/client.key'
              chmod 600 '${clientKeyPath}/client.key'
            '';
          };
          environment = {
            variables = {
              PGHOST = "db.test.example";
              PGPORT = "5432";
              PGDATABASE = "test";
              PGUSER = "test";
              PGSSLMODE = "verify-full";
              PGSSLCERT = clientCert;
              PGSSLKEY = "${clientKeyPath}/client.key";
              PGSSLROOTCERT = caCert;
            };
            systemPackages = [ package ];
          };
          networking = {
            interfaces.eth1 = {
              ipv6.addresses = [
                {
                  address = "fc00::2";
                  prefixLength = 120;
                }
              ];
            };
            hosts = {
              "fc00::1" = [ "db.test.example" ];
            };
          };
        };

      testScript = ''
        server.wait_for_unit("multi-user.target")
        client.wait_for_unit("multi-user.target")
        client.succeed("psql -c \"SELECT 1;\"")
      '';
    };
in
lib.recurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) pkgs.postgresqlVersions
  // {
    passthru.override = p: makeTestFor p;
  }
)
@ -1,115 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  makeTestFor =
    package:
    let
      postgresqlDataDir = "/var/lib/postgresql/${package.psqlSchema}";
      replicationUser = "wal_receiver_user";
      replicationSlot = "wal_receiver_slot";
      replicationConn = "postgresql://${replicationUser}@localhost";
      baseBackupDir = "/var/cache/wals/pg_basebackup";
      walBackupDir = "/var/cache/wals/pg_wal";
      recoveryFile = pkgs.writeTextDir "recovery.signal" "";
    in
    makeTest {
      name = "postgresql-wal-receiver-${package.name}";
      meta.maintainers = with lib.maintainers; [ pacien ];

      nodes.machine =
        { ... }:
        {
          systemd.tmpfiles.rules = [
            "d /var/cache/wals 0750 postgres postgres - -"
          ];

          services.postgresql = {
            inherit package;
            enable = true;
            settings = {
              max_replication_slots = 10;
              max_wal_senders = 10;
              recovery_end_command = "touch recovery.done";
              restore_command = "cp ${walBackupDir}/%f %p";
              wal_level = "archive"; # alias for replica on pg >= 9.6
            };
            authentication = ''
              host replication ${replicationUser} all trust
            '';
            initialScript = pkgs.writeText "init.sql" ''
              create user ${replicationUser} replication;
              select * from pg_create_physical_replication_slot('${replicationSlot}');
            '';
          };

          services.postgresqlWalReceiver.receivers.main = {
            postgresqlPackage = package;
            connection = replicationConn;
            slot = replicationSlot;
            directory = walBackupDir;
          };
          # This is only to speedup test, it isn't time racing. Service is set to autorestart always,
          # default 60sec is fine for real system, but is too much for a test
          systemd.services.postgresql-wal-receiver-main.serviceConfig.RestartSec = lib.mkForce 5;
          systemd.services.postgresql.serviceConfig.ReadWritePaths = [ "/var/cache/wals" ];
        };

      testScript = ''
        # make an initial base backup
        machine.wait_for_unit("postgresql")
        machine.wait_for_unit("postgresql-wal-receiver-main")
        # WAL receiver healthchecks PG every 5 seconds, so let's be sure they have connected each other
        # required only for 9.4
        machine.sleep(5)
        machine.succeed(
            "${package}/bin/pg_basebackup --dbname=${replicationConn} --pgdata=${baseBackupDir}"
        )

        # create a dummy table with 100 records
        machine.succeed(
            "sudo -u postgres psql --command='create table dummy as select * from generate_series(1, 100) as val;'"
        )

        # stop postgres and destroy data
        machine.systemctl("stop postgresql")
        machine.systemctl("stop postgresql-wal-receiver-main")
        machine.succeed("rm -r ${postgresqlDataDir}/{base,global,pg_*}")

        # restore the base backup
        machine.succeed(
            "cp -r ${baseBackupDir}/* ${postgresqlDataDir} && chown postgres:postgres -R ${postgresqlDataDir}"
        )

        # prepare WAL and recovery
        machine.succeed("chmod a+rX -R ${walBackupDir}")
        machine.execute(
            "for part in ${walBackupDir}/*.partial; do mv $part ''${part%%.*}; done"
        ) # make use of partial segments too
        machine.succeed(
            "cp ${recoveryFile}/* ${postgresqlDataDir}/ && chmod 666 ${postgresqlDataDir}/recovery*"
        )

        # replay WAL
        machine.systemctl("start postgresql")
        machine.wait_for_file("${postgresqlDataDir}/recovery.done")
        machine.systemctl("restart postgresql")
        machine.wait_for_unit("postgresql")

        # check that our records have been restored
        machine.succeed(
            "test $(sudo -u postgres psql --pset='pager=off' --tuples-only --command='select count(distinct val) from dummy;') -eq 100"
        )
      '';
    };
in
lib.recurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) pkgs.postgresqlVersions
  // {
    passthru.override = p: makeTestFor p;
  }
)
@ -1,244 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  makeTestFor =
    package:
    lib.recurseIntoAttrs {
      postgresql = makeTestForWithBackupAll package false;
      postgresql-backup-all = makeTestForWithBackupAll package true;
      postgresql-clauses = makeEnsureTestFor package;
    };

  test-sql = pkgs.writeText "postgresql-test" ''
    CREATE EXTENSION pgcrypto; -- just to check if lib loading works
    CREATE TABLE sth (
      id int
    );
    INSERT INTO sth (id) VALUES (1);
    INSERT INTO sth (id) VALUES (1);
    INSERT INTO sth (id) VALUES (1);
    INSERT INTO sth (id) VALUES (1);
    INSERT INTO sth (id) VALUES (1);
    CREATE TABLE xmltest ( doc xml );
    INSERT INTO xmltest (doc) VALUES ('<test>ok</test>'); -- check if libxml2 enabled
  '';

  makeTestForWithBackupAll =
    package: backupAll:
    makeTest {
      name = "postgresql${lib.optionalString backupAll "-backup-all"}-${package.name}";
      meta = with lib.maintainers; {
        maintainers = [ zagy ];
      };

      nodes.machine =
        { ... }:
        {
          services.postgresql = {
            inherit package;
            enable = true;
          };

          services.postgresqlBackup = {
            enable = true;
            databases = lib.optional (!backupAll) "postgres";
          };
        };

      testScript =
        let
          backupName = if backupAll then "all" else "postgres";
          backupService = if backupAll then "postgresqlBackup" else "postgresqlBackup-postgres";
          backupFileBase = "/var/backup/postgresql/${backupName}";
        in
        ''
          def check_count(statement, lines):
              return 'test $(sudo -u postgres psql postgres -tAc "{}"|wc -l) -eq {}'.format(
                  statement, lines
              )


          machine.start()
          machine.wait_for_unit("postgresql")

          with subtest("Postgresql is available just after unit start"):
              machine.succeed(
                  "cat ${test-sql} | sudo -u postgres psql"
              )

          with subtest("Postgresql survives restart (bug #1735)"):
              machine.shutdown()
              import time
              time.sleep(2)
              machine.start()
              machine.wait_for_unit("postgresql")

          machine.fail(check_count("SELECT * FROM sth;", 3))
          machine.succeed(check_count("SELECT * FROM sth;", 5))
          machine.fail(check_count("SELECT * FROM sth;", 4))
          machine.succeed(check_count("SELECT xpath('/test/text()', doc) FROM xmltest;", 1))

          with subtest("Backup service works"):
              machine.succeed(
                  "systemctl start ${backupService}.service",
                  "zcat ${backupFileBase}.sql.gz | grep '<test>ok</test>'",
                  "ls -hal /var/backup/postgresql/ >/dev/console",
                  "stat -c '%a' ${backupFileBase}.sql.gz | grep 600",
              )
          with subtest("Backup service removes prev files"):
              machine.succeed(
                  # Create dummy prev files.
                  "touch ${backupFileBase}.prev.sql{,.gz,.zstd}",
                  "chown postgres:postgres ${backupFileBase}.prev.sql{,.gz,.zstd}",

                  # Run backup.
                  "systemctl start ${backupService}.service",
                  "ls -hal /var/backup/postgresql/ >/dev/console",

                  # Since nothing has changed in the database, the cur and prev files
                  # should match.
                  "zcat ${backupFileBase}.sql.gz | grep '<test>ok</test>'",
                  "cmp ${backupFileBase}.sql.gz ${backupFileBase}.prev.sql.gz",

                  # The prev files with unused suffix should be removed.
                  "[ ! -f '${backupFileBase}.prev.sql' ]",
                  "[ ! -f '${backupFileBase}.prev.sql.zstd' ]",

                  # Both cur and prev file should only be accessible by the postgres user.
                  "stat -c '%a' ${backupFileBase}.sql.gz | grep 600",
                  "stat -c '%a' '${backupFileBase}.prev.sql.gz' | grep 600",
              )
          with subtest("Backup service fails gracefully"):
              # Sabotage the backup process
              machine.succeed("rm /run/postgresql/.s.PGSQL.5432")
              machine.fail(
                  "systemctl start ${backupService}.service",
              )
              machine.succeed(
                  "ls -hal /var/backup/postgresql/ >/dev/console",
                  "zcat ${backupFileBase}.prev.sql.gz | grep '<test>ok</test>'",
                  "stat ${backupFileBase}.in-progress.sql.gz",
              )
              # In a previous version, the second run would overwrite prev.sql.gz,
              # so we test a second run as well.
              machine.fail(
                  "systemctl start ${backupService}.service",
              )
              machine.succeed(
                  "stat ${backupFileBase}.in-progress.sql.gz",
                  "zcat ${backupFileBase}.prev.sql.gz | grep '<test>ok</test>'",
              )


          with subtest("Initdb works"):
              machine.succeed("sudo -u postgres initdb -D /tmp/testpostgres2")

          machine.log(machine.execute("systemd-analyze security postgresql.service | grep -v ✓")[1])

          machine.shutdown()
        '';
    };

  makeEnsureTestFor =
    package:
    makeTest {
      name = "postgresql-clauses-${package.name}";
      meta = with lib.maintainers; {
        maintainers = [ zagy ];
      };

      nodes.machine =
        { ... }:
        {
          services.postgresql = {
            inherit package;
            enable = true;
            ensureUsers = [
              {
                name = "all-clauses";
                ensureClauses = {
                  superuser = true;
                  createdb = true;
                  createrole = true;
                  "inherit" = true;
                  login = true;
                  replication = true;
                  bypassrls = true;
                };
              }
              {
                name = "default-clauses";
              }
            ];
          };
        };

      testScript =
        let
          getClausesQuery =
            user:
            lib.concatStringsSep " " [
              "SELECT row_to_json(row)"
              "FROM ("
              "SELECT"
              "rolsuper,"
              "rolinherit,"
              "rolcreaterole,"
              "rolcreatedb,"
              "rolcanlogin,"
              "rolreplication,"
              "rolbypassrls"
              "FROM pg_roles"
              "WHERE rolname = '${user}'"
              ") row;"
            ];
        in
        ''
          import json
          machine.start()
          machine.wait_for_unit("postgresql")

          with subtest("All user permissions are set according to the ensureClauses attr"):
              clauses = json.loads(
                  machine.succeed(
                      "sudo -u postgres psql -tc \"${getClausesQuery "all-clauses"}\""
                  )
              )
              print(clauses)
              assert clauses['rolsuper'], 'expected user with clauses to have superuser clause'
              assert clauses['rolinherit'], 'expected user with clauses to have inherit clause'
              assert clauses['rolcreaterole'], 'expected user with clauses to have create role clause'
              assert clauses['rolcreatedb'], 'expected user with clauses to have create db clause'
              assert clauses['rolcanlogin'], 'expected user with clauses to have login clause'
              assert clauses['rolreplication'], 'expected user with clauses to have replication clause'
              assert clauses['rolbypassrls'], 'expected user with clauses to have bypassrls clause'

          with subtest("All user permissions default when ensureClauses is not provided"):
              clauses = json.loads(
                  machine.succeed(
                      "sudo -u postgres psql -tc \"${getClausesQuery "default-clauses"}\""
                  )
              )
              assert not clauses['rolsuper'], 'expected user with no clauses set to have default superuser clause'
              assert clauses['rolinherit'], 'expected user with no clauses set to have default inherit clause'
              assert not clauses['rolcreaterole'], 'expected user with no clauses set to have default create role clause'
              assert not clauses['rolcreatedb'], 'expected user with no clauses set to have default create db clause'
              assert clauses['rolcanlogin'], 'expected user with no clauses set to have default login clause'
              assert not clauses['rolreplication'], 'expected user with no clauses set to have default replication clause'
              assert not clauses['rolbypassrls'], 'expected user with no clauses set to have default bypassrls clause'

          machine.shutdown()
        '';
    };
in
lib.recurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) pkgs.postgresqlVersions
  // {
    passthru.override = p: makeTestFor p;
  }
)
@ -1,100 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  test-sql = pkgs.writeText "postgresql-test" ''
    CREATE EXTENSION timescaledb;
    CREATE EXTENSION timescaledb_toolkit;

    CREATE TABLE sth (
      time TIMESTAMPTZ NOT NULL,
      value DOUBLE PRECISION
    );

    SELECT create_hypertable('sth', 'time');

    INSERT INTO sth (time, value) VALUES
    ('2003-04-12 04:05:06 America/New_York', 1.0),
    ('2003-04-12 04:05:07 America/New_York', 2.0),
    ('2003-04-12 04:05:08 America/New_York', 3.0),
    ('2003-04-12 04:05:09 America/New_York', 4.0),
    ('2003-04-12 04:05:10 America/New_York', 5.0)
    ;

    WITH t AS (
      SELECT
        time_bucket('1 day'::interval, time) AS dt,
        stats_agg(value) AS stats
      FROM sth
      GROUP BY time_bucket('1 day'::interval, time)
    )
    SELECT
      average(stats)
    FROM t;

    SELECT * FROM sth;
  '';

  makeTestFor =
    package:
    makeTest {
      name = "timescaledb-${package.name}";
      meta = with lib.maintainers; {
        maintainers = [ typetetris ];
      };

      nodes.machine =
        { ... }:
        {
          services.postgresql = {
            inherit package;
            enable = true;
            extraPlugins =
              ps: with ps; [
                timescaledb
                timescaledb_toolkit
              ];
            settings = {
              shared_preload_libraries = "timescaledb, timescaledb_toolkit";
            };
          };
        };

      testScript = ''
        def check_count(statement, lines):
            return 'test $(sudo -u postgres psql postgres -tAc "{}"|wc -l) -eq {}'.format(
                statement, lines
            )


        machine.start()
        machine.wait_for_unit("postgresql")

        with subtest("Postgresql with extensions timescaledb and timescaledb_toolkit is available just after unit start"):
            machine.succeed(
                "sudo -u postgres psql -f ${test-sql}"
            )

        machine.fail(check_count("SELECT * FROM sth;", 3))
        machine.succeed(check_count("SELECT * FROM sth;", 5))
        machine.fail(check_count("SELECT * FROM sth;", 4))

        machine.shutdown()
      '';
    };
in
# Not run by default, because this requires allowUnfree.
# To run these tests:
# NIXPKGS_ALLOW_UNFREE=1 nix-build -A nixosTests.postgresql.timescaledb
lib.dontRecurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) (
    lib.filterAttrs (_: p: !p.pkgs.timescaledb.meta.broken) pkgs.postgresqlVersions
  )
  // {
    passthru.override = p: makeTestFor p;
  }
)
@ -1,50 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  makeTestFor =
    package:
    makeTest {
      name = "tsja-${package.name}";
      meta = {
        maintainers = with lib.maintainers; [ chayleaf ];
      };

      nodes.master =
        { ... }:
        {
          services.postgresql = {
            inherit package;
            enable = true;
            extraPlugins =
              ps: with ps; [
                tsja
              ];
          };
        };

      testScript = ''
        start_all()
        master.wait_for_unit("postgresql")
        master.succeed("sudo -u postgres psql -f /run/current-system/sw/share/postgresql/extension/libtsja_dbinit.sql")
        # make sure "日本語" is parsed as a separate lexeme
        master.succeed("""
          sudo -u postgres \\
            psql -c "SELECT * FROM ts_debug('japanese', 'PostgreSQLで日本語のテキスト検索ができます。')" \\
            | grep "{日本語}"
        """)
      '';
    };
in
lib.recurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) (
    lib.filterAttrs (_: p: !p.pkgs.tsja.meta.broken) pkgs.postgresqlVersions
  )
  // {
    passthru.override = p: makeTestFor p;
  }
)
@ -1,52 +0,0 @@
{
  pkgs,
  makeTest,
}:

let
  inherit (pkgs) lib;

  makeTestFor =
    package:
    makeTest {
      name = "wal2json-${package.name}";
      meta.maintainers = with pkgs.lib.maintainers; [ euank ];

      nodes.machine = {
        services.postgresql = {
          inherit package;
          enable = true;
          extraPlugins = with package.pkgs; [ wal2json ];
          settings = {
            wal_level = "logical";
            max_replication_slots = "10";
            max_wal_senders = "10";
          };
        };
      };

      testScript = ''
        machine.wait_for_unit("postgresql")
        machine.succeed(
            "sudo -u postgres psql -qAt -f ${./wal2json/example2.sql} postgres > /tmp/example2.out"
        )
        machine.succeed(
            "diff ${./wal2json/example2.out} /tmp/example2.out"
        )
        machine.succeed(
            "sudo -u postgres psql -qAt -f ${./wal2json/example3.sql} postgres > /tmp/example3.out"
        )
        machine.succeed(
            "diff ${./wal2json/example3.out} /tmp/example3.out"
        )
      '';
    };
in
lib.recurseIntoAttrs (
  lib.concatMapAttrs (n: p: { ${n} = makeTestFor p; }) (
    lib.filterAttrs (_: p: !p.pkgs.wal2json.meta.broken) pkgs.postgresqlVersions
  )
  // {
    passthru.override = p: makeTestFor p;
  }
)
93
third_party/nixpkgs/nixos/tests/timescaledb.nix
vendored
Normal file
@ -0,0 +1,93 @@
# mostly copied from ./postgresql.nix as it seemed inappropriate to
# test additional extensions for postgresql there.

{ system ? builtins.currentSystem
, config ? { }
, pkgs ? import ../.. { inherit system config; }
}:

with import ../lib/testing-python.nix { inherit system pkgs; };
with pkgs.lib;

let
  postgresql-versions = import ../../pkgs/servers/sql/postgresql pkgs;
  test-sql = pkgs.writeText "postgresql-test" ''
    CREATE EXTENSION timescaledb;
    CREATE EXTENSION timescaledb_toolkit;

    CREATE TABLE sth (
      time TIMESTAMPTZ NOT NULL,
      value DOUBLE PRECISION
    );

    SELECT create_hypertable('sth', 'time');

    INSERT INTO sth (time, value) VALUES
    ('2003-04-12 04:05:06 America/New_York', 1.0),
    ('2003-04-12 04:05:07 America/New_York', 2.0),
    ('2003-04-12 04:05:08 America/New_York', 3.0),
    ('2003-04-12 04:05:09 America/New_York', 4.0),
    ('2003-04-12 04:05:10 America/New_York', 5.0)
    ;

    WITH t AS (
      SELECT
        time_bucket('1 day'::interval, time) AS dt,
        stats_agg(value) AS stats
      FROM sth
      GROUP BY time_bucket('1 day'::interval, time)
    )
    SELECT
      average(stats)
    FROM t;
  '';
  make-postgresql-test = postgresql-name: postgresql-package: makeTest {
    name = postgresql-name;
    meta = with pkgs.lib.maintainers; {
      maintainers = [ typetetris ];
    };

    nodes.machine = { ... }:
      {
        services.postgresql = {
          enable = true;
          package = postgresql-package;
          extraPlugins = ps: with ps; [
            timescaledb
            timescaledb_toolkit
          ];
          settings = { shared_preload_libraries = "timescaledb, timescaledb_toolkit"; };
        };
      };

    testScript = ''
      def check_count(statement, lines):
          return 'test $(sudo -u postgres psql postgres -tAc "{}"|wc -l) -eq {}'.format(
              statement, lines
          )


      machine.start()
      machine.wait_for_unit("postgresql")

      with subtest("Postgresql with extensions timescaledb and timescaledb_toolkit is available just after unit start"):
          machine.succeed(
              "sudo -u postgres psql -f ${test-sql}"
          )

      machine.fail(check_count("SELECT * FROM sth;", 3))
      machine.succeed(check_count("SELECT * FROM sth;", 5))
      machine.fail(check_count("SELECT * FROM sth;", 4))

      machine.shutdown()
    '';

  };
  applicablePostgresqlVersions = filterAttrs (_: value: versionAtLeast value.version "14") postgresql-versions;
in
mapAttrs'
  (name: package: {
    inherit name;
    value = make-postgresql-test name package;
  })
  applicablePostgresqlVersions
32
third_party/nixpkgs/nixos/tests/tsja.nix
vendored
Normal file
@ -0,0 +1,32 @@
import ./make-test-python.nix ({ pkgs, lib, ...} : {
  name = "tsja";
  meta = {
    maintainers = with lib.maintainers; [ chayleaf ];
  };

  nodes = {
    master =
      { config, ... }:

      {
        services.postgresql = {
          enable = true;
          extraPlugins = ps: with ps; [
            tsja
          ];
        };
      };
  };

  testScript = ''
    start_all()
    master.wait_for_unit("postgresql")
    master.succeed("sudo -u postgres psql -f /run/current-system/sw/share/postgresql/extension/libtsja_dbinit.sql")
    # make sure "日本語" is parsed as a separate lexeme
    master.succeed("""
      sudo -u postgres \\
        psql -c "SELECT * FROM ts_debug('japanese', 'PostgreSQLで日本語のテキスト検索ができます。')" \\
        | grep "{日本語}"
    """)
  '';
})
@ -31,30 +31,21 @@ import ../make-test-python.nix (
    machine.succeed("curl --fail http://localhost:2283/")

    machine.succeed("""
        curl -f --json '{ "email": "test@example.com", "name": "Admin", "password": "admin" }' http://localhost:2283/api/auth/admin-sign-up
        curl -H 'Content-Type: application/json' --data '{ "email": "test@example.com", "name": "Admin", "password": "admin" }' -X POST http://localhost:2283/api/auth/admin-sign-up
    """)
    res = machine.succeed("""
        curl -f --json '{ "email": "test@example.com", "password": "admin" }' http://localhost:2283/api/auth/login
        curl -H 'Content-Type: application/json' --data '{ "email": "test@example.com", "password": "admin" }' -X POST http://localhost:2283/api/auth/login
    """)
    token = json.loads(res)['accessToken']

    res = machine.succeed("""
        curl -f -H 'Cookie: immich_access_token=%s' --json '{ "name": "API Key", "permissions": ["all"] }' http://localhost:2283/api/api-keys
        curl -H 'Content-Type: application/json' -H 'Cookie: immich_access_token=%s' --data '{ "name": "API Key", "permissions": ["all"] }' -X POST http://localhost:2283/api/api-keys
    """ % token)
    key = json.loads(res)['secret']

    machine.succeed(f"immich login http://localhost:2283/api {key}")
    res = machine.succeed("immich server-info")
    print(res)

    machine.succeed("""
        curl -f -X PUT -H 'Cookie: immich_access_token=%s' --json '{ "command": "start" }' http://localhost:2283/api/jobs/backupDatabase
    """ % token)
    res = machine.succeed("""
        curl -f -H 'Cookie: immich_access_token=%s' http://localhost:2283/api/jobs
    """ % token)
    assert json.loads(res)["backupDatabase"]["jobCounts"]["active"] == 1
    machine.wait_until_succeeds("ls /var/lib/immich/backups/*.sql.gz")
  '';
}
)
Some files were not shown because too many files have changed in this diff.