Project import generated by Copybara.

GitOrigin-RevId: 8e4fe32876ca15e3d5eb3ecd3ca0b224417f5f17
This commit is contained in:
Default email 2021-04-26 15:14:03 -04:00
parent 2189cff663
commit 9405df4a82
880 changed files with 10526 additions and 5088 deletions

View file

@ -161,7 +161,7 @@ Many Nix packages are designed to run on multiple platforms. As such, it's imp
### Tested via one or more NixOS test(s) if existing and applicable for the change (look inside nixos/tests) {#submitting-changes-nixos-tests} ### Tested via one or more NixOS test(s) if existing and applicable for the change (look inside nixos/tests) {#submitting-changes-nixos-tests}
Packages with automated tests are much more likely to be merged in a timely fashion because it doesn't require as much manual testing by the maintainer to verify the functionality of the package. If there are existing tests for the package, they should be run to verify your changes do not break the tests. Tests only apply to packages with NixOS modules defined and can only be run on Linux. For more details on writing and running tests, see the [section in the NixOS manual](https://nixos.org/nixos/manual/index.html#sec-nixos-tests). Packages with automated tests are much more likely to be merged in a timely fashion because it doesn't require as much manual testing by the maintainer to verify the functionality of the package. If there are existing tests for the package, they should be run to verify your changes do not break the tests. Tests can only be run on Linux. For more details on writing and running tests, see the [section in the NixOS manual](https://nixos.org/nixos/manual/index.html#sec-nixos-tests).
### Tested compilation of all pkgs that depend on this change using `nixpkgs-review` {#submitting-changes-tested-compilation} ### Tested compilation of all pkgs that depend on this change using `nixpkgs-review` {#submitting-changes-tested-compilation}

View file

@ -25,7 +25,7 @@ let
abiVersions = [ "armeabi-v7a" "arm64-v8a" ]; abiVersions = [ "armeabi-v7a" "arm64-v8a" ];
cmakeVersions = [ "3.10.2" ]; cmakeVersions = [ "3.10.2" ];
includeNDK = true; includeNDK = true;
ndkVersion = "22.0.7026061"; ndkVersions = ["22.0.7026061"];
useGoogleAPIs = false; useGoogleAPIs = false;
useGoogleTVAddOns = false; useGoogleTVAddOns = false;
includeExtras = [ includeExtras = [
@ -52,7 +52,11 @@ The following parameters are supported:
* `cmakeVersions` specifies which CMake versions should be deployed. * `cmakeVersions` specifies which CMake versions should be deployed.
* `includeNDK` specifies that the Android NDK bundle should be included. * `includeNDK` specifies that the Android NDK bundle should be included.
Defaults to: `false`. Defaults to: `false`.
* `ndkVersion` specifies the NDK version that we want to use. * `ndkVersions` specifies the NDK versions that we want to use. These are linked
under the `ndk` directory of the SDK root, and the first is linked under the
`ndk-bundle` directory.
* `ndkVersion` is equivalent to specifying one entry in `ndkVersions`, and
`ndkVersions` overrides this parameter if provided (see the usage sketch after this list).
* `includeExtras` is an array of identifier strings referring to arbitrary * `includeExtras` is an array of identifier strings referring to arbitrary
add-on packages that should be installed. add-on packages that should be installed.
* `platformVersions` specifies which platform SDK versions should be included. * `platformVersions` specifies which platform SDK versions should be included.
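Below is a minimal sketch, assuming the `androidenv.composeAndroidPackages` invocation shown at the top of this file, of how `ndkVersions` replaces a single `ndkVersion`; the second version string is purely illustrative:

```nix
androidComposition = androidenv.composeAndroidPackages {
  includeNDK = true;
  # The first entry is also linked as `ndk-bundle`; every entry is linked under `ndk/<version>`.
  ndkVersions = [ "22.0.7026061" "21.3.6528147" ];  # second version is an assumption
  cmakeVersions = [ "3.10.2" ];
  abiVersions = [ "armeabi-v7a" "arm64-v8a" ];
};
```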

View file

@ -123,7 +123,7 @@ depsBuildBuild = [ buildPackages.stdenv.cc ];
Add the following to your `mkDerivation` invocation. Add the following to your `mkDerivation` invocation.
```nix ```nix
doCheck = stdenv.hostPlatform == stdenv.buildPlatfrom; doCheck = stdenv.hostPlatform == stdenv.buildPlatform;
``` ```
## Cross-building packages {#sec-cross-usage} ## Cross-building packages {#sec-cross-usage}

View file

@ -125,6 +125,11 @@ lib.mapAttrs (n: v: v // { shortName = n; }) ({
fullName = ''BSD 4-clause "Original" or "Old" License''; fullName = ''BSD 4-clause "Original" or "Old" License'';
}; };
bsdOriginalUC = spdx {
spdxId = "BSD-4-Clause-UC";
fullName = "BSD 4-Clause University of California-Specific";
};
bsdProtection = spdx { bsdProtection = spdx {
spdxId = "BSD-Protection"; spdxId = "BSD-Protection";
fullName = "BSD Protection License"; fullName = "BSD Protection License";

View file

@ -112,6 +112,19 @@ rec {
aarch64 = "arm64"; aarch64 = "arm64";
}.${final.parsed.cpu.name} or final.parsed.cpu.name; }.${final.parsed.cpu.name} or final.parsed.cpu.name;
darwinPlatform =
if final.isMacOS then "macos"
else if final.isiOS then "ios"
else null;
# The canonical name for this attribute is darwinSdkVersion, but some
# platforms define the old name "sdkVer".
darwinSdkVersion = final.sdkVer or "10.12";
darwinMinVersion = final.darwinSdkVersion;
darwinMinVersionVariable =
if final.isMacOS then "MACOSX_DEPLOYMENT_TARGET"
else if final.isiOS then "IPHONEOS_DEPLOYMENT_TARGET"
else null;
emulator = pkgs: let emulator = pkgs: let
qemu-user = pkgs.qemu.override { qemu-user = pkgs.qemu.override {
smartcardSupport = false; smartcardSupport = false;
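As a sanity check for the new attributes, a small sketch (not part of this change) that elaborates a Darwin platform with `lib.systems.elaborate` and reads back the defaults defined above; the annotated values are the expected defaults:

```nix
let
  pkgs = import <nixpkgs> { };
  darwin = pkgs.lib.systems.elaborate "x86_64-darwin";
in {
  platform      = darwin.darwinPlatform;            # "macos" ("ios" on iOS targets)
  sdkVersion    = darwin.darwinSdkVersion;          # "10.12" unless the platform defines sdkVer
  minVersion    = darwin.darwinMinVersion;          # follows darwinSdkVersion
  minVersionVar = darwin.darwinMinVersionVariable;  # "MACOSX_DEPLOYMENT_TARGET"
}
```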

View file

@ -217,6 +217,7 @@ rec {
sdkVer = "14.3"; sdkVer = "14.3";
xcodeVer = "12.3"; xcodeVer = "12.3";
xcodePlatform = "iPhoneSimulator"; xcodePlatform = "iPhoneSimulator";
darwinPlatform = "ios-simulator";
useiOSPrebuilt = true; useiOSPrebuilt = true;
}; };
@ -226,6 +227,7 @@ rec {
sdkVer = "14.3"; sdkVer = "14.3";
xcodeVer = "12.3"; xcodeVer = "12.3";
xcodePlatform = "iPhoneSimulator"; xcodePlatform = "iPhoneSimulator";
darwinPlatform = "ios-simulator";
useiOSPrebuilt = true; useiOSPrebuilt = true;
}; };

View file

@ -492,12 +492,24 @@
fingerprint = "B422 CFB1 C9EF 73F7 E1E2 698D F53E 3233 42F7 A6D3A"; fingerprint = "B422 CFB1 C9EF 73F7 E1E2 698D F53E 3233 42F7 A6D3A";
}]; }];
}; };
amanjeev = {
email = "aj@amanjeev.com";
github = "amanjeev";
githubId = 160476;
name = "Amanjeev Sethi";
};
amar1729 = { amar1729 = {
email = "amar.paul16@gmail.com"; email = "amar.paul16@gmail.com";
github = "amar1729"; github = "amar1729";
githubId = 15623522; githubId = 15623522;
name = "Amar Paul"; name = "Amar Paul";
}; };
ambroisie = {
email = "bruno.nixpkgs@belanyi.fr";
github = "ambroisie";
githubId = 12465195;
name = "Bruno BELANYI";
};
ambrop72 = { ambrop72 = {
email = "ambrop7@gmail.com"; email = "ambrop7@gmail.com";
github = "ambrop72"; github = "ambrop72";
@ -8839,6 +8851,12 @@
githubId = 15379000; githubId = 15379000;
name = "schneefux"; name = "schneefux";
}; };
schnusch = {
email = "schnusch@users.noreply.github.com";
github = "schnusch";
githubId = 5104601;
name = "schnusch";
};
schristo = { schristo = {
email = "schristopher@konputa.com"; email = "schristopher@konputa.com";
name = "Scott Christopher"; name = "Scott Christopher";
@ -9849,6 +9867,12 @@
githubId = 3105057; githubId = 3105057;
name = "Jan Beinke"; name = "Jan Beinke";
}; };
therealansh = {
email = "tyagiansh23@gmail.com";
github = "therealansh";
githubId = 57180880;
name = "Ansh Tyagi";
};
thesola10 = { thesola10 = {
email = "me@thesola.io"; email = "me@thesola.io";
github = "thesola10"; github = "thesola10";

View file

@ -204,18 +204,18 @@
XKB XKB
</link> </link>
keyboard layouts using the option keyboard layouts using the option
<option> <option><link linkend="opt-services.xserver.extraLayouts">
<link linkend="opt-services.xserver.extraLayouts"> services.xserver.extraLayouts</link></option>.
services.xserver.extraLayouts </para>
</link> <para>
</option>.
As a first example, we are going to create a layout based on the basic US As a first example, we are going to create a layout based on the basic US
layout, with an additional layer to type some greek symbols by pressing the layout, with an additional layer to type some greek symbols by pressing the
right-alt key. right-alt key.
</para> </para>
<para> <para>
To do this we are going to create a <literal>us-greek</literal> file Create a file called <literal>us-greek</literal> with the following
with a <literal>xkb_symbols</literal> section. content (under a directory called <literal>symbols</literal>; it's
an XKB peculiarity that will help with testing):
</para> </para>
<programlisting> <programlisting>
xkb_symbols &quot;us-greek&quot; xkb_symbols &quot;us-greek&quot;
@ -231,14 +231,13 @@ xkb_symbols &quot;us-greek&quot;
}; };
</programlisting> </programlisting>
<para> <para>
To install the layout, the filepath, a description and the list of A minimal layout specification must include the following:
languages must be given:
</para> </para>
<programlisting> <programlisting>
<xref linkend="opt-services.xserver.extraLayouts"/>.us-greek = { <xref linkend="opt-services.xserver.extraLayouts"/>.us-greek = {
description = "US layout with alt-gr greek"; description = "US layout with alt-gr greek";
languages = [ "eng" ]; languages = [ "eng" ];
symbolsFile = /path/to/us-greek; symbolsFile = /yourpath/symbols/us-greek;
} }
</programlisting> </programlisting>
<note> <note>
@ -248,9 +247,27 @@ xkb_symbols &quot;us-greek&quot;
</para> </para>
</note> </note>
<para> <para>
The layout should now be installed and ready to use: try it by Applying this customization requires rebuilding several packages,
running <literal>setxkbmap us-greek</literal> and type and a broken XKB file can lead to the X session crashing at login.
<literal>&lt;alt&gt;+a</literal>. To change the default the usual Therefore, you're strongly advised to <emphasis role="strong">test
your layout before applying it</emphasis>:
<screen>
<prompt>$ </prompt>nix-shell -p xorg.xkbcomp
<prompt>$ </prompt>setxkbmap -I/yourpath us-greek -print | xkbcomp -I/yourpath - $DISPLAY
</screen>
</para>
<para>
You can inspect the predefined XKB files for examples:
<screen>
<prompt>$ </prompt>echo "$(nix-build --no-out-link '&lt;nixpkgs&gt;' -A xorg.xkeyboardconfig)/etc/X11/xkb/"
</screen>
</para>
<para>
Once the configuration is applied, and you did a logout/login
cycle, the layout should be ready to use. You can try it by e.g.
running <literal>setxkbmap us-greek</literal> and then type
<literal>&lt;alt&gt;+a</literal> (it may not get applied in your
terminal straight away). To change the default, the usual
<option> <option>
<link linkend="opt-services.xserver.layout"> <link linkend="opt-services.xserver.layout">
services.xserver.layout services.xserver.layout

View file

@ -15,6 +15,8 @@
, # size of the boot partition, is only used if partitionTableType is , # size of the boot partition, is only used if partitionTableType is
# either "efi" or "hybrid" # either "efi" or "hybrid"
# This will be undersized slightly, as this is actually the offset of
# the end of the partition. Generally it will be 1MiB smaller.
bootSize ? "256M" bootSize ? "256M"
, # The files and directories to be placed in the target file system. , # The files and directories to be placed in the target file system.
@ -163,6 +165,8 @@ let format' = format; in let
closureInfo = pkgs.closureInfo { rootPaths = [ config.system.build.toplevel channelSources ]; }; closureInfo = pkgs.closureInfo { rootPaths = [ config.system.build.toplevel channelSources ]; };
blockSize = toString (4 * 1024); # ext4fs block size (not block device sector size)
prepareImage = '' prepareImage = ''
export PATH=${binPath} export PATH=${binPath}
@ -175,6 +179,24 @@ let format' = format; in let
echo $(( "$1" * 512 )) echo $(( "$1" * 512 ))
} }
# Given lines of numbers, adds them together
sum_lines() {
local acc=0
while read -r number; do
acc=$((acc+number))
done
echo "$acc"
}
mebibyte=$(( 1024 * 1024 ))
# Approximative percentage of reserved space in an ext4 fs over 512MiB.
# 0.05208587646484375
# × 1000, integer part: 52
compute_fudge() {
echo $(( $1 * 52 / 1000 ))
}
mkdir $out mkdir $out
root="$PWD/root" root="$PWD/root"
@ -235,12 +257,53 @@ let format' = format; in let
${if diskSize == "auto" then '' ${if diskSize == "auto" then ''
${if partitionTableType == "efi" || partitionTableType == "hybrid" then '' ${if partitionTableType == "efi" || partitionTableType == "hybrid" then ''
additionalSpace=$(( ($(numfmt --from=iec '${additionalSpace}') + $(numfmt --from=iec '${bootSize}')) / 1000 )) # Add the GPT at the end
gptSpace=$(( 512 * 34 * 1 ))
# Normally we'd need to account for alignment and things, if bootSize
# represented the actual size of the boot partition. But it instead
# represents the offset at which it ends.
# So we know bootSize is the reserved space in front of the partition.
reservedSpace=$(( gptSpace + $(numfmt --from=iec '${bootSize}') ))
'' else if partitionTableType == "legacy+gpt" then ''
# Add the GPT at the end
gptSpace=$(( 512 * 34 * 1 ))
# And include the bios_grub partition; the ext4 partition starts at 2MB exactly.
reservedSpace=$(( gptSpace + 2 * mebibyte ))
'' else if partitionTableType == "legacy" then ''
# Add the 1MiB aligned reserved space (includes MBR)
reservedSpace=$(( mebibyte ))
'' else '' '' else ''
additionalSpace=$(( $(numfmt --from=iec '${additionalSpace}') / 1000 )) reservedSpace=0
''} ''}
diskSize=$(( $(set -- $(du -d0 $root); echo "$1") + $additionalSpace )) additionalSpace=$(( $(numfmt --from=iec '${additionalSpace}') + reservedSpace ))
truncate -s "$diskSize"K $diskImage
# Compute required space in filesystem blocks
diskUsage=$(find . ! -type d -exec 'du' '--apparent-size' '--block-size' "${blockSize}" '{}' ';' | cut -f1 | sum_lines)
# Each inode takes space!
numInodes=$(find . | wc -l)
# Convert to bytes, inodes take two blocks each!
diskUsage=$(( (diskUsage + 2 * numInodes) * ${blockSize} ))
# Then increase the required space to account for the reserved blocks.
fudge=$(compute_fudge $diskUsage)
requiredFilesystemSpace=$(( diskUsage + fudge ))
diskSize=$(( requiredFilesystemSpace + additionalSpace ))
# Round up to the nearest mebibyte.
# This ensures whole 512 bytes sector sizes in the disk image
# and helps towards aligning partitions optimally.
if (( diskSize % mebibyte )); then
diskSize=$(( ( diskSize / mebibyte + 1) * mebibyte ))
fi
truncate -s "$diskSize" $diskImage
printf "Automatic disk size...\n"
printf " Closure space use: %d bytes\n" $diskUsage
printf " fudge: %d bytes\n" $fudge
printf " Filesystem size needed: %d bytes\n" $requiredFilesystemSpace
printf " Additional space: %d bytes\n" $additionalSpace
printf " Disk image size: %d bytes\n" $diskSize
'' else '' '' else ''
truncate -s ${toString diskSize}M $diskImage truncate -s ${toString diskSize}M $diskImage
''} ''}
@ -251,9 +314,9 @@ let format' = format; in let
# Get start & length of the root partition in sectors to $START and $SECTORS. # Get start & length of the root partition in sectors to $START and $SECTORS.
eval $(partx $diskImage -o START,SECTORS --nr ${rootPartition} --pairs) eval $(partx $diskImage -o START,SECTORS --nr ${rootPartition} --pairs)
mkfs.${fsType} -F -L ${label} $diskImage -E offset=$(sectorsToBytes $START) $(sectorsToKilobytes $SECTORS)K mkfs.${fsType} -b ${blockSize} -F -L ${label} $diskImage -E offset=$(sectorsToBytes $START) $(sectorsToKilobytes $SECTORS)K
'' else '' '' else ''
mkfs.${fsType} -F -L ${label} $diskImage mkfs.${fsType} -b ${blockSize} -F -L ${label} $diskImage
''} ''}
echo "copying staging root to image..." echo "copying staging root to image..."
@ -283,6 +346,9 @@ in pkgs.vmTools.runInLinuxVM (
# Some tools assume these exist # Some tools assume these exist
ln -s vda /dev/xvda ln -s vda /dev/xvda
ln -s vda /dev/sda ln -s vda /dev/sda
# make systemd-boot find ESP without udev
mkdir /dev/block
ln -s /dev/vda1 /dev/block/254:1
mountPoint=/mnt mountPoint=/mnt
mkdir $mountPoint mkdir $mountPoint

View file

@ -10,7 +10,6 @@ with lib;
system.build.cloudstackImage = import ../../../lib/make-disk-image.nix { system.build.cloudstackImage = import ../../../lib/make-disk-image.nix {
inherit lib config pkgs; inherit lib config pkgs;
diskSize = 8192;
format = "qcow2"; format = "qcow2";
configFile = pkgs.writeText "configuration.nix" configFile = pkgs.writeText "configuration.nix"
'' ''

View file

@ -40,8 +40,9 @@ in {
}; };
sizeMB = mkOption { sizeMB = mkOption {
type = types.int; type = with types; either (enum [ "auto" ]) int;
default = if config.ec2.hvm then 2048 else 8192; default = "auto";
example = 8192;
description = "The size in MB of the image"; description = "The size in MB of the image";
}; };

View file

@ -12,8 +12,8 @@ with lib;
system.build.openstackImage = import ../../../lib/make-disk-image.nix { system.build.openstackImage = import ../../../lib/make-disk-image.nix {
inherit lib config; inherit lib config;
additionalSpace = "1024M";
pkgs = import ../../../.. { inherit (pkgs) system; }; # ensure we use the regular qemu-kvm package pkgs = import ../../../.. { inherit (pkgs) system; }; # ensure we use the regular qemu-kvm package
diskSize = 8192;
format = "qcow2"; format = "qcow2";
configFile = pkgs.writeText "configuration.nix" configFile = pkgs.writeText "configuration.nix"
'' ''

View file

@ -125,7 +125,7 @@ fi
# Resolve the flake. # Resolve the flake.
if [[ -n $flake ]]; then if [[ -n $flake ]]; then
flake=$(nix "${flakeFlags[@]}" flake info --json "${extraBuildFlags[@]}" "${lockFlags[@]}" -- "$flake" | jq -r .url) flake=$(nix "${flakeFlags[@]}" flake metadata --json "${extraBuildFlags[@]}" "${lockFlags[@]}" -- "$flake" | jq -r .url)
fi fi
if [[ ! -e $NIXOS_CONFIG && -z $system && -z $flake ]]; then if [[ ! -e $NIXOS_CONFIG && -z $system && -z $flake ]]; then

View file

@ -530,6 +530,7 @@
./services/misc/parsoid.nix ./services/misc/parsoid.nix
./services/misc/plex.nix ./services/misc/plex.nix
./services/misc/plikd.nix ./services/misc/plikd.nix
./services/misc/podgrab.nix
./services/misc/tautulli.nix ./services/misc/tautulli.nix
./services/misc/pinnwand.nix ./services/misc/pinnwand.nix
./services/misc/pykms.nix ./services/misc/pykms.nix
@ -701,6 +702,9 @@
./services/networking/iodine.nix ./services/networking/iodine.nix
./services/networking/iperf3.nix ./services/networking/iperf3.nix
./services/networking/ircd-hybrid/default.nix ./services/networking/ircd-hybrid/default.nix
./services/networking/iscsi/initiator.nix
./services/networking/iscsi/root-initiator.nix
./services/networking/iscsi/target.nix
./services/networking/iwd.nix ./services/networking/iwd.nix
./services/networking/jicofo.nix ./services/networking/jicofo.nix
./services/networking/jitsi-videobridge.nix ./services/networking/jitsi-videobridge.nix

View file

@ -10,13 +10,12 @@ let
paths = map (p: "${p}/pcsc/drivers") config.services.pcscd.plugins; paths = map (p: "${p}/pcsc/drivers") config.services.pcscd.plugins;
}; };
in { in
{
###### interface ###### interface
options = { options.services.pcscd = {
services.pcscd = {
enable = mkEnableOption "PCSC-Lite daemon"; enable = mkEnableOption "PCSC-Lite daemon";
plugins = mkOption { plugins = mkOption {
@ -44,26 +43,20 @@ in {
''; '';
}; };
}; };
};
###### implementation ###### implementation
config = mkIf config.services.pcscd.enable { config = mkIf config.services.pcscd.enable {
systemd.sockets.pcscd = { environment.etc."reader.conf".source = cfgFile;
description = "PCSC-Lite Socket";
wantedBy = [ "sockets.target" ]; systemd.packages = [ (getBin pkgs.pcsclite) ];
before = [ "multi-user.target" ];
socketConfig.ListenStream = "/run/pcscd/pcscd.comm"; systemd.sockets.pcscd.wantedBy = [ "sockets.target" ];
};
systemd.services.pcscd = { systemd.services.pcscd = {
description = "PCSC-Lite daemon";
environment.PCSCLITE_HP_DROPDIR = pluginEnv; environment.PCSCLITE_HP_DROPDIR = pluginEnv;
serviceConfig = { restartTriggers = [ "/etc/reader.conf" ];
ExecStart = "${getBin pkgs.pcsclite}/sbin/pcscd -f -x -c ${cfgFile}";
ExecReload = "${getBin pkgs.pcsclite}/sbin/pcscd -H";
};
}; };
}; };
} }

View file

@ -67,6 +67,13 @@ in
''; '';
}; };
queueRunnerInterval = mkOption {
type = types.str;
default = "5m";
description = ''
How often to spawn a new queue runner.
'';
};
}; };
}; };
@ -104,7 +111,7 @@ in
wantedBy = [ "multi-user.target" ]; wantedBy = [ "multi-user.target" ];
restartTriggers = [ config.environment.etc."exim.conf".source ]; restartTriggers = [ config.environment.etc."exim.conf".source ];
serviceConfig = { serviceConfig = {
ExecStart = "${cfg.package}/bin/exim -bdf -q30m"; ExecStart = "${cfg.package}/bin/exim -bdf -q${cfg.queueRunnerInterval}";
ExecReload = "${coreutils}/bin/kill -HUP $MAINPID"; ExecReload = "${coreutils}/bin/kill -HUP $MAINPID";
}; };
preStart = '' preStart = ''
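With the new option, a configuration can adjust the queue runner cadence instead of relying on the previously hard-coded 30 minutes; the value below is a hypothetical example:

```nix
services.exim = {
  enable = true;
  queueRunnerInterval = "10m";  # spawn a queue runner every 10 minutes (the new default is "5m")
};
```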

View file

@ -126,19 +126,36 @@ in
}; };
systemd.services.sa-update = { systemd.services.sa-update = {
# Needs to be able to contact the update server.
wants = [ "network-online.target" ];
after = [ "network-online.target" ];
serviceConfig = {
Type = "oneshot";
User = "spamd";
Group = "spamd";
StateDirectory = "spamassassin";
ExecStartPost = "+${pkgs.systemd}/bin/systemctl -q --no-block try-reload-or-restart spamd.service";
};
script = '' script = ''
set +e set +e
${pkgs.su}/bin/su -s "${pkgs.bash}/bin/bash" -c "${pkgs.spamassassin}/bin/sa-update --gpghomedir=/var/lib/spamassassin/sa-update-keys/" spamd ${pkgs.spamassassin}/bin/sa-update --verbose --gpghomedir=/var/lib/spamassassin/sa-update-keys/
rc=$?
v=$?
set -e set -e
if [ $v -gt 1 ]; then
echo "sa-update execution error" if [[ $rc -gt 1 ]]; then
exit $v # sa-update failed.
exit $rc
fi fi
if [ $v -eq 0 ]; then
systemctl reload spamd.service if [[ $rc -eq 1 ]]; then
# No update was available, exit successfully.
exit 0
fi fi
# An update was available and installed. Compile the rules.
${pkgs.spamassassin}/bin/sa-compile
''; '';
}; };
@ -153,32 +170,22 @@ in
}; };
systemd.services.spamd = { systemd.services.spamd = {
description = "Spam Assassin Server"; description = "SpamAssassin Server";
wantedBy = [ "multi-user.target" ]; wantedBy = [ "multi-user.target" ];
after = [ "network.target" ]; wants = [ "sa-update.service" ];
after = [
"network.target"
"sa-update.service"
];
serviceConfig = { serviceConfig = {
ExecStart = "${pkgs.spamassassin}/bin/spamd ${optionalString cfg.debug "-D"} --username=spamd --groupname=spamd --virtual-config-dir=/var/lib/spamassassin/user-%u --allow-tell --pidfile=/run/spamd.pid"; User = "spamd";
ExecReload = "${pkgs.coreutils}/bin/kill -HUP $MAINPID"; Group = "spamd";
ExecStart = "+${pkgs.spamassassin}/bin/spamd ${optionalString cfg.debug "-D"} --username=spamd --groupname=spamd --virtual-config-dir=%S/spamassassin/user-%u --allow-tell --pidfile=/run/spamd.pid";
ExecReload = "+${pkgs.coreutils}/bin/kill -HUP $MAINPID";
StateDirectory = "spamassassin";
}; };
# 0 and 1 no error, exitcode > 1 means error:
# https://spamassassin.apache.org/full/3.1.x/doc/sa-update.html#exit_codes
preStart = ''
echo "Recreating '/var/lib/spamasassin' with creating '3.004001' (or similar) and 'sa-update-keys'"
mkdir -p /var/lib/spamassassin
chown spamd:spamd /var/lib/spamassassin -R
set +e
${pkgs.su}/bin/su -s "${pkgs.bash}/bin/bash" -c "${pkgs.spamassassin}/bin/sa-update --gpghomedir=/var/lib/spamassassin/sa-update-keys/" spamd
v=$?
set -e
if [ $v -gt 1 ]; then
echo "sa-update execution error"
exit $v
fi
chown spamd:spamd /var/lib/spamassassin -R
'';
}; };
}; };
} }

View file

@ -18,6 +18,7 @@ in
package = mkOption { package = mkOption {
type = types.package; type = types.package;
default = pkgs.jellyfin;
example = literalExample "pkgs.jellyfin"; example = literalExample "pkgs.jellyfin";
description = '' description = ''
Jellyfin package to use. Jellyfin package to use.
@ -98,11 +99,6 @@ in
}; };
}; };
services.jellyfin.package = mkDefault (
if versionAtLeast config.system.stateVersion "20.09" then pkgs.jellyfin
else pkgs.jellyfin_10_5
);
users.users = mkIf (cfg.user == "jellyfin") { users.users = mkIf (cfg.user == "jellyfin") {
jellyfin = { jellyfin = {
group = cfg.group; group = cfg.group;

View file

@ -0,0 +1,50 @@
{ config, lib, pkgs, ... }:
let
cfg = config.services.podgrab;
in
{
options.services.podgrab = with lib; {
enable = mkEnableOption "Podgrab, a self-hosted podcast manager";
passwordFile = mkOption {
type = with types; nullOr str;
default = null;
example = "/run/secrets/password.env";
description = ''
The path to a file containing the PASSWORD environment variable
definition for Podgrab's authentication.
'';
};
port = mkOption {
type = types.port;
default = 8080;
example = 4242;
description = "The port on which Podgrab will listen for incoming HTTP traffic.";
};
};
config = lib.mkIf cfg.enable {
systemd.services.podgrab = {
description = "Podgrab podcast manager";
wantedBy = [ "multi-user.target" ];
environment = {
CONFIG = "/var/lib/podgrab/config";
DATA = "/var/lib/podgrab/data";
GIN_MODE = "release";
PORT = toString cfg.port;
};
serviceConfig = {
DynamicUser = true;
EnvironmentFile = lib.optional (cfg.passwordFile != null) [
cfg.passwordFile
];
ExecStart = "${pkgs.podgrab}/bin/podgrab";
WorkingDirectory = "${pkgs.podgrab}/share";
StateDirectory = [ "podgrab/config" "podgrab/data" ];
};
};
};
meta.maintainers = with lib.maintainers; [ ambroisie ];
}
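A minimal sketch of enabling the new Podgrab module; the port and secret path are assumptions:

```nix
services.podgrab = {
  enable = true;
  port = 4242;
  # Optional: an environment file defining PASSWORD=... to protect the web UI.
  passwordFile = "/run/secrets/podgrab.env";
};
```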

View file

@ -0,0 +1,84 @@
{ config, lib, pkgs, ... }: with lib;
let
cfg = config.services.openiscsi;
in
{
options.services.openiscsi = with types; {
enable = mkEnableOption "the openiscsi iscsi daemon";
enableAutoLoginOut = mkEnableOption ''
automatic login and logout of all automatic targets.
You probably do not want this.
'';
discoverPortal = mkOption {
type = nullOr str;
default = null;
description = "Portal to discover targets on";
};
name = mkOption {
type = str;
description = "Name of this iscsi initiator";
example = "iqn.2020-08.org.linux-iscsi.initiatorhost:example";
};
package = mkOption {
type = package;
description = "openiscsi package to use";
default = pkgs.openiscsi;
defaultText = "pkgs.openiscsi";
};
extraConfig = mkOption {
type = str;
default = "";
description = "Lines to append to default iscsid.conf";
};
extraConfigFile = mkOption {
description = ''
Append an additional file's contents to /etc/iscsid.conf. Use a non-store path
and store passwords in this file.
'';
default = null;
type = nullOr str;
};
};
config = mkIf cfg.enable {
environment.etc."iscsi/iscsid.conf.fragment".source = pkgs.runCommand "iscsid.conf" {} ''
cat "${cfg.package}/etc/iscsi/iscsid.conf" > $out
cat << 'EOF' >> $out
${cfg.extraConfig}
${optionalString cfg.enableAutoLoginOut "node.startup = automatic"}
EOF
'';
environment.etc."iscsi/initiatorname.iscsi".text = "InitiatorName=${cfg.name}";
system.activationScripts.iscsid = let
extraCfgDumper = optionalString (cfg.extraConfigFile != null) ''
if [ -f "${cfg.extraConfigFile}" ]; then
printf "\n# The following is from ${cfg.extraConfigFile}:\n"
cat "${cfg.extraConfigFile}"
else
echo "Warning: services.openiscsi.extraConfigFile ${cfg.extraConfigFile} does not exist!" >&2
fi
'';
in ''
(
cat ${config.environment.etc."iscsi/iscsid.conf.fragment".source}
${extraCfgDumper}
) > /etc/iscsi/iscsid.conf
'';
systemd.packages = [ cfg.package ];
systemd.services."iscsid".wantedBy = [ "multi-user.target" ];
systemd.sockets."iscsid".wantedBy = [ "sockets.target" ];
systemd.services."iscsi" = mkIf cfg.enableAutoLoginOut {
wantedBy = [ "remote-fs.target" ];
serviceConfig.ExecStartPre = mkIf (cfg.discoverPortal != null) "${cfg.package}/bin/iscsiadm --mode discoverydb --type sendtargets --portal ${escapeShellArg cfg.discoverPortal} --discover";
};
environment.systemPackages = [ cfg.package ];
boot.kernelModules = [ "iscsi_tcp" ];
};
}
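For reference, a hedged sketch of a client configuration using this module; the portal address is an assumption:

```nix
services.openiscsi = {
  enable = true;
  name = "iqn.2020-08.org.linux-iscsi.initiatorhost:example";
  discoverPortal = "192.168.1.1:3260";  # assumed portal address
  enableAutoLoginOut = true;            # log in to every discovered target at boot
};
```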

View file

@ -0,0 +1,181 @@
{ config, lib, pkgs, ... }: with lib;
let
cfg = config.boot.iscsi-initiator;
in
{
# If you're booting entirely off another machine you may want to add
# this snippet to always boot the latest "system" version. It is not
# enabled by default in case you have an initrd on a local disk:
#
# boot.initrd.postMountCommands = ''
# ln -sfn /nix/var/nix/profiles/system/init /mnt-root/init
# stage2Init=/init
# '';
#
# Note: Theoretically you might want to connect to multiple portals and
# log in to multiple targets, however the authors of this module so far
# don't have the need or expertise to reasonably implement it. Also,
# consider carefully before making your boot chain depend on multiple
# machines to be up.
options.boot.iscsi-initiator = with types; {
name = mkOption {
description = ''
Name of the iSCSI initiator to boot from. Note: booting from iSCSI
requires networkd-based networking.
'';
default = null;
example = "iqn.2020-08.org.linux-iscsi.initiatorhost:example";
type = nullOr str;
};
discoverPortal = mkOption {
description = ''
iSCSI portal to boot from.
'';
default = null;
example = "192.168.1.1:3260";
type = nullOr str;
};
target = mkOption {
description = ''
Name of the iSCSI target to boot from.
'';
default = null;
example = "iqn.2020-08.org.linux-iscsi.targethost:example";
type = nullOr str;
};
logLevel = mkOption {
description = ''
Higher numbers elicit more logs.
'';
default = 1;
example = 8;
type = int;
};
loginAll = mkOption {
description = ''
Do not log into a specific target on the portal, but into all targets that are discovered.
This overrides the target setting.
'';
type = bool;
default = false;
};
extraConfig = mkOption {
description = "Extra lines to append to /etc/iscsid.conf";
default = null;
type = nullOr lines;
};
extraConfigFile = mkOption {
description = ''
Append an additional file's contents to `/etc/iscsid.conf`. Use a non-store path
and store passwords in this file. Note: the file specified here must be available
in the initrd, see: `boot.initrd.secrets`.
'';
default = null;
type = nullOr str;
};
};
config = mkIf (cfg.name != null) {
# The "scripted" networking configuration (ie: non-networkd)
# doesn't properly order the start and stop of the interfaces, and the
# network interfaces are torn down before unmounting disks. Since this
# module is specifically for very-early-boot network mounts, we need
# the network to stay on.
#
# We could probably fix the scripted options to properly order, but I'm
# not inclined to invest that time today. Hopefully this gets users far
# enough along and they can just use networkd.
networking.useNetworkd = true;
networking.useDHCP = false; # Required to set useNetworkd = true
boot.initrd = {
network.enable = true;
# By default, the stage-1 disables the network and resets the interfaces
# on startup. Since our startup disks are on the network, we can't let
# the network not work.
network.flushBeforeStage2 = false;
kernelModules = [ "iscsi_tcp" ];
extraUtilsCommands = ''
copy_bin_and_libs ${pkgs.openiscsi}/bin/iscsid
copy_bin_and_libs ${pkgs.openiscsi}/bin/iscsiadm
${optionalString (!config.boot.initrd.network.ssh.enable) "cp -pv ${pkgs.glibc.out}/lib/libnss_files.so.* $out/lib"}
mkdir -p $out/etc/iscsi
cp ${config.environment.etc.hosts.source} $out/etc/hosts
cp ${pkgs.openiscsi}/etc/iscsi/iscsid.conf $out/etc/iscsi/iscsid.fragment.conf
chmod +w $out/etc/iscsi/iscsid.fragment.conf
cat << 'EOF' >> $out/etc/iscsi/iscsid.fragment.conf
${optionalString (cfg.extraConfig != null) cfg.extraConfig}
EOF
'';
extraUtilsCommandsTest = ''
$out/bin/iscsiadm --version
'';
preLVMCommands = let
extraCfgDumper = optionalString (cfg.extraConfigFile != null) ''
if [ -f "${cfg.extraConfigFile}" ]; then
printf "\n# The following is from ${cfg.extraConfigFile}:\n"
cat "${cfg.extraConfigFile}"
else
echo "Warning: boot.iscsi-initiator.extraConfigFile ${cfg.extraConfigFile} does not exist!" >&2
fi
'';
in ''
${optionalString (!config.boot.initrd.network.ssh.enable) ''
# stolen from initrd-ssh.nix
echo 'root:x:0:0:root:/root:/bin/ash' > /etc/passwd
echo 'passwd: files' > /etc/nsswitch.conf
''}
cp -f $extraUtils/etc/hosts /etc/hosts
mkdir -p /etc/iscsi /run/lock/iscsi
echo "InitiatorName=${cfg.name}" > /etc/iscsi/initiatorname.iscsi
(
cat "$extraUtils/etc/iscsi/iscsid.fragment.conf"
printf "\n"
${optionalString cfg.loginAll ''echo "node.startup = automatic"''}
${extraCfgDumper}
) > /etc/iscsi/iscsid.conf
iscsid --foreground --no-pid-file --debug ${toString cfg.logLevel} &
iscsiadm --mode discoverydb \
--type sendtargets \
--discover \
--portal ${escapeShellArg cfg.discoverPortal} \
--debug ${toString cfg.logLevel}
${if cfg.loginAll then ''
iscsiadm --mode node --loginall all
'' else ''
iscsiadm --mode node --targetname ${escapeShellArg cfg.target} --login
''}
pkill -9 iscsid
'';
};
services.openiscsi = {
enable = true;
inherit (cfg) name;
};
assertions = [
{
assertion = cfg.loginAll -> cfg.target == null;
message = "iSCSI target name is set while login on all portals is enabled.";
}
];
};
}
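Putting the module together, a hedged sketch of booting a machine from an iSCSI root; the addresses, IQNs and device name are assumptions, and the root file system options mirror the iscsi-root test added later in this commit:

```nix
boot.iscsi-initiator = {
  name = "iqn.2020-08.org.linux-iscsi.initiatorhost:example";
  discoverPortal = "192.168.1.1:3260";
  target = "iqn.2020-08.org.linux-iscsi.targethost:example";
};

fileSystems."/" = {
  device = "/dev/sda";       # assumed iSCSI-backed disk
  fsType = "xfs";
  options = [ "_netdev" ];   # make the mount wait for the network
};
```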

View file

@ -0,0 +1,53 @@
{ config, lib, pkgs, ... }:
with lib;
let
cfg = config.services.target;
in
{
###### interface
options = {
services.target = with types; {
enable = mkEnableOption "the kernel's LIO iSCSI target";
config = mkOption {
type = attrs;
default = {};
description = ''
Content of /etc/target/saveconfig.json.
This file is normally read and written by targetcli.
'';
};
};
};
###### implementation
config = mkIf cfg.enable {
environment.etc."target/saveconfig.json" = {
text = builtins.toJSON cfg.config;
mode = "0600";
};
environment.systemPackages = with pkgs; [ targetcli ];
boot.kernelModules = [ "configfs" "target_core_mod" "iscsi_target_mod" ];
systemd.services.iscsi-target = {
enable = true;
after = [ "network.target" "local-fs.target" ];
requires = [ "sys-kernel-config.mount" ];
wantedBy = [ "multi-user.target" ];
serviceConfig = {
Type = "oneshot";
ExecStart = "${pkgs.python3.pkgs.rtslib}/bin/targetctl restore";
ExecStop = "${pkgs.python3.pkgs.rtslib}/bin/targetctl clear";
RemainAfterExit = "yes";
};
};
systemd.tmpfiles.rules = [
"d /etc/target 0700 root root - -"
];
};
}
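A reduced sketch of the option in use, modelled on the iscsi-root test added later in this commit (LUNs, ACLs and portals elided):

```nix
services.target = {
  enable = true;
  config = {
    fabric_modules = [ ];
    storage_objects = [
      { dev = "/dev/vdb"; name = "test"; plugin = "block";
        write_back = true; wwn = "92b17c3f-6b40-4168-b082-ceeb7b495522"; }
    ];
    targets = [
      { fabric = "iscsi";
        wwn = "iqn.2003-01.org.linux-iscsi.target.x8664:sn.acf8fd9c23af";
        tpgs = [ { enable = true; tag = 1; attributes.generate_node_acls = 1; } ]; }
    ];
  };
};
```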

View file

@ -236,9 +236,12 @@ def main() -> None:
gens += get_generations(profile) gens += get_generations(profile)
remove_old_entries(gens) remove_old_entries(gens)
for gen in gens: for gen in gens:
try:
write_entry(*gen, machine_id) write_entry(*gen, machine_id)
if os.readlink(system_dir(*gen)) == args.default_config: if os.readlink(system_dir(*gen)) == args.default_config:
write_loader_conf(*gen) write_loader_conf(*gen)
except OSError as e:
print("ignoring profile '{}' in the list of boot entries because of the following error:\n{}".format(profile, e), file=sys.stderr)
memtest_entry_file = "@efiSysMountPoint@/loader/entries/memtest86.conf" memtest_entry_file = "@efiSysMountPoint@/loader/entries/memtest86.conf"
if os.path.exists(memtest_entry_file): if os.path.exists(memtest_entry_file):

View file

@ -9,8 +9,9 @@ in
options = { options = {
virtualisation.azureImage.diskSize = mkOption { virtualisation.azureImage.diskSize = mkOption {
type = with types; int; type = with types; either (enum [ "auto" ]) int;
default = 2048; default = "auto";
example = 2048;
description = '' description = ''
Size of disk image. Unit is MB. Size of disk image. Unit is MB.
''; '';

View file

@ -10,8 +10,9 @@ in
options = { options = {
virtualisation.digitalOceanImage.diskSize = mkOption { virtualisation.digitalOceanImage.diskSize = mkOption {
type = with types; int; type = with types; either (enum [ "auto" ]) int;
default = 4096; default = "auto";
example = 4096;
description = '' description = ''
Size of disk image. Unit is MB. Size of disk image. Unit is MB.
''; '';

View file

@ -18,8 +18,9 @@ in
options = { options = {
virtualisation.googleComputeImage.diskSize = mkOption { virtualisation.googleComputeImage.diskSize = mkOption {
type = with types; int; type = with types; either (enum [ "auto" ]) int;
default = 1536; default = "auto";
example = 1536;
description = '' description = ''
Size of disk image. Unit is MB. Size of disk image. Unit is MB.
''; '';

View file

@ -9,8 +9,9 @@ in {
options = { options = {
hyperv = { hyperv = {
baseImageSize = mkOption { baseImageSize = mkOption {
type = types.int; type = with types; either (enum [ "auto" ]) int;
default = 2048; default = "auto";
example = 2048;
description = '' description = ''
The size of the hyper-v base image in MiB. The size of the hyper-v base image in MiB.
''; '';

View file

@ -439,21 +439,16 @@ in
default = false; default = false;
description = '' description = ''
Whether this NixOS machine is a lightweight container running Whether this NixOS machine is a lightweight container running
in another NixOS system. If set to true, support for nested in another NixOS system.
containers is disabled by default, but can be reenabled by
setting <option>boot.enableContainers</option> to true.
''; '';
}; };
boot.enableContainers = mkOption { boot.enableContainers = mkOption {
type = types.bool; type = types.bool;
default = !config.boot.isContainer; default = true;
description = '' description = ''
Whether to enable support for NixOS containers. Defaults to true Whether to enable support for NixOS containers. Defaults to true
(at no cost if containers are not actually used), but only if the (at no cost if containers are not actually used).
system is not itself a lightweight container of a host.
To enable support for nested containers, this option has to be
explicitly set to true (in the outer container).
''; '';
}; };

View file

@ -11,8 +11,9 @@ in {
options = { options = {
virtualbox = { virtualbox = {
baseImageSize = mkOption { baseImageSize = mkOption {
type = types.int; type = with types; either (enum [ "auto" ]) int;
default = 50 * 1024; default = "auto";
example = 50 * 1024;
description = '' description = ''
The size of the VirtualBox base image in MiB. The size of the VirtualBox base image in MiB.
''; '';

View file

@ -18,8 +18,9 @@ in {
options = { options = {
vmware = { vmware = {
baseImageSize = mkOption { baseImageSize = mkOption {
type = types.int; type = with types; either (enum [ "auto" ]) int;
default = 2048; default = "auto";
example = 2048;
description = '' description = ''
The size of the VMWare base image in MiB. The size of the VMWare base image in MiB.
''; '';

View file

@ -161,9 +161,6 @@ in
environment.systemPackages = [ cfg.package ]; environment.systemPackages = [ cfg.package ];
# Make sure Domain 0 gets the required configuration
#boot.kernelPackages = pkgs.boot.kernelPackages.override { features={xen_dom0=true;}; };
boot.kernelModules = boot.kernelModules =
[ "xen-evtchn" "xen-gntdev" "xen-gntalloc" "xen-blkback" "xen-netback" [ "xen-evtchn" "xen-gntdev" "xen-gntalloc" "xen-blkback" "xen-netback"
"xen-pciback" "evtchn" "gntdev" "netbk" "blkbk" "xen-scsibk" "xen-pciback" "evtchn" "gntdev" "netbk" "blkbk" "xen-scsibk"

View file

@ -75,6 +75,7 @@ in
containers-ip = handleTest ./containers-ip.nix {}; containers-ip = handleTest ./containers-ip.nix {};
containers-macvlans = handleTest ./containers-macvlans.nix {}; containers-macvlans = handleTest ./containers-macvlans.nix {};
containers-names = handleTest ./containers-names.nix {}; containers-names = handleTest ./containers-names.nix {};
containers-nested = handleTest ./containers-nested.nix {};
containers-physical_interfaces = handleTest ./containers-physical_interfaces.nix {}; containers-physical_interfaces = handleTest ./containers-physical_interfaces.nix {};
containers-portforward = handleTest ./containers-portforward.nix {}; containers-portforward = handleTest ./containers-portforward.nix {};
containers-reloadable = handleTest ./containers-reloadable.nix {}; containers-reloadable = handleTest ./containers-reloadable.nix {};
@ -185,6 +186,7 @@ in
iodine = handleTest ./iodine.nix {}; iodine = handleTest ./iodine.nix {};
ipfs = handleTest ./ipfs.nix {}; ipfs = handleTest ./ipfs.nix {};
ipv6 = handleTest ./ipv6.nix {}; ipv6 = handleTest ./ipv6.nix {};
iscsi-root = handleTest ./iscsi-root.nix {};
jackett = handleTest ./jackett.nix {}; jackett = handleTest ./jackett.nix {};
jellyfin = handleTest ./jellyfin.nix {}; jellyfin = handleTest ./jellyfin.nix {};
jenkins = handleTest ./jenkins.nix {}; jenkins = handleTest ./jenkins.nix {};
@ -322,6 +324,7 @@ in
pleroma = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./pleroma.nix {}; pleroma = handleTestOn [ "x86_64-linux" "aarch64-linux" ] ./pleroma.nix {};
plikd = handleTest ./plikd.nix {}; plikd = handleTest ./plikd.nix {};
plotinus = handleTest ./plotinus.nix {}; plotinus = handleTest ./plotinus.nix {};
podgrab = handleTest ./podgrab.nix {};
podman = handleTestOn ["x86_64-linux"] ./podman.nix {}; podman = handleTestOn ["x86_64-linux"] ./podman.nix {};
pomerium = handleTestOn ["x86_64-linux"] ./pomerium.nix {}; pomerium = handleTestOn ["x86_64-linux"] ./pomerium.nix {};
postfix = handleTest ./postfix.nix {}; postfix = handleTest ./postfix.nix {};

View file

@ -0,0 +1,30 @@
# Test for NixOS' container nesting.
import ./make-test-python.nix ({ pkgs, ... }: {
name = "nested";
meta = with pkgs.lib.maintainers; { maintainers = [ sorki ]; };
machine = { lib, ... }:
let
makeNested = subConf: {
containers.nested = {
autoStart = true;
privateNetwork = true;
config = subConf;
};
};
in makeNested (makeNested { });
testScript = ''
machine.start()
machine.wait_for_unit("container@nested.service")
machine.succeed("systemd-run --pty --machine=nested -- machinectl list | grep nested")
print(
machine.succeed(
"systemd-run --pty --machine=nested -- systemd-run --pty --machine=nested -- systemctl status"
)
)
'';
})

View file

@ -1,44 +1,120 @@
# Test whether hibernation from partition works. # Test whether hibernation from partition works.
import ./make-test-python.nix (pkgs: { { system ? builtins.currentSystem
name = "hibernate"; , config ? {}
, pkgs ? import ../.. { inherit system config; }
}:
nodes = { with import ../lib/testing-python.nix { inherit system pkgs; };
machine = { config, lib, pkgs, ... }: with lib; {
virtualisation.emptyDiskImages = [ config.virtualisation.memorySize ]; let
# System configuration of the installed system, which is used for the actual
# hibernate testing.
installedConfig = with pkgs.lib; {
imports = [
../modules/testing/test-instrumentation.nix
../modules/profiles/qemu-guest.nix
../modules/profiles/minimal.nix
];
hardware.enableAllFirmware = mkForce false;
documentation.nixos.enable = false;
boot.loader.grub.device = "/dev/vda";
systemd.services.backdoor.conflicts = [ "sleep.target" ]; systemd.services.backdoor.conflicts = [ "sleep.target" ];
swapDevices = mkOverride 0 [ { device = "/dev/vdb"; } ]; powerManagement.resumeCommands = "systemctl --no-block restart backdoor.service";
networking.firewall.allowedTCPPorts = [ 4444 ]; fileSystems = {
"/".device = "/dev/vda2";
systemd.services.listener.serviceConfig.ExecStart = "${pkgs.netcat}/bin/nc -l 4444 -k";
}; };
swapDevices = mkOverride 0 [ { device = "/dev/vda1"; } ];
};
installedSystem = (import ../lib/eval-config.nix {
inherit system;
modules = [ installedConfig ];
}).config.system.build.toplevel;
in makeTest {
name = "hibernate";
probe = { pkgs, ...}: { nodes = {
environment.systemPackages = [ pkgs.netcat ]; # System configuration used for installing the installedConfig from above.
machine = { config, lib, pkgs, ... }: with lib; {
imports = [
../modules/profiles/installation-device.nix
../modules/profiles/base.nix
];
nix.binaryCaches = mkForce [ ];
nix.extraOptions = ''
hashed-mirrors =
connect-timeout = 1
'';
virtualisation.diskSize = 8 * 1024;
virtualisation.emptyDiskImages = [
# Small root disk for installer
512
];
virtualisation.bootDevice = "/dev/vdb";
}; };
}; };
# 9P doesn't support reconnection to virtio transport after a hibernation. # 9P doesn't support reconnection to virtio transport after a hibernation.
# Therefore, machine just hangs on any Nix store access. # Therefore, machine just hangs on any Nix store access.
# To work around it we run a daemon which listens to a TCP connection and # To avoid this, we install NixOS onto a temporary disk with everything we need
# try to connect to it as a test. # included into the store.
testScript = testScript =
'' ''
def create_named_machine(name):
return create_machine(
{
"qemuFlags": "-cpu max ${
if system == "x86_64-linux" then "-m 1024"
else "-m 768 -enable-kvm -machine virt,gic-version=host"}",
"hdaInterface": "virtio",
"hda": "vm-state-machine/machine.qcow2",
"name": name,
}
)
# Install NixOS
machine.start() machine.start()
machine.wait_for_unit("multi-user.target") machine.succeed(
machine.succeed("mkswap /dev/vdb") # Partition /dev/vda
machine.succeed("swapon -a") "flock /dev/vda parted --script /dev/vda -- mklabel msdos"
machine.start_job("listener") + " mkpart primary linux-swap 1M 1024M"
machine.wait_for_open_port(4444) + " mkpart primary ext2 1024M -1s",
machine.succeed("systemctl hibernate &") "udevadm settle",
machine.wait_for_shutdown() "mkfs.ext3 -L nixos /dev/vda2",
probe.wait_for_unit("multi-user.target") "mount LABEL=nixos /mnt",
machine.start() "mkswap /dev/vda1 -L swap",
probe.wait_until_succeeds("echo test | nc machine 4444 -N") # Install onto /mnt
"nix-store --load-db < ${pkgs.closureInfo {rootPaths = [installedSystem];}}/registration",
"nixos-install --root /mnt --system ${installedSystem} --no-root-passwd",
)
machine.shutdown()
# Start up
hibernate = create_named_machine("hibernate")
# Drop in file that checks if we un-hibernated properly (and not booted fresh)
hibernate.succeed(
"mkdir /run/test",
"mount -t ramfs -o size=1m ramfs /run/test",
"echo not persisted to disk > /run/test/suspended",
)
# Hibernate machine
hibernate.succeed("systemctl hibernate &")
hibernate.wait_for_shutdown()
# Restore machine from hibernation, validate our ramfs file is there.
resume = create_named_machine("resume")
resume.start()
resume.succeed("grep 'not persisted to disk' /run/test/suspended")
''; '';
}) }

View file

@ -0,0 +1,161 @@
import ./make-test-python.nix (
{ pkgs, lib, ... }:
let
initiatorName = "iqn.2020-08.org.linux-iscsi.initiatorhost:example";
targetName = "iqn.2003-01.org.linux-iscsi.target.x8664:sn.acf8fd9c23af";
in
{
name = "iscsi";
meta = {
maintainers = pkgs.lib.teams.deshaw.members
++ (with pkgs.lib.maintainers; [ ajs124 ]);
};
nodes = {
target = { config, pkgs, lib, ... }: {
services.target = {
enable = true;
config = {
fabric_modules = [];
storage_objects = [
{
dev = "/dev/vdb";
name = "test";
plugin = "block";
write_back = true;
wwn = "92b17c3f-6b40-4168-b082-ceeb7b495522";
}
];
targets = [
{
fabric = "iscsi";
tpgs = [
{
enable = true;
attributes = {
authentication = 0;
generate_node_acls = 1;
};
luns = [
{
alias = "94dfe06967";
alua_tg_pt_gp_name = "default_tg_pt_gp";
index = 0;
storage_object = "/backstores/block/test";
}
];
node_acls = [
{
mapped_luns = [
{
alias = "d42f5bdf8a";
index = 0;
tpg_lun = 0;
write_protect = false;
}
];
node_wwn = initiatorName;
}
];
portals = [
{
ip_address = "0.0.0.0";
iser = false;
offload = false;
port = 3260;
}
];
tag = 1;
}
];
wwn = targetName;
}
];
};
};
networking.firewall.allowedTCPPorts = [ 3260 ];
networking.firewall.allowedUDPPorts = [ 3260 ];
virtualisation.memorySize = 2048;
virtualisation.emptyDiskImages = [ 2048 ];
};
initiatorAuto = { nodes, config, pkgs, ... }: {
services.openiscsi = {
enable = true;
enableAutoLoginOut = true;
discoverPortal = "target";
name = initiatorName;
};
environment.systemPackages = with pkgs; [
xfsprogs
];
system.extraDependencies = [ nodes.initiatorRootDisk.config.system.build.toplevel ];
nix.binaryCaches = lib.mkForce [];
nix.extraOptions = ''
hashed-mirrors =
connect-timeout = 1
'';
};
initiatorRootDisk = { config, pkgs, modulesPath, lib, ... }: {
boot.loader.grub.enable = false;
boot.kernelParams = lib.mkOverride 5 (
[
"boot.shell_on_fail"
"console=tty1"
"ip=${config.networking.primaryIPAddress}:::255.255.255.0::ens9:none"
]
);
# defaults to true, puts some code in the initrd that tries to mount an overlayfs on /nix/store
virtualisation.writableStore = false;
fileSystems = lib.mkOverride 5 {
"/" = {
fsType = "xfs";
device = "/dev/sda";
options = [ "_netdev" ];
};
};
boot.iscsi-initiator = {
discoverPortal = "target";
name = initiatorName;
target = targetName;
};
};
};
testScript = { nodes, ... }: ''
target.start()
target.wait_for_unit("iscsi-target.service")
initiatorAuto.start()
initiatorAuto.wait_for_unit("iscsid.service")
initiatorAuto.wait_for_unit("iscsi.service")
initiatorAuto.get_unit_info("iscsi")
initiatorAuto.succeed("set -x; while ! test -e /dev/sda; do sleep 1; done")
initiatorAuto.succeed("mkfs.xfs /dev/sda")
initiatorAuto.succeed("mkdir /mnt && mount /dev/sda /mnt")
initiatorAuto.succeed(
"nixos-install --no-bootloader --no-root-passwd --system ${nodes.initiatorRootDisk.config.system.build.toplevel}"
)
initiatorAuto.succeed("umount /mnt && rmdir /mnt")
initiatorAuto.shutdown()
initiatorRootDisk.start()
initiatorRootDisk.wait_for_unit("multi-user.target")
initiatorRootDisk.wait_for_unit("iscsid")
initiatorRootDisk.succeed("touch test")
initiatorRootDisk.shutdown()
'';
}
)

View file

@ -0,0 +1,34 @@
let
defaultPort = 8080;
customPort = 4242;
in
import ./make-test-python.nix ({ pkgs, ... }: {
name = "podgrab";
nodes = {
default = { ... }: {
services.podgrab.enable = true;
};
customized = { ... }: {
services.podgrab = {
enable = true;
port = customPort;
};
};
};
testScript = ''
start_all()
default.wait_for_unit("podgrab")
default.wait_for_open_port("${toString defaultPort}")
default.succeed("curl --fail http://localhost:${toString defaultPort}")
customized.wait_for_unit("podgrab")
customized.wait_for_open_port("${toString customPort}")
customized.succeed("curl --fail http://localhost:${toString customPort}")
'';
meta.maintainers = with pkgs.lib.maintainers; [ ambroisie ];
})

View file

@ -4,13 +4,13 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "flacon"; pname = "flacon";
version = "6.1.0"; version = "7.0.1";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "flacon"; owner = "flacon";
repo = "flacon"; repo = "flacon";
rev = "v${version}"; rev = "v${version}";
sha256 = "04yp3aym7h70xjni9ancqv5lc4zds5a8dgw3fzgqs8k5nmh074gv"; sha256 = "sha256-35tARJkyhC8EisIyDCwuT/UUruzLjJRUuZysuqeNssM=";
}; };
nativeBuildInputs = [ cmake pkg-config wrapQtAppsHook ]; nativeBuildInputs = [ cmake pkg-config wrapQtAppsHook ];

View file

@ -0,0 +1,69 @@
{ lib
, python3
, fetchFromGitHub
, appstream-glib
, desktop-file-utils
, gettext
, glib
, gobject-introspection
, gst_all_1
, gtk3
, libhandy
, librsvg
, meson
, ninja
, pkg-config
, wrapGAppsHook
}:
python3.pkgs.buildPythonApplication rec {
pname = "mousai";
version = "0.3.1";
format = "other";
src = fetchFromGitHub {
owner = "SeaDve";
repo = "Mousai";
rev = "v${version}";
sha256 = "0x57dci0prhlj79h74yh79cazn48rn0bckz5j3z4njk4fwc3fvfx";
};
postPatch = ''
patchShebangs build-aux/meson
'';
nativeBuildInputs = [
appstream-glib
desktop-file-utils
gettext
glib
gtk3
meson
ninja
pkg-config
wrapGAppsHook
];
buildInputs = [
gobject-introspection
gst_all_1.gstreamer
gst_all_1.gst-plugins-base
gst_all_1.gst-plugins-good
gtk3
libhandy
librsvg
];
propagatedBuildInputs = with python3.pkgs; [
pygobject3
requests
];
meta = with lib; {
description = "Identify any songs in seconds";
homepage = "https://github.com/SeaDve/Mousai";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ dotlambda ];
};
}

View file

@ -30,7 +30,6 @@ python3.pkgs.buildPythonApplication rec {
++ optionals withDbusPython [ dbus-python ] ++ optionals withDbusPython [ dbus-python ]
++ optionals withPyInotify [ pyinotify ] ++ optionals withPyInotify [ pyinotify ]
++ optionals withMusicBrainzNgs [ musicbrainzngs ] ++ optionals withMusicBrainzNgs [ musicbrainzngs ]
++ optionals stdenv.isDarwin [ pyobjc ]
++ optionals withPahoMqtt [ paho-mqtt ]; ++ optionals withPahoMqtt [ paho-mqtt ];
LC_ALL = "en_US.UTF-8"; LC_ALL = "en_US.UTF-8";

View file

@ -1,7 +1,7 @@
{ stdenv, lib, zlib, glib, alsaLib, dbus, gtk3, atk, pango, freetype, fontconfig { stdenv, lib, zlib, glib, alsaLib, dbus, gtk3, atk, pango, freetype, fontconfig
, libgnome-keyring3, gdk-pixbuf, cairo, cups, expat, libgpgerror, nspr , libgnome-keyring3, gdk-pixbuf, cairo, cups, expat, libgpgerror, nspr
, gconf, nss, xorg, libcap, systemd, libnotify, libsecret, libuuid, at-spi2-atk , gconf, nss, xorg, libcap, systemd, libnotify, libsecret, libuuid, at-spi2-atk
, at-spi2-core, libdbusmenu , at-spi2-core, libdbusmenu, mesa
}: }:
let let
@ -12,6 +12,7 @@ let
xorg.libXcomposite xorg.libXi xorg.libXfixes xorg.libXrandr xorg.libXcomposite xorg.libXi xorg.libXfixes xorg.libXrandr
xorg.libXcursor xorg.libxkbfile xorg.libXScrnSaver libcap systemd libnotify xorg.libXcursor xorg.libxkbfile xorg.libXScrnSaver libcap systemd libnotify
xorg.libxcb libsecret libuuid at-spi2-atk at-spi2-core libdbusmenu xorg.libxcb libsecret libuuid at-spi2-atk at-spi2-core libdbusmenu
mesa # required for libgbm
]; ];
libPathNative = lib.makeLibraryPath packages; libPathNative = lib.makeLibraryPath packages;

View file

@ -12,14 +12,14 @@ assert stdenv ? glibc;
# find the downloads needed for new versions # find the downloads needed for new versions
# #
# to test: # to test:
# $ for e in cpp modeling platform sdk java committers rcp rust; do nix build -f default.nix pkgs.eclipses.eclipse-${e} -o eclipse-${e}; done # $ for e in cpp modeling platform sdk java jee committers rcp; do nix build -f default.nix pkgs.eclipses.eclipse-${e} -o eclipse-${e}; done
let let
platform_major = "4"; platform_major = "4";
platform_minor = "18"; platform_minor = "19";
year = "2020"; year = "2021";
month = "12"; month = "03";
timestamp = "${year}${month}021800"; timestamp = "${year}${month}031800";
gtk = gtk3; gtk = gtk3;
in rec { in rec {
@ -37,7 +37,7 @@ in rec {
src = src =
fetchurl { fetchurl {
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-cpp-${year}-${month}-R-linux-gtk-x86_64.tar.gz"; url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-cpp-${year}-${month}-R-linux-gtk-x86_64.tar.gz";
sha512 = "MR6ddNmBKyXCyVGlGPfq6K2zJRywy4I5QDXji3rh81eJQ6zkEguo+VvD75i/szg/+FbCVA09vDVV06JgL4SHwQ=="; sha512 = "3j0lmll0glcr9p0hf49jiaq9xr8hadsy0y58wbbkdpldj3rclxr056dkswmiw2bkypfiwrjygbli5qxyp6mz380562hc2kjwijqq476";
}; };
}; };
@ -49,7 +49,7 @@ in rec {
src = src =
fetchurl { fetchurl {
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-modeling-${year}-${month}-R-linux-gtk-x86_64.tar.gz"; url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-modeling-${year}-${month}-R-linux-gtk-x86_64.tar.gz";
sha512 = "hSi3IL+fWhlUfEJYv4LFO7WNbZpiofAgNGZbEOIBS0VpeHfJ5Y6UKMKMLfQlG3hlkAL5jg/cEJKb/ad4DxHbjQ=="; sha512 = "0iqz9a3ixcbmaci6lnspdnzwd2h1fcygi54hmsl89pq3d1k5scyhcl123ixi24csi782w847bn0lq00n0zwras9akmnhsflra4mw5pz";
}; };
}; };
@ -61,7 +61,7 @@ in rec {
src = src =
fetchurl { fetchurl {
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/eclipse/downloads/drops${platform_major}/R-${platform_major}.${platform_minor}-${timestamp}/eclipse-platform-${platform_major}.${platform_minor}-linux-gtk-x86_64.tar.gz"; url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/eclipse/downloads/drops${platform_major}/R-${platform_major}.${platform_minor}-${timestamp}/eclipse-platform-${platform_major}.${platform_minor}-linux-gtk-x86_64.tar.gz";
sha512 = "cPRa7ICogpcuwzOlzSSCEcWpwpUhQuIv6lGBKuAu9mOwj7Nz0TPaWVWNqN1541uVRXVTzcWX+mwc2UBPzWUPxg=="; sha512 = "03v1ly7j9d9qnl3d9rl5a9kp483dz8i8v3cfnh55ksm9fk8iy2fzg6wq178ggnx2z5x9k88a4wk6n647yilh2hgc2l7926imkh2j1ly";
}; };
}; };
@ -86,7 +86,7 @@ in rec {
src = src =
fetchurl { fetchurl {
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/eclipse/downloads/drops${platform_major}/R-${platform_major}.${platform_minor}-${timestamp}/eclipse-SDK-${platform_major}.${platform_minor}-linux-gtk-x86_64.tar.gz"; url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/eclipse/downloads/drops${platform_major}/R-${platform_major}.${platform_minor}-${timestamp}/eclipse-SDK-${platform_major}.${platform_minor}-linux-gtk-x86_64.tar.gz";
sha512 = "iN6z5iSJ2bhE1IH3uJj7aiaF/nSIgIAqadvaTBpE4gkgLAXgtfraFAzgcw0zJr5m2u5mULfW45hLkmIXselniQ=="; sha512 = "37m91my121pch12bwpwk5svfqkm7vl07wjx4fkhpy947v5kjf36hm6x0i45swdg7f0hk72y2qz5ka15ki5jv890qy5psj6z7ax9sys7";
}; };
}; };
@ -98,7 +98,19 @@ in rec {
src = src =
fetchurl { fetchurl {
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-java-${year}-${month}-R-linux-gtk-x86_64.tar.gz"; url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-java-${year}-${month}-R-linux-gtk-x86_64.tar.gz";
sha512 = "HVqsWUVNNRdcaziGdNI96R9F2VMUE4nYK1VX1G3pK+srFDlkJ7+rj2sZjtWL7WcJR1XSbT03nJJzPyp01RsCvQ=="; sha512 = "3qrnj6krhrqc9rfwlim3v7kshwfhsi050pszw6xdfbj56mzr9whr7l76isbpxd5j0zipgfw3qrzrx379pdp899d35fv284ilvllzl4k";
};
};
### Eclipse Java EE
eclipse-jee = buildEclipse {
name = "eclipse-jee-${platform_major}.${platform_minor}";
description = "Eclipse IDE for Enterprise Java and Web Developers";
src =
fetchurl {
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-jee-${year}-${month}-R-linux-gtk-x86_64.tar.gz";
sha512 = "04k4x9imabxddqlrgajn33ak8i58wcap40ll09xz23d1sxn9a8prh01s06ymgwg6ldg939srphvbz4112p8p0b1hl7m25a02qll91zv";
}; };
}; };
@ -110,7 +122,7 @@ in rec {
src = src =
fetchurl { fetchurl {
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-committers-${year}-${month}-R-linux-gtk-x86_64.tar.gz"; url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-committers-${year}-${month}-R-linux-gtk-x86_64.tar.gz";
sha512 = "UtI4piLNRM3TsM9PzbGgsPqTkiurJ+7Q7jVra45an4YJHtfWcGTxxwUNnRzay6cHT49AjrWtVf1bovWSDXMiQA=="; sha512 = "2yksl3w7yr1a3h4zdpa9zf394r5c185zqxhigdv858ldg46kmr9h0l2c7shbgb16kkybcnrk4x44dhjvh60x8xw6ma05klp4lp9v5va";
}; };
}; };
@ -122,19 +134,7 @@ in rec {
src = src =
fetchurl { fetchurl {
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-rcp-${year}-${month}-R-linux-gtk-x86_64.tar.gz"; url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-rcp-${year}-${month}-R-linux-gtk-x86_64.tar.gz";
sha512 = "9DqNjSx1Ypdzpt1jIOJ9KFx8y+cG55K6bqkWTqnGjjDr4h4mWSzvGjHGUtFrKl92WRzQZKjNPxzVreDMcUkc/g=="; sha512 = "3fhrhwbyqcys56c93s1vl9rbvn269nn5y3cb9f3n1qwgw6i97mim2zy98jl3r8cksf97jwsmqmsqclsgz9v799wcckv81dj1l628382";
};
};
### Eclipse IDE for Rust Developers
eclipse-rust = buildEclipse {
name = "eclipse-rust-${platform_major}.${platform_minor}";
description = "Eclipse IDE for Rust Developers";
src =
fetchurl {
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/technology/epp/downloads/release/${year}-${month}/R/eclipse-rust-${year}-${month}-R-linux-gtk-x86_64.tar.gz";
sha512 = "QbaG1knCMFnVQkPeApcIamJMXPyL8zUQa0ZsTJOuTgU/fD1RiHN7/WS6ax5azzIJhpjEtj2LMU4XV+MwkzResw==";
}; };
}; };

View file

@ -248,12 +248,12 @@ rec {
cdt = buildEclipseUpdateSite rec { cdt = buildEclipseUpdateSite rec {
name = "cdt-${version}"; name = "cdt-${version}";
# find current version at https://www.eclipse.org/cdt/downloads.php # find current version at https://www.eclipse.org/cdt/downloads.php
version = "10.1.0"; version = "10.2.0";
src = fetchzip { src = fetchzip {
stripRoot = false; stripRoot = false;
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/tools/cdt/releases/10.1/${name}/${name}.zip"; url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/tools/cdt/releases/10.2/${name}/${name}.zip";
sha256 = "1hbswcar3a5cw20mwrj82w9pvpkvvj6jrvqqf1lincva0r5sl7h8"; sha256 = "1r30cbpbzw3dfcsn54p6sqip86dqhydhsppjgaz60b6z138vzx49";
}; };
meta = with lib; { meta = with lib; {
@ -474,24 +474,6 @@ rec {
}; };
}; };
jdt = buildEclipseUpdateSite rec {
name = "jdt-${version}";
version = "4.18";
src = fetchzip {
stripRoot = false;
url = "https://www.eclipse.org/downloads/download.php?r=1&nf=1&file=/eclipse/downloads/drops4/R-${version}-202012021800/org.eclipse.jdt-${version}.zip";
sha256 = "q0O6OE2u0bdz1+nOkzXDrrOOzoEbVaXnejx4lX7uZgk=";
};
meta = with lib; {
homepage = "https://www.eclipse.org/jdt/";
description = "Eclipse Java development tools";
license = licenses.epl10;
platforms = platforms.all;
};
};
jdt-codemining = buildEclipsePlugin rec { jdt-codemining = buildEclipsePlugin rec {
name = "jdt-codemining-${version}"; name = "jdt-codemining-${version}";
version = "1.0.0.201806221018"; version = "1.0.0.201806221018";

View file

@ -113,6 +113,7 @@ let
hydraPlatforms = []; hydraPlatforms = [];
# prefer wrapper over the package # prefer wrapper over the package
priority = (neovim.meta.priority or 0) - 1; priority = (neovim.meta.priority or 0) - 1;
mainProgram = "nvim";
}; };
}; };
in in

View file

@ -4,7 +4,7 @@
, alsaLib, atk, at-spi2-atk, at-spi2-core, cairo, dbus, cups, expat , alsaLib, atk, at-spi2-atk, at-spi2-core, cairo, dbus, cups, expat
, gdk-pixbuf, glib, gtk3, libX11, libXScrnSaver, libXcomposite, libXcursor , gdk-pixbuf, glib, gtk3, libX11, libXScrnSaver, libXcomposite, libXcursor
, libXdamage, libXext, libXfixes, libXi, libXrandr, libXrender, libXtst , libXdamage, libXext, libXfixes, libXi, libXrandr, libXrender, libXtst
, libxcb, libuuid, libxshmfence, nspr, nss, pango , libxcb, libuuid, libxshmfence, nspr, nss, pango, mesa
, systemd , systemd
}: }:
@ -50,6 +50,7 @@ stdenv.mkDerivation rec {
libXtst libXtst
libxcb libxcb
libuuid libuuid
mesa # for libgbm
nspr nspr
nss nss
pango pango

View file

@ -1,7 +1,6 @@
{ lib { lib
, fetchFromGitHub , fetchFromGitHub
, xz , xz
, qt5
, wrapQtAppsHook , wrapQtAppsHook
, miniupnpc_2 , miniupnpc_2
, swftools , swftools
@ -10,14 +9,14 @@
pythonPackages.buildPythonPackage rec { pythonPackages.buildPythonPackage rec {
pname = "hydrus"; pname = "hydrus";
version = "434"; version = "436";
format = "other"; format = "other";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "hydrusnetwork"; owner = "hydrusnetwork";
repo = "hydrus"; repo = "hydrus";
rev = "v${version}"; rev = "v${version}";
sha256 = "sha256-7Allc9zawja8DO2idv+MAYZ/cBRTCMd0mbgBLfEVii8="; sha256 = "sha256-FXm8VUEY0OZ6/dc/qNwOXekhv5H2C9jjg/eNDoMvMn0==";
}; };
nativeBuildInputs = [ nativeBuildInputs = [

View file

@ -0,0 +1,115 @@
{ mkDerivation, lib, fetchFromGitHub, copyDesktopItems, makeDesktopItem, qmake
, qtbase, qtxmlpatterns, qttools, qtwebkit, libGL, fontconfig, openssl, poppler
, ffmpeg, libva, alsaLib, SDL, x264, libvpx, libvorbis, libtheora, libogg
, libopus, lame, fdk_aac, libass, quazip, libXext, libXfixes }:
let
importer = mkDerivation rec {
pname = "openboard-importer";
version = "unstable-2016-10-08";
src = fetchFromGitHub {
owner = "OpenBoard-org";
repo = "OpenBoard-Importer";
rev = "47927bda021b4f7f1540b794825fb0d601875e79";
sha256 = "19zhgsimy0f070caikc4vrrqyc8kv2h6rl37sy3iggks8z0g98gf";
};
nativeBuildInputs = [ qmake ];
installPhase = ''
install -Dm755 OpenBoardImporter $out/bin/OpenBoardImporter
'';
};
in mkDerivation rec {
pname = "openboard";
version = "1.6.1";
src = fetchFromGitHub {
owner = "OpenBoard-org";
repo = "OpenBoard";
rev = "v${version}";
sha256 = "sha256-OlGXGIMghil/GG6eso20+CWo/hCjarXGs6edXX9pc/M=";
};
postPatch = ''
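# OpenBoard.pro hard-codes /usr include paths; point them at the Nix store instead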
substituteInPlace OpenBoard.pro \
--replace '/usr/include/quazip' '${quazip}/include/quazip5' \
--replace '/usr/include/poppler' '${poppler.dev}/include/poppler'
'';
nativeBuildInputs = [ qmake copyDesktopItems ];
buildInputs = [
qtbase
qtxmlpatterns
qttools
qtwebkit
libGL
fontconfig
openssl
poppler
ffmpeg
libva
alsaLib
SDL
x264
libvpx
libvorbis
libtheora
libogg
libopus
lame
fdk_aac
libass
quazip
libXext
libXfixes
];
propagatedBuildInputs = [ importer ];
makeFlags = [ "release-install" ];
desktopItems = [
(makeDesktopItem {
name = "OpenBoard";
exec = "OpenBoard %f";
icon = "OpenBoard";
comment = "OpenBoard, an interactive white board application";
desktopName = "OpenBoard";
mimeType = "application/ubz";
categories = "Education;";
startupNotify = true;
})
];
installPhase = ''
runHook preInstall
lrelease OpenBoard.pro
# Replicates release_scripts/linux/package.sh
mkdir -p $out/opt/openboard/i18n
cp -R resources/customizations build/linux/release/product/* $out/opt/openboard/
cp resources/i18n/*.qm $out/opt/openboard/i18n/
install -m644 resources/linux/openboard-ubz.xml $out/opt/openboard/etc/
install -Dm644 resources/images/OpenBoard.png $out/share/icons/hicolor/64x64/apps/OpenBoard.png
runHook postInstall
'';
dontWrapQtApps = true;
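# the installPhase only populates $out/opt, so the Qt environment is applied by hand in postFixup below (which also provides $out/bin/OpenBoard)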
postFixup = ''
makeWrapper $out/opt/openboard/OpenBoard $out/bin/OpenBoard \
"''${qtWrapperArgs[@]}"
'';
meta = with lib; {
description = "Interactive whiteboard application";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ fufexan ];
platforms = platforms.linux;
};
}

View file

@ -1 +1 @@
WGET_ARGS=( http://download.kde.org/stable/release-service/20.12.2/src -A '*.tar.xz' ) WGET_ARGS=( http://download.kde.org/stable/release-service/20.12.3/src -A '*.tar.xz' )

File diff suppressed because it is too large

View file

@ -7,6 +7,7 @@
, glib , glib
, gtk3 , gtk3
, libnotify , libnotify
, scandir ? null
}: }:
python3Packages.buildPythonApplication rec { python3Packages.buildPythonApplication rec {

View file

@ -3,13 +3,13 @@
mkDerivation rec { mkDerivation rec {
pname = "cura"; pname = "cura";
version = "4.8.0"; version = "4.9.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "Ultimaker"; owner = "Ultimaker";
repo = "Cura"; repo = "Cura";
rev = version; rev = version;
sha256 = "060fqzspipm93ks0inrj7yrj5wmvkdfv8xaxrv22590yb9f95s9m"; sha256 = "1q515qwrzla3ikbsjmk91y0nrbwih11jycgmd50lkrmnkh7qj0r2";
}; };
materials = fetchFromGitHub { materials = fetchFromGitHub {
@ -22,7 +22,7 @@ mkDerivation rec {
buildInputs = [ qtbase qtquickcontrols2 qtgraphicaleffects ]; buildInputs = [ qtbase qtquickcontrols2 qtgraphicaleffects ];
propagatedBuildInputs = with python3.pkgs; [ propagatedBuildInputs = with python3.pkgs; [
libsavitar numpy-stl pyserial requests uranium zeroconf pynest2d libsavitar numpy-stl pyserial requests uranium zeroconf pynest2d
sentry-sdk trimesh sentry-sdk trimesh keyring
] ++ plugins; ] ++ plugins;
nativeBuildInputs = [ cmake python3.pkgs.wrapPython ]; nativeBuildInputs = [ cmake python3.pkgs.wrapPython ];

View file

@ -1,4 +1,4 @@
{ lib, stdenv, fetchFromGitHub, fetchpatch, python3Packages, libspnav }: { lib, stdenv, fetchFromGitHub, fetchpatch, python3Packages, libspnav, jq }:
let let
@ -34,18 +34,28 @@ let
rawmouse = stdenv.mkDerivation rec { rawmouse = stdenv.mkDerivation rec {
pname = "RawMouse"; pname = "RawMouse";
version = "1.0.13"; version = "1.1.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "smartavionics"; owner = "smartavionics";
repo = pname; repo = pname;
rev = version; rev = version;
sha256 = "1cj40pgsfcwliz47mkiqjbslkwcm34qb1pajc2mcljgflcnickly"; sha256 = "0hvi7qwd4xfnqnhbj9dgfjmvv9df7s42asf3fdfxv43n6nx74scw";
}; };
nativeBuildInputs = [ jq ];
propagatedBuildInputs = with python3Packages; [
hidapi
];
buildPhase = '' buildPhase = ''
substituteInPlace RawMouse/config.json --replace \ jq 'del(.devices) | .libspnav="${libspnav}/lib/libspnav.so"' \
/usr/local/lib/libspnav.so ${libspnav}/lib/libspnav.so <RawMouse/config.json >RawMouse/config.json.new
mv RawMouse/config.json.new RawMouse/config.json
# remove prebuilt binaries
rm -r RawMouse/hidapi
''; '';
installPhase = '' installPhase = ''

View file

@ -2,13 +2,13 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "curaengine"; pname = "curaengine";
version = "4.8.0"; version = "4.9.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "Ultimaker"; owner = "Ultimaker";
repo = "CuraEngine"; repo = "CuraEngine";
rev = version; rev = version;
sha256 = "083l327ry6hv3yaa1p8dx1hx7fm12b0lh5nlbshxjyym0vi15rw2"; sha256 = "0b82hwn7pb73h1azaandq93bkzlzskhgk71pwf4yws0j9bm6z084";
}; };
nativeBuildInputs = [ cmake ]; nativeBuildInputs = [ cmake ];
@ -19,7 +19,7 @@ stdenv.mkDerivation rec {
meta = with lib; { meta = with lib; {
description = "A powerful, fast and robust engine for processing 3D models into 3D printing instruction"; description = "A powerful, fast and robust engine for processing 3D models into 3D printing instruction";
homepage = "https://github.com/Ultimaker/CuraEngine"; homepage = "https://github.com/Ultimaker/CuraEngine";
license = licenses.agpl3; license = licenses.agpl3Only;
platforms = platforms.linux; platforms = platforms.linux;
maintainers = with maintainers; [ abbradar gebner ]; maintainers = with maintainers; [ abbradar gebner ];
}; };

View file

@ -20,7 +20,7 @@
}: }:
let let
version = "4.1.1"; version = "4.1.2";
libsecp256k1_name = libsecp256k1_name =
if stdenv.isLinux then "libsecp256k1.so.0" if stdenv.isLinux then "libsecp256k1.so.0"
@ -51,7 +51,7 @@ python3.pkgs.buildPythonApplication {
src = fetchurl { src = fetchurl {
url = "https://download.electrum.org/${version}/Electrum-${version}.tar.gz"; url = "https://download.electrum.org/${version}/Electrum-${version}.tar.gz";
sha256 = "0yg6ld92a4xgn7y8i51hmr3kmgmrbrjwniikkmyq9q141h2drb80"; sha256 = "05m6vbd4sfjk536kwa5wa3kv21jxxqnglx0ddvnmxfhf98371bhk";
}; };
postUnpack = '' postUnpack = ''
@ -59,6 +59,11 @@ python3.pkgs.buildPythonApplication {
cp -ar ${tests} $sourceRoot/electrum/tests cp -ar ${tests} $sourceRoot/electrum/tests
''; '';
prePatch = ''
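# drop the dnspython upper bound pinned in upstream's requirements.txt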
substituteInPlace contrib/requirements/requirements.txt \
--replace "dnspython>=2.0,<2.1" "dnspython>=2.0"
'';
nativeBuildInputs = lib.optionals enableQt [ wrapQtAppsHook ]; nativeBuildInputs = lib.optionals enableQt [ wrapQtAppsHook ];
propagatedBuildInputs = with python3.pkgs; [ propagatedBuildInputs = with python3.pkgs; [

View file

@ -1,33 +1,56 @@
{ lib, stdenv, fetchurl, pkg-config, autoconf, automake, gettext { lib
, fluxbox, bc, gtkmm2, glibmm, libglademm, libsigcxx }: , stdenv
, fetchurl
, autoconf
, automake
, bc
, fluxbox
, gettext
, glibmm
, gtkmm2
, libglademm
, libsigcxx
, pkg-config
}:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "fme"; pname = "fme";
version = "1.1.3"; version = "1.1.3";
src = fetchurl { src = fetchurl {
url = "https://github.com/rdehouss/fme/archive/v${version}.tar.gz"; url = "https://github.com/rdehouss/fme/archive/v${version}.tar.gz";
sha256 = "d1c81a6a38c0faad02943ad65d6d0314bd205c6de841669a2efe43e4c503e63d"; hash = "sha256-0cgaajjA+q0ClDrWXW0DFL0gXG3oQWaaLv5D5MUD5j0=";
}; };
nativeBuildInputs = [ pkg-config ]; nativeBuildInputs = [
buildInputs = [ autoconf automake gettext fluxbox bc gtkmm2 glibmm libglademm libsigcxx ]; autoconf
automake
gettext
pkg-config
];
buildInputs = [
bc
fluxbox
glibmm
gtkmm2
libglademm
libsigcxx
];
preConfigure = '' preConfigure = ''
./autogen.sh ./autogen.sh
''; '';
meta = with lib; { meta = with lib; {
homepage = "https://github.com/rdehouss/fme/";
description = "Editor for Fluxbox menus"; description = "Editor for Fluxbox menus";
longDescription = '' longDescription = ''
Fluxbox Menu Editor is a menu editor for the Window Manager Fluxbox written in C++ Fluxbox Menu Editor is a menu editor for the Window Manager Fluxbox
with the libraries Gtkmm, Glibmm, libglademm and gettext for internationalization. written in C++ with the libraries Gtkmm, Glibmm, libglademm and gettext
Its user-friendly interface will help you to edit, delete, move (Drag and Drop) for internationalization. Its user-friendly interface will help you to
a row, a submenu, etc very easily. edit, delete, move (Drag and Drop) a row, a submenu, etc very easily.
''; '';
homepage = "https://github.com/rdehouss/fme/"; license = licenses.gpl2Plus;
license = licenses.gpl2;
maintainers = [ maintainers.AndersonTorres ]; maintainers = [ maintainers.AndersonTorres ];
platforms = platforms.linux; platforms = platforms.linux;
}; };

View file

@ -23,7 +23,7 @@ buildPythonApplication rec {
dateutil gflags httplib2 parsedatetime six vobject dateutil gflags httplib2 parsedatetime six vobject
google-api-python-client oauth2client uritemplate google-api-python-client oauth2client uritemplate
libnotify libnotify
] ++ lib.optional (!isPy3k) futures; ];
# There are no tests as of 4.0.0a4 # There are no tests as of 4.0.0a4
doCheck = false; doCheck = false;

View file

@ -2,13 +2,13 @@
mkDerivation rec { mkDerivation rec {
pname = "heimer"; pname = "heimer";
version = "2.2.0"; version = "2.4.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "juzzlin"; owner = "juzzlin";
repo = pname; repo = pname;
rev = version; rev = version;
sha256 = "sha256-upsOmf46bCO8sVp5dBHPLUBZYZP3JyXa7H5KXbd76qo="; sha256 = "sha256-5cepT9Tfr/3nYbxRAMqKSUDB+suEyojnexWxZ0i7GBw=";
}; };
nativeBuildInputs = [ cmake ]; nativeBuildInputs = [ cmake ];

View file

@ -1,33 +1,40 @@
{ lib, python3Packages, ffmpeg_3 }: { lib, python3Packages, ffmpeg }:
python3Packages.buildPythonApplication rec { python3Packages.buildPythonApplication rec {
version = "2.1.1"; version = "2.2";
pname = "sigal"; pname = "sigal";
src = python3Packages.fetchPypi { src = python3Packages.fetchPypi {
inherit version pname; inherit version pname;
sha256 = "0l07p457svznirz7qllgyl3qbhiisv7klhz7cbdw6417hxf9bih8"; sha256 = "sha256-49XsNdZuicsiYJZuF1UdqMA4q33Ly/Ug/Hc4ybJKmPo=";
}; };
disabled = !(python3Packages.pythonAtLeast "3.6"); disabled = !(python3Packages.pythonAtLeast "3.6");
checkInputs = with python3Packages; [ pytest ];
propagatedBuildInputs = with python3Packages; [ propagatedBuildInputs = with python3Packages; [
# install_requires
jinja2 jinja2
markdown markdown
pillow pillow
pilkit pilkit
clint
click click
blinker blinker
natsort natsort
setuptools_scm # extras_require
boto
brotli
feedgenerator
zopfli
cryptography
]; ];
makeWrapperArgs = [ "--prefix PATH : ${ffmpeg_3}/bin" ]; checkInputs = [
ffmpeg
] ++ (with python3Packages; [
pytestCheckHook
]);
# No tests included makeWrapperArgs = [ "--prefix PATH : ${ffmpeg}/bin" ];
doCheck = false;
meta = with lib; { meta = with lib; {
description = "Yet another simple static gallery generator"; description = "Yet another simple static gallery generator";

View file

@ -2,16 +2,16 @@
rustPlatform.buildRustPackage rec { rustPlatform.buildRustPackage rec {
name = "xplr"; name = "xplr";
version = "0.5.6"; version = "0.5.7";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "sayanarijit"; owner = "sayanarijit";
repo = name; repo = name;
rev = "v${version}"; rev = "v${version}";
sha256 = "070jyii2p7qk6gij47n5i9a8bal5iijgn8cv79mrija3pgddniaz"; sha256 = "1j417g0isy3cpxdb2wrvrvypnx99qffi83s4a98791wyi8yqiw6b";
}; };
cargoSha256 = "113f0hbgy8c9gxl70b6frr0klfc8rm5klgwls7fgbb643rdh03b9"; cargoSha256 = "0kpwhk2f4czhilcnfqkw5hw2vxvldxqg491xkkgxjkph3w4qv3ji";
meta = with lib; { meta = with lib; {
description = "A hackable, minimal, fast TUI file explorer"; description = "A hackable, minimal, fast TUI file explorer";

View file

@ -151,6 +151,7 @@ let
bison gperf libkrb5 bison gperf libkrb5
glib gtk3 dbus-glib glib gtk3 dbus-glib
libXScrnSaver libXcursor libXtst libxshmfence libGLU libGL libXScrnSaver libXcursor libXtst libxshmfence libGLU libGL
mesa # required for libgbm
pciutils protobuf speechd libXdamage at-spi2-core pciutils protobuf speechd libXdamage at-spi2-core
jre jre
pipewire pipewire

View file

@ -6,7 +6,7 @@
, libjpeg, zlib, dbus, dbus-glib, bzip2, xorg , libjpeg, zlib, dbus, dbus-glib, bzip2, xorg
, freetype, fontconfig, file, nspr, nss, nss_3_53 , freetype, fontconfig, file, nspr, nss, nss_3_53
, yasm, libGLU, libGL, sqlite, unzip, makeWrapper , yasm, libGLU, libGL, sqlite, unzip, makeWrapper
, hunspell, libXdamage, libevent, libstartup_notification , hunspell, libevent, libstartup_notification
, libvpx_1_8 , libvpx_1_8
, icu67, libpng, jemalloc, glib, pciutils , icu67, libpng, jemalloc, glib, pciutils
, autoconf213, which, gnused, rustPackages, rustPackages_1_45 , autoconf213, which, gnused, rustPackages, rustPackages_1_45
@ -22,7 +22,7 @@
, pulseaudioSupport ? stdenv.isLinux, libpulseaudio , pulseaudioSupport ? stdenv.isLinux, libpulseaudio
, ffmpegSupport ? true , ffmpegSupport ? true
, gtk3Support ? true, gtk2, gtk3, wrapGAppsHook , gtk3Support ? true, gtk2, gtk3, wrapGAppsHook
, waylandSupport ? true, libxkbcommon , waylandSupport ? true, libxkbcommon, libdrm
, ltoSupport ? (stdenv.isLinux && stdenv.is64bit), overrideCC, buildPackages , ltoSupport ? (stdenv.isLinux && stdenv.is64bit), overrideCC, buildPackages
, gssSupport ? true, libkrb5 , gssSupport ? true, libkrb5
, pipewireSupport ? waylandSupport && webrtcSupport, pipewire , pipewireSupport ? waylandSupport && webrtcSupport, pipewire
@ -161,6 +161,7 @@ buildStdenv.mkDerivation ({
xorg.libX11 xorg.libXrender xorg.libXft xorg.libXt file xorg.libX11 xorg.libXrender xorg.libXft xorg.libXt file
xorg.pixman yasm libGLU libGL xorg.pixman yasm libGLU libGL
xorg.xorgproto xorg.xorgproto
xorg.libXdamage
xorg.libXext makeWrapper xorg.libXext makeWrapper
libevent libstartup_notification /* cairo */ libevent libstartup_notification /* cairo */
libpng jemalloc glib libpng jemalloc glib
@ -175,7 +176,7 @@ buildStdenv.mkDerivation ({
++ lib.optional pulseaudioSupport libpulseaudio # only headers are needed ++ lib.optional pulseaudioSupport libpulseaudio # only headers are needed
++ lib.optional gtk3Support gtk3 ++ lib.optional gtk3Support gtk3
++ lib.optional gssSupport libkrb5 ++ lib.optional gssSupport libkrb5
++ lib.optional waylandSupport libxkbcommon ++ lib.optionals waylandSupport [ libxkbcommon libdrm ]
++ lib.optional pipewireSupport pipewire ++ lib.optional pipewireSupport pipewire
++ lib.optional (lib.versionAtLeast ffversion "82") gnum4 ++ lib.optional (lib.versionAtLeast ffversion "82") gnum4
++ lib.optionals buildStdenv.isDarwin [ CoreMedia ExceptionHandling Kerberos ++ lib.optionals buildStdenv.isDarwin [ CoreMedia ExceptionHandling Kerberos

View file

@ -2,13 +2,13 @@
buildGoModule rec { buildGoModule rec {
pname = "fluxctl"; pname = "fluxctl";
version = "1.22.1"; version = "1.22.2";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "weaveworks"; owner = "weaveworks";
repo = "flux"; repo = "flux";
rev = version; rev = version;
sha256 = "sha256-SaDO3a50CLhgLafCdgKEfHpuHdIweSy5L/TUgEUv5CM="; sha256 = "sha256-qYdVplNHyD31m4IbIeL3x3nauZLl1XquslS3WrtUXBk=";
}; };
vendorSha256 = "sha256-4uSw/9lI/rdDqy78jNC9eHYW/v/sMFb+sQvwYG6GZks="; vendorSha256 = "sha256-4uSw/9lI/rdDqy78jNC9eHYW/v/sMFb+sQvwYG6GZks=";

View file

@ -2,13 +2,13 @@
buildGoModule rec { buildGoModule rec {
pname = "kubeconform"; pname = "kubeconform";
version = "0.4.6"; version = "0.4.7";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "yannh"; owner = "yannh";
repo = pname; repo = pname;
rev = "v${version}"; rev = "v${version}";
sha256 = "sha256-lduHYYskEPUimEX54ymOyo5jY7GGBB42YTefDMNS4qo="; sha256 = "sha256-ahVdKMx3u2KnJ30wi9rV8JCVg9wPmbgdrtG8IpWWlCs=";
}; };
vendorSha256 = null; vendorSha256 = null;

View file

@ -2,12 +2,12 @@
mkDerivation rec { mkDerivation rec {
pname = "chatterino2"; pname = "chatterino2";
version = "2.2.2"; version = "2.3.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "Chatterino"; owner = "Chatterino";
repo = pname; repo = pname;
rev = "v${version}"; rev = "v${version}";
sha256 = "026cs48hmqkv7k4akbm205avj2pn3x1g7q46chwa707k9km325dz"; sha256 = "0x12zcrbkxn2nn0hqkj1amrxv4q032id282cajzsx7by970r1shd";
fetchSubmodules = true; fetchSubmodules = true;
}; };
nativeBuildInputs = [ qmake pkg-config wrapQtAppsHook ]; nativeBuildInputs = [ qmake pkg-config wrapQtAppsHook ];

View file

@ -1,5 +1,5 @@
{ pname, version, src, binaryName, desktopName { pname, version, src, binaryName, desktopName
, autoPatchelfHook, fetchurl, makeDesktopItem, lib, stdenv, wrapGAppsHook , autoPatchelfHook, makeDesktopItem, lib, stdenv, wrapGAppsHook
, alsaLib, at-spi2-atk, at-spi2-core, atk, cairo, cups, dbus, expat, fontconfig , alsaLib, at-spi2-atk, at-spi2-core, atk, cairo, cups, dbus, expat, fontconfig
, freetype, gdk-pixbuf, glib, gtk3, libcxx, libdrm, libnotify, libpulseaudio, libuuid , freetype, gdk-pixbuf, glib, gtk3, libcxx, libdrm, libnotify, libpulseaudio, libuuid
, libX11, libXScrnSaver, libXcomposite, libXcursor, libXdamage, libXext , libX11, libXScrnSaver, libXcomposite, libXcursor, libXdamage, libXext
@ -18,6 +18,7 @@ in stdenv.mkDerivation rec {
cups cups
libdrm libdrm
libuuid libuuid
libXdamage
libX11 libX11
libXScrnSaver libXScrnSaver
libXtst libXtst

View file

@ -2,7 +2,7 @@
"name": "element-desktop", "name": "element-desktop",
"productName": "Element", "productName": "Element",
"main": "src/electron-main.js", "main": "src/electron-main.js",
"version": "1.7.25", "version": "1.7.26",
"description": "A feature-rich client for Matrix.org", "description": "A feature-rich client for Matrix.org",
"author": "Element", "author": "Element",
"repository": { "repository": {

View file

@ -8,12 +8,12 @@
let let
executableName = "element-desktop"; executableName = "element-desktop";
version = "1.7.25"; version = "1.7.26";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "vector-im"; owner = "vector-im";
repo = "element-desktop"; repo = "element-desktop";
rev = "v${version}"; rev = "v${version}";
sha256 = "sha256-q8hVmTLt/GdLc6NSldLggogObQcPFp+lAeS3wmO0qPo="; sha256 = "1iflsvzn36mywpzags55kjmyq71c3i7f1hgcdcp2ijmnrjk8fy3n";
}; };
in mkYarnPackage rec { in mkYarnPackage rec {
name = "element-desktop-${version}"; name = "element-desktop-${version}";
@ -73,6 +73,7 @@ in mkYarnPackage rec {
meta = with lib; { meta = with lib; {
description = "A feature-rich client for Matrix.org"; description = "A feature-rich client for Matrix.org";
homepage = "https://element.io/"; homepage = "https://element.io/";
changelog = "https://github.com/vector-im/element-desktop/blob/v${version}/CHANGELOG.md";
license = licenses.asl20; license = licenses.asl20;
maintainers = teams.matrix.members; maintainers = teams.matrix.members;
inherit (electron.meta) platforms; inherit (electron.meta) platforms;

View file

@ -12,11 +12,11 @@ let
in stdenv.mkDerivation rec { in stdenv.mkDerivation rec {
pname = "element-web"; pname = "element-web";
version = "1.7.25"; version = "1.7.26";
src = fetchurl { src = fetchurl {
url = "https://github.com/vector-im/element-web/releases/download/v${version}/element-v${version}.tar.gz"; url = "https://github.com/vector-im/element-web/releases/download/v${version}/element-v${version}.tar.gz";
sha256 = "sha256-T4lsGVSUHkw4R7tSeTKPifbhwaTf/YF2vVAakFSrt9k="; sha256 = "17y6wq6w1yk7r1rfply549kpilfraizahf67p00ra7cws66iqkwg";
}; };
installPhase = '' installPhase = ''
@ -32,6 +32,7 @@ in stdenv.mkDerivation rec {
meta = { meta = {
description = "A glossy Matrix collaboration client for the web"; description = "A glossy Matrix collaboration client for the web";
homepage = "https://element.io/"; homepage = "https://element.io/";
changelog = "https://github.com/vector-im/element-web/blob/v${version}/CHANGELOG.md";
maintainers = lib.teams.matrix.members; maintainers = lib.teams.matrix.members;
license = lib.licenses.asl20; license = lib.licenses.asl20;
platforms = lib.platforms.all; platforms = lib.platforms.all;

View file

@ -2,7 +2,7 @@
, gnome2, gtk3, atk, at-spi2-atk, cairo, pango, gdk-pixbuf, glib, freetype, fontconfig , gnome2, gtk3, atk, at-spi2-atk, cairo, pango, gdk-pixbuf, glib, freetype, fontconfig
, dbus, libX11, xorg, libXi, libXcursor, libXdamage, libXrandr, libXcomposite , dbus, libX11, xorg, libXi, libXcursor, libXdamage, libXrandr, libXcomposite
, libXext, libXfixes, libXrender, libXtst, libXScrnSaver, nss, nspr, alsaLib , libXext, libXfixes, libXrender, libXtst, libXScrnSaver, nss, nspr, alsaLib
, cups, expat, libuuid, at-spi2-core, libappindicator-gtk3 , cups, expat, libuuid, at-spi2-core, libappindicator-gtk3, mesa
# Runtime dependencies: # Runtime dependencies:
, systemd, libnotify, libdbusmenu, libpulseaudio , systemd, libnotify, libdbusmenu, libpulseaudio
# Unfortunately this also overwrites the UI language (not just the spell # Unfortunately this also overwrites the UI language (not just the spell
@ -73,6 +73,7 @@ in stdenv.mkDerivation rec {
libappindicator-gtk3 libappindicator-gtk3
libnotify libnotify
libuuid libuuid
mesa # for libgbm
nspr nspr
nss nss
pango pango

View file

@ -61,7 +61,7 @@
, alsaSupport ? stdenv.isLinux, alsaLib , alsaSupport ? stdenv.isLinux, alsaLib
, pulseaudioSupport ? stdenv.isLinux, libpulseaudio , pulseaudioSupport ? stdenv.isLinux, libpulseaudio
, gtk3Support ? true, gtk2, gtk3, wrapGAppsHook , gtk3Support ? true, gtk2, gtk3, wrapGAppsHook
, waylandSupport ? true , waylandSupport ? true, libdrm
, libxkbcommon, calendarSupport ? true , libxkbcommon, calendarSupport ? true
# Use official trademarked branding. Permission obtained at: # Use official trademarked branding. Permission obtained at:
@ -137,12 +137,13 @@ stdenv.mkDerivation rec {
xorg.libXt xorg.libXt
xorg.pixman xorg.pixman
xorg.xorgproto xorg.xorgproto
xorg.libXdamage
zip zip
zlib zlib
] ++ lib.optional alsaSupport alsaLib ] ++ lib.optional alsaSupport alsaLib
++ lib.optional gtk3Support gtk3 ++ lib.optional gtk3Support gtk3
++ lib.optional pulseaudioSupport libpulseaudio ++ lib.optional pulseaudioSupport libpulseaudio
++ lib.optional waylandSupport libxkbcommon; ++ lib.optionals waylandSupport [ libxkbcommon libdrm ];
NIX_CFLAGS_COMPILE =[ NIX_CFLAGS_COMPILE =[
"-I${glib.dev}/include/gio-unix-2.0" "-I${glib.dev}/include/gio-unix-2.0"

View file

@ -1,6 +1,6 @@
{ stdenv, lib, fetchurl, dpkg { stdenv, lib, fetchurl, dpkg
, alsaLib, atk, cairo, cups, dbus, expat, fontconfig, freetype , alsaLib, atk, cairo, cups, dbus, expat, fontconfig, freetype
, gdk-pixbuf, glib, gnome2, pango, nspr, nss, gtk3 , gdk-pixbuf, glib, gnome2, pango, nspr, nss, gtk3, mesa
, xorg, autoPatchelfHook, systemd, libnotify, libappindicator , xorg, autoPatchelfHook, systemd, libnotify, libappindicator
}: }:
@ -20,6 +20,7 @@ let deps = [
gtk3 gtk3
libappindicator libappindicator
libnotify libnotify
mesa
xorg.libX11 xorg.libX11
xorg.libXScrnSaver xorg.libXScrnSaver
xorg.libXcomposite xorg.libXcomposite

View file

@ -13,13 +13,13 @@ with lib;
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "remmina"; pname = "remmina";
version = "1.4.12"; version = "1.4.13";
src = fetchFromGitLab { src = fetchFromGitLab {
owner = "Remmina"; owner = "Remmina";
repo = "Remmina"; repo = "Remmina";
rev = "v${version}"; rev = "v${version}";
sha256 = "sha256-CjlNEmca4Kob5rdpZa+YfvdOIDDDYfhNsGYqGDxSGKY="; sha256 = "sha256-R+RfczAnt5R0YmXt5SmH8gOuWOH0LZ/70pOMDAXJgsE=";
}; };
nativeBuildInputs = [ cmake ninja pkg-config wrapGAppsHook ]; nativeBuildInputs = [ cmake ninja pkg-config wrapGAppsHook ];
@ -51,7 +51,7 @@ stdenv.mkDerivation rec {
''; '';
meta = { meta = {
license = licenses.gpl2; license = licenses.gpl2Plus;
homepage = "https://gitlab.com/Remmina/Remmina"; homepage = "https://gitlab.com/Remmina/Remmina";
description = "Remote desktop client written in GTK"; description = "Remote desktop client written in GTK";
maintainers = with maintainers; [ melsigl ryantm ]; maintainers = with maintainers; [ melsigl ryantm ];

View file

@ -10,7 +10,7 @@ assert withQt -> qt5 != null;
with lib; with lib;
let let
version = "3.4.4"; version = "3.4.5";
variant = if withQt then "qt" else "cli"; variant = if withQt then "qt" else "cli";
in stdenv.mkDerivation { in stdenv.mkDerivation {
@ -20,7 +20,7 @@ in stdenv.mkDerivation {
src = fetchurl { src = fetchurl {
url = "https://www.wireshark.org/download/src/all-versions/wireshark-${version}.tar.xz"; url = "https://www.wireshark.org/download/src/all-versions/wireshark-${version}.tar.xz";
sha256 = "0aad3m8nh4i75dgjs68217135bzqmhmlgjklmpjh1ihmjwgd373j"; sha256 = "sha256-3hqv0QCh4SB8hQ0YDpfdkauNoPXra+7FRfclzbFF0zM=";
}; };
cmakeFlags = [ cmakeFlags = [

View file

@ -21,6 +21,7 @@
, gtk3 , gtk3
, libpulseaudio , libpulseaudio
, libudev0-shim , libudev0-shim
, libdrm
, makeWrapper , makeWrapper
, nspr , nspr
, nss , nss
@ -101,6 +102,7 @@ in stdenv.mkDerivation rec {
gtk2 gtk2
gtk3 gtk3
libpulseaudio libpulseaudio
libdrm
nspr nspr
nss nss
qt5.qtbase qt5.qtbase

View file

@ -1,15 +1,15 @@
{ lib, stdenv, fetchurl, asciidoc, asciidoctor, autoconf, automake, cmake, { lib, stdenv, fetchurl, asciidoc, asciidoctor, autoconf, automake, cmake,
docbook_xsl, fftw, fftwFloat, gfortran, libtool, libusb1, qtbase, docbook_xsl, fftw, fftwFloat, gfortran, libtool, libusb1, qtbase,
qtmultimedia, qtserialport, qttools, texinfo, wrapQtAppsHook }: qtmultimedia, qtserialport, qttools, boost, texinfo, wrapQtAppsHook }:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "wsjtx"; pname = "wsjtx";
version = "2.2.2"; version = "2.3.1";
# This is a "superbuild" tarball containing both wsjtx and a hamlib fork # This is a "superbuild" tarball containing both wsjtx and a hamlib fork
src = fetchurl { src = fetchurl {
url = "http://physics.princeton.edu/pulsar/k1jt/wsjtx-${version}.tgz"; url = "http://physics.princeton.edu/pulsar/k1jt/wsjtx-${version}.tgz";
sha256 = "17agyrhclqyahgdwba8vi9sl7vq03sm00jlyrmjgv34a4czidg0w"; sha256 = "11wzh4bxp9277kbqkyrc063akkk09czgxnkpk8k07vl4s3dan3hh";
}; };
# Hamlib builds with autotools, wsjtx builds with cmake # Hamlib builds with autotools, wsjtx builds with cmake
@ -18,7 +18,7 @@ stdenv.mkDerivation rec {
asciidoc asciidoctor autoconf automake cmake docbook_xsl gfortran libtool asciidoc asciidoctor autoconf automake cmake docbook_xsl gfortran libtool
qttools texinfo wrapQtAppsHook qttools texinfo wrapQtAppsHook
]; ];
buildInputs = [ fftw fftwFloat libusb1 qtbase qtmultimedia qtserialport ]; buildInputs = [ fftw fftwFloat libusb1 qtbase qtmultimedia qtserialport boost ];
# Remove Git dependency from superbuild since sources are included # Remove Git dependency from superbuild since sources are included
patches = [ ./super.patch ]; patches = [ ./super.patch ];
@ -36,6 +36,6 @@ stdenv.mkDerivation rec {
# Older licenses are for the statically-linked hamlib # Older licenses are for the statically-linked hamlib
license = with licenses; [ gpl3Plus gpl2Plus lgpl21Plus ]; license = with licenses; [ gpl3Plus gpl2Plus lgpl21Plus ];
platforms = platforms.linux; platforms = platforms.linux;
maintainers = with maintainers; [ lasandell ]; maintainers = with maintainers; [ lasandell numinit ];
}; };
} }

View file

@ -1,13 +1,13 @@
{lib, stdenv, fetchurl}: {lib, stdenv, fetchurl}:
let version = "1.5.1"; in let version = "1.6.0"; in
stdenv.mkDerivation { stdenv.mkDerivation {
pname = "tetgen"; pname = "tetgen";
inherit version; inherit version;
src = fetchurl { src = fetchurl {
url = "http://wias-berlin.de/software/tetgen/1.5/src/tetgen${version}.tar.gz"; url = "http://wias-berlin.de/software/tetgen/1.5/src/tetgen${version}.tar.gz";
sha256 = "0l5q066crs4cjj7qr0r2gnz8ajkgighngwglr1201h77lcs48sp4"; sha256 = "sha256-h7XmHr06Rx/E8s3XEkwrEd1mOfT+sflBpdL1EQ0Fzjk=";
}; };
installPhase = '' installPhase = ''

View file

@ -1,12 +1,14 @@
{ fetchurl, lib, which, ocamlPackages }: { fetchFromGitHub, lib, which, ocamlPackages }:
let let
pname = "alt-ergo"; pname = "alt-ergo";
version = "2.3.3"; version = "2.4.0";
src = fetchurl { src = fetchFromGitHub {
url = "https://alt-ergo.ocamlpro.com/http/alt-ergo-${version}/alt-ergo-${version}.tar.gz"; owner = "OCamlPro";
sha256 = "124k2a4ikk4wdpmvgjpgl97x9skvr9qznk8m68dzsynzpv6yksaj"; repo = pname;
rev = version;
sha256 = "1jm1yrvsg8iyfp9bb728zdx2i7yb6z7minjrfs27k5ncjqkjm65g";
}; };
useDune2 = true; useDune2 = true;
@ -19,6 +21,7 @@ let alt-ergo-lib = ocamlPackages.buildDunePackage rec {
pname = "alt-ergo-lib"; pname = "alt-ergo-lib";
inherit version src useDune2 nativeBuildInputs; inherit version src useDune2 nativeBuildInputs;
configureFlags = pname; configureFlags = pname;
buildInputs = with ocamlPackages; [ dune-configurator ];
propagatedBuildInputs = with ocamlPackages; [ num ocplib-simplex stdlib-shims zarith ]; propagatedBuildInputs = with ocamlPackages; [ num ocplib-simplex stdlib-shims zarith ];
}; in }; in
@ -36,7 +39,9 @@ ocamlPackages.buildDunePackage {
configureFlags = pname; configureFlags = pname;
buildInputs = [ alt-ergo-parsers ocamlPackages.menhir ]; buildInputs = [ alt-ergo-parsers ] ++ (with ocamlPackages; [
cmdliner menhir ])
;
meta = { meta = {
description = "High-performance theorem prover and SMT solver"; description = "High-performance theorem prover and SMT solver";

View file

@ -5,6 +5,7 @@
, makeWrapper , makeWrapper
, readline , readline
, gmp , gmp
, zlib
# one of # one of
# - "minimal" (~400M): # - "minimal" (~400M):
# Install the bare minimum of packages required by gap to start. # Install the bare minimum of packages required by gap to start.
@ -61,11 +62,11 @@ in
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "gap"; pname = "gap";
# https://www.gap-system.org/Releases/ # https://www.gap-system.org/Releases/
version = "4.10.2"; version = "4.11.0";
src = fetchurl { src = fetchurl {
url = "https://files.gap-system.org/gap-${lib.versions.major version}.${lib.versions.minor version}/tar.bz2/gap-${version}.tar.bz2"; url = "https://files.gap-system.org/gap-${lib.versions.major version}.${lib.versions.minor version}/tar.bz2/gap-${version}.tar.bz2";
sha256 = "0cp6ddk0469zzv1m1vair6gm27ic6c5m77ri8rn0znq3gaps6x94"; sha256 = "sha256-vwcKENwqxgWT/mXfD4c9ctTWdQHobrJipva9SPyGhgI=";
}; };
# remove all non-essential packages (which take up a lot of space) # remove all non-essential packages (which take up a lot of space)
@ -76,6 +77,7 @@ stdenv.mkDerivation rec {
buildInputs = [ buildInputs = [
readline readline
gmp gmp
zlib
]; ];
nativeBuildInputs = [ nativeBuildInputs = [
@ -83,34 +85,22 @@ stdenv.mkDerivation rec {
]; ];
patches = [ patches = [
# https://github.com/gap-system/gap/pull/3294
(fetchpatch {
name = "add-make-install-targets.patch";
url = "https://github.com/gap-system/gap/commit/3361c172e6c5ff3bb3f01ba9d6f1dd4ad42cea80.patch";
sha256 = "1kwp9qnfvmlbpf1c3rs6j5m2jz22rj7a4hb5x1gj9vkpiyn5pdyj";
})
# Fix for locale specific tests causing issues. Already upstream. # Fix for locale specific tests causing issues. Already upstream.
# Backport of https://github.com/gap-system/gap/pull/4022 # Backport of https://github.com/gap-system/gap/pull/4022
# WHEN REMOVING: also remove the`rm tst/testinstall/strings.tst` line in
# `postPatch` below. That line is necessary since the patch is not intended
# for gap 4.10.
(fetchpatch { (fetchpatch {
name = "remove-locale-specific-tests.patch"; name = "remove-locale-specific-tests.patch";
url = "https://github.com/gap-system/gap/commit/c18b0c4215b5212a2cc4f305e2d5b94ba716bee8.patch"; url = "https://github.com/gap-system/gap/commit/c18b0c4215b5212a2cc4f305e2d5b94ba716bee8.patch";
excludes = ["tst/testinstall/stringobj.tst"]; sha256 = "sha256-De+T9Y7ewRT6plJrj2VR8axRvD/JCTYKOBWB7Bw0oq0=";
sha256 = "1mz5b4mbw2jdd1ypp5s0dy6pp0jsvwsxr2dm4kbkls20r1r192sc";
}) })
# fixes aarch64 gc crashes: https://github.com/gap-system/gap/pull/3965 # fixes aarch64 gc crashes: https://github.com/gap-system/gap/pull/3965
./mark-genstackfuncbags-as-noinline.patch (fetchpatch {
name = "mark-genstackfuncbags-as-noinline.patch";
url = "https://github.com/gap-system/gap/commit/f0a8f49ff8dad0a5fa77253d45457c6f40f96778.patch";
sha256 = "sha256-GU9tOP1stX2vn8m8kXOBupEpxIYArA76ibKL8eLn0MY=";
})
]; ];
postPatch = ''
# File not covered by the remove-locale-specific-tests.patch patch above.
rm tst/testinstall/strings.tst
'';
# "teststandard" is a superset of testinstall. It takes ~1h instead of ~1min. # "teststandard" is a superset of testinstall. It takes ~1h instead of ~1min.
# tests are run twice, once with all packages loaded and once without # tests are run twice, once with all packages loaded and once without
# checkTarget = "teststandard"; # checkTarget = "teststandard";
@ -155,7 +145,6 @@ stdenv.mkDerivation rec {
mkdir -p "$out/bin" "$out/share/gap/" mkdir -p "$out/bin" "$out/share/gap/"
mkdir -p "$out/share/gap"
echo "Copying files to target directory" echo "Copying files to target directory"
cp -ar . "$out/share/gap/build-dir" cp -ar . "$out/share/gap/build-dir"

View file

@ -1,6 +1,6 @@
{ lib, stdenv { lib, stdenv
, fetchFromGitHub , fetchFromGitHub
, fetchurl , fetchpatch
, autoreconfHook , autoreconfHook
, pkg-config , pkg-config
, flint , flint
@ -11,16 +11,36 @@
}: }:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
version = "0.7.26"; version = "0.7.27";
pname = "pynac"; pname = "pynac";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "pynac"; owner = "pynac";
repo = "pynac"; repo = "pynac";
rev = "pynac-${version}"; rev = "pynac-${version}";
sha256 = "09d2p74x1arkydlxy6pw4p4byi7r8q7f29w373h4d8a215kadc6d"; sha256 = "sha256-1HHCIeaNE2UsJNX92UlDGLJS8I4nC/8FnwX7Y4F9HpU=";
}; };
patches = [
(fetchpatch {
name = "handle_factor.patch";
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/pynac/patches/handle_factor.patch?h=9.3.rc3";
sha256 = "sha256-U1lb5qwBqZZgklfDMhBX4K5u8bz5x42O4w7hyNy2YVw=";
})
(fetchpatch {
name = "power_inf_loop.patch";
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/pynac/patches/power_inf_loop.patch?h=9.3.rc3";
sha256 = "sha256-VYeaJl8u2wl7FQ/6xnpZv1KpdNYEmJoPhuMrBADyTRs=";
})
(fetchpatch {
name = "too_much_sub.patch";
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/pynac/patches/too_much_sub.patch?h=9.3.rc3";
sha256 = "sha256-lw7xSQ/l+rzPu+ghWF4omYF0mKksGGPuuHJTktvbdis=";
})
];
buildInputs = [ buildInputs = [
flint flint
gmp gmp
@ -34,14 +54,6 @@ stdenv.mkDerivation rec {
pkg-config pkg-config
]; ];
patches = [
(fetchurl {
name = "py_ssize_t_clean.patch";
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/pynac/patches/py_ssize_t_clean.patch?h=9.2";
sha256 = "0l3gbg9hc4v671zf4w376krnk3wh8hj3649610nlvzzxckcryzab";
})
];
meta = with lib; { meta = with lib; {
description = "Python is Not a CAS -- modified version of Ginac"; description = "Python is Not a CAS -- modified version of Ginac";
longDescription = '' longDescription = ''

View file

@ -2,13 +2,13 @@
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "qalculate-gtk"; pname = "qalculate-gtk";
version = "3.17.0"; version = "3.18.0";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "qalculate"; owner = "qalculate";
repo = "qalculate-gtk"; repo = "qalculate-gtk";
rev = "v${version}"; rev = "v${version}";
sha256 = "sha256-Nxe1DZL8mh9aBWXQdlp5wC1l5b9mchlrRyE+LKC+yLI="; sha256 = "sha256-hE0di7B6dCnMmMBLgFkb6vPS4hS/7zD6BbviIucrn1I=";
}; };
hardeningDisable = [ "format" ]; hardeningDisable = [ "format" ];

View file

@ -19,6 +19,10 @@ let
linbox = pkgs.linbox.override { withSage = true; }; linbox = pkgs.linbox.override { withSage = true; };
pkg-config = pkgs.pkg-config; # not to be confused with pythonPackages.pkg-config pkg-config = pkgs.pkg-config; # not to be confused with pythonPackages.pkg-config
}; };
sage_docbuild = self.callPackage ./sage_docbuild.nix {
inherit sage-src;
};
}; };
}; };
@ -38,14 +42,16 @@ let
logo64 = "${sage-src}/doc/common/themes/sage/static/sageicon.png"; logo64 = "${sage-src}/doc/common/themes/sage/static/sageicon.png";
}; };
three = callPackage ./threejs-sage.nix { };
# A bash script setting various environment variables to tell sage where # A bash script setting various environment variables to tell sage where
# the files it's looking for are located. Also see `sage-env`. # the files it's looking for are located. Also see `sage-env`.
env-locations = callPackage ./env-locations.nix { env-locations = callPackage ./env-locations.nix {
inherit pari_data; inherit pari_data;
inherit singular maxima-ecl; inherit singular maxima-ecl;
inherit three;
ecl = maxima-ecl.ecl; ecl = maxima-ecl.ecl;
cysignals = python3.pkgs.cysignals; cysignals = python3.pkgs.cysignals;
three = nodePackages.three;
mathjax = nodePackages.mathjax; mathjax = nodePackages.mathjax;
}; };
@ -53,6 +59,7 @@ let
# the env-locations file. # the env-locations file.
sage-env = callPackage ./sage-env.nix { sage-env = callPackage ./sage-env.nix {
sagelib = python3.pkgs.sagelib; sagelib = python3.pkgs.sagelib;
sage_docbuild = python3.pkgs.sage_docbuild;
inherit env-locations; inherit env-locations;
inherit python3 singular palp flint pynac pythonEnv maxima-ecl; inherit python3 singular palp flint pynac pythonEnv maxima-ecl;
ecl = maxima-ecl.ecl; ecl = maxima-ecl.ecl;
@ -70,8 +77,8 @@ let
inherit python3 pythonEnv; inherit python3 pythonEnv;
inherit sage-env; inherit sage-env;
inherit pynac singular maxima-ecl; inherit pynac singular maxima-ecl;
inherit three;
pkg-config = pkgs.pkg-config; # not to be confused with pythonPackages.pkg-config pkg-config = pkgs.pkg-config; # not to be confused with pythonPackages.pkg-config
three = nodePackages.three;
}; };
# Doesn't actually build anything, just runs sage's testsuite. This is a # Doesn't actually build anything, just runs sage's testsuite. This is a
@ -86,6 +93,7 @@ let
pythonRuntimeDeps = with python3.pkgs; [ pythonRuntimeDeps = with python3.pkgs; [
sagelib sagelib
sage_docbuild
cvxopt cvxopt
networkx networkx
service-identity service-identity

View file

@ -1,13 +1,13 @@
diff --git a/src/sage/env.py b/src/sage/env.py diff --git a/src/sage/env.py b/src/sage/env.py
index 1ddfc7cfb9..45033d6328 100644 index 2908f5d04f..81dfd75c0d 100644
--- a/src/sage/env.py --- a/src/sage/env.py
+++ b/src/sage/env.py +++ b/src/sage/env.py
@@ -203,97 +203,13 @@ var('ARB_LIBRARY', 'arb') @@ -218,93 +218,12 @@ NTL_LIBDIR = var("NTL_LIBDIR")
var('SAGE_BANNER', '') SAGE_BANNER = var("SAGE_BANNER", "")
var('SAGE_IMPORTALL', 'yes') SAGE_IMPORTALL = var("SAGE_IMPORTALL", "yes")
- -
-def _get_shared_lib_filename(libname, *additional_libnames): -def _get_shared_lib_path(*libnames: str) -> Optional[str]:
- """ - """
- Return the full path to a shared library file installed in - Return the full path to a shared library file installed in
- ``$SAGE_LOCAL/lib`` or the directories associated with the - ``$SAGE_LOCAL/lib`` or the directories associated with the
@ -25,80 +25,76 @@ index 1ddfc7cfb9..45033d6328 100644
- For distributions like Debian that use a multiarch layout, we also try the - For distributions like Debian that use a multiarch layout, we also try the
- multiarch lib paths (i.e. ``/usr/lib/<arch>/``). - multiarch lib paths (i.e. ``/usr/lib/<arch>/``).
- -
- This returns ``None`` if the file does not exist. - This returns ``None`` if no matching library file could be found.
- -
- EXAMPLES:: - EXAMPLES::
- -
- sage: import sys - sage: import sys
- sage: from fnmatch import fnmatch - sage: from fnmatch import fnmatch
- sage: from sage.env import _get_shared_lib_filename - sage: from sage.env import _get_shared_lib_path
- sage: lib_filename = _get_shared_lib_filename("Singular", - sage: lib_filename = _get_shared_lib_path("Singular", "singular-Singular")
- ....: "singular-Singular")
- sage: if sys.platform == 'cygwin': - sage: if sys.platform == 'cygwin':
- ....: pattern = "*/cygSingular-*.dll" - ....: pattern = "*/cygSingular-*.dll"
- ....: elif sys.platform == 'darwin': - ....: elif sys.platform == 'darwin':
- ....: pattern = "*/libSingular.dylib" - ....: pattern = "*/libSingular-*.dylib"
- ....: else: - ....: else:
- ....: pattern = "*/lib*Singular.so" - ....: pattern = "*/lib*Singular-*.so"
- sage: fnmatch(lib_filename, pattern) - sage: fnmatch(str(lib_filename), pattern)
- True - True
- sage: _get_shared_lib_filename("an_absurd_lib") is None - sage: _get_shared_lib_path("an_absurd_lib") is None
- True - True
- """ - """
- -
- for libname in (libname,) + additional_libnames: - for libname in libnames:
- search_directories: List[Path] = []
- patterns: List[str] = []
- if sys.platform == 'cygwin': - if sys.platform == 'cygwin':
- # Later down we take the last matching DLL found, so search - # Later down we take the first matching DLL found, so search
- # SAGE_LOCAL second so that it takes precedence - # SAGE_LOCAL first so that it takes precedence
- bindirs = [ - search_directories = [
- sysconfig.get_config_var('BINDIR'), - Path(SAGE_LOCAL) / 'bin',
- os.path.join(SAGE_LOCAL, 'bin') - Path(sysconfig.get_config_var('BINDIR')),
- ] - ]
- pats = ['cyg{}.dll'.format(libname), 'cyg{}-*.dll'.format(libname)] - # Note: The following is not very robust, since if there are multible
- filenames = []
- for bindir in bindirs:
- for pat in pats:
- filenames += glob.glob(os.path.join(bindir, pat))
-
- # Note: This is not very robust, since if there are multi DLL
- # versions for the same library this just selects one more or less - # versions for the same library this just selects one more or less
- # at arbitrary. However, practically speaking, on Cygwin, there - # at arbitrary. However, practically speaking, on Cygwin, there
- # will only ever be one version - # will only ever be one version
- if filenames: - patterns = [f'cyg{libname}.dll', f'cyg{libname}-*.dll']
- return filenames[-1]
- else: - else:
- if sys.platform == 'darwin': - if sys.platform == 'darwin':
- ext = 'dylib' - ext = 'dylib'
- else: - else:
- ext = 'so' - ext = 'so'
- -
- libdirs = [ - search_directories = [Path(SAGE_LOCAL) / 'lib']
- os.path.join(SAGE_LOCAL, 'lib'), - libdir = sysconfig.get_config_var('LIBDIR')
- sysconfig.get_config_var('LIBDIR') - if libdir is not None:
- ] - libdir = Path(libdir)
- multilib = sysconfig.get_config_var('MULTILIB') - search_directories.append(libdir)
- if multilib:
- libdirs.insert(1, os.path.join(libdirs[0], multilib))
- -
- for libdir in libdirs: - multiarchlib = sysconfig.get_config_var('MULTIARCH')
- basename = 'lib{}.{}'.format(libname, ext) - if multiarchlib is not None:
- filename = os.path.join(libdir, basename) - search_directories.append(libdir / multiarchlib),
- if os.path.exists(filename): -
- return filename - patterns = [f'lib{libname}.{ext}']
-
- for directory in search_directories:
- for pattern in patterns:
- path = next(directory.glob(pattern), None)
- if path is not None:
- return str(path.resolve())
- -
- # Just return None if no files were found - # Just return None if no files were found
- return None - return None
-
- -
# locate singular shared object # locate singular shared object
# On Debian it's libsingular-Singular so try that as well # On Debian it's libsingular-Singular so try that as well
-SINGULAR_SO = _get_shared_lib_filename('Singular', 'singular-Singular') -SINGULAR_SO = var("SINGULAR_SO", _get_shared_lib_path("Singular", "singular-Singular"))
+SINGULAR_SO = '/default' +SINGULAR_SO = var("SINGULAR_SO", '/default')
var('SINGULAR_SO', SINGULAR_SO)
# locate libgap shared object # locate libgap shared object
-GAP_SO= _get_shared_lib_filename('gap','') -GAP_SO = var("GAP_SO", _get_shared_lib_path("gap", ""))
+GAP_SO = '/default' +GAP_SO = var("GAP_SO", '/default')
var('GAP_SO', GAP_SO)
# post process # post process
if ' ' in DOT_SAGE:

View file

@ -1,16 +0,0 @@
diff --git a/src/sage/repl/rich_output/display_manager.py b/src/sage/repl/rich_output/display_manager.py
index fb21f7a9c9..f39470777d 100644
--- a/src/sage/repl/rich_output/display_manager.py
+++ b/src/sage/repl/rich_output/display_manager.py
@@ -749,9 +749,9 @@ class DisplayManager(SageObject):
import sage.env
import re
import os
- with open(os.path.join(sage.env.THREEJS_DIR, 'build', 'three.min.js')) as f:
+ with open(os.path.join(sage.env.THREEJS_DIR, 'build', 'three.js')) as f:
text = f.read().replace('\n','')
- version = re.search(r'REVISION="(\d+)"', text).group(1)
+ version = re.search(r"REVISION = '(\d+)'", text).group(1)
return """
<script src="https://cdn.jsdelivr.net/gh/mrdoob/three.js@r{0}/build/three.min.js"></script>
<script src="https://cdn.jsdelivr.net/gh/mrdoob/three.js@r{0}/examples/js/controls/OrbitControls.js"></script>

View file

@ -1,131 +0,0 @@
diff --git a/src/sage/libs/eclib/interface.py b/src/sage/libs/eclib/interface.py
index e898456720..6b98c12328 100644
--- a/src/sage/libs/eclib/interface.py
+++ b/src/sage/libs/eclib/interface.py
@@ -758,78 +758,78 @@ class mwrank_MordellWeil(SageObject):
sage: EQ = mwrank_MordellWeil(E, verbose=True)
sage: EQ.search(1)
- P1 = [0:1:0] is torsion point, order 1
- P1 = [-3:0:1] is generator number 1
- saturating up to 20...Checking 2-saturation
+ P1 = [0:1:0] is torsion point, order 1
+ P1 = [-3:0:1] is generator number 1
+ saturating up to 20...Checking 2-saturation...
Points have successfully been 2-saturated (max q used = 7)
- Checking 3-saturation
+ Checking 3-saturation...
Points have successfully been 3-saturated (max q used = 7)
- Checking 5-saturation
+ Checking 5-saturation...
Points have successfully been 5-saturated (max q used = 23)
- Checking 7-saturation
+ Checking 7-saturation...
Points have successfully been 7-saturated (max q used = 41)
- Checking 11-saturation
+ Checking 11-saturation...
Points have successfully been 11-saturated (max q used = 17)
- Checking 13-saturation
+ Checking 13-saturation...
Points have successfully been 13-saturated (max q used = 43)
- Checking 17-saturation
+ Checking 17-saturation...
Points have successfully been 17-saturated (max q used = 31)
- Checking 19-saturation
+ Checking 19-saturation...
Points have successfully been 19-saturated (max q used = 37)
done
- P2 = [-2:3:1] is generator number 2
- saturating up to 20...Checking 2-saturation
+ P2 = [-2:3:1] is generator number 2
+ saturating up to 20...Checking 2-saturation...
possible kernel vector = [1,1]
This point may be in 2E(Q): [14:-52:1]
...and it is!
Replacing old generator #1 with new generator [1:-1:1]
Points have successfully been 2-saturated (max q used = 7)
Index gain = 2^1
- Checking 3-saturation
+ Checking 3-saturation...
Points have successfully been 3-saturated (max q used = 13)
- Checking 5-saturation
+ Checking 5-saturation...
Points have successfully been 5-saturated (max q used = 67)
- Checking 7-saturation
+ Checking 7-saturation...
Points have successfully been 7-saturated (max q used = 53)
- Checking 11-saturation
+ Checking 11-saturation...
Points have successfully been 11-saturated (max q used = 73)
- Checking 13-saturation
+ Checking 13-saturation...
Points have successfully been 13-saturated (max q used = 103)
- Checking 17-saturation
+ Checking 17-saturation...
Points have successfully been 17-saturated (max q used = 113)
- Checking 19-saturation
+ Checking 19-saturation...
Points have successfully been 19-saturated (max q used = 47)
done (index = 2).
Gained index 2, new generators = [ [1:-1:1] [-2:3:1] ]
- P3 = [-14:25:8] is generator number 3
- saturating up to 20...Checking 2-saturation
+ P3 = [-14:25:8] is generator number 3
+ saturating up to 20...Checking 2-saturation...
Points have successfully been 2-saturated (max q used = 11)
- Checking 3-saturation
+ Checking 3-saturation...
Points have successfully been 3-saturated (max q used = 13)
- Checking 5-saturation
+ Checking 5-saturation...
Points have successfully been 5-saturated (max q used = 71)
- Checking 7-saturation
+ Checking 7-saturation...
Points have successfully been 7-saturated (max q used = 101)
- Checking 11-saturation
+ Checking 11-saturation...
Points have successfully been 11-saturated (max q used = 127)
- Checking 13-saturation
+ Checking 13-saturation...
Points have successfully been 13-saturated (max q used = 151)
- Checking 17-saturation
+ Checking 17-saturation...
Points have successfully been 17-saturated (max q used = 139)
- Checking 19-saturation
+ Checking 19-saturation...
Points have successfully been 19-saturated (max q used = 179)
done (index = 1).
- P4 = [-1:3:1] = -1*P1 + -1*P2 + -1*P3 (mod torsion)
- P4 = [0:2:1] = 2*P1 + 0*P2 + 1*P3 (mod torsion)
- P4 = [2:13:8] = -3*P1 + 1*P2 + -1*P3 (mod torsion)
- P4 = [1:0:1] = -1*P1 + 0*P2 + 0*P3 (mod torsion)
- P4 = [2:0:1] = -1*P1 + 1*P2 + 0*P3 (mod torsion)
- P4 = [18:7:8] = -2*P1 + -1*P2 + -1*P3 (mod torsion)
- P4 = [3:3:1] = 1*P1 + 0*P2 + 1*P3 (mod torsion)
- P4 = [4:6:1] = 0*P1 + -1*P2 + -1*P3 (mod torsion)
- P4 = [36:69:64] = 1*P1 + -2*P2 + 0*P3 (mod torsion)
- P4 = [68:-25:64] = -2*P1 + -1*P2 + -2*P3 (mod torsion)
- P4 = [12:35:27] = 1*P1 + -1*P2 + -1*P3 (mod torsion)
+ P4 = [-1:3:1] = -1*P1 + -1*P2 + -1*P3 (mod torsion)
+ P4 = [0:2:1] = 2*P1 + 0*P2 + 1*P3 (mod torsion)
+ P4 = [2:13:8] = -3*P1 + 1*P2 + -1*P3 (mod torsion)
+ P4 = [1:0:1] = -1*P1 + 0*P2 + 0*P3 (mod torsion)
+ P4 = [2:0:1] = -1*P1 + 1*P2 + 0*P3 (mod torsion)
+ P4 = [18:7:8] = -2*P1 + -1*P2 + -1*P3 (mod torsion)
+ P4 = [3:3:1] = 1*P1 + 0*P2 + 1*P3 (mod torsion)
+ P4 = [4:6:1] = 0*P1 + -1*P2 + -1*P3 (mod torsion)
+ P4 = [36:69:64] = 1*P1 + -2*P2 + 0*P3 (mod torsion)
+ P4 = [68:-25:64] = -2*P1 + -1*P2 + -2*P3 (mod torsion)
+ P4 = [12:35:27] = 1*P1 + -1*P2 + -1*P3 (mod torsion)
sage: EQ
Subgroup of Mordell-Weil group: [[1:-1:1], [-2:3:1], [-14:25:8]]
@@ -1076,7 +1076,7 @@ class mwrank_MordellWeil(SageObject):
sage: EQ.search(1)
P1 = [0:1:0] is torsion point, order 1
P1 = [-3:0:1] is generator number 1
- saturating up to 20...Checking 2-saturation
+ saturating up to 20...Checking 2-saturation...
...
P4 = [12:35:27] = 1*P1 + -1*P2 + -1*P3 (mod torsion)
sage: EQ

View file

@ -1,8 +1,8 @@
diff --git a/src/sage_setup/docbuild/__init__.py b/src/sage_setup/docbuild/__init__.py diff --git a/src/sage_docbuild/__init__.py b/src/sage_docbuild/__init__.py
index 73a078e619..059125c59f 100644 index 79005b903a..fbe6fe2595 100644
--- a/src/sage_setup/docbuild/__init__.py --- a/src/sage_docbuild/__init__.py
+++ b/src/sage_setup/docbuild/__init__.py +++ b/src/sage_docbuild/__init__.py
@@ -86,27 +86,6 @@ def builder_helper(type): @@ -85,27 +85,6 @@ def builder_helper(type):
""" """
Returns a function which builds the documentation for Returns a function which builds the documentation for
output type ``type``. output type ``type``.
@ -11,16 +11,16 @@ index 73a078e619..059125c59f 100644
- -
- Check that :trac:`25161` has been resolved:: - Check that :trac:`25161` has been resolved::
- -
- sage: from sage_setup.docbuild import DocBuilder, setup_parser - sage: from sage_docbuild import DocBuilder, setup_parser
- sage: DocBuilder._options = setup_parser().parse_args([])[0] # builder_helper needs _options to be set - sage: DocBuilder._options = setup_parser().parse_args([])[0] # builder_helper needs _options to be set
- -
- sage: import sage_setup.docbuild.sphinxbuild - sage: import sage_docbuild.sphinxbuild
- sage: def raiseBaseException(): - sage: def raiseBaseException():
- ....: raise BaseException("abort pool operation") - ....: raise BaseException("abort pool operation")
- sage: original_runsphinx, sage_setup.docbuild.sphinxbuild.runsphinx = sage_setup.docbuild.sphinxbuild.runsphinx, raiseBaseException - sage: original_runsphinx, sage_docbuild.sphinxbuild.runsphinx = sage_docbuild.sphinxbuild.runsphinx, raiseBaseException
- -
- sage: from sage_setup.docbuild import builder_helper, build_ref_doc - sage: from sage_docbuild import builder_helper, build_ref_doc
- sage: from sage_setup.docbuild import _build_many as build_many - sage: from sage_docbuild import _build_many as build_many
- sage: helper = builder_helper("html") - sage: helper = builder_helper("html")
- sage: try: - sage: try:
- ....: build_many(build_ref_doc, [("docname", "en", "html", {})]) - ....: build_many(build_ref_doc, [("docname", "en", "html", {})])
@ -30,24 +30,24 @@ index 73a078e619..059125c59f 100644
""" """
def f(self, *args, **kwds): def f(self, *args, **kwds):
output_dir = self._output_dir(type) output_dir = self._output_dir(type)
@@ -128,10 +107,9 @@ def builder_helper(type): @@ -127,10 +106,9 @@ def builder_helper(type):
logger.debug(build_command) logger.debug(build_command)
# Run Sphinx with Sage's special logger # Run Sphinx with Sage's special logger
- sys.argv = ["sphinx-build"] + build_command.split() - sys.argv = ["sphinx-build"] + build_command.split()
- from .sphinxbuild import runsphinx - from .sphinxbuild import runsphinx
+ args = "python3 -um sage_setup.docbuild.sphinxbuild -N".split() + build_command.split() + args = "python3 -um sage_docbuild.sphinxbuild -N".split() + build_command.split()
try: try:
- runsphinx() - runsphinx()
+ subprocess.check_call(args) + subprocess.check_call(args)
except Exception: except Exception:
if ABORT_ON_ERROR: if ABORT_ON_ERROR:
raise raise
diff --git a/src/sage_setup/docbuild/sphinxbuild.py b/src/sage_setup/docbuild/sphinxbuild.py diff --git a/src/sage_docbuild/sphinxbuild.py b/src/sage_docbuild/sphinxbuild.py
index fe7eba43b2..463790965c 100644 index f58f6c61d7..ef51a55411 100644
--- a/src/sage_setup/docbuild/sphinxbuild.py --- a/src/sage_docbuild/sphinxbuild.py
+++ b/src/sage_setup/docbuild/sphinxbuild.py +++ b/src/sage_docbuild/sphinxbuild.py
@@ -321,3 +321,8 @@ def runsphinx(): @@ -326,3 +326,8 @@ def runsphinx():
sys.stderr = saved_stderr sys.stderr = saved_stderr
sys.stdout.flush() sys.stdout.flush()
sys.stderr.flush() sys.stderr.flush()
@ -1,25 +0,0 @@
diff --git a/src/sage/interfaces/sympy.py b/src/sage/interfaces/sympy.py
index cc35a42a9f..6e577d5d8d 100644
--- a/src/sage/interfaces/sympy.py
+++ b/src/sage/interfaces/sympy.py
@@ -397,7 +397,7 @@ def _sympysage_rf(self):
sage: from sympy import Symbol, rf
sage: _ = var('x, y')
sage: rfxy = rf(Symbol('x'), Symbol('y'))
- sage: assert rising_factorial(x,y)._sympy_() == rfxy.rewrite('gamma')
+ sage: assert rising_factorial(x,y)._sympy_() == rfxy.rewrite('gamma', piecewise=False)
sage: assert rising_factorial(x,y) == rfxy._sage_()
"""
from sage.arith.all import rising_factorial
diff --git a/src/sage/symbolic/expression.pyx b/src/sage/symbolic/expression.pyx
index 7c18ec1efa..c2619ac42d 100644
--- a/src/sage/symbolic/expression.pyx
+++ b/src/sage/symbolic/expression.pyx
@@ -955,6 +955,6 @@ cdef class Expression(CommutativeRingElement):
sage: unicode_art(13 - I)
13 -
sage: unicode_art(1.3 - I)
- 1.3 - 1.0⋅ⅈ
+ 1.3 -
sage: unicode_art(cos(I))
cosh(1)
@ -2,6 +2,7 @@
, lib , lib
, writeTextFile , writeTextFile
, sagelib , sagelib
, sage_docbuild
, env-locations , env-locations
, gfortran , gfortran
, bash , bash
@ -191,6 +192,7 @@ writeTextFile rec {
# for find_library # for find_library
export DYLD_LIBRARY_PATH="${lib.makeLibraryPath [stdenv.cc.libc singular]}''${DYLD_LIBRARY_PATH:+:}$DYLD_LIBRARY_PATH" export DYLD_LIBRARY_PATH="${lib.makeLibraryPath [stdenv.cc.libc singular]}''${DYLD_LIBRARY_PATH:+:}$DYLD_LIBRARY_PATH"
''; '';
} // { } // { # equivalent of `passthru`, which `writeTextFile` doesn't support
lib = sagelib; # equivalent of `passthru`, which `writeTextFile` doesn't support lib = sagelib;
docbuild = sage_docbuild;
} }
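The `// { ... }` merge at the end of the file above is how the extra attributes (here `lib` and `docbuild`) get attached to the derivation returned by `writeTextFile`, which has no `passthru` argument of its own. A minimal sketch of the same idiom, with illustrative names not taken from this commit:

    # Sketch only: writeTextFile returns a derivation, and `//` merges extra
    # attributes onto it after the fact, mimicking passthru.
    { writeTextFile, sagelib, sage_docbuild }:
    (writeTextFile {
      name = "sage-env-example";
      text = "# illustrative contents";
    }) // {
      lib = sagelib;
      docbuild = sage_docbuild;
    }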
@ -24,14 +24,14 @@ let
); );
in in
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
version = "9.2"; version = "9.3.rc4";
pname = "sage-src"; pname = "sage-src";
src = fetchFromGitHub { src = fetchFromGitHub {
owner = "sagemath"; owner = "sagemath";
repo = "sage"; repo = "sage";
rev = version; rev = version;
sha256 = "103j8d5x6szl9fxaz0dvdi4y47q1af9h9y5hmjh2xayi62qmp5ql"; sha256 = "sha256-LDY07By2j6JagkgT9zeDJ93+m2/oXXEnDRTDzmR8ftk=";
}; };
# Patches needed because of particularities of nix or the way this is packaged. # Patches needed because of particularities of nix or the way this is packaged.
@ -53,14 +53,6 @@ stdenv.mkDerivation rec {
# Parallelize docubuild using subprocesses, fixing an isolation issue. See # Parallelize docubuild using subprocesses, fixing an isolation issue. See
# https://groups.google.com/forum/#!topic/sage-packaging/YGOm8tkADrE # https://groups.google.com/forum/#!topic/sage-packaging/YGOm8tkADrE
./patches/sphinx-docbuild-subprocesses.patch ./patches/sphinx-docbuild-subprocesses.patch
# Register sorted dict pprinter earlier (https://trac.sagemath.org/ticket/31053)
(fetchSageDiff {
base = "9.3.beta4";
name = "register-pretty-printer-earlier.patch";
rev = "d658230ce06ca19f4a3b3a4576297ee82f2d2151";
sha256 = "sha256-9mPUV7K5PoLDH2vVaYaOfvDLDpmxU0Aj7m/eaXYotDs=";
})
]; ];
# Since sage unfortunately does not release bugfix releases, packagers must # Since sage unfortunately does not release bugfix releases, packagers must
@ -70,31 +62,6 @@ stdenv.mkDerivation rec {
# To help debug the transient error in # To help debug the transient error in
# https://trac.sagemath.org/ticket/23087 when it next occurs. # https://trac.sagemath.org/ticket/23087 when it next occurs.
./patches/configurationpy-error-verbose.patch ./patches/configurationpy-error-verbose.patch
# fix intermittent errors in Sage 9.2's psage.py (this patch is
# already included in Sage 9.3): https://trac.sagemath.org/ticket/30730
(fetchSageDiff {
base = "9.2.rc2";
name = "fix-psage-is-locked.patch";
rev = "75df605f216ddc7b6ca719be942d666b241520e9";
sha256 = "0g9pl1wbb3sgs26d3bvv70cpa77sfskylv4kd255y1794f1fgk4q";
})
# fix intermittent errors in sagespawn.pyx: https://trac.sagemath.org/ticket/31052
(fetchSageDiff {
base = "9.2";
name = "sagespawn-implicit-casting.patch";
rev = "2959ac792ebd6107fe87c9af1541083de5ba02d6";
sha256 = "sha256-bWIpEGir9Kawak5CJegBMNcHm/CqhWmdru+emeSsvO0=";
})
# fix intermittent errors in doctest/test.py: https://trac.sagemath.org/ticket/26912
(fetchSageDiff {
base = "9.3.beta8";
name = "set-cysignals-crash-ndebug.patch";
rev = "ca5257a5d0f32efc9f8f07e126020856270b1a18";
sha256 = "sha256-KViw63xE3O0eUiOYzoxNrr4NL+csql9GPJLDJCf/EZs=";
})
]; ];
# Patches needed because of package updates. We could just pin the versions of # Patches needed because of package updates. We could just pin the versions of
@ -111,120 +78,29 @@ stdenv.mkDerivation rec {
# ignore a deprecation warning for usage of `cmp` in the attrs library in the doctests # ignore a deprecation warning for usage of `cmp` in the attrs library in the doctests
./patches/ignore-cmp-deprecation.patch ./patches/ignore-cmp-deprecation.patch
# adapt sage's Image class to pillow 8.0.1 (https://trac.sagemath.org/ticket/30971)
(fetchSageDiff {
base = "9.3.beta2";
name = "pillow-8.0.1-update.patch";
rev = "f05f2d0aac9c4b5abe68105cee2cc7f2c8461847";
sha256 = "sha256-uY2UlgSd5hhOUUukB4Xc3Gjy0/e7p/qyq9jdvz10IOs=";
})
# don't use deprecated numpy type aliases (https://trac.sagemath.org/ticket/31364)
(fetchSageDiff {
base = "9.3.beta7";
name = "dont-use-deprecated-numpy-type-aliases.patch";
rev = "dfdef60515d4a4269e82d91280f76a7fdf10bf97";
sha256 = "sha256-77/3LkT5J7DQN8IPlGJKB6ZcJPaF7xwje06JNns+0AE=";
})
# fix test output with sympy 1.7 (https://trac.sagemath.org/ticket/30985)
./patches/sympy-1.7-update.patch
# workaround until we use sage's fork of threejs, which contains a "version" file
./patches/dont-grep-threejs-version-from-minified-js.patch
# updated eclib output has punctuation changes and tidier whitespace
./patches/eclib-20210223-test-formatting.patch
# upgrade arb to 2.18.1 (https://trac.sagemath.org/ticket/28623)
(fetchSageDiff {
base = "9.3.beta3";
name = "arb-2.18.1-update.patch";
rev = "0c9c4ed35c2eaf34ae0d19387c07b7f460e4abce";
sha256 = "sha256-CjOJIsyyVCziAfvE6pWSihPO35IZMcY2/taXAsqhPLY=";
})
# giac 1.6.0-47 update (https://trac.sagemath.org/ticket/30537)
(fetchSageDiff {
base = "9.3.beta7";
name = "giac-1.6.0-47-update.patch";
rev = "f05720bf63dfaf33a4e3b6d3ed2c2c0ec46b5d31";
sha256 = "sha256-gDUq+84eXd5GxLBWUSI61GMJpBF2KX4LBVOt3mS1NF8=";
})
# Make gcd/lcm interact better with pari and gmpy2 (https://trac.sagemath.org/ticket/30849)
# needed for pari 2.13.1 update, which we will do in the future
(fetchSageDiff {
base = "9.3.beta0";
name = "make-gcd-lcm-interact-better-with-pari-and-gmpy2.patch";
rev = "75c1516f0abb9e6f8c335e38e4031f6ef674ed30";
sha256 = "sha256-RukkieIZcXNrju904H2oyGKdtpdE+9vNzvyjN2IBNg0=";
})
# cypari 2.1.2 update (https://trac.sagemath.org/ticket/31029)
(fetchSageDiff {
base = "9.3.beta3";
name = "cypari-2.1.2-update.patch";
rev = "b9aadfd08e81d74ca7c229bb80eb853b592887d0";
sha256 = "sha256-eKaMy7kpu+YKdL8bPStgocxBCTfc2Z/10RrGy2LENFw=";
})
]; ];
patches = nixPatches ++ bugfixPatches ++ packageUpgradePatches; patches = nixPatches ++ bugfixPatches ++ packageUpgradePatches;
postPatch = '' postPatch = ''
# make sure shebangs etc are fixed, but sage-python23 still works
find . -type f -exec sed \
-e 's/sage-python23/python3/g' \
-i {} \;
echo '#!${runtimeShell}
python3 "$@"' > build/bin/sage-python23
# Make sure sage can at least be imported without setting any environment # Make sure sage can at least be imported without setting any environment
# variables. It won't be close to feature complete though. # variables. It won't be close to feature complete though.
sed -i \ sed -i \
"s|var('SAGE_ROOT'.*|var('SAGE_ROOT', '$out')|" \ "s|var(\"SAGE_ROOT\".*|var(\"SAGE_ROOT\", \"$out\")|" \
src/sage/env.py src/sage/env.py
# Do not use sage-env-config (generated by ./configure). # src/doc/en/reference/spkg/conf.py expects index.rst in its directory,
# Instead variables are set manually. # a list of external packages in the sage distribution (build/pkgs)
echo '# do nothing' > src/bin/sage-env-config # generated by the bootstrap script (which we don't run). this is not
''; # relevant for other distributions, so remove it.
rm src/doc/en/reference/spkg/conf.py
sed -i "/spkg/d" src/doc/en/reference/index.rst
# Test src/doc/en/reference/spkg/conf.py will fail if # the bootstrap script also generates installation instructions for
# src/doc/en/reference/spkg/index.rst is not generated. It is # arch, debian, fedora, cygwin and homebrew using data from build/pkgs.
# generated by src/doc/bootstrap, so I've copied the relevant part # we don't run the bootstrap script, so disable including the generated
# here. An alternative would be to create an empty # files. docbuilding fails otherwise.
# src/doc/en/reference/spkg/index.rst file. sed -i "/literalinclude/d" src/doc/en/installation/source.rst
configurePhase = ''
OUTPUT_DIR="src/doc/en/reference/spkg"
mkdir -p "$OUTPUT_DIR"
OUTPUT_INDEX="$OUTPUT_DIR"/index.rst
cat > "$OUTPUT_INDEX" <<EOF
External Packages
=================
.. toctree::
:maxdepth: 1
EOF
for PKG_SCRIPTS in build/pkgs/*; do
if [ -d "$PKG_SCRIPTS" ]; then
PKG_BASE=$(basename "$PKG_SCRIPTS")
if [ -f "$PKG_SCRIPTS"/SPKG.rst ]; then
# Instead of just copying, we may want to call
# a version of sage-spkg-info to format extra information.
cp "$PKG_SCRIPTS"/SPKG.rst "$OUTPUT_DIR"/$PKG_BASE.rst
echo >> "$OUTPUT_INDEX" " $PKG_BASE"
fi
fi
done
cat >> "$OUTPUT_INDEX" <<EOF
.. include:: ../footer.txt
EOF
''; '';
buildPhase = "# do nothing"; buildPhase = "# do nothing";
@ -0,0 +1,20 @@
{ buildPythonPackage
, sage-src
, sphinx
}:
buildPythonPackage rec {
version = src.version;
pname = "sage_docbuild";
src = sage-src;
propagatedBuildInputs = [
sphinx
];
preBuild = ''
cd build/pkgs/sage_docbuild/src
'';
doCheck = false; # we will run tests in sagedoc.nix
}
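The file above packages Sage's documentation builder (`sage_docbuild`) as its own Python package built from the main Sage source tree. A rough sketch of how such a derivation could be instantiated from the surrounding Sage package set; the `callPackage` call and the file name are assumptions for illustration, not taken from this commit:

    # Hypothetical wiring: python3.pkgs.callPackage supplies buildPythonPackage
    # and sphinx from the Python package set; sage-src is passed in explicitly.
    { python3, sage-src }:
    {
      sage_docbuild = python3.pkgs.callPackage ./sage_docbuild.nix {
        inherit sage-src;
      };
    }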
@ -23,6 +23,7 @@ stdenv.mkDerivation rec {
jmol jmol
cddlib cddlib
] ++ (with python3.pkgs; [ ] ++ (with python3.pkgs; [
sage_docbuild
psutil psutil
future future
sphinx sphinx
@ -44,13 +45,6 @@ stdenv.mkDerivation rec {
chmod -R 755 "$SAGE_DOC_SRC_OVERRIDE" chmod -R 755 "$SAGE_DOC_SRC_OVERRIDE"
''; '';
postPatch = ''
# src/doc/bootstrap generates installation instructions for
# arch, debian, fedora, cygwin and homebrew. as a hack, disable
# including the generated files.
sed -i "/literalinclude/d" $SAGE_DOC_SRC_OVERRIDE/en/installation/source.rst
'';
buildPhase = '' buildPhase = ''
export SAGE_NUM_THREADS="$NIX_BUILD_CORES" export SAGE_NUM_THREADS="$NIX_BUILD_CORES"
export HOME="$TMPDIR/sage_home" export HOME="$TMPDIR/sage_home"
@ -59,13 +53,13 @@ stdenv.mkDerivation rec {
# needed to link them in the sage docs using intersphinx # needed to link them in the sage docs using intersphinx
export PPLPY_DOCS=${python3.pkgs.pplpy.doc}/share/doc/pplpy export PPLPY_DOCS=${python3.pkgs.pplpy.doc}/share/doc/pplpy
# adapted from src/doc/bootstrap # adapted from src/doc/bootstrap (which we don't run)
OUTPUT_DIR="$SAGE_DOC_SRC_OVERRIDE/en/reference/repl" OUTPUT_DIR="$SAGE_DOC_SRC_OVERRIDE/en/reference/repl"
mkdir -p "$OUTPUT_DIR" mkdir -p "$OUTPUT_DIR"
OUTPUT="$OUTPUT_DIR/options.txt" OUTPUT="$OUTPUT_DIR/options.txt"
${sage-with-env}/bin/sage -advanced > "$OUTPUT" ${sage-with-env}/bin/sage -advanced > "$OUTPUT"
${sage-with-env}/bin/sage -python -m sage_setup.docbuild \ ${sage-with-env}/bin/sage --docbuild \
--mathjax \ --mathjax \
--no-pdf-links \ --no-pdf-links \
all html all html
@ -63,7 +63,6 @@ assert (!blas.isILP64) && (!lapack.isILP64);
# `sage-tests` and will not have html docs without `sagedoc`. # `sage-tests` and will not have html docs without `sagedoc`.
buildPythonPackage rec { buildPythonPackage rec {
format = "other";
version = src.version; version = src.version;
pname = "sagelib"; pname = "sagelib";
src = sage-src; src = sage-src;
@ -74,6 +73,7 @@ buildPythonPackage rec {
jupyter_core jupyter_core
pkg-config pkg-config
pip # needed to query installed packages pip # needed to query installed packages
ecl
]; ];
buildInputs = [ buildInputs = [
@ -130,7 +130,7 @@ buildPythonPackage rec {
sqlite sqlite
]; ];
buildPhase = '' preBuild = ''
export SAGE_ROOT="$PWD" export SAGE_ROOT="$PWD"
export SAGE_LOCAL="$SAGE_ROOT" export SAGE_LOCAL="$SAGE_ROOT"
export SAGE_SHARE="$SAGE_LOCAL/share" export SAGE_SHARE="$SAGE_LOCAL/share"
@ -146,15 +146,13 @@ buildPythonPackage rec {
mkdir -p "$SAGE_SHARE/sage/ext/notebook-ipython" mkdir -p "$SAGE_SHARE/sage/ext/notebook-ipython"
mkdir -p "var/lib/sage/installed" mkdir -p "var/lib/sage/installed"
source build/bin/sage-dist-helpers # src/setup.py should not be used, see https://trac.sagemath.org/ticket/31377#comment:124
cd src cd build/pkgs/sagelib/src
${python.interpreter} -u setup.py --no-user-cfg build
''; '';
installPhase = '' postInstall = ''
${python.interpreter} -u setup.py --no-user-cfg install --prefix=$out
rm -r "$out/${python.sitePackages}/sage/cython_debug" rm -r "$out/${python.sitePackages}/sage/cython_debug"
''; '';
doCheck = false; # we will run tests in sage-tests.nix
} }
@ -0,0 +1,18 @@
{ stdenv, fetchFromGitHub }:
stdenv.mkDerivation rec {
pname = "threejs-sage";
version = "r122";
src = fetchFromGitHub {
owner = "sagemath";
repo = "threejs-sage";
rev = version;
sha256 = "sha256-xPAPt36Fon3hYQq6SOmGkIyUzAII2LMl10nqYG4UPI0=";
};
installPhase = ''
mkdir -p $out/lib/node_modules/three
cp -r build version $out/lib/node_modules/three
'';
}
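The `threejs-sage` derivation above vendors Sage's fork of three.js along with its `version` file, the file whose absence the `dont-grep-threejs-version-from-minified-js.patch` earlier in this commit works around. A consumer could locate the vendored tree roughly as follows; the attribute names are assumptions, not taken from this commit:

    # Sketch only: paths under the threejs-sage output as installed above.
    { threejs-sage }:
    {
      threejsRoot = "${threejs-sage}/lib/node_modules/three";
      threejsVersionFile = "${threejs-sage}/lib/node_modules/three/version";
    }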
@ -1,57 +1,96 @@
{ stdenv, fetchurl, gmp, bison, perl, ncurses, readline, coreutils, pkg-config { stdenv, fetchFromGitHub, gmp, bison, perl, ncurses, readline, coreutils, pkg-config
, lib , lib
, fetchpatch , fetchpatch
, autoreconfHook , autoreconfHook
, sharutils
, file , file
, flint , flint
, ntl , ntl
, cddlib , cddlib
, enableFactory ? true , gfan
, lrcalc
, doxygen
, graphviz
# upstream generates docs with texinfo 4. later versions of texinfo
# use letters instead of numbers for post-appendix chapters, and we
# want it to match the upstream format because sage depends on it.
, texinfo4
, texlive
, enableDocs ? true
, enableGfanlib ? true , enableGfanlib ? true
}: }:
stdenv.mkDerivation rec { stdenv.mkDerivation rec {
pname = "singular"; pname = "singular";
version = "4.1.1p2"; version = "4.2.0p2";
src = let # since the tarball does not contain tests or documentation (and
# singular sorts its tarballs in directories by base release (without patch version) # there is no separate tests tarball for 4.2.0), we fetch from
# for example 4.1.1p1 will be in the directory 4-1-1 # GitHub.
baseVersion = builtins.head (lib.splitString "p" version); src = fetchFromGitHub {
urlVersion = builtins.replaceStrings [ "." ] [ "-" ] baseVersion; owner = "Singular";
in repo = "Singular";
fetchurl {
url = "http://www.mathematik.uni-kl.de/ftp/pub/Math/Singular/SOURCES/${urlVersion}/singular-${version}.tar.gz"; # 4.2.0p2 is not tagged, but the tarball matches commit
sha256 = "07x9kri8vl4galik7lr6pscq3c51n8570pyw64i7gbj0m706f7wf"; # 6f68939ddf612d96e3caaaaa8275f77613ac1da8. the commit below has
# two extra fixes.
rev = "3cda50c00a849455efa2502e56596955491a353a";
sha256 = "sha256-OizPhGE6L2LTOrKfeDdDB6BSdvYkDVXvbbYjV14hnHM=";
# if a release is tagged it will be in the format below.
# rev = "Release${lib.replaceStrings ["."] ["-"] version}";
# the repository's .gitattributes file contains the lines "/Tst/
# export-ignore" and "/doc/ export-ignore" so some directories are
# not included in the tarball downloaded by fetchzip. setting
# fetchSubmodules works around this by using fetchgit instead of
# fetchzip.
fetchSubmodules = true;
}; };
patches = [
# add aarch64 support to cpu-check.m4. copied from redhat.
./redhat-aarch64.patch
# vspace causes hangs in modstd and other libraries on aarch64
./disable-vspace-on-aarch64.patch
# the newest version of ax-prog-cc-for-build.m4 seems to trigger
# linker errors. see
# https://github.com/alsa-project/alsa-firmware/issues/3 for a
# related issue.
./use-older-ax-prog-cc-for-build.patch
] ++ lib.optionals enableDocs [
# singular supports building without 4ti2, bertini, normaliz or
# topcom just fine, but the docbuilding does not skip manual pages
# tagged as depending on those binaries (probably a bug in
# doc2tex.pl::HandleLib, since it seems to ignore "-exclude"
# argumens). skip them manually.
./disable-docs-for-optional-unpackaged-deps.patch
];
configureFlags = [ configureFlags = [
"--with-ntl=${ntl}" "--with-ntl=${ntl}"
] ++ lib.optionals enableFactory [ "--disable-pyobject-module"
"--enable-factory" ] ++ lib.optionals enableDocs [
"--enable-doc-build"
] ++ lib.optionals enableGfanlib [ ] ++ lib.optionals enableGfanlib [
"--enable-gfanlib" "--enable-gfanlib"
]; ];
postUnpack = '' prePatch = ''
# don't let the tests depend on `hostname`
substituteInPlace Tst/regress.cmd --replace 'mysystem_catch("hostname")' 'nix_test_runner'
patchShebangs . patchShebangs .
'' + lib.optionalString enableDocs ''
# work around encoding problem
sed -i -e 's/\xb7/@cdot{}/g' doc/decodegb.doc
''; '';
patches = [
# NTL error handler was introduced in the library part, preventing users of
# the library from implementing their own error handling
# https://www.singular.uni-kl.de/forum/viewtopic.php?t=2769
(fetchpatch {
name = "move_error_handler_out_of_libsingular.patch";
# rebased version of https://github.com/Singular/Sources/commit/502cf86d0bb2a96715be6764774b64a69c1ca34c.patch
url = "https://git.sagemath.org/sage.git/plain/build/pkgs/singular/patches/singular-ntl-error-handler.patch?h=50b9ae2fd233c30860e1cbb3e63a26f2cc10560a";
sha256 = "0vgh4m9zn1kjl0br68n04j4nmn5i1igfn28cph0chnwf7dvr9194";
})
];
# For reference (last checked on commit 75f460d): # For reference (last checked on commit 75f460d):
# https://github.com/Singular/Sources/blob/spielwiese/doc/Building-Singular-from-source.md # https://github.com/Singular/Singular/blob/spielwiese/doc/Building-Singular-from-source.md
# https://github.com/Singular/Sources/blob/spielwiese/doc/external-packages-dynamic-modules.md # https://github.com/Singular/Singular/blob/spielwiese/doc/external-packages-dynamic-modules.md
buildInputs = [ buildInputs = [
# necessary # necessary
gmp gmp
@ -60,6 +99,8 @@ stdenv.mkDerivation rec {
readline readline
ntl ntl
flint flint
lrcalc
gfan
] ++ lib.optionals enableGfanlib [ ] ++ lib.optionals enableGfanlib [
cddlib cddlib
]; ];
@ -68,6 +109,12 @@ stdenv.mkDerivation rec {
perl perl
pkg-config pkg-config
autoreconfHook autoreconfHook
sharutils # needed for regress.cmd install checks
] ++ lib.optionals enableDocs [
doxygen
graphviz
texinfo4
texlive.combined.scheme-small
]; ];
preAutoreconf = '' preAutoreconf = ''
@ -85,23 +132,62 @@ stdenv.mkDerivation rec {
# do nothing # do nothing
''; '';
doCheck = true; # very basic checks, does not test any libraries
installPhase = '' installPhase = ''
mkdir -p "$out" mkdir -p "$out"
cp -r Singular/LIB "$out/lib" cp -r Singular/LIB "$out/lib"
make install make install
'' + lib.optionalString enableDocs ''
# Sage uses singular.hlp (which is not in the tarball)
mkdir -p $out/share/info
cp doc/singular.hlp $out/share/info
'' + ''
# Make sure patchelf picks up the right libraries # Make sure patchelf picks up the right libraries
rm -rf libpolys factory resources omalloc Singular rm -rf libpolys factory resources omalloc Singular
''; '';
# singular tests are a bit complicated, see
# https://github.com/Singular/Singular/tree/spielwiese/Tst
# https://www.singular.uni-kl.de/forum/viewtopic.php&t=2773
testsToRun = [
"Old/universal.lst"
"Buch/buch.lst"
"Plural/short.lst"
"Old/factor.tst"
] ++ lib.optionals enableGfanlib [
# tests that require gfanlib
"Short/ok_s.lst"
];
# simple test to make sure singular starts and finds its libraries # simple test to make sure singular starts and finds its libraries
doInstallCheck = true; doInstallCheck = true;
installCheckPhase = '' installCheckPhase = ''
# Very basic sanity check to make sure singular starts and finds its libraries.
# This is redundant with the below tests. It is only kept because the singular test
# runner is a bit complicated. In case we decide to give up those tests in the future,
# this will still be useful. It takes barely any time.
"$out/bin/Singular" -c 'LIB "freegb.lib"; exit;' "$out/bin/Singular" -c 'LIB "freegb.lib"; exit;'
if [ $? -ne 0 ]; then if [ $? -ne 0 ]; then
echo >&2 "Error loading the freegb library in Singular." echo >&2 "Error loading the freegb library in Singular."
exit 1 exit 1
fi fi
# Run the test suite
cd Tst
perl ./regress.cmd \
-s "$out/bin/Singular" \
${lib.concatStringsSep " " (map lib.escapeShellArg testsToRun)} \
2>"$TMPDIR/out-err.log"
# unfortunately regress.cmd always returns exit code 0, so check stderr
# https://www.singular.uni-kl.de/forum/viewtopic.php&t=2773
if [[ -s "$TMPDIR/out-err.log" ]]; then
cat "$TMPDIR/out-err.log"
exit 1
fi
echo "Exit status $?"
''; '';
enableParallelBuilding = true; enableParallelBuilding = true;
@ -110,6 +196,7 @@ stdenv.mkDerivation rec {
description = "A CAS for polynomial computations"; description = "A CAS for polynomial computations";
maintainers = teams.sage.members; maintainers = teams.sage.members;
# 32 bit x86 fails with some link error: `undefined reference to `__divmoddi4@GCC_7.0.0'` # 32 bit x86 fails with some link error: `undefined reference to `__divmoddi4@GCC_7.0.0'`
# https://www.singular.uni-kl.de:8002/trac/ticket/837
platforms = subtractLists platforms.i686 platforms.unix; platforms = subtractLists platforms.i686 platforms.unix;
license = licenses.gpl3; # Or GPLv2 at your option - but not GPLv4 license = licenses.gpl3; # Or GPLv2 at your option - but not GPLv4
homepage = "http://www.singular.uni-kl.de"; homepage = "http://www.singular.uni-kl.de";
@ -0,0 +1,112 @@
commit 9e8b044d982e132cf35a106a1cc0cf7e77b27f7c
Author: Mauricio Collares <mauricio@collares.org>
Date: Thu Apr 15 20:33:21 2021 -0300
Disable manual sections using optional packages not yet in Nixpkgs
* normaliz.lib depends on normaliz.
* polymake.lib depends on topcom.
* recover.lib depends on bertini.
* sing4ti2.lib depends on 4ti2.
* tateProdCplxNegGrad.lib uses multigrading.lib, which depends on 4ti2.
diff --git a/doc/singular.doc b/doc/singular.doc
index 64b969d39..e704f95f0 100644
--- a/doc/singular.doc
+++ b/doc/singular.doc
@@ -407,7 +407,6 @@ LIB "all.lib";
* nfmodsyz_lib:: Syzygy modules of submodules of free modules over algebraic number fields
* noether_lib:: Noether normalization of an ideal
* normal_lib:: procedure for normalization
-* normaliz_lib:: integral closure, normalization for monomial ideals, toric ideals
* pointid_lib:: factorized lex GB of the vanishing ideal of a set of points
* primdec_lib:: procedures for primary decomposition
* primdecint_lib:: primary decomposition over the integers
@@ -416,7 +415,6 @@ LIB "all.lib";
* reesclos_lib:: Rees Algebra and integral closure of an ideal
* rstandard_lib:: Janet bases and border bases for ideals
* sagbi_lib:: Subalgebras bases Analogous to Groebner bases for ideals
-* sing4ti2_lib:: interface to program 4ti2
* symodstd_lib:: Groebner bases for symmetric ideals
* toric_lib:: toric ideals
@end menu
@@ -521,10 +519,6 @@ LIB "all.lib";
@node normal_lib
@subsection normal_lib
@c lib normal.lib
-@c ---------------------------------------------------------
-@node normaliz_lib
-@subsection normaliz_lib
-@c lib normaliz.lib tag:normaliz
@c ----------------------------------------------------------
@node pointid_lib
@subsection pointid_lib
@@ -558,10 +552,6 @@ LIB "all.lib";
@subsection sagbi_lib
@c lib sagbi.lib
@c ---------------------------------------------------------
-@node sing4ti2_lib
-@subsection sing4ti2_lib
-@c lib sing4ti2.lib tag:sing4ti2
-@c ----------------------------------------------------------
@node symodstd_lib
@subsection symodstd_lib
@c lib symodstd.lib
@@ -873,7 +863,6 @@ iniD, reslist, sumlist, dividelist, createlist
* solve_lib:: procedures to solve polynomial systems
* triang_lib:: procedures for decomposing zero-dimensional ideals
* ntsolve_lib:: one real solution of polynomial systems (Newton iteration)
-* recover_lib:: Hybrid numerical/symbolical algorithms
* rootisolation_lib:: real root isolation with intervals
* signcond_lib:: computing realizable sign conditions
* zeroset_lib:: procedures for roots and factorization
@@ -904,10 +893,6 @@ iniD, reslist, sumlist, dividelist, createlist
@subsection ntsolve_lib
@c lib ntsolve.lib
@c ---------------------------------------------------------
-@node recover_lib
-@subsection recover_lib
-@c lib recover.lib tag:bertini
-@c ----------------------------------------------------------
@node rootisolation_lib
@subsection rootisolation_lib
@c lib rootisolation.lib
@@ -1108,7 +1093,6 @@ but not for serious computations.
* cimonom_lib:: complete intersection for toric ideals
* gfan_lib:: A gfanlib interface for Singular
* gitfan_lib:: Compute GIT-fans
-* polymake_lib:: interface to TOPCOM
* realizationMatroids_lib:: Realizability for Tropical Fan Curves
* tropical_lib:: interface to gfan
* tropicalNewton_lib:: Newton polygons in tropical geometry
@@ -1125,10 +1109,7 @@ but not for serious computations.
@node gitfan_lib
@subsection gitfan_lib
@c lib gitfan.lib
-@c ----------------------------------------------------------
-@node polymake_lib
-@subsection polymake_lib
-@c lib polymake.lib tag:topcom
+
@c ----------------------------------------------------------
@node realizationMatroids_lib
@subsection realizationMatroids_lib
@@ -1219,7 +1200,6 @@ Comments should be send to the author of the library directly.
* stanleyreisner_lib:: T1 and T2 for a general Stanley-Reiser ring
* swalk_lib:: Sagbi Walk Conversion Algorithm
* systhreads_lib:: multi-threaded objects
-* tateProdCplxNegGrad_lib:: sheaf cohomology on product of projective spaces
* VecField_lib:: vector fields
@end menu
@c ----------------------------------------------------------
@@ -1310,10 +1290,6 @@ Todos/Issues:
@subsection systhreads_lib
@c lib systhreads.lib
@c ---------------------------------------------------------
-@node tateProdCplxNegGrad_lib
-@subsection tateProdCplxNegGrad_lib
-@c lib tateProdCplxNegGrad.lib
-@c ---------------------------------------------------------
@node VecField_lib
@subsection VecField_lib
@c lib VecField.lib
@ -0,0 +1,15 @@
diff --git a/kernel/mod2.h b/kernel/mod2.h
index 867fcae47..2abd84f23 100644
--- a/kernel/mod2.h
+++ b/kernel/mod2.h
@@ -60,8 +60,10 @@
/* define for parallel processes with shared memory */
#ifndef __CCYGWIN__
+#ifndef SI_CPU_AARCH64
#define HAVE_VSPACE 1
#endif
+#endif
/*#define PROFILING*/
#ifdef PROFILING
@ -0,0 +1,38 @@
diff --git a/m4/cpu-check.m4 b/m4/cpu-check.m4
index 3cf0a7f08..12bb926ac 100644
--- a/m4/cpu-check.m4
+++ b/m4/cpu-check.m4
@@ -37,6 +37,18 @@ if test "$ac_cv_singcpuname" = ppc; then
AC_DEFINE(SI_CPU_PPC,1,"PPC")
AC_SUBST(SI_CPU_PPC)
fi
+if test "$ac_cv_singcpuname" = arm -o "$ac_cv_singcpuname" = armel; then
+ AC_DEFINE(SI_CPU_ARM,1,"ARM")
+ AC_SUBST(SI_CPU_ARM)
+fi
+if test "$ac_cv_singcpuname" = aarch64; then
+ AC_DEFINE(SI_CPU_AARCH64,1,"AARCH64")
+ AC_SUBST(SI_CPU_AARCH64)
+fi
+if test "$ac_cv_singcpuname" = s390; then
+ AC_DEFINE(SI_CPU_S390,1,"S390")
+ AC_SUBST(SI_CPU_S390)
+fi
# UNAME and PATH
AC_MSG_CHECKING(uname for Singular)
@@ -65,6 +77,14 @@ dnl testet on: ppc_Linux, 740/750 PowerMac G3, 512k L2 cache
[powerpc*|ppc*], [AC_DEFINE(HAVE_GENERIC_MULT,1,multiplication is fast on the cpu: a*b is with mod otherwise using tables of logartihms)],
dnl the following settings seems to be better on arm processors
[arm*], [],
+dnl FIXME: need to run some tests
+ [aarch64*], [
+ AC_DEFINE(HAVE_MULT_MOD,1,multiplication is fast on the cpu: a*b is with mod otherwise using tables of logartihms)
+ AC_DEFINE(HAVE_GENERIC_ADD,1,use branch for addition in Z/p otherwise it uses a generic add)
+ AC_DEFINE(HAVE_DIV_MOD,1,division using extend euclidian algorithm otherwise using tables of logartihms)
+ ],
+dnl FIXME: need to run some tests
+ [s390*], [AC_DEFINE(HAVE_GENERIC_ADD,1,use branch for addition in Z/p otherwise it uses a generic add)],
[]
)
@ -0,0 +1,194 @@
diff --git a/m4/ax_prog_cc_for_build.m4 b/m4/ax_prog_cc_for_build.m4
index f7410d74b..12cb005a5 100644
--- a/m4/ax_prog_cc_for_build.m4
+++ b/m4/ax_prog_cc_for_build.m4
@@ -32,35 +32,31 @@
# and this notice are preserved. This file is offered as-is, without any
# warranty.
-#serial 18
+#serial 9
AU_ALIAS([AC_PROG_CC_FOR_BUILD], [AX_PROG_CC_FOR_BUILD])
AC_DEFUN([AX_PROG_CC_FOR_BUILD], [dnl
AC_REQUIRE([AC_PROG_CC])dnl
AC_REQUIRE([AC_PROG_CPP])dnl
-AC_REQUIRE([AC_CANONICAL_BUILD])dnl
+AC_REQUIRE([AC_EXEEXT])dnl
+AC_REQUIRE([AC_CANONICAL_HOST])dnl
dnl Use the standard macros, but make them use other variable names
dnl
pushdef([ac_cv_prog_CPP], ac_cv_build_prog_CPP)dnl
-pushdef([ac_cv_prog_cc_c89], ac_cv_build_prog_cc_c89)dnl
pushdef([ac_cv_prog_gcc], ac_cv_build_prog_gcc)dnl
pushdef([ac_cv_prog_cc_works], ac_cv_build_prog_cc_works)dnl
pushdef([ac_cv_prog_cc_cross], ac_cv_build_prog_cc_cross)dnl
pushdef([ac_cv_prog_cc_g], ac_cv_build_prog_cc_g)dnl
-pushdef([ac_cv_c_compiler_gnu], ac_cv_build_c_compiler_gnu)dnl
pushdef([ac_cv_exeext], ac_cv_build_exeext)dnl
pushdef([ac_cv_objext], ac_cv_build_objext)dnl
pushdef([ac_exeext], ac_build_exeext)dnl
pushdef([ac_objext], ac_build_objext)dnl
pushdef([CC], CC_FOR_BUILD)dnl
pushdef([CPP], CPP_FOR_BUILD)dnl
-pushdef([GCC], GCC_FOR_BUILD)dnl
pushdef([CFLAGS], CFLAGS_FOR_BUILD)dnl
pushdef([CPPFLAGS], CPPFLAGS_FOR_BUILD)dnl
-pushdef([EXEEXT], BUILD_EXEEXT)dnl
pushdef([LDFLAGS], LDFLAGS_FOR_BUILD)dnl
-pushdef([OBJEXT], BUILD_OBJEXT)dnl
pushdef([host], build)dnl
pushdef([host_alias], build_alias)dnl
pushdef([host_cpu], build_cpu)dnl
@@ -71,29 +67,27 @@ pushdef([ac_cv_host_alias], ac_cv_build_alias)dnl
pushdef([ac_cv_host_cpu], ac_cv_build_cpu)dnl
pushdef([ac_cv_host_vendor], ac_cv_build_vendor)dnl
pushdef([ac_cv_host_os], ac_cv_build_os)dnl
-pushdef([ac_tool_prefix], ac_build_tool_prefix)dnl
-pushdef([am_cv_CC_dependencies_compiler_type], am_cv_build_CC_dependencies_compiler_type)dnl
-pushdef([am_cv_prog_cc_c_o], am_cv_build_prog_cc_c_o)dnl
-pushdef([cross_compiling], cross_compiling_build)dnl
+pushdef([ac_cpp], ac_build_cpp)dnl
+pushdef([ac_compile], ac_build_compile)dnl
+pushdef([ac_link], ac_build_link)dnl
-cross_compiling_build=no
+save_cross_compiling=$cross_compiling
+save_ac_tool_prefix=$ac_tool_prefix
+cross_compiling=no
+ac_tool_prefix=
-ac_build_tool_prefix=
-AS_IF([test -n "$build"], [ac_build_tool_prefix="$build-"],
- [test -n "$build_alias"],[ac_build_tool_prefix="$build_alias-"])
-
-AC_LANG_PUSH([C])
AC_PROG_CC
-_AC_COMPILER_EXEEXT
-_AC_COMPILER_OBJEXT
AC_PROG_CPP
+AC_EXEEXT
+
+ac_tool_prefix=$save_ac_tool_prefix
+cross_compiling=$save_cross_compiling
dnl Restore the old definitions
dnl
-popdef([cross_compiling])dnl
-popdef([am_cv_prog_cc_c_o])dnl
-popdef([am_cv_CC_dependencies_compiler_type])dnl
-popdef([ac_tool_prefix])dnl
+popdef([ac_link])dnl
+popdef([ac_compile])dnl
+popdef([ac_cpp])dnl
popdef([ac_cv_host_os])dnl
popdef([ac_cv_host_vendor])dnl
popdef([ac_cv_host_cpu])dnl
@@ -104,33 +98,25 @@ popdef([host_vendor])dnl
popdef([host_cpu])dnl
popdef([host_alias])dnl
popdef([host])dnl
-popdef([OBJEXT])dnl
popdef([LDFLAGS])dnl
-popdef([EXEEXT])dnl
popdef([CPPFLAGS])dnl
popdef([CFLAGS])dnl
-popdef([GCC])dnl
popdef([CPP])dnl
popdef([CC])dnl
popdef([ac_objext])dnl
popdef([ac_exeext])dnl
popdef([ac_cv_objext])dnl
popdef([ac_cv_exeext])dnl
-popdef([ac_cv_c_compiler_gnu])dnl
popdef([ac_cv_prog_cc_g])dnl
popdef([ac_cv_prog_cc_cross])dnl
popdef([ac_cv_prog_cc_works])dnl
-popdef([ac_cv_prog_cc_c89])dnl
popdef([ac_cv_prog_gcc])dnl
popdef([ac_cv_prog_CPP])dnl
-dnl restore global variables ac_ext, ac_cpp, ac_compile,
-dnl ac_link, ac_compiler_gnu (dependant on the current
-dnl language after popping):
-AC_LANG_POP([C])
-
dnl Finally, set Makefile variables
dnl
+BUILD_EXEEXT=$ac_build_exeext
+BUILD_OBJEXT=$ac_build_objext
AC_SUBST(BUILD_EXEEXT)dnl
AC_SUBST(BUILD_OBJEXT)dnl
AC_SUBST([CFLAGS_FOR_BUILD])dnl
diff --git a/m4/ax_prog_cxx_for_build.m4 b/m4/ax_prog_cxx_for_build.m4
index 4d976769f..17c19a89f 100644
--- a/m4/ax_prog_cxx_for_build.m4
+++ b/m4/ax_prog_cxx_for_build.m4
@@ -31,7 +31,7 @@
# and this notice are preserved. This file is offered as-is, without any
# warranty.
-#serial 4
+#serial 3
AU_ALIAS([AC_PROG_CXX_FOR_BUILD], [AX_PROG_CXX_FOR_BUILD])
AC_DEFUN([AX_PROG_CXX_FOR_BUILD], [dnl
@@ -49,7 +49,6 @@ pushdef([ac_cv_prog_cxx_cross], ac_cv_build_prog_cxx_cross)dnl
pushdef([ac_cv_prog_cxx_g], ac_cv_build_prog_cxx_g)dnl
pushdef([CXX], CXX_FOR_BUILD)dnl
pushdef([CXXCPP], CXXCPP_FOR_BUILD)dnl
-pushdef([GXX], GXX_FOR_BUILD)dnl
pushdef([CXXFLAGS], CXXFLAGS_FOR_BUILD)dnl
pushdef([CPPFLAGS], CPPFLAGS_FOR_BUILD)dnl
pushdef([CXXCPPFLAGS], CXXCPPFLAGS_FOR_BUILD)dnl
@@ -63,25 +62,26 @@ pushdef([ac_cv_host_alias], ac_cv_build_alias)dnl
pushdef([ac_cv_host_cpu], ac_cv_build_cpu)dnl
pushdef([ac_cv_host_vendor], ac_cv_build_vendor)dnl
pushdef([ac_cv_host_os], ac_cv_build_os)dnl
-pushdef([ac_tool_prefix], ac_build_tool_prefix)dnl
-pushdef([am_cv_CXX_dependencies_compiler_type], am_cv_build_CXX_dependencies_compiler_type)dnl
-pushdef([cross_compiling], cross_compiling_build)dnl
+pushdef([ac_cxxcpp], ac_build_cxxcpp)dnl
+pushdef([ac_compile], ac_build_compile)dnl
+pushdef([ac_link], ac_build_link)dnl
-cross_compiling_build=no
+save_cross_compiling=$cross_compiling
+save_ac_tool_prefix=$ac_tool_prefix
+cross_compiling=no
+ac_tool_prefix=
-ac_build_tool_prefix=
-AS_IF([test -n "$build"], [ac_build_tool_prefix="$build-"],
- [test -n "$build_alias"],[ac_build_tool_prefix="$build_alias-"])
-
-AC_LANG_PUSH([C++])
AC_PROG_CXX
AC_PROG_CXXCPP
+ac_tool_prefix=$save_ac_tool_prefix
+cross_compiling=$save_cross_compiling
+
dnl Restore the old definitions
dnl
-popdef([cross_compiling])dnl
-popdef([am_cv_CXX_dependencies_compiler_type])dnl
-popdef([ac_tool_prefix])dnl
+popdef([ac_link])dnl
+popdef([ac_compile])dnl
+popdef([ac_cxxcpp])dnl
popdef([ac_cv_host_os])dnl
popdef([ac_cv_host_vendor])dnl
popdef([ac_cv_host_cpu])dnl
@@ -103,10 +103,6 @@ popdef([ac_cv_prog_cxx_works])dnl
popdef([ac_cv_prog_gxx])dnl
popdef([ac_cv_prog_CXXCPP])dnl
-dnl restore global variables (dependant on the current
-dnl language after popping):
-AC_LANG_POP([C++])
-
dnl Finally, set Makefile variables
dnl
AC_SUBST([CXXFLAGS_FOR_BUILD])dnl
@ -1,8 +1,14 @@
{ lib, fetchurl, buildPythonApplication, pbr, requests, setuptools }: { lib
, fetchurl
, buildPythonApplication
, pbr
, requests
, setuptools
}:
buildPythonApplication rec { buildPythonApplication rec {
pname = "git-review"; pname = "git-review";
version = "2.0.0"; version = "2.1.0";
# Manually set version because prb wants to get it from the git # Manually set version because prb wants to get it from the git
# upstream repository (and we are installing from tarball instead) # upstream repository (and we are installing from tarball instead)
@ -10,17 +16,28 @@ buildPythonApplication rec {
src = fetchurl { src = fetchurl {
url = "https://opendev.org/opendev/${pname}/archive/${version}.tar.gz"; url = "https://opendev.org/opendev/${pname}/archive/${version}.tar.gz";
sha256 = "0dkyd5g2xmvsa114is3cd9qmki3hi6c06wjnra0f4xq3aqm0ajnj"; hash = "sha256-3A1T+/iXhNeMS2Aww5jISoiNExdv9N9/kwyATSuwVTE=";
}; };
propagatedBuildInputs = [ pbr requests setuptools ]; nativeBuildInputs = [
pbr
];
# Don't do tests because they require gerrit which is not packaged propagatedBuildInputs = [
requests
setuptools # implicit dependency, used to get package version through pkg_resources
];
# Don't run tests because they pull in external dependencies
# (a specific build of gerrit + maven plugins), and I haven't figured
# out how to work around this yet.
doCheck = false; doCheck = false;
pythonImportsCheck = [ "git_review" ];
meta = with lib; { meta = with lib; {
homepage = "https://opendev.org/opendev/git-review";
description = "Tool to submit code to Gerrit"; description = "Tool to submit code to Gerrit";
homepage = "https://opendev.org/opendev/git-review";
license = licenses.asl20; license = licenses.asl20;
maintainers = with maintainers; [ metadark ]; maintainers = with maintainers; [ metadark ];
}; };
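The git-review hunks above keep the note that the version is set manually because pbr would otherwise try to derive it from git metadata, which a release tarball does not carry. One standard way to do that with pbr, shown here only as a sketch of the mechanism (not a line reproduced from this commit), is the `PBR_VERSION` environment variable:

    # Sketch only: pbr honors the PBR_VERSION environment variable, so a build
    # from a tarball can pin the package version as a derivation attribute.
    { version }:
    {
      PBR_VERSION = version;
    }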
@ -0,0 +1,15 @@
Patch included in advisory @ https://subversion.apache.org/security/CVE-2020-17525-advisory.txt
--- a/subversion/libsvn_repos/config_file.c
+++ b/subversion/libsvn_repos/config_file.c
@@ -237,6 +237,10 @@ get_repos_config(svn_stream_t **stream,
{
/* Search for a repository in the full path. */
repos_root_dirent = svn_repos_find_root_path(dirent, scratch_pool);
+ if (repos_root_dirent == NULL)
+ return svn_error_trace(handle_missing_file(stream, checksum, access,
+ url, must_exist,
+ svn_node_none));
/* Attempt to open a repository at repos_root_dirent. */
SVN_ERR(svn_repos_open3(&access->repos, repos_root_dirent, NULL,
@ -17,7 +17,7 @@ assert javahlBindings -> jdk != null && perl != null;
let let
common = { version, sha256 }: stdenv.mkDerivation (rec { common = { version, sha256, extraPatches ? [ ] }: stdenv.mkDerivation (rec {
inherit version; inherit version;
pname = "subversion"; pname = "subversion";
@ -35,7 +35,7 @@ let
++ lib.optional perlBindings perl ++ lib.optional perlBindings perl
++ lib.optional saslSupport sasl; ++ lib.optional saslSupport sasl;
patches = [ ./apr-1.patch ]; patches = [ ./apr-1.patch ] ++ extraPatches;
# We are hitting the following issue even with APR 1.6.x # We are hitting the following issue even with APR 1.6.x
# -> https://issues.apache.org/jira/browse/SVN-4813 # -> https://issues.apache.org/jira/browse/SVN-4813
@ -118,5 +118,6 @@ in {
subversion = common { subversion = common {
version = "1.12.2"; version = "1.12.2";
sha256 = "0wgpw3kzsiawzqk4y0xgh1z93kllxydgv4lsviim45y5wk4bbl1v"; sha256 = "0wgpw3kzsiawzqk4y0xgh1z93kllxydgv4lsviim45y5wk4bbl1v";
extraPatches = [ ./CVE-2020-17525.patch ];
}; };
} }
@ -19,7 +19,7 @@ stdenv.mkDerivation {
NIX_CFLAGS_COMPILE = "-Wno-error=deprecated-declarations"; NIX_CFLAGS_COMPILE = "-Wno-error=deprecated-declarations";
nativeBuildInputs = [ pkg-config ]; nativeBuildInputs = [ pkg-config ];
buildInputs = [ which gnome3.gnome-common glib intltool libtool cairo gtk3 xorg.xwininfo ] buildInputs = [ which gnome3.gnome-common glib intltool libtool cairo gtk3 xorg.xwininfo xorg.libXdamage ]
++ (with gst_all_1; [ gstreamer gst-plugins-base gst-plugins-bad gst-plugins-good gst-plugins-ugly gst-libav wrapGAppsHook ]); ++ (with gst_all_1; [ gstreamer gst-plugins-base gst-plugins-bad gst-plugins-good gst-plugins-ugly gst-libav wrapGAppsHook ]);
meta = with lib; { meta = with lib; {