Project import generated by Copybara.

GitOrigin-RevId: 70088dc29994c32f8520150e34c6e57e8453f895
Default email 2021-10-07 22:46:35 +08:00
parent 6e4d2ed6b0
commit b4e89fc316
131 changed files with 1773 additions and 1194 deletions

View file

@ -373,11 +373,11 @@ Additional file types can be supported by setting the `unpackCmd` variable (see
##### `srcs` / `src` {#var-stdenv-src}
The list of source files or directories to be unpacked or copied. One of these must be set.
The list of source files or directories to be unpacked or copied. One of these must be set. Note that if you use `srcs`, you should also set `sourceRoot` or `setSourceRoot`.
##### `sourceRoot` {#var-stdenv-sourceRoot}
After running `unpackPhase`, the generic builder changes the current directory to the directory created by unpacking the sources. If there are multiple source directories, you should set `sourceRoot` to the name of the intended directory.
After running `unpackPhase`, the generic builder changes the current directory to the directory created by unpacking the sources. If there are multiple source directories, you should set `sourceRoot` to the name of the intended directory. Set `sourceRoot = ".";` if you use `srcs` and control the unpack phase yourself.
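For illustration, a minimal sketch of a derivation that combines `srcs` with an explicit `sourceRoot` (the package name and source paths below are hypothetical):

```nix
stdenv.mkDerivation {
  pname = "example";   # hypothetical package
  version = "1.0";
  # Both archives are unpacked by the generic unpackPhase.
  srcs = [ ./frontend-1.0.tar.gz ./backend-1.0.tar.gz ];
  # Build from the directory produced by unpacking the first archive;
  # use sourceRoot = "." to stay at the top of the build directory instead.
  sourceRoot = "frontend-1.0";
}
```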
##### `setSourceRoot` {#var-stdenv-setSourceRoot}

View file

@ -16,6 +16,7 @@ rec {
];
tier3 = [
"aarch64-darwin"
"armv6l-linux"
"armv7l-linux"
"i686-linux"

View file

@ -12245,6 +12245,12 @@
githubId = 4113027;
name = "Jesper Geertsen Jonsson";
};
yinfeng = {
email = "lin.yinfeng@outlook.com";
github = "linyinfeng";
githubId = 11229748;
name = "Lin Yinfeng";
};
ylwghst = {
email = "ylwghst@onionmail.info";
github = "ylwghst";

View file

@ -321,6 +321,14 @@
<link linkend="opt-programs.pantheon-tweaks.enable">programs.pantheon-tweaks</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://github.com/DanielOgorchock/joycond">joycond</link>,
a service that uses <literal>hid-nintendo</literal> to provide
Nintendo Joy-Con pairing and better Nintendo Switch Pro
Controller support.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="sec-release-21.11-incompatibilities">

View file

@ -99,6 +99,8 @@ In addition to numerous new and upgraded packages, this release has the followin
- [pantheon-tweaks](https://github.com/pantheon-tweaks/pantheon-tweaks), an unofficial system settings panel for Pantheon. Available as [programs.pantheon-tweaks](#opt-programs.pantheon-tweaks.enable).
- [joycond](https://github.com/DanielOgorchock/joycond), a service that uses `hid-nintendo` to provide Nintendo Joy-Con pairing and better Nintendo Switch Pro Controller support.
## Backward Incompatibilities {#sec-release-21.11-incompatibilities}
- The `security.wrappers` option now requires you to always specify an owner, a group, and whether the setuid/setgid bit should be set (see the sketch below).
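A hedged sketch of a wrapper definition that satisfies the new requirement (the wrapped program is only an example and assumes `pkgs` is in scope):

```nix
security.wrappers.ping = {
  # owner, group, and the setuid/setgid choice are now mandatory
  owner = "root";
  group = "root";
  setuid = true;
  source = "${pkgs.iputils}/bin/ping";
};
```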

View file

@ -21,7 +21,6 @@ import shutil
import socket
import subprocess
import sys
import telnetlib
import tempfile
import time
import unicodedata
@ -89,55 +88,6 @@ CHAR_TO_KEY = {
")": "shift-0x0B",
}
global log, machines, test_script
def eprint(*args: object, **kwargs: Any) -> None:
print(*args, file=sys.stderr, **kwargs)
def make_command(args: list) -> str:
return " ".join(map(shlex.quote, (map(str, args))))
def create_vlan(vlan_nr: str) -> Tuple[str, str, "subprocess.Popen[bytes]", Any]:
log.log("starting VDE switch for network {}".format(vlan_nr))
vde_socket = tempfile.mkdtemp(
prefix="nixos-test-vde-", suffix="-vde{}.ctl".format(vlan_nr)
)
pty_master, pty_slave = pty.openpty()
vde_process = subprocess.Popen(
["vde_switch", "-s", vde_socket, "--dirmode", "0700"],
stdin=pty_slave,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False,
)
fd = os.fdopen(pty_master, "w")
fd.write("version\n")
# TODO: perl version checks if this can be read from
# and if not, dies. We could hang here forever. Fix it.
assert vde_process.stdout is not None
vde_process.stdout.readline()
if not os.path.exists(os.path.join(vde_socket, "ctl")):
raise Exception("cannot start vde_switch")
return (vlan_nr, vde_socket, vde_process, fd)
def retry(fn: Callable, timeout: int = 900) -> None:
"""Call the given function repeatedly, with 1 second intervals,
until it returns True or a timeout is reached.
"""
for _ in range(timeout):
if fn(False):
return
time.sleep(1)
if not fn(True):
raise Exception(f"action timed out after {timeout} seconds")
class Logger:
def __init__(self) -> None:
@ -151,6 +101,10 @@ class Logger:
self._print_serial_logs = True
@staticmethod
def _eprint(*args: object, **kwargs: Any) -> None:
print(*args, file=sys.stderr, **kwargs)
def close(self) -> None:
self.xml.endElement("logfile")
self.xml.endDocument()
@ -169,15 +123,27 @@ class Logger:
self.xml.characters(message)
self.xml.endElement("line")
def info(self, *args, **kwargs) -> None: # type: ignore
self.log(*args, **kwargs)
def warning(self, *args, **kwargs) -> None: # type: ignore
self.log(*args, **kwargs)
def error(self, *args, **kwargs) -> None: # type: ignore
self.log(*args, **kwargs)
sys.exit(1)
def log(self, message: str, attributes: Dict[str, str] = {}) -> None:
eprint(self.maybe_prefix(message, attributes))
self._eprint(self.maybe_prefix(message, attributes))
self.drain_log_queue()
self.log_line(message, attributes)
def log_serial(self, message: str, machine: str) -> None:
self.enqueue({"msg": message, "machine": machine, "type": "serial"})
if self._print_serial_logs:
eprint(Style.DIM + "{} # {}".format(machine, message) + Style.RESET_ALL)
self._eprint(
Style.DIM + "{} # {}".format(machine, message) + Style.RESET_ALL
)
def enqueue(self, item: Dict[str, str]) -> None:
self.queue.put(item)
@ -194,7 +160,7 @@ class Logger:
@contextmanager
def nested(self, message: str, attributes: Dict[str, str] = {}) -> Iterator[None]:
eprint(self.maybe_prefix(message, attributes))
self._eprint(self.maybe_prefix(message, attributes))
self.xml.startElement("nest", attrs={})
self.xml.startElement("head", attributes)
@ -211,6 +177,27 @@ class Logger:
self.xml.endElement("nest")
rootlog = Logger()
def make_command(args: list) -> str:
return " ".join(map(shlex.quote, (map(str, args))))
def retry(fn: Callable, timeout: int = 900) -> None:
"""Call the given function repeatedly, with 1 second intervals,
until it returns True or a timeout is reached.
"""
for _ in range(timeout):
if fn(False):
return
time.sleep(1)
if not fn(True):
raise Exception(f"action timed out after {timeout} seconds")
def _perform_ocr_on_screenshot(
screenshot_path: str, model_ids: Iterable[int]
) -> List[str]:
@ -242,113 +229,256 @@ def _perform_ocr_on_screenshot(
return model_results
class StartCommand:
"""The Base Start Command knows how to append the necesary
runtime qemu options as determined by a particular test driver
run. Any such start command is expected to happily receive and
append additional qemu args.
"""
_cmd: str
def cmd(
self,
monitor_socket_path: pathlib.Path,
shell_socket_path: pathlib.Path,
allow_reboot: bool = False, # TODO: unused, legacy?
) -> str:
display_opts = ""
display_available = any(x in os.environ for x in ["DISPLAY", "WAYLAND_DISPLAY"])
if not display_available:
display_opts += " -nographic"
# qemu options
qemu_opts = ""
qemu_opts += (
""
if allow_reboot
else " -no-reboot"
" -device virtio-serial"
" -device virtconsole,chardev=shell"
" -device virtio-rng-pci"
" -serial stdio"
)
# TODO: qemu script already captures this env variable, legacy?
qemu_opts += " " + os.environ.get("QEMU_OPTS", "")
return (
f"{self._cmd}"
f" -monitor unix:{monitor_socket_path}"
f" -chardev socket,id=shell,path={shell_socket_path}"
f"{qemu_opts}"
f"{display_opts}"
)
@staticmethod
def build_environment(
state_dir: pathlib.Path,
shared_dir: pathlib.Path,
) -> dict:
# We make a copy to not update the current environment
env = dict(os.environ)
env.update(
{
"TMPDIR": str(state_dir),
"SHARED_DIR": str(shared_dir),
"USE_TMPDIR": "1",
}
)
return env
def run(
self,
state_dir: pathlib.Path,
shared_dir: pathlib.Path,
monitor_socket_path: pathlib.Path,
shell_socket_path: pathlib.Path,
) -> subprocess.Popen:
return subprocess.Popen(
self.cmd(monitor_socket_path, shell_socket_path),
stdin=subprocess.DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=True,
cwd=state_dir,
env=self.build_environment(state_dir, shared_dir),
)
class NixStartScript(StartCommand):
"""A start script from nixos/modules/virtualiation/qemu-vm.nix
that also satisfies the requirement of the BaseStartCommand.
These Nix commands have the particular charactersitic that the
machine name can be extracted out of them via a regex match.
(Admittedly a _very_ implicit contract, evtl. TODO fix)
"""
def __init__(self, script: str):
self._cmd = script
@property
def machine_name(self) -> str:
match = re.search("run-(.+)-vm$", self._cmd)
name = "machine"
if match:
name = match.group(1)
return name
class LegacyStartCommand(StartCommand):
"""Used in some places to create an ad-hoc machine instead of
using nix test instrumentation + module system for that purpose.
Legacy.
"""
def __init__(
self,
netBackendArgs: Optional[str] = None,
netFrontendArgs: Optional[str] = None,
hda: Optional[Tuple[pathlib.Path, str]] = None,
cdrom: Optional[str] = None,
usb: Optional[str] = None,
bios: Optional[str] = None,
qemuFlags: Optional[str] = None,
):
self._cmd = "qemu-kvm -m 384"
# networking
net_backend = "-netdev user,id=net0"
net_frontend = "-device virtio-net-pci,netdev=net0"
if netBackendArgs is not None:
net_backend += "," + netBackendArgs
if netFrontendArgs is not None:
net_frontend += "," + netFrontendArgs
self._cmd += f" {net_backend} {net_frontend}"
# hda
hda_cmd = ""
if hda is not None:
hda_path = hda[0].resolve()
hda_interface = hda[1]
if hda_interface == "scsi":
hda_cmd += (
f" -drive id=hda,file={hda_path},werror=report,if=none"
" -device scsi-hd,drive=hda"
)
else:
hda_cmd += f" -drive file={hda_path},if={hda_interface},werror=report"
self._cmd += hda_cmd
# cdrom
if cdrom is not None:
self._cmd += f" -cdrom {cdrom}"
# usb
usb_cmd = ""
if usb is not None:
# https://github.com/qemu/qemu/blob/master/docs/usb2.txt
usb_cmd += (
" -device usb-ehci"
f" -drive id=usbdisk,file={usb},if=none,readonly"
" -device usb-storage,drive=usbdisk "
)
self._cmd += usb_cmd
# bios
if bios is not None:
self._cmd += f" -bios {bios}"
# qemu flags
if qemuFlags is not None:
self._cmd += f" {qemuFlags}"
class Machine:
"""A handle to the machine with this name, that also knows how to manage
the machine lifecycle with the help of a start script / command."""
name: str
tmp_dir: pathlib.Path
shared_dir: pathlib.Path
state_dir: pathlib.Path
monitor_path: pathlib.Path
shell_path: pathlib.Path
start_command: StartCommand
keep_vm_state: bool
allow_reboot: bool
process: Optional[subprocess.Popen] = None
pid: Optional[int] = None
monitor: Optional[socket.socket] = None
shell: Optional[socket.socket] = None
booted: bool = False
connected: bool = False
# Store last serial console lines for use
# of wait_for_console_text
last_lines: Queue = Queue()
def __repr__(self) -> str:
return f"<Machine '{self.name}'>"
def __init__(self, args: Dict[str, Any]) -> None:
if "name" in args:
self.name = args["name"]
else:
self.name = "machine"
cmd = args.get("startCommand", None)
if cmd:
match = re.search("run-(.+)-vm$", cmd)
if match:
self.name = match.group(1)
self.logger = args["log"]
self.script = args.get("startCommand", self.create_startcommand(args))
def __init__(
self,
tmp_dir: pathlib.Path,
start_command: StartCommand,
name: str = "machine",
keep_vm_state: bool = False,
allow_reboot: bool = False,
) -> None:
self.tmp_dir = tmp_dir
self.keep_vm_state = keep_vm_state
self.allow_reboot = allow_reboot
self.name = name
self.start_command = start_command
tmp_dir = os.environ.get("TMPDIR", tempfile.gettempdir())
# set up directories
self.shared_dir = self.tmp_dir / "shared-xchg"
self.shared_dir.mkdir(mode=0o700, exist_ok=True)
def create_dir(name: str) -> str:
path = os.path.join(tmp_dir, name)
os.makedirs(path, mode=0o700, exist_ok=True)
return path
self.state_dir = os.path.join(tmp_dir, f"vm-state-{self.name}")
if not args.get("keepVmState", False):
self.state_dir = self.tmp_dir / f"vm-state-{self.name}"
self.monitor_path = self.state_dir / "monitor"
self.shell_path = self.state_dir / "shell"
if (not self.keep_vm_state) and self.state_dir.exists():
self.cleanup_statedir()
os.makedirs(self.state_dir, mode=0o700, exist_ok=True)
self.shared_dir = create_dir("shared-xchg")
self.booted = False
self.connected = False
self.pid: Optional[int] = None
self.socket = None
self.monitor: Optional[socket.socket] = None
self.allow_reboot = args.get("allowReboot", False)
self.state_dir.mkdir(mode=0o700, exist_ok=True)
@staticmethod
def create_startcommand(args: Dict[str, str]) -> str:
net_backend = "-netdev user,id=net0"
net_frontend = "-device virtio-net-pci,netdev=net0"
if "netBackendArgs" in args:
net_backend += "," + args["netBackendArgs"]
if "netFrontendArgs" in args:
net_frontend += "," + args["netFrontendArgs"]
start_command = (
args.get("qemuBinary", "qemu-kvm")
+ " -m 384 "
+ net_backend
+ " "
+ net_frontend
+ " $QEMU_OPTS "
def create_startcommand(args: Dict[str, str]) -> StartCommand:
rootlog.warning(
"Using legacy create_startcommand(),"
"please use proper nix test vm instrumentation, instead"
"to generate the appropriate nixos test vm qemu startup script"
)
hda = None
if args.get("hda"):
hda_arg: str = args.get("hda", "")
hda_arg_path: pathlib.Path = pathlib.Path(hda_arg)
hda = (hda_arg_path, args.get("hdaInterface", ""))
return LegacyStartCommand(
netBackendArgs=args.get("netBackendArgs"),
netFrontendArgs=args.get("netFrontendArgs"),
hda=hda,
cdrom=args.get("cdrom"),
usb=args.get("usb"),
bios=args.get("bios"),
qemuFlags=args.get("qemuFlags"),
)
if "hda" in args:
hda_path = os.path.abspath(args["hda"])
if args.get("hdaInterface", "") == "scsi":
start_command += (
"-drive id=hda,file="
+ hda_path
+ ",werror=report,if=none "
+ "-device scsi-hd,drive=hda "
)
else:
start_command += (
"-drive file="
+ hda_path
+ ",if="
+ args["hdaInterface"]
+ ",werror=report "
)
if "cdrom" in args:
start_command += "-cdrom " + args["cdrom"] + " "
if "usb" in args:
# https://github.com/qemu/qemu/blob/master/docs/usb2.txt
start_command += (
"-device usb-ehci -drive "
+ "id=usbdisk,file="
+ args["usb"]
+ ",if=none,readonly "
+ "-device usb-storage,drive=usbdisk "
)
if "bios" in args:
start_command += "-bios " + args["bios"] + " "
start_command += args.get("qemuFlags", "")
return start_command
def is_up(self) -> bool:
return self.booted and self.connected
def log(self, msg: str) -> None:
self.logger.log(msg, {"machine": self.name})
rootlog.log(msg, {"machine": self.name})
def log_serial(self, msg: str) -> None:
self.logger.log_serial(msg, self.name)
rootlog.log_serial(msg, self.name)
def nested(self, msg: str, attrs: Dict[str, str] = {}) -> _GeneratorContextManager:
my_attrs = {"machine": self.name}
my_attrs.update(attrs)
return self.logger.nested(msg, my_attrs)
return rootlog.nested(msg, my_attrs)
def wait_for_monitor_prompt(self) -> str:
assert self.monitor is not None
@ -446,6 +576,7 @@ class Machine:
self.connect()
out_command = "( set -euo pipefail; {} ); echo '|!=EOF' $?\n".format(command)
assert self.shell
self.shell.send(out_command.encode())
output = ""
@ -466,6 +597,8 @@ class Machine:
Should only be used during test development, not in the production test."""
self.connect()
self.log("Terminal is ready (there is no prompt):")
assert self.shell
subprocess.run(
["socat", "READLINE", f"FD:{self.shell.fileno()}"],
pass_fds=[self.shell.fileno()],
@ -534,6 +667,7 @@ class Machine:
with self.nested("waiting for the VM to power off"):
sys.stdout.flush()
assert self.process
self.process.wait()
self.pid = None
@ -611,6 +745,8 @@ class Machine:
with self.nested("waiting for the VM to finish booting"):
self.start()
assert self.shell
tic = time.time()
self.shell.recv(1024)
# TODO: Timeout
@ -750,65 +886,35 @@ class Machine:
self.log("starting vm")
def create_socket(path: str) -> socket.socket:
if os.path.exists(path):
os.unlink(path)
def clear(path: pathlib.Path) -> pathlib.Path:
if path.exists():
path.unlink()
return path
def create_socket(path: pathlib.Path) -> socket.socket:
s = socket.socket(family=socket.AF_UNIX, type=socket.SOCK_STREAM)
s.bind(path)
s.bind(str(path))
s.listen(1)
return s
monitor_path = os.path.join(self.state_dir, "monitor")
self.monitor_socket = create_socket(monitor_path)
shell_path = os.path.join(self.state_dir, "shell")
self.shell_socket = create_socket(shell_path)
display_available = any(x in os.environ for x in ["DISPLAY", "WAYLAND_DISPLAY"])
qemu_options = (
" ".join(
[
"" if self.allow_reboot else "-no-reboot",
"-monitor unix:{}".format(monitor_path),
"-chardev socket,id=shell,path={}".format(shell_path),
"-device virtio-serial",
"-device virtconsole,chardev=shell",
"-device virtio-rng-pci",
"-serial stdio" if display_available else "-nographic",
]
)
+ " "
+ os.environ.get("QEMU_OPTS", "")
monitor_socket = create_socket(clear(self.monitor_path))
shell_socket = create_socket(clear(self.shell_path))
self.process = self.start_command.run(
self.state_dir,
self.shared_dir,
self.monitor_path,
self.shell_path,
)
environment = dict(os.environ)
environment.update(
{
"TMPDIR": self.state_dir,
"SHARED_DIR": self.shared_dir,
"USE_TMPDIR": "1",
"QEMU_OPTS": qemu_options,
}
)
self.process = subprocess.Popen(
self.script,
stdin=subprocess.DEVNULL,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
shell=True,
cwd=self.state_dir,
env=environment,
)
self.monitor, _ = self.monitor_socket.accept()
self.shell, _ = self.shell_socket.accept()
self.monitor, _ = monitor_socket.accept()
self.shell, _ = shell_socket.accept()
# Store last serial console lines for use
# of wait_for_console_text
self.last_lines: Queue = Queue()
def process_serial_output() -> None:
assert self.process.stdout is not None
assert self.process
assert self.process.stdout
for _line in self.process.stdout:
# Ignore undecodable bytes that may occur in boot menus
line = _line.decode(errors="ignore").replace("\r", "").rstrip()
@ -825,15 +931,15 @@ class Machine:
self.log("QEMU running (pid {})".format(self.pid))
def cleanup_statedir(self) -> None:
if os.path.isdir(self.state_dir):
shutil.rmtree(self.state_dir)
self.logger.log(f"deleting VM state directory {self.state_dir}")
self.logger.log("if you want to keep the VM state, pass --keep-vm-state")
shutil.rmtree(self.state_dir)
rootlog.log(f"deleting VM state directory {self.state_dir}")
rootlog.log("if you want to keep the VM state, pass --keep-vm-state")
def shutdown(self) -> None:
if not self.booted:
return
assert self.shell
self.shell.send("poweroff\n".encode())
self.wait_for_shutdown()
@ -908,41 +1014,215 @@ class Machine:
"""Make the machine reachable."""
self.send_monitor_command("set_link virtio-net-pci.1 on")
def create_machine(args: Dict[str, Any]) -> Machine:
args["log"] = log
return Machine(args)
def release(self) -> None:
if self.pid is None:
return
rootlog.info(f"kill machine (pid {self.pid})")
assert self.process
assert self.shell
assert self.monitor
self.process.terminate()
self.shell.close()
self.monitor.close()
def start_all() -> None:
with log.nested("starting all VMs"):
for machine in machines:
machine.start()
class VLan:
"""This class handles a VLAN that the run-vm scripts identify via its
number handles. The network's lifetime equals the object's lifetime.
"""
nr: int
socket_dir: pathlib.Path
process: subprocess.Popen
pid: int
fd: io.TextIOBase
def __repr__(self) -> str:
return f"<Vlan Nr. {self.nr}>"
def __init__(self, nr: int, tmp_dir: pathlib.Path):
self.nr = nr
self.socket_dir = tmp_dir / f"vde{self.nr}.ctl"
# TODO: don't side-effect environment here
os.environ[f"QEMU_VDE_SOCKET_{self.nr}"] = str(self.socket_dir)
rootlog.info("start vlan")
pty_master, pty_slave = pty.openpty()
self.process = subprocess.Popen(
["vde_switch", "-s", self.socket_dir, "--dirmode", "0700"],
stdin=pty_slave,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=False,
)
self.pid = self.process.pid
self.fd = os.fdopen(pty_master, "w")
self.fd.write("version\n")
# TODO: perl version checks if this can be read from
# and if not, dies. We could hang here forever. Fix it.
assert self.process.stdout is not None
self.process.stdout.readline()
if not (self.socket_dir / "ctl").exists():
rootlog.error("cannot start vde_switch")
rootlog.info(f"running vlan (pid {self.pid})")
def __del__(self) -> None:
rootlog.info(f"kill vlan (pid {self.pid})")
self.fd.close()
self.process.terminate()
def join_all() -> None:
with log.nested("waiting for all VMs to finish"):
for machine in machines:
machine.wait_for_shutdown()
class Driver:
"""A handle to the driver that sets up the environment
and runs the tests"""
tests: str
vlans: List[VLan]
machines: List[Machine]
def run_tests(interactive: bool = False) -> None:
if interactive:
ptpython.repl.embed(test_symbols(), {})
else:
test_script()
def __init__(
self,
start_scripts: List[str],
vlans: List[int],
tests: str,
keep_vm_state: bool = False,
):
self.tests = tests
tmp_dir = pathlib.Path(os.environ.get("TMPDIR", tempfile.gettempdir()))
tmp_dir.mkdir(mode=0o700, exist_ok=True)
with rootlog.nested("start all VLans"):
self.vlans = [VLan(nr, tmp_dir) for nr in vlans]
def cmd(scripts: List[str]) -> Iterator[NixStartScript]:
for s in scripts:
yield NixStartScript(s)
self.machines = [
Machine(
start_command=cmd,
keep_vm_state=keep_vm_state,
name=cmd.machine_name,
tmp_dir=tmp_dir,
)
for cmd in cmd(start_scripts)
]
@atexit.register
def clean_up() -> None:
with rootlog.nested("clean up"):
for machine in self.machines:
machine.release()
def subtest(self, name: str) -> Iterator[None]:
"""Group logs under a given test name"""
with rootlog.nested(name):
try:
yield
return True
except:
rootlog.error(f'Test "{name}" failed with error:')
raise
def test_symbols(self) -> Dict[str, Any]:
@contextmanager
def subtest(name: str) -> Iterator[None]:
return self.subtest(name)
general_symbols = dict(
start_all=self.start_all,
test_script=self.test_script,
machines=self.machines,
vlans=self.vlans,
driver=self,
log=rootlog,
os=os,
create_machine=self.create_machine,
subtest=subtest,
run_tests=self.run_tests,
join_all=self.join_all,
retry=retry,
serial_stdout_off=self.serial_stdout_off,
serial_stdout_on=self.serial_stdout_on,
Machine=Machine, # for typing
)
machine_symbols = {
m.name: self.machines[idx] for idx, m in enumerate(self.machines)
}
vlan_symbols = {
f"vlan{v.nr}": self.vlans[idx] for idx, v in enumerate(self.vlans)
}
print(
"additionally exposed symbols:\n "
+ ", ".join(map(lambda m: m.name, self.machines))
+ ",\n "
+ ", ".join(map(lambda v: f"vlan{v.nr}", self.vlans))
+ ",\n "
+ ", ".join(list(general_symbols.keys()))
)
return {**general_symbols, **machine_symbols, **vlan_symbols}
def test_script(self) -> None:
"""Run the test script"""
with rootlog.nested("run the VM test script"):
symbols = self.test_symbols() # call eagerly
exec(self.tests, symbols, None)
def run_tests(self) -> None:
"""Run the test script (for non-interactive test runs)"""
self.test_script()
# TODO: Collect coverage data
for machine in machines:
for machine in self.machines:
if machine.is_up():
machine.execute("sync")
def start_all(self) -> None:
"""Start all machines"""
with rootlog.nested("start all VMs"):
for machine in self.machines:
machine.start()
def serial_stdout_on() -> None:
log._print_serial_logs = True
def join_all(self) -> None:
"""Wait for all machines to shut down"""
with rootlog.nested("wait for all VMs to finish"):
for machine in self.machines:
machine.wait_for_shutdown()
def create_machine(self, args: Dict[str, Any]) -> Machine:
rootlog.warning(
"Using legacy create_machine(), please instantiate the"
"Machine class directly, instead"
)
tmp_dir = pathlib.Path(os.environ.get("TMPDIR", tempfile.gettempdir()))
tmp_dir.mkdir(mode=0o700, exist_ok=True)
def serial_stdout_off() -> None:
log._print_serial_logs = False
if args.get("startCommand"):
start_command: str = args.get("startCommand", "")
cmd = NixStartScript(start_command)
name = args.get("name", cmd.machine_name)
else:
cmd = Machine.create_startcommand(args) # type: ignore
name = args.get("name", "machine")
return Machine(
tmp_dir=tmp_dir,
start_command=cmd,
name=name,
keep_vm_state=args.get("keep_vm_state", False),
allow_reboot=args.get("allow_reboot", False),
)
def serial_stdout_on(self) -> None:
rootlog._print_serial_logs = True
def serial_stdout_off(self) -> None:
rootlog._print_serial_logs = False
class EnvDefault(argparse.Action):
@ -970,52 +1250,6 @@ class EnvDefault(argparse.Action):
setattr(namespace, self.dest, values)
@contextmanager
def subtest(name: str) -> Iterator[None]:
with log.nested(name):
try:
yield
return True
except Exception as e:
log.log(f'Test "{name}" failed with error: "{e}"')
raise e
return False
def _test_symbols() -> Dict[str, Any]:
general_symbols = dict(
start_all=start_all,
test_script=globals().get("test_script"), # same
machines=globals().get("machines"), # without being initialized
log=globals().get("log"), # extracting those symbol keys
os=os,
create_machine=create_machine,
subtest=subtest,
run_tests=run_tests,
join_all=join_all,
retry=retry,
serial_stdout_off=serial_stdout_off,
serial_stdout_on=serial_stdout_on,
Machine=Machine, # for typing
)
return general_symbols
def test_symbols() -> Dict[str, Any]:
general_symbols = _test_symbols()
machine_symbols = {m.name: machines[idx] for idx, m in enumerate(machines)}
print(
"additionally exposed symbols:\n "
+ ", ".join(map(lambda m: m.name, machines))
+ ",\n "
+ ", ".join(list(general_symbols.keys()))
)
return {**general_symbols, **machine_symbols}
if __name__ == "__main__":
arg_parser = argparse.ArgumentParser(prog="nixos-test-driver")
arg_parser.add_argument(
@ -1055,44 +1289,18 @@ if __name__ == "__main__":
)
args = arg_parser.parse_args()
testscript = pathlib.Path(args.testscript).read_text()
global log, machines, test_script
if not args.keep_vm_state:
rootlog.info("Machine state will be reset. To keep it, pass --keep-vm-state")
log = Logger()
driver = Driver(
args.start_scripts, args.vlans, args.testscript.read_text(), args.keep_vm_state
)
vde_sockets = [create_vlan(v) for v in args.vlans]
for nr, vde_socket, _, _ in vde_sockets:
os.environ["QEMU_VDE_SOCKET_{}".format(nr)] = vde_socket
machines = [
create_machine({"startCommand": s, "keepVmState": args.keep_vm_state})
for s in args.start_scripts
]
machine_eval = [
"{0} = machines[{1}]".format(m.name, idx) for idx, m in enumerate(machines)
]
exec("\n".join(machine_eval))
@atexit.register
def clean_up() -> None:
with log.nested("cleaning up"):
for machine in machines:
if machine.pid is None:
continue
log.log("killing {} (pid {})".format(machine.name, machine.pid))
machine.process.kill()
for _, _, process, _ in vde_sockets:
process.terminate()
log.close()
def test_script() -> None:
with log.nested("running the VM test script"):
symbols = test_symbols() # call eagerly
exec(testscript, symbols, None)
interactive = args.interactive or (not bool(testscript))
tic = time.time()
run_tests(interactive)
toc = time.time()
print("test script finished in {:.2f}s".format(toc - tic))
if args.interactive:
ptpython.repl.embed(driver.test_symbols(), {})
else:
tic = time.time()
driver.run_tests()
toc = time.time()
rootlog.info(f"test script finished in {(toc-tic):.2f}s")

View file

@ -43,7 +43,8 @@ rec {
from pydoc import importfile
with open('driver-symbols', 'w') as fp:
t = importfile('${testDriverScript}')
test_symbols = t._test_symbols()
d = t.Driver([],[],"")
test_symbols = d.test_symbols()
fp.write(','.join(test_symbols.keys()))
EOF
'';
@ -188,14 +189,6 @@ rec {
--set startScripts "''${vmStartScripts[*]}" \
--set testScript "$out/test-script" \
--set vlans '${toString vlans}'
${lib.optionalString (testScript == "") ''
ln -s ${testDriver}/bin/nixos-test-driver $out/bin/nixos-run-vms
wrapProgram $out/bin/nixos-run-vms \
--set startScripts "''${vmStartScripts[*]}" \
--set testScript "${pkgs.writeText "start-all" "start_all(); join_all();"}" \
--set vlans '${toString vlans}'
''}
'');
# Make a full-blown test

View file

@ -8,11 +8,21 @@ let
_file = "${networkExpr}@node-${vm}";
imports = [ module ];
}) (import networkExpr);
pkgs = import ../../../../.. { inherit system config; };
testing = import ../../../../lib/testing-python.nix {
inherit system pkgs;
};
interactiveDriver = (testing.makeTest { inherit nodes; testScript = "start_all(); join_all();"; }).driverInteractive;
in
with import ../../../../lib/testing-python.nix {
inherit system;
pkgs = import ../../../../.. { inherit system config; };
};
(makeTest { inherit nodes; testScript = ""; }).driverInteractive
pkgs.runCommand "nixos-build-vms" { nativeBuildInputs = [ pkgs.makeWrapper ]; } ''
mkdir -p $out/bin
ln -s ${interactiveDriver}/bin/nixos-test-driver $out/bin/nixos-test-driver
ln -s ${interactiveDriver}/bin/nixos-test-driver $out/bin/nixos-run-vms
wrapProgram $out/bin/nixos-test-driver \
--add-flags "--interactive"
''

View file

@ -412,6 +412,7 @@
./services/hardware/illum.nix
./services/hardware/interception-tools.nix
./services/hardware/irqbalance.nix
./services/hardware/joycond.nix
./services/hardware/lcd.nix
./services/hardware/lirc.nix
./services/hardware/nvidia-optimus.nix
@ -543,6 +544,7 @@
./services/misc/matrix-appservice-discord.nix
./services/misc/matrix-appservice-irc.nix
./services/misc/matrix-synapse.nix
./services/misc/mautrix-facebook.nix
./services/misc/mautrix-telegram.nix
./services/misc/mbpfan.nix
./services/misc/mediatomb.nix

View file

@ -0,0 +1,40 @@
{ config, lib, pkgs, ... }:
let
cfg = config.services.joycond;
kernelPackages = config.boot.kernelPackages;
in
with lib;
{
options.services.joycond = {
enable = mkEnableOption "support for Nintendo Pro Controllers and Joy-Cons";
package = mkOption {
type = types.package;
default = pkgs.joycond;
defaultText = "pkgs.joycond";
description = ''
The joycond package to use.
'';
};
};
config = mkIf cfg.enable {
environment.systemPackages = [
kernelPackages.hid-nintendo
cfg.package
];
boot.extraModulePackages = [ kernelPackages.hid-nintendo ];
boot.kernelModules = [ "hid_nintendo" ];
services.udev.packages = [ cfg.package ];
systemd.packages = [ cfg.package ];
# Workaround for https://github.com/NixOS/nixpkgs/issues/81138
systemd.services.joycond.wantedBy = [ "multi-user.target" ];
};
}

View file

@ -0,0 +1,195 @@
{ config, pkgs, lib, ... }:
with lib;
let
cfg = config.services.mautrix-facebook;
settingsFormat = pkgs.formats.json {};
settingsFile = settingsFormat.generate "mautrix-facebook-config.json" cfg.settings;
puppetRegex = concatStringsSep
".*"
(map
escapeRegex
(splitString
"{userid}"
cfg.settings.bridge.username_template));
in {
options = {
services.mautrix-facebook = {
enable = mkEnableOption "Mautrix-Facebook, a Matrix-Facebook hybrid puppeting/relaybot bridge";
settings = mkOption rec {
apply = recursiveUpdate default;
type = settingsFormat.type;
default = {
homeserver = {
address = "http://localhost:8008";
};
appservice = rec {
address = "http://${hostname}:${toString port}";
hostname = "localhost";
port = 29319;
database = "postgresql://";
bot_username = "facebookbot";
};
metrics.enabled = false;
manhole.enabled = false;
bridge = {
encryption = {
allow = true;
default = true;
};
username_template = "facebook_{userid}";
};
logging = {
version = 1;
formatters.journal_fmt.format = "%(name)s: %(message)s";
handlers.journal = {
class = "systemd.journal.JournalHandler";
formatter = "journal_fmt";
SYSLOG_IDENTIFIER = "mautrix-facebook";
};
root = {
level = "INFO";
handlers = ["journal"];
};
};
};
example = literalExpression ''
{
homeserver = {
address = "http://localhost:8008";
domain = "mydomain.example";
};
bridge.permissions = {
"@admin:mydomain.example" = "admin";
"mydomain.example" = "user";
};
}
'';
description = ''
<filename>config.yaml</filename> configuration as a Nix attribute set.
Configuration options should match those described in
<link xlink:href="https://github.com/mautrix/facebook/blob/master/mautrix_facebook/example-config.yaml">
example-config.yaml</link>.
</para>
<para>
Secret tokens should be specified using <option>environmentFile</option>
instead of this world-readable attribute set.
'';
};
environmentFile = mkOption {
type = types.nullOr types.path;
default = null;
description = ''
File containing environment variables to be passed to the mautrix-facebook service.
Any config variable can be overridden by setting <literal>MAUTRIX_FACEBOOK_SOME_KEY</literal> to override the <literal>some.key</literal> variable.
'';
};
configurePostgresql = mkOption {
type = types.bool;
default = true;
description = ''
Enable PostgreSQL and create a user and database for mautrix-facebook. The default <literal>settings</literal> reference this database; if you disable this option you must provide a database URL.
'';
};
registrationData = mkOption {
type = types.attrs;
default = {};
description = ''
Output data for appservice registration. Simply make any desired changes and serialize to JSON. Note that this data contains secrets so think twice before putting it into the nix store.
Currently <literal>as_token</literal> and <literal>hs_token</literal> need to be added as they are not known to this module.
'';
};
};
};
config = mkIf cfg.enable {
users.users.mautrix-facebook = {
group = "mautrix-facebook";
isSystemUser = true;
};
services.postgresql = mkIf cfg.configurePostgresql {
ensureDatabases = ["mautrix-facebook"];
ensureUsers = [{
name = "mautrix-facebook";
ensurePermissions = {
"DATABASE \"mautrix-facebook\"" = "ALL PRIVILEGES";
};
}];
};
systemd.services.mautrix-facebook = rec {
wantedBy = [ "multi-user.target" ];
wants = [
"network-online.target"
] ++ optional config.services.matrix-synapse.enable "matrix-synapse.service"
++ optional cfg.configurePostgresql "postgresql.service";
after = wants;
serviceConfig = {
Type = "simple";
Restart = "always";
User = "mautrix-facebook";
ProtectSystem = "strict";
ProtectHome = true;
ProtectKernelTunables = true;
ProtectKernelModules = true;
ProtectControlGroups = true;
PrivateTmp = true;
EnvironmentFile = cfg.environmentFile;
ExecStart = ''
${pkgs.mautrix-facebook}/bin/mautrix-facebook --config=${settingsFile}
'';
};
};
services.mautrix-facebook = {
registrationData = {
id = "mautrix-facebook";
namespaces = {
users = [
{
exclusive = true;
regex = escapeRegex "@${cfg.settings.appservice.bot_username}:${cfg.settings.homeserver.domain}";
}
{
exclusive = true;
regex = "@${puppetRegex}:${escapeRegex cfg.settings.homeserver.domain}";
}
];
aliases = [];
};
url = cfg.settings.appservice.address;
sender_localpart = "mautrix-facebook-sender";
rate_limited = false;
"de.sorunome.msc2409.push_ephemeral" = true;
push_ephemeral = true;
};
};
};
meta.maintainers = with maintainers; [ kevincox ];
}

View file

@ -82,6 +82,15 @@ in
nix = {
enable = mkOption {
type = types.bool;
default = true;
description = ''
Whether to enable Nix.
Disabling Nix makes the system hard to modify, and the Nix programs and configuration will not be made available by NixOS itself.
'';
};
package = mkOption {
type = types.package;
default = pkgs.nix;
@ -499,7 +508,7 @@ in
###### implementation
config = {
config = mkIf cfg.enable {
nix.binaryCachePublicKeys = [ "cache.nixos.org-1:6NCHdD59X431o0gWypbMrAURkbJ16ZPMQFGspcDShjY=" ];
nix.binaryCaches = [ "https://cache.nixos.org/" ];

View file

@ -185,6 +185,28 @@ let
serviceConfig.DynamicUser = mkDefault enableDynamicUser;
serviceConfig.User = mkDefault conf.user;
serviceConfig.Group = conf.group;
# Hardening
serviceConfig.CapabilityBoundingSet = mkDefault [ "" ];
serviceConfig.DeviceAllow = [ "" ];
serviceConfig.LockPersonality = true;
serviceConfig.MemoryDenyWriteExecute = true;
serviceConfig.NoNewPrivileges = true;
serviceConfig.PrivateDevices = true;
serviceConfig.ProtectClock = true;
serviceConfig.ProtectControlGroups = true;
serviceConfig.ProtectHome = true;
serviceConfig.ProtectHostname = true;
serviceConfig.ProtectKernelLogs = true;
serviceConfig.ProtectKernelModules = true;
serviceConfig.ProtectKernelTunables = true;
serviceConfig.ProtectSystem = mkDefault "strict";
serviceConfig.RemoveIPC = true;
serviceConfig.RestrictAddressFamilies = [ "AF_INET" "AF_INET6" ];
serviceConfig.RestrictNamespaces = true;
serviceConfig.RestrictRealtime = true;
serviceConfig.RestrictSUIDSGID = true;
serviceConfig.SystemCallArchitectures = "native";
serviceConfig.UMask = "0077";
} serviceOpts ]);
};
in

View file

@ -41,6 +41,10 @@ in
-format.new=${if cfg.newMetricFormat then "true" else "false"} \
${concatStringsSep " \\\n " cfg.extraFlags}
'';
RestrictAddressFamilies = [
# Need AF_UNIX to collect data
"AF_UNIX"
];
};
};
}

View file

@ -83,6 +83,10 @@ in
--dovecot.scopes ${concatStringsSep "," cfg.scopes} \
${concatStringsSep " \\\n " cfg.extraFlags}
'';
RestrictAddressFamilies = [
# Need AF_UNIX to collect data
"AF_UNIX"
];
};
};
}

View file

@ -34,6 +34,10 @@ in {
${concatStringsSep " \\n" cfg.controlSocketPaths}
'';
SupplementaryGroups = [ "kea" ];
RestrictAddressFamilies = [
# Need AF_UNIX to collect data
"AF_UNIX"
];
};
};
}

View file

@ -45,6 +45,10 @@ in {
${concatStringsSep " \\\n " cfg.extraFlags}
'';
SupplementaryGroups = [ "knot" ];
RestrictAddressFamilies = [
# Need AF_UNIX to collect data
"AF_UNIX"
];
};
};
}

View file

@ -28,6 +28,10 @@ in
-rate ${cfg.refreshRate} \
${concatStringsSep " \\\n " cfg.extraFlags}
'';
RestrictAddressFamilies = [
# Need AF_UNIX to collect data
"AF_UNIX"
];
};
};
}

View file

@ -79,6 +79,10 @@ in
--web.telemetry-path ${cfg.telemetryPath} \
${concatStringsSep " \\\n " cfg.extraFlags}
'';
RestrictAddressFamilies = [
# Need AF_UNIX to collect data
"AF_UNIX"
];
};
};
}

View file

@ -45,6 +45,7 @@ in
serviceOpts = {
serviceConfig = {
AmbientCapabilities = [ "CAP_NET_RAW" ];
CapabilityBoundingSet = [ "CAP_NET_RAW" ];
ExecStart = ''
${pkgs.prometheus-smokeping-prober}/bin/smokeping_prober \
--web.listen-address ${cfg.listenAddress}:${toString cfg.port} \

View file

@ -99,6 +99,10 @@ in
-config.file ${configFile} \
${concatStringsSep " \\\n " cfg.extraFlags}
'';
RestrictAddressFamilies = [
# Need AF_UNIX to collect data
"AF_UNIX"
];
};
};
}

View file

@ -13,6 +13,10 @@ in {
${pkgs.prometheus-systemd-exporter}/bin/systemd_exporter \
--web.listen-address ${cfg.listenAddress}:${toString cfg.port}
'';
RestrictAddressFamilies = [
# Need AF_UNIX to collect data
"AF_UNIX"
];
};
};
}

View file

@ -49,6 +49,10 @@ in
${optionalString (cfg.controlInterface != null) "--control-interface ${cfg.controlInterface}"} \
${toString cfg.extraFlags}
'';
RestrictAddressFamilies = [
# Need AF_UNIX to collect data
"AF_UNIX"
];
};
}] ++ [
(mkIf config.services.unbound.enable {

View file

@ -52,6 +52,7 @@ in {
serviceConfig = {
AmbientCapabilities = [ "CAP_NET_ADMIN" ];
CapabilityBoundingSet = [ "CAP_NET_ADMIN" ];
ExecStart = ''
${pkgs.prometheus-wireguard-exporter}/bin/prometheus_wireguard_exporter \
-p ${toString cfg.port} \
@ -61,6 +62,10 @@ in {
${optionalString cfg.withRemoteIp "-r"} \
${optionalString (cfg.wireguardConfig != null) "-n ${escapeShellArg cfg.wireguardConfig}"}
'';
RestrictAddressFamilies = [
# Need AF_NETLINK to collect data
"AF_NETLINK"
];
};
};
}

View file

@ -780,7 +780,7 @@ in
in
[
"-net nic,netdev=user.0,model=virtio"
"-netdev user,id=user.0,${forwardingOptions}\${QEMU_NET_OPTS:+,$QEMU_NET_OPTS}"
"-netdev user,id=user.0,${forwardingOptions}\"$QEMU_NET_OPTS\""
];
# FIXME: Consolidate this one day.

View file

@ -478,6 +478,7 @@ in
wasabibackend = handleTest ./wasabibackend.nix {};
wiki-js = handleTest ./wiki-js.nix {};
wireguard = handleTest ./wireguard {};
without-nix = handleTest ./without-nix.nix {};
wmderland = handleTest ./wmderland.nix {};
wpa_supplicant = handleTest ./wpa_supplicant.nix {};
wordpress = handleTest ./wordpress.nix {};

View file

@ -11,10 +11,6 @@ import ./make-test-python.nix (
meta.maintainers = with pkgs.lib.maintainers; [ pborzenkov ];
nodes = {
default = { ... }: {
services.calibre-web.enable = true;
};
customized = { pkgs, ... }: {
services.calibre-web = {
enable = true;
@ -33,12 +29,6 @@ import ./make-test-python.nix (
testScript = ''
start_all()
default.wait_for_unit("calibre-web.service")
default.wait_for_open_port(${toString defaultPort})
default.succeed(
"curl --fail 'http://localhost:${toString defaultPort}/basicconfig' | grep 'Basic Configuration'"
)
customized.succeed(
"mkdir /tmp/books && calibredb --library-path /tmp/books add -e --title test-book"
)

View file

@ -34,7 +34,7 @@ in
with lzma.open(
"${stick}"
) as data, open(machine.state_dir + "/usbstick.img", "wb") as stick:
) as data, open(machine.state_dir / "usbstick.img", "wb") as stick:
stick.write(data.read())
machine.succeed("udisksctl info -b /dev/vda >&2")

View file

@ -22,7 +22,7 @@ import ./make-test-python.nix ({ pkgs, ... }: {
testScript = ''
# create a blank disk image for our fake USB stick
with open(machine.state_dir + "/usbstick.img", "wb") as stick:
with open(machine.state_dir / "usbstick.img", "wb") as stick:
stick.write(b"\x00" * (1024 * 1024))
# wait for machine to have started and the usbguard service to be up

View file

@ -0,0 +1,23 @@
import ./make-test-python.nix ({ lib, ... }: {
name = "without-nix";
meta = with lib.maintainers; {
maintainers = [ ericson2314 ];
};
nixpkgs.overlays = [
(self: super: {
nix = throw "don't want to use this";
})
];
nodes.machine = { ... }: {
nix.enable = false;
};
testScript = ''
start_all()
machine.succeed("which which")
machine.fail("which nix")
'';
})

View file

@ -18,13 +18,13 @@ let
in
pythonPackages.buildPythonApplication rec {
pname = "picard";
version = "2.6.3";
version = "2.6.4";
src = fetchFromGitHub {
owner = "metabrainz";
repo = pname;
rev = "release-${version}";
sha256 = "sha256-bSqGgRXqHGjT+OYCEafsT/btVe+n91+L0kB8fnrywss=";
sha256 = "0lm7s9jy7z4an3xxj3gnxxf2xx045i157qaxysbdhcq5lwlmznc7";
};
nativeBuildInputs = [ gettext qt5.wrapQtAppsHook qt5.qtbase ]

View file

@ -135,7 +135,7 @@ mkDerivation rec {
{ description = "Set of integrated tools for the R language";
homepage = "https://www.rstudio.com/";
license = licenses.agpl3;
maintainers = with maintainers; [ changlinli ciil ];
maintainers = with maintainers; [ ciil ];
platforms = platforms.linux;
};
}

View file

@ -2,7 +2,7 @@
, extra-cmake-modules, kdoctools
, qtscript, qtsvg, qtquickcontrols, qtwebengine
, krunner, shared-mime-info, kparts, knewstuff
, gpsd, perl
, gpsd, perl, fetchpatch
}:
mkDerivation {
@ -18,6 +18,15 @@ mkDerivation {
qtscript qtsvg qtquickcontrols qtwebengine shared-mime-info krunner kparts
knewstuff gpsd
];
patches = [
(fetchpatch {
# Backport fix to allow compilation with gpsd 3.23.1
# Remove when marble compiles without the patch.
# See: https://invent.kde.org/education/marble/-/merge_requests/57
url = "https://invent.kde.org/education/marble/-/commit/8aadc3eb8f9484a65d497d442cd8c61fe1462bef.diff";
sha256 = "sha256-ZkPXyunVItSRctv6SLGIonvyZwLDhCz+wfJrIXeHcDo=";
})
];
preConfigure = ''
cmakeFlags+=" -DINCLUDE_INSTALL_DIR=''${!outputDev}/include"
'';

View file

@ -0,0 +1,35 @@
{ lib
, rustPlatform
, fetchFromGitHub
, pkg-config
, openssl
, stdenv
, Security
}:
rustPlatform.buildRustPackage rec {
pname = "eureka-ideas";
version = "1.8.1";
src = fetchFromGitHub {
owner = "simeg";
repo = "eureka";
rev = "v${version}";
sha256 = "1qjf8nr7m9igy6h228gm9gnav6pi2rfarbd9bc5fchx4rqy59sp7";
};
cargoSha256 = "sha256-QujrFgliH8Mx1ES9KVl+O9UJP+7GDanQ7+z4QJuSOd0=";
nativeBuildInputs = [ pkg-config ];
buildInputs = [ openssl ] ++ lib.optionals stdenv.isDarwin [ Security ];
meta = with lib; {
description = "CLI tool to input and store your ideas without leaving the terminal";
homepage = "https://github.com/simeg/eureka";
changelog = "https://github.com/simeg/eureka/blob/v${version}/CHANGELOG.md";
license = licenses.mit;
maintainers = with maintainers; [ figsoda ];
mainProgram = "eureka";
};
}

View file

@ -6,8 +6,8 @@ let
srcs = {
foxtrot = fetchbzr {
url = "lp:foxtrotgps";
rev = "329";
sha256 = "0fwgnsrah63h1xdgm5xdi5ancrz89shdp5sdzw1qc1m7i9a03rid";
rev = "331";
sha256 = "sha256-/kJv6a3MzAzzwIl98Mqi7jrUJC1kDvouigf9kGtv868=";
};
screenshots = fetchbzr {
url = "lp:foxtrotgps/screenshots";
@ -17,7 +17,7 @@ let
};
in stdenv.mkDerivation rec {
pname = "foxtrotgps";
version = "1.2.2+329";
version = "1.2.2+331";
# Pull directly from bzr because gpsd API version 9 is not supported on latest release
src = srcs.foxtrot;
@ -39,12 +39,20 @@ in stdenv.mkDerivation rec {
];
postUnpack = ''
cp -R ${srcs.screenshots} $sourceRoot/doc/screenshots
chmod -R u+w $sourceRoot/doc/screenshots
cp -R ${srcs.screenshots} $sourceRoot/doc/screenshots
chmod -R u+w $sourceRoot/doc/screenshots
'';
# Remove when foxtrotgps supports gpsd 3.23.1
# Patch for compatibility with gpsd 3.23.1. This was added for foxtrotgps
# 1.2.2+331. The command can be removed if the build of a newer version
# succeeds without it.
postPatch = ''
substituteInPlace src/gps_functions.c --replace "STATUS_NO_FIX" "STATUS_UNK"
'';
preConfigure = ''
intltoolize --automake --copy --force
intltoolize --automake --copy --force
'';
meta = with lib; {

View file

@ -47,7 +47,7 @@ let
k3sCommit = "3e250fdbab72d88f7e6aae57446023a0567ffc97"; # k3s git commit at the above version
k3sRepoSha256 = "1w7drvk0bmlmqrxh1y6dxjy7dk6bdrl72pkd25lc1ir6wbzb05h9";
traefikChartVersion = "9.18.2"; # taken from ./scripts/download at TRAEFIK_VERSION
traefikChartVersion = "9.18.2"; # taken from ./manifests/traefik.yaml at spec.version
traefikChartSha256 = "sha256-9d7p0ngyMN27u4OPgz7yI14Zj9y36t9o/HMX5wyDpUI=";
k3sRootVersion = "0.9.1"; # taken from ./scripts/download at ROOT_VERSION

View file

@ -10,19 +10,16 @@ cd $(dirname "${BASH_SOURCE[0]}")
LATEST_TAG_RAWFILE=${WORKDIR}/latest_tag.json
curl --silent ${GITHUB_TOKEN:+"-u \":$GITHUB_TOKEN\""} \
https://api.github.com/repos/k3s-io/k3s/releases/latest > ${LATEST_TAG_RAWFILE}
LATEST_TAG_NAME=$(jq -r '.tag_name' ${LATEST_TAG_RAWFILE})
https://api.github.com/repos/k3s-io/k3s/releases > ${LATEST_TAG_RAWFILE}
LATEST_TAG_NAME=$(jq 'map(.tag_name)' ${LATEST_TAG_RAWFILE} | grep -v -e rc -e engine | sed 's/["|,| ]//g' | sort -r | head -n1)
K3S_VERSION=$(echo ${LATEST_TAG_NAME} | sed 's/^v//')
LATEST_TAG_TARBALL_URL=$(jq -r '.tarball_url' ${LATEST_TAG_RAWFILE})
K3S_COMMIT=$(curl --silent ${GITHUB_TOKEN:+"-u \":$GITHUB_TOKEN\""} \
https://api.github.com/repos/k3s-io/k3s/tags \
| jq -r "map(select(.name == \"${LATEST_TAG_NAME}\")) | .[0] | .commit.sha")
K3S_REPO_SHA256=$(nix-prefetch-url --quiet --unpack ${LATEST_TAG_TARBALL_URL})
K3S_REPO_SHA256=$(nix-prefetch-url --quiet --unpack https://github.com/k3s-io/k3s/archive/refs/tags/${LATEST_TAG_NAME}.tar.gz)
FILE_SCRIPTS_DOWNLOAD=${WORKDIR}/scripts-download
curl --silent https://raw.githubusercontent.com/k3s-io/k3s/${K3S_COMMIT}/scripts/download > $FILE_SCRIPTS_DOWNLOAD
@ -30,16 +27,19 @@ curl --silent https://raw.githubusercontent.com/k3s-io/k3s/${K3S_COMMIT}/scripts
FILE_SCRIPTS_VERSION=${WORKDIR}/scripts-version.sh
curl --silent https://raw.githubusercontent.com/k3s-io/k3s/${K3S_COMMIT}/scripts/version.sh > $FILE_SCRIPTS_VERSION
TRAEFIK_CHART_VERSION=$(grep TRAEFIK_VERSION= $FILE_SCRIPTS_DOWNLOAD \
| cut -d'=' -f2 | cut -d' ' -f1)
FILE_MANIFESTS_TRAEFIK=${WORKDIR}/manifests-traefik.yaml
curl --silent https://raw.githubusercontent.com/k3s-io/k3s/${K3S_COMMIT}/manifests/traefik.yaml > $FILE_MANIFESTS_TRAEFIK
TRAEFIK_CHART_VERSION=$(awk -F/ '/traefik-([[:digit:]]+\.)/ {sub(/traefik-/, "", $6) ; sub(/\.tgz/, "", $6); print $6}' $FILE_MANIFESTS_TRAEFIK)
TRAEFIK_CHART_SHA256=$(nix-prefetch-url --quiet "https://helm.traefik.io/traefik/traefik-${TRAEFIK_CHART_VERSION}.tgz")
K3S_ROOT_VERSION=$(grep ROOT_VERSION= $FILE_SCRIPTS_DOWNLOAD \
K3S_ROOT_VERSION=$(grep 'ROOT_VERSION=' ${FILE_SCRIPTS_DOWNLOAD} \
| cut -d'=' -f2 | cut -d' ' -f1 | sed 's/^v//')
K3S_ROOT_SHA256=$(nix-prefetch-url --quiet --unpack \
"https://github.com/k3s-io/k3s-root/releases/download/v${K3S_ROOT_VERSION}/k3s-root-amd64.tar")
CNIPLUGINS_VERSION=$(grep VERSION_CNIPLUGINS= $FILE_SCRIPTS_VERSION \
CNIPLUGINS_VERSION=$(grep 'VERSION_CNIPLUGINS=' ${FILE_SCRIPTS_VERSION} \
| cut -d'=' -f2 | cut -d' ' -f1 | sed -e 's/"//g' -e 's/^v//')
CNIPLUGINS_SHA256=$(nix-prefetch-url --quiet --unpack \
"https://github.com/rancher/plugins/archive/refs/tags/v${CNIPLUGINS_VERSION}.tar.gz")

View file

@ -3,10 +3,10 @@
set -eu -o pipefail
version="$(curl -Ls https://www.bluejeans.com/download | \
pup 'a[aria-label~="Linux"] attr{href}' | \
#output contains *.deb and *.rpm
grep "\.rpm" | \
version="$(curl -Ls https://www.bluejeans.com/downloads | \
pup 'a[href$=".rpm"] attr{href}' | \
# output contains app and events
grep "desktop-app" | \
awk -F'[ ._ ]' '{printf $6"."$7"."$8"."$9"\n"}')"
update-source-version bluejeans-gui "$version"

View file

@ -18,14 +18,12 @@ let
# E.g. "de_DE" -> "de-de" (spellcheckerLanguage -> hunspellDict)
spellLangComponents = splitString "_" spellcheckerLanguage;
hunspellDict = elemAt spellLangComponents 0 + "-" + toLower (elemAt spellLangComponents 1);
in if spellcheckerLanguage != null
then ''
--set HUNSPELL_DICTIONARIES "${hunspellDicts.${hunspellDict}}/share/hunspell" \
--set LC_MESSAGES "${spellcheckerLanguage}"''
else "");
in lib.optionalString (spellcheckerLanguage != null) ''
--set HUNSPELL_DICTIONARIES "${hunspellDicts.${hunspellDict}}/share/hunspell" \
--set LC_MESSAGES "${spellcheckerLanguage}"'');
in stdenv.mkDerivation rec {
pname = "signal-desktop";
version = "5.18.1"; # Please backport all updates to the stable channel.
version = "5.19.0"; # Please backport all updates to the stable channel.
# All releases have a limited lifetime and "expire" 90 days after the release.
# When releases "expire" the application becomes unusable until an update is
# applied. The expiration date for the current release can be extracted with:
@ -35,7 +33,7 @@ in stdenv.mkDerivation rec {
src = fetchurl {
url = "https://updates.signal.org/desktop/apt/pool/main/s/signal-desktop/signal-desktop_${version}_amd64.deb";
sha256 = "0x1wrzxyspghv0hwdh3sw8536c9qi7211d2g5cr3f33kz9db5xp4";
sha256 = "0avns5axcfs8x9sv7hyjxi1cr7gag00avfj0h99wgn251b313g1a";
};
nativeBuildInputs = [

View file

@ -118,7 +118,6 @@ stdenv.mkDerivation rec {
'';
platforms = platforms.all;
hydraPlatforms = platforms.linux;
maintainers = with maintainers; [ peti ] ++ teams.sage.members;
};

View file

@ -27,7 +27,7 @@
}:
let
version = "1.9.0";
version = "1.9.2";
# build stimuli file for PGO build and the script to generate it
# independently of the foot's build, so we can cache the result
@ -36,8 +36,7 @@ let
#
# For every bump, make sure that the hash is still accurate.
stimulusGenerator = stdenv.mkDerivation {
pname = "foot-generate-alt-random-writes";
inherit version;
name = "foot-generate-alt-random-writes";
src = fetchurl {
url = "https://codeberg.org/dnkl/foot/raw/tag/${version}/scripts/generate-alt-random-writes.py";
@ -100,7 +99,7 @@ stdenv.mkDerivation rec {
owner = "dnkl";
repo = pname;
rev = version;
sha256 = "0mkzq5lbgl5qp5nj8sk5gyg9hrrklmbjdqzlcr2a6rlmilkxlhwm";
sha256 = "15h01ijx87i60bdgjjap1ymwlxggsxc6iziykh3bahj8432s1836";
};
depsBuildBuild = [
@ -144,16 +143,15 @@ stdenv.mkDerivation rec {
mesonBuildType = "release";
# See https://codeberg.org/dnkl/foot/src/tag/1.9.2/INSTALL.md#options
mesonFlags = [
# Use lto
"-Db_lto=true"
# Prevent foot from installing its terminfo file into a custom location,
# we need to do this manually in postInstall.
# See https://codeberg.org/dnkl/foot/pulls/673,
# https://codeberg.org/dnkl/foot/src/tag/1.9.0/INSTALL.md#options
"-Dterminfo=disabled"
# “Build” and install terminfo db
"-Dterminfo=enabled"
# Ensure TERM=foot is used
"-Ddefault-terminfo=foot"
# Tell foot what to set TERMINFO to
# Tell foot to set TERMINFO and where to install the terminfo files
"-Dcustom-terminfo-install-location=${terminfoDir}"
];
@ -174,13 +172,6 @@ stdenv.mkDerivation rec {
outputs = [ "out" "terminfo" ];
postInstall = ''
# build and install foot's terminfo to the standard location
# instead of its custom location
mkdir -p "${terminfoDir}"
tic -o "${terminfoDir}" -x -e foot,foot-direct "$NIX_BUILD_TOP/$sourceRoot/foot.info"
'';
passthru.tests = {
clang-default-compilation = foot.override {
inherit (llvmPackages) stdenv;
@ -193,6 +184,13 @@ stdenv.mkDerivation rec {
noPgo = foot.override {
allowPgo = false;
};
# By changing name, this will get rebuilt every time we change version,
# even if the hash stays the same. Consequently it'll fail if we introduce
# a hash mismatch when updating.
stimulus-script-is-current = stimulusGenerator.src.overrideAttrs (_: {
name = "generate-alt-random-writes-${version}.py";
});
};
meta = with lib; {

View file

@ -49,6 +49,7 @@ let
, meta
, buildInputs ? []
, everythingFile ? "./Everything.agda"
, includePaths ? []
, libraryName ? pname
, libraryFile ? "${libraryName}.agda-lib"
, buildPhase ? null
@ -57,6 +58,7 @@ let
, ...
}: let
agdaWithArgs = withPackages (builtins.filter (p: p ? isAgdaDerivation) buildInputs);
includePathArgs = concatMapStrings (path: "-i" + path + " ") (includePaths ++ [(dirOf everythingFile)]);
in
{
inherit libraryName libraryFile;
@ -67,7 +69,7 @@ let
buildPhase = if buildPhase != null then buildPhase else ''
runHook preBuild
agda -i ${dirOf everythingFile} ${everythingFile}
agda ${includePathArgs} ${everythingFile}
runHook postBuild
'';

View file

@ -15,8 +15,9 @@
"--remap-path-prefix=$NIX_BUILD_TOP=/"
(mkRustcDepArgs dependencies crateRenames)
(mkRustcFeatureArgs crateFeatures)
] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
"--target" (rust.toRustTargetSpec stdenv.hostPlatform)
] ++ extraRustcOpts
++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "--target ${rust.toRustTargetSpec stdenv.hostPlatform} -C linker=${stdenv.hostPlatform.config}-gcc"
# since rustc 1.42 the "proc_macro" crate is part of the default crate prelude
# https://github.com/rust-lang/cargo/commit/4d64eb99a4#diff-7f98585dbf9d30aa100c8318e2c77e79R1021-R1022
++ lib.optional (lib.elem "proc-macro" crateType) "--extern proc_macro"

View file

@ -13,7 +13,7 @@
, crateRenames
, crateVersion
, extraLinkFlags
, extraRustcOpts
, extraRustcOptsForBuildRs
, libName
, libPath
, release
@ -24,7 +24,7 @@ let version_ = lib.splitString "-" crateVersion;
version = lib.splitVersion (lib.head version_);
rustcOpts = lib.foldl' (opts: opt: opts + " " + opt)
(if release then "-C opt-level=3" else "-C debuginfo=2")
(["-C codegen-units=$NIX_BUILD_CORES"] ++ extraRustcOpts);
(["-C codegen-units=$NIX_BUILD_CORES"] ++ extraRustcOptsForBuildRs);
buildDeps = mkRustcDepArgs buildDependencies crateRenames;
authors = lib.concatStringsSep ":" crateAuthors;
optLevel = if release then 3 else 0;

View file

@ -172,6 +172,11 @@ crate_: lib.makeOverridable
# Example: [ "-Z debuginfo=2" ]
# Default: []
, extraRustcOpts
# A list of extra options to pass to rustc when building a build.rs.
#
# Example: [ "-Z debuginfo=2" ]
# Default: []
, extraRustcOptsForBuildRs
# Whether to enable building tests.
# Use true to enable.
# Default: false
@ -228,6 +233,7 @@ crate_: lib.makeOverridable
nativeBuildInputs_ = nativeBuildInputs;
buildInputs_ = buildInputs;
extraRustcOpts_ = extraRustcOpts;
extraRustcOptsForBuildRs_ = extraRustcOptsForBuildRs;
buildTests_ = buildTests;
# crate2nix has a hack for the old bash based build script that did split
@ -308,12 +314,16 @@ crate_: lib.makeOverridable
lib.optionals (crate ? extraRustcOpts) crate.extraRustcOpts
++ extraRustcOpts_
++ (lib.optional (edition != null) "--edition ${edition}");
extraRustcOptsForBuildRs =
lib.optionals (crate ? extraRustcOptsForBuildRs) crate.extraRustcOptsForBuildRs
++ extraRustcOptsForBuildRs_
++ (lib.optional (edition != null) "--edition ${edition}");
configurePhase = configureCrate {
inherit crateName buildDependencies completeDeps completeBuildDeps crateDescription
crateFeatures crateRenames libName build workspace_member release libPath crateVersion
extraLinkFlags extraRustcOpts
extraLinkFlags extraRustcOptsForBuildRs
crateAuthors crateHomepage verbose colors;
};
buildPhase = buildCrate {
@ -337,6 +347,7 @@ crate_: lib.makeOverridable
release = crate_.release or true;
verbose = crate_.verbose or true;
extraRustcOpts = [ ];
extraRustcOptsForBuildRs = [ ];
features = [ ];
nativeBuildInputs = [ ];
buildInputs = [ ];

View file

@ -15,6 +15,7 @@
, dbus
, polkit
, switchboard
, wingpanel-indicator-power
}:
stdenv.mkDerivation rec {
@ -51,6 +52,7 @@ stdenv.mkDerivation rec {
libgee
polkit
switchboard
wingpanel-indicator-power # settings schema
];
meta = with lib; {

View file

@ -1,4 +1,4 @@
import ./common.nix {
version = "2.1.2";
sha256 = "sha256-t3EFUJOYVe1JWYxKAUSD7RILaZFliio7avpHcT3OTAs=";
version = "2.0.8";
sha256 = "1xwrwvps7drrpyw3wg5h3g2qajmkwqs9gz0fdw1ns9adp7vld390";
}

View file

@ -94,8 +94,6 @@ self: super: builtins.intersectAttrs super {
# Won't find it's header files without help.
sfml-audio = appendConfigureFlag super.sfml-audio "--extra-include-dirs=${pkgs.openal}/include/AL";
hercules-ci-agent = disableLibraryProfiling super.hercules-ci-agent;
# avoid compiling twice by providing executable as a separate output (with small closure size)
niv = enableSeparateBinOutput super.niv;
ormolu = enableSeparateBinOutput super.ormolu;


@ -11,20 +11,21 @@ let
name = "cbqn-bytecode-files";
owner = "dzaima";
repo = "CBQN";
rev = "94bb312d20919f942eabed3dca33c514de3c3227";
hash = "sha256-aFw5/F7/sYkYmxAnGeK8EwkoVrbEcjuJAD9YT+iW9Rw=";
rev = "4d23479cdbd5ac6eb512c376ade58077b814b2b7";
hash = "sha256-MTvg4lOB26bqvJTqV71p4Y4qDjTYaOE40Jk4Sle/hsY=";
};
in
assert genBytecode -> ((bqn-path != null) && (mbqn-source != null));
stdenv.mkDerivation rec {
pname = "cbqn" + lib.optionalString (!genBytecode) "-standalone";
version = "0.0.0+unstable=2021-10-01";
version = "0.pre+unstable=2021-10-05";
src = fetchFromGitHub {
owner = "dzaima";
repo = "CBQN";
rev = "3725bd58c758a749653080319766a33169551536";
hash = "sha256-xWp64inFZRqGGTrH6Hqbj7aA0vYPyd+FdetowTMTjPs=";
rev = "e23dab20daff9c0dacc2561c616174af72029a3e";
hash = "sha256-amVKKD9hD5A+LbqglXHLKEsYqFSSztdXs1FCoNJyCJ4=";
};
dontConfigure = true;
@ -34,6 +35,9 @@ stdenv.mkDerivation rec {
'';
preBuild = ''
# otherwise cbqn defaults to clang
makeFlagsArray+=("CC=$CC")
# inform make we are providing the runtime ourselves
touch src/gen/customRuntime
'' + (if genBytecode then ''
@ -42,10 +46,6 @@ stdenv.mkDerivation rec {
cp ${cbqn-bytecode-files}/src/gen/{compiler,formatter,runtime0,runtime1,src} src/gen/
'');
makeFlags = [
"CC=${stdenv.cc.targetPrefix}cc"
];
installPhase = ''
runHook preInstall
@ -63,8 +63,7 @@ stdenv.mkDerivation rec {
license = licenses.gpl3Plus;
maintainers = with maintainers; [ AndersonTorres sternenseemann synthetica ];
platforms = platforms.all;
priority = if genBytecode then 0 else 10;
};
}
# TODO: factor and version cbqn-bytecode-files
# TODO: version cbqn-bytecode-files
# TODO: test suite
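For orientation, a sketch of how the two variants of this derivation are typically wired together (attribute names and paths are assumptions, not part of this change); the assert above requires bqn-path and mbqn-source whenever genBytecode is set:

# hypothetical wiring in the package set
cbqn-bootstrap = callPackage ./cbqn { genBytecode = false; };
cbqn = callPackage ./cbqn {
  genBytecode = true;
  bqn-path = "${cbqn-bootstrap}/bin/BQN";  # assumed binary name
  mbqn-source = mbqn.src;                  # assumed handle on the mlochbaum/BQN sources
};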


@ -8,32 +8,30 @@
stdenv.mkDerivation rec {
pname = "dbqn" + lib.optionalString buildNativeImage "-native";
version = "0.0.0+unstable=2021-10-02";
version = "0.pre+unstable=2021-10-05";
src = fetchFromGitHub {
owner = "dzaima";
repo = "BQN";
rev = "d6bd66d26a89b8e9f956ec4f6b6bc5dcb5861a09";
hash = "sha256-BLRep7OGHfDFowIAsBS19PTzgIhrdKMnO2JSjKuwGYo=";
rev = "c31ceef52bbf380e747723f5ffd09c5f006b21c5";
sha256 = "1nzqgwpjawcky85mfrz5izs9lfb3aqlm96dc8syrxhgg20xrziwx";
};
buildInputs = lib.optional (!buildNativeImage) jdk;
nativeBuildInputs = [
makeWrapper
] ++ lib.optional buildNativeImage jdk;
jdk
];
dontConfigure = true;
buildPhase = ''
runHook preBuild
mkdir -p output
javac --release 8 -encoding UTF-8 -d ./output $(find src -name '*.java')
(cd output; jar cvfe ../BQN.jar BQN.Main *)
rm -fr output
patchShebangs --build ./build8
./build8
'' + lib.optionalString buildNativeImage ''
native-image --report-unsupported-elements-at-runtime \
-H:CLibraryPath=${lib.getLib jdk}/lib \
-J-Dfile.encoding=UTF-8 -jar BQN.jar dbqn
'' + ''
runHook postBuild
@ -64,7 +62,6 @@ stdenv.mkDerivation rec {
license = licenses.mit;
maintainers = with maintainers; [ AndersonTorres sternenseemann ];
inherit (jdk.meta) platforms;
priority = if buildNativeImage then 10 else 0;
};
}
# TODO: Processing app


@ -7,13 +7,13 @@
stdenvNoCC.mkDerivation rec {
pname = "bqn";
version = "0.0.0+unstable=2021-10-01";
version = "0.pre+unstable=2021-10-06";
src = fetchFromGitHub {
owner = "mlochbaum";
repo = "BQN";
rev = "b3d68f730d48ccb5e3b3255f9010c95bf9f86e22";
hash = "sha256-Tkgwz7+d25svmjRsXFUQq0S/73QJU+BKSNeGqpUcBTQ=";
rev = "2ce2dc40702431ef3d3ffece9e2f6f8b883ac6c5";
hash = "sha256-bvXKOaBlddG6O0GbmtqU9prklqmOOvlbXuCUaFO+j0M=";
};
nativeBuildInputs = [ makeWrapper ];
@ -21,7 +21,7 @@ stdenvNoCC.mkDerivation rec {
buildInputs = [ nodejs ];
patches = [
# Creates a @libbqn@ substitution variable
# Creates a @libbqn@ substitution variable, to be filled in the fixupPhase
./001-libbqn-path.patch
];


@ -1,14 +1,14 @@
{ lib, mkDerivation, fetchFromGitHub }:
mkDerivation rec {
version = "compat-2.6.1";
version = "compat-2.6.2";
pname = "agda-prelude";
src = fetchFromGitHub {
owner = "UlfNorell";
repo = "agda-prelude";
rev = version;
sha256 = "128rbhd32qlq2nq3wgqni4ih58zzwvs9pkn9j8236ycxxp6x81sl";
sha256 = "0j2nip5fbn61fpkm3qz4dlazl4mzdv7qlgw9zm15bkcvaila0h14";
};
preConfigure = ''
@ -19,8 +19,6 @@ mkDerivation rec {
'';
meta = with lib; {
# Remove if a version compatible with agda 2.6.2 is made
broken = true;
homepage = "https://github.com/UlfNorell/agda-prelude";
description = "Programming library for Agda";
license = lib.licenses.mit;


@ -0,0 +1,28 @@
{ lib, mkDerivation, fetchFromGitHub
, standard-library }:
mkDerivation rec {
pname = "agdarsec";
version = "0.4.1";
src = fetchFromGitHub {
owner = "gallais";
repo = "agdarsec";
rev = "v${version}";
sha256 = "02fqkycvicw6m2xsz8p01aq8n3gj2d2gyx8sgj15l46f8434fy0x";
};
everythingFile = "./index.agda";
includePaths = [ "src" "examples" ];
buildInputs = [ standard-library ];
meta = with lib; {
homepage = "https://gallais.github.io/agdarsec/";
description = "Total Parser Combinators in Agda";
license = licenses.gpl3;
platforms = platforms.unix;
maintainers = with maintainers; [ turion ];
};
}


@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "libnsl";
version = "1.3.0";
version = "2.0.0";
src = fetchFromGitHub {
owner = "thkukuk";
repo = pname;
rev = "v${version}";
sha256 = "1dayj5i4bh65gn7zkciacnwv2a0ghm6nn58d78rsi4zby4lyj5w5";
sha256 = "sha256-f9kNzzR8baf5mLgrh+bKO/rBRZA5ZYc1tJdyLE7Bi1w=";
};
nativeBuildInputs = [ autoreconfHook pkg-config ];


@ -90,7 +90,7 @@ in buildPythonPackage rec {
pythonImportsCheck = [ "theano" ];
meta = with lib; {
homepage = "http://deeplearning.net/software/theano/";
homepage = "https://github.com/Theano/Theano";
description = "A Python library for large-scale array computation";
license = licenses.bsd3;
maintainers = with maintainers; [ maintainers.bcdarwin ];


@ -11,14 +11,14 @@
buildPythonPackage rec {
pname = "aiodiscover";
version = "1.4.2";
version = "1.4.4";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "bdraco";
repo = pname;
rev = "v${version}";
sha256 = "sha256-xiIN/YLIOdPuqenyxybu0iUpYEy3MyBssXswza5InU0=";
sha256 = "sha256-DobTx6oUr25J8bolo84V4yTT0b0jBsOIzPn93uAmDl0=";
};
propagatedBuildInputs = [


@ -12,14 +12,14 @@
buildPythonPackage rec {
pname = "aiohomekit";
version = "0.6.2";
version = "0.6.3";
format = "pyproject";
src = fetchFromGitHub {
owner = "Jc2k";
repo = pname;
rev = version;
sha256 = "16lfav83g12vzs3ssfva7chcqqb7xdx54djwfwyn9xcwfaa7cwhw";
sha256 = "sha256-XBinbhYUB9BuQxxmWfZUw276uNam4DgBpiCAjT7KDlg=";
};
nativeBuildInputs = [


@ -6,11 +6,11 @@
buildPythonPackage rec {
pname = "aiohue";
version = "2.6.1";
version = "2.6.3";
src = fetchPypi {
inherit pname version;
sha256 = "0101bw2n6vd3c0p323qqr61wwraja48xbrwcw5sn7i5sa3ygfx0k";
sha256 = "sha256-zpwkDKPrE5TFZQO0A1ifTQ7n+TRFpXi3jai3h5plyGM=";
};
propagatedBuildInputs = [


@ -7,13 +7,13 @@
buildPythonPackage rec {
pname = "aioshelly";
version = "0.6.4";
version = "1.0.2";
src = fetchFromGitHub {
owner = "home-assistant-libs";
repo = pname;
rev = version;
sha256 = "sha256-QRCqkaKhPQQjNt9mw8nlTB5YKLmIZbXfrxarb3Ksr5k=";
sha256 = "sha256-STJ9BDVbvlIMvKMiGwkGZ9Z32NvlE+3cyYduYlwTbx4=";
};
propagatedBuildInputs = [


@ -0,0 +1,39 @@
{ lib
, aiohttp
, async-timeout
, buildPythonPackage
, fetchFromGitHub
, pythonOlder
}:
buildPythonPackage rec {
pname = "airthings";
version = "0.0.1";
format = "setuptools";
disabled = pythonOlder "3.8";
src = fetchFromGitHub {
owner = "Danielhiversen";
repo = "pyAirthings";
rev = version;
sha256 = "08cbysx5p9k8hzr6sdykx91j0gx8x15b8807338dsl3qx8nhfb8j";
};
propagatedBuildInputs = [
aiohttp
async-timeout
];
# Project has no tests
doCheck = false;
pythonImportsCheck = [ "airthings" ];
meta = with lib; {
description = "Python module for Airthings";
homepage = "https://github.com/Danielhiversen/pyAirthings";
license = with licenses; [ mit ];
maintainers = with maintainers; [ fab ];
};
}


@ -17,13 +17,13 @@
buildPythonPackage rec {
pname = "bellows";
version = "0.27.0";
version = "0.28.0";
src = fetchFromGitHub {
owner = "zigpy";
repo = "bellows";
rev = version;
sha256 = "sha256-lsGpCd4XgwP91JmRpV6ohXefd1Hm9C51Jk4shU6Irkw=";
sha256 = "sha256-j1vS6PDvvuJapECn0lKGuBkYwWsyzJaTZDRQPjMsuLk=";
};
propagatedBuildInputs = [


@ -8,23 +8,27 @@
buildPythonPackage rec {
pname = "ciso8601";
version = "2.1.3";
version = "2.2.0";
src = fetchFromGitHub {
owner = "closeio";
repo = "ciso8601";
rev = "v${version}";
sha256 = "0g1aiyc1ayh0rnibyy416m5mmck38ksgdm3jsy0z3rxgmgb24951";
sha256 = "sha256-TqB1tQDgCkXu+QuzP6yBEH/xHxhhD/kGR2S0I8Osc5E=";
};
checkInputs = [
pytz
] ++ lib.optional (isPy27) unittest2;
] ++ lib.optionals (isPy27) [
unittest2
];
pythonImportsCheck = [ "ciso8601" ];
meta = with lib; {
description = "Fast ISO8601 date time parser for Python written in C";
homepage = "https://github.com/closeio/ciso8601";
license = licenses.mit;
maintainers = [ maintainers.mic92 ];
maintainers = with maintainers; [ mic92 ];
};
}
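As an aside on the helpers used above: lib.optional wraps a single element in a list, while lib.optionals passes a list through, so the latter is the idiomatic choice when the guarded value is itself a list. A tiny sketch:

lib.optional  true "x"          # => [ "x" ]
lib.optionals true [ "x" "y" ]  # => [ "x" "y" ]
lib.optional  false "x"         # => [ ]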


@ -1,9 +1,7 @@
{ lib
, buildPythonPackage
, fetchPypi
, fetchpatch
, isPy27
, future
, pythonOlder
, h5py
, ipython
, numba
@ -15,25 +13,16 @@
buildPythonPackage rec {
pname = "clifford";
version = "1.3.1";
disabled = isPy27;
version = "1.4.0";
disabled = pythonOlder "3.5";
src = fetchPypi {
inherit pname version;
sha256 = "ade11b20d0631dfc9c2f18ce0149f1e61e4baf114108b27cfd68e5c1619ecc0c";
sha256 = "sha256-eVE8FrD0YHoRreY9CrNb8v4v4KrG83ZU0oFz+V+p+Q0=";
};
patches = [
(fetchpatch {
# Compatibility with h5py 3.
# Will be included in the next releasse after 1.3.1
url = "https://github.com/pygae/clifford/pull/388/commits/955d141662c68d3d61aa50a162b39e656684c208.patch";
sha256 = "0pkpwnk0kfdxsbzsxqlqh8kgif17l5has0mg31g3kyp8lncj89b1";
})
];
propagatedBuildInputs = [
future
h5py
numba
numpy
@ -55,15 +44,24 @@ buildPythonPackage rec {
"veryslow"
"test_algebra_initialisation"
"test_cga"
"test_estimate_rotor_sequential[random_sphere]"
"test_grade_projection"
"test_multiple_grade_projection"
"test_inverse"
"test_inv_g4"
];
disabledTestPaths = [
# Disable failing tests
"test_g3c_tools.py"
"test_multivector_inverse.py"
];
pythonImportsCheck = [ "clifford" ];
meta = with lib; {
description = "Numerical Geometric Algebra Module";
homepage = "https://clifford.readthedocs.io";
license = licenses.bsd3;
maintainers = [ maintainers.costrouc ];
# many TypeErrors in tests
broken = true;
maintainers = with maintainers; [ costrouc ];
};
}


@ -1,10 +1,11 @@
{ lib
, buildPythonPackage
, fetchPypi
, dask
, distributed
, docrep
, pytest
, fetchPypi
, pytest-asyncio
, pytestCheckHook
}:
buildPythonPackage rec {
@ -16,19 +17,33 @@ buildPythonPackage rec {
sha256 = "682d7cc0e6b319b6ab83a7a898680c12e9c77ddc77df380b40041290f55d4e79";
};
checkInputs = [ pytest ];
propagatedBuildInputs = [ dask distributed docrep ];
propagatedBuildInputs = [
dask
distributed
docrep
];
# do not run the entire test suite (requires slurm, sge, etc.)
checkPhase = ''
py.test dask_jobqueue/tests/test_jobqueue_core.py
'';
checkInputs = [
pytest-asyncio
pytestCheckHook
];
pytestFlagsArray = [
# Do not run the entire test suite (requires slurm, sge, etc.)
"dask_jobqueue/tests/test_jobqueue_core.py"
];
disabledTests = [
"test_import_scheduler_options_from_config"
"test_security"
];
pythonImportsCheck = [ "dask_jobqueue" ];
meta = with lib; {
homepage = "https://github.com/dask/dask-jobqueue";
description = "Deploy Dask on job schedulers like PBS, SLURM, and SGE";
license = licenses.bsd3;
maintainers = [ maintainers.costrouc ];
broken = true;
maintainers = with maintainers; [ costrouc ];
};
}


@ -22,7 +22,7 @@
buildPythonPackage rec {
pname = "dask";
version = "2021.09.0";
version = "2021.09.1";
format = "setuptools";
disabled = pythonOlder "3.7";
@ -31,7 +31,7 @@ buildPythonPackage rec {
owner = "dask";
repo = pname;
rev = version;
sha256 = "sha256-Gb6eQ5Hebx3mBNGvgB5yvM4dPsIxJl9ka++yYC/Zf7Q=";
sha256 = "sha256-+UkbXbWV5R/QtVb5rWm/5SA+IoWsIfBciL3vg138jkc=";
};
propagatedBuildInputs = [


@ -0,0 +1,31 @@
{ lib
, python
, buildPythonPackage
, fetchPypi
, pythonOlder
}:
buildPythonPackage rec {
pname = "demjson3";
version = "3.0.5";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
sha256 = "103dc4pzwg8791q3zll1vv4gcc17d9v3jvr9zj23cpv9hpfsp6mb";
};
checkPhase = ''
${python.interpreter} test/test_demjson3.py
'';
pythonImportsCheck = [ "demjson3" ];
meta = with lib; {
description = "Encoder/decoder and lint/validator for JSON (JavaScript Object Notation)";
homepage = "https://github.com/nielstron/demjson3/";
license = licenses.lgpl3Plus;
maintainers = with maintainers; [ fab ];
};
}


@ -19,13 +19,13 @@
buildPythonPackage rec {
pname = "distributed";
version = "2021.9.0";
version = "2021.9.1";
disabled = pythonOlder "3.6";
# the full repository (with conftest.py) is needed to run the tests
src = fetchPypi {
inherit pname version;
sha256 = "sha256-IiKc0rJYODCtGC9AAOkjbww/VG7PdfrqJ32IHU9xWbo=";
sha256 = "sha256-9N65ap2+9bBK0DCrkF3+1xuJPXmjaL1Xh7ISaLTtX/g=";
};
propagatedBuildInputs = [


@ -14,14 +14,14 @@
buildPythonPackage rec {
pname = "fsspec";
version = "2021.08.1";
version = "2021.10.0";
disabled = pythonOlder "3.6";
src = fetchFromGitHub {
owner = "intake";
repo = "filesystem_spec";
rev = version;
sha256 = "0xxzcp69div1sy975x82k754snbsksyqr73h6jiasdxj8wka49s0";
sha256 = "sha256-zvOSenK63jFC9vMLsuZT8P9NCXGdkYAB5AxvptROKes=";
};
propagatedBuildInputs = [


@ -17,14 +17,14 @@
buildPythonPackage rec {
pname = "gcsfs";
version = "2021.08.1";
version = "2021.10.0";
disabled = pythonOlder "3.6";
src = fetchFromGitHub {
owner = "dask";
repo = pname;
rev = version;
sha256 = "sha256-SPQcSdEEbU791oqkvuwmvyvQ6HglvoWKMi5SdnRcEZI=";
sha256 = "sha256-GDVIENtNpo8cg7pplOgoDMVguZmxoUUSs860WNfhmfM=";
};
propagatedBuildInputs = [


@ -41,7 +41,7 @@ buildPythonPackage rec {
disabledTests = [ "gridplot_outputs" ];
meta = with lib; {
homepage = "https://graspy.neurodata.io";
homepage = "https://graspologic.readthedocs.io";
description = "A package for graph statistical algorithms";
license = licenses.asl20; # changing to `licenses.mit` in next release
maintainers = with maintainers; [ bcdarwin ];


@ -0,0 +1,36 @@
{ lib
, buildPythonPackage
, fetchPypi
, protobuf
, pythonOlder
}:
buildPythonPackage rec {
pname = "gtfs-realtime-bindings";
version = "0.0.7";
format = "setuptools";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
sha256 = "1vav7ah6gpkpi44rk202bwpl345rydg6n9zibzx5p7gcsblcwd45";
extension = "zip";
};
propagatedBuildInputs = [
protobuf
];
# Tests are not shipped, only a tarball for Java is present
doCheck = false;
pythonImportsCheck = [ "google.transit" ];
meta = with lib; {
description = "Python bindings generated from the GTFS Realtime protocol buffer spec";
homepage = "https://github.com/andystewart999/TransportNSW";
license = with licenses; [ asl20 ];
maintainers = with maintainers; [ fab ];
};
}

View file

@ -15,13 +15,13 @@
buildPythonPackage rec {
pname = "hass-nabucasa";
version = "0.46.0";
version = "0.50.0";
src = fetchFromGitHub {
owner = "nabucasa";
repo = pname;
rev = version;
sha256 = "109ma1qlhifj5hs530zfnvc6mqv5grfmcq3s57wawq9nzq0gpfy8";
sha256 = "sha256-0E8eiHzqbxHbtAd97MbvFMRDWTu25E9x/44oNGC4mUM=";
};
propagatedBuildInputs = [


@ -0,0 +1,52 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, fetchurl
, pytestCheckHook
, python
}:
let
table = fetchurl {
# See https://github.com/dahlia/iso4217/blob/main/setup.py#L18
url = "http://www.currency-iso.org/dam/downloads/lists/list_one.xml";
sha256 = "0frhicc7s8gqglr41hzx61fic3ckvr4sg773ahp1s28n5by3y7ac";
};
in
buildPythonPackage rec {
pname = "iso4217";
version = "1.6";
src = fetchFromGitHub {
owner = "dahlia";
repo = pname;
rev = version;
sha256 = "0mdpf5a0xr5lrcfgvqi1sdn7ln2w6pkc3lg0laqkbx5mhxky0fla";
};
checkInputs = [
pytestCheckHook
];
preBuild = ''
# The table is already downloaded
export ISO4217_DOWNLOAD=0
# Copy the table file to satisfy the build process
cp -r ${table} $pname/table.xml
'';
postInstall = ''
# Copy the table file
cp -r ${table} $out/${python.sitePackages}/$pname/table.xml
'';
pytestFlagsArray = [ "$pname/test.py" ];
pythonImportsCheck = [ "iso4217" ];
meta = with lib; {
description = "ISO 4217 currency data package for Python";
homepage = "https://github.com/dahlia/iso4217";
license = with licenses; [ publicDomain ];
maintainers = with maintainers; [ fab ];
};
}

View file

@ -8,12 +8,12 @@
buildPythonPackage rec {
pname = "mypy-boto3-s3";
version = "1.18.54";
version = "1.18.56";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
sha256 = "fdbb7ff1687fba8e7ac521502cb2d2ca4b845e1a331f5b4fe55aff7c17e1f985";
sha256 = "61c74253cb77a0734970703d58a49e29624cec76d97da31fa912faf6f6d3347b";
};
propagatedBuildInputs = [


@ -2,19 +2,24 @@
buildPythonPackage rec {
pname = "netdisco";
version = "2.9.0";
version = "3.0.0";
disabled = !isPy3k;
src = fetchPypi {
inherit pname version;
sha256 = "sha256-OpLFM+0ZmhggJ1SuLoSO+qWLcKcpS65sd7u2zkzPys4=";
sha256 = "sha256-TbtZBILzd8zEYeAXQnB8y+jx0tGyhXivkdybf+vNy9I=";
};
propagatedBuildInputs = [ requests zeroconf ];
checkInputs = [ pytestCheckHook ];
disabledTestPaths = [
# Broken due to removed discoverables in https://github.com/home-assistant-libs/netdisco/commit/477db5a1dc93919a6c5bd61b4b1d3c80e75785bd
"tests/test_xboxone.py"
];
pythonImportsCheck = [
"netdisco"
"netdisco.discovery"


@ -34,7 +34,13 @@ buildPythonPackage rec {
owner = "jbarlow83";
repo = "OCRmyPDF";
rev = "v${version}";
sha256 = "sha256-gFlQztrRN69HtR6sTJl8tryuTibxQrz97QcS5UkFOVs=";
# The content of .git_archival.txt is substituted upon tarball creation,
# which creates indeterminism if master no longer points to the tag.
# See https://github.com/jbarlow83/OCRmyPDF/issues/841
extraPostFetch = ''
rm "$out/.git_archival.txt"
'';
sha256 = "0zw7c6l9fkf128gxsbd7v4abazlxiygqys6627jpsjbmxg5jgp5w";
};
SETUPTOOLS_SCM_PRETEND_VERSION = version;
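The comment above describes a reusable fetchFromGitHub pattern; assembled in one piece it looks roughly like this (owner, repo and hash are placeholders), assuming the fetcher forwards extraPostFetch to the underlying fetchzip as it does here:

src = fetchFromGitHub {
  owner = "example-owner";   # placeholder
  repo = "example-repo";     # placeholder
  rev = "v${version}";
  # .git_archival.txt is rewritten via git's export-subst attribute when the
  # tarball is generated, so its contents drift as upstream history moves;
  # deleting it keeps the fixed-output hash stable.
  extraPostFetch = ''
    rm "$out/.git_archival.txt"
  '';
  sha256 = lib.fakeSha256;   # placeholder; substitute the real hash
};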


@ -24,12 +24,12 @@
buildPythonPackage rec {
pname = "pikepdf";
version = "3.1.0";
version = "3.1.1";
disabled = ! isPy3k;
src = fetchPypi {
inherit pname version;
sha256 = "aeb813b5f36534d2bedf08487ab2b022c43f4c8a3e86e611c5f7c8fb97309db5";
sha256 = "sha256-klSUszWsIIz7o0/Ql8K4CWYujBH0mAbqyUcabpn1SkQ=";
};
patches = [


@ -13,13 +13,13 @@
buildPythonPackage rec {
pname = "pubnub";
version = "5.3.1";
version = "5.4.0";
src = fetchFromGitHub {
owner = pname;
repo = "python";
rev = "v${version}";
sha256 = "0fykqr0agdlrhsy2s4yzadyslyjlhgr9iyj2f7s8hz9j400dhj3h";
sha256 = "sha256-FyDsTqDQTI/Xxu4Sl4eHqwmgwN+ip+8WKGJs/h/kl2Y=";
};
propagatedBuildInputs = [


@ -1,35 +1,37 @@
{ lib
, buildPythonPackage
, pythonOlder
, fetchFromGitHub
, aiohttp
, oauthlib
, requests
, requests_oauthlib
, buildPythonPackage
, fetchFromGitHub
, freezegun
, oauthlib
, pytest-asyncio
, pytest-mock
, pytestCheckHook
, pythonOlder
, requests
, requests_oauthlib
, requests-mock
, setuptools-scm
}:
buildPythonPackage rec {
pname = "pyatmo";
version = "5.2.3";
disabled = pythonOlder "3.7";
version = "6.1.0";
disabled = pythonOlder "3.8";
src = fetchFromGitHub {
owner = "jabesq";
repo = "pyatmo";
rev = "v${version}";
sha256 = "1w9rhh85z9m3c4rbz6zxlrxglsm5sk5d6796dsj1p1l3b3ad476z";
sha256 = "sha256-Iscnv3hfYa8QFiXMUN334Muo0oGqnnK11RPNxQJggG0=";
};
postPatch = ''
substituteInPlace setup.cfg \
--replace "oauthlib~=3.1" "oauthlib" \
--replace "requests~=2.24" "requests"
'';
SETUPTOOLS_SCM_PRETEND_VERSION = version;
nativeBuildInputs = [
setuptools-scm
];
propagatedBuildInputs = [
aiohttp
@ -46,12 +48,18 @@ buildPythonPackage rec {
requests-mock
];
postPatch = ''
substituteInPlace setup.cfg \
--replace "oauthlib~=3.1" "oauthlib" \
--replace "requests~=2.24" "requests"
'';
pythonImportsCheck = [ "pyatmo" ];
meta = with lib; {
description = "Simple API to access Netatmo weather station data";
homepage = "https://github.com/jabesq/pyatmo";
license = licenses.mit;
homepage = "https://github.com/jabesq/netatmo-api-python";
maintainers = with maintainers; [ delroth ];
};
}
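SETUPTOOLS_SCM_PRETEND_VERSION, used above and in several other updates in this commit, is needed because a GitHub source tarball carries no .git metadata, so setuptools-scm cannot compute a version at build time. A minimal sketch of the pattern (package and owner names are hypothetical):

buildPythonPackage rec {
  pname = "example-pkg";            # hypothetical
  version = "1.2.3";
  src = fetchFromGitHub {
    owner = "example-owner";        # hypothetical
    repo = "example-pkg";
    rev = "v${version}";
    sha256 = lib.fakeSha256;        # placeholder
  };
  nativeBuildInputs = [ setuptools-scm ];
  # pin the version that setuptools-scm would otherwise try to read from git
  SETUPTOOLS_SCM_PRETEND_VERSION = version;
}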


@ -11,7 +11,7 @@
buildPythonPackage rec {
pname = "pycarwings2";
version = "2.11";
version = "2.12";
format = "setuptools";
disabled = pythonOlder "3.5";
@ -19,8 +19,9 @@ buildPythonPackage rec {
src = fetchFromGitHub {
owner = "filcole";
repo = pname;
rev = "v${version}";
sha256 = "0daqxnic7kphspqqq8a0bjp009l5a7d1k72q6cz43g7ca6wfq4b1";
# release not tagged: https://github.com/filcole/pycarwings2/issues/33
rev = "0dc9e7e74cb119614c72c7f955801a366f303c56";
sha256 = "sha256-3lyAgLuaNrCDvRT2yYkgaDiLPKW9Hbg05cQlMIBUs6o=";
};
propagatedBuildInputs = [


@ -10,14 +10,14 @@
buildPythonPackage rec {
pname = "pydeconz";
version = "83";
version = "84";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "Kane610";
repo = "deconz";
rev = "v${version}";
sha256 = "0azpdgmfby8plsp22hy1ip9vzbnmvf9brmah7hcwkpypg31rb61y";
sha256 = "sha256-SVWz6r5UiAS7gCpkgN2Swy8dAon26XY9JZucV/eE0t8=";
};
propagatedBuildInputs = [


@ -0,0 +1,42 @@
{ lib
, aiohttp
, buildPythonPackage
, fetchFromGitHub
, iso4217
, pytestCheckHook
, pythonOlder
, pytz
}:
buildPythonPackage rec {
pname = "pyefergy";
version = "0.1.0";
format = "setuptools";
disabled = pythonOlder "3.8";
src = fetchFromGitHub {
owner = "tkdrob";
repo = pname;
rev = version;
sha256 = "0nm7dc5q4wvdpqxpirlc4nwm68lf3n2df6j5yy4m8wr294yb7a1k";
};
propagatedBuildInputs = [
aiohttp
iso4217
pytz
];
# Project has no tests
doCheck = false;
pythonImportsCheck = [ "pyefergy" ];
meta = with lib; {
description = "Python API library for Efergy energy meters";
homepage = "https://github.com/tkdrob/pyefergy";
license = with licenses; [ mit ];
maintainers = with maintainers; [ fab ];
};
}


@ -6,13 +6,13 @@
buildPythonPackage rec {
pname = "pypinyin";
version = "0.42.0";
version = "0.43.0";
src = fetchFromGitHub {
owner = "mozillazg";
repo = "python-pinyin";
rev = "v${version}";
sha256 = "0i0ggizkgd809ylz74j1v5lfpyifz3wypj6f8l8fr5ad7a7r9s09";
sha256 = "0h3lpb8bw9zp8is5sx2zg931wz12x0zfan1kksnbhx16vwv1kgw3";
};
postPatch = ''


@ -7,13 +7,13 @@
buildPythonPackage rec {
pname = "pypoint";
version = "2.1.0";
version = "2.2.0";
src = fetchFromGitHub {
owner = "fredrike";
repo = "pypoint";
rev = "v${version}";
sha256 = "13p68d2qxfj31lfjv94wzpigjfgjw03yjpl2h16zgxbka2k8zf3x";
sha256 = "sha256-2PKZtn+l93de4/gPPM2Wdt04Zw+ekDadwNgL6ZKTqhY=";
};
propagatedBuildInputs = [


@ -3,13 +3,13 @@
, buildPythonPackage
, fetchFromGitHub
, aiohttp
, demjson
, demjson3
, python
}:
buildPythonPackage rec {
pname = "pysyncthru";
version = "0.7.8";
version = "0.7.10";
disabled = isPy27;
@ -17,12 +17,12 @@ buildPythonPackage rec {
owner = "nielstron";
repo = "pysyncthru";
rev = "release-${version}";
sha256 = "17k9dhnya4304gqmkyvvf94jvikmnkf2lqairl3rfrl7w68jm3vp";
sha256 = "1c29w2ldrnq0vxr9cfa2pjhwdvrpw393c84khgg2y56jrkbidq53";
};
propagatedBuildInputs = [
aiohttp
demjson
demjson3
];
checkPhase = ''


@ -13,14 +13,14 @@
buildPythonPackage rec {
pname = "python-smarttub";
version = "0.0.25";
version = "0.0.27";
disabled = pythonOlder "3.8";
src = fetchFromGitHub {
owner = "mdz";
repo = pname;
rev = "v${version}";
sha256 = "13yf75vmn15g2hrbiv78mws96qbk40p5pz7vc6ljyp41y2lc9wpm";
sha256 = "sha256-EoZn5yxj18hi4oEMuUcB5UN2xQFkLbSG/awp+Qh029E=";
};
propagatedBuildInputs = [
@ -43,6 +43,5 @@ buildPythonPackage rec {
homepage = "https://github.com/mdz/python-smarttub";
license = with licenses; [ mit ];
maintainers = with maintainers; [ fab ];
broken = pyjwt.version != "1.7.1";
};
}


@ -2,7 +2,7 @@
buildPythonPackage rec {
pname = "python-tado";
version = "0.11.0";
version = "0.12.0";
disabled = pythonOlder "3.5";
@ -10,7 +10,7 @@ buildPythonPackage rec {
owner = "wmalgadey";
repo = "PyTado";
rev = version;
sha256 = "0fw4f9gqnhxwpxyb34qi8bl5pmzz13h4x3mdk903hhjyccanqncr";
sha256 = "sha256-n+H6H2ORLizv9cn1P5Cd8wHDWMNonPrs+x+XMQbEzZQ=";
};
propagatedBuildInputs = [ requests ];


@ -0,0 +1,36 @@
{ lib
, buildPythonPackage
, fetchPypi
, pythonOlder
, requests
}:
buildPythonPackage rec {
pname = "pytransportnsw";
version = "0.1.1";
format = "setuptools";
disabled = pythonOlder "3.6";
src = fetchPypi {
pname = "PyTransportNSW";
inherit version;
sha256 = "00jklgjirmc58hiaqqc2n2rgixvx91bgrd6lv6hv28k51kid10f3";
};
propagatedBuildInputs = [
requests
];
# Project has no tests
doCheck = false;
pythonImportsCheck = [ "TransportNSW" ];
meta = with lib; {
description = "Python module to access Transport NSW information";
homepage = "https://github.com/Dav0815/TransportNSW";
license = with licenses; [ gpl3Only ];
maintainers = with maintainers; [ fab ];
};
}


@ -0,0 +1,38 @@
{ lib
, buildPythonPackage
, fetchPypi
, pythonOlder
, gtfs-realtime-bindings
, requests
}:
buildPythonPackage rec {
pname = "pytransportnswv2";
version = "0.2.4";
format = "setuptools";
disabled = pythonOlder "3.6";
src = fetchPypi {
pname = "PyTransportNSWv2";
inherit version;
sha256 = "129rrqckqgfrwdx0b83dqphcv55cxs5i8jl1ascia7rpzjn109ah";
};
propagatedBuildInputs = [
gtfs-realtime-bindings
requests
];
# Project has no tests
doCheck = false;
pythonImportsCheck = [ "TransportNSW" ];
meta = with lib; {
description = "Python module to access Transport NSW information";
homepage = "https://github.com/andystewart999/TransportNSW";
license = with licenses; [ gpl3Only ];
maintainers = with maintainers; [ fab ];
};
}


@ -10,14 +10,14 @@
buildPythonPackage rec {
pname = "pyvicare";
version = "2.8.1";
version = "2.9.1";
disabled = pythonOlder "3.7";
src = fetchFromGitHub {
owner = "somm15";
repo = "PyViCare";
rev = version;
sha256 = "sha256-SmbsEN6vZ28ihgUggtcF2AjbmUVaqLLweh7cKipr6u4=";
sha256 = "sha256-Uzz2mWBT5BaMxYeR6YFIP1BqTWye1Hz9CTTg/bg4kSU=";
};
SETUPTOOLS_SCM_PRETEND_VERSION = version;


@ -8,11 +8,11 @@
buildPythonPackage rec {
pname = "s3fs";
version = "2021.8.1";
version = "2021.10.0";
src = fetchPypi {
inherit pname version;
sha256 = "0zwy2fr95s5wzrr2iwbayjh9xh421p6wf0m75szl7rw930v1kb2y";
sha256 = "sha256-mSdMmP5b6pu954GQxBrb0bEghyLLKtSGd6aPhHPwOV0=";
};
buildInputs = [


@ -13,12 +13,12 @@
buildPythonPackage rec {
pname = "sqlite-utils";
version = "3.17";
version = "3.17.1";
disabled = pythonOlder "3.6";
src = fetchPypi {
inherit pname version;
sha256 = "77acd202aa568a1f6888c5d8879f306bb3f8acedc82df0df98eb615caa491abb";
sha256 = "0cfde0c46a2d4c09d6df8609fe53642bc3ab443bcef3106d8f1eabeb3fccbe3d";
};
postPatch = ''


@ -1,22 +1,20 @@
{ lib
, buildPythonPackage
, fetchFromGitHub
, fetchPypi
, pythonOlder
, requests
}:
buildPythonPackage rec {
pname = "streamlabswater";
version = "0.3.2";
version = "1.0.1";
format = "setuptools";
disabled = pythonOlder "3.6";
src = fetchFromGitHub {
owner = pname;
repo = "stream-python";
rev = "v${version}";
sha256 = "1lh1i1ksic9yhxnwc7mqm5qla98x85dfwj846kwldwam0vcrqlk7";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-kXG0Wg3PVryMBQ9RMMtEzudMiwVQq7Ikw2OK7JcBojA=";
};
propagatedBuildInputs = [


@ -18,7 +18,7 @@
buildPythonPackage rec {
pname = "surepy";
version = "0.7.1";
version = "0.7.2";
format = "pyproject";
disabled = pythonOlder "3.8";
@ -26,7 +26,7 @@ buildPythonPackage rec {
owner = "benleb";
repo = pname;
rev = "v${version}";
sha256 = "sha256-h2PEzS3R7NXIUWYOiTpe5ZEU1RopaRj1phudmvcklug=";
sha256 = "sha256-yc+jXA4ndFhRZmFPz11HbVs9qaPFNa6WdwXj6hRyjw4=";
};
postPatch = ''


@ -12,11 +12,11 @@
buildPythonPackage rec {
pname = "transitions";
version = "0.8.9";
version = "0.8.10";
src = fetchPypi {
inherit pname version;
sha256 = "fc2ec6d6b6f986cd7e28e119eeb9ba1c9cc51ab4fbbdb7f2dedad01983fd2de0";
sha256 = "b0385975a842e885c1a55c719d2f90164471665794d39d51f9eb3f11e1d9c8ac";
};
propagatedBuildInputs = [


@ -12,14 +12,14 @@
buildPythonPackage rec {
pname = "twilio";
version = "6.56.0";
version = "7.1.0";
src = fetchFromGitHub {
owner = "twilio";
repo = "twilio-python";
rev = version;
sha256 = "sha256-vVJuuPxVyOqnplPYrjCjIm5IyIFZvsCMoDLrrHpHK+4=";
sha256 = "sha256-pagqetDQ8/1xDCxZJVTZc9T0dmFA1opd7tMDR11wlVs=";
};
propagatedBuildInputs = [


@ -11,11 +11,11 @@
buildPythonPackage rec {
pname = "urlextract";
version = "1.3.0";
version = "1.4.0";
src = fetchPypi {
inherit pname version;
sha256 = "sha256-yxOuiswFOJnAvxwTT++Zhk8nZWK2f4ePsQpUYI7EYS4=";
sha256 = "669f07192584b841b49ba8868fbd6b00e7ddc28367d36a3d8ca8c8e429420748";
};
propagatedBuildInputs = [


@ -9,13 +9,13 @@
buildPythonPackage rec {
pname = "zha-quirks";
version = "0.0.61";
version = "0.0.62";
src = fetchFromGitHub {
owner = "zigpy";
repo = "zha-device-handlers";
rev = version;
sha256 = "sha256-uDQAXH0p8Ly0ZbwNlkVo1b7fAXSu77U7v3BHd0B1YQk=";
sha256 = "sha256-wXXdxE69EABrvJA8utrhLW4+8ixcyCraWHx2M3uE8mw=";
};
propagatedBuildInputs = [


@ -15,13 +15,13 @@
buildPythonPackage rec {
pname = "zigpy";
version = "0.37.1";
version = "0.38.0";
src = fetchFromGitHub {
owner = "zigpy";
repo = "zigpy";
rev = version;
sha256 = "sha256-tDpu6tv8qwIPB3G5GKURtDi6QOYxF5jEVbzmJ2Px5W4=";
sha256 = "sha256-3iS2VMaicbgtsiKUPe6GjFJQV8xKjs+dC8+IeprMa9I=";
};
propagatedBuildInputs = [


@ -9,7 +9,7 @@
buildPythonPackage rec {
pname = "zwave-js-server-python";
version = "0.30.0";
version = "0.31.3";
disabled = pythonOlder "3.8";
@ -17,7 +17,7 @@ buildPythonPackage rec {
owner = "home-assistant-libs";
repo = pname;
rev = version;
sha256 = "sha256-KYMq0qDVLHzgaYljwYeK58aggD5kBAI1J/RsirGcVvs=";
sha256 = "sha256-mOcaxt8pc+d7qBoDtwCsDWoVs3Hw17v5WDKgzIW1WzY=";
};
propagatedBuildInputs = [

Some files were not shown because too many files have changed in this diff.