Project import generated by Copybara.

GitOrigin-RevId: fe01052444c1d66ed6ef76df2af798c9769e9e79
This commit is contained in:
Default email 2021-08-10 22:31:46 +08:00
parent 48d4c8cc3c
commit e0d1b75f19
230 changed files with 7734 additions and 3066 deletions

View file

@ -1,7 +1,7 @@
# How to contribute
Note: contributing implies licensing those contributions
under the terms of [COPYING](../COPYING), which is an MIT-like license.
under the terms of [COPYING](COPYING), which is an MIT-like license.
## Opening issues

View file

@ -139,11 +139,9 @@ the whitelist maintainers/scripts/luarocks-packages.csv and updated by running m
[luarocks2nix](https://github.com/nix-community/luarocks) is a tool capable of generating nix derivations from both rockspec and src.rock (and favors the src.rock).
The automation only goes so far though and some packages need to be customized.
These customizations go in `pkgs/development/lua-modules/overrides.nix`.
For instance if the rockspec defines `external_dependencies`, these need to be manually added in its rockspec file then it won't work.
For instance if the rockspec defines `external_dependencies`, these need to be manually added to the overrides.nix.
You can try converting luarocks packages to nix packages with the command `nix-shell -p luarocks-nix` and then `luarocks nix PKG_NAME`.
Nix relies on luarocks to install lua packages; basically it runs:
`luarocks make --deps-mode=none --tree $out`
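Overrides of this kind are plain Nix. As a hedged illustration of what an entry in `overrides.nix` can look like when a rockspec's `external_dependencies` has to be satisfied by hand — assuming the file exposes the usual `self`/`super` override arguments and that `pkgs` is in scope; the package names here are only illustrative:

```nix
# Hypothetical overrides.nix entry: add the native library that the
# rockspec only declares under external_dependencies.
luazip = super.luazip.overrideAttrs (oldAttrs: {
  buildInputs = (oldAttrs.buildInputs or [ ]) ++ [ pkgs.zziplib ];
});
```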
#### Packaging a library manually {#packaging-a-library-manually}
@ -161,8 +159,8 @@ are not packaged for luarocks. You can see a few examples at `pkgs/top-level/lua
### Lua interpreters {#lua-interpreters}
Versions 5.1, 5.2 and 5.3 of the lua interpreter are available as
respectively `lua5_1`, `lua5_2` and `lua5_3`. Luajit is available too.
Versions 5.1, 5.2, 5.3 and 5.4 of the lua interpreter are available as
respectively `lua5_1`, `lua5_2`, `lua5_3` and `lua5_4`. Luajit is available too.
The Nix expressions for the interpreters can be found in `pkgs/development/interpreters/lua-5`.
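For orientation, here is a small sketch of how these interpreter attributes are typically consumed, assuming the chosen modules build against that interpreter version:

```nix
# Build one interpreter together with a couple of modules from its
# package set; lua5_3 can be swapped for lua5_1, lua5_2, lua5_4 or luajit.
let
  pkgs = import <nixpkgs> { };
in
pkgs.lua5_3.withPackages (ps: with ps; [ lpeg luafilesystem ])
```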
#### Attributes on lua interpreters packages {#attributes-on-lua-interpreters-packages}

View file

@ -309,7 +309,7 @@ Sample output2:
## Adding new plugins to nixpkgs {#adding-new-plugins-to-nixpkgs}
Nix expressions for Vim plugins are stored in [pkgs/misc/vim-plugins](/pkgs/misc/vim-plugins). For the vast majority of plugins, Nix expressions are automatically generated by running [`./update.py`](/pkgs/misc/vim-plugins/update.py). This creates a [generated.nix](/pkgs/misc/vim-plugins/generated.nix) file based on the plugins listed in [vim-plugin-names](/pkgs/misc/vim-plugins/vim-plugin-names). Plugins are listed in alphabetical order in `vim-plugin-names` using the format `[github username]/[repository]`. For example https://github.com/scrooloose/nerdtree becomes `scrooloose/nerdtree`.
Nix expressions for Vim plugins are stored in [pkgs/misc/vim-plugins](/pkgs/misc/vim-plugins). For the vast majority of plugins, Nix expressions are automatically generated by running [`./update.py`](/pkgs/misc/vim-plugins/update.py). This creates a [generated.nix](/pkgs/misc/vim-plugins/generated.nix) file based on the plugins listed in [vim-plugin-names](/pkgs/misc/vim-plugins/vim-plugin-names). Plugins are listed in alphabetical order in `vim-plugin-names` using the format `[github username]/[repository]@[gitref]`. For example https://github.com/scrooloose/nerdtree becomes `scrooloose/nerdtree`.
Some plugins require overrides in order to function properly. Overrides are placed in [overrides.nix](/pkgs/misc/vim-plugins/overrides.nix). Overrides are most often required when a plugin requires some dependencies, or extra steps are required during the build process. For example `deoplete-fish` requires both `deoplete-nvim` and `vim-fish`, and so the following override was added:
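The override referenced above is not included in this hunk; a sketch of what such an entry looks like, written against the `super`-style attribute set these override files take (the exact attribute names follow the surrounding file):

```nix
# Sketch: give deoplete-fish its two plugin dependencies so that they are
# pulled into the runtime path alongside the plugin itself.
deoplete-fish = super.deoplete-fish.overrideAttrs (old: {
  dependencies = with super; [ deoplete-nvim vim-fish ];
});
```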

View file

@ -9668,6 +9668,12 @@
githubId = 1567527;
name = "Sebastian Hyberts";
};
sebtm = {
email = "mail@sebastian-sellmeier.de";
github = "sebtm";
githubId = 17243347;
name = "Sebastian Sellmeier";
};
sellout = {
email = "greg@technomadic.org";
github = "sellout";

View file

@ -1,88 +1,89 @@
# nix name, luarocks name, server, version,luaversion,maintainers
alt-getopt,,,,,arobyn
ansicolors,,,,,
argparse,,,,,
basexx,,,,,
binaryheap,,,,,vcunat
bit32,,,,lua5_1,lblasc
busted,,,,,
cassowary,,,,,marsam alerque
cjson,lua-cjson,,,,
compat53,,,,,vcunat
cosmo,,,,,marsam
coxpcall,,,1.17.0-1,,
cqueues,,,,,vcunat
cyrussasl,,,,,
digestif,,,,lua5_3,
dkjson,,,,,
fifo,,,,,
http,,,,,vcunat
inspect,,,,,
ldbus,,http://luarocks.org/dev,,,
ldoc,,,,,
lgi,,,,,
linenoise,,,,,
ljsyscall,,,,lua5_1,lblasc
lpeg,,,,,vyp
lpeg_patterns,,,,,
lpeglabel,,,,,
lpty,,,,,
lrexlib-gnu,,,,,
lrexlib-pcre,,,,,vyp
lrexlib-posix,,,,,
ltermbox,,,,,
lua-cmsgpack,,,,,
lua-iconv,,,,,
lua-lsp,,http://luarocks.org/dev,,,
lua-messagepack,,,,,
lua-resty-http,,,,,
lua-resty-jwt,,,,,
lua-resty-openidc,,,,,
lua-resty-openssl,,,,,
lua-resty-session,,,,,
lua-term,,,,,
lua-toml,,,,,
lua-zlib,,,,,koral
lua_cliargs,,,,,
luabitop,,,,,
luacheck,,,,,
luacov,,,,,
luadbi,,,,,
luadbi-mysql,,,,,
luadbi-postgresql,,,,,
luadbi-sqlite3,,,,,
luadoc,,,,,
luaepnf,,,,,
luaevent,,,,,
luaexpat,,,1.3.0-1,,arobyn flosse
luaffi,,http://luarocks.org/dev,,,
luafilesystem,,,1.7.0-2,,flosse
lualogging,,,,,
luaossl,,,,lua5_1,
luaposix,,,,,vyp lblasc
luarepl,,,,,
luasec,,,,,flosse
luasocket,,,,,
luasql-sqlite3,,,,,vyp
luassert,,,,,
luasystem,,,,,
luautf8,,,,,pstn
luazip,,,,,
lua-yajl,,,,,pstn
luuid,,,,,
luv,,,,,
lyaml,,,,,lblasc
markdown,,,,,
mediator_lua,,,,,
mpack,,,,,
moonscript,,,,,arobyn
nvim-client,,,,,
penlight,,,,,
plenary.nvim,,,,lua5_1,
rapidjson,,,,,
readline,,,,,
say,,,,,
std-_debug,std._debug,,,,
std_normalize,std.normalize,,,,
stdlib,,,,,vyp
vstruct,,,,,
name,server,version,luaversion,maintainers
alt-getopt,,,,arobyn
ansicolors,,,,
bit32,,5.3.0-1,lua5_1,lblasc
argparse,,,,
basexx,,,,
binaryheap,,,,vcunat
busted,,,,
cassowary,,,,marsam alerque
compat53,,0.7-1,,vcunat
cosmo,,,,marsam
coxpcall,,1.17.0-1,,
cqueues,,,,vcunat
cyrussasl,,,,
digestif,,0.2-1,lua5_3,
dkjson,,,,
fifo,,,,
gitsigns.nvim,,,lua5_1,
http,,0.3-0,,vcunat
inspect,,,,
ldbus,http://luarocks.org/dev,,,
ldoc,,,,
lgi,,,,
linenoise,,,,
ljsyscall,,,lua5_1,lblasc
lpeg,,,,vyp
lpeg_patterns,,,,
lpeglabel,,,,
lpty,,,,
lrexlib-gnu,,,,
lrexlib-pcre,,,,vyp
lrexlib-posix,,,,
ltermbox,,,,
lua-cjson,,,,
lua-cmsgpack,,,,
lua-iconv,,,,
lua-lsp,http://luarocks.org/dev,,,
lua-messagepack,,,,
lua-resty-http,,,,
lua-resty-jwt,,,,
lua-resty-openidc,,,,
lua-resty-openssl,,,,
lua-resty-session,,,,
lua-term,,,,
lua-toml,,,,
lua-zlib,,,,koral
lua_cliargs,,,,
luabitop,,,,
luacheck,,,,
luacov,,,,
luadbi,,,,
luadbi-mysql,,,,
luadbi-postgresql,,,,
luadbi-sqlite3,,,,
luadoc,,,,
luaepnf,,,,
luaevent,,,,
luaexpat,,1.3.0-1,,arobyn flosse
luaffi,http://luarocks.org/dev,,,
luafilesystem,,1.7.0-2,,flosse
lualogging,,,,
luaossl,,,lua5_1,
luaposix,,34.1.1-1,,vyp lblasc
luarepl,,,,
luasec,,,,flosse
luasocket,,,,
luasql-sqlite3,,,,vyp
luassert,,,,
luasystem,,,,
luautf8,,,,pstn
luazip,,,,
lua-yajl,,,,pstn
luuid,,,,
luv,,1.30.0-0,,
lyaml,,,,lblasc
markdown,,,,
mediator_lua,,,,
mpack,,,,
moonscript,,,,arobyn
nvim-client,,,,
penlight,,,,
plenary.nvim,,,lua5_1,
rapidjson,,,,
readline,,,,
say,,,,
std._debug,,,,
std.normalize,,,,
stdlib,,,,vyp
vstruct,,,,


View file

@ -28,6 +28,7 @@ from pathlib import Path
from typing import Dict, List, Optional, Tuple, Union, Any, Callable
from urllib.parse import urljoin, urlparse
from tempfile import NamedTemporaryFile
from dataclasses import dataclass
import git
@ -82,6 +83,13 @@ def make_request(url: str) -> urllib.request.Request:
headers["Authorization"] = f"token {token}"
return urllib.request.Request(url, headers=headers)
@dataclass
class PluginDesc:
owner: str
repo: str
branch: str
alias: str
class Repo:
def __init__(
@ -201,15 +209,39 @@ class Editor:
deprecated: Optional[Path] = None,
cache_file: Optional[str] = None,
):
log.debug("get_plugins:", get_plugins)
self.name = name
self.root = root
self.get_plugins = get_plugins
self.generate_nix = generate_nix
self._generate_nix = generate_nix
self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
self.default_out = default_out or root.joinpath("generated.nix")
self.deprecated = deprecated or root.joinpath("deprecated.json")
self.cache_file = cache_file or f"{name}-plugin-cache.json"
def get_current_plugins(self):
"""To fill the cache"""
return get_current_plugins(self)
def load_plugin_spec(self, plugin_file) -> List[PluginDesc]:
return load_plugin_spec(plugin_file)
def generate_nix(self, plugins, outfile):
'''Returns nothing for now, writes directly to outfile'''
self._generate_nix(plugins, outfile)
def get_update(self, input_file: str, outfile: str, proc: int):
return get_update(input_file, outfile, proc, editor=self)
@property
def attr_path(self):
return self.name + "Plugins"
def rewrite_input(self, *args, **kwargs):
return rewrite_input(*args, **kwargs)
class CleanEnvironment(object):
def __enter__(self) -> None:
@ -228,7 +260,9 @@ class CleanEnvironment(object):
def get_current_plugins(editor: Editor) -> List[Plugin]:
with CleanEnvironment():
out = subprocess.check_output(["nix", "eval", "--json", editor.get_plugins])
cmd = ["nix", "eval", "--json", editor.get_plugins]
log.debug("Running command %s", cmd)
out = subprocess.check_output(cmd)
data = json.loads(out)
plugins = []
for name, attr in data.items():
@ -244,12 +278,13 @@ def prefetch_plugin(
alias: Optional[str],
cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Dict[str, str]]:
log.info("Prefetching plugin %s", repo_name)
log.info(f"Fetching last commit for plugin {user}/{repo_name}@{branch}")
repo = Repo(user, repo_name, branch, alias)
commit, date = repo.latest_commit()
has_submodules = repo.has_submodules()
cached_plugin = cache[commit] if cache else None
if cached_plugin is not None:
log.debug("Cache hit !")
cached_plugin.name = alias or repo_name
cached_plugin.date = date
return cached_plugin, repo.redirect
@ -306,8 +341,7 @@ def check_results(
sys.exit(1)
def parse_plugin_line(line: str) -> Tuple[str, str, str, Optional[str]]:
def parse_plugin_line(line: str) -> PluginDesc:
branch = "master"
alias = None
name, repo = line.split("/")
@ -317,15 +351,15 @@ def parse_plugin_line(line: str) -> Tuple[str, str, str, Optional[str]]:
if "@" in repo:
repo, branch = repo.split("@")
return (name.strip(), repo.strip(), branch.strip(), alias)
return PluginDesc(name.strip(), repo.strip(), branch.strip(), alias)
def load_plugin_spec(plugin_file: str) -> List[Tuple[str, str, str, Optional[str]]]:
def load_plugin_spec(plugin_file: str) -> List[PluginDesc]:
plugins = []
with open(plugin_file) as f:
for line in f:
plugin = parse_plugin_line(line)
if not plugin[0]:
if not plugin.owner:
msg = f"Invalid repository {line}, must be in the format owner/repo[ as alias]"
print(msg, file=sys.stderr)
sys.exit(1)
@ -387,12 +421,11 @@ class Cache:
def prefetch(
args: Tuple[str, str, str, Optional[str]], cache: Cache
args: PluginDesc, cache: Cache
) -> Tuple[str, str, Union[Exception, Plugin], dict]:
assert len(args) == 4
owner, repo, branch, alias = args
owner, repo = args.owner, args.repo
try:
plugin, redirect = prefetch_plugin(owner, repo, branch, alias, cache)
plugin, redirect = prefetch_plugin(owner, repo, args.branch, args.alias, cache)
cache[plugin.commit] = plugin
return (owner, repo, plugin, redirect)
except Exception as e:
@ -433,7 +466,7 @@ def rewrite_input(
with open(input_file, "w") as f:
f.writelines(lines)
# TODO move to Editor ?
def parse_args(editor: Editor):
parser = argparse.ArgumentParser(
description=(
@ -446,7 +479,7 @@ def parse_args(editor: Editor):
dest="add_plugins",
default=[],
action="append",
help=f"Plugin to add to {editor.name}Plugins from Github in the form owner/repo",
help=f"Plugin to add to {editor.attr_path} from Github in the form owner/repo",
)
parser.add_argument(
"--input-names",
@ -493,11 +526,11 @@ def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
def get_update(input_file: str, outfile: str, proc: int, editor: Editor):
cache: Cache = Cache(get_current_plugins(editor), editor.cache_file)
cache: Cache = Cache(editor.get_current_plugins(), editor.cache_file)
_prefetch = functools.partial(prefetch, cache=cache)
def update() -> dict:
plugin_names = load_plugin_spec(input_file)
plugin_names = editor.load_plugin_spec(input_file)
try:
pool = Pool(processes=proc)
@ -522,33 +555,33 @@ def update_plugins(editor: Editor):
log.info("Start updating plugins")
nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True)
update = get_update(args.input_file, args.outfile, args.proc, editor)
update = editor.get_update(args.input_file, args.outfile, args.proc)
redirects = update()
rewrite_input(args.input_file, editor.deprecated, redirects)
editor.rewrite_input(args.input_file, editor.deprecated, redirects)
autocommit = not args.no_commit
if autocommit:
commit(nixpkgs_repo, f"{editor.name}Plugins: update", [args.outfile])
commit(nixpkgs_repo, f"{editor.attr_path}: update", [args.outfile])
if redirects:
update()
if autocommit:
commit(
nixpkgs_repo,
f"{editor.name}Plugins: resolve github repository redirects",
f"{editor.attr_path}: resolve github repository redirects",
[args.outfile, args.input_file, editor.deprecated],
)
for plugin_line in args.add_plugins:
rewrite_input(args.input_file, editor.deprecated, append=(plugin_line + "\n",))
editor.rewrite_input(args.input_file, editor.deprecated, append=(plugin_line + "\n",))
update()
plugin = fetch_plugin_from_pluginline(plugin_line)
if autocommit:
commit(
nixpkgs_repo,
"{editor}Plugins.{name}: init at {version}".format(
"{editor.attr_path}.{name}: init at {version}".format(
editor=editor.name, name=plugin.normalized_name, version=plugin.version
),
[args.outfile, args.input_file],

View file

@ -1,140 +1,179 @@
#!/usr/bin/env nix-shell
#!nix-shell update-luarocks-shell.nix -i bash
#!nix-shell -p nix-prefetch-git luarocks-nix python3 python3Packages.GitPython nix -i python3
# You'll likely want to use
# ``
# nixpkgs $ maintainers/scripts/update-luarocks-packages pkgs/development/lua-modules/generated-packages.nix
# ``
# to update all libraries in that folder.
# to debug, redirect stderr to stdout with 2>&1
# format:
# $ nix run nixpkgs.python3Packages.black -c black update.py
# type-check:
# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
# linted:
# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265,E402 update.py
# stop the script upon C-C
set -eu -o pipefail
import inspect
import os
import tempfile
import shutil
from dataclasses import dataclass
import subprocess
import csv
import logging
CSV_FILE="maintainers/scripts/luarocks-packages.csv"
from typing import List
from pathlib import Path
LOG_LEVELS = {
logging.getLevelName(level): level for level in [
logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR ]
}
log = logging.getLogger()
log.addHandler(logging.StreamHandler())
ROOT = Path(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))).parent.parent
from pluginupdate import Editor, parse_args, update_plugins, PluginDesc, CleanEnvironment
PKG_LIST="maintainers/scripts/luarocks-packages.csv"
TMP_FILE="$(mktemp)"
# Set in the update-luarocks-shell.nix
NIXPKGS_PATH="$LUAROCKS_NIXPKGS_PATH"
export LUAROCKS_CONFIG="$NIXPKGS_PATH/maintainers/scripts/luarocks-config.lua"
GENERATED_NIXFILE="pkgs/development/lua-modules/generated-packages.nix"
LUAROCKS_CONFIG="$NIXPKGS_PATH/maintainers/scripts/luarocks-config.lua"
# 10 is a pretty arbitrary number of simultaneous jobs, but it is generally
# impolite to hit a webserver with *too* many simultaneous connections :)
PARALLEL_JOBS=1
exit_trap() {
local lc="$BASH_COMMAND" rc=$?
test $rc -eq 0 || echo -e "*** error $rc: $lc.\nGenerated temporary file in $TMP_FILE" >&2
}
print_help() {
echo "Usage: $0 <GENERATED_FILE>"
echo "(most likely pkgs/development/lua-modules/generated-packages.nix)"
echo ""
echo " -c <CSV_FILE> to set the list of luarocks package to generate"
exit 1
}
if [ $# -lt 1 ]; then
print_help
exit 1
fi
trap exit_trap EXIT
while getopts ":hc:" opt; do
case $opt in
h)
print_help
;;
c)
echo "Loading package list from $OPTARG !" >&2
CSV_FILE="$OPTARG"
;;
\?)
echo "Invalid option: -$OPTARG" >&2
;;
esac
shift $((OPTIND - 1))
done
GENERATED_NIXFILE="$1"
HEADER="
/* ${GENERATED_NIXFILE} is an auto-generated file -- DO NOT EDIT!
HEADER = """
/* {GENERATED_NIXFILE} is an auto-generated file -- DO NOT EDIT!
Regenerate it with:
nixpkgs$ ${0} ${GENERATED_NIXFILE}
nixpkgs$ ./maintainers/scripts/update-luarocks-packages
These packages are manually refined in lua-overrides.nix
You can customize the generated packages in pkgs/development/lua-modules/overrides.nix
*/
{ self, stdenv, lib, fetchurl, fetchgit, pkgs, ... } @ args:
""".format(GENERATED_NIXFILE=GENERATED_NIXFILE)
FOOTER="""
}
/* GENERATED - do not edit this file */
"""
@dataclass
class LuaPlugin:
name: str
version: str
server: str
luaversion: str
maintainers: str
@property
def normalized_name(self) -> str:
return self.name.replace(".", "-")
# rename Editor to LangUpdate/ EcosystemUpdater
class LuaEditor(Editor):
def get_current_plugins(self):
return []
def load_plugin_spec(self, input_file) -> List[PluginDesc]:
luaPackages = []
csvfilename=input_file
log.info("Loading package descriptions from %s", csvfilename)
with open(csvfilename, newline='') as csvfile:
reader = csv.DictReader(csvfile,)
for row in reader:
# name,server,version,luaversion,maintainers
plugin = LuaPlugin(**row)
luaPackages.append(plugin)
return luaPackages
@property
def attr_path(self):
return "luaPackages"
def get_update(self, input_file: str, outfile: str, _: int):
def update() -> dict:
plugin_specs = self.load_plugin_spec(input_file)
self.generate_nix(plugin_specs, outfile)
redirects = []
return redirects
return update
def rewrite_input(self, *args, **kwargs):
# not implemented yet
pass
def generate_nix(
plugins: List[LuaPlugin],
outfilename: str
):
sorted_plugins = sorted(plugins, key=lambda v: v.name.lower())
# plug = {}
# according to the manifest at luarocks.org/manifest
def _generate_pkg_nix(plug):
cmd = [ "luarocks", "nix", plug.name]
if plug.server:
cmd.append(f"--only-server={plug.server}")
if plug.maintainers:
cmd.append(f"--maintainers={plug.maintainers}")
if plug.version:
cmd.append(plug.version)
if plug.luaversion:
with CleanEnvironment():
local_pkgs = str(ROOT.resolve())
cmd2 = ["nix-build", "--no-out-link", local_pkgs, "-A", f"{plug.luaversion}"]
log.debug("running %s", cmd2)
lua_drv_path=subprocess.check_output(cmd2, text=True).strip()
cmd.append(f"--lua-dir={lua_drv_path}/bin")
log.debug("running %s", cmd)
output = subprocess.check_output(cmd, text=True)
return output
with tempfile.NamedTemporaryFile("w+") as f:
f.write(HEADER)
f.write("""
{ self, stdenv, lib, fetchurl, fetchgit, ... } @ args:
self: super:
with self;
{
"
""")
FOOTER="
}
/* GENERATED */
"
for plugin in sorted_plugins:
function convert_pkg() {
nix_pkg_name="$1"
lua_pkg_name="$2"
server="$3"
pkg_version="$4"
lua_version="$5"
maintainers="$6"
nix_expr = _generate_pkg_nix(plugin)
f.write(f"{plugin.normalized_name} = {nix_expr}"
)
f.write(FOOTER)
f.flush()
if [ "${nix_pkg_name:0:1}" == "#" ]; then
echo "Skipping comment ${*}" >&2
return
fi
# if everything went fine, move the generated file to its destination
# using copy since move doesn't work across disks
shutil.copy(f.name, outfilename)
# Normalize package name
nix_pkg_name_normalized=$(sed 's/\./-/' <(echo "$nix_pkg_name"))
print(f"updated {outfilename}")
if [ -z "$lua_pkg_name" ]; then
echo "Using nix_name as lua_pkg_name for '$nix_pkg_name'" >&2
lua_pkg_name="$nix_pkg_name"
fi
def load_plugin_spec():
pass
echo "Building expression for $lua_pkg_name (version $pkg_version) from server [$server]" >&2
luarocks_args=(nix)
if [[ -n $server ]]; then
luarocks_args+=("--only-server=$server")
fi
if [[ -n $maintainers ]]; then
luarocks_args+=("--maintainers=$maintainers")
fi
if [[ -n $lua_version ]]; then
lua_drv_path=$(nix-build --no-out-link "$NIXPKGS_PATH" -A "$lua_version")
luarocks_args+=("--lua-dir=$lua_drv_path/bin")
fi
luarocks_args+=("$lua_pkg_name")
if [[ -n $pkg_version ]]; then
luarocks_args+=("$pkg_version")
fi
echo "Running 'luarocks ${luarocks_args[*]}'" >&2
if drv="$nix_pkg_name_normalized = $(luarocks "${luarocks_args[@]}")"; then
echo "$drv"
else
echo "Failed to convert $nix_pkg_name" >&2
return 1
fi
}
# params needed when called via callPackage
echo "$HEADER" | tee "$TMP_FILE"
def main():
# Ensure parallel can run our bash function
export -f convert_pkg
export SHELL=bash
# Read each line in the csv file and run convert_pkg for each, in parallel
parallel --group --keep-order --halt now,fail=1 --jobs "$PARALLEL_JOBS" --colsep ',' convert_pkg {} <"$CSV_FILE" | tee -a "$TMP_FILE"
editor = LuaEditor("lua", ROOT, '', generate_nix,
default_in = ROOT.joinpath(PKG_LIST),
default_out = ROOT.joinpath(GENERATED_NIXFILE)
)
# close the set
echo "$FOOTER" | tee -a "$TMP_FILE"
args = parse_args(editor)
log.setLevel(LOG_LEVELS[args.debug])
cp "$TMP_FILE" "$GENERATED_NIXFILE"
update_plugins(editor)
if __name__ == "__main__":
main()
# vim: set ts=4 sw=4 ft=sh:

View file

@ -6,7 +6,10 @@ set -euf -o pipefail
(
cd pkgs/development/ruby-modules/with-packages
rm -f gemset.nix Gemfile.lock
bundle lock
# Since bundler 2+, the lock command generates a platform-dependent
# Gemfile.lock, which causes bundix to generate a gemset tied to the
# platform where it was executed.
BUNDLE_FORCE_RUBY_PLATFORM=1 bundle lock
bundix
mv gemset.nix ../../../top-level/ruby-packages.nix
rm -f Gemfile.lock

View file

@ -156,6 +156,21 @@
<link linkend="opt-services.moonraker.enable">moonraker</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://github.com/influxdata/influxdb">influxdb2</link>,
a scalable datastore for metrics, events, and real-time
analytics. Available as
<link linkend="opt-services.influxdb2.enable">services.influxdb2</link>.
</para>
</listitem>
<listitem>
<para>
<link xlink:href="https://posativ.org/isso/">isso</link>, a
commenting server similar to Disqus. Available as
<link linkend="opt-services.isso.enable">isso</link>
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="sec-release-21.11-incompatibilities">
@ -646,32 +661,6 @@
to use wildcards in the <literal>source</literal> argument.
</para>
</listitem>
<listitem>
<para>
The <literal>openrazer</literal> and
<literal>openrazer-daemon</literal> packages as well as the
<literal>hardware.openrazer</literal> module now require users
to be members of the <literal>openrazer</literal> group
instead of <literal>plugdev</literal>. With this change, users
no longer need be granted the entire set of
<literal>plugdev</literal> group permissions, which can
include permissions other than those required by
<literal>openrazer</literal>. This is desirable from a
security point of view. The setting
<link xlink:href="options.html#opt-services.hardware.openrazer.users"><literal>harware.openrazer.users</literal></link>
can be used to add users to the <literal>openrazer</literal>
group.
</para>
</listitem>
<listitem>
<para>
The <literal>yambar</literal> package has been split into
<literal>yambar</literal> and
<literal>yambar-wayland</literal>, corresponding to the xorg
and wayland backend respectively. Please switch to
<literal>yambar-wayland</literal> if you are on wayland.
</para>
</listitem>
</itemizedlist>
</section>
<section xml:id="sec-release-21.11-notable-changes">
@ -841,15 +830,6 @@
version of zfs.
</para>
</listitem>
<listitem>
<para>
Nginx will use the value of
<literal>sslTrustedCertificate</literal> if provided for a
virtual host, even if <literal>enableACME</literal> is set.
This is useful for providers not using the same certificate to
sign OCSP responses and server certificates.
</para>
</listitem>
</itemizedlist>
</section>
</section>

View file

@ -48,6 +48,11 @@ pt-services.clipcat.enable).
- [moonraker](https://github.com/Arksine/moonraker), an API web server for Klipper.
Available as [moonraker](#opt-services.moonraker.enable).
- [influxdb2](https://github.com/influxdata/influxdb), a scalable datastore for metrics, events, and real-time analytics. Available as [services.influxdb2](#opt-services.influxdb2.enable).
- [isso](https://posativ.org/isso/), a commenting server similar to Disqus.
Available as [isso](#opt-services.isso.enable)
## Backward Incompatibilities {#sec-release-21.11-incompatibilities}
- The `staticjinja` package has been upgraded from 1.0.4 to 3.0.1
@ -164,10 +169,6 @@ pt-services.clipcat.enable).
- `programs.neovim.runtime` switched to a `linkFarm` internally, making it impossible to use wildcards in the `source` argument.
- The `openrazer` and `openrazer-daemon` packages as well as the `hardware.openrazer` module now require users to be members of the `openrazer` group instead of `plugdev`. With this change, users no longer need be granted the entire set of `plugdev` group permissions, which can include permissions other than those required by `openrazer`. This is desirable from a security point of view. The setting [`harware.openrazer.users`](options.html#opt-services.hardware.openrazer.users) can be used to add users to the `openrazer` group.
- The `yambar` package has been split into `yambar` and `yambar-wayland`, corresponding to the xorg and wayland backend respectively. Please switch to `yambar-wayland` if you are on wayland.
## Other Notable Changes {#sec-release-21.11-notable-changes}
- The setting [`services.openssh.logLevel`](options.html#opt-services.openssh.logLevel) now defaults to `"INFO"` instead of `"VERBOSE"`. This brings NixOS in line with upstream and other Linux distributions, and reduces log spam on servers due to bruteforcing botnets.
@ -213,5 +214,3 @@ pt-services.clipcat.enable).
- The [services.syncoid.enable](options.html#opt-services.syncoid.enable) module now properly drops ZFS permissions after usage. Before it delegated permissions to whole pools instead of datasets and didn't clean up after execution. You can manually look this up for your pools by running `zfs allow your-pool-name` and use `zfs unallow syncoid your-pool-name` to clean this up.
- Zfs: `latestCompatibleLinuxPackages` is now exported on the zfs package. One can use `boot.kernelPackages = config.boot.zfs.package.latestCompatibleLinuxPackages;` to always track the latest compatible kernel with a given version of zfs.
- Nginx will use the value of `sslTrustedCertificate` if provided for a virtual host, even if `enableACME` is set. This is useful for providers not using the same certificate to sign OCSP responses and server certificates.

View file

@ -321,6 +321,7 @@
./services/databases/foundationdb.nix
./services/databases/hbase.nix
./services/databases/influxdb.nix
./services/databases/influxdb2.nix
./services/databases/memcached.nix
./services/databases/monetdb.nix
./services/databases/mongodb.nix
@ -959,6 +960,7 @@
./services/web-apps/icingaweb2/icingaweb2.nix
./services/web-apps/icingaweb2/module-monitoring.nix
./services/web-apps/ihatemoney
./services/web-apps/isso.nix
./services/web-apps/jirafeau.nix
./services/web-apps/jitsi-meet.nix
./services/web-apps/keycloak.nix

View file

@ -0,0 +1,53 @@
{ config, lib, pkgs, ... }:
with lib;
let
format = pkgs.formats.json { };
cfg = config.services.influxdb2;
configFile = format.generate "config.json" cfg.settings;
in
{
options = {
services.influxdb2 = {
enable = mkEnableOption "the influxdb2 server";
package = mkOption {
default = pkgs.influxdb2;
defaultText = "pkgs.influxdb2";
description = "influxdb2 derivation to use.";
type = types.package;
};
settings = mkOption {
default = { };
description = "configuration options for influxdb2, see https://docs.influxdata.com/influxdb/v2.0/reference/config-options for details.";
type = format.type;
};
};
};
config = mkIf cfg.enable {
assertions = [{
assertion = !(builtins.hasAttr "bolt-path" cfg.settings) && !(builtins.hasAttr "engine-path" cfg.settings);
message = "services.influxdb2.config: bolt-path and engine-path should not be set as they are managed by systemd";
}];
systemd.services.influxdb2 = {
description = "InfluxDB is an open-source, distributed, time series database";
documentation = [ "https://docs.influxdata.com/influxdb/" ];
wantedBy = [ "multi-user.target" ];
after = [ "network.target" ];
environment = {
INFLUXD_CONFIG_PATH = "${configFile}";
};
serviceConfig = {
ExecStart = "${cfg.package}/bin/influxd --bolt-path \${STATE_DIRECTORY}/influxd.bolt --engine-path \${STATE_DIRECTORY}/engine";
StateDirectory = "influxdb2";
DynamicUser = true;
CapabilityBoundingSet = "";
SystemCallFilter = "@system-service";
LimitNOFILE = 65536;
KillMode = "control-group";
Restart = "on-failure";
};
};
};
meta.maintainers = with lib.maintainers; [ nickcao ];
}
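For reference, a minimal NixOS configuration using the module above might look like the sketch below; `enable` and `settings` are the options defined in this file, and `http-bind-address` is just one example of an influxd option accepted by the freeform `settings`:

```nix
# Enable the new influxdb2 service; settings end up in the JSON config
# file passed to influxd via INFLUXD_CONFIG_PATH.
{
  services.influxdb2 = {
    enable = true;
    settings = {
      http-bind-address = ":8086";
    };
  };
}
```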

View file

@ -102,8 +102,8 @@ in
plugins = mkOption {
type = types.listOf types.package;
default = with pkgs; [ nagiosPluginsOfficial ssmtp mailutils ];
defaultText = "[pkgs.nagiosPluginsOfficial pkgs.ssmtp pkgs.mailutils]";
default = with pkgs; [ monitoring-plugins ssmtp mailutils ];
defaultText = "[pkgs.monitoring-plugins pkgs.ssmtp pkgs.mailutils]";
description = "
Packages to be added to the Nagios <envar>PATH</envar>.
Typically used to add plugins, but can be anything.

View file

@ -0,0 +1,69 @@
{ config, lib, pkgs, ... }:
let
inherit (lib) mkEnableOption mkIf mkOption types literalExample;
cfg = config.services.isso;
settingsFormat = pkgs.formats.ini { };
configFile = settingsFormat.generate "isso.conf" cfg.settings;
in {
options = {
services.isso = {
enable = mkEnableOption ''
A commenting server similar to Disqus.
Note: The application's author intends for isso to be run behind a reverse proxy.
The embedded solution offered by NixOS is only suitable for small installations
below 20 requests per second.
'';
settings = mkOption {
description = ''
Configuration for <package>isso</package>.
See <link xlink:href="https://posativ.org/isso/docs/configuration/server/">Isso Server Configuration</link>
for supported values.
'';
type = types.submodule {
freeformType = settingsFormat.type;
};
example = literalExample ''
{
general = {
host = "http://localhost";
};
}
'';
};
};
};
config = mkIf cfg.enable {
services.isso.settings.general.dbpath = lib.mkDefault "/var/lib/isso/comments.db";
systemd.services.isso = {
description = "isso, a commenting server similar to Disqus";
wantedBy = [ "multi-user.target" ];
serviceConfig = {
User = "isso";
Group = "isso";
DynamicUser = true;
StateDirectory = "isso";
ExecStart = ''
${pkgs.isso}/bin/isso -c ${configFile}
'';
Restart = "on-failure";
RestartSec = 1;
};
};
};
}

View file

@ -69,6 +69,7 @@ in
cjdns = handleTest ./cjdns.nix {};
clickhouse = handleTest ./clickhouse.nix {};
cloud-init = handleTest ./cloud-init.nix {};
cntr = handleTest ./cntr.nix {};
cockroachdb = handleTestOn ["x86_64-linux"] ./cockroachdb.nix {};
consul = handleTest ./consul.nix {};
containers-bridge = handleTest ./containers-bridge.nix {};
@ -125,8 +126,10 @@ in
fancontrol = handleTest ./fancontrol.nix {};
fcitx = handleTest ./fcitx {};
ferm = handleTest ./ferm.nix {};
firefox = handleTest ./firefox.nix {};
firefox-esr = handleTest ./firefox.nix { esr = true; };
firefox = handleTest ./firefox.nix { firefoxPackage = pkgs.firefox; };
firefox-esr = handleTest ./firefox.nix { firefoxPackage = pkgs.firefox-esr; }; # used in `tested` job
firefox-esr-78 = handleTest ./firefox.nix { firefoxPackage = pkgs.firefox-esr-78; };
firefox-esr-91 = handleTest ./firefox.nix { firefoxPackage = pkgs.firefox-esr-91; };
firejail = handleTest ./firejail.nix {};
firewall = handleTest ./firewall.nix {};
fish = handleTest ./fish.nix {};

View file

@ -0,0 +1,63 @@
# Test for cntr tool
{ system ? builtins.currentSystem, config ? { }
, pkgs ? import ../.. { inherit system config; }, lib ? pkgs.lib }:
let
inherit (import ../lib/testing-python.nix { inherit system pkgs; }) makeTest;
mkOCITest = backend:
makeTest {
name = "cntr-${backend}";
meta = { maintainers = with lib.maintainers; [ srk mic92 ]; };
nodes = {
${backend} = { pkgs, ... }: {
environment.systemPackages = [ pkgs.cntr ];
virtualisation.oci-containers = {
inherit backend;
containers.nginx = {
image = "nginx-container";
imageFile = pkgs.dockerTools.examples.nginx;
ports = [ "8181:80" ];
};
};
};
};
testScript = ''
start_all()
${backend}.wait_for_unit("${backend}-nginx.service")
result = ${backend}.wait_until_succeeds(
"cntr attach -t ${backend} nginx sh -- -c 'curl localhost | grep Hello'"
)
assert "Hello" in result
'';
};
mkContainersTest = makeTest {
name = "cntr-containers";
meta = with pkgs.lib.maintainers; { maintainers = [ sorki mic92 ]; };
machine = { lib, ... }: {
environment.systemPackages = [ pkgs.cntr ];
containers.test = {
autoStart = true;
privateNetwork = true;
hostAddress = "172.16.0.1";
localAddress = "172.16.0.2";
config = { };
};
};
testScript = ''
machine.start()
machine.wait_for_unit("container@test.service")
machine.succeed("cntr attach test sh -- -c 'ping -c5 172.16.0.1'")
'';
};
in {
nixos-container = mkContainersTest;
} // (lib.foldl' (attrs: backend: attrs // { ${backend} = mkOCITest backend; })
{ } [ "docker" "podman" ])

View file

@ -1,4 +1,4 @@
import ./make-test-python.nix ({ pkgs, esr ? false, ... }: {
import ./make-test-python.nix ({ pkgs, firefoxPackage, ... }: {
name = "firefox";
meta = with pkgs.lib.maintainers; {
maintainers = [ eelco shlevy ];
@ -8,9 +8,10 @@ import ./make-test-python.nix ({ pkgs, esr ? false, ... }: {
{ pkgs, ... }:
{ imports = [ ./common/x11.nix ];
environment.systemPackages =
(if esr then [ pkgs.firefox-esr ] else [ pkgs.firefox ])
++ [ pkgs.xdotool ];
environment.systemPackages = [
firefoxPackage
pkgs.xdotool
];
# Need some more memory to record audio.
virtualisation.memorySize = "500";

View file

@ -0,0 +1,30 @@
import ./make-test-python.nix ({ pkgs, ... }: {
name = "isso";
meta = with pkgs.lib.maintainers; {
maintainers = [ asbachb ];
};
machine = { config, pkgs, ... }: {
services.isso = {
enable = true;
settings = {
general = {
dbpath = "/var/lib/isso/comments.db";
host = "http://localhost";
};
};
};
};
testScript = let
port = 8080;
in
''
machine.wait_for_unit("isso.service")
machine.wait_for_open_port("${toString port}")
machine.succeed("curl --fail http://localhost:${toString port}/?uri")
machine.succeed("curl --fail http://localhost:${toString port}/js/embed.min.js")
'';
})

View file

@ -163,7 +163,7 @@ import ./make-test-python.nix ({ pkgs, ... }:
'';
tls-cert = pkgs.runCommandNoCC "selfSignedCerts" { buildInputs = [ pkgs.openssl ]; } ''
openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -nodes -subj '/CN=pleroma.nixos.test'
openssl req -x509 -newkey rsa:4096 -keyout key.pem -out cert.pem -nodes -subj '/CN=pleroma.nixos.test' -days 36500
mkdir -p $out
cp key.pem cert.pem $out
'';
@ -202,6 +202,7 @@ import ./make-test-python.nix ({ pkgs, ... }:
security.pki.certificateFiles = [ "${tls-cert}/cert.pem" ];
networking.extraHosts = hosts nodes;
networking.firewall.enable = false;
virtualisation.memorySize = 512;
environment.systemPackages = with pkgs; [
provision-db
provision-secrets

View file

@ -360,7 +360,6 @@ let
systemd.services.prometheus-kea-exporter.after = [ "kea-dhcp6-server.service" ];
services.kea = {
enable = true;
dhcp6 = {
enable = true;
settings = {

View file

@ -15,13 +15,13 @@ assert withGtk3 -> gtk3 != null;
stdenv.mkDerivation rec {
pname = "carla";
version = "2.3.1";
version = "2.3.2";
src = fetchFromGitHub {
owner = "falkTX";
repo = pname;
rev = "v${version}";
sha256 = "sha256-LM7wRvUg2Q3f4qBZN1MPvsLkdl1ziArCfhdalyD1G3w=";
sha256 = "sha256-en3eQtRUd2schpIccnuD42+wTYOAG9zsD6yNRA73bKE=";
};
nativeBuildInputs = [

View file

@ -2,11 +2,11 @@
mkDerivation rec {
pname = "drumkv1";
version = "0.9.18";
version = "0.9.23";
src = fetchurl {
url = "mirror://sourceforge/drumkv1/${pname}-${version}.tar.gz";
sha256 = "1bzkaz7sqx1pvirja8zm7i2ckzl5ad6xspr4840389ik3l8qpnr5";
sha256 = "sha256-gNscsqGpEfU1CNJDlBAzum9M0vzJSm6Wx5b/zhOt+sk=";
};
buildInputs = [ libjack2 alsa-lib libsndfile liblo lv2 qt5.qtbase qt5.qttools ];

View file

@ -13,6 +13,7 @@
, tor
, psmisc
}:
let
bisq-launcher = writeScript "bisq-launcher" ''
#! ${bash}/bin/bash
@ -46,15 +47,16 @@ let
'';
in
stdenv.mkDerivation rec {
version = "1.7.0";
pname = "bisq-desktop";
nativeBuildInputs = [ makeWrapper copyDesktopItems dpkg ];
version = "1.7.0";
src = fetchurl {
url = "https://github.com/bisq-network/bisq/releases/download/v${version}/Bisq-64bit-${version}.deb";
sha256 = "0crry5k7crmrqn14wxiyrnhk09ac8a9ksqrwwky7jsnyah0bx5k4";
};
nativeBuildInputs = [ makeWrapper copyDesktopItems dpkg ];
desktopItems = [
(makeDesktopItem {
name = "Bisq";
@ -71,6 +73,8 @@ stdenv.mkDerivation rec {
'';
installPhase = ''
runHook preInstall
mkdir -p $out/lib $out/bin
cp opt/bisq/lib/app/desktop-${version}-all.jar $out/lib
@ -80,13 +84,13 @@ stdenv.mkDerivation rec {
makeWrapper ${bisq-launcher} $out/bin/bisq-desktop \
--prefix PATH : $out/bin
copyDesktopItems
for n in 16 24 32 48 64 96 128 256; do
size=$n"x"$n
${imagemagick}/bin/convert opt/bisq/lib/Bisq.png -resize $size bisq.png
install -Dm644 -t $out/share/icons/hicolor/$size/apps bisq.png
done;
runHook postInstall
'';
meta = with lib; {

View file

@ -7,13 +7,13 @@ with lib;
stdenv.mkDerivation rec {
name = "bitcoin" + (toString (optional (!withGui) "d")) + "-unlimited-" + version;
version = "1.9.1.1";
version = "1.9.2.0";
src = fetchFromGitHub {
owner = "bitcoinunlimited";
repo = "bitcoinunlimited";
rev = "BCHunlimited${version}";
sha256 = "sha256-K15SI1F/xI4SkX4a41QHLn89YaHCgrlv+wcbkpwGKhI=";
sha256 = "sha256-qUf/GWZHpI57ATTlvRhjDtAjRa8a4uvUb0G9Xcf0j7w=";
};
nativeBuildInputs = [ pkg-config autoreconfHook python3 ]

View file

@ -20,11 +20,11 @@ let
in
stdenv.mkDerivation rec {
pname = "clightning";
version = "0.10.0";
version = "0.10.1";
src = fetchurl {
url = "https://github.com/ElementsProject/lightning/releases/download/v${version}/clightning-v${version}.zip";
sha256 = "5154e67780dddbf12f64c4b1994c3ee3834236f05b6462adf25e8a5f3fa407ea";
sha256 = "9271e9e89d60332b66afedbf8d6eab2a4a488782ab400ee1f60667d73c5a9a96";
};
nativeBuildInputs = [ autogen autoconf automake gettext libtool pkg-config py3 unzip which ];

View file

@ -2,12 +2,12 @@
let
pname = "ledger-live-desktop";
version = "2.31.1";
version = "2.32.2";
name = "${pname}-${version}";
src = fetchurl {
url = "https://github.com/LedgerHQ/${pname}/releases/download/v${version}/${pname}-${version}-linux-x86_64.AppImage";
sha256 = "0cxf4i58l0kg9c13j7mf0w5ijrkkf9z1375vn6xghd0r8g5hvws5";
sha256 = "14agkl6xf0f9s5qldla6p6kzl8zlx61q5m8qy63lq215hrzh9d50";
};
appimageContents = appimageTools.extractType2 {

View file

@ -70,7 +70,48 @@ let
)
);
overrides = {
overrides = lib.optionalAttrs (variant == "stable") {
# upstream issue: missing file header
speech-tagger = markBroken super.speech-tagger;
# upstream issue: missing file header
textmate = markBroken super.textmate;
# upstream issue: missing file header
window-numbering = markBroken super.window-numbering;
# upstream issue: missing file header
voca-builder = markBroken super.voca-builder;
# upstream issue: missing file header
initsplit = markBroken super.initsplit;
# upstream issue: missing file header
jsfmt = markBroken super.jsfmt;
# upstream issue: missing file header
maxframe = markBroken super.maxframe;
# upstream issue: missing file header
connection = markBroken super.connection;
# upstream issue: missing file header
dictionary = markBroken super.dictionary;
# upstream issue: missing file header
link = markBroken super.link;
# upstream issue: missing file header
bufshow = markBroken super.bufshow;
# upstream issue: missing file header
elmine = markBroken super.elmine;
# upstream issue: missing file header
ido-complete-space-or-hyphen = markBroken super.ido-complete-space-or-hyphen;
} // {
# Expects bash to be at /bin/bash
ac-rtags = fix-rtags super.ac-rtags;
@ -391,33 +432,9 @@ let
package-plus = super."package+";
rect-plus = super."rect+";
# upstream issue: missing file header
bufshow = markBroken super.bufshow;
# upstream issue: missing file header
connection = markBroken super.connection;
# upstream issue: missing file header
dictionary = markBroken super.dictionary;
# upstream issue: missing file header
elmine = markBroken super.elmine;
# upstream issue: missing file header
ido-complete-space-or-hyphen = markBroken super.ido-complete-space-or-hyphen;
# upstream issue: missing file header
initsplit = markBroken super.initsplit;
# upstream issue: missing file header
instapaper = markBroken super.instapaper;
# upstream issue: missing file header
jsfmt = markBroken super.jsfmt;
# upstream issue: missing file header
maxframe = markBroken super.maxframe;
# upstream issue: doesn't build
magit-stgit = markBroken super.magit-stgit;
@ -433,24 +450,9 @@ let
# upstream issue: missing file header
qiita = markBroken super.qiita;
# upstream issue: missing file header
speech-tagger = markBroken super.speech-tagger;
# upstream issue: missing file header
sql-presto = markBroken super.sql-presto;
# upstream issue: missing file header
textmate = markBroken super.textmate;
# upstream issue: missing file header
link = markBroken super.link;
# upstream issue: missing file header
voca-builder = markBroken super.voca-builder;
# upstream issue: missing file header
window-numbering = markBroken super.window-numbering;
editorconfig = super.editorconfig.overrideAttrs (attrs: {
propagatedUserEnvPkgs = [ pkgs.editorconfig-core-c ];
});

View file

@ -14,17 +14,17 @@ let
archive_fmt = if stdenv.isDarwin then "zip" else "tar.gz";
sha256 = {
x86_64-linux = "049spg4c1arkw97mg0h046kiirmcrjj97sy4ldiblwldjn510acw";
x86_64-darwin = "0g6b1891ag4a6p7rlkfka5v4nbmpr4ckkmibhw8l3wa9zdzs77x6";
aarch64-linux = "1qvk6cn5v9bz4vl5ifpdgrba94v6a54xx8s3fxdkj3lqvq27kpd1";
aarch64-darwin = "1whgjkxy70ifx1vaddxr8f1xcg651fhca4x7rzidzbyyf3baghy0";
armv7l-linux = "1k45s81s4ispc0vz7i17a7gss05d82vpymxgangg6f1yxwz944r4";
x86_64-linux = "14j1bss4bqw39ijmyh0kyr5xgzq61bc0if7g94jkvdbngz6fa25f";
x86_64-darwin = "0922r49475j1i8jrx5935bly7cv26hniz9iqf30qj6qs6d8kibci";
aarch64-linux = "11kkys3fsf4a4hvqv524fkdl686addd3ygzz0mav09xh8wjqbisw";
aarch64-darwin = "1xk56ww2ndksi6sqnr42zcqx2fl52aip3jb4fmdmqg1cvllfx0sd";
armv7l-linux = "1jiyjknl2xxivifixcwvyi6qsq7kr71gbalzdj6xca2i6pc1gbvp";
}.${system};
in
callPackage ./generic.nix rec {
# Please backport all compatible updates to the stable release.
# This is important for the extension ecosystem.
version = "1.58.2";
version = "1.59.0";
pname = "vscode";
executableName = "code" + lib.optionalString isInsiders "-insiders";

View file

@ -13,10 +13,10 @@ let
archive_fmt = if system == "x86_64-darwin" then "zip" else "tar.gz";
sha256 = {
x86_64-linux = "11h4c5ghgn3qrg66jh2par3cl3fqzn9xb7gdniww4badnyajnij8";
x86_64-darwin = "0hd3qdxg4cknk3fxv509jlblwmfx65bm2a4arsg255224dpg64n2";
aarch64-linux = "0waakj413kqf68sawajd3n24qdbx6b2svyb4lgbn0sy1apc96s3c";
armv7l-linux = "1ij2bmsk601f1vjljj6gvxsxrcjqf2m74s9kc006hmcz7czjgk8f";
x86_64-linux = "0yx0h7rd8v9j3yq863dj78bm587s8lpisbn1skb5whv6qv88x7c0";
x86_64-darwin = "1b5jr08cgl49rh26id8iwi64d32ssr7kis72zcqg0jkw7larxvvh";
aarch64-linux = "1a62krnilfi7nr7mmxyv3danj7h2yfdwg784q8vhrdjyqjd8gjbs";
armv7l-linux = "1axazx7hf6iw0dq1m2049kfrmk8jndycz9pcn3csj6rm65plg746";
}.${system};
sourceRoot = {
@ -31,7 +31,7 @@ in
# Please backport all compatible updates to the stable release.
# This is important for the extension ecosystem.
version = "1.58.2";
version = "1.59.0";
pname = "vscodium";
executableName = "codium";

View file

@ -3,13 +3,13 @@
stdenv.mkDerivation rec {
pname = "goxel";
version = "0.10.7";
version = "0.10.8";
src = fetchFromGitHub {
owner = "guillaumechereau";
repo = "goxel";
rev = "v${version}";
sha256 = "1v6m6nhl1if8ik5bmblhq46bip6y2qz18a04s8a9awb4yh9ls039";
sha256 = "sha256-M9H9SV8xmU7Jw5rEdV0gfloIEBvWmWSuH+BCrowpf2M=";
};
patches = [ ./disable-imgui_ini.patch ];

View file

@ -1,31 +1,46 @@
{ boost, cmake, fetchFromGitHub, ffmpeg, qtbase, qtx11extras,
qttools, qtxmlpatterns, qtsvg, gdal, gfortran, libXt, makeWrapper,
mkDerivation, ninja, mpi, python3, lib, tbb, libGLU, libGL }:
{ lib, stdenv, fetchFromGitLab, fetchurl
, boost, cmake, ffmpeg, qtbase, qtx11extras
, qttools, qtxmlpatterns, qtsvg, gdal, gfortran, libXt, makeWrapper
, mkDerivation, ninja, mpi, python3, tbb, libGLU, libGL
, withDocs ? true
}:
mkDerivation rec {
let
version = "5.9.1";
docFiles = [
(fetchurl {
url = "https://www.paraview.org/paraview-downloads/download.php?submit=Download&version=v${lib.versions.majorMinor version}&type=data&os=Sources&downloadFile=ParaViewTutorial-${version}.pdf";
name = "Tutorial.pdf";
sha256 = "1knpirjbz3rv8p8n03p39vv8vi5imvxakjsssqgly09g0cnsikkw";
})
(fetchurl {
url = "https://www.paraview.org/paraview-downloads/download.php?submit=Download&version=v${lib.versions.majorMinor version}&type=data&os=Sources&downloadFile=ParaViewGettingStarted-${version}.pdf";
name = "GettingStarted.pdf";
sha256 = "14xhlvg7s7d5amqf4qfyamx2a6b66zf4cmlfm3s7iw3jq01x1lx6";
})
(fetchurl {
url = "https://www.paraview.org/paraview-downloads/download.php?submit=Download&version=v${lib.versions.majorMinor version}&type=data&os=Sources&downloadFile=ParaViewCatalystGuide-${version}.pdf";
name = "CatalystGuide.pdf";
sha256 = "133vcfrbg2nh15igl51ns6gnfn1is20vq6j0rg37wha697pmcr4a";
})
];
in mkDerivation rec {
pname = "paraview";
version = "5.8.0";
inherit version;
src = fetchFromGitHub {
owner = "Kitware";
repo = "ParaView";
src = fetchFromGitLab {
domain = "gitlab.kitware.com";
owner = "paraview";
repo = "paraview";
rev = "v${version}";
sha256 = "1mka6wwg9mbkqi3phs29mvxq6qbc44sspbm4awwamqhilh4grhrj";
sha256 = "0pzic95br0vr785jnpxqmfxcljw3wk7bhm2xy0jfmwm1dh2b7xac";
fetchSubmodules = true;
};
# Avoid error: format not a string literal and
# no format arguments [-Werror=format-security]
preConfigure = ''
substituteInPlace VTK/Common/Core/vtkLogger.h \
--replace 'vtkLogScopeF(verbosity_name, __func__)' 'vtkLogScopeF(verbosity_name, "%s", __func__)'
substituteInPlace VTK/Common/Core/vtkLogger.h \
--replace 'vtkVLogScopeF(level, __func__)' 'vtkVLogScopeF(level, "%s", __func__)'
'';
# Find the Qt platform plugin "minimal"
patchPhase = ''
preConfigure = ''
export QT_PLUGIN_PATH=${qtbase.bin}/${qtbase.qtPluginPrefix}
'';
@ -63,7 +78,8 @@ mkDerivation rec {
];
buildInputs = [
libGLU libGL
libGLU
libGL
libXt
mpi
tbb
@ -77,6 +93,14 @@ mkDerivation rec {
qtsvg
];
postInstall = let docDir = "$out/share/paraview-${lib.versions.majorMinor version}/doc"; in
lib.optionalString withDocs ''
mkdir -p ${docDir};
for docFile in ${lib.concatStringsSep " " docFiles}; do
cp $docFile ${docDir}/$(stripHash $docFile);
done;
'';
propagatedBuildInputs = [
(python3.withPackages (ps: with ps; [ numpy matplotlib mpi4py ]))
];
@ -84,7 +108,7 @@ mkDerivation rec {
meta = with lib; {
homepage = "https://www.paraview.org/";
description = "3D Data analysis and visualization application";
license = licenses.free;
license = licenses.bsd3;
maintainers = with maintainers; [ guibert ];
platforms = platforms.linux;
};

View file

@ -5,13 +5,13 @@
buildGoModule rec {
pname = "dasel";
version = "1.16.1";
version = "1.17.0";
src = fetchFromGitHub {
owner = "TomWright";
repo = pname;
rev = "v${version}";
sha256 = "sha256-BrtTBy/Tb4xfs7UHk1acRzKZWNZJqhufHG1ItcM8TPs=";
sha256 = "sha256-VZsYwsYec6Q9T8xkb60F0CvPVFd2WJgyOfegm5GuN8c=";
};
vendorSha256 = "sha256-BdX4DO77mIf/+aBdkNVFUzClsIml1UMcgvikDbbdgcY=";

View file

@ -1,22 +1,19 @@
{ lib, stdenv, fetchurl, openssl }:
let
version = "6.4.20";
in
stdenv.mkDerivation {
stdenv.mkDerivation rec {
pname = "fetchmail";
inherit version;
version = "6.4.21";
src = fetchurl {
url = "mirror://sourceforge/fetchmail/fetchmail-${version}.tar.xz";
sha256 = "0xk171sbxcwjh1ibpipryw5sv4sy7jjfvhn5n373j04g5sp428f8";
sha256 = "sha256-akWcHK/XodqlzRNxQNpgwYyEtWmc2OckmnnDM0LJnR0=";
};
buildInputs = [ openssl ];
configureFlags = [ "--with-ssl=${openssl.dev}" ];
meta = {
meta = with lib; {
homepage = "https://www.fetchmail.info/";
description = "A full-featured remote-mail retrieval and forwarding utility";
longDescription = ''
@ -27,9 +24,8 @@ stdenv.mkDerivation {
all flavors of IMAP, ETRN, and ODMR. It can even support IPv6 and
IPSEC.
'';
platforms = lib.platforms.unix;
maintainers = [ lib.maintainers.peti ];
license = lib.licenses.gpl2Plus;
platforms = platforms.unix;
maintainers = [ maintainers.peti ];
license = licenses.gpl2Plus;
};
}

View file

@ -46,5 +46,6 @@ stdenv.mkDerivation rec {
license = licenses.gpl3Plus;
maintainers = with maintainers; [ ramkromberg ];
platforms = with platforms; linux;
broken = stdenv.hostPlatform.isAarch64;
};
}

View file

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "logseq";
version = "0.2.10";
version = "0.3.2";
src = fetchurl {
url = "https://github.com/logseq/logseq/releases/download/${version}/logseq-linux-x64-${version}.AppImage";
sha256 = "1YVOyaHDcv+GKx5nQq3cUrViKZ6CEuHQPiDvK0Jv+Qc=";
sha256 = "4gWpB3uTQsm9oRvT9rGizIU7xgrZim7jxjJGfME7WAg=";
name = "${pname}-${version}.AppImage";
};

View file

@ -187,7 +187,7 @@ stdenv.mkDerivation {
# update with:
# $ nix-shell maintainers/scripts/update.nix --argstr package firefox-bin-unwrapped
passthru.updateScript = import ./update.nix {
inherit pname version channel writeScript xidel coreutils gnused gnugrep gnupg curl runtimeShell;
inherit pname channel writeScript xidel coreutils gnused gnugrep gnupg curl runtimeShell;
baseUrl =
if channel == "devedition"
then "http://archive.mozilla.org/pub/devedition/releases/"

View file

@ -1,4 +1,4 @@
{ name
{ pname
, channel
, writeScript
, xidel
@ -17,7 +17,7 @@ let
isBeta =
channel != "release";
in writeScript "update-${name}" ''
in writeScript "update-${pname}" ''
#!${runtimeShell}
PATH=${coreutils}/bin:${gnused}/bin:${gnugrep}/bin:${xidel}/bin:${curl}/bin:${gnupg}/bin
set -eux

View file

@ -8,7 +8,7 @@
, yasm, libGLU, libGL, sqlite, unzip, makeWrapper
, hunspell, libevent, libstartup_notification
, libvpx_1_8
, icu67, libpng, jemalloc, glib, pciutils
, icu69, libpng, jemalloc, glib, pciutils
, autoconf213, which, gnused, rustPackages, rustPackages_1_45
, rust-cbindgen, nodejs, nasm, fetchpatch
, gnum4
@ -173,7 +173,7 @@ buildStdenv.mkDerivation ({
xorg.libXext
libevent libstartup_notification /* cairo */
libpng jemalloc glib
nasm icu67 libvpx_1_8
nasm icu69 libvpx_1_8
# >= 66 requires nasm for the AV1 lib dav1d
# yasm can potentially be removed in future versions
# https://bugzilla.mozilla.org/show_bug.cgi?id=1501796

View file

@ -7,10 +7,10 @@ in
rec {
firefox = common rec {
pname = "firefox";
ffversion = "90.0.2";
ffversion = "91.0";
src = fetchurl {
url = "mirror://mozilla/firefox/releases/${ffversion}/source/firefox-${ffversion}.source.tar.xz";
sha512 = "4fda0b1e666fb0b1d846708fad2b48a5b53d48e7fc2a5da1f234b5b839c55265b41f6509e6b506d5e8a7455f816dfa5ab538589bc9e83b7e3846f0f72210513e";
sha512 = "a02486a3996570e0cc815e92c98890bca1d27ce0018c2ee3d4bff9a6e54dbc8f5926fea8b5864f208e15389d631685b2add1e4e9e51146e40224d16d5c02f730";
};
meta = {
@ -31,6 +31,32 @@ rec {
};
};
firefox-esr-91 = common rec {
pname = "firefox-esr";
ffversion = "91.0esr";
src = fetchurl {
url = "mirror://mozilla/firefox/releases/${ffversion}/source/firefox-${ffversion}.source.tar.xz";
sha512 = "e518e1536094a1da44eb45b3b0f3adc1b5532f17da2dbcc994715419ec4fcec40574fdf633349a8e5de6382942f5706757a35f1b96b11de4754855b9cf7946ae";
};
meta = {
description = "A web browser built from Firefox Extended Support Release source tree";
homepage = "http://www.mozilla.com/en-US/firefox/";
maintainers = with lib.maintainers; [ hexa ];
platforms = lib.platforms.unix;
badPlatforms = lib.platforms.darwin;
broken = stdenv.buildPlatform.is32bit; # since Firefox 60, build on 32-bit platforms fails with "out of memory".
# not in `badPlatforms` because cross-compilation on 64-bit machine might work.
license = lib.licenses.mpl20;
};
tests = [ nixosTests.firefox-esr-91 ];
updateScript = callPackage ./update.nix {
attrPath = "firefox-esr-91-unwrapped";
versionSuffix = "esr";
versionKey = "ffversion";
};
};
firefox-esr-78 = common rec {
pname = "firefox-esr";
ffversion = "78.12.0esr";
@ -49,7 +75,7 @@ rec {
# not in `badPlatforms` because cross-compilation on 64-bit machine might work.
license = lib.licenses.mpl20;
};
tests = [ nixosTests.firefox-esr ];
tests = [ nixosTests.firefox-esr-78 ];
updateScript = callPackage ./update.nix {
attrPath = "firefox-esr-78-unwrapped";
versionSuffix = "esr";

View file

@ -50,11 +50,11 @@
stdenv.mkDerivation rec {
pname = "yandex-browser";
version = "21.5.3.753-1";
version = "21.6.2.817-1";
src = fetchurl {
url = "http://repo.yandex.ru/yandex-browser/deb/pool/main/y/${pname}-beta/${pname}-beta_${version}_amd64.deb";
sha256 = "sha256-sI2p/fCaruUJ3qPMyy+12Bh5I1SH8m7sYX5yDex2rwg=";
sha256 = "sha256-xeZkQzVPPNABxa3/YBLoZl1obbFdzxdqIgLyoA4PN8U=";
};
nativeBuildInputs = [

View file

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "flink";
version = "1.12.1";
version = "1.13.2";
src = fetchurl {
url = "mirror://apache/flink/${pname}-${version}/${pname}-${version}-bin-scala_2.11.tgz";
sha256 = "146azc5wg1xby3nqz8mha959qy99z2h8032rfgs2mcl3d5rrsm2l";
sha256 = "sha256-GPiHV19Z2Htt75hCXK2nCeQMIBQFEEUxXlBembenFL0=";
};
nativeBuildInputs = [ makeWrapper ];

View file

@ -2,7 +2,7 @@
buildGoPackage rec {
pname = "kube-router";
version = "1.1.1";
version = "1.2.2";
goPackagePath = "github.com/cloudnativelabs/kube-router";
@ -10,7 +10,7 @@ buildGoPackage rec {
owner = "cloudnativelabs";
repo = pname;
rev = "v${version}";
sha256 = "sha256-U7vjWtVXmyEPYFS1fAPOgV3WILGPhWsmoawV7B0pZaE=";
sha256 = "sha256-/VToLQexvRtcBU+k8WnGEcfLfxme/hgRnhU8723BEFU=";
};
buildFlagsArray = ''

View file

@ -21,13 +21,13 @@
stdenv.mkDerivation rec {
pname = "kubernetes";
version = "1.21.3";
version = "1.22.0";
src = fetchFromGitHub {
owner = "kubernetes";
repo = "kubernetes";
rev = "v${version}";
sha256 = "sha256-GMigdVuqJN6eIN0nhY5PVUEnCqjAYUzitetk2QmX5wQ=";
sha256 = "sha256-4lqqD3SBLBWrnFWhRzV3QgRLdGRW1Jx/eL6swtHL0Vw=";
};
nativeBuildInputs = [ removeReferencesTo makeWrapper which go rsync installShellFiles ];

View file

@ -0,0 +1,112 @@
{ lib, fetchFromGitHub, buildGoModule, go, removeReferencesTo, buildEnv }:
let
package = buildGoModule rec {
pname = "nomad-autoscaler";
version = "0.3.3";
outputs = [
"out"
"bin"
"aws_asg"
"azure_vmss"
"datadog"
"fixed_value"
"gce_mig"
"nomad_apm"
"nomad_target"
"pass_through"
"prometheus"
"target_value"
"threshold"
];
src = fetchFromGitHub {
owner = "hashicorp";
repo = "nomad-autoscaler";
rev = "v${version}";
sha256 = "sha256-bN/U6aCf33B88ouQwTGG8CqARzWmIvXNr5JPr3l8cVI=";
};
vendorSha256 = "sha256-Ls8gkfLyxfQD8krvxjAPnZhf1r1s2MhtQfMMfp8hJII=";
subPackages = [ "." ];
nativeBuildInputs = [ removeReferencesTo ];
# buildGoModule overrides normal buildPhase, can't use makeTargets
postBuild = ''
make build plugins
'';
# tries to pull tests from the network, and fails silently anyway
doCheck = false;
postInstall = ''
mkdir -p $bin/bin
mv $out/bin/nomad-autoscaler $bin/bin/nomad-autoscaler
ln -s $bin/bin/nomad-autoscaler $out/bin/nomad-autoscaler
for d in $outputs; do
mkdir -p ''${!d}/share
done
rmdir $bin/share
# have out contain all of the plugins
for plugin in bin/plugins/*; do
remove-references-to -t ${go} "$plugin"
cp "$plugin" $out/share/
done
# populate the outputs as individual plugins
# there doesn't seem to be a more generic way to handle this:
# bash doesn't allow dashes '-' in variable names, so the
# output names need to differ slightly from the binary names
mv bin/plugins/aws-asg $aws_asg/share/
mv bin/plugins/azure-vmss $azure_vmss/share/
mv bin/plugins/datadog $datadog/share/
mv bin/plugins/fixed-value $fixed_value/share/
mv bin/plugins/gce-mig $gce_mig/share/
mv bin/plugins/nomad-apm $nomad_apm/share/
mv bin/plugins/nomad-target $nomad_target/share/
mv bin/plugins/pass-through $pass_through/share/
mv bin/plugins/prometheus $prometheus/share/
mv bin/plugins/target-value $target_value/share/
mv bin/plugins/threshold $threshold/share/
'';
# keep this toggleable, so that overridden versions can disable this check
# if they want newer versions of the plugins without having to modify
# the output logic
doInstallCheck = true;
installCheckPhase = ''
rmdir bin/plugins || {
echo "Not all plugins were extracted"
echo "Please move the following to their related output: $(ls bin/plugins)"
exit 1
}
'';
passthru = {
inherit plugins withPlugins;
};
meta = with lib; {
description = "Autoscaling daemon for Nomad";
homepage = "https://github.com/hashicorp/nomad-autoscaler";
license = licenses.mpl20;
maintainers = with maintainers; [ jonringer ];
};
};
plugins = let
plugins = builtins.filter (n: !(lib.elem n [ "out" "bin" ])) package.outputs;
in lib.genAttrs plugins (output: package.${output});
# Intended to be used as: nomad-autoscaler.withPlugins (ps: [ ps.aws_asg ps.nomad_target ])
withPlugins = f: buildEnv {
name = "nomad-autoscaler-env";
paths = [ package.bin ] ++ f plugins;
};
in
package
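A minimal usage sketch of the two passthru helpers defined above (assuming the
package is exposed as nomad-autoscaler; attribute names other than those shown
in the comments are illustrative):

let
  pkgs = import <nixpkgs> { };
in {
  # environment containing the daemon plus two of the plugin outputs
  withSelectedPlugins =
    pkgs.nomad-autoscaler.withPlugins (ps: [ ps.aws_asg ps.nomad_target ]);

  # the install check above is deliberately toggleable; an override that bumps
  # the plugins could switch it off like this
  withoutPluginCheck =
    pkgs.nomad-autoscaler.overrideAttrs (old: { doInstallCheck = false; });
}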

View file

@ -2,16 +2,16 @@
buildGoModule rec {
pname = "starboard-octant-plugin";
version = "0.10.3";
version = "0.11.0";
src = fetchFromGitHub {
owner = "aquasecurity";
repo = pname;
rev = "v${version}";
sha256 = "sha256-9vl068ZTw6Czf+cWQ0k1lU0pqh7P0YZgLguHkk3M918=";
sha256 = "sha256-XHc/1rqTEVOjCm0kFniUmmjVeRsr9Npt0OpQ6Oy7Rxo=";
};
vendorSha256 = "sha256-HOvZPDVKZEoL91yyaJRuKThHirY77xlKOtLKARthxn8=";
vendorSha256 = "sha256-EM0lPwwWJuLD+aqZWshz1ILaeEtUU4wJ0Puwv1Ikgf4=";
preBuild = ''
buildFlagsArray+=("-ldflags" "-s -w")

View file

@ -2,16 +2,16 @@
buildGoModule rec {
pname = "terraform-provider-cloudfoundry";
version = "0.12.6";
version = "0.14.2";
src = fetchFromGitHub {
owner = "cloudfoundry-community";
repo = pname;
rev = "v${version}";
sha256 = "0n5ybpzk6zkrnd9vpmbjlkm8fdp7nbfr046wih0jk72pmiyrcygi";
sha256 = "12mx87dip6vn10zvkf4rgrd27k708lnl149j9xj7bmb8v9m1082v";
};
vendorSha256 = "01lfsd9aw9w3kr1a2a5b7ac6d8jaij83lhxl4y4qsnjlqk86fbxq";
vendorSha256 = "0kydjnwzj0fylizvk1vg42zyiy17qhz40z3iwa1r5bb20qkrlz93";
# needs a running cloudfoundry
doCheck = false;

View file

@ -0,0 +1,95 @@
{ lib
, copyDesktopItems
, electron
, esbuild
, fetchFromGitHub
, libdeltachat
, makeDesktopItem
, makeWrapper
, nodePackages
, pkg-config
, stdenv
, CoreServices
}:
let
electronExec = if stdenv.isDarwin then
"${electron}/Applications/Electron.app/Contents/MacOS/Electron"
else
"${electron}/bin/electron";
in nodePackages.deltachat-desktop.override rec {
pname = "deltachat-desktop";
version = "unstable-2021-08-04";
src = fetchFromGitHub {
owner = "deltachat";
repo = "deltachat-desktop";
rev = "2c47d6b7e46f4f68c7eb45508ab9e145af489ea1";
sha256 = "03b6j3cj2yanvsargh6q57bf1llg17yrqgmd14lp0wkam767kkfa";
};
nativeBuildInputs = [
esbuild
makeWrapper
pkg-config
] ++ lib.optionals stdenv.isLinux [
copyDesktopItems
];
buildInputs = [
libdeltachat
] ++ lib.optionals stdenv.isDarwin [
CoreServices
];
ELECTRON_SKIP_BINARY_DOWNLOAD = "1";
USE_SYSTEM_LIBDELTACHAT = "true";
VERSION_INFO_GIT_REF = src.rev;
postInstall = let
keep = lib.concatMapStringsSep " " (file: "! -name ${file}") [
"_locales" "build" "html-dist" "images" "index.js"
"node_modules" "themes" "tsc-dist"
];
in ''
rm -r node_modules/deltachat-node/{deltachat-core-rust,prebuilds,src}
patchShebangs node_modules/sass/sass.js
npm run build
npm prune --production
find . -mindepth 1 -maxdepth 1 ${keep} -print0 | xargs -0 rm -r
mkdir -p $out/share/icons/hicolor/scalable/apps
ln -s $out/lib/node_modules/deltachat-desktop/build/icon.png \
$out/share/icons/hicolor/scalable/apps/deltachat.png
makeWrapper ${electronExec} $out/bin/deltachat \
--add-flags $out/lib/node_modules/deltachat-desktop
'';
desktopItems = lib.singleton (makeDesktopItem {
name = "deltachat";
exec = "deltachat %u";
icon = "deltachat";
desktopName = "Delta Chat";
genericName = "Delta Chat";
comment = meta.description;
categories = "Network;InstantMessaging;Chat;";
extraEntries = ''
StartupWMClass=DeltaChat
MimeType=x-scheme-handler/openpgp4fpr;x-scheme-handler/mailto;
'';
});
passthru.updateScript = ./update.sh;
meta = with lib; {
description = "Email-based instant messaging for Desktop";
homepage = "https://github.com/deltachat/deltachat-desktop";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ dotlambda ];
};
}

View file

@ -0,0 +1,58 @@
{
"name": "deltachat-desktop",
"version": "1.20.3",
"dependencies": {
"@blueprintjs/core": "^3.22.3",
"@mapbox/geojson-extent": "^1.0.0",
"application-config": "^1.0.1",
"array-differ": "^3.0.0",
"classnames": "^2.3.1",
"debounce": "^1.2.0",
"deltachat-node": "1.56.2",
"emoji-js-clean": "^4.0.0",
"emoji-mart": "^3.0.0",
"emoji-regex": "^9.2.2",
"error-stack-parser": "^2.0.6",
"filesize": "^6.1.0",
"fs-extra": "^8.1.0",
"mapbox-gl": "^1.12.0",
"mime-types": "^2.1.31",
"moment": "^2.27.0",
"node-fetch": "^2.6.1",
"path-browserify": "^1.0.1",
"punycode": "^2.1.1",
"rc": "^1.2.8",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"react-qr-reader": "^2.2.1",
"react-qr-svg": "^2.1.0",
"react-string-replace": "^0.4.4",
"react-virtualized": "^9.21.2",
"simple-markdown": "^0.7.1",
"source-map-support": "^0.5.19",
"tempy": "^0.3.0",
"url-parse": "^1.4.7",
"use-debounce": "^3.3.0",
"@babel/core": "^7.7.7",
"@types/debounce": "^1.2.0",
"@babel/preset-env": "^7.7.7",
"@babel/preset-react": "^7.7.4",
"@types/emoji-mart": "^3.0.2",
"@types/fs-extra": "^8.0.1",
"@types/mapbox-gl": "^0.54.5",
"@types/mime-types": "^2.1.0",
"@types/node": "^14.14.20",
"@types/node-fetch": "^2.5.7",
"@types/prop-types": "^15.7.3",
"@types/rc": "^1.1.0",
"@types/react": "^16.9.17",
"@types/react-dom": "^16.9.4",
"@types/react-virtualized": "^9.21.10",
"@types/url-parse": "^1.4.3",
"electron": "^13.1.6",
"glob-watcher": "^5.0.5",
"sass": "^1.26.5",
"typescript": "^3.9.7",
"xml-js": "^1.6.8"
}
}

View file

@ -0,0 +1,42 @@
#! /usr/bin/env nix-shell
#! nix-shell -i bash -p curl jq nix-prefetch
set -euo pipefail
cd "$(dirname "$0")"
owner=deltachat
repo=deltachat-desktop
nixpkgs=../../../../..
rev=$(
curl -s "https://api.github.com/repos/$owner/$repo/releases" |
jq 'map(select(.prerelease | not)) | .[0].tag_name' --raw-output
)
ver=$(echo "$rev" | sed 's/^v//')
old_ver=$(sed -n 's/.*\bversion = "\(.*\)".*/\1/p' default.nix)
if [ "$ver" = "$old_ver" ]; then
echo "Up to date: $ver"
exit
fi
echo "$old_ver -> $ver"
sha256=$(nix-prefetch -f "$nixpkgs" deltachat-desktop --rev "$rev")
sed -e "s#\\bversion = \".*\"#version = \"$ver\"#" \
-e "s#\\bsha256 = \".*\"#sha256 = \"$sha256\"#" \
-i default.nix
src=$(nix-build "$nixpkgs" -A deltachat-desktop.src --no-out-link)
jq '{ name, version, dependencies: (.dependencies + (.devDependencies | del(.["@typescript-eslint/eslint-plugin","@typescript-eslint/parser","esbuild","electron-builder","electron-devtools-installer","electron-notarize","eslint","eslint-config-prettier","eslint-plugin-react-hooks","hallmark","prettier","tape","testcafe","testcafe-browser-provider-electron","testcafe-react-selectors","walk"]))) }' \
"$src/package.json" > package.json.new
if cmp --quiet package.json{.new,}; then
echo "package.json not changed, skip updating nodePackages"
rm package.json.new
else
echo "package.json changed, updating nodePackages"
mv package.json{.new,}
pushd ../../../../development/node-packages
./generate.sh
popd
fi

View file

@ -1,40 +0,0 @@
{ lib, fetchurl, appimageTools, gsettings-desktop-schemas, gtk3 }:
let
pname = "deltachat-electron";
version = "1.20.3";
name = "${pname}-${version}";
src = fetchurl {
url = "https://download.delta.chat/desktop/v${version}/DeltaChat-${version}.AppImage";
sha256 = "sha256-u0YjaXb+6BOBWaZANPcaxp7maqlBWAtecSsCGbr67dk=";
};
appimageContents = appimageTools.extract { inherit name src; };
in
appimageTools.wrapType2 {
inherit name src;
profile = ''
export XDG_DATA_DIRS=${gsettings-desktop-schemas}/share/gsettings-schemas/${gsettings-desktop-schemas.name}:${gtk3}/share/gsettings-schemas/${gtk3.name}:$XDG_DATA_DIRS
'';
extraInstallCommands = ''
mv $out/bin/${name} $out/bin/${pname}
install -m 444 -D \
${appimageContents}/deltachat-desktop.desktop \
$out/share/applications/${pname}.desktop
substituteInPlace $out/share/applications/${pname}.desktop \
--replace 'Exec=AppRun' 'Exec=${pname}'
cp -r ${appimageContents}/usr/share/icons $out/share
'';
meta = with lib; {
description = "Electron client for DeltaChat";
homepage = "https://delta.chat/";
license = licenses.gpl3;
maintainers = with maintainers; [ ehmry ];
platforms = [ "x86_64-linux" ];
};
}

View file

@ -66,5 +66,6 @@ in mkDerivation rec {
homepage = "https://git.sr.ht/~link2xt/kdeltachat";
license = licenses.gpl3Plus;
maintainers = with maintainers; [ dotlambda ];
platforms = platforms.linux;
};
}

View file

@ -7,6 +7,7 @@
python3Packages.buildPythonApplication rec {
pname = "zerobin";
version = "1.0.5";
src = fetchFromGitHub {
owner = "Tygs";
repo = "0bin";
@ -21,6 +22,7 @@ python3Packages.buildPythonApplication rec {
python3Packages.pyscss
nodePackages.uglify-js
];
propagatedBuildInputs = with python3Packages; [
appdirs
beaker
@ -30,16 +32,19 @@ python3Packages.buildPythonApplication rec {
lockfile
paste
];
prePatch = ''
# replace /bin/bash in compress.sh
patchShebangs .
# relax version constraints of some dependencies
substituteInPlace setup.cfg \
--replace "clize==4.1.1" "clize" \
--replace "bleach==3.1.5" "bleach>=3.1.5,<4" \
--replace "bottle==0.12.18" "bottle>=0.12.18,<1" \
--replace "Paste==3.4.3" "Paste>=3.4.3,<4"
'';
buildPhase = ''
runHook preBuild
doit build
@ -51,6 +56,8 @@ python3Packages.buildPythonApplication rec {
# See https://github.com/NixOS/nixpkgs/pull/98734#discussion_r495823510
doCheck = false;
pythonImportsCheck = [ "zerobin" ];
meta = with lib; {
description = "A client side encrypted pastebin";
homepage = "https://0bin.net/";

View file

@ -7,12 +7,12 @@
}:
stdenv.mkDerivation rec {
version = "4.0.17";
version = "4.0.19";
pname = "flmsg";
src = fetchurl {
url = "mirror://sourceforge/fldigi/${pname}-${version}.tar.gz";
sha256 = "09xf3f65d3qi69frznf4fdznbfbc7kmgxw716q2c7ccsmh9c5q44";
sha256 = "sha256-Pm5qAUNbenkX9V3OSQWW09iIRR/WB1jB4ioyRCZmjqs=";
};
buildInputs = [

View file

@ -17,14 +17,14 @@ let
};
in
stdenv.mkDerivation rec {
version = "14.31.46";
version = "14.31.49";
pname = "jmol";
src = let
baseVersion = "${lib.versions.major version}.${lib.versions.minor version}";
in fetchurl {
url = "mirror://sourceforge/jmol/Jmol/Version%20${baseVersion}/Jmol%20${version}/Jmol-${version}-binary.tar.gz";
sha256 = "sha256-U8k8xQws0vIJ3ZICzZXxSbtl7boCzRqG9mFSTXvmCvg=";
sha256 = "sha256-P+bzimBVammX5LxE6Yd6CmvmBeG8WdyA1T9bHXd+ifI=";
};
patchPhase = ''

View file

@ -1,11 +1,11 @@
{ stdenv, fetchurl, lib, expat, octave, libxml2, texinfo, zip }:
stdenv.mkDerivation rec {
pname = "gama";
version = "2.12";
version = "2.14";
src = fetchurl {
url = "mirror://gnu/${pname}/${pname}-${version}.tar.gz";
sha256 = "0zfilasalsy29b7viw0iwgnl9bkvp0l87gpxl1hx7379l8agwqyj";
sha256 = "sha256-ltYoJyo4b4Ys70nbZo5gxkjBw/RiEs5Rrdb6he9bsBI=";
};
buildInputs = [ expat ];

View file

@ -1,37 +1,39 @@
{lib, stdenv, fetchurl, makeWrapper, jre, gcc, valgrind}:
{ lib, stdenv, fetchurl, makeWrapper, jre, gcc, valgrind }:
# gcc and valgrind are not strict dependencies; they could be made
# optional. They are here because plm can only help you learn C if you
# have them installed.
stdenv.mkDerivation rec {
major = "2";
minor = "5";
version = "${major}-${minor}";
pname = "plm";
version = "2.9.3";
src = fetchurl {
url = "http://webloria.loria.fr/~quinson/Teaching/PLM/plm-${major}_${minor}.jar";
sha256 = "0m17cxa3nxi2cbswqvlfzp0mlfi3wrkw8ry2xhkxy6aqzm2mlgcc";
url = "https://github.com/BuggleInc/PLM/releases/download/v${version}/plm-${version}.jar";
sha256 = "0i9ghx9pm3kpn9x9n1hl10zdr36v5mv3drx8lvhsqwhlsvz42p5i";
name = "${pname}-${version}.jar";
};
nativeBuildInputs = [ makeWrapper ];
buildInputs = [ jre gcc valgrind ];
phases = [ "installPhase" ];
dontUnpack = true;
installPhase = ''
runHook preInstall
mkdir -p "$prefix/bin"
makeWrapper ${jre}/bin/java $out/bin/plm \
--add-flags "-jar $src" \
--prefix PATH : "$PATH"
runHook postInstall
'';
meta = with lib; {
description = "Free cross-platform programming exerciser";
homepage = "http://people.irisa.fr/Martin.Quinson/Teaching/PLM/";
license = licenses.gpl3;
maintainers = [ ];
platforms = lib.platforms.all;
broken = true;
};
}

View file

@ -1,4 +1,4 @@
{ callPackage, lib, stdenv, fetchFromGitHub, git, zsh, ...}:
{ callPackage, lib, stdenv, fetchFromGitHub, git, zsh }:
stdenv.mkDerivation rec {
pname = "gitstatus";
@ -11,25 +11,33 @@ stdenv.mkDerivation rec {
sha256 = "sha256-MQG4thW73gDqY68bKP2FO8z5uc2R/tED+/X9qas/GOA=";
};
buildInputs = [ (callPackage ./romkatv_libgit2.nix {}) ];
patchPhase = ''
buildInputs = [ (callPackage ./romkatv_libgit2.nix { }) ];
postPatch = ''
sed -i '1i GITSTATUS_AUTO_INSTALL=''${GITSTATUS_AUTO_INSTALL-0}' gitstatus.plugin.sh
sed -i '1i GITSTATUS_AUTO_INSTALL=''${GITSTATUS_AUTO_INSTALL-0}' gitstatus.plugin.zsh
sed -i "1a GITSTATUS_DAEMON=$out/bin/gitstatusd" install
'';
installPhase = ''
install -Dm755 usrbin/gitstatusd $out/bin/gitstatusd
install -Dm444 gitstatus.plugin.sh $out
install -Dm444 gitstatus.plugin.zsh $out
install -Dm555 install $out
install -Dm444 build.info $out
install -Dm444 gitstatus.plugin.sh -t $out/share/gitstatus/
install -Dm444 gitstatus.plugin.zsh -t $out/share/gitstatus/
install -Dm555 install -t $out/share/gitstatus/
install -Dm444 build.info -t $out/share/gitstatus/
# the fallback path is wrong in the case of home-manager
# because the FHS directories don't start at /
substituteInPlace install \
--replace "_gitstatus_install_main ." "_gitstatus_install_main $out"
'';
# Don't install the "install" and "build.info" files, which the end user
# should not need to worry about.
pathsToLink = [
"/bin/gitstatusd"
"/gitstatus.plugin.sh"
"/gitstatus.plugin.zsh"
"/share/gitstatus/gitstatus.plugin.sh"
"/share/gitstatus/gitstatus.plugin.zsh"
];
# The install check sets up an empty Git repository and a minimal zshrc that
@ -47,7 +55,7 @@ stdenv.mkDerivation rec {
echo '
GITSTATUS_LOG_LEVEL=DEBUG
. $out/gitstatus.plugin.zsh || exit 1
. $out/share/gitstatus/gitstatus.plugin.zsh || exit 1
gitstatus_stop NIX_TEST && gitstatus_start NIX_TEST
gitstatus_query NIX_TEST
@ -77,6 +85,6 @@ stdenv.mkDerivation rec {
description = "10x faster implementation of `git status` command";
homepage = "https://github.com/romkatv/gitstatus";
license = licenses.gpl3Only;
maintainers = with maintainers; [ mmlb hexa ];
maintainers = with maintainers; [ mmlb hexa SuperSandro2000 ];
};
}
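Since the plugin files now live under share/gitstatus, a downstream zsh setup
would source them from there; a hedged sketch (the NixOS option name is an
assumption, not part of this change):

{ pkgs, ... }: {
  # load the zsh plugin from the package's new install location
  programs.zsh.interactiveShellInit = ''
    source ${pkgs.gitstatus}/share/gitstatus/gitstatus.plugin.zsh
  '';
}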

View file

@ -16,7 +16,7 @@ libgit2.overrideAttrs (oldAttrs: {
src = fetchFromGitHub {
owner = "romkatv";
repo = "libgit2";
rev = "tag-82cefe2b42300224ad3c148f8b1a569757cc617a";
sha256 = "1vhnqynqyxizzkq1h5dfjm75f0jm5637jh0gypwqqz2yjqrscza0";
rev = "tag-5860a42d19bcd226cb6eff2dcbfcbf155d570c73";
sha256 = "sha256-OdGLNGOzXbWQGqw5zYM1RhU4Z2yRXi9cpAt7Vn9+j5I=";
};
})

View file

@ -16,12 +16,12 @@ with lib;
buildGoPackage rec {
pname = "gitea";
version = "1.14.5";
version = "1.14.6";
# not fetching directly from the git repo, because that lacks several vendor files for the web UI
src = fetchurl {
url = "https://github.com/go-gitea/gitea/releases/download/v${version}/gitea-src-${version}.tar.gz";
sha256 = "sha256-8nwLVpe/5IjXJqO179lN80B/3WGUL3LKM8OWdh/bYOE=";
sha256 = "sha256-IIoOJlafMD6Kg8Zde3LcoK97PKLmqOUMQN3nmIgqe1o=";
};
unpackPhase = ''

View file

@ -2,13 +2,13 @@
buildGoModule rec {
pname = "go-chromecast";
version = "0.2.9";
version = "0.2.10";
src = fetchFromGitHub {
owner = "vishen";
repo = pname;
rev = "v${version}";
sha256 = "sha256-KhJMgr57pDnuFLhsbf0/4n9w0EfjuuKA46ENPLXox3A=";
sha256 = "sha256-8216YaDgjy9Fp94Y5SQwEQpAP4NwvEhsJHe6xpQLAk8=";
};
vendorSha256 = "sha256-idxElk4Sy7SE9G1OMRw8YH4o8orBa80qhBXPA+ar620=";

View file

@ -2,13 +2,13 @@
buildKodiBinaryAddon rec {
pname = "inputstream-adaptive";
namespace = "inputstream.adaptive";
version = "2.6.22";
version = "2.6.23";
src = fetchFromGitHub {
owner = "xbmc";
repo = "inputstream.adaptive";
rev = "${version}-${rel}";
sha256 = "sha256-WSFbDuUgw0WHWb3ZZVavwpu1TizU9lMA5JAC5haR7c0=";
sha256 = "sha256-3w/fMaGqaOHSE7GUJtYoWovBKSOv3sNOIX1UOcp8hQE=";
};
extraNativeBuildInputs = [ gtest ];

View file

@ -2,13 +2,13 @@
stdenv.mkDerivation rec {
pname = "obs-multi-rtmp";
version = "0.2.6";
version = "0.2.6.1";
src = fetchFromGitHub {
owner = "sorayuki";
repo = "obs-multi-rtmp";
rev = version;
sha256 = "sha256-SMcVL54HwFIc7/wejEol2XiZhlZCMVCwHHtIKJ/CoYY=";
sha256 = "sha256-ZcvmiE9gbDUHAO36QAIaUdjV14ZfPabD9CW7Ogeqdro=";
};
nativeBuildInputs = [ cmake ];

View file

@ -1,4 +1,4 @@
{ stdenv, lib, edk2, util-linux, nasm, iasl
{ stdenv, lib, edk2, util-linux, nasm, acpica-tools
, csmSupport ? false, seabios ? null
, secureBoot ? false
, httpSupport ? false
@ -25,7 +25,7 @@ edk2.mkDerivation projectDscPath {
outputs = [ "out" "fd" ];
buildInputs = [ util-linux nasm iasl ];
buildInputs = [ util-linux nasm acpica-tools ];
hardeningDisable = [ "format" "stackprotector" "pic" "fortify" ];

View file

@ -1,4 +1,4 @@
{ lib, rustPlatform, fetchFromGitHub }:
{ lib, rustPlatform, fetchFromGitHub, nixosTests }:
rustPlatform.buildRustPackage rec {
pname = "cntr";
@ -13,6 +13,10 @@ rustPlatform.buildRustPackage rec {
cargoSha256 = "sha256-3e5wDne6Idu+kDinHPcAKHfH/d4DrGg90GkiMbyF280=";
passthru.tests = {
nixos = nixosTests.cntr;
};
meta = with lib; {
description = "A container debugging tool based on FUSE";
homepage = "https://github.com/Mic92/cntr";

View file

@ -1,4 +1,4 @@
{ lib, stdenv, fetchurl, iasl, python3 }:
{ lib, stdenv, fetchurl, acpica-tools, python3 }:
stdenv.mkDerivation rec {
@ -12,7 +12,7 @@ stdenv.mkDerivation rec {
nativeBuildInputs = [ python3 ];
buildInputs = [ iasl ];
buildInputs = [ acpica-tools ];
strictDeps = true;
@ -47,4 +47,3 @@ stdenv.mkDerivation rec {
platforms = [ "i686-linux" "x86_64-linux" ];
};
}

View file

@ -1,4 +1,4 @@
{ config, stdenv, fetchurl, lib, iasl, dev86, pam, libxslt, libxml2, wrapQtAppsHook
{ config, stdenv, fetchurl, lib, acpica-tools, dev86, pam, libxslt, libxml2, wrapQtAppsHook
, libX11, xorgproto, libXext, libXcursor, libXmu, libIDL, SDL, libcap, libGL
, libpng, glib, lvm2, libXrandr, libXinerama, libopus, qtbase, qtx11extras
, qttools, qtsvg, qtwayland, pkg-config, which, docbook_xsl, docbook_xml_dtd_43
@ -24,16 +24,6 @@ let
# Use maintainers/scripts/update.nix to update the version and all related hashes, or
# manually change the hashes in extpack.nix and guest-additions/default.nix as well.
version = "6.1.26";
iasl' = iasl.overrideAttrs (old: rec {
inherit (old) pname;
version = "20190108";
src = fetchurl {
url = "https://acpica.org/sites/acpica/files/acpica-unix-${version}.tar.gz";
sha256 = "0bqhr3ndchvfhxb31147z8gd81dysyz5dwkvmp56832d0js2564q";
};
NIX_CFLAGS_COMPILE = old.NIX_CFLAGS_COMPILE + " -Wno-error=stringop-truncation";
});
in stdenv.mkDerivation {
pname = "virtualbox";
inherit version;
@ -52,7 +42,7 @@ in stdenv.mkDerivation {
dontWrapQtApps = true;
buildInputs =
[ iasl' dev86 libxslt libxml2 xorgproto libX11 libXext libXcursor libIDL
[ acpica-tools dev86 libxslt libxml2 xorgproto libX11 libXext libXcursor libIDL
libcap glib lvm2 alsa-lib curl libvpx pam makeself perl
libXmu libpng libopus python ]
++ optional javaBindings jdk

View file

@ -3,7 +3,7 @@ config:
# Xen
, bison, bzip2, checkpolicy, dev86, figlet, flex, gettext, glib
, iasl, libaio, libiconv, libuuid, ncurses, openssl, perl
, acpica-tools, libaio, libiconv, libuuid, ncurses, openssl, perl
, python2Packages
# python2Packages.python
, xz, yajl, zlib
@ -71,7 +71,7 @@ stdenv.mkDerivation (rec {
cmake which
# Xen
bison bzip2 checkpolicy dev86 figlet flex gettext glib iasl libaio
bison bzip2 checkpolicy dev86 figlet flex gettext glib acpica-tools libaio
libiconv libuuid ncurses openssl perl python2Packages.python xz yajl zlib
# oxenstored

View file

@ -1,49 +1,47 @@
{
bashInteractive,
buildPackages,
cacert,
callPackage,
closureInfo,
coreutils,
e2fsprogs,
fakeroot,
findutils,
go,
jq,
jshon,
lib,
makeWrapper,
moreutils,
nix,
pigz,
pkgs,
rsync,
runCommand,
runtimeShell,
shadow,
skopeo,
storeDir ? builtins.storeDir,
substituteAll,
symlinkJoin,
util-linux,
vmTools,
writeReferencesToFile,
writeScript,
writeText,
writeTextDir,
writePython3,
system, # Note: This is the cross system we're compiling for
{ bashInteractive
, buildPackages
, cacert
, callPackage
, closureInfo
, coreutils
, e2fsprogs
, fakeroot
, findutils
, go
, jq
, jshon
, lib
, makeWrapper
, moreutils
, nix
, pigz
, pkgs
, rsync
, runCommand
, runtimeShell
, shadow
, skopeo
, storeDir ? builtins.storeDir
, substituteAll
, symlinkJoin
, util-linux
, vmTools
, writeReferencesToFile
, writeScript
, writeText
, writeTextDir
, writePython3
, system
, # Note: This is the cross system we're compiling for
}:
let
inherit (lib)
optionals
;
mkDbExtraCommand = contents: let
mkDbExtraCommand = contents:
let
contentsList = if builtins.isList contents then contents else [ contents ];
in ''
in
''
echo "Generating the nix database..."
echo "Warning: only the database of the deepest Nix layer is loaded."
echo " If you want to use nix commands in the container, it would"
@ -70,13 +68,13 @@ let
in
rec {
examples = callPackage ./examples.nix {
inherit buildImage buildLayeredImage fakeNss pullImage shadowSetup buildImageWithNixDb;
};
pullImage = let
fixName = name: builtins.replaceStrings ["/" ":"] ["-" "-"] name;
pullImage =
let
fixName = name: builtins.replaceStrings [ "/" ":" ] [ "-" "-" ] name;
in
{ imageName
# To find the digest of an image, you can use skopeo:
@ -96,7 +94,8 @@ rec {
, name ? fixName "docker-image-${finalImageName}-${finalImageTag}.tar"
}:
runCommand name {
runCommand name
{
inherit imageDigest;
imageName = finalImageName;
imageTag = finalImageTag;
@ -126,11 +125,12 @@ rec {
tarsum = pkgs.tarsum;
# buildEnv creates symlinks to dirs, which are hard to edit inside the overlay VM
mergeDrvs = {
derivations,
onlyDeps ? false
mergeDrvs =
{ derivations
, onlyDeps ? false
}:
runCommand "merge-drvs" {
runCommand "merge-drvs"
{
inherit derivations onlyDeps;
} ''
if [[ -n "$onlyDeps" ]]; then
@ -180,19 +180,20 @@ rec {
'';
# Run commands in a virtual machine.
runWithOverlay = {
name,
fromImage ? null,
fromImageName ? null,
fromImageTag ? null,
diskSize ? 1024,
preMount ? "",
postMount ? "",
postUmount ? ""
runWithOverlay =
{ name
, fromImage ? null
, fromImageName ? null
, fromImageTag ? null
, diskSize ? 1024
, preMount ? ""
, postMount ? ""
, postUmount ? ""
}:
let
result = vmTools.runInLinuxVM (
runCommand name {
runCommand name
{
preVM = vmTools.createEmptyImage {
size = diskSize;
fullName = "docker-run-disk";
@ -277,7 +278,7 @@ rec {
${postUmount}
'');
in
runCommand name {} ''
runCommand name { } ''
mkdir -p $out
cd ${result}
cp layer.tar json VERSION $out
@ -293,7 +294,6 @@ rec {
'';
};
# Create an executable shell script which has the coreutils in its
# PATH. Since root scripts are executed in a blank environment, even
# things like `ls` or `echo` will be missing.
@ -306,21 +306,25 @@ rec {
'';
# Create a "layer" (set of files).
mkPureLayer = {
mkPureLayer =
{
# Name of the layer
name,
# JSON containing configuration and metadata for this layer.
baseJson,
# Files to add to the layer.
contents ? null,
# When copying the contents into the image, preserve symlinks to
name
, # JSON containing configuration and metadata for this layer.
baseJson
, # Files to add to the layer.
contents ? null
, # When copying the contents into the image, preserve symlinks to
# directories (see `rsync -K`). Otherwise, transform those symlinks
# into directories.
keepContentsDirlinks ? false,
# Additional commands to run on the layer before it is tar'd up.
extraCommands ? "", uid ? 0, gid ? 0
keepContentsDirlinks ? false
, # Additional commands to run on the layer before it is tar'd up.
extraCommands ? ""
, uid ? 0
, gid ? 0
}:
runCommand "docker-layer-${name}" {
runCommand "docker-layer-${name}"
{
inherit baseJson contents extraCommands;
nativeBuildInputs = [ jshon rsync tarsum ];
}
@ -360,40 +364,42 @@ rec {
# Make a "root" layer; required if we need to execute commands as a
# privileged user on the image. The commands themselves will be
# performed in a virtual machine sandbox.
mkRootLayer = {
mkRootLayer =
{
# Name of the image.
name,
# Script to run as root. Bash.
runAsRoot,
# Files to add to the layer. If null, an empty layer will be created.
contents ? null,
# When copying the contents into the image, preserve symlinks to
name
, # Script to run as root. Bash.
runAsRoot
, # Files to add to the layer. If null, an empty layer will be created.
contents ? null
, # When copying the contents into the image, preserve symlinks to
# directories (see `rsync -K`). Otherwise, transform those symlinks
# into directories.
keepContentsDirlinks ? false,
# JSON containing configuration and metadata for this layer.
baseJson,
# Existing image onto which to append the new layer.
fromImage ? null,
# Name of the image we're appending onto.
fromImageName ? null,
# Tag of the image we're appending onto.
fromImageTag ? null,
# How much disk to allocate for the temporary virtual machine.
diskSize ? 1024,
# Commands (bash) to run on the layer; these do not require sudo.
keepContentsDirlinks ? false
, # JSON containing configuration and metadata for this layer.
baseJson
, # Existing image onto which to append the new layer.
fromImage ? null
, # Name of the image we're appending onto.
fromImageName ? null
, # Tag of the image we're appending onto.
fromImageTag ? null
, # How much disk to allocate for the temporary virtual machine.
diskSize ? 1024
, # Commands (bash) to run on the layer; these do not require sudo.
extraCommands ? ""
}:
# Generate an executable script from the `runAsRoot` text.
let
runAsRootScript = shellScript "run-as-root.sh" runAsRoot;
extraCommandsScript = shellScript "extra-commands.sh" extraCommands;
in runWithOverlay {
in
runWithOverlay {
name = "docker-layer-${name}";
inherit fromImage fromImageName fromImageTag diskSize;
preMount = lib.optionalString (contents != null && contents != []) ''
preMount = lib.optionalString (contents != null && contents != [ ]) ''
echo "Adding contents..."
for item in ${toString contents}; do
echo "Adding $item..."
@ -445,11 +451,12 @@ rec {
'';
};
buildLayeredImage = {name, ...}@args:
buildLayeredImage = { name, ... }@args:
let
stream = streamLayeredImage args;
in
runCommand "${baseNameOf name}.tar.gz" {
runCommand "${baseNameOf name}.tar.gz"
{
inherit (stream) imageName;
passthru = { inherit (stream) imageTag; };
nativeBuildInputs = [ pigz ];
@ -461,40 +468,45 @@ rec {
# 4. compute the layer id
# 5. put the layer in the image
# 6. repack the image
buildImage = args@{
buildImage =
args@{
# Image name.
name,
# Image tag, when null then the nix output hash will be used.
tag ? null,
# Parent image, to append to.
fromImage ? null,
# Name of the parent image; will be read from the image otherwise.
fromImageName ? null,
# Tag of the parent image; will be read from the image otherwise.
fromImageTag ? null,
# Files to put on the image (a nix store path or list of paths).
contents ? null,
# When copying the contents into the image, preserve symlinks to
name
, # Image tag, when null then the nix output hash will be used.
tag ? null
, # Parent image, to append to.
fromImage ? null
, # Name of the parent image; will be read from the image otherwise.
fromImageName ? null
, # Tag of the parent image; will be read from the image otherwise.
fromImageTag ? null
, # Files to put on the image (a nix store path or list of paths).
contents ? null
, # When copying the contents into the image, preserve symlinks to
# directories (see `rsync -K`). Otherwise, transform those symlinks
# into directories.
keepContentsDirlinks ? false,
# Docker config; e.g. what command to run on the container.
config ? null,
# Optional bash script to run on the files prior to fixturizing the layer.
extraCommands ? "", uid ? 0, gid ? 0,
# Optional bash script to run as root on the image when provisioning.
runAsRoot ? null,
# Size of the virtual machine disk to provision when building the image.
diskSize ? 1024,
# Time of creation of the image.
created ? "1970-01-01T00:00:01Z",
keepContentsDirlinks ? false
, # Docker config; e.g. what command to run on the container.
config ? null
, # Optional bash script to run on the files prior to fixturizing the layer.
extraCommands ? ""
, uid ? 0
, gid ? 0
, # Optional bash script to run as root on the image when provisioning.
runAsRoot ? null
, # Size of the virtual machine disk to provision when building the image.
diskSize ? 1024
, # Time of creation of the image.
created ? "1970-01-01T00:00:01Z"
,
}:
let
baseName = baseNameOf name;
# Create a JSON blob of the configuration. Set the date to unix zero.
baseJson = let
baseJson =
let
pure = writeText "${baseName}-config.json" (builtins.toJSON {
inherit created config;
architecture = defaultArch;
@ -505,20 +517,25 @@ rec {
''
jq ".created = \"$(TZ=utc date --iso-8601="seconds")\"" ${pure} > $out
'';
in if created == "now" then impure else pure;
in
if created == "now" then impure else pure;
layer =
if runAsRoot == null
then mkPureLayer {
then
mkPureLayer
{
name = baseName;
inherit baseJson contents keepContentsDirlinks extraCommands uid gid;
} else mkRootLayer {
} else
mkRootLayer {
name = baseName;
inherit baseJson fromImage fromImageName fromImageTag
contents keepContentsDirlinks runAsRoot diskSize
extraCommands;
};
result = runCommand "docker-image-${baseName}.tar.gz" {
result = runCommand "docker-image-${baseName}.tar.gz"
{
nativeBuildInputs = [ jshon pigz coreutils findutils jq moreutils ];
# Image name must be lowercase
imageName = lib.toLower name;
@ -732,14 +749,14 @@ rec {
root:x:0:
nobody:x:65534:
'')
(runCommand "var-empty" {} ''
(runCommand "var-empty" { } ''
mkdir -p $out/var/empty
'')
];
};
# This provides /bin/sh, pointing to bashInteractive.
binSh = runCommand "bin-sh" {} ''
binSh = runCommand "bin-sh" { } ''
mkdir -p $out/bin
ln -s ${bashInteractive}/bin/bash $out/bin/sh
'';
@ -760,32 +777,34 @@ rec {
})
);
streamLayeredImage = {
streamLayeredImage =
{
# Image Name
name,
# Image tag, the Nix's output hash will be used if null
tag ? null,
# Parent image, to append to.
fromImage ? null,
# Files to put on the image (a nix store path or list of paths).
contents ? [],
# Docker config; e.g. what command to run on the container.
config ? {},
# Time of creation of the image. Passing "now" will make the
name
, # Image tag, the Nix's output hash will be used if null
tag ? null
, # Parent image, to append to.
fromImage ? null
, # Files to put on the image (a nix store path or list of paths).
contents ? [ ]
, # Docker config; e.g. what command to run on the container.
config ? { }
, # Time of creation of the image. Passing "now" will make the
# created date be the time of building.
created ? "1970-01-01T00:00:01Z",
# Optional bash script to run on the files prior to fixturizing the layer.
extraCommands ? "",
# Optional bash script to run inside fakeroot environment.
created ? "1970-01-01T00:00:01Z"
, # Optional bash script to run on the files prior to fixturizing the layer.
extraCommands ? ""
, # Optional bash script to run inside fakeroot environment.
# Could be used for changing ownership of files in customisation layer.
fakeRootCommands ? "",
# We pick 100 to ensure there is plenty of room for extension. I
fakeRootCommands ? ""
, # We pick 100 to ensure there is plenty of room for extension. I
# believe the actual maximum is 128.
maxLayers ? 100,
# Whether to include store paths in the image. You generally want to leave
maxLayers ? 100
, # Whether to include store paths in the image. You generally want to leave
# this on, but tooling may disable this to insert the store paths more
# efficiently via other means, such as bind mounting the host store.
includeStorePaths ? true,
includeStorePaths ? true
,
}:
assert
(lib.assertMsg (maxLayers > 1)
@ -793,7 +812,7 @@ rec {
let
baseName = baseNameOf name;
streamScript = writePython3 "stream" {} ./stream_layered_image.py;
streamScript = writePython3 "stream" { } ./stream_layered_image.py;
baseJson = writeText "${baseName}-base.json" (builtins.toJSON {
inherit config;
architecture = defaultArch;
@ -833,7 +852,7 @@ rec {
'';
};
closureRoots = optionals includeStorePaths /* normally true */ (
closureRoots = lib.optionals includeStorePaths /* normally true */ (
[ baseJson ] ++ contentsList
);
overallClosure = writeText "closure" (lib.concatStringsSep " " closureRoots);
@ -842,7 +861,8 @@ rec {
# so they'll be excluded from the created images.
unnecessaryDrvs = [ baseJson overallClosure ];
conf = runCommand "${baseName}-conf.json" {
conf = runCommand "${baseName}-conf.json"
{
inherit fromImage maxLayers created;
imageName = lib.toLower name;
passthru.imageTag =
@ -931,7 +951,8 @@ rec {
--arg created "$created" |
tee $out
'';
result = runCommand "stream-${baseName}" {
result = runCommand "stream-${baseName}"
{
inherit (conf) imageName;
passthru = {
inherit (conf) imageTag;
@ -944,5 +965,6 @@ rec {
} ''
makeWrapper ${streamScript} $out --add-flags ${conf}
'';
in result;
in
result;
}
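For orientation, a minimal sketch of how the entry points whose argument sets
were reformatted above are typically consumed (package choice and values are
illustrative):

let
  pkgs = import <nixpkgs> { };
in pkgs.dockerTools.streamLayeredImage {
  name = "hello-stream";
  tag = "latest";
  contents = [ pkgs.hello ];     # store paths copied into the image root
  config.Cmd = [ "/bin/hello" ]; # Docker config, see the `config` argument above
  maxLayers = 100;
}

The build result is a script that writes the image tarball to stdout, so it is
usually piped into the Docker daemon, e.g. $(nix-build example.nix) | docker load.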

View file

@ -78,7 +78,7 @@ let
in stdenvNoCC.mkDerivation rec {
inherit name makeUInitrd extension uInitrdArch prepend;
${if makeUInitrd then "uinitrdCompression" else null} = uInitrdCompression;
${if makeUInitrd then "uInitrdCompression" else null} = uInitrdCompression;
builder = ./make-initrd.sh;

View file

@ -43,9 +43,9 @@ done
(cd root && find * .[^.*] -print0 | sort -z | cpio -o -H newc -R +0:+0 --reproducible --null | eval -- $compress >> "$out/initrd")
if [ -n "$makeUInitrd" ]; then
mkimage -A $uInitrdArch -O linux -T ramdisk -C "$uInitrdCompression" -d $out/initrd"$extension" $out/initrd.img
mkimage -A "$uInitrdArch" -O linux -T ramdisk -C "$uInitrdCompression" -d "$out/initrd" $out/initrd.img
# Compatibility symlink
ln -s "initrd.img" "$out/initrd"
ln -sf "initrd.img" "$out/initrd"
else
ln -s "initrd" "$out/initrd$extension"
fi

View file

@ -9,13 +9,13 @@
stdenv.mkDerivation rec {
pname = "marwaita";
version = "10.0";
version = "10.2";
src = fetchFromGitHub {
owner = "darkomarko42";
repo = pname;
rev = version;
sha256 = "19xy6p3v4jqpw448ywmx1i6mbjpy06qa1rdwkdpxdlhf45fbdacr";
sha256 = "09xh7yhnc7szk171n0qgr52xr7sw9qq4cb7qwrkhf0184idf0pik";
};
buildInputs = [

View file

@ -1,19 +1,33 @@
{ lib, stdenv, fetchFromGitHub, gtk_engines, gtk-engine-murrine }:
{ lib
, stdenv
, fetchFromGitHub
, gnome-shell
, gtk-engine-murrine
, gtk_engines
}:
stdenv.mkDerivation rec {
pname = "vimix-gtk-themes";
version = "2021-04-25";
version = "2021-08-09";
src = fetchFromGitHub {
owner = "vinceliuice";
repo = pname;
rev = version;
sha256 = "0ak763vs27h5z2pgcqpz1g1hypn5gl0p0ylffawc9zdi1wp2mpxb";
sha256 = "0j6sq7z4zqc9q4hqcq4y9vh4qpgl0v1i353l6rcd6bh1r594rwjm";
};
buildInputs = [ gtk_engines ];
nativeBuildInputs = [
gnome-shell # needed to determine the gnome-shell version
];
propagatedUserEnvPkgs = [ gtk-engine-murrine ];
buildInputs = [
gtk_engines
];
propagatedUserEnvPkgs = [
gtk-engine-murrine
];
installPhase = ''
runHook preInstall

View file

@ -1,15 +1,15 @@
{ lib, fetchurl, file, which, intltool, gobject-introspection,
findutils, xdg-utils, dconf, gtk3, python3Packages,
findutils, xdg-utils, dconf, gtk3, python3Packages, xfconf,
wrapGAppsHook
}:
python3Packages.buildPythonApplication rec {
pname = "catfish";
version = "1.4.13";
version = "4.16.2";
src = fetchurl {
url = "https://archive.xfce.org/src/apps/${pname}/${lib.versions.majorMinor version}/${pname}-${version}.tar.bz2";
sha256 = "sha256-WMDqBuXyhgGSlVRfv9HbyiOup0xiV2K7tMiab0hK6Dk=";
sha256 = "sha256-shouFRlD8LGU04sX/qrzghh5R+0SoCw9ZJKvt0gBKms=";
};
nativeBuildInputs = [
@ -36,6 +36,7 @@ python3Packages.buildPythonApplication rec {
python3Packages.pexpect
xdg-utils
findutils
xfconf
];
# Explicitly set the prefix dir in "setup.py" because setuptools is

View file

@ -3,10 +3,10 @@
mkXfceDerivation {
category = "apps";
pname = "mousepad";
version = "0.5.5";
version = "0.5.6";
odd-unstable = false;
sha256 = "sha256-ViiibikQ90S47stb3egXwK5JbcMYYiJAsKukMVYvKLE=";
sha256 = "sha256-cdM2NHUnN2FITITb4077Je5Z8xwZAJfjmwXfV+WE6jk=";
nativeBuildInputs = [ gobject-introspection ];

View file

@ -8,11 +8,11 @@ in
stdenv.mkDerivation rec {
pname = "xfce4-sensors-plugin";
version = "1.3.95";
version = "1.4.1";
src = fetchurl {
url = "mirror://xfce/src/${category}/${pname}/${lib.versions.majorMinor version}/${pname}-${version}.tar.bz2";
sha256 = "sha256-g8ZK5GGN1ZKXHPoLwoWptHr4AaPthWg1zbKkxTPHhGw=";
sha256 = "sha256-N9DcVp5zXkgqGFRcJOsc4CKdaRDjpNTB3uBoCZkjS+I=";
};
nativeBuildInputs = [

View file

@ -13,13 +13,13 @@ assert enableLTO -> stdenv.cc.isGNU;
stdenv.mkDerivation rec {
pname = "dictu";
version = "0.19.0";
version = "0.20.0";
src = fetchFromGitHub {
owner = "dictu-lang";
repo = pname;
rev = "v${version}";
sha256 = "5Sfmzz4I0dhcbz14LmXx5cHELRFENunLbZmU93uSEJo=";
sha256 = "sha256-GiiNIySrfpjYf5faNNml7ZRXT5pDU0SVvNvMyBh1K8E=";
};
nativeBuildInputs = [ cmake ];

View file

@ -7,15 +7,13 @@
, useBoehmgc ? true, boehmgc
}:
assert useBoehmgc -> boehmgc != null;
let
s = # Generated upstream information
rec {
baseName="ecl";
version="16.1.2";
name="${baseName}-${version}";
url="https://common-lisp.net/project/ecl/static/files/release/ecl-16.1.2.tgz";
url="https://common-lisp.net/project/ecl/static/files/release/ecl-${version}.tgz";
sha256="16ab8qs3awvdxy8xs8jy82v8r04x4wr70l9l2j45vgag18d2nj1d";
};
buildInputs = [
@ -38,12 +36,11 @@ stdenv.mkDerivation {
configureFlags = [
(if threadSupport then "--enable-threads" else "--disable-threads")
"--with-gmp-prefix=${gmp.dev}"
"--with-libffi-prefix=${libffi.dev}"
]
++
(lib.optional (! noUnicode)
"--enable-unicode")
"--with-gmp-incdir=${lib.getDev gmp}/include"
"--with-gmp-libdir=${lib.getLib gmp}/lib"
# -incdir, -libdir doesn't seem to be supported for libffi
"--with-libffi-prefix=${lib.getDev libffi}"
] ++ lib.optional (! noUnicode) "--enable-unicode"
;
patches = [
@ -69,16 +66,31 @@ stdenv.mkDerivation {
postInstall = ''
sed -e 's/@[-a-zA-Z_]*@//g' -i $out/bin/ecl-config
wrapProgram "$out/bin/ecl" \
--prefix PATH ':' "${gcc}/bin" \
--prefix NIX_LDFLAGS ' ' "-L${gmp.lib or gmp.out or gmp}/lib" \
--prefix NIX_LDFLAGS ' ' "-L${libffi.lib or libffi.out or libffi}/lib"
--prefix PATH ':' "${
lib.makeBinPath [
gcc # for the C compiler
gcc.bintools.bintools # for ar
]
}" \
''
# ecl 16.1.2 is too old to have -libdir for libffi and boehmgc, so we need to
# use NIX_LDFLAGS_BEFORE to make gcc find these particular libraries.
# Since it is missing even the prefix flag for boehmgc, we also need to inject
# the correct -I flag via NIX_CFLAGS_COMPILE. Since we have access to it, we
# create the variable names with suffixSalt (which seems to be necessary even
# for NIX_CFLAGS_COMPILE).
+ lib.optionalString useBoehmgc ''
--prefix NIX_CFLAGS_COMPILE_${gcc.suffixSalt} ' ' "-I${lib.getDev boehmgc}/include" \
--prefix NIX_LDFLAGS_BEFORE_${gcc.bintools.suffixSalt} ' ' "-L${lib.getLib boehmgc}/lib" \
'' + ''
--prefix NIX_LDFLAGS_BEFORE_${gcc.bintools.suffixSalt} ' ' "-L${lib.getLib libffi}/lib"
'';
meta = {
meta = with lib; {
inherit (s) version;
description = "Lisp implementation aiming to be small, fast and easy to embed";
license = lib.licenses.mit ;
maintainers = [lib.maintainers.raskin];
platforms = lib.platforms.unix;
license = licenses.mit;
maintainers = [ maintainers.raskin ];
platforms = platforms.unix;
};
}
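For reference, the salted variable names mentioned in the comment above are the
platform-specific names read by the cc and bintools wrappers; a small sketch of
what they expand to (the exact salt depends on the toolchain and is an
assumption here):

let
  pkgs = import <nixpkgs> { };
in "NIX_CFLAGS_COMPILE_${pkgs.gcc.suffixSalt}"
# evaluates to something like "NIX_CFLAGS_COMPILE_x86_64_unknown_linux_gnu"
# on an x86_64-linux host; NIX_LDFLAGS_BEFORE_<salt> follows the same pattern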

View file

@ -45,20 +45,25 @@ stdenv.mkDerivation {
configureFlags = [
(if threadSupport then "--enable-threads" else "--disable-threads")
"--with-gmp-prefix=${lib.getDev gmp}"
"--with-libffi-prefix=${lib.getDev libffi}"
] ++ lib.optional useBoehmgc "--with-libgc-prefix=${lib.getDev boehmgc}"
++ lib.optional (!noUnicode) "--enable-unicode";
"--with-gmp-incdir=${lib.getDev gmp}/include"
"--with-gmp-libdir=${lib.getLib gmp}/lib"
"--with-libffi-incdir=${lib.getDev libffi}/include"
"--with-libffi-libdir=${lib.getLib libffi}/lib"
] ++ lib.optionals useBoehmgc [
"--with-libgc-incdir=${lib.getDev boehmgc}/include"
"--with-libgc-libdir=${lib.getLib boehmgc}/lib"
] ++ lib.optional (!noUnicode) "--enable-unicode";
hardeningDisable = [ "format" ];
postInstall = let
ldArgs = lib.strings.concatMapStringsSep " "
(l: ''--prefix NIX_LDFLAGS ' ' "-L${l.lib or l.out or l}/lib"'')
([ gmp libffi ] ++ lib.optional useBoehmgc boehmgc);
in ''
postInstall = ''
sed -e 's/@[-a-zA-Z_]*@//g' -i $out/bin/ecl-config
wrapProgram "$out/bin/ecl" --prefix PATH ':' "${gcc}/bin" ${ldArgs}
wrapProgram "$out/bin/ecl" --prefix PATH ':' "${
lib.makeBinPath [
gcc # for the C compiler
gcc.bintools.bintools # for ar
]
}"
'';
meta = with lib; {

View file

@ -5,7 +5,6 @@
fetchpatch,
libuuid,
python3,
iasl,
bc,
clang_9,
llvmPackages_9,

View file

@ -1,33 +0,0 @@
{lib, stdenv, fetchurl, bison, flex}:
stdenv.mkDerivation rec {
pname = "iasl";
version = "20210730";
src = fetchurl {
url = "https://acpica.org/sites/acpica/files/acpica-unix-${version}.tar.gz";
sha256 = "1pmm977nyl3bs71ipzcl4dh30qm8x9wm2p2ml0m62rl62kai832a";
};
NIX_CFLAGS_COMPILE = "-O3";
buildFlags = [ "iasl" ];
nativeBuildInputs = [ bison flex ];
installPhase =
''
runHook preInstall
install -Dm755 generate/unix/bin*/iasl -t $out/bin
runHook postInstall
'';
meta = {
description = "Intel ACPI Compiler";
homepage = "http://www.acpica.org/";
license = lib.licenses.iasl;
platforms = lib.platforms.unix;
};
}

View file

@ -1,4 +1,4 @@
{ lib, stdenv, llvm_meta, src, cmake, libxml2, libllvm, version, python3
{ lib, stdenv, llvm_meta, src, substituteAll, cmake, libxml2, libllvm, version, python3
, buildLlvmTools
, fixDarwinDylibNames
, enableManpages ? false
@ -37,6 +37,10 @@ let
./purity.patch
# https://reviews.llvm.org/D51899
./gnu-install-dirs.patch
(substituteAll {
src = ../../clang-11-12-LLVMgold-path.patch;
libllvmLibdir = "${libllvm.lib}/lib";
})
];
postPatch = ''
@ -52,12 +56,7 @@ let
outputs = [ "out" "lib" "dev" "python" ];
# Clang expects to find LLVMgold in its own prefix
postInstall = ''
if [ -e ${libllvm.lib}/lib/LLVMgold.so ]; then
ln -sv ${libllvm.lib}/lib/LLVMgold.so $lib/lib
fi
ln -sv $out/bin/clang $out/bin/cpp
# Move libclang to 'lib' output

View file

@ -2,9 +2,7 @@ diff --git a/lib/builtins/CMakeLists.txt b/lib/builtins/CMakeLists.txt
index 3a66dd9c3fb..7efc85d9f9f 100644
--- a/lib/builtins/CMakeLists.txt
+++ b/lib/builtins/CMakeLists.txt
@@ -301,6 +301,10 @@ if (NOT MSVC)
i386/umoddi3.S
)
@@ -345,4 +345,8 @@ if (NOT MSVC)
+ set(i486_SOURCES ${i386_SOURCES})
+ set(i586_SOURCES ${i386_SOURCES})

View file

@ -0,0 +1,71 @@
diff --git a/lib/sanitizer_common/sanitizer_mac.cpp b/lib/sanitizer_common/sanitizer_mac.cpp
--- a/lib/sanitizer_common/sanitizer_mac.cpp
+++ b/lib/sanitizer_common/sanitizer_mac.cpp
@@ -613,9 +613,15 @@ HandleSignalMode GetHandleSignalMode(int signum) {
// Offset example:
// XNU 17 -- macOS 10.13 -- iOS 11 -- tvOS 11 -- watchOS 4
constexpr u16 GetOSMajorKernelOffset() {
- if (TARGET_OS_OSX) return 4;
- if (TARGET_OS_IOS || TARGET_OS_TV) return 6;
- if (TARGET_OS_WATCH) return 13;
+#if TARGET_OS_OSX
+ return 4;
+#endif
+#if TARGET_OS_IOS || TARGET_OS_TV
+ return 6;
+#endif
+#if TARGET_OS_WATCH
+ return 13;
+#endif
}
using VersStr = char[64];
@@ -627,13 +633,13 @@ static uptr ApproximateOSVersionViaKernelVersion(VersStr vers) {
u16 os_major = kernel_major - offset;
const char *format = "%d.0";
- if (TARGET_OS_OSX) {
- if (os_major >= 16) { // macOS 11+
- os_major -= 5;
- } else { // macOS 10.15 and below
- format = "10.%d";
- }
+#if TARGET_OS_OSX
+ if (os_major >= 16) { // macOS 11+
+ os_major -= 5;
+ } else { // macOS 10.15 and below
+ format = "10.%d";
}
+#endif
return internal_snprintf(vers, sizeof(VersStr), format, os_major);
}
@@ -681,15 +687,14 @@ void ParseVersion(const char *vers, u16 *major, u16 *minor) {
// Aligned versions example:
// macOS 10.15 -- iOS 13 -- tvOS 13 -- watchOS 6
static void MapToMacos(u16 *major, u16 *minor) {
- if (TARGET_OS_OSX)
- return;
-
- if (TARGET_OS_IOS || TARGET_OS_TV)
+#if !TARGET_OS_OSX
+#if TARGET_OS_IOS || TARGET_OS_TV
*major += 2;
- else if (TARGET_OS_WATCH)
+#elif TARGET_OS_WATCH
*major += 9;
- else
+#else
UNREACHABLE("unsupported platform");
+#endif
if (*major >= 16) { // macOS 11+
*major -= 5;
@@ -697,6 +702,7 @@ static void MapToMacos(u16 *major, u16 *minor) {
*minor = *major;
*major = 10;
}
+#endif
}
static MacosVersion GetMacosAlignedVersionInternal() {

View file

@ -60,6 +60,8 @@ stdenv.mkDerivation {
# extra `/`.
./normalize-var.patch
]# ++ lib.optional stdenv.hostPlatform.isMusl ./sanitizers-nongnu.patch
# Prevent a compilation error on darwin
++ lib.optional stdenv.hostPlatform.isDarwin ./darwin-targetconditionals.patch
++ lib.optional stdenv.hostPlatform.isAarch32 ./armv7l.patch;
# TSAN requires XPC on Darwin, which we have no public/free source files for. We can depend on the Apple frameworks

View file

@ -19,6 +19,7 @@
, Carbon
, Cocoa
, lit
, makeWrapper
, enableManpages ? false
}:
@ -42,7 +43,7 @@ stdenv.mkDerivation (rec {
outputs = [ "out" "lib" "dev" ];
nativeBuildInputs = [
cmake python3 which swig lit
cmake python3 which swig lit makeWrapper
] ++ lib.optionals enableManpages [
python3.pkgs.sphinx python3.pkgs.recommonmark
];
@ -84,7 +85,15 @@ stdenv.mkDerivation (rec {
doCheck = false;
installCheckPhase = ''
if [ ! -e "$lib/${python3.sitePackages}/lldb/_lldb.so" ] ; then
return 1;
fi
'';
postInstall = ''
wrapProgram $out/bin/lldb --prefix PYTHONPATH : $lib/${python3.sitePackages}/
# Editor support
# vscode:
install -D ../tools/lldb-vscode/package.json $out/share/vscode/extensions/llvm-org.lldb-vscode-0.1.0/package.json

View file

@ -63,3 +63,29 @@ index 734167e51bc5..f95761b5df58 100644
install(TARGETS lldbIntelFeatures
- LIBRARY DESTINATION lib${LLVM_LIBDIR_SUFFIX})
+ LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
diff --git a/cmake/modules/LLDBStandalone.cmake b/cmake/modules/LLDBStandalone.cmake
index 752113b..010f187 100644
--- a/cmake/modules/LLDBStandalone.cmake
+++ b/cmake/modules/LLDBStandalone.cmake
@@ -62,7 +62,7 @@ endif()
# They are used as destination of target generators.
set(LLVM_RUNTIME_OUTPUT_INTDIR ${CMAKE_BINARY_DIR}/${CMAKE_CFG_INTDIR}/bin)
-set(LLVM_LIBRARY_OUTPUT_INTDIR ${CMAKE_BINARY_DIR}/${CMAKE_CFG_INTDIR}/lib${LLVM_LIBDIR_SUFFIX})
+set(LLVM_LIBRARY_OUTPUT_INTDIR ${CMAKE_INSTALL_LIBDIR}${LLVM_LIBDIR_SUFFIX})
if(WIN32 OR CYGWIN)
# DLL platform -- put DLLs into bin.
set(LLVM_SHLIB_OUTPUT_INTDIR ${LLVM_RUNTIME_OUTPUT_INTDIR})
diff --git a/CMakeLists.txt b/CMakeLists.txt
index b5633e2..86e4738 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -79,7 +79,7 @@ if (LLDB_ENABLE_PYTHON)
if(LLDB_BUILD_FRAMEWORK)
set(lldb_python_target_dir "${LLDB_FRAMEWORK_ABSOLUTE_BUILD_DIR}/LLDB.framework/Resources/Python/lldb")
else()
- set(lldb_python_target_dir "${CMAKE_BINARY_DIR}/${CMAKE_CFG_INTDIR}/${LLDB_PYTHON_RELATIVE_PATH}/lldb")
+ set(lldb_python_target_dir "${CMAKE_INSTALL_LIBDIR}/../${LLDB_PYTHON_RELATIVE_PATH}/lldb")
endif()
get_target_property(lldb_python_bindings_dir swig_wrapper_python BINARY_DIR)
finish_swig_python("lldb-python" "${lldb_python_bindings_dir}" "${lldb_python_target_dir}")

View file

@ -1,16 +1,17 @@
{ stdenv, lib, fetchFromGitHub, jre, coursier }:
{ stdenv, lib, fetchFromGitHub, coursier }:
stdenv.mkDerivation rec {
pname = "scala-runners";
version = "unstable-2020-02-02";
version = "unstable-2021-07-28";
src = fetchFromGitHub {
repo = pname;
owner = "dwijnand";
rev = "95e03c9f9de0fe0ab61eeb6dea2a364f9d081d31";
sha256 = "0mvlc6fxsh5d6gsyak9n3g98g4r061n8pir37jpiqb7z00m9lfrx";
rev = "9bf096ca81f4974d7327e291eac291e22b344a8f";
sha256 = "032fds5nr102h1lc81n9jc60jmxzivi4md4hcjrlqn076hfhj4ax";
};
dontBuild = true;
installPhase = ''
mkdir -p $out/bin $out/lib
sed -ie "s| cs | ${coursier}/bin/coursier |" scala-runner

View file

@ -23,6 +23,12 @@ stdenv.mkDerivation rec {
configureFlags = map (f: "--disable-${f}-port") excludedPorts;
preConfigure = ''
if test -n "''${dontStrip-}"; then
export STRIP=none
fi
'';
meta = {
description = "Small Device C Compiler";
longDescription = ''

View file

@ -10,11 +10,11 @@ assert stdenv ? cc && stdenv.cc.isGNU;
stdenv.mkDerivation rec {
pname = "guile-lib";
version = "0.2.6.1";
version = "0.2.7";
src = fetchurl {
url = "mirror://savannah/${pname}/${pname}-${version}.tar.gz";
hash = "sha256-bR09DxTbnSgLjUJ9bh3sRBfd0Cv/I71Zguy24mLrPyo=";
hash = "sha256-5O87hF8SGILHwM8E+BocuP02DG9ktWuGjeVUYhT5BN4=";
};
nativeBuildInputs = [

View file

@ -198,9 +198,9 @@ in {
major = "3";
minor = "10";
patch = "0";
suffix = "b3";
suffix = "rc1";
};
sha256 = "05fc4mp2ysb372bzkwbn1b1z01bfldnaqig6rxmif58hs3aawrr2";
sha256 = "0f76q6rsvbvrzcnsp0k7sp555krrgvjpcd09l1rybl4249ln2w3r";
inherit (darwin) configd;
inherit passthruFun;
};

View file

@ -8,14 +8,21 @@ stdenv.mkDerivation rec {
sha256 = "02hc5x9vkgng1v9bzvza9985ifrjd7fjr7nlpvazp4mv6dr89k47";
};
patches = [
# Add fallback for missing SIMD functions on ARM
# Source: https://github.com/Homebrew/homebrew-core/blob/cad412c7fb4b64925f821fcc9ac5f16a2c40f32d/Formula/aften.rb
./simd-fallback.patch
];
nativeBuildInputs = [ cmake ];
cmakeFlags = [ "-DSHARED=ON" ];
meta = {
meta = with lib; {
description = "An audio encoder which generates compressed audio streams based on ATSC A/52 specification";
homepage = "http://aften.sourceforge.net/";
license = lib.licenses.lgpl2;
platforms = [ "i686-linux" "x86_64-linux" "x86_64-darwin" ];
license = licenses.lgpl21Only;
platforms = platforms.unix;
maintainers = with maintainers; [ angustrau ];
};
}

View file

@ -0,0 +1,25 @@
From dca9c03930d669233258c114e914a01f7c0aeb05 Mon Sep 17 00:00:00 2001
From: jbr79 <jbr79@ef0d8562-5c19-0410-972e-841db63a069c>
Date: Wed, 24 Sep 2008 22:02:59 +0000
Subject: [PATCH] add fallback function for apply_simd_restrictions() on
non-x86/ppc
git-svn-id: https://aften.svn.sourceforge.net/svnroot/aften@766 ef0d8562-5c19-0410-972e-841db63a069c
---
libaften/cpu_caps.h | 1 +
1 file changed, 1 insertion(+)
diff --git a/libaften/cpu_caps.h b/libaften/cpu_caps.h
index b7c6159..4db11f7 100644
--- a/libaften/cpu_caps.h
+++ b/libaften/cpu_caps.h
@@ -26,6 +26,7 @@
#include "ppc_cpu_caps.h"
#else
static inline void cpu_caps_detect(void){}
+static inline void apply_simd_restrictions(AftenSimdInstructions *simd_instructions){}
#endif
#endif /* CPU_CAPS_H */
--
2.24.3 (Apple Git-128)

View file

@ -7,6 +7,14 @@
customMemoryManagement ? true
}:
let
host_os = if stdenv.hostPlatform.isDarwin then "APPLE"
else if stdenv.hostPlatform.isAndroid then "ANDROID"
else if stdenv.hostPlatform.isWindows then "WINDOWS"
else if stdenv.hostPlatform.isLinux then "LINUX"
else throw "Unknown host OS";
in
stdenv.mkDerivation rec {
pname = "aws-sdk-cpp";
version = "1.8.130";
@ -49,6 +57,7 @@ stdenv.mkDerivation rec {
"-DENABLE_TESTING=OFF"
"-DCURL_HAS_H2=1"
"-DCURL_HAS_TLS_PROXY=1"
"-DTARGET_ARCH=${host_os}"
] ++ lib.optional (apis != ["*"])
"-DBUILD_ONLY=${lib.concatStringsSep ";" apis}";

View file

@ -30,13 +30,13 @@ let
];
in stdenv.mkDerivation rec {
pname = "gjs";
version = "1.68.1";
version = "1.68.2";
outputs = [ "out" "dev" "installedTests" ];
src = fetchurl {
url = "mirror://gnome/sources/gjs/${lib.versions.majorMinor version}/${pname}-${version}.tar.xz";
sha256 = "0w2cbfpmc6alz7z8ycchhlkn586av5y8zk2xmgwzq10i0k13xyig";
sha256 = "sha256-cP8CraaC8TAzjsXMTFEQPcDlyrjVN+t2sYHsUSpl7jA=";
};
patches = [
@ -126,6 +126,7 @@ in stdenv.mkDerivation rec {
updateScript = gnome.updateScript {
packageName = "gjs";
versionPolicy = "odd-unstable";
};
};

View file

@ -1,11 +1,11 @@
{ lib, stdenv, fetchurl, makeWrapper, jdk }:
stdenv.mkDerivation rec {
name = "lombok-1.18.16";
name = "lombok-1.18.20";
src = fetchurl {
url = "https://projectlombok.org/downloads/${name}.jar";
sha256 = "1msys7xkaj0d7fi112fmb2z50mk46db58agzrrdyimggsszwn1kj";
sha256 = "sha256-zpR75sL751n7vo7ztCtoJfgUyYyIU/EBPy2WMM7fdLA=";
};
nativeBuildInputs = [ makeWrapper ];

View file

@ -5,13 +5,13 @@
stdenv.mkDerivation rec {
pname = "libaudec";
version = "0.2.4";
version = "0.3.4";
src = fetchFromGitHub {
owner = "zrythm";
repo = "libaudec";
rev = "v${version}";
sha256 = "1570m2dfia17dbkhd2qhx8jjihrpm7g8nnyg6n4wif4vv229s7dz";
sha256 = "sha256-8morbrq8zG+2N3ruMeJa85ci9P0wPQOfZ5H56diFEAo=";
};
buildInputs = [ libsndfile libsamplerate ];

View file

@ -1,12 +1,17 @@
{ lib
, stdenv
, fetchFromGitHub
, fetchpatch
, cmake
, openssl
, perl
, pkg-config
, rustPlatform
, sqlite
, fixDarwinDylibNames
, CoreFoundation
, Security
, libiconv
}:
stdenv.mkDerivation rec {
@ -20,6 +25,15 @@ stdenv.mkDerivation rec {
sha256 = "sha256-ZyVEI6q+GzHLEFH01TxS7NqwT7zqVgg0vduyf/fibB8=";
};
patches = [
# https://github.com/deltachat/deltachat-core-rust/pull/2589
(fetchpatch {
url = "https://github.com/deltachat/deltachat-core-rust/commit/408467e85d04fbbfd6bed5908d84d9e995943487.patch";
sha256 = "1j2ywaazglgl6370js34acrg0wrh0b7krqg05dfjf65n527lzn59";
})
./no-static-lib.patch
];
cargoDeps = rustPlatform.fetchCargoTarball {
inherit src;
name = "${pname}-${version}";
@ -33,11 +47,17 @@ stdenv.mkDerivation rec {
] ++ (with rustPlatform; [
cargoSetupHook
rust.cargo
]);
]) ++ lib.optionals stdenv.isDarwin [
fixDarwinDylibNames
];
buildInputs = [
openssl
sqlite
] ++ lib.optionals stdenv.isDarwin [
CoreFoundation
Security
libiconv
];
checkInputs = with rustPlatform; [
@ -49,7 +69,7 @@ stdenv.mkDerivation rec {
homepage = "https://github.com/deltachat/deltachat-core-rust/";
changelog = "https://github.com/deltachat/deltachat-core-rust/blob/${version}/CHANGELOG.md";
license = licenses.mpl20;
platforms = platforms.linux;
maintainers = with maintainers; [ dotlambda ];
platforms = platforms.unix;
};
}

View file

@ -0,0 +1,39 @@
diff --git a/CMakeLists.txt b/CMakeLists.txt
index fe7abe08..acdbe0d6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -13,7 +13,6 @@ find_program(CARGO cargo)
add_custom_command(
OUTPUT
- "target/release/libdeltachat.a"
"target/release/libdeltachat.${DYNAMIC_EXT}"
"target/release/pkgconfig/deltachat.pc"
COMMAND
@@ -38,13 +37,11 @@ add_custom_target(
lib_deltachat
ALL
DEPENDS
- "target/release/libdeltachat.a"
"target/release/libdeltachat.${DYNAMIC_EXT}"
"target/release/pkgconfig/deltachat.pc"
)
include(GNUInstallDirs)
install(FILES "deltachat-ffi/deltachat.h" DESTINATION ${CMAKE_INSTALL_INCLUDEDIR})
-install(FILES "target/release/libdeltachat.a" DESTINATION ${CMAKE_INSTALL_LIBDIR})
install(FILES "target/release/libdeltachat.${DYNAMIC_EXT}" DESTINATION ${CMAKE_INSTALL_LIBDIR})
install(FILES "target/release/pkgconfig/deltachat.pc" DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig)
diff --git a/deltachat-ffi/Cargo.toml b/deltachat-ffi/Cargo.toml
index a34a27ba..cf354abb 100644
--- a/deltachat-ffi/Cargo.toml
+++ b/deltachat-ffi/Cargo.toml
@@ -12,7 +12,7 @@ categories = ["cryptography", "std", "email"]
[lib]
name = "deltachat"
-crate-type = ["cdylib", "staticlib"]
+crate-type = ["cdylib"]
[dependencies]
deltachat = { path = "../", default-features = false }

View file

@ -2,11 +2,11 @@
stdenv.mkDerivation rec {
pname = "liblscp";
version = "0.6.0";
version = "0.9.2";
src = fetchurl {
url = "https://download.linuxsampler.org/packages/${pname}-${version}.tar.gz";
sha256 = "1rl7ssdzj0z3658yvdijmb27n2lcwmplx4qxg5mwrm07pvs7i75k";
sha256 = "sha256-GTW6SBQVCDtzeU+arLmAJi6N9jtN68NdI69p6RSazvs=";
};
nativeBuildInputs = [ autoconf automake libtool pkg-config ];

Some files were not shown because too many files have changed in this diff.