#!/usr/bin/env python3

"""
Update a Python package expression by passing in the `.nix` file, or the directory containing it.
You can pass in multiple files or paths.

You'll likely want to use
``
$ ./update-python-libraries ../../pkgs/development/python-modules/**/default.nix
``
to update all non-pinned libraries in that folder.
"""

import argparse
import collections
import json
import logging
import os
import re
import subprocess
from concurrent.futures import ThreadPoolExecutor as Pool
from typing import Any, Optional

import requests
from packaging.specifiers import SpecifierSet
from packaging.version import InvalidVersion
from packaging.version import Version as _Version

INDEX = "https://pypi.io/pypi"
"""URL of PyPI"""

EXTENSIONS = ["tar.gz", "tar.bz2", "tar", "zip", ".whl"]
"""Permitted file extensions. These are evaluated from left to right and the first occurrence is returned."""

PRERELEASES = False

BULK_UPDATE = False

GIT = "git"

NIXPKGS_ROOT = (
    subprocess.check_output(["git", "rev-parse", "--show-toplevel"])
    .decode("utf-8")
    .strip()
)

logging.basicConfig(level=logging.INFO)


class Version(_Version, collections.abc.Sequence):
    def __init__(self, version):
        super().__init__(version)
        # We cannot use `str(Version("0.04.21"))` because that becomes `0.4.21`
        # https://github.com/avian2/unidecode/issues/13#issuecomment-354538882
        self.raw_version = version

    def __getitem__(self, i):
        return self._version.release[i]

    def __len__(self):
        return len(self._version.release)

    def __iter__(self):
        yield from self._version.release
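
# For illustration: `Version` keeps the raw string around because `packaging`
# normalizes version strings (example values):
#
#   >>> str(_Version("0.04.21"))
#   '0.4.21'
#   >>> Version("0.04.21").raw_version
#   '0.04.21'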


def _get_values(attribute, text):
    """Match attribute in text and return all matches.

    :returns: List of matches.
    """
    regex = rf'{re.escape(attribute)}\s+=\s+"(.*)";'
    regex = re.compile(regex)
    values = regex.findall(text)
    return values
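
# For illustration, `_get_values` matches Nix bindings of the form
# `name = "value";` (hypothetical input):
#
#   >>> _get_values("pname", 'pname = "requests";')
#   ['requests']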


def _get_attr_value(attr_path: str) -> Optional[Any]:
    try:
        response = subprocess.check_output(
            [
                "nix",
                "--extra-experimental-features",
                "nix-command",
                "eval",
                "-f",
                f"{NIXPKGS_ROOT}/default.nix",
                "--json",
                f"{attr_path}",
            ],
            stderr=subprocess.DEVNULL,
        )
        return json.loads(response.decode())
    except (subprocess.CalledProcessError, ValueError):
        return None
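
# For illustration: this evaluates an attribute of the surrounding nixpkgs
# checkout and returns the JSON-decoded value, or None if evaluation fails
# (attribute path is only an example):
#
#   _get_attr_value("python3Packages.requests.version")  # -> e.g. "2.31.0"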


def _get_unique_value(attribute, text):
    """Match attribute in text and return unique match.

    :returns: Single match.
    """
    values = _get_values(attribute, text)
    n = len(values)
    if n > 1:
        raise ValueError("found too many values for {}".format(attribute))
    elif n == 1:
        return values[0]
    else:
        raise ValueError("no value found for {}".format(attribute))


def _get_line_and_value(attribute, text, value=None):
    """Match attribute in text. Return the line and the value of the attribute."""
    if value is None:
        regex = rf"({re.escape(attribute)}\s+=\s+\"(.*)\";)"
    else:
        regex = rf"({re.escape(attribute)}\s+=\s+\"({re.escape(value)})\";)"
    regex = re.compile(regex)
    results = regex.findall(text)
    n = len(results)
    if n > 1:
        raise ValueError("found too many values for {}".format(attribute))
    elif n == 1:
        return results[0]
    else:
        raise ValueError("no value found for {}".format(attribute))


def _replace_value(attribute, value, text, oldvalue=None):
    """Search and replace value of attribute in text."""
    if oldvalue is None:
        old_line, old_value = _get_line_and_value(attribute, text)
    else:
        old_line, old_value = _get_line_and_value(attribute, text, oldvalue)
    new_line = old_line.replace(old_value, value)
    new_text = text.replace(old_line, new_line)
    return new_text
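
# For illustration (hypothetical expression text):
#
#   >>> _replace_value("version", "1.1.0", 'version = "1.0.0";')
#   'version = "1.1.0";'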


def _fetch_page(url):
    r = requests.get(url)
    if r.status_code == requests.codes.ok:
        return r.json()
    else:
        raise ValueError("request for {} failed".format(url))


def _fetch_github(url):
    headers = {}
    token = os.environ.get("GITHUB_API_TOKEN")
    if token:
        headers["Authorization"] = f"token {token}"
    r = requests.get(url, headers=headers)

    if r.status_code == requests.codes.ok:
        return r.json()
    else:
        raise ValueError("request for {} failed".format(url))


def _hash_to_sri(algorithm, value):
    """Convert a hash to its SRI representation"""
    return (
        subprocess.check_output(["nix", "hash", "to-sri", "--type", algorithm, value])
        .decode()
        .strip()
    )
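
# For illustration: this shells out to `nix hash to-sri`, converting a bare
# base16 digest into an SRI string (digest shown is a placeholder):
#
#   _hash_to_sri("sha256", "<64 hex chars>")  # -> "sha256-<base64 digest>"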


def _skip_bulk_update(attr_name: str) -> bool:
    return bool(_get_attr_value(f"{attr_name}.skipBulkUpdate"))


SEMVER = {
    "major": 0,
    "minor": 1,
    "patch": 2,
}


def _determine_latest_version(current_version, target, versions):
    """Determine latest version, given `target`."""
    current_version = Version(current_version)

    def _parse_versions(versions):
        for v in versions:
            try:
                yield Version(v)
            except InvalidVersion:
                pass

    versions = _parse_versions(versions)

    index = SEMVER[target]

    ceiling = list(current_version[0:index])
    if len(ceiling) == 0:
        ceiling = None
    else:
        ceiling[-1] += 1
        ceiling = Version(".".join(map(str, ceiling)))

    # We do not want prereleases
    versions = SpecifierSet(prereleases=PRERELEASES).filter(versions)

    if ceiling is not None:
        versions = SpecifierSet(f"<{ceiling}").filter(versions)

    return (max(sorted(versions))).raw_version
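
# A worked example of the ceiling logic above (hypothetical version lists):
# with target="minor" and current_version="1.2.3", `index` is 1, the ceiling
# becomes Version("2"), and 2.0.0 is filtered out; target="major" imposes no
# ceiling.
#
#   >>> _determine_latest_version("1.2.3", "minor", ["1.2.4", "1.3.0", "2.0.0"])
#   '1.3.0'
#   >>> _determine_latest_version("1.2.3", "major", ["1.2.4", "1.3.0", "2.0.0"])
#   '2.0.0'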


def _get_latest_version_pypi(attr_path, package, extension, current_version, target):
    """Get latest version and hash from PyPI."""
    url = "{}/{}/json".format(INDEX, package)
    json = _fetch_page(url)

    versions = {
        version
        for version, releases in json["releases"].items()
        if not all(release["yanked"] for release in releases)
    }
    version = _determine_latest_version(current_version, target, versions)

    try:
        releases = json["releases"][version]
    except KeyError as e:
        raise KeyError(
            "Could not find version {} for {}".format(version, package)
        ) from e
    for release in releases:
        if release["filename"].endswith(extension):
            # TODO: In case of wheel we need to do further checks!
            sha256 = release["digests"]["sha256"]
            break
    else:
        sha256 = None
    return version, sha256, None
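
# The slice of the PyPI JSON payload this function relies on looks roughly
# like this (abridged; field names are the ones accessed above):
#
#   {"releases": {"1.0.0": [{"filename": "pkg-1.0.0.tar.gz",
#                            "yanked": false,
#                            "digests": {"sha256": "..."}}]}}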


def _get_latest_version_github(attr_path, package, extension, current_version, target):
    def strip_prefix(tag):
        return re.sub("^[^0-9]*", "", tag)

    def get_prefix(string):
        matches = re.findall(r"^([^0-9]*)", string)
        return next(iter(matches), "")

    try:
        homepage = subprocess.check_output(
            [
                "nix",
                "eval",
                "-f",
                f"{NIXPKGS_ROOT}/default.nix",
                "--raw",
                f"{attr_path}.src.meta.homepage",
            ]
        ).decode("utf-8")
    except Exception as e:
        raise ValueError(f"Unable to determine homepage: {e}")
    owner_repo = homepage[len("https://github.com/") :]  # remove prefix
    owner, repo = owner_repo.split("/")

    url = f"https://api.github.com/repos/{owner}/{repo}/releases"
    all_releases = _fetch_github(url)
    releases = list(filter(lambda x: not x["prerelease"], all_releases))

    if len(releases) == 0:
        raise ValueError(f"{homepage} does not contain any stable releases")

    versions = map(lambda x: strip_prefix(x["tag_name"]), releases)
    version = _determine_latest_version(current_version, target, versions)

    release = next(filter(lambda x: strip_prefix(x["tag_name"]) == version, releases))
    prefix = get_prefix(release["tag_name"])

    # some attributes require using fetchgit
    git_fetcher_args = []
    if _get_attr_value(f"{attr_path}.src.fetchSubmodules"):
        git_fetcher_args.append("--fetch-submodules")
    if _get_attr_value(f"{attr_path}.src.fetchLFS"):
        git_fetcher_args.append("--fetch-lfs")
    if _get_attr_value(f"{attr_path}.src.leaveDotGit"):
        git_fetcher_args.append("--leave-dotGit")

    if git_fetcher_args:
        algorithm = "sha256"
        cmd = [
            "nix-prefetch-git",
            f"https://github.com/{owner}/{repo}.git",
            "--hash",
            algorithm,
            "--rev",
            f"refs/tags/{release['tag_name']}",
        ]
        cmd.extend(git_fetcher_args)
        response = subprocess.check_output(cmd)
        document = json.loads(response.decode())
        hash = _hash_to_sri(algorithm, document[algorithm])
    else:
        try:
            hash = (
                subprocess.check_output(
                    [
                        "nix-prefetch-url",
                        "--type",
                        "sha256",
                        "--unpack",
                        f"{release['tarball_url']}",
                    ],
                    stderr=subprocess.DEVNULL,
                )
                .decode("utf-8")
                .strip()
            )
        except (subprocess.CalledProcessError, UnicodeError):
            # this may fail if the repo has both a branch and a tag of the same name; attempt the tag name
            tag_url = str(release["tarball_url"]).replace(
                "tarball", "tarball/refs/tags"
            )
            hash = (
                subprocess.check_output(
                    ["nix-prefetch-url", "--type", "sha256", "--unpack", tag_url],
                    stderr=subprocess.DEVNULL,
                )
                .decode("utf-8")
                .strip()
            )

    return version, hash, prefix
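
# For illustration, the tag helpers above split a release tag into prefix and
# version (hypothetical tag): strip_prefix("v1.2.3") returns "1.2.3" and
# get_prefix("v1.2.3") returns "v", which is how a rev of the form
# "refs/tags/v${version}" can be reconstructed later.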


FETCHERS = {
    "fetchFromGitHub": _get_latest_version_github,
    "fetchPypi": _get_latest_version_pypi,
    "fetchurl": _get_latest_version_pypi,
}


DEFAULT_SETUPTOOLS_EXTENSION = "tar.gz"


FORMATS = {
    "setuptools": DEFAULT_SETUPTOOLS_EXTENSION,
    "wheel": "whl",
    "pyproject": "tar.gz",
    "flit": "tar.gz",
}


def _determine_fetcher(text):
    # Count occurrences of fetchers.
    nfetchers = sum(
        text.count("src = {}".format(fetcher)) for fetcher in FETCHERS.keys()
    )
    if nfetchers == 0:
        raise ValueError("no fetcher.")
    elif nfetchers > 1:
        raise ValueError("multiple fetchers.")
    else:
        # Then we check which fetcher to use.
        for fetcher in FETCHERS.keys():
            if "src = {}".format(fetcher) in text:
                return fetcher
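
# For illustration (hypothetical expression text): an expression containing
# exactly one `src = fetchPypi { ... };` yields "fetchPypi"; zero fetchers or
# more than one fetcher raises ValueError.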


def _determine_extension(text, fetcher):
    """Determine what extension is used in the expression.

    If we use:
    - fetchPypi, we check if format is specified.
    - fetchurl, we determine the extension from the url.
    - fetchFromGitHub, we simply use `tar.gz`.
    """
    if fetcher == "fetchPypi":
        try:
            src_format = _get_unique_value("format", text)
        except ValueError:
            src_format = None  # format was not given

        try:
            extension = _get_unique_value("extension", text)
        except ValueError:
            extension = None  # extension was not given

        if extension is None:
            if src_format is None:
                src_format = "setuptools"
            elif src_format == "other":
                raise ValueError("Don't know how to update a format='other' package.")
            extension = FORMATS[src_format]

    elif fetcher == "fetchurl":
        url = _get_unique_value("url", text)
        extension = os.path.splitext(url)[1]
        if "pypi" not in url:
            raise ValueError("url does not point to PyPI.")

    elif fetcher == "fetchFromGitHub":
        extension = "tar.gz"

    return extension


def _update_package(path, target):
    # Read the expression
    with open(path, "r") as f:
        text = f.read()

    # Determine pname. Many files have more than one pname.
    pnames = _get_values("pname", text)

    # Determine version.
    version = _get_unique_value("version", text)

    # First we check how many fetchers are mentioned.
    fetcher = _determine_fetcher(text)

    extension = _determine_extension(text, fetcher)

    # Attempt a fetch using each pname, e.g. backports-zoneinfo vs backports.zoneinfo
    successful_fetch = False
    for pname in pnames:
        # when invoked as an updateScript, UPDATE_NIX_ATTR_PATH will be set;
        # this allows us to work with packages which live outside of python-modules
        attr_path = os.environ.get("UPDATE_NIX_ATTR_PATH", f"python3Packages.{pname}")

        if BULK_UPDATE and _skip_bulk_update(attr_path):
            raise ValueError(f"Bulk update skipped for {pname}")
        elif _get_attr_value(f"{attr_path}.cargoDeps") is not None:
            raise ValueError(f"Cargo dependencies are unsupported, skipping {pname}")
        try:
            new_version, new_sha256, prefix = FETCHERS[fetcher](
                attr_path, pname, extension, version, target
            )
            successful_fetch = True
            break
        except ValueError:
            continue

    if not successful_fetch:
        raise ValueError(f"Unable to find correct package using these pnames: {pnames}")

    if new_version == version:
        logging.info("Path {}: no update available for {}.".format(path, pname))
        return False
    elif Version(new_version) <= Version(version):
        raise ValueError("downgrade for {}.".format(pname))
    if not new_sha256:
        raise ValueError("no file available for {}.".format(pname))

    text = _replace_value("version", new_version, text)

    # hashes from PyPI are base16 (hex) encoded sha256's; normalize to SRI to avoid merge conflicts
    # SRI hashes have been the default format since Nix 2.4+
    sri_hash = _hash_to_sri("sha256", new_sha256)

    # retrieve the old output hash for a more precise match
    if old_hash := _get_attr_value(f"{attr_path}.src.outputHash"):
        # fetchers can specify a sha256 or an SRI hash
        try:
            text = _replace_value("hash", sri_hash, text, old_hash)
        except ValueError:
            text = _replace_value("sha256", sri_hash, text, old_hash)
    else:
        raise ValueError(f"Unable to retrieve old hash for {pname}")

    if fetcher == "fetchFromGitHub":
        # in the case of fetchFromGitHub, it's common to see `rev = version;` or `rev = "v${version}";`,
        # in which no string value is meant to be substituted. However, we can just overwrite the previous value.
        regex = r"(rev\s+=\s+[^;]*;)"
        regex = re.compile(regex)
        matches = regex.findall(text)
        n = len(matches)

        if n == 0:
            raise ValueError("Unable to find rev value for {}.".format(pname))
        else:
            # forcefully rewrite rev, in case tagging conventions changed for a release
            match = matches[0]
            text = text.replace(match, f'rev = "refs/tags/{prefix}${{version}}";')
            # in case there's no prefix, just rewrite without interpolation
            text = text.replace('"${version}";', "version;")

    with open(path, "w") as f:
        f.write(text)

    logging.info(
        "Path {}: updated {} from {} to {}".format(
            path, pname, version, new_version
        )
    )

    result = {
        "path": path,
        "target": target,
        "pname": pname,
        "old_version": version,
        "new_version": new_version,
        #'fetcher' : fetcher,
    }

    return result
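
# For illustration, the forced rev rewrite above, for a release tagged with a
# "v" prefix (hypothetical snippet):
#
#   rev = "v${version}";            # before
#   rev = "refs/tags/v${version}";  # after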


def _update(path, target):
    # We need to read and modify a Nix expression.
    if os.path.isdir(path):
        path = os.path.join(path, "default.nix")

    # If a default.nix does not exist, we quit.
    if not os.path.isfile(path):
        logging.info("Path {}: does not exist.".format(path))
        return False

    # If file is not a Nix expression, we quit.
    if not path.endswith(".nix"):
        logging.info("Path {}: does not end with `.nix`.".format(path))
        return False

    try:
        return _update_package(path, target)
    except ValueError as e:
        logging.warning("Path {}: {}".format(path, e))
        return False


def _commit(path, pname, old_version, new_version, pkgs_prefix="python: ", **kwargs):
    """Commit result."""

    msg = f"{pkgs_prefix}{pname}: {old_version} -> {new_version}"

    if changelog := _get_attr_value(f"{pkgs_prefix}{pname}.meta.changelog"):
        msg += f"\n\n{changelog}"

    try:
        subprocess.check_call([GIT, "add", path])
        subprocess.check_call([GIT, "commit", "-m", msg])
    except subprocess.CalledProcessError as e:
        subprocess.check_call([GIT, "checkout", path])
        # CalledProcessError cannot be constructed from a message alone
        # (it requires a returncode and a cmd), so re-raise as RuntimeError.
        raise RuntimeError(f"Could not commit {path}") from e

    return True
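
# For illustration, a resulting commit message (hypothetical versions), with
# the package's meta.changelog URL appended when one is set:
#
#   python: requests: 2.31.0 -> 2.32.0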


def main():
    epilog = """
environment variables:
  GITHUB_API_TOKEN\tGitHub API token used when updating GitHub packages
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter, epilog=epilog
    )
    parser.add_argument("package", type=str, nargs="+")
    parser.add_argument("--target", type=str, choices=SEMVER.keys(), default="major")
    parser.add_argument(
        "--commit", action="store_true", help="Create a commit for each package update"
    )
    parser.add_argument(
        "--use-pkgs-prefix",
        action="store_true",
        help="Use python3Packages.${pname}: instead of python: ${pname}: when making commits",
    )

    args = parser.parse_args()
    target = args.target

    packages = list(map(os.path.abspath, args.package))

    if len(packages) > 1:
        global BULK_UPDATE
        BULK_UPDATE = True

    logging.info("Updating packages...")

    # Use threads to update packages concurrently
    with Pool() as p:
        results = list(filter(bool, p.map(lambda pkg: _update(pkg, target), packages)))

    logging.info("Finished updating packages.")

    commit_options = {}
    if args.use_pkgs_prefix:
        logging.info("Using python3Packages. prefix for commits")
        commit_options["pkgs_prefix"] = "python3Packages."

    # Commits are created sequentially.
    if args.commit:
        logging.info("Committing updates...")
        # list forces evaluation
        list(map(lambda x: _commit(**x, **commit_options), results))
        logging.info("Finished committing updates")

    count = len(results)
    logging.info("{} package(s) updated".format(count))


if __name__ == "__main__":
    main()