# Used by pkgs/applications/editors/vim/plugins/update.py and pkgs/applications/editors/kakoune/plugins/update.py

# format:
# $ nix run nixpkgs.python3Packages.black -c black update.py
# type-check:
# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
# linted:
# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265 update.py

import argparse
import csv
import functools
import http.client
import json
import logging
import os
import subprocess
import sys
import time
import traceback
import urllib.error
import urllib.parse
import urllib.request
import xml.etree.ElementTree as ET
from dataclasses import asdict, dataclass
from datetime import datetime
from functools import wraps
from multiprocessing.dummy import Pool
from pathlib import Path
from tempfile import NamedTemporaryFile
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
from urllib.parse import urljoin, urlparse

import git

ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "

LOG_LEVELS = {
    logging.getLevelName(level): level
    for level in [logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR]
}

log = logging.getLogger()


def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
    """Retry calling the decorated function using an exponential backoff.

    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
    (BSD licensed)

    :param ExceptionToCheck: the exception on which to retry
    :param tries: number of times to try (not retry) before giving up
    :param delay: initial delay between retries in seconds
    :param backoff: backoff multiplier e.g. value of 2 will double the delay
        each retry
    """

    def deco_retry(f: Callable) -> Callable:
        @wraps(f)
        def f_retry(*args: Any, **kwargs: Any) -> Any:
            mtries, mdelay = tries, delay
            while mtries > 1:
                try:
                    return f(*args, **kwargs)
                except ExceptionToCheck as e:
                    print(f"{str(e)}, Retrying in {mdelay} seconds...")
                    time.sleep(mdelay)
                    mtries -= 1
                    mdelay *= backoff
            return f(*args, **kwargs)

        return f_retry  # true decorator

    return deco_retry

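# A minimal illustration of the decorator above (not part of the script's
# logic): with the defaults (tries=4, delay=3, backoff=2) up to four attempts
# are made, sleeping 3s, 6s and 12s between them; the last attempt runs
# outside the try/except so its exception propagates to the caller.
#
#   @retry(urllib.error.URLError)
#   def flaky_fetch() -> bytes:
#       return urllib.request.urlopen("https://example.org", timeout=10).read()
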
@dataclass
class FetchConfig:
    """Fetch settings shared by the whole run: the size of the download
    pool (--proc) and the GitHub API token (--github-token)."""
    proc: int
    github_token: str


def make_request(url: str, token=None) -> urllib.request.Request:
    headers = {}
    if token is not None:
        headers["Authorization"] = f"token {token}"
    return urllib.request.Request(url, headers=headers)

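# Illustrative call: make_request("https://api.github.com/repos/o/r", token="<token>")
# returns a Request carrying an "Authorization: token <token>" header; without
# a token the request is sent unauthenticated (and is subject to stricter
# GitHub rate limits).
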
# a dictionary of plugins and their new repositories
Redirects = Dict['PluginDesc', 'Repo']


class Repo:
    def __init__(
        self, uri: str, branch: str
    ) -> None:
        self.uri = uri
        '''Url to the repo'''
        self._branch = branch
        # Redirect is the new Repo to use
        self.redirect: Optional['Repo'] = None
        self.token = "dummy_token"

    @property
    def name(self):
        return self.uri.split('/')[-1]

    @property
    def branch(self):
        return self._branch or "HEAD"

    def __str__(self) -> str:
        return f"{self.uri}"

    def __repr__(self) -> str:
        return f"Repo({self.name}, {self.uri})"

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        # Conservative default: assume submodules so the generic
        # nix-prefetch-git path fetches them; RepoGitHub overrides this
        # with a real check.
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        log.debug("Latest commit")
        loaded = self._prefetch(None)
        updated = datetime.strptime(loaded['date'], "%Y-%m-%dT%H:%M:%S%z")

        return loaded['rev'], updated

    def _prefetch(self, ref: Optional[str]):
        cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
        if ref is not None:
            cmd.append(ref)
        log.debug(cmd)
        data = subprocess.check_output(cmd)
        loaded = json.loads(data)
        return loaded

    def prefetch(self, ref: Optional[str]) -> str:
        print("Prefetching")
        loaded = self._prefetch(ref)
        return loaded["sha256"]

    def as_nix(self, plugin: "Plugin") -> str:
        return f'''fetchgit {{
      url = "{self.uri}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";
    }}'''

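# Rendered output of Repo.as_nix, with illustrative values:
#
#   fetchgit {
#     url = "https://example.org/some/repo";
#     rev = "0123abc...";
#     sha256 = "sha256-...";
#   }
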
class RepoGitHub(Repo):
    def __init__(
        self, owner: str, repo: str, branch: str
    ) -> None:
        self.owner = owner
        self.repo = repo
        self.token = None
        super().__init__(self.url(""), branch)
        log.debug("Instantiating github repo owner=%s and repo=%s", self.owner, self.repo)

    @property
    def name(self):
        return self.repo

    def url(self, path: str) -> str:
        res = urljoin(f"https://github.com/{self.owner}/{self.repo}/", path)
        return res

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def has_submodules(self) -> bool:
        try:
            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
            urllib.request.urlopen(req, timeout=10).close()
        except urllib.error.HTTPError as e:
            if e.code == 404:
                return False
            else:
                raise
        return True

    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
    def latest_commit(self) -> Tuple[str, datetime]:
        commit_url = self.url(f"commits/{self.branch}.atom")
        log.debug("Sending request to %s", commit_url)
        commit_req = make_request(commit_url, self.token)
        with urllib.request.urlopen(commit_req, timeout=10) as req:
            self._check_for_redirect(commit_url, req)
            xml = req.read()
            root = ET.fromstring(xml)
            latest_entry = root.find(ATOM_ENTRY)
            assert latest_entry is not None, f"No commits found in repository {self}"
            commit_link = latest_entry.find(ATOM_LINK)
            assert commit_link is not None, f"No link tag found in feed entry {xml}"
            url = urlparse(commit_link.get("href"))
            updated_tag = latest_entry.find(ATOM_UPDATED)
            assert (
                updated_tag is not None and updated_tag.text is not None
            ), f"No updated tag found in feed entry {xml}"
            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
            # The entry's link points at .../commit/<sha>; the last path
            # component is the commit hash.
            return Path(str(url.path)).name, updated

    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
        response_url = req.geturl()
        if url != response_url:
            new_owner, new_name = (
                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
            )

            new_repo = RepoGitHub(owner=new_owner, repo=new_name, branch=self.branch)
            self.redirect = new_repo

    def prefetch(self, commit: str) -> str:
        if self.has_submodules():
            sha256 = super().prefetch(commit)
        else:
            sha256 = self.prefetch_github(commit)
        return sha256

    def prefetch_github(self, ref: str) -> str:
        cmd = ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
        log.debug("Running %s", cmd)
        data = subprocess.check_output(cmd)
        return data.strip().decode("utf-8")

    def as_nix(self, plugin: "Plugin") -> str:
        if plugin.has_submodules:
            submodule_attr = "\n      fetchSubmodules = true;"
        else:
            submodule_attr = ""

        return f'''fetchFromGitHub {{
      owner = "{self.owner}";
      repo = "{self.repo}";
      rev = "{plugin.commit}";
      sha256 = "{plugin.sha256}";{submodule_attr}
    }}'''

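# Rendered output of RepoGitHub.as_nix, with illustrative values:
#
#   fetchFromGitHub {
#     owner = "nvim-lua";
#     repo = "plenary.nvim";
#     rev = "0123abc...";
#     sha256 = "sha256-...";
#     fetchSubmodules = true;  # emitted only when the plugin has submodules
#   }
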
@dataclass(frozen=True)
class PluginDesc:
    repo: Repo
    branch: str
    alias: Optional[str]

    @property
    def name(self):
        if self.alias is None:
            return self.repo.name
        else:
            return self.alias

    def __lt__(self, other):
        return self.repo.name < other.repo.name

    @staticmethod
    def load_from_csv(config: FetchConfig, row: Dict[str, str]) -> 'PluginDesc':
        branch = row["branch"]
        repo = make_repo(row['repo'], branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), row["alias"])

    @staticmethod
    def load_from_string(config: FetchConfig, line: str) -> 'PluginDesc':
        branch = "HEAD"
        alias = None
        uri = line
        if " as " in uri:
            uri, alias = uri.split(" as ")
            alias = alias.strip()
        if "@" in uri:
            uri, branch = uri.split("@")
        repo = make_repo(uri.strip(), branch.strip())
        repo.token = config.github_token
        return PluginDesc(repo, branch.strip(), alias)

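# Spec strings accepted by load_from_string above (illustrative):
#
#   "owner/repo"                      -> GitHub repo, default branch (HEAD)
#   "owner/repo@develop"              -> GitHub repo, branch "develop"
#   "https://git.example.org/x as y"  -> plain git repo, aliased to "y"
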
@dataclass
class Plugin:
    name: str
    commit: str
    has_submodules: bool
    sha256: str
    date: Optional[datetime] = None

    @property
    def normalized_name(self) -> str:
        return self.name.replace(".", "-")

    @property
    def version(self) -> str:
        assert self.date is not None
        return self.date.strftime("%Y-%m-%d")

    def as_json(self) -> Dict[str, str]:
        copy = self.__dict__.copy()
        del copy["date"]
        return copy

def load_plugins_from_csv(config: FetchConfig, input_file: Path) -> List[PluginDesc]:
    log.debug("Load plugins from csv %s", input_file)
    plugins = []
    with open(input_file, newline='') as csvfile:
        log.debug("Reading from %s", input_file)
        reader = csv.DictReader(csvfile)
        for line in reader:
            plugin = PluginDesc.load_from_csv(config, line)
            plugins.append(plugin)

    return plugins

def run_nix_expr(expr):
    with CleanEnvironment():
        cmd = ["nix", "eval", "--extra-experimental-features",
               "nix-command", "--impure", "--json", "--expr", expr]
        log.debug("Running command %s", cmd)
        out = subprocess.check_output(cmd)
        data = json.loads(out)
        return data

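# Illustrative evaluation: run_nix_expr('builtins.attrNames { a = 1; b = 2; }')
# shells out to `nix eval --json` and returns the decoded value ["a", "b"].
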
class Editor:
    """The configuration of the update script."""

    def __init__(
        self,
        name: str,
        root: Path,
        get_plugins: str,
        default_in: Optional[Path] = None,
        default_out: Optional[Path] = None,
        deprecated: Optional[Path] = None,
        cache_file: Optional[str] = None,
    ):
        log.debug("get_plugins: %s", get_plugins)
        self.name = name
        self.root = root
        self.get_plugins = get_plugins
        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
        self.default_out = default_out or root.joinpath("generated.nix")
        self.deprecated = deprecated or root.joinpath("deprecated.json")
        self.cache_file = cache_file or f"{name}-plugin-cache.json"
        self.nixpkgs_repo = None

    def get_current_plugins(self) -> List[Plugin]:
        """List the plugins currently packaged in nixpkgs, used to seed the cache."""
        data = run_nix_expr(self.get_plugins)
        plugins = []
        for name, attr in data.items():
            print("get_current_plugins: name %s" % name)
            p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
            plugins.append(p)
        return plugins

    def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
        '''CSV spec'''
        return load_plugins_from_csv(config, plugin_file)

    def generate_nix(self, plugins, outfile: str):
        '''Returns nothing for now, writes directly to outfile'''
        raise NotImplementedError()

    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
        cache: Cache = Cache(self.get_current_plugins(), self.cache_file)
        _prefetch = functools.partial(prefetch, cache=cache)

        def update() -> dict:
            plugins = self.load_plugin_spec(config, input_file)

            try:
                pool = Pool(processes=config.proc)
                results = pool.map(_prefetch, plugins)
            finally:
                cache.store()

            plugins, redirects = check_results(results)

            self.generate_nix(plugins, outfile)

            return redirects

        return update

    @property
    def attr_path(self):
        return self.name + "Plugins"

    def get_drv_name(self, name: str):
        return self.attr_path + "." + name

    def rewrite_input(self, *args, **kwargs):
        return rewrite_input(*args, **kwargs)

    def create_parser(self):
        parser = argparse.ArgumentParser(
            description=(f"""
                Updates nix derivations for {self.name} plugins.\n
                By default from {self.default_in} to {self.default_out}"""
            )
        )
        parser.add_argument(
            "--add",
            dest="add_plugins",
            default=[],
            action="append",
            help=f"Plugin to add to {self.attr_path} from Github in the form owner/repo",
        )
        parser.add_argument(
            "--input-names",
            "-i",
            dest="input_file",
            default=self.default_in,
            help="A list of plugins in the form owner/repo",
        )
        parser.add_argument(
            "--out",
            "-o",
            dest="outfile",
            default=self.default_out,
            help="Filename to save generated nix code",
        )
        parser.add_argument(
            "--proc",
            "-p",
            dest="proc",
            type=int,
            default=30,
            help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
        )
        parser.add_argument(
            "--github-token",
            "-t",
            type=str,
            default=os.getenv("GITHUB_API_TOKEN"),
            help="""Allows setting --proc to higher values.
            Uses the GITHUB_API_TOKEN environment variable as the default value.""",
        )
        parser.add_argument(
            "--no-commit", "-n", action="store_true", default=False,
            help="Disable auto-committing changes"
        )
        parser.add_argument(
            "--debug", "-d", choices=LOG_LEVELS.keys(),
            default=logging.getLevelName(logging.WARN),
            help="Adjust log level"
        )
        return parser

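# Typical driver flow (an illustrative sketch, not part of this module): a
# concrete Editor subclass that implements generate_nix wires everything up
# like so:
#
#   editor = MyEditor("vim", Path("pkgs/applications/editors/vim/plugins"),
#                     get_plugins="<nix expr returning the current plugins>")
#   args = editor.create_parser().parse_args()
#   update_plugins(editor, args)
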
class CleanEnvironment(object):
    """Evaluate Nix in a minimal environment: NIX_PATH points at the local
    checkout and NIXPKGS_CONFIG at an empty config file, so evaluation does
    not depend on the user's setup."""

    def __enter__(self) -> None:
        self.old_environ = os.environ.copy()
        local_pkgs = str(Path(__file__).parent.parent.parent)
        os.environ["NIX_PATH"] = f"localpkgs={local_pkgs}"
        self.empty_config = NamedTemporaryFile()
        self.empty_config.write(b"{}")
        self.empty_config.flush()
        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name

    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
        os.environ.update(self.old_environ)
        self.empty_config.close()


def prefetch_plugin(
    p: PluginDesc,
    cache: "Optional[Cache]" = None,
) -> Tuple[Plugin, Optional[Repo]]:
    repo, branch, alias = p.repo, p.branch, p.alias
    name = alias or p.repo.name
    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
    commit, date = repo.latest_commit()
    cached_plugin = cache[commit] if cache else None
    if cached_plugin is not None:
        log.debug("Cache hit!")
        cached_plugin.name = name
        cached_plugin.date = date
        return cached_plugin, repo.redirect

    has_submodules = repo.has_submodules()
    log.debug(f"prefetch {name}")
    sha256 = repo.prefetch(commit)

    return (
        Plugin(name, commit, has_submodules, sha256, date=date),
        repo.redirect,
    )

def print_download_error(plugin: PluginDesc, ex: Exception):
    print(f"{plugin}: {ex}", file=sys.stderr)
    ex_traceback = ex.__traceback__
    tb_lines = [
        line.rstrip("\n")
        for line in traceback.format_exception(ex.__class__, ex, ex_traceback)
    ]
    print("\n".join(tb_lines))


def check_results(
    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]]
) -> Tuple[List[Tuple[PluginDesc, Plugin]], Redirects]:
    '''Split prefetch results into successes and redirects; exit on any failure.'''
    failures: List[Tuple[PluginDesc, Exception]] = []
    plugins = []
    redirects: Redirects = {}
    for (pdesc, result, redirect) in results:
        if isinstance(result, Exception):
            failures.append((pdesc, result))
        else:
            new_pdesc = pdesc
            if redirect is not None:
                redirects.update({pdesc: redirect})
                new_pdesc = PluginDesc(redirect, pdesc.branch, pdesc.alias)
            plugins.append((new_pdesc, result))

    print(f"{len(results) - len(failures)} plugins were checked", end="")
    if len(failures) == 0:
        print()
        return plugins, redirects
    else:
        print(f", {len(failures)} plugin(s) could not be downloaded:\n")

        for (plugin, exception) in failures:
            print_download_error(plugin, exception)

        sys.exit(1)


def make_repo(uri: str, branch) -> Repo:
    '''Instantiate a Repo with the correct specialization depending on server (github spec)'''
    # dumb check to see if it's of the form owner/repo (=> github) or https://...
    res = urlparse(uri)
    if res.netloc in ["github.com", ""]:
        res = res.path.strip('/').split('/')
        repo = RepoGitHub(res[0], res[1], branch)
    else:
        repo = Repo(uri.strip(), branch)
    return repo

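# Dispatch examples (illustrative):
#   make_repo("NixOS/nixpkgs", "HEAD")                    -> RepoGitHub (bare owner/repo)
#   make_repo("https://github.com/NixOS/nixpkgs", "HEAD") -> RepoGitHub
#   make_repo("https://git.sr.ht/~user/repo", "HEAD")     -> plain Repo (nix-prefetch-git)
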
def get_cache_path(cache_file_name: str) -> Optional[Path]:
    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
    if xdg_cache is None:
        home = os.environ.get("HOME", None)
        if home is None:
            return None
        xdg_cache = str(Path(home, ".cache"))

    return Path(xdg_cache, cache_file_name)

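# With XDG_CACHE_HOME unset this resolves to e.g. ~/.cache/vim-plugin-cache.json
# (the cache file name is derived from the editor name in Editor.__init__).
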
class Cache:
    """JSON-backed cache of prefetched plugins, keyed by commit hash."""

    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
        self.cache_file = get_cache_path(cache_file_name)

        downloads = {}
        for plugin in initial_plugins:
            downloads[plugin.commit] = plugin
        downloads.update(self.load())
        self.downloads = downloads

    def load(self) -> Dict[str, Plugin]:
        if self.cache_file is None or not self.cache_file.exists():
            return {}

        downloads: Dict[str, Plugin] = {}
        with open(self.cache_file) as f:
            data = json.load(f)
            for attr in data.values():
                p = Plugin(
                    attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
                )
                downloads[attr["commit"]] = p
        return downloads

    def store(self) -> None:
        if self.cache_file is None:
            return

        os.makedirs(self.cache_file.parent, exist_ok=True)
        with open(self.cache_file, "w+") as f:
            data = {}
            for name, attr in self.downloads.items():
                data[name] = attr.as_json()
            json.dump(data, f, indent=4, sort_keys=True)

    def __getitem__(self, key: str) -> Optional[Plugin]:
        return self.downloads.get(key, None)

    def __setitem__(self, key: str, value: Plugin) -> None:
        self.downloads[key] = value


def prefetch(
    pluginDesc: PluginDesc, cache: Cache
) -> Tuple[PluginDesc, Union[Exception, Plugin], Optional[Repo]]:
    try:
        plugin, redirect = prefetch_plugin(pluginDesc, cache)
        cache[plugin.commit] = plugin
        return (pluginDesc, plugin, redirect)
    except Exception as e:
        return (pluginDesc, e, None)


def rewrite_input(
    config: FetchConfig,
    input_file: Path,
    deprecated: Path,
    # old pluginDesc and the new
    redirects: Redirects = {},
    append: List[PluginDesc] = [],
):
    plugins = load_plugins_from_csv(config, input_file)

    plugins.extend(append)

    if redirects:
        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
        with open(deprecated, "r") as f:
            deprecations = json.load(f)
        for pdesc, new_repo in redirects.items():
            new_pdesc = PluginDesc(new_repo, pdesc.branch, pdesc.alias)
            old_plugin, _ = prefetch_plugin(pdesc)
            new_plugin, _ = prefetch_plugin(new_pdesc)
            if old_plugin.normalized_name != new_plugin.normalized_name:
                deprecations[old_plugin.normalized_name] = {
                    "new": new_plugin.normalized_name,
                    "date": cur_date_iso,
                }
        with open(deprecated, "w") as f:
            json.dump(deprecations, f, indent=4, sort_keys=True)
            f.write("\n")

    with open(input_file, "w") as f:
        log.debug("Writing into %s", input_file)
        # fields = dataclasses.fields(PluginDesc)
        fieldnames = ['repo', 'branch', 'alias']
        writer = csv.DictWriter(f, fieldnames, dialect='unix', quoting=csv.QUOTE_NONE)
        writer.writeheader()
        for plugin in sorted(plugins):
            writer.writerow(asdict(plugin))

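# Resulting CSV layout (illustrative; the repo cell is the Repo's URL via its
# __str__, and a missing alias serializes to an empty field):
#
#   repo,branch,alias
#   https://github.com/nvim-lua/plenary.nvim/,HEAD,
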
def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
    repo.index.add([str(f.resolve()) for f in files])

    if repo.index.diff("HEAD"):
        print(f'committing to nixpkgs "{message}"')
        repo.index.commit(message)
    else:
        print("no changes in working tree to commit")


def update_plugins(editor: Editor, args):
    """The main entry function of this module. All input arguments are grouped in the `Editor`."""

    log.setLevel(LOG_LEVELS[args.debug])
    log.info("Start updating plugins")
    fetch_config = FetchConfig(args.proc, args.github_token)
    update = editor.get_update(args.input_file, args.outfile, fetch_config)

    redirects = update()
    editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)

    autocommit = not args.no_commit

    if autocommit:
        editor.nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True)
        commit(editor.nixpkgs_repo, f"{editor.attr_path}: update", [args.outfile])

    if redirects:
        update()
        if autocommit:
            commit(
                editor.nixpkgs_repo,
                f"{editor.attr_path}: resolve github repository redirects",
                [args.outfile, args.input_file, editor.deprecated],
            )

    for plugin_line in args.add_plugins:
        pdesc = PluginDesc.load_from_string(fetch_config, plugin_line)
        append = [pdesc]
        editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, append=append)
        update()
        plugin, _ = prefetch_plugin(pdesc)
        if autocommit:
            commit(
                editor.nixpkgs_repo,
                "{drv_name}: init at {version}".format(
                    drv_name=editor.get_drv_name(plugin.normalized_name),
                    version=plugin.version
                ),
                [args.outfile, args.input_file],
            )