swann: add ee-scrape-data, for putting allowance data into prometheus
This commit is contained in:
parent
683e6ffc21
commit
58b87a9f0e
2 changed files with 121 additions and 0 deletions
|
@ -513,5 +513,29 @@ in {
|
|||
-format.new=true
|
||||
'';
|
||||
|
||||
  # Scrape EE mobile-broadband allowance data and export it for Prometheus.
  # The oneshot service below is driven by the matching timer; it has no
  # long-running daemon.
  systemd.services.ee-scrape-data = let
    # Python scraper shipped alongside this module.
    scriptFile = ./ee-scrape-data.py;
    # Interpreter with the scraper's third-party dependencies baked in.
    python = pkgs.python3.withPackages (pm: with pm; [
      requests
      beautifulsoup4
      html5lib
    ]);
  in {
    enable = true;
    serviceConfig = {
      Type = "oneshot";
      # Writes a textfile-collector export; presumably node_exporter is
      # configured elsewhere to read /run/prometheus-textfile-exports/ —
      # confirm against the rest of this module.
      ExecStart = "${python}/bin/python ${scriptFile} /run/prometheus-textfile-exports/ee-scrape-data.prom";
    };
  };
  systemd.timers.ee-scrape-data = {
    enable = true;
    wantedBy = [ "multi-user.target" ];
    timerConfig = {
      # First run shortly after boot...
      OnBootSec = "2m";
      # ...then roughly once a minute after each run finishes,
      OnUnitInactiveSec = "1m";
      # with jitter so scrapes don't land on exact minute boundaries.
      RandomizedDelaySec = "20";
    };
  };
|
||||
|
||||
system.stateVersion = "21.03";
|
||||
}
|
||||
|
|
97
ops/nixos/swann/ee-scrape-data.py
Normal file
97
ops/nixos/swann/ee-scrape-data.py
Normal file
|
@ -0,0 +1,97 @@
|
|||
import os
import re
import sys
import textwrap

import bs4
import requests
|
||||
|
||||
|
||||
# Size-suffix multipliers as shown on the EE status page. NOTE(review): the
# page's "kB"/"MB"/"GB" are treated as binary (1024-based) multipliers here —
# confirm against what EE actually means.
BYTE_SUFFIXES = {
    "": 1,
    "B": 1,
    "kB": 1024,
    "MB": 1024 * 1024,
    "GB": 1024 * 1024 * 1024,
    "TB": 1024 * 1024 * 1024 * 1024,
}

# Duration-suffix multipliers (in seconds) as shown on the EE status page.
TIME_SUFFIXES = {
    "Days": 60 * 60 * 24,
    "Day": 60 * 60 * 24,
    "Hrs": 60 * 60,
    "Hr": 60 * 60,
    "Mins": 60,
    "Min": 60,
}


def uniec(n):
    """Parse a suffixed size string like "12.5GB" into a number of bytes.

    Args:
        n: string of the form "<number><suffix>", suffix one of BYTE_SUFFIXES.

    Returns:
        float byte count.

    Raises:
        ValueError: if the string does not match or the suffix is unknown.
    """
    m = re.match(r"^([0-9]+(?:[.][0-9]+)?)([a-zA-Z]*)$", n)
    if not m:
        raise ValueError(f"invalid suffixed-bytes {n}")
    val, suffix = m.groups()
    val = float(val)
    if suffix not in BYTE_SUFFIXES:
        raise ValueError(f"unknown suffix {suffix} from {n}")
    return val * BYTE_SUFFIXES[suffix]


def untime(n):
    """Parse a duration string like "2 Days 3 Hrs" into seconds.

    Args:
        n: zero or more space-separated "<int> <unit>" pairs, unit one of
           TIME_SUFFIXES. The empty string parses to 0.

    Returns:
        total seconds (float once any component is added, else int 0).

    Raises:
        ValueError: if any part of the string cannot be parsed or a unit is
            unknown. (Previously unparsable trailing text was silently
            dropped, returning a partial total.)
    """
    remaining = n
    x = 0
    while remaining:
        m = re.match(r"^([0-9]+) ([a-zA-Z]+)(?: (.*)|$)", remaining)
        if not m:
            # Fail loudly instead of returning a partial sum: a format change
            # on the scraped page should surface as an error, not bad data.
            raise ValueError(f"invalid duration {n!r} (unparsed: {remaining!r})")
        val, suffix, remaining = m.groups()
        val = float(val)
        if suffix not in TIME_SUFFIXES:
            raise ValueError(f"unknown suffix {suffix} from {n}")
        x += val * TIME_SUFFIXES[suffix]
    return x
|
||||
|
||||
|
||||
def scrape_data():
    """Fetch the EE mobile-broadband status page and render Prometheus metrics.

    Returns the textfile-collector payload (no trailing newline) containing
    ee_allowance_left / ee_allowance_total / ee_time_remaining gauges, each
    labelled with the account number.

    Raises:
        requests.HTTPError: if the status page request fails.
        ValueError: if a scraped value cannot be parsed.
    """
    # NOTE(review): presumably this endpoint is only reachable from behind
    # the EE connection itself — confirm.
    resp = requests.get("http://add-on.ee.co.uk/mbbstatus")
    resp.raise_for_status()
    soup = bs4.BeautifulSoup(resp.text, "html5lib")

    # Second <span> under the account details holds the account number;
    # spaces are presentation-only grouping, so drop them.
    account_number = (
        soup.select("p.account__details > span")[1].get_text().strip().replace(" ", "")
    )

    # The allowance element carries two text runs: "<left>" and "left of <total>".
    left_str, total_str = list(soup.select("span.allowance__left")[0].stripped_strings)
    allowance_left = uniec(left_str)
    prefix = "left of "
    if total_str.startswith(prefix):
        total_str = total_str[len(prefix) :]
    allowance_total = uniec(total_str)

    timespan = " ".join(soup.select(".allowance__timespan")[0].stripped_strings)
    lasts_prefix = "Lasts for "
    if timespan.startswith(lasts_prefix):
        timespan = timespan[len(lasts_prefix) :]
    time_remaining = untime(timespan)

    return textwrap.dedent(
        f"""
        ee_allowance_left{{account_number="{account_number}"}} {allowance_left}
        ee_allowance_total{{account_number="{account_number}"}} {allowance_total}
        ee_time_remaining{{account_number="{account_number}"}} {time_remaining}
        """
    ).strip()
|
||||
|
||||
|
||||
def main(args):
    """CLI entry point.

    Usage: ee-scrape-data.py [output-path]

    With no argument the metrics are printed to stdout; with one argument
    they are written to that path, intended for node_exporter's textfile
    collector directory.
    """
    if len(args) > 2:
        print(f"{args[0]} (output path)", file=sys.stderr)
        sys.exit(1)

    data = scrape_data()
    if len(args) == 1:
        print(data)
    else:
        # Write atomically: the textfile collector may read the file at any
        # moment, so never let it observe a half-written export. The temp
        # file lives next to the target so os.replace stays a same-filesystem
        # atomic rename.
        tmp_path = args[1] + ".tmp"
        with open(tmp_path, "w") as f:
            # The textfile collector requires the export to end with a
            # newline; scrape_data() returns a stripped payload, so add it.
            f.write(data + "\n")
        os.replace(tmp_path, args[1])


if __name__ == "__main__":
    main(sys.argv)
|
Loading…
Reference in a new issue