Diffstat (limited to 'pkgs/common-updater/scripts/list-directory-versions')
-rwxr-xr-x pkgs/common-updater/scripts/list-directory-versions | 69
1 file changed, 69 insertions(+), 0 deletions(-)
diff --git a/pkgs/common-updater/scripts/list-directory-versions b/pkgs/common-updater/scripts/list-directory-versions
new file mode 100755
index 00000000000..46c9e9d30a5
--- /dev/null
+++ b/pkgs/common-updater/scripts/list-directory-versions
@@ -0,0 +1,69 @@
+#!/usr/bin/env python3
+
+import argparse
+import json
+import os
+import re
+import subprocess
+
+import requests
+from bs4 import BeautifulSoup
+
+parser = argparse.ArgumentParser(
+    description="Get all available versions listed for a package in a site."
+)
+
+parser.add_argument(
+    "--pname",
+    default=os.environ.get("UPDATE_NIX_PNAME"),
+    required="UPDATE_NIX_PNAME" not in os.environ,
+    help="name of the package",
+)
+parser.add_argument(
+    "--attr-path",
+    default=os.environ.get("UPDATE_NIX_ATTR_PATH"),
+    help="attribute path of the package",
+)
+parser.add_argument("--url", help="url of the page that lists the package versions")
+parser.add_argument("--file", help="file name for writing debugging information")
+
+
+if __name__ == "__main__":
+    args = parser.parse_args()
+
+    pname = args.pname
+
+    attr_path = args.attr_path or pname
+
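+    # Fall back to the directory of the package's src URL, evaluated with nix-instantiate.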
+    url = args.url or json.loads(
+        subprocess.check_output(
+            [
+                "nix-instantiate",
+                "--json",
+                "--eval",
+                "-E",
+                f"with import ./. {{}}; dirOf (lib.head {attr_path}.src.urls)",
+            ],
+            text=True,
+        )
+    )
+
+    # Record in the debug file where versions are being fetched from.
+    if args.file:
+        with open(args.file, "a") as f:
+            f.write(f"# Listing versions for {pname} from {url}\n")
+
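+    # Scrape the listing page and extract version numbers from the link targets.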
+    page = requests.get(url)
+    page.raise_for_status()
+    soup = BeautifulSoup(page.content, "html.parser")
+    links = soup.find_all("a")
+    for link in links:
+        link_url = link.get("href")
+        if link_url is not None:
+            match = re.fullmatch(
+                rf"{re.escape(pname)}-([\d.]+?(-[\d\w.-]+?)?)(\.tar)?(\.[^.]*)", link_url
+            )
+            if match:
+                print(match.group(1))
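
The version-extraction regex does the real work here. A minimal sketch of
how it behaves, assuming hypothetical inputs (the pname value and the
filenames below are illustrative only, not taken from any real listing):

    import re

    pname = "hello"  # hypothetical package name
    pattern = rf"{re.escape(pname)}-([\d.]+?(-[\d\w.-]+?)?)(\.tar)?(\.[^.]*)"

    # Illustrative hrefs: a plain release, a pre-release, and a
    # non-version link that should be filtered out.
    for href in ["hello-2.12.1.tar.gz", "hello-1.0-rc1.tar.xz", "hello-latest.zip"]:
        match = re.fullmatch(pattern, href)
        print(href, "->", match.group(1) if match else "no match")

    # hello-2.12.1.tar.gz -> 2.12.1
    # hello-1.0-rc1.tar.xz -> 1.0-rc1
    # hello-latest.zip -> no match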