Diffstat (limited to 'maintainers/scripts')
-rw-r--r--  maintainers/scripts/all-tarballs.nix | 16
-rw-r--r--  maintainers/scripts/build.nix | 55
-rw-r--r--  maintainers/scripts/check-hydra-by-maintainer.nix | 68
-rwxr-xr-x  maintainers/scripts/check-maintainer-github-handles.sh | 66
-rwxr-xr-x  maintainers/scripts/copy-tarballs.pl | 240
-rwxr-xr-x  maintainers/scripts/db-to-md.sh | 88
-rwxr-xr-x  maintainers/scripts/debian-patches.sh | 34
-rwxr-xr-x  maintainers/scripts/dep-licenses.sh | 57
-rwxr-xr-x  maintainers/scripts/doc/escape-code-markup.py | 97
-rwxr-xr-x  maintainers/scripts/doc/replace-xrefs-by-empty-links.py | 32
-rw-r--r--  maintainers/scripts/doc/unknown-code-language.lua | 12
-rw-r--r--  maintainers/scripts/eval-release.nix | 24
-rwxr-xr-x  maintainers/scripts/eval-release.sh | 11
-rwxr-xr-x  maintainers/scripts/fetch-kde-qt.sh | 61
-rw-r--r--  maintainers/scripts/find-tarballs.nix | 50
-rw-r--r--  maintainers/scripts/haskell/dependencies.nix | 10
-rwxr-xr-x  maintainers/scripts/haskell/hydra-report.hs | 490
-rw-r--r--  maintainers/scripts/haskell/maintainer-handles.nix | 7
-rwxr-xr-x  maintainers/scripts/haskell/mark-broken.sh | 47
-rwxr-xr-x  maintainers/scripts/haskell/merge-and-open-pr.sh | 122
-rwxr-xr-x  maintainers/scripts/haskell/regenerate-hackage-packages.sh | 46
-rwxr-xr-x  maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh | 15
-rw-r--r--  maintainers/scripts/haskell/test-configurations.nix | 136
-rw-r--r--  maintainers/scripts/haskell/transitive-broken-packages.nix | 16
-rwxr-xr-x  maintainers/scripts/haskell/update-cabal2nix-unstable.sh | 17
-rwxr-xr-x  maintainers/scripts/haskell/update-hackage.sh | 35
-rwxr-xr-x  maintainers/scripts/haskell/update-stackage.sh | 57
-rwxr-xr-x  maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh | 22
-rwxr-xr-x  maintainers/scripts/hydra-eval-failures.py | 112
-rwxr-xr-x  maintainers/scripts/hydra_eval_check | 13
-rw-r--r--  maintainers/scripts/luarocks-config.lua | 4
-rw-r--r--  maintainers/scripts/luarocks-packages.csv | 86
-rwxr-xr-x  maintainers/scripts/nix-call-package | 5
-rwxr-xr-x  maintainers/scripts/nix-diff.sh | 277
-rw-r--r--  maintainers/scripts/nix-generate-from-cpan.nix | 25
-rwxr-xr-x  maintainers/scripts/nix-generate-from-cpan.pl | 466
-rw-r--r--  maintainers/scripts/nixpkgs-lint.nix | 24
-rwxr-xr-x  maintainers/scripts/nixpkgs-lint.pl | 173
-rwxr-xr-x  maintainers/scripts/patchelf-hints.sh | 84
-rw-r--r--  maintainers/scripts/pluginupdate.py | 674
-rwxr-xr-x  maintainers/scripts/rebuild-amount.sh | 133
-rwxr-xr-x  maintainers/scripts/remove-old-aliases.py | 202
-rwxr-xr-x  maintainers/scripts/update-channel-branches.sh | 112
-rwxr-xr-x  maintainers/scripts/update-luarocks-packages | 218
-rw-r--r--  maintainers/scripts/update-luarocks-shell.nix | 13
-rwxr-xr-x  maintainers/scripts/update-python-libraries | 5
-rwxr-xr-x  maintainers/scripts/update-redirected-urls.sh | 12
-rwxr-xr-x  maintainers/scripts/update-ruby-packages | 16
-rwxr-xr-x  maintainers/scripts/update.nix | 212
-rw-r--r--  maintainers/scripts/update.py | 229
-rw-r--r--  maintainers/scripts/vanity-manual-equalities.txt | 7
-rwxr-xr-x  maintainers/scripts/vanity.sh | 122
52 files changed, 5155 insertions, 0 deletions
diff --git a/maintainers/scripts/all-tarballs.nix b/maintainers/scripts/all-tarballs.nix
new file mode 100644
index 00000000000..6a4de8a4b95
--- /dev/null
+++ b/maintainers/scripts/all-tarballs.nix
@@ -0,0 +1,16 @@
+/* Helper expression for copy-tarballs. This returns (nearly) all
+   tarballs used by the free packages in Nixpkgs.
+
+   Typical usage:
+
+   $ copy-tarballs.pl --expr 'import <nixpkgs/maintainers/scripts/all-tarballs.nix>'
+*/
+
+import ../../pkgs/top-level/release.nix
+  { # Don't apply ‘hydraJob’ to jobs, because then we can't get to the
+    # dependency graph.
+    scrubJobs = false;
+    # No need to evaluate on i686.
+    supportedSystems = [ "x86_64-linux" ];
+    limitedSupportedSystems = [];
+  }
diff --git a/maintainers/scripts/build.nix b/maintainers/scripts/build.nix
new file mode 100644
index 00000000000..ca401700b4a
--- /dev/null
+++ b/maintainers/scripts/build.nix
@@ -0,0 +1,55 @@
+{ maintainer
+, localSystem ? { system = args.system or builtins.currentSystem; }
+, system ? localSystem.system
+, crossSystem ? localSystem
+, ...
+}@args:
+
+# based on update.nix
+# nix-build build.nix --argstr maintainer <yourname>
+
+# to build for aarch64-linux using boot.binfmt.emulatedSystems:
+# nix-build build.nix --argstr maintainer <yourname> --argstr system aarch64-linux
+
+let
+  pkgs = import ./../../default.nix (removeAttrs args [ "maintainer" ]);
+  maintainer_ = pkgs.lib.maintainers.${maintainer};
+  packagesWith = cond: return: set:
+    (pkgs.lib.flatten
+      (pkgs.lib.mapAttrsToList
+        (name: pkg:
+          let
+            result = builtins.tryEval
+              (
+                if pkgs.lib.isDerivation pkg && cond name pkg then
+                  # Skip packages whose closure fails on evaluation.
+                  # This happens for pkgs like `python27Packages.djangoql`
+                  # that have disabled Python pkgs as dependencies.
+                  builtins.seq pkg.outPath
+                    [ (return name pkg) ]
+                else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
+                then packagesWith cond return pkg
+                else [ ]
+              );
+          in
+          if result.success then result.value
+          else [ ]
+        )
+        set
+      )
+    );
+in
+packagesWith
+  (name: pkg:
+    (
+      if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta
+      then (
+        if builtins.isList pkg.meta.maintainers
+        then builtins.elem maintainer_ pkg.meta.maintainers
+        else maintainer_ == pkg.meta.maintainers
+      )
+      else false
+    )
+  )
+  (name: pkg: pkg)
+  pkgs
diff --git a/maintainers/scripts/check-hydra-by-maintainer.nix b/maintainers/scripts/check-hydra-by-maintainer.nix
new file mode 100644
index 00000000000..326aae47f8c
--- /dev/null
+++ b/maintainers/scripts/check-hydra-by-maintainer.nix
@@ -0,0 +1,68 @@
+{ maintainer }:
+let
+  pkgs = import ./../../default.nix { };
+  maintainer_ = pkgs.lib.maintainers.${maintainer};
+  packagesWith = cond: return: prefix: set:
+    (pkgs.lib.flatten
+      (pkgs.lib.mapAttrsToList
+        (name: pkg:
+          let
+            result = builtins.tryEval
+              (
+                if pkgs.lib.isDerivation pkg && cond name pkg then
+                # Skip packages whose closure fails on evaluation.
+                # This happens for pkgs like `python27Packages.djangoql`
+                # that have disabled Python pkgs as dependencies.
+                  builtins.seq pkg.outPath
+                    [ (return "${prefix}${name}") ]
+                else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
+                then packagesWith cond return "${name}." pkg
+                else [ ]
+              );
+          in
+          if result.success then result.value
+          else [ ]
+        )
+        set
+      )
+    );
+
+  packages = packagesWith
+    (name: pkg:
+      (
+        if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta
+        then
+          (
+            if builtins.isList pkg.meta.maintainers
+            then builtins.elem maintainer_ pkg.meta.maintainers
+            else maintainer_ == pkg.meta.maintainers
+          )
+        else false
+      )
+    )
+    (name: name)
+    ("")
+    pkgs;
+
+in
+pkgs.stdenv.mkDerivation {
+  name = "nixpkgs-update-script";
+  buildInputs = [ pkgs.hydra-check ];
+  buildCommand = ''
+    echo ""
+    echo "----------------------------------------------------------------"
+    echo ""
+    echo "nix-shell maintainers/scripts/check-hydra-by-maintainer.nix --argstr maintainer SuperSandro2000"
+    echo ""
+    echo "----------------------------------------------------------------"
+    exit 1
+  '';
+  shellHook = ''
+    unset shellHook # do not contaminate nested shells
+    echo "Please stand by"
+    echo nix-shell -p hydra-check --run "hydra-check ${builtins.concatStringsSep " " packages}"
+    nix-shell -p hydra-check --run "hydra-check ${builtins.concatStringsSep " " packages}"
+    exit $?
+  '';
+}
diff --git a/maintainers/scripts/check-maintainer-github-handles.sh b/maintainers/scripts/check-maintainer-github-handles.sh
new file mode 100755
index 00000000000..a5555ca9e90
--- /dev/null
+++ b/maintainers/scripts/check-maintainer-github-handles.sh
@@ -0,0 +1,66 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i bash -p jq parallel
+
+# An example of how to work with the `lib.maintainers` attrset.
+# It can be used to check whether all user handles are still valid.
+
+set -o errexit -o noclobber -o nounset -o pipefail
+shopt -s failglob inherit_errexit
+
+function checkCommits {
+    local ret status tmp user
+    user="$1"
+    tmp=$(mktemp)
+    curl --silent -w "%{http_code}" \
+         "https://github.com/NixOS/nixpkgs/commits?author=$user" \
+         > "$tmp"
+    # the last line of tmp contains the http status
+    status=$(tail -n1 "$tmp")
+    ret=
+    case $status in
+        200) if <"$tmp" grep -i "no commits found" > /dev/null; then
+                 ret=1
+             else
+                 ret=0
+             fi
+             ;;
+        # because of github’s hard request limits, this can take some time
+        429) sleep 2
+             printf "."
+             checkCommits "$user"
+             ret=$?
+             ;;
+        *)   printf "BAD STATUS: $(tail -n1 "$tmp") for %s\n" "$user"
+             ret=1
+             ;;
+    esac
+    rm "$tmp"
+    return $ret
+}
+export -f checkCommits
+
+function checkUser {
+    local user="$1"
+    local status=
+    status="$(curl --silent --head "https://github.com/${user}" | grep Status)"
+    # checks whether a user handle can be found on github
+    if [[ "$status" =~ 404 ]]; then
+        printf "%s\t\t\t\t%s\n" "$status" "$user"
+    # checks whether the user handle has any nixpkgs commits
+    elif checkCommits "$user"; then
+        printf "OK!\t\t\t\t%s\n" "$user"
+    else
+        printf "No Commits!\t\t\t%s\n" "$user"
+    fi
+}
+export -f checkUser
+
+# Output the maintainers set as JSON,
+# extract the github handle of each maintainer (if it exists),
+# then check several of them in parallel.
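+#
+# The JSON contains one entry per maintainer, roughly like this
+# (hypothetical entry):
+#   "alice": { "email": "alice@example.org", "github": "alice", "name": "Alice" }
+# so the jq filter below prints one github handle per line.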
+nix-instantiate -A lib.maintainers --eval --strict --json \
+    | jq -r '.[]|.github|select(.)' \
+    | parallel -j5 checkUser
+
+# To check some arbitrary users:
+# parallel -j100 checkUser ::: "eelco" "profpatsch" "Profpatsch" "a"
diff --git a/maintainers/scripts/copy-tarballs.pl b/maintainers/scripts/copy-tarballs.pl
new file mode 100755
index 00000000000..6a08eb88bf8
--- /dev/null
+++ b/maintainers/scripts/copy-tarballs.pl
@@ -0,0 +1,240 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i perl -p perl perlPackages.NetAmazonS3 perlPackages.FileSlurp perlPackages.JSON perlPackages.LWPProtocolHttps nixUnstable nixUnstable.perl-bindings
+
+# This command uploads tarballs to tarballs.nixos.org, the
+# content-addressed cache used by fetchurl as a fallback for when
+# upstream tarballs disappear or change. Usage:
+#
+# 1) To upload one or more files:
+#
+#    $ copy-tarballs.pl --file /path/to/tarball.tar.gz
+#
+# 2) To upload all files obtained via calls to fetchurl in a Nix derivation:
+#
+#    $ copy-tarballs.pl --expr '(import <nixpkgs> {}).hello'
+
+use strict;
+use warnings;
+use File::Basename;
+use File::Path;
+use File::Slurp;
+use JSON;
+use Net::Amazon::S3;
+use Nix::Store;
+
+isValidPath("/nix/store/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa-foo"); # FIXME: forces Nix::Store initialisation
+
+sub usage {
+    die "Syntax: $0 [--dry-run] [--exclude REGEXP] [--expr EXPR | --file FILES...]\n";
+}
+
+my $dryRun = 0;
+my $expr;
+my @fileNames;
+my $exclude;
+
+while (@ARGV) {
+    my $flag = shift @ARGV;
+
+    if ($flag eq "--expr") {
+        $expr = shift @ARGV or die "--expr requires an argument";
+    } elsif ($flag eq "--file") {
+        @fileNames = @ARGV;
+        last;
+    } elsif ($flag eq "--dry-run") {
+        $dryRun = 1;
+    } elsif ($flag eq "--exclude") {
+        $exclude = shift @ARGV or die "--exclude requires an argument";
+    } else {
+        usage();
+    }
+}
+
+
+# S3 setup.
+my $aws_access_key_id = $ENV{'AWS_ACCESS_KEY_ID'} or die "AWS_ACCESS_KEY_ID not set\n";
+my $aws_secret_access_key = $ENV{'AWS_SECRET_ACCESS_KEY'} or die "AWS_SECRET_ACCESS_KEY not set\n";
+
+my $s3 = Net::Amazon::S3->new(
+    { aws_access_key_id     => $aws_access_key_id,
+      aws_secret_access_key => $aws_secret_access_key,
+      retry                 => 1,
+      host                  => "s3-eu-west-1.amazonaws.com",
+    });
+
+my $bucket = $s3->bucket("nixpkgs-tarballs") or die;
+
+my $doWrite = 0;
+my $cacheFile = ($ENV{"HOME"} or die "\$HOME is not set") . "/.cache/nix/copy-tarballs";
+my %cache;
+$cache{$_} = 1 foreach read_file($cacheFile, err_mode => 'quiet', chomp => 1);
+$doWrite = 1;
+
+END() {
+    File::Path::mkpath(dirname($cacheFile), 0, 0755);
+    write_file($cacheFile, map { "$_\n" } keys %cache) if $doWrite;
+}
+
+sub alreadyMirrored {
+    my ($algo, $hash) = @_;
+    my $key = "$algo/$hash";
+    return 1 if defined $cache{$key};
+    my $res = defined $bucket->get_key($key);
+    $cache{$key} = 1 if $res;
+    return $res;
+}
+
+sub uploadFile {
+    my ($fn, $name) = @_;
+
+    my $md5_16 = hashFile("md5", 0, $fn) or die;
+    my $sha1_16 = hashFile("sha1", 0, $fn) or die;
+    my $sha256_32 = hashFile("sha256", 1, $fn) or die;
+    my $sha256_16 = hashFile("sha256", 0, $fn) or die;
+    my $sha512_32 = hashFile("sha512", 1, $fn) or die;
+    my $sha512_16 = hashFile("sha512", 0, $fn) or die;
+
+    my $mainKey = "sha512/$sha512_16";
+
+    # Create redirects from the other hash types.
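+    # For example, md5/<md5-base16> and sha1/<sha1-base16> end up as S3
+    # website redirects pointing at the canonical sha512/<sha512-base16> key.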
+    sub redirect {
+        my ($name, $dest) = @_;
+        #print STDERR "linking $name to $dest...\n";
+        $bucket->add_key($name, "", {
+            'x-amz-website-redirect-location' => "/" . $dest,
+            'x-amz-acl' => "public-read"
+        })
+            or die "failed to create redirect from $name to $dest\n";
+        $cache{$name} = 1;
+    }
+    redirect "md5/$md5_16", $mainKey;
+    redirect "sha1/$sha1_16", $mainKey;
+    redirect "sha256/$sha256_32", $mainKey;
+    redirect "sha256/$sha256_16", $mainKey;
+    redirect "sha512/$sha512_32", $mainKey;
+
+    # Upload the file as sha512/<hash-in-base-16>.
+    print STDERR "uploading $fn to $mainKey...\n";
+    $bucket->add_key_filename($mainKey, $fn, {
+        'x-amz-meta-original-name' => $name,
+        'x-amz-acl' => "public-read"
+    })
+        or die "failed to upload $fn to $mainKey\n";
+    $cache{$mainKey} = 1;
+}
+
+if (scalar @fileNames) {
+    my $res = 0;
+    foreach my $fn (@fileNames) {
+        eval {
+            if (alreadyMirrored("sha512", hashFile("sha512", 0, $fn))) {
+                print STDERR "$fn is already mirrored\n";
+            } else {
+                uploadFile($fn, basename $fn);
+            }
+        };
+        if ($@) {
+            warn "$@";
+            $res = 1;
+        }
+    }
+    exit $res;
+}
+
+elsif (defined $expr) {
+
+    # Evaluate find-tarballs.nix.
+    my $pid = open(JSON, "-|", "nix-instantiate", "--eval", "--json", "--strict",
+                   "<nixpkgs/maintainers/scripts/find-tarballs.nix>",
+                   "--arg", "expr", $expr);
+    my $stdout = <JSON>;
+    waitpid($pid, 0);
+    die "$0: evaluation failed\n" if $?;
+    close JSON;
+
+    my $fetches = decode_json($stdout);
+
+    print STDERR "evaluation returned ", scalar(@{$fetches}), " tarballs\n";
+
+    # Check every fetchurl call discovered by find-tarballs.nix.
+    my $mirrored = 0;
+    my $have = 0;
+    foreach my $fetch (sort { $a->{url} cmp $b->{url} } @{$fetches}) {
+        my $url = $fetch->{url};
+        my $algo = $fetch->{type};
+        my $hash = $fetch->{hash};
+        my $name = $fetch->{name};
+
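+        # SRI-style hashes (e.g. "sha256-<base64>") carry the algorithm
+        # inline; split it off and convert the hash to base-16 below.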
+        if ($hash =~ /^([a-z0-9]+)-([A-Za-z0-9+\/=]+)$/) {
+            $algo = $1;
+            $hash = `nix hash to-base16 $hash` or die;
+            chomp $hash;
+        }
+
+        next unless $algo =~ /^[a-z0-9]+$/;
+
+        # Convert non-SRI base-64 to base-16.
+        if ($hash =~ /^[A-Za-z0-9+\/=]+$/) {
+            $hash = `nix hash to-base16 --type '$algo' $hash` or die;
+            chomp $hash;
+        }
+
+        if (defined $ENV{DEBUG}) {
+            print "$url $algo $hash\n";
+            next;
+        }
+
+        if ($url !~ /^http:/ && $url !~ /^https:/ && $url !~ /^ftp:/ && $url !~ /^mirror:/) {
+            print STDERR "skipping $url (unsupported scheme)\n";
+            next;
+        }
+
+        next if defined $exclude && $url =~ /$exclude/;
+
+        if (alreadyMirrored($algo, $hash)) {
+            $have++;
+            next;
+        }
+
+        my $storePath = makeFixedOutputPath(0, $algo, $hash, $name);
+
+        print STDERR "mirroring $url ($storePath, $algo, $hash)...\n";
+
+        if ($dryRun) {
+            $mirrored++;
+            next;
+        }
+
+        # Substitute the output.
+        if (!isValidPath($storePath)) {
+            system("nix-store", "-r", $storePath);
+        }
+
+        # Otherwise download the file using nix-prefetch-url.
+        if (!isValidPath($storePath)) {
+            $ENV{QUIET} = 1;
+            $ENV{PRINT_PATH} = 1;
+            my $fh;
+            my $pid = open($fh, "-|", "nix-prefetch-url", "--type", $algo, $url, $hash) or die;
+            waitpid($pid, 0) or die;
+            if ($? != 0) {
+                print STDERR "failed to fetch $url: $?\n";
+                next;
+            }
+            <$fh>; my $storePath2 = <$fh>; chomp $storePath2;
+            if ($storePath ne $storePath2) {
+                warn "strange: $storePath != $storePath2\n";
+                next;
+            }
+        }
+
+        uploadFile($storePath, $url);
+        $mirrored++;
+    }
+
+    print STDERR "mirrored $mirrored files, already have $have files\n";
+}
+
+else {
+    usage();
+}
diff --git a/maintainers/scripts/db-to-md.sh b/maintainers/scripts/db-to-md.sh
new file mode 100755
index 00000000000..01357d1e241
--- /dev/null
+++ b/maintainers/scripts/db-to-md.sh
@@ -0,0 +1,88 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -I nixpkgs=. -i bash -p pandoc libxml2
+
+# This script is temporarily needed while we transition the manual to
+# CommonMark. It converts DocBook files into our CommonMark flavour.
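+#
+# Usage: maintainers/scripts/db-to-md.sh [--debug] FILE...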
+
+debug=
+files=()
+
+while [ "$#" -gt 0 ]; do
+    i="$1"; shift 1
+    case "$i" in
+      --debug)
+        debug=1
+        ;;
+      *)
+        files+=("$i")
+        ;;
+    esac
+done
+
+echo "WARNING: This is an experimental script and might not preserve all formatting." > /dev/stderr
+echo "Please report any issues you discover." > /dev/stderr
+
+outExtension="md"
+if [[ $debug ]]; then
+    outExtension="json"
+fi
+
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+
+# NOTE: Keep in sync with Nixpkgs manual (/doc/Makefile).
+# TODO: Remove raw-attribute when we can get rid of DocBook altogether.
+pandoc_commonmark_enabled_extensions=+attributes+fenced_divs+footnotes+bracketed_spans+definition_lists+pipe_tables+raw_attribute
+targetLang="commonmark${pandoc_commonmark_enabled_extensions}+smart"
+if [[ $debug ]]; then
+    targetLang=json
+fi
+pandoc_flags=(
+    # Not needed:
+    # - diagram-generator.lua (we do not support that in NixOS manual to limit dependencies)
+    # - media extraction (was only required for diagram generator)
+    # - myst-reader/roles.lua (only relevant for MyST → DocBook)
+    # - link-unix-man-references.lua (links should only be added to display output)
+    # - docbook-writer/rst-roles.lua (only relevant for → DocBook)
+    # - docbook-writer/labelless-link-is-xref.lua (only relevant for → DocBook)
+    "--lua-filter=$DIR/../../doc/build-aux/pandoc-filters/docbook-reader/citerefentry-to-rst-role.lua"
+    "--lua-filter=$DIR/../../doc/build-aux/pandoc-filters/myst-writer/roles.lua"
+    "--lua-filter=$DIR/doc/unknown-code-language.lua"
+    -f docbook
+    -t "$targetLang"
+    --tab-stop=2
+    --wrap=none
+)
+
+for file in "${files[@]}"; do
+    if [[ ! -f "$file" ]]; then
+        echo "db-to-md.sh: $file does not exist" > /dev/stderr
+        exit 1
+    else
+        rootElement=$(xmllint --xpath 'name(//*)' "$file")
+
+        if [[ $rootElement = chapter ]]; then
+            extension=".chapter.$outExtension"
+        elif [[ $rootElement = section ]]; then
+            extension=".section.$outExtension"
+        else
+            echo "db-to-md.sh: $file contains an unsupported root element $rootElement" > /dev/stderr
+            exit 1
+        fi
+
+        outFile="${file%".section.xml"}"
+        outFile="${outFile%".chapter.xml"}"
+        outFile="${outFile%".xml"}$extension"
+        temp1=$(mktemp)
+        $DIR/doc/escape-code-markup.py "$file" "$temp1"
+        if [[ $debug ]]; then
+            echo "Converted $file to $temp1" > /dev/stderr
+        fi
+        temp2=$(mktemp)
+        $DIR/doc/replace-xrefs-by-empty-links.py "$temp1" "$temp2"
+        if [[ $debug ]]; then
+            echo "Converted $temp1 to $temp2" > /dev/stderr
+        fi
+        pandoc "$temp2" -o "$outFile" "${pandoc_flags[@]}"
+        echo "Converted $file to $outFile" > /dev/stderr
+    fi
+done
diff --git a/maintainers/scripts/debian-patches.sh b/maintainers/scripts/debian-patches.sh
new file mode 100755
index 00000000000..de6be136ca7
--- /dev/null
+++ b/maintainers/scripts/debian-patches.sh
@@ -0,0 +1,34 @@
+#!/usr/bin/env bash
+
+# Download patches from the Debian project.
+# Usage: $0 debian-patches.txt debian-patches.nix
+# Example input and output files can be found in tools/graphics/plotutils.
+
+DEB_URL=https://sources.debian.org/data/main
+declare -a deb_patches
+mapfile -t deb_patches < "$1"
+
+# First letter
+deb_prefix="${deb_patches[0]:0:1}"
+prefix="${DEB_URL}/${deb_prefix}/${deb_patches[0]}/debian/patches"
+
+if [[ -n "$2" ]]; then
+    exec 1> "$2"
+fi
+
+cat <<EOF
+# Generated by $(basename "$0") from $(basename "$1")
+let
+  prefix = "${prefix}";
+in
+[
+EOF
+for ((i=1;i < ${#deb_patches[@]}; ++i)); do
+    url="${prefix}/${deb_patches[$i]}"
+    sha256=$(nix-prefetch-url "$url")
+    echo "  {"
+    echo "    url = \"\${prefix}/${deb_patches[$i]}\";"
+    echo "    sha256 = \"$sha256\";"
+    echo "  }"
+done
+echo "]"
diff --git a/maintainers/scripts/dep-licenses.sh b/maintainers/scripts/dep-licenses.sh
new file mode 100755
index 00000000000..28ad22c334f
--- /dev/null
+++ b/maintainers/scripts/dep-licenses.sh
@@ -0,0 +1,57 @@
+#!/bin/sh
+
+attr=$1
+
+: ${NIXPKGS=/etc/nixos/nixpkgs}
+
+tmp=$(mktemp --tmpdir -d nixpkgs-dep-license.XXXXXX)
+
+exitHandler() {
+    exitCode=$?
+    rm -rf "$tmp"
+    exit $exitCode
+}
+
+trap "exitHandler" EXIT
+
+# fetch the trace and the drvPath of the attribute.
+nix-instantiate $NIXPKGS -A $attr --show-trace > "$tmp/drvPath" 2> "$tmp/trace" || {
+  cat 1>&2 - "$tmp/trace" <<EOF
+An error occurred while evaluating $attr.
+EOF
+  exit 1
+}
+
+# generate a sed script based on the trace output.
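+# Each relevant trace line is expected to embed a marker of the form
+#   @:drv:<path to .drv>:<license terms>:@
+# (a sketch inferred from the patterns below, as emitted by the
+# traceDrvLicenses stdenv adapter).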
+sed '
+  \,@:.*:@, {
+    # \1  *.drv file
+    # \2  License terms
+    s,.*@:drv:\(.*\):\(.*\):@.*,s!\1!\1: \2!; t;,
+    s!Str(\\\"\([^,]*\)\\\",\[\])!\1!g
+    b
+  }
+  d
+' "$tmp/trace" > "$tmp/filter.sed"
+
+if test "$(wc -l "$tmp/filter.sed" | sed 's/ .*//')" = 0; then
+  echo 1>&2 "
+No derivation mentioned in the stack trace.  Either your derivation does
+not use stdenv.mkDerivation or you forgot to use the stdenv adapter named
+traceDrvLicenses.
+
+-  defaultStdenv = allStdenvs.stdenv;
++  defaultStdenv = traceDrvLicenses allStdenvs.stdenv;
+"
+  exit 1
+fi
+
+
+# remove all dependencies which are using stdenv.mkDerivation
+echo '
+d
+' >> "$tmp/filter.sed"
+
+nix-store -q --tree $(cat "$tmp/drvPath") | sed -f "$tmp/filter.sed"
+
+exit 0;
diff --git a/maintainers/scripts/doc/escape-code-markup.py b/maintainers/scripts/doc/escape-code-markup.py
new file mode 100755
index 00000000000..015435b698e
--- /dev/null
+++ b/maintainers/scripts/doc/escape-code-markup.py
@@ -0,0 +1,97 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -I nixpkgs=channel:nixos-unstable -i python3 -p python3 -p python3.pkgs.lxml
+
+"""
+Pandoc will strip any markup within code elements, so
+let's escape it so that it can be handled manually.
+"""
+
+import lxml.etree as ET
+import re
+import sys
+
+def replace_element_by_text(el: ET.Element, text: str) -> None:
+    """
+    Author: bernulf
+    Source: https://stackoverflow.com/a/10520552/160386
+    SPDX-License-Identifier: CC-BY-SA-3.0
+    """
+    text = text + (el.tail or "")
+    parent = el.getparent()
+    if parent is not None:
+        previous = el.getprevious()
+        if previous is not None:
+            previous.tail = (previous.tail or "") + text
+        else:
+            parent.text = (parent.text or "") + text
+        parent.remove(el)
+
+DOCBOOK_NS = "http://docbook.org/ns/docbook"
+
+# List of elements that pandoc’s DocBook reader strips markup from.
+# https://github.com/jgm/pandoc/blob/master/src/Text/Pandoc/Readers/DocBook.hs
+code_elements = [
+    # CodeBlock
+    "literallayout",
+    "screen",
+    "programlisting",
+    # Code (inline)
+    "classname",
+    "code",
+    "filename",
+    "envar",
+    "literal",
+    "computeroutput",
+    "prompt",
+    "parameter",
+    "option",
+    "markup",
+    "wordasword",
+    "command",
+    "varname",
+    "function",
+    "type",
+    "symbol",
+    "constant",
+    "userinput",
+    "systemitem",
+]
+
+XMLNS_REGEX = re.compile(r'\s+xmlns(?::[^=]+)?="[^"]*"')
+ROOT_ELEMENT_REGEX = re.compile(r'^\s*<[^>]+>')
+
+def remove_xmlns(match: re.Match) -> str:
+    """
+    Removes xmlns attributes.
+
+    Expects a match containing an opening tag.
+    """
+    return XMLNS_REGEX.sub('', match.group(0))
+
+if __name__ == '__main__':
+    assert len(sys.argv) >= 3, "usage: escape-code-markup.py <input> <output>"
+
+    tree = ET.parse(sys.argv[1])
+    name_predicate = " or ".join([f"local-name()='{el}'" for el in code_elements])
+
+    for markup in tree.xpath(f"//*[({name_predicate}) and namespace-uri()='{DOCBOOK_NS}']/*"):
+        text = ET.tostring(markup, encoding=str)
+
+        # tostring adds xmlns attributes to the element we want to stringify
+        # as if it was supposed to be usable standalone.
+        # We are just converting it to CDATA so we do not care.
+        # Let’s strip the namespace declarations to keep the code clean.
+        #
+        # Note that this removes even namespaces that were potentially
+        # in the original file. Though, that should be very rare –
+        # most of the time, we will stringify empty DocBook elements
+        # like <xref> or <co> or, at worst, <link> with xlink:href attribute.
+        #
+        # Also note that the regex expects the root element to be first
+        # thing in the string. But that should be fine, the tostring method
+        # does not produce XML declaration or doctype by default.
+        text = ROOT_ELEMENT_REGEX.sub(remove_xmlns, text)
+
+        replace_element_by_text(markup, text)
+
+    tree.write(sys.argv[2])
diff --git a/maintainers/scripts/doc/replace-xrefs-by-empty-links.py b/maintainers/scripts/doc/replace-xrefs-by-empty-links.py
new file mode 100755
index 00000000000..2006ef897f7
--- /dev/null
+++ b/maintainers/scripts/doc/replace-xrefs-by-empty-links.py
@@ -0,0 +1,32 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -I nixpkgs=channel:nixos-unstable -i python3 -p python3 -p python3.pkgs.lxml
+
+"""
+Pandoc will try to resolve xrefs and replace them with regular links.
+Let's replace them with links with empty labels, which MyST
+and our pandoc filters recognize as cross-references.
+"""
+
+import lxml.etree as ET
+import sys
+
+XLINK_NS = "http://www.w3.org/1999/xlink"
+
+ns = {
+    "db": "http://docbook.org/ns/docbook",
+}
+
+
+if __name__ == '__main__':
+    assert len(sys.argv) >= 3, "usage: replace-xrefs-by-empty-links.py <input> <output>"
+
+    tree = ET.parse(sys.argv[1])
+    for xref in tree.findall(".//db:xref", ns):
+        text = ET.tostring(xref, encoding=str)
+        parent = xref.getparent()
+        link = parent.makeelement('link')
+        target_name = xref.get("linkend")
+        link.set(f"{{{XLINK_NS}}}href", f"#{target_name}")
+        parent.replace(xref, link)
+
+    tree.write(sys.argv[2])
diff --git a/maintainers/scripts/doc/unknown-code-language.lua b/maintainers/scripts/doc/unknown-code-language.lua
new file mode 100644
index 00000000000..85d8df4690b
--- /dev/null
+++ b/maintainers/scripts/doc/unknown-code-language.lua
@@ -0,0 +1,12 @@
+--[[
+Adds “unknown” class to CodeBlock AST nodes without any classes.
+
+This will cause Pandoc to use fenced code blocks, which we prefer.
+]]
+
+function CodeBlock(elem)
+  if #elem.classes == 0 then
+    elem.classes:insert('unknown')
+    return elem
+  end
+end
diff --git a/maintainers/scripts/eval-release.nix b/maintainers/scripts/eval-release.nix
new file mode 100644
index 00000000000..bb9572cbc79
--- /dev/null
+++ b/maintainers/scripts/eval-release.nix
@@ -0,0 +1,24 @@
+# Evaluate `release.nix' like Hydra would.  Too bad nix-instantiate
+# can't do this.
+
+with import ../../lib;
+
+let
+  trace = if builtins.getEnv "VERBOSE" == "1" then builtins.trace else (x: y: y);
+
+  rel = removeAttrs (import ../../pkgs/top-level/release.nix { }) [ "tarball" "unstable" "xbursttools" ];
+
+  # Add the ‘recurseForDerivations’ attribute to ensure that
+  # nix-instantiate recurses into nested attribute sets.
+  recurse = path: attrs:
+    if (builtins.tryEval attrs).success then
+      if isDerivation attrs
+      then
+        if (builtins.tryEval attrs.drvPath).success
+        then { inherit (attrs) name drvPath; }
+        else { failed = true; }
+      else { recurseForDerivations = true; } //
+           mapAttrs (n: v: let path' = path ++ [n]; in trace path' (recurse path' v)) attrs
+    else { };
+
+in recurse [] rel
diff --git a/maintainers/scripts/eval-release.sh b/maintainers/scripts/eval-release.sh
new file mode 100755
index 00000000000..e0dfaf1de74
--- /dev/null
+++ b/maintainers/scripts/eval-release.sh
@@ -0,0 +1,11 @@
+#! /bin/sh
+
+if [ -z "$VERBOSE" ]; then
+  echo "You may set VERBOSE=1 to see debug output, or set it to any other non-empty string to make this script completely silent"
+fi
+unset HOME NIXPKGS_CONFIG # Force empty config
+
+# With the default heap size (380MB), nix-instantiate fails:
+# Too many heap sections: Increase MAXHINCR or MAX_HEAP_SECTS
+export GC_INITIAL_HEAP_SIZE=${GC_INITIAL_HEAP_SIZE:-2000000000} # 2GB
+nix-instantiate --strict --eval-only --xml --show-trace "$(dirname "$0")"/eval-release.nix 2>&1 > /dev/null
diff --git a/maintainers/scripts/fetch-kde-qt.sh b/maintainers/scripts/fetch-kde-qt.sh
new file mode 100755
index 00000000000..22d78151978
--- /dev/null
+++ b/maintainers/scripts/fetch-kde-qt.sh
@@ -0,0 +1,61 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils findutils gawk gnused nix wget
+
+set -efuo pipefail
+
+SRCS=
+if [ -d "$1" ]; then
+    SRCS="$(pwd)/$1/srcs.nix"
+    . "$1/fetch.sh"
+else
+    SRCS="$(pwd)/$(dirname "$1")/srcs.nix"
+    . "$1"
+fi
+
+tmp=$(mktemp -d)
+pushd $tmp >/dev/null
+wget -nH -r -c --no-parent "${WGET_ARGS[@]}" >/dev/null
+
+csv=$(mktemp)
+find . -type f | while read src; do
+    # Sanitize file name
+    filename=$(basename "$src" | tr '@' '_')
+    nameVersion="${filename%.tar.*}"
+    name=$(echo "$nameVersion" | sed -e 's,-[[:digit:]].*,,' | sed -e 's,-opensource-src$,,' | sed -e 's,-everywhere-src$,,')
+    version=$(echo "$nameVersion" | sed -e 's,^\([[:alpha:]][[:alnum:]]*-\)\+,,')
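+    # For example (illustrative): "qtbase-everywhere-src-5.15.2.tar.xz"
+    # yields name=qtbase and version=5.15.2.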
+    echo "$name,$version,$src,$filename" >>$csv
+done
+
+cat >"$SRCS" <<EOF
+# DO NOT EDIT! This file is generated automatically.
+# Command: $0 $@
+{ fetchurl, mirror }:
+
+{
+EOF
+
+gawk -F , "{ print \$1 }" $csv | sort | uniq | while read name; do
+    versions=$(gawk -F , "/^$name,/ { print \$2 }" $csv)
+    latestVersion=$(echo "$versions" | sort -rV | head -n 1)
+    src=$(gawk -F , "/^$name,$latestVersion,/ { print \$3 }" $csv)
+    filename=$(gawk -F , "/^$name,$latestVersion,/ { print \$4 }" $csv)
+    url="${src:2}"
+    sha256=$(nix-hash --type sha256 --base32 --flat "$src")
+    cat >>"$SRCS" <<EOF
+  $name = {
+    version = "$latestVersion";
+    src = fetchurl {
+      url = "\${mirror}/$url";
+      sha256 = "$sha256";
+      name = "$filename";
+    };
+  };
+EOF
+done
+
+echo "}" >>"$SRCS"
+
+popd >/dev/null
+rm -fr $tmp >/dev/null
+
+rm -f $csv >/dev/null
diff --git a/maintainers/scripts/find-tarballs.nix b/maintainers/scripts/find-tarballs.nix
new file mode 100644
index 00000000000..990185bbb3b
--- /dev/null
+++ b/maintainers/scripts/find-tarballs.nix
@@ -0,0 +1,50 @@
+# This expression returns a list of all fetchurl calls used by ‘expr’.
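+#
+# copy-tarballs.pl evaluates it roughly like this:
+#
+#   nix-instantiate --eval --json --strict \
+#     '<nixpkgs/maintainers/scripts/find-tarballs.nix>' \
+#     --arg expr '(import <nixpkgs> {}).hello'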
+
+with import ../.. { };
+with lib;
+
+{ expr }:
+
+let
+
+  root = expr;
+
+  uniqueUrls = map (x: x.file) (genericClosure {
+    startSet = map (file: { key = file.url; inherit file; }) urls;
+    operator = const [ ];
+  });
+
+  urls = map (drv: { url = head (drv.urls or [ drv.url ]); hash = drv.outputHash; type = drv.outputHashAlgo; name = drv.name; }) fetchurlDependencies;
+
+  fetchurlDependencies =
+    filter
+      (drv: drv.outputHash or "" != "" && drv.outputHashMode or "flat" == "flat"
+          && drv.postFetch or "" == "" && (drv ? url || drv ? urls))
+      dependencies;
+
+  dependencies = map (x: x.value) (genericClosure {
+    startSet = map keyDrv (derivationsIn' root);
+    operator = { key, value }: map keyDrv (immediateDependenciesOf value);
+  });
+
+  derivationsIn' = x:
+    if !canEval x then []
+    else if isDerivation x then optional (canEval x.drvPath) x
+    else if isList x then concatLists (map derivationsIn' x)
+    else if isAttrs x then concatLists (mapAttrsToList (n: v: addErrorContext "while finding tarballs in '${n}':" (derivationsIn' v)) x)
+    else [ ];
+
+  keyDrv = drv: if canEval drv.drvPath then { key = drv.drvPath; value = drv; } else { };
+
+  immediateDependenciesOf = drv:
+    concatLists (mapAttrsToList (n: v: derivationsIn v) (removeAttrs drv (["meta" "passthru"] ++ optionals (drv?passthru) (attrNames drv.passthru))));
+
+  derivationsIn = x:
+    if !canEval x then []
+    else if isDerivation x then optional (canEval x.drvPath) x
+    else if isList x then concatLists (map derivationsIn x)
+    else [ ];
+
+  canEval = val: (builtins.tryEval val).success;
+
+in uniqueUrls
diff --git a/maintainers/scripts/haskell/dependencies.nix b/maintainers/scripts/haskell/dependencies.nix
new file mode 100644
index 00000000000..f0620902c0e
--- /dev/null
+++ b/maintainers/scripts/haskell/dependencies.nix
@@ -0,0 +1,10 @@
+# Nix script to calculate the Haskell dependencies of every haskellPackage. Used by ./hydra-report.hs.
+let
+  pkgs = import ../../.. {};
+  inherit (pkgs) lib;
+  getDeps = _: pkg: {
+    deps = builtins.filter (x: !isNull x) (map (x: x.pname or null) (pkg.propagatedBuildInputs or []));
+    broken = (pkg.meta.hydraPlatforms or [null]) == [];
+  };
+in
+  lib.mapAttrs getDeps pkgs.haskellPackages
diff --git a/maintainers/scripts/haskell/hydra-report.hs b/maintainers/scripts/haskell/hydra-report.hs
new file mode 100755
index 00000000000..360b9f2058d
--- /dev/null
+++ b/maintainers/scripts/haskell/hydra-report.hs
@@ -0,0 +1,490 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -p "haskellPackages.ghcWithPackages (p: [p.aeson p.req])"
+#! nix-shell -p hydra-unstable
+#! nix-shell -i runhaskell
+
+{-
+
+The purpose of this script is
+
+1) download the state of the nixpkgs/haskell-updates job from hydra (with get-report)
+2) print a summary of the state suitable for pasting into a github comment (with ping-maintainers)
+3) print a list of broken packages suitable for pasting into configuration-hackage2nix.yaml
+
+Because step 1) is quite expensive and takes roughly 5 minutes, the result is cached in a JSON file in XDG_CACHE.
+
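+Example invocations (run from the root of a nixpkgs checkout):
+
+  $ maintainers/scripts/haskell/hydra-report.hs get-report
+  $ maintainers/scripts/haskell/hydra-report.hs ping-maintainers
+  $ maintainers/scripts/haskell/hydra-report.hs mark-broken-list
+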
+-}
+{-# LANGUAGE BlockArguments #-}
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NamedFieldPuns #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TupleSections #-}
+{-# OPTIONS_GHC -Wall #-}
+{-# LANGUAGE ViewPatterns #-}
+
+import Control.Monad (forM_, (<=<))
+import Control.Monad.Trans (MonadIO (liftIO))
+import Data.Aeson (
+   FromJSON,
+   ToJSON,
+   decodeFileStrict',
+   eitherDecodeStrict',
+   encodeFile,
+ )
+import Data.Foldable (Foldable (toList), foldl')
+import Data.List.NonEmpty (NonEmpty, nonEmpty)
+import qualified Data.List.NonEmpty as NonEmpty
+import Data.Map.Strict (Map)
+import qualified Data.Map.Strict as Map
+import Data.Maybe (fromMaybe, mapMaybe, isNothing)
+import Data.Monoid (Sum (Sum, getSum))
+import Data.Sequence (Seq)
+import qualified Data.Sequence as Seq
+import Data.Set (Set)
+import qualified Data.Set as Set
+import Data.Text (Text)
+import qualified Data.Text as Text
+import Data.Text.Encoding (encodeUtf8)
+import Data.Time (defaultTimeLocale, formatTime, getCurrentTime)
+import Data.Time.Clock (UTCTime)
+import GHC.Generics (Generic)
+import Network.HTTP.Req (
+   GET (GET),
+   NoReqBody (NoReqBody),
+   defaultHttpConfig,
+   header,
+   https,
+   jsonResponse,
+   req,
+   responseBody,
+   responseTimeout,
+   runReq,
+   (/:),
+ )
+import System.Directory (XdgDirectory (XdgCache), getXdgDirectory)
+import System.Environment (getArgs)
+import System.Process (readProcess)
+import Prelude hiding (id)
+import Data.List (sortOn)
+import Control.Concurrent.Async (concurrently)
+import Control.Exception (evaluate)
+import qualified Data.IntMap.Strict as IntMap
+import qualified Data.IntSet as IntSet
+import Data.Bifunctor (second)
+
+newtype JobsetEvals = JobsetEvals
+   { evals :: Seq Eval
+   }
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+newtype Nixpkgs = Nixpkgs {revision :: Text}
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+newtype JobsetEvalInputs = JobsetEvalInputs {nixpkgs :: Nixpkgs}
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+data Eval = Eval
+   { id :: Int
+   , jobsetevalinputs :: JobsetEvalInputs
+   }
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+data Build = Build
+   { job :: Text
+   , buildstatus :: Maybe Int
+   , finished :: Int
+   , id :: Int
+   , nixname :: Text
+   , system :: Text
+   , jobsetevals :: Seq Int
+   }
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+main :: IO ()
+main = do
+   args <- getArgs
+   case args of
+      ["get-report"] -> getBuildReports
+      ["ping-maintainers"] -> printMaintainerPing
+      ["mark-broken-list"] -> printMarkBrokenList
+      _ -> putStrLn "Usage: get-report | ping-maintainers | mark-broken-list"
+
+reportFileName :: IO FilePath
+reportFileName = getXdgDirectory XdgCache "haskell-updates-build-report.json"
+
+showT :: Show a => a -> Text
+showT = Text.pack . show
+
+getBuildReports :: IO ()
+getBuildReports = runReq defaultHttpConfig do
+   evalMay <- Seq.lookup 0 . evals <$> myReq (https "hydra.nixos.org" /: "jobset" /: "nixpkgs" /: "haskell-updates" /: "evals") mempty
+   eval@Eval{id} <- maybe (liftIO $ fail "No Evaluation found") pure evalMay
+   liftIO . putStrLn $ "Fetching evaluation " <> show id <> " from Hydra. This might take a few minutes..."
+   buildReports :: Seq Build <- myReq (https "hydra.nixos.org" /: "eval" /: showT id /: "builds") (responseTimeout 600000000)
+   liftIO do
+      fileName <- reportFileName
+      putStrLn $ "Finished fetching all builds from Hydra, saving report as " <> fileName
+      now <- getCurrentTime
+      encodeFile fileName (eval, now, buildReports)
+  where
+   myReq query option = responseBody <$> req GET query NoReqBody jsonResponse (header "User-Agent" "hydra-report.hs/v1 (nixpkgs;maintainers/scripts/haskell)" <> option)
+
+hydraEvalCommand :: FilePath
+hydraEvalCommand = "hydra-eval-jobs"
+
+hydraEvalParams :: [String]
+hydraEvalParams = ["-I", ".", "pkgs/top-level/release-haskell.nix"]
+
+nixExprCommand :: FilePath
+nixExprCommand = "nix-instantiate"
+
+nixExprParams :: [String]
+nixExprParams = ["--eval", "--strict", "--json"]
+
+-- | This newtype is used to parse a Hydra job output from @hydra-eval-jobs@.
+-- The only field we are interested in is @maintainers@, which is why this
+-- is just a newtype.
+--
+-- Note that there are occasionally jobs that don't have a maintainers
+-- field, which is why this has to be @Maybe Text@.
+newtype Maintainers = Maintainers { maintainers :: Maybe Text }
+  deriving stock (Generic, Show)
+  deriving anyclass (FromJSON, ToJSON)
+
+-- | This is a 'Map' from Hydra job name to maintainer email addresses.
+--
+-- It has values similar to the following:
+--
+-- @@
+--  fromList
+--    [ ("arion.aarch64-linux", Maintainers (Just "robert@example.com"))
+--    , ("bench.x86_64-linux", Maintainers (Just ""))
+--    , ("conduit.x86_64-linux", Maintainers (Just "snoy@man.com, web@ber.com"))
+--    , ("lens.x86_64-darwin", Maintainers (Just "ek@category.com"))
+--    ]
+-- @@
+--
+-- Note that Hydra jobs without maintainers will have an empty string for the
+-- maintainer list.
+type HydraJobs = Map Text Maintainers
+
+-- | Map of email addresses to GitHub handles.
+-- This is built from the file @../../maintainer-list.nix@.
+--
+-- It has values similar to the following:
+--
+-- @@
+--  fromList
+--    [ ("robert@example.com", "rob22")
+--    , ("ek@category.com", "edkm")
+--    ]
+-- @@
+type EmailToGitHubHandles = Map Text Text
+
+-- | Map of Hydra jobs to maintainer GitHub handles.
+--
+-- It has values similar to the following:
+--
+-- @@
+--  fromList
+--    [ ("arion.aarch64-linux", ["rob22"])
+--    , ("conduit.x86_64-darwin", ["snoyb", "webber"])
+--    ]
+-- @@
+type MaintainerMap = Map Text (NonEmpty Text)
+
+-- | Information about a package which lists its dependencies and whether the
+-- package is marked broken.
+data DepInfo = DepInfo {
+   deps :: Set Text,
+   broken :: Bool
+}
+   deriving stock (Generic, Show)
+   deriving anyclass (FromJSON, ToJSON)
+
+-- | Map from package names to their DepInfo. This is the data we get out of a
+-- nix call.
+type DependencyMap = Map Text DepInfo
+
+-- | Map from package names to the total number of reverse dependencies (fst)
+-- and the number of unbroken reverse dependencies (snd).
+type ReverseDependencyMap = Map Text (Int, Int)
+
+-- | Calculate the (unbroken) reverse dependencies of a package by transitively
+-- going through all packages that (directly or indirectly) depend on it.
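+--
+-- A small worked example (hypothetical packages): if @b@ and @c@ both
+-- depend on @a@ and only @c@ is marked broken, then @a@ maps to
+-- @(2, 1)@: two reverse dependencies in total, one of them unbroken.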
+calculateReverseDependencies :: DependencyMap -> ReverseDependencyMap
+calculateReverseDependencies depMap = Map.fromDistinctAscList $ zip keys (zip (rdepMap False) (rdepMap True))
+ where
+    -- This code tries to efficiently invert the dependency map and calculate
+    -- its transitive closure by internally identifying every pkg with its index
+    -- in the package list and then using memoization.
+    keys = Map.keys depMap
+    pkgToIndexMap = Map.fromDistinctAscList (zip keys [0..])
+    intDeps = zip [0..] $ (\DepInfo{broken,deps} -> (broken,mapMaybe (`Map.lookup` pkgToIndexMap) $ Set.toList deps)) <$> Map.elems depMap
+    rdepMap onlyUnbroken = IntSet.size <$> resultList
+     where
+       resultList = go <$> [0..]
+       oneStepMap = IntMap.fromListWith IntSet.union $ (\(key,(_,deps)) -> (,IntSet.singleton key) <$> deps) <=< filter (\(_, (broken,_)) -> not (broken && onlyUnbroken)) $ intDeps
+       go pkg = IntSet.unions (oneStep:((resultList !!) <$> IntSet.toList oneStep))
+        where oneStep = IntMap.findWithDefault mempty pkg oneStepMap
+
+-- | Generate a mapping of Hydra job names to maintainer GitHub handles. Calls
+-- hydra-eval-jobs and the nix script ./maintainer-handles.nix.
+getMaintainerMap :: IO MaintainerMap
+getMaintainerMap = do
+   hydraJobs :: HydraJobs <-
+      readJSONProcess hydraEvalCommand hydraEvalParams "Failed to decode hydra-eval-jobs output: "
+   handlesMap :: EmailToGitHubHandles <-
+      readJSONProcess nixExprCommand ("maintainers/scripts/haskell/maintainer-handles.nix":nixExprParams) "Failed to decode nix output for lookup of github handles: "
+   pure $ Map.mapMaybe (splitMaintainersToGitHubHandles handlesMap) hydraJobs
+   where
+   -- Split a comma-separated string of Maintainers into a NonEmpty list of
+   -- GitHub handles.
+   splitMaintainersToGitHubHandles
+      :: EmailToGitHubHandles -> Maintainers -> Maybe (NonEmpty Text)
+   splitMaintainersToGitHubHandles handlesMap (Maintainers maint) =
+      nonEmpty .  mapMaybe (`Map.lookup` handlesMap) .  Text.splitOn ", " $ fromMaybe "" maint
+
+-- | Get a map of all dependencies of every package by calling the nix
+-- script ./dependencies.nix.
+getDependencyMap :: IO DependencyMap
+getDependencyMap =
+   readJSONProcess nixExprCommand ("maintainers/scripts/haskell/dependencies.nix":nixExprParams) "Failed to decode nix output for lookup of dependencies: "
+
+-- | Run a process that produces JSON on stdout and decode the JSON to a
+-- data type.
+--
+-- If the JSON-decoding fails, throw the JSON-decoding error.
+readJSONProcess
+   :: FromJSON a
+   => FilePath -- ^ Filename of executable.
+   -> [String] -- ^ Arguments
+   -> String -- ^ String to prefix to JSON-decode error.
+   -> IO a
+readJSONProcess exe args err = do
+   output <- readProcess exe args ""
+   let eitherDecodedOutput = eitherDecodeStrict' . encodeUtf8 . Text.pack $ output
+   case eitherDecodedOutput of
+     Left decodeErr -> error $ err <> decodeErr <> "\nRaw: '" <> take 1000 output <> "'"
+     Right decodedOutput -> pure decodedOutput
+
+-- BuildStates are sorted by subjective importance/concerningness
+data BuildState
+  = Failed
+  | DependencyFailed
+  | OutputLimitExceeded
+  | Unknown (Maybe Int)
+  | TimedOut
+  | Canceled
+  | HydraFailure
+  | Unfinished
+  | Success
+  deriving stock (Show, Eq, Ord)
+
+icon :: BuildState -> Text
+icon = \case
+   Failed -> ":x:"
+   DependencyFailed -> ":heavy_exclamation_mark:"
+   OutputLimitExceeded -> ":warning:"
+   Unknown x -> "unknown code " <> showT x
+   TimedOut -> ":hourglass::no_entry_sign:"
+   Canceled -> ":no_entry_sign:"
+   Unfinished -> ":hourglass_flowing_sand:"
+   HydraFailure -> ":construction:"
+   Success -> ":heavy_check_mark:"
+
+platformIcon :: Platform -> Text
+platformIcon (Platform x) = case x of
+   "x86_64-linux" -> ":penguin:"
+   "aarch64-linux" -> ":iphone:"
+   "x86_64-darwin" -> ":apple:"
+   _ -> x
+
+data BuildResult = BuildResult {state :: BuildState, id :: Int} deriving (Show, Eq, Ord)
+newtype Platform = Platform {platform :: Text} deriving (Show, Eq, Ord)
+newtype Table row col a = Table (Map (row, col) a)
+data SummaryEntry = SummaryEntry {
+   summaryBuilds :: Table Text Platform BuildResult,
+   summaryMaintainers :: Set Text,
+   summaryReverseDeps :: Int,
+   summaryUnbrokenReverseDeps :: Int
+}
+type StatusSummary = Map Text SummaryEntry
+
+instance (Ord row, Ord col, Semigroup a) => Semigroup (Table row col a) where
+   Table l <> Table r = Table (Map.unionWith (<>) l r)
+instance (Ord row, Ord col, Semigroup a) => Monoid (Table row col a) where
+   mempty = Table Map.empty
+instance Functor (Table row col) where
+   fmap f (Table a) = Table (fmap f a)
+instance Foldable (Table row col) where
+   foldMap f (Table a) = foldMap f a
+
+buildSummary :: MaintainerMap -> ReverseDependencyMap -> Seq Build -> StatusSummary
+buildSummary maintainerMap reverseDependencyMap = foldl (Map.unionWith unionSummary) Map.empty . fmap toSummary
+  where
+   unionSummary (SummaryEntry (Table lb) lm lr lu) (SummaryEntry (Table rb) rm rr ru) = SummaryEntry (Table $ Map.union lb rb) (lm <> rm) (max lr rr) (max lu ru)
+   toSummary Build{finished, buildstatus, job, id, system} = Map.singleton name (SummaryEntry (Table (Map.singleton (set, Platform system) (BuildResult state id))) maintainers reverseDeps unbrokenReverseDeps)
+     where
+      state :: BuildState
+      state = case (finished, buildstatus) of
+         (0, _) -> Unfinished
+         (_, Just 0) -> Success
+         (_, Just 1) -> Failed
+         (_, Just 2) -> DependencyFailed
+         (_, Just 3) -> HydraFailure
+         (_, Just 4) -> Canceled
+         (_, Just 7) -> TimedOut
+         (_, Just 11) -> OutputLimitExceeded
+         (_, i) -> Unknown i
+      packageName = fromMaybe job (Text.stripSuffix ("." <> system) job)
+      splitted = nonEmpty $ Text.splitOn "." packageName
+      name = maybe packageName NonEmpty.last splitted
+      set = maybe "" (Text.intercalate "." . NonEmpty.init) splitted
+      maintainers = maybe mempty (Set.fromList . toList) (Map.lookup job maintainerMap)
+      (reverseDeps, unbrokenReverseDeps) = Map.findWithDefault (0,0) name reverseDependencyMap
+
+readBuildReports :: IO (Eval, UTCTime, Seq Build)
+readBuildReports = do
+   file <- reportFileName
+   fromMaybe (error $ "Could not decode " <> file) <$> decodeFileStrict' file
+
+sep :: Text
+sep = " | "
+joinTable :: [Text] -> Text
+joinTable t = sep <> Text.intercalate sep t <> sep
+
+type NumSummary = Table Platform BuildState Int
+
+printTable :: (Ord rows, Ord cols) => Text -> (rows -> Text) -> (cols -> Text) -> (entries -> Text) -> Table rows cols entries -> [Text]
+printTable name showR showC showE (Table mapping) = joinTable <$> (name : map showC cols) : replicate (length cols + sepsInName + 1) "---" : map printRow rows
+  where
+   sepsInName = Text.count "|" name
+   printRow row = showR row : map (\col -> maybe "" showE (Map.lookup (row, col) mapping)) cols
+   rows = toList $ Set.fromList (fst <$> Map.keys mapping)
+   cols = toList $ Set.fromList (snd <$> Map.keys mapping)
+
+printJob :: Int -> Text -> (Table Text Platform BuildResult, Text) -> [Text]
+printJob evalId name (Table mapping, maintainers) =
+   if length sets <= 1
+      then map printSingleRow sets
+      else ["- [ ] " <> makeJobSearchLink "" name <> " " <> maintainers] <> map printRow sets
+  where
+   printRow set = "  - " <> printState set <> " " <> makeJobSearchLink set (if Text.null set then "toplevel" else set)
+   printSingleRow set = "- [ ] " <> printState set <> " " <> makeJobSearchLink set (makePkgName set) <> " " <> maintainers
+   makePkgName set = (if Text.null set then "" else set <> ".") <> name
+   printState set = Text.intercalate " " $ map (\pf -> maybe "" (label pf) $ Map.lookup (set, pf) mapping) platforms
+   makeJobSearchLink set linkLabel = makeSearchLink evalId linkLabel (makePkgName set)
+   sets = toList $ Set.fromList (fst <$> Map.keys mapping)
+   platforms = toList $ Set.fromList (snd <$> Map.keys mapping)
+   label pf (BuildResult s i) = "[[" <> platformIcon pf <> icon s <> "]](https://hydra.nixos.org/build/" <> showT i <> ")"
+
+makeSearchLink :: Int -> Text -> Text -> Text
+makeSearchLink evalId linkLabel query = "[" <> linkLabel <> "](" <> "https://hydra.nixos.org/eval/" <> showT evalId <> "?filter=" <> query <> ")"
+
+statusToNumSummary :: StatusSummary -> NumSummary
+statusToNumSummary = fmap getSum . foldMap (fmap Sum . jobTotals)
+
+jobTotals :: SummaryEntry -> Table Platform BuildState Int
+jobTotals (summaryBuilds -> Table mapping) = getSum <$> Table (Map.foldMapWithKey (\(_, platform) (BuildResult buildstate _) -> Map.singleton (platform, buildstate) (Sum 1)) mapping)
+
+details :: Text -> [Text] -> [Text]
+details summary content = ["<details><summary>" <> summary <> " </summary>", ""] <> content <> ["</details>", ""]
+
+printBuildSummary :: Eval -> UTCTime -> StatusSummary -> [(Text, Int)] -> Text
+printBuildSummary
+   Eval{id, jobsetevalinputs = JobsetEvalInputs{nixpkgs = Nixpkgs{revision}}}
+   fetchTime
+   summary
+   topBrokenRdeps =
+      Text.unlines $
+         headline <> [""] <> tldr <> (("  * "<>) <$> (errors <> warnings)) <> [""]
+            <> totals
+            <> optionalList "#### Maintained packages with build failure" (maintainedList fails)
+            <> optionalList "#### Maintained packages with failed dependency" (maintainedList failedDeps)
+            <> optionalList "#### Maintained packages with unknown error" (maintainedList unknownErr)
+            <> optionalHideableList "#### Unmaintained packages with build failure" (unmaintainedList fails)
+            <> optionalHideableList "#### Unmaintained packages with failed dependency" (unmaintainedList failedDeps)
+            <> optionalHideableList "#### Unmaintained packages with unknown error" (unmaintainedList unknownErr)
+            <> optionalHideableList "#### Top 50 broken packages, sorted by number of reverse dependencies" (brokenLine <$> topBrokenRdeps)
+            <> ["","*:arrow_heading_up:: The number of packages that depend (directly or indirectly) on this package (if any). If two numbers are shown the first (lower) number considers only packages which currently have enabled hydra jobs, i.e. are not marked broken. The second (higher) number considers all packages.*",""]
+            <> footer
+     where
+      footer = ["*Report generated with [maintainers/scripts/haskell/hydra-report.hs](https://github.com/NixOS/nixpkgs/blob/haskell-updates/maintainers/scripts/haskell/hydra-report.hs)*"]
+      totals =
+         [ "#### Build summary"
+         , ""
+         ]
+            <> printTable "Platform" (\x -> makeSearchLink id (platform x <> " " <> platformIcon x) ("." <> platform x)) (\x -> showT x <> " " <> icon x) showT numSummary
+      headline =
+         [ "### [haskell-updates build report from hydra](https://hydra.nixos.org/jobset/nixpkgs/haskell-updates)"
+         , "*evaluation ["
+            <> showT id
+            <> "](https://hydra.nixos.org/eval/"
+            <> showT id
+            <> ") of nixpkgs commit ["
+            <> Text.take 7 revision
+            <> "](https://github.com/NixOS/nixpkgs/commits/"
+            <> revision
+            <> ") as of "
+            <> Text.pack (formatTime defaultTimeLocale "%Y-%m-%d %H:%M UTC" fetchTime)
+            <> "*"
+         ]
+      brokenLine (name, rdeps) = "[" <> name <> "](https://packdeps.haskellers.com/reverse/" <> name <> ") :arrow_heading_up: " <> Text.pack (show rdeps) <> "  "
+      numSummary = statusToNumSummary summary
+      jobsByState predicate = Map.filter (predicate . worstState) summary
+      worstState = foldl' min Success . fmap state . summaryBuilds
+      fails = jobsByState (== Failed)
+      failedDeps = jobsByState (== DependencyFailed)
+      unknownErr = jobsByState (\x -> x > DependencyFailed && x < TimedOut)
+      withMaintainer = Map.mapMaybe (\e -> (summaryBuilds e,) <$> nonEmpty (Set.toList (summaryMaintainers e)))
+      withoutMaintainer = Map.mapMaybe (\e -> if Set.null (summaryMaintainers e) then Just e else Nothing)
+      optionalList heading list = if null list then mempty else [heading] <> list
+      optionalHideableList heading list = if null list then mempty else [heading] <> details (showT (length list) <> " job(s)") list
+      maintainedList = showMaintainedBuild <=< Map.toList . withMaintainer
+      unmaintainedList = showBuild <=< sortOn (\(snd -> x) -> (negate (summaryUnbrokenReverseDeps x), negate (summaryReverseDeps x))) . Map.toList . withoutMaintainer
+      showBuild (name, entry) = printJob id name (summaryBuilds entry, Text.pack (if summaryReverseDeps entry > 0 then " :arrow_heading_up: " <> show (summaryUnbrokenReverseDeps entry) <>" | "<> show (summaryReverseDeps entry) else ""))
+      showMaintainedBuild (name, (table, maintainers)) = printJob id name (table, Text.intercalate " " (fmap ("@" <>) (toList maintainers)))
+      tldr = case (errors, warnings) of
+               ([],[]) -> [":green_circle: **Ready to merge**"]
+               ([],_) -> [":yellow_circle: **Potential issues**"]
+               _ -> [":red_circle: **Branch not mergeable**"]
+      warnings =
+         if' (Unfinished > maybe Success worstState maintainedJob) "`maintained` jobset failed." <>
+         if' (Unfinished == maybe Success worstState mergeableJob) "`mergeable` jobset is not finished." <>
+         if' (Unfinished == maybe Success worstState maintainedJob) "`maintained` jobset is not finished."
+      errors =
+         if' (isNothing mergeableJob) "No `mergeable` job found." <>
+         if' (isNothing maintainedJob) "No `maintained` job found." <>
+         if' (Unfinished > maybe Success worstState mergeableJob) "`mergeable` jobset failed." <>
+         if' (outstandingJobs (Platform "x86_64-linux") > 100) "Too many outstanding jobs on x86_64-linux." <>
+         if' (outstandingJobs (Platform "aarch64-linux") > 100) "Too many outstanding jobs on aarch64-linux."
+      if' p e = if p then [e] else mempty
+      outstandingJobs platform | Table m <- numSummary = Map.findWithDefault 0 (platform, Unfinished) m
+      maintainedJob = Map.lookup "maintained" summary
+      mergeableJob = Map.lookup "mergeable" summary
+
+printMaintainerPing :: IO ()
+printMaintainerPing = do
+   (maintainerMap, (reverseDependencyMap, topBrokenRdeps)) <- concurrently getMaintainerMap do
+      depMap <- getDependencyMap
+      rdepMap <- evaluate . calculateReverseDependencies $ depMap
+      let tops = take 50 . sortOn (negate . snd) . fmap (second fst) . filter (\x -> maybe False broken $ Map.lookup (fst x) depMap) . Map.toList $ rdepMap
+      pure (rdepMap, tops)
+   (eval, fetchTime, buildReport) <- readBuildReports
+   putStrLn (Text.unpack (printBuildSummary eval fetchTime (buildSummary maintainerMap reverseDependencyMap buildReport) topBrokenRdeps))
+
+printMarkBrokenList :: IO ()
+printMarkBrokenList = do
+   (_, _, buildReport) <- readBuildReports
+   forM_ buildReport \Build{buildstatus, job} ->
+      case (buildstatus, Text.splitOn "." job) of
+         (Just 1, ["haskellPackages", name, "x86_64-linux"]) -> putStrLn $ "  - " <> Text.unpack name
+         _ -> pure ()
diff --git a/maintainers/scripts/haskell/maintainer-handles.nix b/maintainers/scripts/haskell/maintainer-handles.nix
new file mode 100644
index 00000000000..08c6bc4c96a
--- /dev/null
+++ b/maintainers/scripts/haskell/maintainer-handles.nix
@@ -0,0 +1,7 @@
+# Nix script to look up maintainers' GitHub handles from their email addresses. Used by ./hydra-report.hs.
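+# The result is an attribute set mapping each email address to a GitHub
+# handle, e.g. (hypothetical entry): { "alice@example.org" = "alice"; }.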
+let
+  pkgs = import ../../.. {};
+  maintainers = import ../../maintainer-list.nix;
+  inherit (pkgs) lib;
+  mkMailGithubPair = _: maintainer: if maintainer ? github then { "${maintainer.email}" = maintainer.github; } else {};
+in lib.zipAttrsWith (_: builtins.head) (lib.mapAttrsToList mkMailGithubPair maintainers)
diff --git a/maintainers/scripts/haskell/mark-broken.sh b/maintainers/scripts/haskell/mark-broken.sh
new file mode 100755
index 00000000000..97dd5be8aaa
--- /dev/null
+++ b/maintainers/scripts/haskell/mark-broken.sh
@@ -0,0 +1,47 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils git -I nixpkgs=.
+
+# This script uses the data pulled with
+# `maintainers/scripts/haskell/hydra-report.hs get-report` to produce a list
+# of failing builds, which is written to the hackage2nix config. Then
+# hackage-packages.nix is regenerated and transitively broken packages are
+# marked as dont-distribute in the config as well.
+# This should disable builds for most failing jobs in the haskell-updates jobset.
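+#
+# Example (run from the root of a nixpkgs checkout, after fetching the report):
+#   maintainers/scripts/haskell/hydra-report.hs get-report
+#   maintainers/scripts/haskell/mark-broken.sh --do-commit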
+
+set -euo pipefail
+
+broken_config="pkgs/development/haskell-modules/configuration-hackage2nix/broken.yaml"
+
+tmpfile=$(mktemp)
+trap "rm ${tmpfile}" 0
+
+echo "Remember that you need to manually run 'maintainers/scripts/haskell/hydra-report.hs get-report' sometime before running this script."
+echo "Generating a list of broken builds and displaying for manual confirmation ..."
+maintainers/scripts/haskell/hydra-report.hs mark-broken-list | sort -i > "$tmpfile"
+
+$EDITOR "$tmpfile"
+
+tail -n +3 "$broken_config" >> "$tmpfile"
+
+cat > "$broken_config" << EOF
+broken-packages:
+  # These packages don't compile.
+EOF
+
+sort -iu "$tmpfile" >> "$broken_config"
+
+# Clear the environment so that user configuration (e.g. a NIXPKGS_CONFIG
+# allowing broken packages) cannot influence the regeneration scripts below.
+clear="env -u HOME -u NIXPKGS_CONFIG"
+$clear maintainers/scripts/haskell/regenerate-hackage-packages.sh
+$clear maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
+$clear maintainers/scripts/haskell/regenerate-hackage-packages.sh
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add $broken_config
+git add pkgs/development/haskell-modules/configuration-hackage2nix/transitive-broken.yaml
+git add pkgs/development/haskell-modules/hackage-packages.nix
+git commit -F - << EOF
+haskellPackages: mark builds failing on hydra as broken
+
+This commit has been generated by maintainers/scripts/haskell/mark-broken.sh
+EOF
+fi
diff --git a/maintainers/scripts/haskell/merge-and-open-pr.sh b/maintainers/scripts/haskell/merge-and-open-pr.sh
new file mode 100755
index 00000000000..18db1da0f2a
--- /dev/null
+++ b/maintainers/scripts/haskell/merge-and-open-pr.sh
@@ -0,0 +1,122 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p git gh -I nixpkgs=.
+#
+# Script to merge the currently open haskell-updates PR into master, bump the
+# Stackage version and Hackage versions, and open the next haskell-updates PR.
+
+set -eu -o pipefail
+
+# Print the first argument as an error message, then exit.
+function die {
+  echo "ERROR: $1"
+  echo "Aborting!"
+
+  exit 1
+}
+
+function help {
+  echo "Usage: $0 HASKELL_UPDATES_PR_NUM"
+  echo "Merge the currently open haskell-updates PR into master, and open the next one."
+  echo
+  echo "  -h, --help                print this help"
+  echo "  HASKELL_UPDATES_PR_NUM    number of the currently open PR on NixOS/nixpkgs"
+  echo "                            for the haskell-updates branch"
+  echo
+  echo "Example:"
+  echo "  \$ $0 137340"
+
+  exit 1
+}
+
+# Read in the current haskell-updates PR number from the command line.
+while [[ $# -gt 0 ]]; do
+  key="$1"
+
+  case $key in
+    -h|--help)
+      help
+      ;;
+    *)
+      curr_haskell_updates_pr_num="$1"
+      shift
+      ;;
+  esac
+done
+
+if [[ -z "${curr_haskell_updates_pr_num-}" ]] ; then
+  die "You must pass the current haskell-updates PR number as the first argument to this script."
+fi
+
+# Make sure gh authentication is set up.
+if ! gh auth status 2>/dev/null ; then
+  die "You must set up the \`gh\` command. Run \`gh auth login\`."
+fi
+
+# Fetch nixpkgs to get an up-to-date origin/haskell-updates branch.
+echo "Fetching origin..."
+git fetch origin >/dev/null
+
+# Make sure we are currently on a local haskell-updates branch.
+curr_branch="$(git rev-parse --abbrev-ref HEAD)"
+if [[ "$curr_branch" != "haskell-updates" ]]; then
+    die "Current branch is not called \"haskell-updates\"."
+fi
+
+# Make sure our local haskell-updates branch is on the same commit as
+# origin/haskell-updates.
+curr_branch_commit="$(git rev-parse haskell-updates)"
+origin_haskell_updates_commit="$(git rev-parse origin/haskell-updates)"
+if [[ "$curr_branch_commit" != "$origin_haskell_updates_commit" ]]; then
+    die "Current branch is not at the same commit as origin/haskell-updates"
+fi
+
+# Merge the current open haskell-updates PR.
+echo "Merging https://github.com/NixOS/nixpkgs/pull/${curr_haskell_updates_pr_num}..."
+gh pr merge --repo NixOS/nixpkgs --merge "$curr_haskell_updates_pr_num"
+
+# Update the list of Haskell package versions in NixOS on Hackage.
+echo "Updating list of Haskell package versions in NixOS on Hackage..."
+./maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh
+
+# Update stackage, Hackage hashes, and regenerate Haskell package set
+echo "Updating Stackage..."
+./maintainers/scripts/haskell/update-stackage.sh --do-commit
+echo "Updating Hackage hashes..."
+./maintainers/scripts/haskell/update-hackage.sh --do-commit
+echo "Regenerating Hackage packages..."
+./maintainers/scripts/haskell/regenerate-hackage-packages.sh --do-commit
+
+# Push these new commits to the haskell-updates branch
+echo "Pushing commits just created to the remote haskell-updates branch..."
+git push
+
+# Open new PR
+new_pr_body=$(cat <<EOF
+### This Merge
+
+This PR is the regular merge of the \`haskell-updates\` branch into \`master\`.
+
+This branch is being continually built and tested by hydra at https://hydra.nixos.org/jobset/nixpkgs/haskell-updates. You may be able to find an up-to-date Hydra build report at [cdepillabout/nix-haskell-updates-status](https://github.com/cdepillabout/nix-haskell-updates-status).
+
+We roughly aim to merge these \`haskell-updates\` PRs at least once every two weeks. See the @NixOS/haskell [team calendar](https://cloud.maralorn.de/apps/calendar/p/Mw5WLnzsP7fC4Zky) for who is currently in charge of this branch.
+
+### haskellPackages Workflow Summary
+
+Our workflow is currently described in [\`pkgs/development/haskell-modules/HACKING.md\`](https://github.com/NixOS/nixpkgs/blob/haskell-updates/pkgs/development/haskell-modules/HACKING.md).
+
+The short version is this:
+* We regularly update the Stackage and Hackage pins on \`haskell-updates\` (normally at the beginning of a merge window).
+* The community fixes builds of Haskell packages on that branch.
+* We aim for at least one merge of \`haskell-updates\` into \`master\` every two weeks.
+* We only do the merge if the [\`mergeable\`](https://hydra.nixos.org/job/nixpkgs/haskell-updates/mergeable) job is succeeding on hydra.
+* If a [\`maintained\`](https://hydra.nixos.org/job/nixpkgs/haskell-updates/maintained) package is still broken at the time of merge, we will only merge if the maintainer has been pinged 7 days in advance. (If you care about a Haskell package, become a maintainer!)
+
+---
+
+This is the follow-up to #${curr_haskell_updates_pr_num}. Come to [#haskell:nixos.org](https://matrix.to/#/#haskell:nixos.org) if you have any questions.
+EOF
+)
+
+echo "Opening a PR for the next haskell-updates merge cycle..."
+gh pr create --repo NixOS/nixpkgs --base master --head haskell-updates --title "haskellPackages: update stackage and hackage" --body "$new_pr_body"
diff --git a/maintainers/scripts/haskell/regenerate-hackage-packages.sh b/maintainers/scripts/haskell/regenerate-hackage-packages.sh
new file mode 100755
index 00000000000..9d51eb4ca4a
--- /dev/null
+++ b/maintainers/scripts/haskell/regenerate-hackage-packages.sh
@@ -0,0 +1,46 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils haskellPackages.cabal2nix-unstable git nix -I nixpkgs=.
+
+# This script is used to regenerate nixpkgs' Haskell package set, using the
+# tool hackage2nix from the nixos/cabal2nix repo. hackage2nix looks at the
+# config files in pkgs/development/haskell-modules/configuration-hackage2nix
+# and generates a Nix expression for the package versions specified there, using the
+# Cabal files from the Hackage database (available under all-cabal-hashes) and
+# its companion tool cabal2nix.
+#
+# Related scripts are update-hackage.sh, for updating the snapshot of the
+# Hackage database used by hackage2nix, and update-cabal2nix-unstable.sh,
+# for updating the version of hackage2nix used to perform this task.
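+#
+# Example (run from the root of a nixpkgs checkout):
+#   maintainers/scripts/haskell/regenerate-hackage-packages.sh --do-commit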
+
+set -euo pipefail
+
+HACKAGE2NIX="${HACKAGE2NIX:-hackage2nix}"
+
+# Prevent hackage2nix from failing because of encoding issues.
+# See: https://github.com/NixOS/nixpkgs/pull/122023
+export LC_ALL=C.UTF-8
+
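+# Build an unpacked copy of the all-cabal-hashes snapshot so that hackage2nix
+# can read the Cabal files and preferred-versions files directly from disk.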
+extraction_derivation='with import ./. {}; runCommandLocal "unpacked-cabal-hashes" { } "tar xf ${all-cabal-hashes} --strip-components=1 --one-top-level=$out"'
+unpacked_hackage="$(nix-build -E "$extraction_derivation" --no-out-link)"
+config_dir=pkgs/development/haskell-modules/configuration-hackage2nix
+
+echo "Starting hackage2nix to regenerate pkgs/development/haskell-modules/hackage-packages.nix ..."
+"$HACKAGE2NIX" \
+   --hackage "$unpacked_hackage" \
+   --preferred-versions <(for n in "$unpacked_hackage"/*/preferred-versions; do cat "$n"; echo; done) \
+   --nixpkgs "$PWD" \
+   --config "$config_dir/main.yaml" \
+   --config "$config_dir/stackage.yaml" \
+   --config "$config_dir/broken.yaml" \
+   --config "$config_dir/transitive-broken.yaml"
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add pkgs/development/haskell-modules/hackage-packages.nix
+git commit -F - << EOF
+haskellPackages: regenerate package set based on current config
+
+This commit has been generated by maintainers/scripts/haskell/regenerate-hackage-packages.sh
+EOF
+fi
+
+echo "Regeneration of hackage-packages.nix finished."
diff --git a/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh b/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
new file mode 100755
index 00000000000..94104e00edb
--- /dev/null
+++ b/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
@@ -0,0 +1,15 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils nix gnused -I nixpkgs=.
+
+config_file=pkgs/development/haskell-modules/configuration-hackage2nix/transitive-broken.yaml
+
+cat > $config_file << EOF
+# This file is automatically generated by
+# maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
+# It is supposed to list all haskellPackages that cannot evaluate because they
+# depend on a dependency marked as broken.
+dont-distribute-packages:
+EOF
+
+echo "Regenerating list of transitive broken packages ..."
+echo -e $(nix-instantiate --eval --strict maintainers/scripts/haskell/transitive-broken-packages.nix) | sed 's/\"//' | LC_ALL=C.UTF-8 sort -i >> $config_file
diff --git a/maintainers/scripts/haskell/test-configurations.nix b/maintainers/scripts/haskell/test-configurations.nix
new file mode 100644
index 00000000000..12287896b50
--- /dev/null
+++ b/maintainers/scripts/haskell/test-configurations.nix
@@ -0,0 +1,136 @@
+/* Nix expression to test for regressions in the Haskell configuration overlays.
+
+   test-configurations.nix determines all attributes touched by given Haskell
+   configuration overlays (i.e. pkgs/development/haskell-modules/configuration-*.nix)
+   and builds all derivations (or at least a reasonable subset) affected by
+   these overrides.
+
+   By default, it checks `configuration-{common,nix,ghc-8.10.x}.nix`. You can
+   invoke it like this:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix --keep-going
+
+   It is possible to specify other configurations:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --arg files '[ "configuration-ghc-9.0.x.nix" "configuration-ghc-9.2.x.nix" ]' \
+       --keep-going
+
+   You can also just supply a single string:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --argstr files "configuration-arm.nix" --keep-going
+
+   You can even supply full paths, which is handy as it allows tab-completing
+   the configurations:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --argstr files pkgs/development/haskell-modules/configuration-arm.nix \
+       --keep-going
+
+   By default, derivations that fail to evaluate are skipped, unless they are
+   “just” marked as broken. You can check for other eval errors like this:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --arg skipEvalErrors false --keep-going
+
+   You can also disable checking broken packages by passing a nixpkgs config:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --arg config '{ allowBroken = false; }' --keep-going
+
+   By default the haskell.packages.ghc*Binary sets used for bootstrapping GHC
+   are _not_ tested. You can change this using:
+
+     nix-build maintainers/scripts/haskell/test-configurations.nix \
+       --arg skipBinaryGHCs false --keep-going
+
+*/
+{ files ? [
+    "configuration-common.nix"
+    "configuration-nix.nix"
+    "configuration-ghc-8.10.x.nix"
+  ]
+, nixpkgsPath ? ../../..
+, config ? { allowBroken = true; }
+, skipEvalErrors ? true
+, skipBinaryGHCs ? true
+}:
+
+let
+  pkgs = import nixpkgsPath { inherit config; };
+  inherit (pkgs) lib;
+
+  # see usage explanation for the input format `files` allows
+  files' = builtins.map builtins.baseNameOf (
+    if !builtins.isList files then [ files ] else files
+  );
+
+  setsForFile = fileName:
+    let
+      # extract the unique part of the config's file name
+      configName = builtins.head (
+        builtins.match "configuration-(.+).nix" fileName
+      );
+      # match the major and minor version of the GHC the config is intended for, if any
+      configVersion = lib.concatStrings (
+        builtins.match "ghc-([0-9]+).([0-9]+).x" configName
+      );
+      # return all package sets under haskell.packages matching the version components
+      setsForVersion = builtins.map (name: pkgs.haskell.packages.${name}) (
+        builtins.filter (setName:
+          lib.hasPrefix "ghc${configVersion}" setName
+          && (skipBinaryGHCs -> !(lib.hasInfix "Binary" setName))
+        ) (
+          builtins.attrNames pkgs.haskell.packages
+        )
+      );
+
+      defaultSets = [ pkgs.haskellPackages ];
+    in {
+      # use plain haskellPackages for the version-agnostic files
+      # TODO(@sternenseemann): also consider currently selected versioned sets
+      "common" = defaultSets;
+      "nix" = defaultSets;
+      "arm" = defaultSets;
+      "darwin" = defaultSets;
+    }.${configName} or setsForVersion;
+
+  # attribute set that has all the attributes of haskellPackages set to null
+  availableHaskellPackages = builtins.listToAttrs (
+    builtins.map (attr: lib.nameValuePair attr null) (
+      builtins.attrNames pkgs.haskellPackages
+    )
+  );
+
+  # evaluate a configuration and only return the attributes changed by it,
+  # pass availableHaskellPackages as super in case intersectAttrs is used
+  overriddenAttrs = fileName: builtins.attrNames (
+    lib.fix (self:
+      import (nixpkgsPath + "/pkgs/development/haskell-modules/${fileName}") {
+        haskellLib = pkgs.haskell.lib.compose;
+        inherit pkgs;
+      } self availableHaskellPackages
+    )
+  );
+
+  # list of derivations that are affected by overrides in the given configuration
+  # overlays. For common, nix, darwin etc. only the derivation from the default
+  # package set will be emitted.
+  packages = builtins.filter (v:
+    lib.warnIf (v.meta.broken or false) "${v.pname} is marked as broken" (
+      v != null
+      && (skipEvalErrors -> (builtins.tryEval (v.outPath or v)).success)
+    )
+  ) (
+    lib.concatMap (fileName:
+      let
+        sets = setsForFile fileName;
+        attrs = overriddenAttrs fileName;
+      in
+        lib.concatMap (set: builtins.map (attr: set.${attr}) attrs) sets
+    ) files'
+  );
+in
+
+packages
diff --git a/maintainers/scripts/haskell/transitive-broken-packages.nix b/maintainers/scripts/haskell/transitive-broken-packages.nix
new file mode 100644
index 00000000000..d4ddaa95765
--- /dev/null
+++ b/maintainers/scripts/haskell/transitive-broken-packages.nix
@@ -0,0 +1,16 @@
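+# Evaluates to a YAML fragment listing the haskellPackages that evaluate with
+# `config.allowBroken = true` but not without it, i.e. packages depending
+# (transitively) on a package marked as broken.
+# Used by ./regenerate-transitive-broken-packages.sh.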
+let
+  nixpkgs = import ../../..;
+  inherit (nixpkgs {}) pkgs lib;
+  getEvaluating = x:
+    builtins.attrNames (
+      lib.filterAttrs (
+        _: v: (builtins.tryEval (v.outPath or null)).success && lib.isDerivation v && !v.meta.broken
+      ) x
+    );
+  brokenDeps = lib.subtractLists
+    (getEvaluating pkgs.haskellPackages)
+    (getEvaluating (nixpkgs { config.allowBroken = true; }).haskellPackages);
+in
+''
+  ${lib.concatMapStringsSep "\n" (x: "  - ${x}") brokenDeps}
+''
diff --git a/maintainers/scripts/haskell/update-cabal2nix-unstable.sh b/maintainers/scripts/haskell/update-cabal2nix-unstable.sh
new file mode 100755
index 00000000000..41583704560
--- /dev/null
+++ b/maintainers/scripts/haskell/update-cabal2nix-unstable.sh
@@ -0,0 +1,17 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils curl jq gnused haskellPackages.cabal2nix-unstable -I nixpkgs=.
+
+# Updates cabal2nix-unstable to the latest master of the nixos/cabal2nix repository.
+# See regenerate-hackage-packages.sh for details on the purpose of this script.
+
+set -euo pipefail
+
+# fetch current master HEAD from GitHub
+head_info="$(curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/NixOS/cabal2nix/branches/master)"
+# extract commit hash
+commit="$(jq -r .commit.sha <<< "$head_info")"
+# extract commit timestamp and convert to date
+date="$(date "--date=$(jq -r .commit.commit.committer.date <<< "$head_info")" +%F)"
+# generate nix expression from cabal file, replacing the version with the commit date
+echo '# This file defines cabal2nix-unstable, used by maintainers/scripts/haskell/regenerate-hackage-packages.sh.' > pkgs/development/haskell-modules/cabal2nix-unstable.nix
+cabal2nix "https://github.com/NixOS/cabal2nix/archive/$commit.tar.gz" | sed -e 's/version = ".*"/version = "'"unstable-$date"'"/' >> pkgs/development/haskell-modules/cabal2nix-unstable.nix
diff --git a/maintainers/scripts/haskell/update-hackage.sh b/maintainers/scripts/haskell/update-hackage.sh
new file mode 100755
index 00000000000..a7cfecbbb0f
--- /dev/null
+++ b/maintainers/scripts/haskell/update-hackage.sh
@@ -0,0 +1,35 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix curl jq nix-prefetch-github git gnused -I nixpkgs=.
+
+# See regenerate-hackage-packages.sh for details on the purpose of this script.
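+#
+# The pin file updated below has, roughly, this shape (hypothetical values):
+#   { "commit": "<all-cabal-hashes sha>", "url": "<tarball url>",
+#     "sha256": "<hash>", "msg": "Update from Hackage at <date>" }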
+
+set -euo pipefail
+
+pin_file=pkgs/data/misc/hackage/pin.json
+current_commit="$(jq -r .commit $pin_file)"
+old_date="$(jq -r .msg $pin_file | sed 's/Update from Hackage at //')"
+git_info="$(curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/commercialhaskell/all-cabal-hashes/branches/hackage)"
+head_commit="$(echo "$git_info" | jq -r .commit.sha)"
+commit_msg="$(echo "$git_info" | jq -r .commit.commit.message)"
+new_date="$(echo "$commit_msg" | sed 's/Update from Hackage at //')"
+
+if [ "$current_commit" != "$head_commit" ]; then
+   url="https://github.com/commercialhaskell/all-cabal-hashes/archive/$head_commit.tar.gz"
+   hash="$(nix-prefetch-url "$url")"
+   jq -n \
+     --arg commit "$head_commit" \
+     --arg hash "$hash" \
+     --arg url "$url" \
+     --arg commit_msg "$commit_msg" \
+     '{commit: $commit, url: $url, sha256: $hash, msg: $commit_msg}' \
+     > $pin_file
+fi
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add pkgs/data/misc/hackage/pin.json
+git commit -F - << EOF
+all-cabal-hashes: $old_date -> $new_date
+
+This commit has been generated by maintainers/scripts/haskell/update-hackage.sh
+EOF
+fi
diff --git a/maintainers/scripts/haskell/update-stackage.sh b/maintainers/scripts/haskell/update-stackage.sh
new file mode 100755
index 00000000000..ecf38dc4b90
--- /dev/null
+++ b/maintainers/scripts/haskell/update-stackage.sh
@@ -0,0 +1,57 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix curl jq nix-prefetch-github git gnused gnugrep -I nixpkgs=.
+
+set -eu -o pipefail
+
+tmpfile=$(mktemp "update-stackage.XXXXXXX")
+# shellcheck disable=SC2064
+
+stackage_config="pkgs/development/haskell-modules/configuration-hackage2nix/stackage.yaml"
+
+trap "rm ${tmpfile} ${tmpfile}.new" 0
+touch "$tmpfile" "$tmpfile.new" # Creating files here so that trap creates no errors.
+
+curl -L -s "https://stackage.org/lts/cabal.config" >"$tmpfile"
+old_version=$(grep "# Stackage" $stackage_config | sed -E 's/.*([0-9]{2}\.[0-9]+)/\1/')
+version=$(sed -rn "s/^--.*http:..(www.)?stackage.org.snapshot.lts-//p" "$tmpfile")
+
+if [[ "$old_version" == "$version" ]]; then
+   echo "No new stackage version"
+   exit 0 # Nothing to do
+fi
+
+echo "Updating Stackage LTS from $old_version to $version."
+
+# Create a simple yaml version of the file.
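+# For example (hypothetical package), the constraint line
+# "             foo ==1.2.3," becomes "  - foo ==1.2.3".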
+sed -r \
+    -e '/^--/d' \
+    -e 's|^constraints:||' \
+    -e 's|^ +|  - |' \
+    -e 's|,$||' \
+    -e '/installed$/d' \
+    -e '/^$/d' \
+    < "${tmpfile}" | sort --ignore-case >"${tmpfile}.new"
+
+cat > $stackage_config << EOF
+# Stackage LTS $version
+# This file is auto-generated by
+# maintainers/scripts/haskell/update-stackage.sh
+default-package-overrides:
+EOF
+
+# Drop restrictions on some tools where we always want the latest version.
+sed -r \
+    -e '/ cabal2nix /d' \
+    -e '/ distribution-nixpkgs /d' \
+    -e '/ jailbreak-cabal /d' \
+    -e '/ language-nix /d' \
+    < "${tmpfile}.new" >> $stackage_config
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add $stackage_config
+git commit -F - << EOF
+haskellPackages: stackage-lts $old_version -> $version
+
+This commit has been generated by maintainers/scripts/haskell/update-stackage.sh
+EOF
+fi
diff --git a/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh b/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh
new file mode 100755
index 00000000000..8c39d289f7a
--- /dev/null
+++ b/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh
@@ -0,0 +1,22 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix curl gnused -I nixpkgs=.
+
+# On Hackage every package description shows a category "Distributions" which
+# lists a "NixOS" version.
+# This script uploads a CSV to Hackage which updates the displayed versions
+# based on the current versions in nixpkgs. This happens with a simple HTTP
+# request.
+
+# For authorization you just need any valid Hackage account. This script uses
+# the `username` and `password-command` fields from your ~/.cabal/config file.
+
+# e.g. username: maralorn
+#      password-command: pass hackage.haskell.org
+# (password-command can be any command, but not an arbitrary shell expression;
+# like cabal, we only read the first line of its output and ignore the rest.)
+# Those fields are specified under `upload` on the `cabal` man page.
+
+package_list="$(nix-build -A haskell.package-list)/nixos-hackage-packages.csv"
+username=$(grep "^username:" ~/.cabal/config | sed "s/^username: //")
+password_command=$(grep "^password-command:" ~/.cabal/config | sed "s/^password-command: //")
+curl -u "$username:$($password_command | head -n1)" --digest -H "Content-type: text/csv" -T "$package_list" http://hackage.haskell.org/distro/NixOS/packages.csv
+echo
diff --git a/maintainers/scripts/hydra-eval-failures.py b/maintainers/scripts/hydra-eval-failures.py
new file mode 100755
index 00000000000..b7518b12857
--- /dev/null
+++ b/maintainers/scripts/hydra-eval-failures.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i python3 -p "python3.withPackages(ps: with ps; [ requests pyquery click ])"
+
+# To use, just execute this script with --help to display help.
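+#
+# Example:
+#   ./hydra-eval-failures.py --jobset nixos/release-19.09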
+
+import subprocess
+import json
+import sys
+
+import click
+import requests
+from pyquery import PyQuery as pq
+
+def map_dict(f, d):
+    # Apply f to every value of d in place, and return d.
+    for k, v in d.items():
+        d[k] = f(v)
+    return d
+
+maintainers_json = subprocess.check_output([
+    'nix-instantiate', '-A', 'lib.maintainers', '--eval', '--strict', '--json'
+])
+maintainers = json.loads(maintainers_json)
+MAINTAINERS = map_dict(lambda v: v.get('github', None), maintainers)
+
+def get_response_text(url):
+    return pq(requests.get(url).text)  # IO
+
+EVAL_FILE = {
+    'nixos': 'nixos/release.nix',
+    'nixpkgs': 'pkgs/top-level/release.nix',
+}
+
+
+def get_maintainers(attr_name):
+    try:
+        nixname = attr_name.split('.')
+        meta_json = subprocess.check_output([
+            'nix-instantiate',
+            '--eval',
+            '--strict',
+            '-A',
+            '.'.join(nixname[1:]) + '.meta',
+            EVAL_FILE[nixname[0]],
+            '--arg',
+            'nixpkgs',
+            './.',
+            '--json'])
+        meta = json.loads(meta_json)
+        return meta.get('maintainers', [])
+    except Exception:
+        return []
+
+def filter_github_users(maintainers):
+    github_only = []
+    for i in maintainers:
+        if i.get('github'):
+            github_only.append(i)
+    return github_only
+
+def print_build(table_row):
+    a = pq(table_row)('a')[1]
+    print("- [ ] [{}]({})".format(a.text, a.get('href')), flush=True)
+
+    job_maintainers = filter_github_users(get_maintainers(a.text))
+    if job_maintainers:
+        print("  - maintainers: {}".format(" ".join(map(lambda u: '@' + u.get('github'), job_maintainers))))
+    # TODO: print last three persons that touched this file
+    # TODO: pinpoint the diff that broke this build, or maybe it's transient or maybe it never worked?
+
+    sys.stdout.flush()
+
+@click.command()
+@click.option(
+    '--jobset',
+    default="nixos/release-19.09",
+    help='Hydra project like nixos/release-19.09')
+def cli(jobset):
+    """
+    Given a Hydra project, inspect latest evaluation
+    and print a summary of failed builds
+    """
+
+    url = "https://hydra.nixos.org/jobset/{}".format(jobset)
+
+    # get the last evaluation
+    click.echo(click.style(
+        'Getting latest evaluation for {}'.format(url), fg='green'))
+    d = get_response_text(url)
+    evaluations = d('#tabs-evaluations').find('a[class="row-link"]')
+    latest_eval_url = evaluations[0].get('href')
+
+    # parse last evaluation page
+    click.echo(click.style(
+        'Parsing evaluation {}'.format(latest_eval_url), fg='green'))
+    d = get_response_text(latest_eval_url + '?full=1')
+
+    # TODO: aborted evaluations
+    # TODO: dependency failed without propagated builds
+    print('\nFailures:')
+    for tr in d('img[alt="Failed"]').parents('tr'):
+        print_build(tr)
+
+    print('\nDependency failures:')
+    for tr in d('img[alt="Dependency failed"]').parents('tr'):
+        print_build(tr)
+
+
+
+if __name__ == "__main__":
+    try:
+        cli()
+    except Exception:
+        # Drop into the post-mortem debugger on any unexpected error.
+        import pdb; pdb.post_mortem()
diff --git a/maintainers/scripts/hydra_eval_check b/maintainers/scripts/hydra_eval_check
new file mode 100755
index 00000000000..c8e03424f32
--- /dev/null
+++ b/maintainers/scripts/hydra_eval_check
@@ -0,0 +1,13 @@
+#! /bin/sh
+
+# Give the absolute path of release.nix as the argument.
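+# Example:
+#   hydra_eval_check /path/to/nixpkgs/pkgs/top-level/release.nix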
+hydra_eval_jobs \
+  --argstr system x86_64-linux \
+  --argstr system i686-linux \
+  --argstr system x86_64-darwin \
+  --argstr system i686-cygwin \
+  --argstr system x86_64-cygwin \
+  --argstr system i686-freebsd \
+  --arg officialRelease false \
+  --arg nixpkgs "{ outPath = builtins.storePath ./. ; rev = 1234; }" \
+  "$@"
diff --git a/maintainers/scripts/luarocks-config.lua b/maintainers/scripts/luarocks-config.lua
new file mode 100644
index 00000000000..89e74c00ea8
--- /dev/null
+++ b/maintainers/scripts/luarocks-config.lua
@@ -0,0 +1,4 @@
+rocks_servers = {
+	"https://luarocks.org"
+}
+version_check_on_fail = false
diff --git a/maintainers/scripts/luarocks-packages.csv b/maintainers/scripts/luarocks-packages.csv
new file mode 100644
index 00000000000..d69546cdf07
--- /dev/null
+++ b/maintainers/scripts/luarocks-packages.csv
@@ -0,0 +1,86 @@
+name,src,ref,server,version,luaversion,maintainers
+alt-getopt,,,,,,arobyn
+bit32,,,,5.3.0-1,lua5_1,lblasc
+argparse,https://github.com/luarocks/argparse.git,,,,,
+basexx,https://github.com/teto/basexx.git,,,,,
+binaryheap,https://github.com/Tieske/binaryheap.lua,,,,,vcunat
+busted,,,,,,
+cassowary,,,,,,marsam alerque
+compat53,,,,0.7-1,,vcunat
+cosmo,,,,,,marsam
+coxpcall,,,,1.17.0-1,,
+cqueues,,,,,,vcunat
+cyrussasl,https://github.com/JorjBauer/lua-cyrussasl.git,,,,,
+digestif,https://github.com/astoff/digestif.git,,,0.2-1,lua5_3,
+dkjson,,,,,,
+fifo,,,,,,
+gitsigns.nvim,https://github.com/lewis6991/gitsigns.nvim.git,,,,lua5_1,
+http,,,,0.3-0,,vcunat
+inspect,,,,,,
+ldbus,,,http://luarocks.org/dev,,,
+ldoc,https://github.com/stevedonovan/LDoc.git,,,,,
+lgi,,,,,,
+linenoise,https://github.com/hoelzro/lua-linenoise.git,,,,,
+ljsyscall,,,,,lua5_1,lblasc
+lpeg,,,,,,vyp
+lpeg_patterns,,,,,,
+lpeglabel,,,,,,
+lpty,,,,,,
+lrexlib-gnu,,,,,,
+lrexlib-pcre,,,,,,vyp
+lrexlib-posix,,,,,,
+lua-cjson,,,,,,
+lua-cmsgpack,,,,,,
+lua-iconv,,,,,,
+lua-lsp,,,,,,
+lua-messagepack,,,,,,
+lua-resty-http,,,,,,
+lua-resty-jwt,,,,,,
+lua-resty-openidc,,,,,,
+lua-resty-openssl,,,,,,
+lua-resty-session,,,,,,
+lua-term,,,,,,
+lua-toml,,,,,,
+lua-zlib,,,,,,koral
+lua_cliargs,https://github.com/amireh/lua_cliargs.git,,,,,
+luabitop,https://github.com/teto/luabitop.git,,,,,
+luacheck,,,,,,
+luacov,,,,,,
+luadbi,,,,,,
+luadbi-mysql,,,,,,
+luadbi-postgresql,,,,,,
+luadbi-sqlite3,,,,,,
+luaepnf,,,,,,
+luaevent,,,,,,
+luaexpat,,,,1.3.0-1,,arobyn flosse
+luaffi,,,http://luarocks.org/dev,,,
+luafilesystem,,,,1.7.0-2,,flosse
+lualogging,,,,,,
+luaossl,,,,,lua5_1,
+luaposix,,,,34.1.1-1,,vyp lblasc
+luarepl,,,,,,
+luasec,,,,,,flosse
+luasocket,,,,,,
+luasql-sqlite3,,,,,,vyp
+luassert,,,,,,
+luasystem,,,,,,
+luautf8,,,,,,pstn
+luazip,,,,,,
+lua-yajl,,,,,,pstn
+luuid,,,,,,
+luv,,,,1.43.0-0,,
+lyaml,,,,,,lblasc
+markdown,,,,,,
+mediator_lua,,,,,,
+mpack,,,,,,
+moonscript,https://github.com/leafo/moonscript.git,dev-1,,,,arobyn
+nvim-client,https://github.com/neovim/lua-client.git,,,,,
+penlight,https://github.com/lunarmodules/Penlight.git,,,,,alerque
+plenary.nvim,https://github.com/nvim-lua/plenary.nvim.git,,,,lua5_1,
+rapidjson,https://github.com/xpol/lua-rapidjson.git,,,,,
+readline,,,,,,
+say,https://github.com/Olivine-Labs/say.git,,,,,
+std._debug,https://github.com/lua-stdlib/_debug.git,,,,,
+std.normalize,git://github.com/lua-stdlib/normalize.git,,,,,
+stdlib,,,,41.2.2,,vyp
+vstruct,https://github.com/ToxicFrog/vstruct.git,,,,,
diff --git a/maintainers/scripts/nix-call-package b/maintainers/scripts/nix-call-package
new file mode 100755
index 00000000000..be478fca2b7
--- /dev/null
+++ b/maintainers/scripts/nix-call-package
@@ -0,0 +1,5 @@
+#! /bin/sh
+
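+# Rough usage (inferred from the expression below; only FILE is required):
+#   nix-call-package FILE [NIXPKGS_SUFFIX] [ARGS] [EXPR]
+# builds x = pkgs.callPackage FILE { ARGS } against <nixpkgs${NIXPKGS_SUFFIX}>,
+# or the expression EXPR, which may refer to x (the default).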
+echo "let pkgs = import <nixpkgs$2> {}; x = pkgs.callPackage $1 { $3 }; in ${4:-x}" |
+nix-instantiate --show-trace - |
+xargs nix-store -r -K
diff --git a/maintainers/scripts/nix-diff.sh b/maintainers/scripts/nix-diff.sh
new file mode 100755
index 00000000000..0c65e29cf43
--- /dev/null
+++ b/maintainers/scripts/nix-diff.sh
@@ -0,0 +1,277 @@
+#!/usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils gnugrep gnused
+
+################################################################################
+# nix-diff.sh                                                                  #
+################################################################################
+# This script "diffs" Nix profile generations.                                 #
+#                                                                              #
+# Example:                                                                     #
+################################################################################
+# > nix-diff.sh 90 92                                                          #
+# + gnumake-4.2.1                                                              #
+# + gnumake-4.2.1-doc                                                          #
+# - htmldoc-1.8.29                                                             #
+################################################################################
+# The example shows that as of generation 92 and since generation 90,          #
+# gnumake-4.2.1 and gnumake-4.2.1-doc have been installed, while               #
+# htmldoc-1.8.29 has been removed.                                             #
+#                                                                              #
+# The example above shows the default, minimal output mode of this script.     #
+# For more features, run `nix-diff.sh -h` for usage instructions.              #
+################################################################################
+
+usage() {
+    cat <<EOF
+usage: nix-diff.sh [-h | [-p profile | -s] [-q] [-l] [range]]
+-h:         print this message before exiting
+-q:         list the derivations installed in the parent generation
+-l:         diff every available intermediate generation between parent and
+            child
+-p profile: specify the Nix profile to use
+            * defaults to ~/.nix-profile
+-s:         use the system profile
+            * equivalent to: -p /nix/var/nix/profiles/system
+profile:    * should be something like /nix/var/nix/profiles/default, not a
+              generation link like /nix/var/nix/profiles/default-2-link
+range:      the range of generations to diff
+            * the following patterns are allowed, where A, B, and N are positive
+              integers, and G is the currently active generation:
+                A..B => diffs from generation A to generation B
+                ~N   => diffs from the Nth newest generation (older than G) to G
+                A    => diffs from generation A to G
+            * defaults to ~1
+EOF
+}
+
+usage_tip() {
+    echo 'run `nix-diff.sh -h` for usage instructions' >&2
+    exit 1
+}
+
+while getopts :hqlp:s opt; do
+    case $opt in
+        h)
+            usage
+            exit
+            ;;
+        q)
+            opt_query=1
+            ;;
+        l)
+            opt_log=1
+            ;;
+        p)
+            opt_profile=$OPTARG
+            ;;
+        s)
+            opt_profile=/nix/var/nix/profiles/system
+            ;;
+        \?)
+            echo "error: invalid option -$OPTARG" >&2
+            usage_tip
+            ;;
+    esac
+done
+shift $((OPTIND-1))
+
+if [ -n "$opt_profile" ]; then
+    if ! [ -L "$opt_profile" ]; then
+        echo "error: expecting \`$opt_profile\` to be a symbolic link" >&2
+        usage_tip
+    fi
+else
+    opt_profile=$(readlink ~/.nix-profile)
+    if (( $? != 0 )); then
+        echo 'error: unable to dereference `~/.nix-profile`' >&2
+        echo 'specify the profile manually with the `-p` flag' >&2
+        usage_tip
+    fi
+fi
+
+list_gens() {
+    nix-env -p "$opt_profile" --list-generations \
+        | sed -r 's:^\s*::' \
+        | cut -d' ' -f1
+}
+
+current_gen() {
+    nix-env -p "$opt_profile" --list-generations \
+        | grep -E '\(current\)\s*$' \
+        | sed -r 's:^\s*::' \
+        | cut -d' ' -f1
+}
+
+neg_gen() {
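+    # Print the $2-th generation older than generation $1; fail if there are
+    # not that many older generations.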
+    local i=0 from=$1 n=$2 tmp
+    for gen in $(list_gens | sort -rn); do
+        if ((gen < from)); then
+            tmp=$gen
+            ((i++))
+            ((i == n)) && break
+        fi
+    done
+    if ((i < n)); then
+        echo -n "error: there aren't $n generation(s) older than" >&2
+        echo " generation $from" >&2
+        return 1
+    fi
+    echo $tmp
+}
+
+match() {
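+    # Print the 1-based index of the first pattern (arguments 2..$#) that
+    # fully matches $1, or 0 if none matches.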
+    argv=("$@")
+    for i in $(seq $(($#-1))); do
+        if grep -E "^${argv[$i]}\$" <(echo "$1") >/dev/null; then
+            echo $i
+            return
+        fi
+    done
+    echo 0
+}
+
+case $(match "$1" '' '[0-9]+' '[0-9]+\.\.[0-9]+' '~[0-9]+') in
+    1)
+        diffTo=$(current_gen)
+        diffFrom=$(neg_gen $diffTo 1)
+        (($? == 1)) && usage_tip
+        ;;
+    2)
+        diffFrom=$1
+        diffTo=$(current_gen)
+        ;;
+    3)
+        diffFrom=${1%%.*}
+        diffTo=${1##*.}
+        ;;
+    4)
+        diffTo=$(current_gen)
+        diffFrom=$(neg_gen $diffTo ${1#*~})
+        (($? == 1)) && usage_tip
+        ;;
+    0)
+        echo 'error: invalid invocation' >&2
+        usage_tip
+        ;;
+esac
+
+dirA="${opt_profile}-${diffFrom}-link"
+dirB="${opt_profile}-${diffTo}-link"
+
+declare -a temp_files
+temp_length() {
+    echo -n ${#temp_files[@]}
+}
+temp_make() {
+    temp_files[$(temp_length)]=$(mktemp)
+}
+temp_clean() {
+    rm -f ${temp_files[@]}
+}
+temp_name() {
+    echo -n "${temp_files[$(($(temp_length)-1))]}"
+}
+trap 'temp_clean' EXIT
+
+temp_make
+versA=$(temp_name)
+refs=$(nix-store -q --references "$dirA")
+(( $? != 0 )) && exit 1
+echo "$refs" \
+    | grep -v env-manifest.nix \
+    | sort \
+          > "$versA"
+
+print_tag() {
+    local gen=$1
+    nix-env -p "$opt_profile" --list-generations \
+        | grep -E "^\s*${gen}\b" \
+        | sed -r 's:^\s*::' \
+        | sed -r 's:\s*$::'
+}
+
+if [ -n "$opt_query" ]; then
+    print_tag $diffFrom
+    cat "$versA" \
+        | sed -r 's:^[^-]+-(.*)$:    \1:'
+
+    print_line=1
+fi
+
+if [ -n "$opt_log" ]; then
+    gens=$(for gen in $(list_gens); do
+               ((diffFrom < gen && gen < diffTo)) && echo $gen
+           done)
+    # Force the $diffTo generation to be included in this list, instead of using
+    # `gen <= diffTo` in the preceding loop, so that we get an error if it does
+    # not exist.
+    gens=$(echo "$gens"
+           echo $diffTo)
+else
+    gens=$diffTo
+fi
+
+temp_make
+add=$(temp_name)
+temp_make
+rem=$(temp_name)
+temp_make
+out=$(temp_name)
+
+for gen in $gens; do
+
+    [ -n "$print_line" ] && echo
+
+    temp_make
+    versB=$(temp_name)
+
+    dirB="${opt_profile}-${gen}-link"
+    refs=$(nix-store -q --references "$dirB")
+    (( $? != 0 )) && exit 1
+    echo "$refs" \
+        | grep -v env-manifest.nix \
+        | sort \
+              > "$versB"
+
+    in=$(comm -3 -1 "$versA" "$versB")
+    sed -r 's:^[^-]*-(.*)$:\1+:' <(echo "$in") \
+        | sort -f \
+               > "$add"
+
+    un=$(comm -3 -2 "$versA" "$versB")
+    sed -r 's:^[^-]*-(.*)$:\1-:' <(echo "$un") \
+        | sort -f \
+               > "$rem"
+
+    cat "$rem" "$add" \
+        | sort -f \
+        | sed -r 's:(.*)-$:- \1:' \
+        | sed -r 's:(.*)\+$:\+ \1:' \
+        | grep -v '^$' \
+              > "$out"
+
+    if [ -n "$opt_query" -o -n "$opt_log" ]; then
+
+        lines=$(wc -l "$out" | cut -d' ' -f1)
+        tag=$(print_tag "$gen")
+        (( $? != 0 )) && exit 1
+        if [ $lines -eq 0 ]; then
+            echo "$tag   (no change)"
+        else
+            echo "$tag"
+        fi
+        cat "$out" \
+            | sed 's:^:    :'
+
+        print_line=1
+
+    else
+        echo "diffing from generation $diffFrom to $diffTo"
+        cat "$out"
+    fi
+
+    versA=$versB
+
+done
+
+exit 0
diff --git a/maintainers/scripts/nix-generate-from-cpan.nix b/maintainers/scripts/nix-generate-from-cpan.nix
new file mode 100644
index 00000000000..fecca7f0c73
--- /dev/null
+++ b/maintainers/scripts/nix-generate-from-cpan.nix
@@ -0,0 +1,25 @@
+{ stdenv, lib, makeWrapper, perl, perlPackages }:
+
+stdenv.mkDerivation {
+  name = "nix-generate-from-cpan-3";
+
+  buildInputs = with perlPackages; [
+    makeWrapper perl GetoptLongDescriptive CPANPLUS Readonly LogLog4perl
+  ];
+
+  phases = [ "installPhase" ];
+
+  installPhase =
+    ''
+      mkdir -p $out/bin
+      cp ${./nix-generate-from-cpan.pl} $out/bin/nix-generate-from-cpan
+      patchShebangs $out/bin/nix-generate-from-cpan
+      wrapProgram $out/bin/nix-generate-from-cpan --set PERL5LIB $PERL5LIB
+    '';
+
+  meta = {
+    maintainers = with lib.maintainers; [ eelco ];
+    description = "Utility to generate a Nix expression for a Perl package from CPAN";
+    platforms = lib.platforms.unix;
+  };
+}
diff --git a/maintainers/scripts/nix-generate-from-cpan.pl b/maintainers/scripts/nix-generate-from-cpan.pl
new file mode 100755
index 00000000000..6494acb50da
--- /dev/null
+++ b/maintainers/scripts/nix-generate-from-cpan.pl
@@ -0,0 +1,466 @@
+#!/usr/bin/env perl
+
+use utf8;
+use strict;
+use warnings;
+
+use CPAN::Meta();
+use CPANPLUS::Backend();
+use Module::CoreList;
+use Getopt::Long::Descriptive qw( describe_options );
+use JSON::PP qw( encode_json );
+use Log::Log4perl qw(:easy);
+use Readonly();
+
+# Readonly hash that maps CPAN style license strings to information
+# necessary to generate a Nixpkgs style license attribute.
+Readonly::Hash my %LICENSE_MAP => (
+
+    # The Perl 5 License (Artistic 1 & GPL 1 or later).
+    perl_5 => {
+        licenses => [qw( artistic1 gpl1Plus )]
+    },
+
+    # GNU Affero General Public License, Version 3.
+    agpl_3 => {
+        licenses => [qw( agpl3Plus )],
+        amb      => 1
+    },
+
+    # Apache Software License, Version 1.1.
+    apache_1_1 => {
+        licenses => ["Apache License 1.1"],
+        in_set   => 0
+    },
+
+    # Apache License, Version 2.0.
+    apache_2_0 => {
+        licenses => [qw( asl20 )]
+    },
+
+    # Artistic License, (Version 1).
+    artistic_1 => {
+        licenses => [qw( artistic1 )]
+    },
+
+    # Artistic License, Version 2.0.
+    artistic_2 => {
+        licenses => [qw( artistic2 )]
+    },
+
+    # BSD License (three-clause).
+    bsd => {
+        licenses => [qw( bsd3 )],
+        amb      => 1
+    },
+
+    # FreeBSD License (two-clause).
+    freebsd => {
+        licenses => [qw( bsd2 )]
+    },
+
+    # GNU Free Documentation License, Version 1.2.
+    gfdl_1_2 => {
+        licenses => [qw( fdl12 )]
+    },
+
+    # GNU Free Documentation License, Version 1.3.
+    gfdl_1_3 => {
+        licenses => [qw( fdl13 )]
+    },
+
+    # GNU General Public License, Version 1.
+    gpl_1 => {
+        licenses => [qw( gpl1Plus )],
+        amb      => 1
+    },
+
+    # GNU General Public License, Version 2. Note, we will interpret
+    # "gpl" alone as GPL v2+.
+    gpl_2 => {
+        licenses => [qw( gpl2Plus )],
+        amb      => 1
+    },
+
+    # GNU General Public License, Version 3.
+    gpl_3 => {
+        licenses => [qw( gpl3Plus )],
+        amb      => 1
+    },
+
+    # GNU Lesser General Public License, Version 2.1. Note, we will
+    # interpret "gpl" alone as LGPL v2.1+.
+    lgpl_2_1 => {
+        licenses => [qw( lgpl21Plus )],
+        amb      => 1
+    },
+
+    # GNU Lesser General Public License, Version 3.0.
+    lgpl_3_0 => {
+        licenses => [qw( lgpl3Plus )],
+        amb      => 1
+    },
+
+    # MIT (aka X11) License.
+    mit => {
+        licenses => [qw( mit )]
+    },
+
+    # Mozilla Public License, Version 1.0.
+    mozilla_1_0 => {
+        licenses => [qw( mpl10 )]
+    },
+
+    # Mozilla Public License, Version 1.1.
+    mozilla_1_1 => {
+        licenses => [qw( mpl11 )]
+    },
+
+    # OpenSSL License.
+    openssl => {
+        licenses => [qw( openssl )]
+    },
+
+    # Q Public License, Version 1.0.
+    qpl_1_0 => {
+        licenses => [qw( qpl )]
+    },
+
+    # Original SSLeay License.
+    ssleay => {
+        licenses => ["Original SSLeay License"],
+        in_set   => 0
+    },
+
+    # Sun Internet Standards Source License (SISSL).
+    sun => {
+        licenses => ["Sun Industry Standards Source License v1.1"],
+        in_set   => 0
+    },
+
+    # zlib License.
+    zlib => {
+        licenses => [qw( zlib )]
+    },
+
+    # Other Open Source Initiative (OSI) approved license.
+    open_source => {
+        licenses => [qw( free )],
+        amb      => 1
+    },
+
+    # Requires special permission from copyright holder.
+    restricted => {
+        licenses => [qw( unfree )],
+        amb      => 1
+    },
+
+    # Not an OSI approved license, but not restricted. Note, we
+    # currently map this to unfreeRedistributable, which is a
+    # conservative choice.
+    unrestricted => {
+        licenses => [qw( unfreeRedistributable )],
+        amb      => 1
+    },
+
+    # License not provided in metadata.
+    unknown => {
+        licenses => [],
+        amb      => 1
+    }
+);
+
+sub handle_opts {
+    my ( $opt, $usage ) = describe_options(
+        'usage: %c %o MODULE',
+        [ 'maintainer|m=s', 'the package maintainer' ],
+        [ 'debug|d',        'enable debug output' ],
+        [ 'help',           'print usage message and exit' ]
+    );
+
+    if ( $opt->help ) {
+        print $usage->text;
+        exit;
+    }
+
+    my $module_name = $ARGV[0];
+
+    if ( !defined $module_name ) {
+        print STDERR "Missing module name\n";
+        print STDERR $usage->text;
+        exit 1;
+    }
+
+    return ( $opt, $module_name );
+}
+
+# Takes a Perl package attribute name and returns 1 if the name cannot
+# be referred to as a bareword. This typically happens if the package
+# name is a reserved Nix keyword.
+sub is_reserved {
+    my ($pkg) = @_;
+
+    return $pkg =~ /^(?: assert    |
+                         else      |
+                         if        |
+                         import    |
+                         in        |
+                         inherit   |
+                         let       |
+                         rec       |
+                         then      |
+                         while     |
+                         with      )$/x;
+}
+
+sub pkg_to_attr {
+    my ($module) = @_;
+    my $attr_name = $module->package_name;
+    if ( $attr_name eq "libwww-perl" ) {
+        return "LWP";
+    }
+    else {
+        $attr_name =~ s/-//g;
+        return $attr_name;
+    }
+}
+
+sub get_pkg_name {
+    my ($module) = @_;
+    return ( $module->package_name, $module->package_version =~ s/^v(\d)/$1/r );
+}
+
+sub read_meta {
+    my ($pkg_path) = @_;
+
+    my $yaml_path = "$pkg_path/META.yml";
+    my $json_path = "$pkg_path/META.json";
+    my $meta;
+
+    if ( -r $json_path ) {
+        $meta = CPAN::Meta->load_file($json_path);
+    }
+    elsif ( -r $yaml_path ) {
+        $meta = CPAN::Meta->load_file($yaml_path);
+    }
+    else {
+        WARN("package has no META.yml or META.json");
+    }
+
+    return $meta;
+}
+
+# Map a module to the attribute corresponding to its package
+# (e.g. HTML::HeadParser will be mapped to HTMLParser, because that
+# module is in the HTML-Parser package).
+sub module_to_pkg {
+    my ( $cb, $module_name ) = @_;
+    my @modules = $cb->search( type => "name", allow => [$module_name] );
+    if ( scalar @modules == 0 ) {
+
+        # Fallback.
+        $module_name =~ s/:://g;
+        return $module_name;
+    }
+    my $module    = $modules[0];
+    my $attr_name = pkg_to_attr($module);
+    DEBUG("mapped dep $module_name to $attr_name");
+    return $attr_name;
+}
+
+sub get_deps {
+    my ( $cb, $meta, $type ) = @_;
+
+    return if !defined $meta;
+
+    my $prereqs = $meta->effective_prereqs;
+    my $deps = $prereqs->requirements_for( $type, "requires" );
+    my @res;
+    foreach my $n ( $deps->required_modules ) {
+        next if $n eq "perl";
+
+        my @core = Module::CoreList->find_modules(qr/^$n$/);
+        next if (@core);
+
+        my $pkg = module_to_pkg( $cb, $n );
+
+        # If the package name is reserved then we need to refer to it
+        # through the "self" variable.
+        $pkg = "self.\"$pkg\"" if is_reserved($pkg);
+
+        push @res, $pkg;
+    }
+    return @res;
+}
+
+sub uniq {
+    return keys %{ { map { $_ => 1 } @_ } };
+}
+
+sub render_license {
+    my ($cpan_license) = @_;
+
+    return if !defined $cpan_license;
+
+    my $licenses;
+
+    # If the license is ambiguous then we'll print an extra warning.
+    # For example, "gpl_2" is ambiguous since it may refer to exactly
+    # "GPL v2" or to "GPL v2 or later".
+    my $amb = 0;
+
+    # Whether the license is available inside `lib.licenses`.
+    my $in_set = 1;
+
+    my $nix_license = $LICENSE_MAP{$cpan_license};
+    if ( !$nix_license ) {
+        WARN("Unknown license: $cpan_license");
+        $licenses = [$cpan_license];
+        $in_set   = 0;
+    }
+    else {
+        $licenses = $nix_license->{licenses};
+        $amb      = $nix_license->{amb};
+        $in_set   = $nix_license->{in_set} // 1;    # default to 1 when the key is absent
+    }
+
+    my $license_line;
+
+    if ( @$licenses == 0 ) {
+
+        # Avoid defining the license line.
+    }
+    elsif ($in_set) {
+        my $lic = 'lib.licenses';
+        if ( @$licenses == 1 ) {
+            $license_line = "$lic.$licenses->[0]";
+        }
+        else {
+            $license_line = "with $lic; [ " . join( ' ', @$licenses ) . " ]";
+        }
+    }
+    else {
+        if ( @$licenses == 1 ) {
+            $license_line = $licenses->[0];
+        }
+        else {
+            $license_line = '[ ' . join( ' ', @$licenses ) . ' ]';
+        }
+    }
+
+    INFO("license: $cpan_license");
+    WARN("License '$cpan_license' is ambiguous, please verify") if $amb;
+
+    return $license_line;
+}
+
+my ( $opt, $module_name ) = handle_opts();
+
+Log::Log4perl->easy_init(
+    {
+        level => $opt->debug ? $DEBUG : $INFO,
+        layout => '%m%n'
+    }
+);
+
+my $cb = CPANPLUS::Backend->new;
+
+my @modules = $cb->search( type => "name", allow => [$module_name] );
+die "module $module_name not found\n" if scalar @modules == 0;
+die "multiple packages that match module $module_name\n" if scalar @modules > 1;
+my $module = $modules[0];
+
+my ($pkg_name, $pkg_version) = get_pkg_name $module;
+my $attr_name = pkg_to_attr $module;
+
+INFO( "attribute name: ", $attr_name );
+INFO( "module: ",         $module->module );
+INFO( "version: ",        $module->version );
+INFO( "package: ", $module->package, " (", "$pkg_name-$pkg_version", ", ", $attr_name, ")" );
+INFO( "path: ",    $module->path );
+
+my $tar_path = $module->fetch();
+INFO( "downloaded to: ", $tar_path );
+INFO( "sha-256: ",       $module->status->checksum_value );
+
+my $pkg_path = $module->extract();
+INFO( "unpacked to: ", $pkg_path );
+
+my $meta = read_meta($pkg_path);
+
+DEBUG( "metadata: ", encode_json( $meta->as_struct ) ) if defined $meta;
+
+my @runtime_deps = sort( uniq( get_deps( $cb, $meta, "runtime" ) ) );
+INFO("runtime deps: @runtime_deps");
+
+my @build_deps = sort( uniq(
+        get_deps( $cb, $meta, "configure" ),
+        get_deps( $cb, $meta, "build" ),
+        get_deps( $cb, $meta, "test" )
+) );
+
+# Filter out runtime dependencies since those are already handled.
+my %in_runtime_deps = map { $_ => 1 } @runtime_deps;
+@build_deps = grep { not $in_runtime_deps{$_} } @build_deps;
+
+INFO("build deps: @build_deps");
+
+my $homepage = $meta ? $meta->resources->{homepage} : undef;
+INFO("homepage: $homepage") if defined $homepage;
+
+my $description = $meta ? $meta->abstract : undef;
+if ( defined $description ) {
+    $description = uc( substr( $description, 0, 1 ) )
+      . substr( $description, 1 );    # capitalise first letter
+    $description =~ s/\.$//;          # remove period at the end
+    $description =~ s/\s*$//;
+    $description =~ s/^\s*//;
+    $description =~ s/\n+/ /g;        # Replace newlines by spaces.
+    INFO("description: $description");
+}
+
+#print(Data::Dumper::Dumper($meta->licenses) . "\n");
+my $license = $meta ? render_license( $meta->licenses ) : undef;
+
+INFO( "RSS feed: https://metacpan.org/feed/distribution/",
+    $module->package_name );
+
+my $build_fun = -e "$pkg_path/Build.PL"
+  && !-e "$pkg_path/Makefile.PL" ? "buildPerlModule" : "buildPerlPackage";
+
+print STDERR "===\n";
+
+print <<EOF;
+  ${\(is_reserved($attr_name) ? "\"$attr_name\"" : $attr_name)} = $build_fun {
+    pname = "$pkg_name";
+    version = "$pkg_version";
+    src = fetchurl {
+      url = "mirror://cpan/${\$module->path}/${\$module->package}";
+      sha256 = "${\$module->status->checksum_value}";
+    };
+EOF
+print <<EOF if scalar @build_deps > 0;
+    buildInputs = [ @build_deps ];
+EOF
+print <<EOF if scalar @runtime_deps > 0;
+    propagatedBuildInputs = [ @runtime_deps ];
+EOF
+print <<EOF;
+    meta = {
+EOF
+print <<EOF if defined $homepage;
+      homepage = "$homepage";
+EOF
+print <<EOF if defined $description && $description ne "Unknown";
+      description = "$description";
+EOF
+print <<EOF if defined $license;
+      license = $license;
+EOF
+print <<EOF if $opt->maintainer;
+      maintainers = [ maintainers.${\$opt->maintainer} ];
+EOF
+print <<EOF;
+    };
+  };
+EOF
diff --git a/maintainers/scripts/nixpkgs-lint.nix b/maintainers/scripts/nixpkgs-lint.nix
new file mode 100644
index 00000000000..873905373af
--- /dev/null
+++ b/maintainers/scripts/nixpkgs-lint.nix
@@ -0,0 +1,24 @@
+{ stdenv, lib, makeWrapper, perl, perlPackages }:
+
+stdenv.mkDerivation {
+  name = "nixpkgs-lint-1";
+
+  nativeBuildInputs = [ makeWrapper ];
+  buildInputs = [ perl perlPackages.XMLSimple ];
+
+  dontUnpack = true;
+  buildPhase = "true";
+
+  installPhase =
+    ''
+      mkdir -p $out/bin
+      cp ${./nixpkgs-lint.pl} $out/bin/nixpkgs-lint
+      wrapProgram $out/bin/nixpkgs-lint --set PERL5LIB $PERL5LIB
+    '';
+
+  meta = with lib; {
+    maintainers = [ maintainers.eelco ];
+    description = "A utility for Nixpkgs contributors to check Nixpkgs for common errors";
+    platforms = platforms.unix;
+  };
+}
diff --git a/maintainers/scripts/nixpkgs-lint.pl b/maintainers/scripts/nixpkgs-lint.pl
new file mode 100755
index 00000000000..43fb3941361
--- /dev/null
+++ b/maintainers/scripts/nixpkgs-lint.pl
@@ -0,0 +1,173 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i perl -p perl perlPackages.XMLSimple
+
+use strict;
+use List::Util qw(min);
+use XML::Simple qw(:strict);
+use Getopt::Long qw(:config gnu_getopt);
+
+# Parse the command line.
+my $path = "<nixpkgs>";
+my $filter = "*";
+my $maintainer;
+
+sub showHelp {
+    print <<EOF;
+Usage: $0 [--package=NAME] [--maintainer=REGEXP] [--file=PATH]
+
+Check Nixpkgs for common errors/problems.
+
+  -p, --package        filter packages by name (default is ‘*’)
+  -m, --maintainer     filter packages by maintainer (case-insensitive regexp)
+  -f, --file           path to Nixpkgs (default is ‘<nixpkgs>’)
+
+Examples:
+  \$ nixpkgs-lint -f /my/nixpkgs -p firefox
+  \$ nixpkgs-lint -f /my/nixpkgs -m eelco
+EOF
+    exit 0;
+}
+
+GetOptions("package|p=s" => \$filter,
+           "maintainer|m=s" => \$maintainer,
+           "file|f=s" => \$path,
+           "help" => sub { showHelp() }
+    ) or exit 1;
+
+# Evaluate Nixpkgs into an XML representation.
+my $xml = `nix-env -f '$path' --arg overlays '[]' -qa '$filter' --xml --meta --drv-path`;
+die "$0: evaluation of ‘$path’ failed\n" if $? != 0;
+
+my $info = XMLin($xml, KeyAttr => { 'item' => '+attrPath', 'meta' => 'name' }, ForceArray => 1, SuppressEmpty => '' ) or die "cannot parse XML output";
+
+# Check meta information.
+print "=== Package meta information ===\n\n";
+my $nrBadNames = 0;
+my $nrMissingMaintainers = 0;
+my $nrMissingPlatforms = 0;
+my $nrMissingDescriptions = 0;
+my $nrBadDescriptions = 0;
+my $nrMissingLicenses = 0;
+
+foreach my $attr (sort keys %{$info->{item}}) {
+    my $pkg = $info->{item}->{$attr};
+
+    my $pkgName = $pkg->{name};
+    my $pkgVersion = "";
+    if ($pkgName =~ /(.*)(-[0-9].*)$/) {
+        $pkgName = $1;
+        $pkgVersion = $2;
+    }
+
+    # Check the maintainers.
+    my @maintainers;
+    my $x = $pkg->{meta}->{maintainers};
+    if (defined $x && $x->{type} eq "strings") {
+        @maintainers = map { $_->{value} } @{$x->{string}};
+    } elsif (defined $x->{value}) {
+        @maintainers = ($x->{value});
+    }
+
+    if (defined $maintainer && scalar(grep { $_ =~ /$maintainer/i } @maintainers) == 0) {
+        delete $info->{item}->{$attr};
+        next;
+    }
+
+    if (scalar @maintainers == 0) {
+        print "$attr: Lacks a maintainer\n";
+        $nrMissingMaintainers++;
+    }
+
+    # Check the platforms.
+    if (!defined $pkg->{meta}->{platforms}) {
+        print "$attr: Lacks a platform\n";
+        $nrMissingPlatforms++;
+    }
+
+    # Package names should not be capitalised.
+    if ($pkgName =~ /^[A-Z]/) {
+        print "$attr: package name ‘$pkgName’ should not be capitalised\n";
+        $nrBadNames++;
+    }
+
+    if ($pkgVersion eq "") {
+        print "$attr: package has no version\n";
+        $nrBadNames++;
+    }
+
+    # Check the license.
+    if (!defined $pkg->{meta}->{license}) {
+        print "$attr: Lacks a license\n";
+        $nrMissingLicenses++;
+    }
+
+    # Check the description.
+    my $description = $pkg->{meta}->{description}->{value};
+    if (!$description) {
+        print "$attr: Lacks a description\n";
+        $nrMissingDescriptions++;
+    } else {
+        my $bad = 0;
+        if ($description =~ /^\s/) {
+            print "$attr: Description starts with whitespace\n";
+            $bad = 1;
+        }
+        if ($description =~ /\s$/) {
+            print "$attr: Description ends with whitespace\n";
+            $bad = 1;
+        }
+        if ($description =~ /\.$/) {
+            print "$attr: Description ends with a period\n";
+            $bad = 1;
+        }
+        if (index(lc($description), lc($attr)) != -1) {
+            print "$attr: Description contains package name\n";
+            $bad = 1;
+        }
+        $nrBadDescriptions++ if $bad;
+    }
+}
+
+print "\n";
+
+# Find packages that have the same name.
+print "=== Package name collisions ===\n\n";
+
+my %pkgsByName;
+
+foreach my $attr (sort keys %{$info->{item}}) {
+    my $pkg = $info->{item}->{$attr};
+    #print STDERR "attr = $attr, name = $pkg->{name}\n";
+    $pkgsByName{$pkg->{name}} //= [];
+    push @{$pkgsByName{$pkg->{name}}}, $pkg;
+}
+
+my $nrCollisions = 0;
+foreach my $name (sort keys %pkgsByName) {
+    my @pkgs = @{$pkgsByName{$name}};
+
+    # Filter attributes that are aliases of each other (e.g. yield the
+    # same derivation path).
+    my %drvsSeen;
+    @pkgs = grep { my $x = $drvsSeen{$_->{drvPath}}; $drvsSeen{$_->{drvPath}} = 1; !defined $x } @pkgs;
+
+    # Keep only the highest-priority packages (in Nix, a lower numeric value means a higher priority, hence min).
+    my $highest = min (map { $_->{meta}->{priority}->{value} // 0 } @pkgs);
+    @pkgs = grep { ($_->{meta}->{priority}->{value} // 0) == $highest } @pkgs;
+
+    next if scalar @pkgs == 1;
+
+    $nrCollisions++;
+    print "The following attributes evaluate to a package named ‘$name’:\n";
+    print "  ", join(", ", map { $_->{attrPath} } @pkgs), "\n\n";
+}
+
+print "=== Bottom line ===\n";
+print "Number of packages: ", scalar(keys %{$info->{item}}), "\n";
+print "Number of bad names: $nrBadNames\n";
+print "Number of missing maintainers: $nrMissingMaintainers\n";
+print "Number of missing platforms: $nrMissingPlatforms\n";
+print "Number of missing licenses: $nrMissingLicenses\n";
+print "Number of missing descriptions: $nrMissingDescriptions\n";
+print "Number of bad descriptions: $nrBadDescriptions\n";
+print "Number of name collisions: $nrCollisions\n";
diff --git a/maintainers/scripts/patchelf-hints.sh b/maintainers/scripts/patchelf-hints.sh
new file mode 100755
index 00000000000..5fdfc15dc23
--- /dev/null
+++ b/maintainers/scripts/patchelf-hints.sh
@@ -0,0 +1,84 @@
+#!/usr/bin/env bash
+usage() {
+    echo "
+$0 <path to unpacked binary distribution directory>
+
+This program returns the list of libraries and where to find them, based on
+your currently installed programs.
+";
+    exit 1
+}
+
+if test $# -ne 1; then
+  usage
+fi
+
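+# Illustrative session (hypothetical binary and store paths), matching the
+# output format produced below:
+#
+#   $ ./patchelf-hints.sh ./unpacked/bin
+#   ./unpacked/bin/foo:
+#     libz.so.1:
+#       zlib-1.2.11 -> libz.so.1
+#
+#   Patchelf command:
+#
+#     patchelf --set-rpath ${zlib}/lib \
+#       $out/./unpacked/bin/foo
+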
+binaryDist=$1
+
+hasBinaries=false
+for bin in $(find $binaryDist -executable -type f) :; do
+    if test $bin = ":"; then
+        $hasBinaries || \
+            echo "No patchable found in this directory."
+        break
+    fi
+    hasBinaries=true
+
+    echo ""
+    echo "$bin:"
+    hasLibraries=false
+    unset interpreter
+    unset addRPath
+    for lib in $(strings $bin | grep '^\(/\|\)lib.*\.so' | sort | uniq) :; do
+        if test $lib = ":"; then
+            $hasLibraries || \
+                echo "  This program is a script or it is statically linked."
+            break
+        fi
+        hasLibraries=true
+
+        echo "  $lib:";
+
+        libPath=$lib
+        lib=$(basename $lib)
+
+        #versionLessLib=$(echo $lib | sed 's,[.][.0-9]*$,,')
+
+        libs="$(
+            find /nix/store/*/lib* \( -type f -or -type l \) -name $lib |
+            grep -v '\(bootstrap-tools\|system-path\|user-environment\|extra-utils\)'
+        )"
+
+        echo "$libs" |
+        sed 's,^/nix/store/[a-z0-9]*-\([^/]*\)/.*/\([^/]*\)$,    \1 -> \2,' |
+        sort |
+        uniq;
+
+        names=$(
+            echo "$libs" |
+            sed 's,^/nix/store/[a-z0-9]*-\([^/]*\)-[.0-9]*/.*$,\1,' |
+            sort |
+            uniq;
+        )
+
+        if test "$names" = "glibc"; then names="stdenv.glibc"; fi
+        if echo $names | grep -c "gcc" &> /dev/null; then names="stdenv.cc.cc"; fi
+
+        if test $lib != $libPath; then
+            interpreter="--interpreter \${$names}/lib/$lib"
+        elif echo $addRPath | grep -c "$names" &> /dev/null; then
+            :
+        else
+            addRPath=${addRPath+$addRPath:}"\${$names}/lib"
+        fi
+    done;
+    $hasLibraries && \
+        echo "
+  Patchelf command:
+
+    patchelf $interpreter \\
+      ${addRPath+--set-rpath $addRPath \\
+}      \$out/$bin
+
+"
+done;
diff --git a/maintainers/scripts/pluginupdate.py b/maintainers/scripts/pluginupdate.py
new file mode 100644
index 00000000000..017e3ac758a
--- /dev/null
+++ b/maintainers/scripts/pluginupdate.py
@@ -0,0 +1,674 @@
+# Used by pkgs/applications/editors/vim/plugins/update.py and pkgs/applications/editors/kakoune/plugins/update.py
+
+# format:
+# $ nix run nixpkgs.python3Packages.black -c black update.py
+# type-check:
+# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
+# linted:
+# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265 update.py
+
+import argparse
+import functools
+import http
+import json
+import os
+import subprocess
+import logging
+import sys
+import time
+import traceback
+import urllib.error
+import urllib.parse
+import urllib.request
+import xml.etree.ElementTree as ET
+from datetime import datetime
+from functools import wraps
+from multiprocessing.dummy import Pool
+from pathlib import Path
+from typing import Dict, List, Optional, Tuple, Union, Any, Callable
+from urllib.parse import urljoin, urlparse
+from tempfile import NamedTemporaryFile
+from dataclasses import dataclass
+
+import git
+
+ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
+ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
+ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "
+
+LOG_LEVELS = {
+    logging.getLevelName(level): level for level in [
+        logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR ]
+}
+
+log = logging.getLogger()
+
+def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
+    """Retry calling the decorated function using an exponential backoff.
+    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
+    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
+    (BSD licensed)
+    :param ExceptionToCheck: the exception on which to retry
+    :param tries: number of times to try (not retry) before giving up
+    :param delay: initial delay between retries in seconds
+    :param backoff: backoff multiplier e.g. value of 2 will double the delay
+        each retry
+    """
+
+    def deco_retry(f: Callable) -> Callable:
+        @wraps(f)
+        def f_retry(*args: Any, **kwargs: Any) -> Any:
+            mtries, mdelay = tries, delay
+            while mtries > 1:
+                try:
+                    return f(*args, **kwargs)
+                except ExceptionToCheck as e:
+                    print(f"{str(e)}, Retrying in {mdelay} seconds...")
+                    time.sleep(mdelay)
+                    mtries -= 1
+                    mdelay *= backoff
+            return f(*args, **kwargs)
+
+        return f_retry  # true decorator
+
+    return deco_retry
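+
+# Illustrative use of the decorator above: with the defaults, the delay doubles
+# after each failure (3 s, 6 s, 12 s) and the final attempt re-raises the error.
+#
+#   @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+#   def fetch(url: str) -> bytes:
+#       with urllib.request.urlopen(url, timeout=10) as resp:
+#           return resp.read()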
+
+@dataclass
+class FetchConfig:
+    proc: int
+    github_token: str
+
+
+def make_request(url: str, token=None) -> urllib.request.Request:
+    headers = {}
+    if token is not None:
+        headers["Authorization"] = f"token {token}"
+    return urllib.request.Request(url, headers=headers)
+
+class Repo:
+    def __init__(
+        self, uri: str, branch: str, alias: Optional[str]
+    ) -> None:
+        self.uri = uri
+        '''Url to the repo'''
+        self.branch = branch
+        self.alias = alias
+        self.redirect: Dict[str, str] = {}
+        self.token = "dummy_token"
+
+    @property
+    def name(self):
+        return self.uri.split('/')[-1]
+
+    def __repr__(self) -> str:
+        return f"Repo({self.name}, {self.uri})"
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def has_submodules(self) -> bool:
+        return True
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def latest_commit(self) -> Tuple[str, datetime]:
+        loaded = self._prefetch(None)
+        updated = datetime.strptime(loaded['date'], "%Y-%m-%dT%H:%M:%S%z")
+
+        return loaded['rev'], updated
+
+    def _prefetch(self, ref: Optional[str]):
+        cmd = ["nix-prefetch-git", "--quiet", "--fetch-submodules", self.uri]
+        if ref is not None:
+            cmd.append(ref)
+        log.debug(cmd)
+        data = subprocess.check_output(cmd)
+        loaded = json.loads(data)
+        return loaded
+
+    def prefetch(self, ref: Optional[str]) -> str:
+        loaded = self._prefetch(ref)
+        return loaded["sha256"]
+
+    def as_nix(self, plugin: "Plugin") -> str:
+        return f'''fetchgit {{
+            url = "{self.uri}";
+            rev = "{plugin.commit}";
+            sha256 = "{plugin.sha256}";
+        }}'''
+
+
+class RepoGitHub(Repo):
+    def __init__(
+        self, owner: str, repo: str, branch: str, alias: Optional[str]
+    ) -> None:
+        self.owner = owner
+        self.repo = repo
+        self.token = None
+        '''Optional GitHub API token used for authenticated requests'''
+        super().__init__(self.url(""), branch, alias)
+        log.debug("Instantiating github repo %s/%s", self.owner, self.repo)
+
+    @property
+    def name(self):
+        return self.repo
+
+    def url(self, path: str) -> str:
+        return urljoin(f"https://github.com/{self.owner}/{self.name}/", path)
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def has_submodules(self) -> bool:
+        try:
+            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"), self.token)
+            urllib.request.urlopen(req, timeout=10).close()
+        except urllib.error.HTTPError as e:
+            if e.code == 404:
+                return False
+            else:
+                raise
+        return True
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def latest_commit(self) -> Tuple[str, datetime]:
+        commit_url = self.url(f"commits/{self.branch}.atom")
+        commit_req = make_request(commit_url, self.token)
+        with urllib.request.urlopen(commit_req, timeout=10) as req:
+            self._check_for_redirect(commit_url, req)
+            xml = req.read()
+            root = ET.fromstring(xml)
+            latest_entry = root.find(ATOM_ENTRY)
+            assert latest_entry is not None, f"No commits found in repository {self}"
+            commit_link = latest_entry.find(ATOM_LINK)
+            assert commit_link is not None, f"No link tag found feed entry {xml}"
+            url = urlparse(commit_link.get("href"))
+            updated_tag = latest_entry.find(ATOM_UPDATED)
+            assert (
+                updated_tag is not None and updated_tag.text is not None
+            ), f"No updated tag found feed entry {xml}"
+            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
+            return Path(str(url.path)).name, updated
+
+    def _check_for_redirect(self, url: str, req: http.client.HTTPResponse):
+        response_url = req.geturl()
+        if url != response_url:
+            new_owner, new_name = (
+                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
+            )
+            end_line = "\n" if self.alias is None else f" as {self.alias}\n"
+            plugin_line = "{owner}/{name}" + end_line
+
+            old_plugin = plugin_line.format(owner=self.owner, name=self.name)
+            new_plugin = plugin_line.format(owner=new_owner, name=new_name)
+            self.redirect[old_plugin] = new_plugin
+
+
+    def prefetch(self, commit: str) -> str:
+        if self.has_submodules():
+            sha256 = super().prefetch(commit)
+        else:
+            sha256 = self.prefetch_github(commit)
+        return sha256
+
+    def prefetch_github(self, ref: str) -> str:
+        data = subprocess.check_output(
+            ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
+        )
+        return data.strip().decode("utf-8")
+
+    def as_nix(self, plugin: "Plugin") -> str:
+        if plugin.has_submodules:
+            submodule_attr = "\n      fetchSubmodules = true;"
+        else:
+            submodule_attr = ""
+
+        return f'''fetchFromGitHub {{
+      owner = "{self.owner}";
+      repo = "{self.repo}";
+      rev = "{plugin.commit}";
+      sha256 = "{plugin.sha256}";{submodule_attr}
+    }}'''
+
+
+@dataclass
+class PluginDesc:
+    repo: Repo
+    branch: str
+    alias: Optional[str]
+
+    @property
+    def name(self):
+        if self.alias is None:
+            return self.repo.name
+        else:
+            return self.alias
+
+
+class Plugin:
+    def __init__(
+        self,
+        name: str,
+        commit: str,
+        has_submodules: bool,
+        sha256: str,
+        date: Optional[datetime] = None,
+    ) -> None:
+        self.name = name
+        self.commit = commit
+        self.has_submodules = has_submodules
+        self.sha256 = sha256
+        self.date = date
+
+    @property
+    def normalized_name(self) -> str:
+        return self.name.replace(".", "-")
+
+    @property
+    def version(self) -> str:
+        assert self.date is not None
+        return self.date.strftime("%Y-%m-%d")
+
+    def as_json(self) -> Dict[str, str]:
+        copy = self.__dict__.copy()
+        del copy["date"]
+        return copy
+
+
+
+class Editor:
+    """The configuration of the update script."""
+
+    def __init__(
+        self,
+        name: str,
+        root: Path,
+        get_plugins: str,
+        default_in: Optional[Path] = None,
+        default_out: Optional[Path] = None,
+        deprecated: Optional[Path] = None,
+        cache_file: Optional[str] = None,
+    ):
+        log.debug("get_plugins:", get_plugins)
+        self.name = name
+        self.root = root
+        self.get_plugins = get_plugins
+        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
+        self.default_out = default_out or root.joinpath("generated.nix")
+        self.deprecated = deprecated or root.joinpath("deprecated.json")
+        self.cache_file = cache_file or f"{name}-plugin-cache.json"
+
+    def get_current_plugins(self):
+        """To fill the cache"""
+        return get_current_plugins(self)
+
+    def load_plugin_spec(self, config: FetchConfig, plugin_file) -> List[PluginDesc]:
+        plugins = []
+        with open(plugin_file) as f:
+            for line in f:
+                if line.startswith("#"):
+                    continue
+                plugin = parse_plugin_line(config, line)
+                plugins.append(plugin)
+        return plugins
+
+    def generate_nix(self, plugins, outfile: str):
+        '''Returns nothing for now, writes directly to outfile'''
+        raise NotImplementedError()
+
+    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
+        cache: Cache = Cache(self.get_current_plugins(), self.cache_file)
+        _prefetch = functools.partial(prefetch, cache=cache)
+
+        def update() -> dict:
+            plugin_names = self.load_plugin_spec(config, input_file)
+
+            try:
+                pool = Pool(processes=config.proc)
+                results = pool.map(_prefetch, plugin_names)
+            finally:
+                cache.store()
+
+            plugins, redirects = check_results(results)
+
+            self.generate_nix(plugins, outfile)
+
+            return redirects
+
+        return update
+
+
+    @property
+    def attr_path(self):
+        return self.name + "Plugins"
+
+    def get_drv_name(self, name: str):
+        return self.attr_path + "." + name
+
+    def rewrite_input(self, *args, **kwargs):
+        return rewrite_input(*args, **kwargs)
+
+    def create_parser(self):
+        parser = argparse.ArgumentParser(
+            description=(f"""
+                Updates nix derivations for {self.name} plugins.\n
+                By default from {self.default_in} to {self.default_out}"""
+            )
+        )
+        parser.add_argument(
+            "--add",
+            dest="add_plugins",
+            default=[],
+            action="append",
+            help=f"Plugin to add to {self.attr_path} from Github in the form owner/repo",
+        )
+        parser.add_argument(
+            "--input-names",
+            "-i",
+            dest="input_file",
+            default=self.default_in,
+            help="A list of plugins in the form owner/repo",
+        )
+        parser.add_argument(
+            "--out",
+            "-o",
+            dest="outfile",
+            default=self.default_out,
+            help="Filename to save generated nix code",
+        )
+        parser.add_argument(
+            "--proc",
+            "-p",
+            dest="proc",
+            type=int,
+            default=30,
+            help="Number of concurrent processes to spawn. Setting --github-token allows higher values.",
+        )
+        parser.add_argument(
+            "--github-token",
+            "-t",
+            type=str,
+            default=os.getenv("GITHUB_API_TOKEN"),
+            help="""Allows to set --proc to higher values.
+            Uses GITHUB_API_TOKEN environment variables as the default value.""",
+        )
+        parser.add_argument(
+            "--no-commit", "-n", action="store_true", default=False,
+            help="Whether to autocommit changes"
+        )
+        parser.add_argument(
+            "--debug", "-d", choices=LOG_LEVELS.keys(),
+            default=logging.getLevelName(logging.WARN),
+            help="Adjust log level"
+        )
+        return parser
+
+
+
+class CleanEnvironment(object):
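+    """Evaluate with NIX_PATH pinned to the local nixpkgs checkout and an
+    empty NIXPKGS_CONFIG, so user configuration cannot leak into evaluations."""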
+    def __enter__(self) -> None:
+        self.old_environ = os.environ.copy()
+        local_pkgs = str(Path(__file__).parent.parent.parent)
+        os.environ["NIX_PATH"] = f"localpkgs={local_pkgs}"
+        self.empty_config = NamedTemporaryFile()
+        self.empty_config.write(b"{}")
+        self.empty_config.flush()
+        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name
+
+    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
+        os.environ.update(self.old_environ)
+        self.empty_config.close()
+
+
+def get_current_plugins(editor: Editor) -> List[Plugin]:
+    with CleanEnvironment():
+        cmd = ["nix", "eval", "--extra-experimental-features", "nix-command", "--impure", "--json", "--expr", editor.get_plugins]
+        log.debug("Running command %s", cmd)
+        out = subprocess.check_output(cmd)
+    data = json.loads(out)
+    plugins = []
+    for name, attr in data.items():
+        p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
+        plugins.append(p)
+    return plugins
+
+
+def prefetch_plugin(
+    p: PluginDesc,
+    cache: "Optional[Cache]" = None,
+) -> Tuple[Plugin, Dict[str, str]]:
+    repo, branch, alias = p.repo, p.branch, p.alias
+    name = alias or p.repo.name
+    commit = None
+    log.info(f"Fetching last commit for plugin {name} from {repo.uri}@{branch}")
+    commit, date = repo.latest_commit()
+    cached_plugin = cache[commit] if cache else None
+    if cached_plugin is not None:
+        log.debug("Cache hit !")
+        cached_plugin.name = name
+        cached_plugin.date = date
+        return cached_plugin, repo.redirect
+
+    has_submodules = repo.has_submodules()
+    print(f"prefetch {name}")
+    sha256 = repo.prefetch(commit)
+
+    return (
+        Plugin(name, commit, has_submodules, sha256, date=date),
+        repo.redirect,
+    )
+
+
+def fetch_plugin_from_pluginline(config: FetchConfig, plugin_line: str) -> Plugin:
+    plugin, _ = prefetch_plugin(parse_plugin_line(config, plugin_line))
+    return plugin
+
+
+def print_download_error(plugin: str, ex: Exception):
+    print(f"{plugin}: {ex}", file=sys.stderr)
+    ex_traceback = ex.__traceback__
+    tb_lines = [
+        line.rstrip("\n")
+        for line in traceback.format_exception(ex.__class__, ex, ex_traceback)
+    ]
+    print("\n".join(tb_lines))
+
+
+def check_results(
+    results: List[Tuple[PluginDesc, Union[Exception, Plugin], Dict[str, str]]]
+) -> Tuple[List[Tuple[PluginDesc, Plugin]], Dict[str, str]]:
+    '''Split prefetch results into (desc, plugin) pairs and redirects; exit on download failures.'''
+    failures: List[Tuple[str, Exception]] = []
+    plugins = []
+    redirects: Dict[str, str] = {}
+    for (pdesc, result, redirect) in results:
+        if isinstance(result, Exception):
+            failures.append((pdesc.name, result))
+        else:
+            plugins.append((pdesc, result))
+            redirects.update(redirect)
+
+    print(f"{len(results) - len(failures)} plugins were checked", end="")
+    if len(failures) == 0:
+        print()
+        return plugins, redirects
+    else:
+        print(f", {len(failures)} plugin(s) could not be downloaded:\n")
+
+        for (plugin, exception) in failures:
+            print_download_error(plugin, exception)
+
+        sys.exit(1)
+
+def make_repo(uri, branch, alias) -> Repo:
+    '''Instantiate a Repo with the correct specialization depending on the server (GitHub or plain git)'''
+    # dumb check to see if it's of the form owner/repo (=> github) or https://...
+    res = uri.split('/')
+    if len(res) <= 2:
+        repo = RepoGitHub(res[0], res[1], branch, alias)
+    else:
+        repo = Repo(uri.strip(), branch, alias)
+    return repo
+
+def parse_plugin_line(config: FetchConfig, line: str) -> PluginDesc:
+    branch = "HEAD"
+    alias = None
+    uri = line
+    if " as " in uri:
+        uri, alias = uri.split(" as ")
+        alias = alias.strip()
+    if "@" in uri:
+        uri, branch = uri.split("@")
+
+    repo = make_repo(uri.strip(), branch.strip(), alias)
+    repo.token = config.github_token
+
+    return PluginDesc(repo, branch.strip(), alias)
+
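+# Plugin lines accepted by the parser above (illustrative examples):
+#
+#   owner/repo                          GitHub repository, branch HEAD
+#   owner/repo@develop                  GitHub repository, branch develop
+#   https://example.org/foo.git as bar  plain git repository, aliased to bar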
+
+def get_cache_path(cache_file_name: str) -> Optional[Path]:
+    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
+    if xdg_cache is None:
+        home = os.environ.get("HOME", None)
+        if home is None:
+            return None
+        xdg_cache = str(Path(home, ".cache"))
+
+    return Path(xdg_cache, cache_file_name)
+
+
+class Cache:
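+    """JSON-backed cache of prefetched plugins, keyed by commit hash and
+    stored under $XDG_CACHE_HOME (see get_cache_path above)."""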
+    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
+        self.cache_file = get_cache_path(cache_file_name)
+
+        downloads = {}
+        for plugin in initial_plugins:
+            downloads[plugin.commit] = plugin
+        downloads.update(self.load())
+        self.downloads = downloads
+
+    def load(self) -> Dict[str, Plugin]:
+        if self.cache_file is None or not self.cache_file.exists():
+            return {}
+
+        downloads: Dict[str, Plugin] = {}
+        with open(self.cache_file) as f:
+            data = json.load(f)
+            for attr in data.values():
+                p = Plugin(
+                    attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
+                )
+                downloads[attr["commit"]] = p
+        return downloads
+
+    def store(self) -> None:
+        if self.cache_file is None:
+            return
+
+        os.makedirs(self.cache_file.parent, exist_ok=True)
+        with open(self.cache_file, "w+") as f:
+            data = {}
+            for name, attr in self.downloads.items():
+                data[name] = attr.as_json()
+            json.dump(data, f, indent=4, sort_keys=True)
+
+    def __getitem__(self, key: str) -> Optional[Plugin]:
+        return self.downloads.get(key, None)
+
+    def __setitem__(self, key: str, value: Plugin) -> None:
+        self.downloads[key] = value
+
+
+def prefetch(
+    pluginDesc: PluginDesc, cache: Cache
+) -> Tuple[PluginDesc, Union[Exception, Plugin], dict]:
+    try:
+        plugin, redirect = prefetch_plugin(pluginDesc, cache)
+        cache[plugin.commit] = plugin
+        return (pluginDesc, plugin, redirect)
+    except Exception as e:
+        return (pluginDesc, e, {})
+
+
+def rewrite_input(
+    config: FetchConfig,
+    input_file: Path,
+    deprecated: Path,
+    redirects: Optional[Dict[str, str]] = None,
+    append: Tuple = (),
+):
+    with open(input_file, "r") as f:
+        lines = f.readlines()
+
+    lines.extend(append)
+
+    if redirects:
+        lines = [redirects.get(line, line) for line in lines]
+
+        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
+        with open(deprecated, "r") as f:
+            deprecations = json.load(f)
+        for old, new in redirects.items():
+            old_plugin = fetch_plugin_from_pluginline(config, old)
+            new_plugin = fetch_plugin_from_pluginline(config, new)
+            if old_plugin.normalized_name != new_plugin.normalized_name:
+                deprecations[old_plugin.normalized_name] = {
+                    "new": new_plugin.normalized_name,
+                    "date": cur_date_iso,
+                }
+        with open(deprecated, "w") as f:
+            json.dump(deprecations, f, indent=4, sort_keys=True)
+            f.write("\n")
+
+    lines = sorted(lines, key=str.casefold)
+
+    with open(input_file, "w") as f:
+        f.writelines(lines)
+
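+# A redirect recorded above ends up in deprecated.json as (hypothetical names):
+#
+#   "old-name": { "date": "2021-01-01", "new": "new-name" }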
+
+def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
+    repo.index.add([str(f.resolve()) for f in files])
+
+    if repo.index.diff("HEAD"):
+        print(f'committing to nixpkgs "{message}"')
+        repo.index.commit(message)
+    else:
+        print("no changes in working tree to commit")
+
+
+
+def update_plugins(editor: Editor, args):
+    """The main entry function of this module. All input arguments are grouped in the `Editor`."""
+
+    log.setLevel(LOG_LEVELS[args.debug])
+    log.info("Start updating plugins")
+    fetch_config = FetchConfig(args.proc, args.github_token)
+    update = editor.get_update(args.input_file, args.outfile, fetch_config)
+
+    redirects = update()
+    editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, redirects)
+
+    autocommit = not args.no_commit
+
+    nixpkgs_repo = None
+    if autocommit:
+        nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True)
+        commit(nixpkgs_repo, f"{editor.attr_path}: update", [args.outfile])
+
+    if redirects:
+        update()
+        if autocommit:
+            commit(
+                nixpkgs_repo,
+                f"{editor.attr_path}: resolve github repository redirects",
+                [args.outfile, args.input_file, editor.deprecated],
+            )
+
+    for plugin_line in args.add_plugins:
+        editor.rewrite_input(fetch_config, args.input_file, editor.deprecated, append=(plugin_line + "\n",))
+        update()
+        plugin = fetch_plugin_from_pluginline(fetch_config, plugin_line)
+        if autocommit:
+            commit(
+                nixpkgs_repo,
+                "{drv_name}: init at {version}".format(
+                    drv_name=editor.get_drv_name(plugin.normalized_name),
+                    version=plugin.version
+                ),
+                [args.outfile, args.input_file],
+            )
diff --git a/maintainers/scripts/rebuild-amount.sh b/maintainers/scripts/rebuild-amount.sh
new file mode 100755
index 00000000000..bedd352db5f
--- /dev/null
+++ b/maintainers/scripts/rebuild-amount.sh
@@ -0,0 +1,133 @@
+#!/usr/bin/env bash
+set -e
+
+# --print: avoid dependency on environment
+optPrint=
+if [ "$1" == "--print" ]; then
+    optPrint=true
+    shift
+fi
+
+if [ "$#" != 1 ] && [ "$#" != 2 ]; then
+    cat <<EOF
+    Usage: $0 [--print] from-commit-spec [to-commit-spec]
+        You need to be in a git-controlled nixpkgs tree.
+        The current state of the tree will be used if the second commit is missing.
+
+        Examples:
+          effect of latest commit:
+              $ $0 HEAD^
+              $ $0 --print HEAD^
+          effect of the whole patch series for 'staging' branch:
+              $ $0 origin/staging staging
+EOF
+    exit 1
+fi
+
+# A slightly hacky way to get the config.
+parallel="$(echo 'config.rebuild-amount.parallel or false' | nix-repl . 2>/dev/null \
+            | grep -v '^\(nix-repl.*\)\?$' | tail -n 1 || true)"
+
+echo "Estimating rebuild amount by counting changed Hydra jobs (parallel=${parallel:-unset})."
+
+toRemove=()
+
+cleanup() {
+    rm -rf "${toRemove[@]}"
+}
+trap cleanup EXIT SIGINT SIGQUIT ERR
+
+MKTEMP='mktemp --tmpdir nix-rebuild-amount-XXXXXXXX'
+
+nixexpr() {
+    cat <<EONIX
+        let
+          lib = import $1/lib;
+          hydraJobs = import $1/pkgs/top-level/release.nix
+            # Compromise: accuracy vs. resources needed for evaluation.
+            { supportedSystems = cfg.systems or [ "x86_64-linux" "x86_64-darwin" ]; };
+          cfg = (import $1 {}).config.rebuild-amount or {};
+
+          recurseIntoAttrs = attrs: attrs // { recurseForDerivations = true; };
+
+          # hydraJobs leaves recurseForDerivations as empty attrmaps;
+          # that would break nix-env and we also need to recurse everywhere.
+          tweak = lib.mapAttrs
+            (name: val:
+              if name == "recurseForDerivations" then true
+              else if lib.isAttrs val && val.type or null != "derivation"
+                      then recurseIntoAttrs (tweak val)
+              else val
+            );
+
+          # Some of these contain explicit references to platform(s) we want to avoid;
+          # some even (transitively) depend on ~/.nixpkgs/config.nix (!)
+          blacklist = [
+            "tarball" "metrics" "manual"
+            "darwin-tested" "unstable" "stdenvBootstrapTools"
+            "moduleSystem" "lib-tests" # these just confuse the output
+          ];
+
+        in
+          tweak (builtins.removeAttrs hydraJobs blacklist)
+EONIX
+}
+
+# Output packages in tree $2 that weren't in $1.
+# Changing the output hash or name is taken as a change.
+# Extra nix-env parameters can be in $3
+newPkgs() {
+    # We use files instead of pipes, as running multiple nix-env processes
+    # could eat too much memory for a standard 4GiB machine.
+    local -a list
+    for i in 1 2; do
+        local l="$($MKTEMP)"
+        list[$i]="$l"
+        toRemove+=("$l")
+
+        local expr="$($MKTEMP)"
+        toRemove+=("$expr")
+        nixexpr "${!i}" > "$expr"
+
+        nix-env -f "$expr" -qaP --no-name --out-path --show-trace $3 \
+            | sort > "${list[$i]}" &
+
+        if [ "$parallel" != "true" ]; then
+            wait
+        fi
+    done
+
+    wait
+    comm -13 "${list[@]}"
+}
+
+# Prepare nixpkgs trees.
+declare -a tree
+for i in 1 2; do
+    if [ -n "${!i}" ]; then # use the given commit
+        dir="$($MKTEMP -d)"
+        tree[$i]="$dir"
+        toRemove+=("$dir")
+
+        git clone --shared --no-checkout --quiet . "${tree[$i]}"
+        (cd "${tree[$i]}" && git checkout --quiet "${!i}")
+    else #use the current tree
+        tree[$i]="$(pwd)"
+    fi
+done
+
+newlist="$($MKTEMP)"
+toRemove+=("$newlist")
+# Notes:
+#    - the evaluation is done on x86_64-linux, like on Hydra.
+#    - using $newlist file so that newPkgs() isn't in a sub-shell (because of toRemove)
+newPkgs "${tree[1]}" "${tree[2]}" '--argstr system "x86_64-linux"' > "$newlist"
+
+# Hacky: keep only the last word of each attribute path and sort.
+sed -n 's/\([^. ]*\.\)*\([^. ]*\) .*$/\2/p' < "$newlist" \
+    | sort | uniq -c
+
+if [ -n "$optPrint" ]; then
+    echo
+    cat "$newlist"
+fi
diff --git a/maintainers/scripts/remove-old-aliases.py b/maintainers/scripts/remove-old-aliases.py
new file mode 100755
index 00000000000..5d9398feaa2
--- /dev/null
+++ b/maintainers/scripts/remove-old-aliases.py
@@ -0,0 +1,202 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i python3 -p "python3.withPackages(ps: with ps; [ ])" nix
+"""
+A program to remove old aliases or convert old aliases to throws
+Example usage:
+./maintainers/scripts/remove-old-aliases.py --year 2018 --file ./pkgs/top-level/aliases.nix
+
+Check this file with mypy after every change!
+$ mypy --strict maintainers/scripts/remove-old-aliases.py
+"""
+import argparse
+import shutil
+import subprocess
+from datetime import date as datetimedate
+from datetime import datetime
+from pathlib import Path
+
+
+def process_args() -> argparse.Namespace:
+    """process args"""
+    arg_parser = argparse.ArgumentParser()
+    arg_parser.add_argument(
+        "--year", required=True, type=int, help="operate on aliases older than $year"
+    )
+    arg_parser.add_argument(
+        "--month",
+        type=int,
+        default=1,
+        help="operate on aliases older than $year-$month",
+    )
+    arg_parser.add_argument("--file", required=True, type=Path, help="alias file")
+    arg_parser.add_argument(
+        "--dry-run", action="store_true", help="don't modify files, only print results"
+    )
+    return arg_parser.parse_args()
+
+
+def get_date_lists(
+    txt: list[str], cutoffdate: datetimedate
+) -> tuple[list[str], list[str], list[str]]:
+    """get a list of lines in which the date is older than $cutoffdate"""
+    date_older_list: list[str] = []
+    date_older_throw_list: list[str] = []
+    date_sep_line_list: list[str] = []
+
+    for lineno, line in enumerate(txt, start=1):
+        line = line.rstrip()
+        my_date = None
+        for string in line.split():
+            string = string.strip(":")
+            try:
+                # strip ':' incase there is a string like 2019-11-01:
+                my_date = datetime.strptime(string, "%Y-%m-%d").date()
+            except ValueError:
+                try:
+                    my_date = datetime.strptime(string, "%Y-%m").date()
+                except ValueError:
+                    continue
+
+        if my_date is None or my_date > cutoffdate:
+            continue
+
+        if "=" not in line:
+            date_sep_line_list.append(f"{lineno} {line}")
+        # 'if' lines could be complicated
+        elif "if " in line and "if =" not in line:
+            print(f"RESOLVE MANUALLY {line}")
+        elif "throw" in line:
+            date_older_throw_list.append(line)
+        else:
+            date_older_list.append(line)
+
+    return (
+        date_older_list,
+        date_sep_line_list,
+        date_older_throw_list,
+    )
+
+
+def convert_to_throw(date_older_list: list[str]) -> list[tuple[str, str]]:
+    """convert a list of lines to throws"""
+    converted_list = []
+    for line in date_older_list.copy():
+        indent: str = " " * (len(line) - len(line.lstrip()))
+        before_equal = ""
+        after_equal = ""
+        try:
+            before_equal, after_equal = (x.strip() for x in line.split("=", maxsplit=2))
+        except ValueError as err:
+            print(err, line, "\n")
+            date_older_list.remove(line)
+            continue
+
+        alias = before_equal.strip()
+        after_equal_list = [x.strip(";:") for x in after_equal.split()]
+
+        converted = (
+            f"{indent}{alias} = throw \"'{alias}' has been renamed to/replaced by"
+            f" '{after_equal_list.pop(0)}'\";"
+            f' # Converted to throw {datetime.today().strftime("%Y-%m-%d")}'
+        )
+        converted_list.append((line, converted))
+
+    return converted_list
+
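+# Illustrative before/after for the conversion above (hypothetical alias and date):
+#
+#   foo = bar;  # added 2017-01-01
+#
+# becomes
+#
+#   foo = throw "'foo' has been renamed to/replaced by 'bar'"; # Converted to throw 2021-06-01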
+
+def generate_text_to_write(
+    txt: list[str],
+    date_older_list: list[str],
+    converted_to_throw: list[tuple[str, str]],
+    date_older_throw_list: list[str],
+) -> list[str]:
+    """generate a list of text to be written to the aliasfile"""
+    text_to_write: list[str] = []
+    for line in txt:
+        text_to_append: str = ""
+        if converted_to_throw:
+            for tupl in converted_to_throw:
+                if line == tupl[0]:
+                    text_to_append = f"{tupl[1]}\n"
+        if line not in date_older_list and line not in date_older_throw_list:
+            text_to_append = f"{line}\n"
+        if text_to_append:
+            text_to_write.append(text_to_append)
+
+    return text_to_write
+
+
+def write_file(
+    aliasfile: Path,
+    text_to_write: list[str],
+) -> None:
+    """write file"""
+    temp_aliasfile = Path(f"{aliasfile}.raliases")
+    with open(temp_aliasfile, "w", encoding="utf-8") as far:
+        for line in text_to_write:
+            far.write(line)
+    print("\nChecking the syntax of the new aliasfile")
+    try:
+        subprocess.run(
+            ["nix-instantiate", "--eval", temp_aliasfile],
+            check=True,
+            stdout=subprocess.DEVNULL,
+        )
+    except subprocess.CalledProcessError:
+        print(
+            "\nSyntax check failed,",
+            "there may have been a line which only has\n"
+            'aliasname = "reason why";\n'
+            "when it should have been\n"
+            'aliasname = throw "reason why";',
+        )
+        temp_aliasfile.unlink()
+        return
+    shutil.move(f"{aliasfile}.raliases", aliasfile)
+    print(f"{aliasfile} modified! please verify with 'git diff'.")
+
+
+def main() -> None:
+    """main"""
+    args = process_args()
+
+    aliasfile = Path(args.file).absolute()
+    cutoffdate = (datetime.strptime(f"{args.year}-{args.month}-01", "%Y-%m-%d")).date()
+
+    txt: list[str] = (aliasfile.read_text(encoding="utf-8")).splitlines()
+
+    date_older_list: list[str] = []
+    date_sep_line_list: list[str] = []
+    date_older_throw_list: list[str] = []
+
+    date_older_list, date_sep_line_list, date_older_throw_list = get_date_lists(
+        txt, cutoffdate
+    )
+
+    converted_to_throw: list[tuple[str, str]] = []
+    converted_to_throw = convert_to_throw(date_older_list)
+
+    if date_older_list:
+        print(" Will be converted to throws. ".center(100, "-"))
+        for l_n in date_older_list:
+            print(l_n)
+
+    if date_older_throw_list:
+        print(" Will be removed. ".center(100, "-"))
+        for l_n in date_older_throw_list:
+            print(l_n)
+
+    if date_sep_line_list:
+        print(" On separate line, resolve manually. ".center(100, "-"))
+        for l_n in date_sep_line_list:
+            print(l_n)
+
+    if not args.dry_run:
+        text_to_write = generate_text_to_write(
+            txt, date_older_list, converted_to_throw, date_older_throw_list
+        )
+        write_file(aliasfile, text_to_write)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/maintainers/scripts/update-channel-branches.sh b/maintainers/scripts/update-channel-branches.sh
new file mode 100755
index 00000000000..d65cf3ec5f6
--- /dev/null
+++ b/maintainers/scripts/update-channel-branches.sh
@@ -0,0 +1,112 @@
+#!/bin/sh
+set -e
+
+: ${NIXOS_CHANNELS:=https://nixos.org/channels/}
+: ${CHANNELS_NAMESPACE:=refs/heads/channels/}
+
+# List all channels which are currently in the repository which we would
+# have to remove if they are not found again.
+deadChannels=$(git for-each-ref --format="%(refname)" "$CHANNELS_NAMESPACE")
+
+updateRef() {
+    local channelName=$1
+    local newRev=$2
+
+    # if the inputs are not valid, then we do not update any branch.
+    test -z "$newRev" -o -z "$channelName" && return;
+
+    # Update the local refs/heads/channels/* branches to be in-sync with the
+    # channel references.
+    local branch=$CHANNELS_NAMESPACE$channelName
+    oldRev=$(git rev-parse --short "$branch" 2>/dev/null || true)
+    if test "$oldRev" != "$newRev"; then
+        if git update-ref "$branch" "$newRev" 2>/dev/null; then
+            if test -z "$oldRev"; then
+                echo " * [new branch]      $newRev           -> ${branch#refs/heads/}"
+            else
+                echo "                     $oldRev..$newRev  -> ${branch#refs/heads/}"
+            fi
+        else
+            if test -z "$oldRev"; then
+                echo " * [missing rev]     $newRev           -> ${branch#refs/heads/}"
+            else
+                echo "   [missing rev]     $oldRev..$newRev  -> ${branch#refs/heads/}"
+            fi
+        fi
+    fi
+
+    # Filter out the current channel from the list of dead channels.
+    deadChannels=$(grep -v "$CHANNELS_NAMESPACE$channelName" <<EOF
+$deadChannels
+EOF
+) ||true
+}
+
+# Find the name of all channels which are listed in the directory.
+echo "Fetching channels from $NIXOS_CHANNELS:"
+for channelName in : $(curl -s "$NIXOS_CHANNELS" | sed -n '/folder/ { s,.*href=",,; s,/".*,,; p }'); do
+    test "$channelName" = : && continue;
+
+    # Do not follow redirections, such that we can extract the
+    # short-changeset from the name of the directory where we are
+    # redirected to.
+    sha1=$(curl -sI "$NIXOS_CHANNELS$channelName" | sed -n '/Location/ { s,.*\.\([a-f0-9]*\)[ \r]*$,\1,; p; }')
+
+    updateRef "remotes/$channelName" "$sha1"
+done
+
+echo "Fetching channels from nixos-version:"
+if currentSystem=$(nixos-version 2>/dev/null); then
+    # If the system is entirely build from a custom nixpkgs version,
+    # then the version is not annotated in git version. This sed
+    # expression is basically matching that the expressions end with
+    # ".<sha1> (Name)" to extract the sha1.
+    sha1=$(echo "$currentSystem" | sed -n 's,^.*\.\([a-f0-9]*\) *(.*)$,\1,; T skip; p; :skip;')
+
+    updateRef current-system "$sha1"
+fi
+
+echo "Fetching channels from $HOME/.nix-defexpr:"
+for revFile in : $(find -L "$HOME/.nix-defexpr/" -maxdepth 4 -name svn-revision); do
+    test "$revFile" = : && continue;
+
+    # Deconstruct a path into its channel name, for example:
+    #
+    #   /home/luke/.nix-defexpr/channels_root/nixos/nixpkgs/svn-revision
+    #     channelName = root/nixos
+    #
+    #   /home/luke/.nix-defexpr/channels/nixpkgs/svn-revision
+    #     channelName = nixpkgs
+    #
+    user=${revFile#*.nix-defexpr/channels}
+    repo=${user#*/}
+    repo=${repo%%/*}
+    user=${user%%/*}
+    user=${user#_}
+    test -z "$user" && user=$USER
+    channelName="$user${user:+/}$repo"
+
+    sha1=$(sed -n 's,^.*\.\([a-f0-9]*\)$,\1,; T skip; p; :skip;' "$revFile")
+
+    updateRef "$channelName" "$sha1"
+done
+
+# Suggest to remove channel branches which are no longer found by this
+# script. This is to handle the cases where a local/remote channel
+# disappear. We should not attempt to remove manually any branches, as they
+# might be user branches.
+if test -n "$deadChannels"; then
+
+    echo "
+Some old channel branches are still in your repository, if you
+want to remove them, run the following command(s):
+"
+
+    while read branch; do
+        echo "    git update-ref -d $branch"
+    done <<EOF
+$deadChannels
+EOF
+
+    echo
+fi
diff --git a/maintainers/scripts/update-luarocks-packages b/maintainers/scripts/update-luarocks-packages
new file mode 100755
index 00000000000..73a233c5f10
--- /dev/null
+++ b/maintainers/scripts/update-luarocks-packages
@@ -0,0 +1,218 @@
+#!/usr/bin/env nix-shell
+#!nix-shell update-luarocks-shell.nix -i python3
+
+# format:
+# $ nix run nixpkgs.python3Packages.black -c black update.py
+# type-check:
+# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
+# linted:
+# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265,E402 update.py
+
+import inspect
+import os
+import tempfile
+import shutil
+from dataclasses import dataclass
+import subprocess
+import csv
+import logging
+import textwrap
+from multiprocessing.dummy import Pool
+
+from typing import List, Tuple, Optional
+from pathlib import Path
+
+log = logging.getLogger()
+log.addHandler(logging.StreamHandler())
+
+ROOT = Path(os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))).parent.parent # type: ignore
+from pluginupdate import Editor, update_plugins, FetchConfig, CleanEnvironment
+
+PKG_LIST="maintainers/scripts/luarocks-packages.csv"
+TMP_FILE="$(mktemp)"
+GENERATED_NIXFILE="pkgs/development/lua-modules/generated-packages.nix"
+LUAROCKS_CONFIG="$NIXPKGS_PATH/maintainers/scripts/luarocks-config.lua"
+
+HEADER = """/* {GENERATED_NIXFILE} is an auto-generated file -- DO NOT EDIT!
+Regenerate it with:
+nixpkgs$ ./maintainers/scripts/update-luarocks-packages
+
+You can customize the generated packages in pkgs/development/lua-modules/overrides.nix
+*/
+""".format(GENERATED_NIXFILE=GENERATED_NIXFILE)
+
+FOOTER="""
+}
+/* GENERATED - do not edit this file */
+"""
+
+@dataclass
+class LuaPlugin:
+    name: str
+    '''Name of the plugin, as seen on luarocks.org'''
+    src: str
+    '''address to the git repository'''
+    ref: Optional[str]
+    '''git reference (branch name/tag)'''
+    version: Optional[str]
+    '''Set it to pin a package '''
+    server: Optional[str]
+    '''luarocks.org registers packages under different manifests.
+    Its value can be 'http://luarocks.org/dev'
+    '''
+    luaversion: Optional[str]
+    '''Attribute of the lua interpreter if a package is available only for a specific lua version'''
+    maintainers: Optional[str]
+    ''' Optional string listing maintainers separated by spaces'''
+
+    @property
+    def normalized_name(self) -> str:
+        return self.name.replace(".", "-")
+
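+# An illustrative luarocks-packages.csv row matching the fields above
+# (hypothetical values):
+#
+#   name,src,ref,version,server,luaversion,maintainers
+#   luafilesystem,,,1.8.0-1,,,teto
+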
+# TODO: rename Editor to LangUpdate/EcosystemUpdater
+class LuaEditor(Editor):
+    def get_current_plugins(self):
+        return []
+
+    def load_plugin_spec(self, input_file) -> List[LuaPlugin]:
+        luaPackages = []
+        csvfilename=input_file
+        log.info("Loading package descriptions from %s", csvfilename)
+
+        with open(csvfilename, newline='') as csvfile:
+            reader = csv.DictReader(csvfile,)
+            for row in reader:
+                # fields: name,src,ref,version,server,luaversion,maintainers
+                plugin = LuaPlugin(**row)
+                luaPackages.append(plugin)
+        return luaPackages
+
+    def generate_nix(
+        self,
+        results: List[Tuple[LuaPlugin, str]],
+        outfilename: str
+        ):
+
+        with tempfile.NamedTemporaryFile("w+") as f:
+            f.write(HEADER)
+            header2 = textwrap.dedent(
+            # header2 = inspect.cleandoc(
+            """
+                { self, stdenv, lib, fetchurl, fetchgit, callPackage, ... } @ args:
+                final: prev:
+                {
+            """)
+            f.write(header2)
+            for (plugin, nix_expr) in results:
+                f.write(f"{plugin.normalized_name} = {nix_expr}")
+            f.write(FOOTER)
+            f.flush()
+
+            # if everything went fine, move the generated file to its destination
+            # using copy since move doesn't work across disks
+            shutil.copy(f.name, outfilename)
+
+        print(f"updated {outfilename}")
+
+    @property
+    def attr_path(self):
+        return "luaPackages"
+
+    def get_update(self, input_file: str, outfile: str, config: FetchConfig):
+        _prefetch = generate_pkg_nix
+
+        def update() -> dict:
+            plugin_specs = self.load_plugin_spec(input_file)
+            sorted_plugin_specs = sorted(plugin_specs, key=lambda v: v.name.lower())
+
+            try:
+                pool = Pool(processes=config.proc)
+                results = pool.map(_prefetch, sorted_plugin_specs)
+            finally:
+                pass
+
+            self.generate_nix(results, outfile)
+
+            redirects = {}
+            return redirects
+
+        return update
+
+    def rewrite_input(self, input_file: str, *args, **kwargs):
+        # vim plugin reads the file before update but that shouldn't be our case
+        # not implemented yet
+        # fieldnames = ['name', 'server', 'version', 'luaversion', 'maintainers']
+        # input_file = "toto.csv"
+        # with open(input_file, newline='') as csvfile:
+        #     writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
+        #     writer.writeheader()
+        #     for row in reader:
+        #         # name,server,version,luaversion,maintainers
+        #         plugin = LuaPlugin(**row)
+        #         luaPackages.append(plugin)
+        pass
+
+def generate_pkg_nix(plug: LuaPlugin):
+    '''
+    Generate nix expression for a luarocks package
+    Our cache key associates "p.name-p.version" to its rockspec
+    '''
+    log.debug("Generating nix expression for %s", plug.name)
+    cmd = [ "luarocks", "nix"]
+
+
+    if plug.maintainers:
+        cmd.append(f"--maintainers={plug.maintainers}")
+
+    # updates plugin directly from its repository
+    print("server: [%s]" % plug.server)
+    # if plug.server == "src":
+    if plug.src != "":
+        if plug.src is None:
+            msg = "src must be set when 'version' is set to \"src\" for package %s" % plug.name
+            log.error(msg)
+            raise RuntimeError(msg)
+        log.debug("Updating from source %s", plug.src)
+        cmd.append(plug.src)
+    # update the plugin from luarocks
+    else:
+        cmd.append(plug.name)
+        if plug.version and plug.version != "src":
+
+            cmd.append(plug.version)
+
+    if plug.server != "src" and plug.server:
+        cmd.append(f"--only-server={plug.server}")
+
+    if plug.luaversion:
+        with CleanEnvironment():
+            local_pkgs = str(ROOT.resolve())
+            cmd2 = ["nix-build", "--no-out-link", local_pkgs, "-A", f"{plug.luaversion}"]
+
+            log.debug("running %s", ' '.join(cmd2))
+            lua_drv_path=subprocess.check_output(cmd2, text=True).strip()
+            cmd.append(f"--lua-dir={lua_drv_path}/bin")
+
+    log.debug("running %s", ' '.join(cmd))
+    output = subprocess.check_output(cmd, text=True)
+    output = "callPackage(" + output.strip() + ") {};\n\n"
+    return (plug, output)
+
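+# Each entry returned above is already wrapped as "callPackage(<expr>) {};",
+# ready to be spliced into generated-packages.nix by generate_nix.
+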
+def main():
+
+    editor = LuaEditor("lua", ROOT, '',
+        default_in = ROOT.joinpath(PKG_LIST),
+        default_out = ROOT.joinpath(GENERATED_NIXFILE)
+        )
+
+    parser = editor.create_parser()
+    args = parser.parse_args()
+
+    update_plugins(editor, args)
+
+
+if __name__ == "__main__":
+
+    main()
+
+#  vim: set ft=python noet fdm=manual fenc=utf-8 ff=unix sts=0 sw=4 ts=4 :
diff --git a/maintainers/scripts/update-luarocks-shell.nix b/maintainers/scripts/update-luarocks-shell.nix
new file mode 100644
index 00000000000..a58674fca8d
--- /dev/null
+++ b/maintainers/scripts/update-luarocks-shell.nix
@@ -0,0 +1,13 @@
+{ nixpkgs ? import ../.. { }
+}:
+with nixpkgs;
+let
+  pyEnv = python3.withPackages(ps: [ ps.GitPython ]);
+in
+mkShell {
+  packages = [
+    pyEnv
+    luarocks-nix
+    nix-prefetch-scripts
+  ];
+}
diff --git a/maintainers/scripts/update-python-libraries b/maintainers/scripts/update-python-libraries
new file mode 100755
index 00000000000..4a6024c4038
--- /dev/null
+++ b/maintainers/scripts/update-python-libraries
@@ -0,0 +1,5 @@
+#!/bin/sh
+build=$(nix-build -E "with import (fetchTarball \"channel:nixpkgs-unstable\") {}; python3.withPackages(ps: with ps; [ packaging requests toolz ])")
+python=${build}/bin/python
+exec ${python} pkgs/development/interpreters/python/update-python-libraries/update-python-libraries.py "$@"
+
diff --git a/maintainers/scripts/update-redirected-urls.sh b/maintainers/scripts/update-redirected-urls.sh
new file mode 100755
index 00000000000..5ffa9aca5f6
--- /dev/null
+++ b/maintainers/scripts/update-redirected-urls.sh
@@ -0,0 +1,12 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -p bash curl ripgrep jq -i bash
+
+set -euxo pipefail
+
+# Possibly also handle non-HTTPS redirects, but there were none of those when I
+# first made this script. Feel free to add that when it becomes relevant.
+curl https://repology.org/api/v1/repository/nix_unstable/problems \
+   | jq -r '.[] | select(.type == "homepage_permanent_https_redirect") | .data | "s@\(.url)@\(.target)@"' \
+   | sort | uniq | tee script.sed
+find . -name '*.nix' | xargs -P4 -- sed -f script.sed -i
+rm script.sed
diff --git a/maintainers/scripts/update-ruby-packages b/maintainers/scripts/update-ruby-packages
new file mode 100755
index 00000000000..60da1a1b593
--- /dev/null
+++ b/maintainers/scripts/update-ruby-packages
@@ -0,0 +1,16 @@
+#!/usr/bin/env nix-shell
+#!nix-shell -i bash -p bundler bundix
+
+set -euf -o pipefail
+
+(
+  cd pkgs/development/ruby-modules/with-packages
+  rm -f gemset.nix Gemfile.lock
+  # Since bundler 2+, the lock command generates a platform-dependent
+  # Gemfile.lock, hence causing to bundix to generate a gemset tied to the
+  # platform from where it was executed.
+  BUNDLE_FORCE_RUBY_PLATFORM=1 bundle lock
+  bundix
+  mv gemset.nix ../../../top-level/ruby-packages.nix
+  rm -f Gemfile.lock
+)
diff --git a/maintainers/scripts/update.nix b/maintainers/scripts/update.nix
new file mode 100755
index 00000000000..1a2f06c73a2
--- /dev/null
+++ b/maintainers/scripts/update.nix
@@ -0,0 +1,212 @@
+{ package ? null
+, maintainer ? null
+, predicate ? null
+, path ? null
+, max-workers ? null
+, include-overlays ? false
+, keep-going ? null
+, commit ? null
+}:
+
+# TODO: add assert statements
+
+let
+  pkgs = import ./../../default.nix (
+    if include-overlays == false then
+      { overlays = []; }
+    else if include-overlays == true then
+      { } # Let Nixpkgs include overlays impurely.
+    else { overlays = include-overlays; }
+  );
+
+  inherit (pkgs) lib;
+
+  /* Remove duplicate elements from the list based on some extracted value. O(n^2) complexity.
+   */
+  nubOn = f: list:
+    if list == [] then
+      []
+    else
+      let
+        x = lib.head list;
+        xs = lib.filter (p: f x != f p) (lib.drop 1 list);
+      in
+        [x] ++ nubOn f xs;
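+  # For example (illustrative):
+  #   nubOn (x: x.name) [ { name = "a"; v = 1; } { name = "a"; v = 2; } ]
+  # evaluates to [ { name = "a"; v = 1; } ].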
+
+  /* Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
+
+    Type: packagesWithPath :: AttrPath → (AttrPath → derivation → bool) → AttrSet → List<AttrSet{attrPath :: str; package :: derivation; }>
+          AttrPath :: [str]
+
+    The packages will be returned as a list of named pairs comprising of:
+      - attrPath: stringified attribute path (based on `rootPath`)
+      - package: corresponding derivation
+   */
+  packagesWithPath = rootPath: cond: pkgs:
+    let
+      packagesWithPathInner = path: pathContent:
+        let
+          result = builtins.tryEval pathContent;
+
+          dedupResults = lst: nubOn ({ package, attrPath }: package.updateScript) (lib.concatLists lst);
+        in
+          if result.success then
+            let
+              evaluatedPathContent = result.value;
+            in
+              if lib.isDerivation evaluatedPathContent then
+                lib.optional (cond path evaluatedPathContent) { attrPath = lib.concatStringsSep "." path; package = evaluatedPathContent; }
+              else if lib.isAttrs evaluatedPathContent then
+                # If user explicitly points to an attrSet or it is marked for recursion, we recur.
+                if path == rootPath || evaluatedPathContent.recurseForDerivations or false || evaluatedPathContent.recurseForRelease or false then
+                  dedupResults (lib.mapAttrsToList (name: elem: packagesWithPathInner (path ++ [name]) elem) evaluatedPathContent)
+                else []
+              else []
+          else [];
+    in
+      packagesWithPathInner rootPath pkgs;
+
+  /* Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
+   */
+  packagesWith = packagesWithPath [];
+
+  /* Recursively find all packages in `pkgs` with updateScript matching given predicate.
+   */
+  packagesWithUpdateScriptMatchingPredicate = cond:
+    packagesWith (path: pkg: builtins.hasAttr "updateScript" pkg && cond path pkg);
+
+  /* Recursively find all packages in `pkgs` with updateScript by given maintainer.
+   */
+  packagesWithUpdateScriptAndMaintainer = maintainer':
+    let
+      maintainer =
+        if ! builtins.hasAttr maintainer' lib.maintainers then
+          builtins.throw "Maintainer with name `${maintainer'} does not exist in `maintainers/maintainer-list.nix`."
+        else
+          builtins.getAttr maintainer' lib.maintainers;
+    in
+      packagesWithUpdateScriptMatchingPredicate (path: pkg:
+                         (if builtins.hasAttr "maintainers" pkg.meta
+                           then (if builtins.isList pkg.meta.maintainers
+                                   then builtins.elem maintainer pkg.meta.maintainers
+                                   else maintainer == pkg.meta.maintainers
+                                )
+                           else false
+                         )
+                   );
+
+  /* Recursively find all packages under `path` in `pkgs` with updateScript.
+   */
+  packagesWithUpdateScript = path: pkgs:
+    let
+      prefix = lib.splitString "." path;
+      pathContent = lib.attrByPath prefix null pkgs;
+    in
+      if pathContent == null then
+        builtins.throw "Attribute path `${path}` does not exist."
+      else
+        packagesWithPath prefix (path: pkg: builtins.hasAttr "updateScript" pkg)
+                       pathContent;
+
+  /* Find a package under `path` in `pkgs` and require that it has an updateScript.
+   */
+  packageByName = path: pkgs:
+    let
+        package = lib.attrByPath (lib.splitString "." path) null pkgs;
+    in
+      if package == null then
+        builtins.throw "Package with an attribute name `${path}` does not exist."
+      else if ! builtins.hasAttr "updateScript" package then
+        builtins.throw "Package with an attribute name `${path}` does not have a `passthru.updateScript` attribute defined."
+      else
+        { attrPath = path; inherit package; };
+
+  /* List of packages matched based on the CLI arguments.
+   */
+  packages =
+    if package != null then
+      [ (packageByName package pkgs) ]
+    else if predicate != null then
+      packagesWithUpdateScriptMatchingPredicate predicate pkgs
+    else if maintainer != null then
+      packagesWithUpdateScriptAndMaintainer maintainer pkgs
+    else if path != null then
+      packagesWithUpdateScript path pkgs
+    else
+      builtins.throw "No arguments provided.\n\n${helpText}";
+
+  helpText = ''
+    Please run:
+
+        % nix-shell maintainers/scripts/update.nix --argstr maintainer garbas
+
+    to run all update scripts for all packages that list \`garbas\` as a maintainer
+    and have \`updateScript\` defined, or:
+
+        % nix-shell maintainers/scripts/update.nix --argstr package gnome.nautilus
+
+    to run the update script for a specific package, or
+
+        % nix-shell maintainers/scripts/update.nix --arg predicate '(path: pkg: pkg.updateScript.name or null == "gnome-update-script")'
+
+    to run update scripts for all packages matching the given predicate, or
+
+        % nix-shell maintainers/scripts/update.nix --argstr path gnome
+
+    to run update scripts for all packages under an attribute path.
+
+    You can also add
+
+        --argstr max-workers 8
+
+    to increase the number of parallel jobs, or
+
+        --argstr keep-going true
+
+    to continue running when a single update fails.
+
+    You can also make the updater automatically commit changes on your behalf
+    for updateScripts that support it by adding
+
+        --argstr commit true
+  '';
+
+  /* Transform a matched package into an object for update.py.
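+
+     The result looks roughly like this (values are hypothetical):
+
+       {
+         name = "hello-2.10";
+         pname = "hello";
+         oldVersion = "2.10";
+         updateScript = [ "/nix/store/...-update.sh" ];
+         supportedFeatures = [ ];
+         attrPath = "hello";
+       }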
+   */
+  packageData = { package, attrPath }: {
+    name = package.name;
+    pname = lib.getName package;
+    oldVersion = lib.getVersion package;
+    updateScript = map builtins.toString (lib.toList (package.updateScript.command or package.updateScript));
+    supportedFeatures = package.updateScript.supportedFeatures or [];
+    attrPath = package.updateScript.attrPath or attrPath;
+  };
+
+  /* JSON file with data for update.py.
+   */
+  packagesJson = pkgs.writeText "packages.json" (builtins.toJSON (map packageData packages));
+
+  optionalArgs =
+    lib.optional (max-workers != null) "--max-workers=${max-workers}"
+    ++ lib.optional (keep-going == "true") "--keep-going"
+    ++ lib.optional (commit == "true") "--commit";
+
+  args = [ packagesJson ] ++ optionalArgs;
+
+in pkgs.stdenv.mkDerivation {
+  name = "nixpkgs-update-script";
+  buildCommand = ''
+    echo ""
+    echo "----------------------------------------------------------------"
+    echo ""
+    echo "Not possible to update packages using \`nix-build\`"
+    echo ""
+    echo "${helpText}"
+    echo "----------------------------------------------------------------"
+    exit 1
+  '';
+  shellHook = ''
+    unset shellHook # do not contaminate nested shells
+    exec ${pkgs.python3.interpreter} ${./update.py} ${builtins.concatStringsSep " " args}
+  '';
+}
diff --git a/maintainers/scripts/update.py b/maintainers/scripts/update.py
new file mode 100644
index 00000000000..07e0b5c6830
--- /dev/null
+++ b/maintainers/scripts/update.py
@@ -0,0 +1,229 @@
+from __future__ import annotations
+from typing import Dict, Generator, List, Optional, Tuple
+import argparse
+import asyncio
+import contextlib
+import json
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+class CalledProcessError(Exception):
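+    # Raised by check_subprocess when a command exits non-zero; carries the
+    # failed process so callers can inspect its stderr.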
+    process: asyncio.subprocess.Process
+
+def eprint(*args, **kwargs):
+    print(*args, file=sys.stderr, **kwargs)
+
+async def check_subprocess(*args, **kwargs):
+    """
+    Emulate the `check` argument of the `subprocess.run` function.
+    """
+    process = await asyncio.create_subprocess_exec(*args, **kwargs)
+    returncode = await process.wait()
+
+    if returncode != 0:
+        error = CalledProcessError()
+        error.process = process
+
+        raise error
+
+    return process
+
+async def run_update_script(nixpkgs_root: str, merge_lock: asyncio.Lock, temp_dir: Optional[Tuple[str, str]], package: Dict, keep_going: bool):
+    worktree: Optional[str] = None
+
+    update_script_command = package['updateScript']
+
+    if temp_dir is not None:
+        worktree, _branch = temp_dir
+
+        # Ensure the worktree is clean before update.
+        await check_subprocess('git', 'reset', '--hard', '--quiet', 'HEAD', cwd=worktree)
+
+        # Update scripts can use $(dirname $0) to get their location but we want to run
+        # their clones in the git worktree, not in the main nixpkgs repo.
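+        # (Illustrative: /home/alice/nixpkgs/pkgs/foo/update.sh becomes
+        # /tmp/tmpXXXXXXXX/nixpkgs/pkgs/foo/update.sh.)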
+        update_script_command = map(lambda arg: re.sub(r'^{0}'.format(re.escape(nixpkgs_root)), worktree, arg), update_script_command)
+
+    eprint(f" - {package['name']}: UPDATING ...")
+
+    try:
+        update_process = await check_subprocess('env', f"UPDATE_NIX_ATTR_PATH={package['attrPath']}", *update_script_command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, cwd=worktree)
+        update_info = await update_process.stdout.read()
+
+        await merge_changes(merge_lock, package, update_info, temp_dir)
+    except KeyboardInterrupt as e:
+        eprint('Cancelling…')
+        raise asyncio.exceptions.CancelledError()
+    except CalledProcessError as e:
+        eprint(f" - {package['name']}: ERROR")
+        eprint()
+        eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")
+        eprint()
+        stderr = await e.process.stderr.read()
+        eprint(stderr.decode('utf-8'))
+        with open(f"{package['pname']}.log", 'wb') as logfile:
+            logfile.write(stderr)
+        eprint()
+        eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")
+
+        if not keep_going:
+            raise asyncio.exceptions.CancelledError()
+
+@contextlib.contextmanager
+def make_worktree() -> Generator[Tuple[str, str], None, None]:
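+    """
+    Create a git worktree on a fresh branch inside a temporary directory and
+    yield (worktree_path, branch_name); both are cleaned up on exit.
+
+    Usage (illustrative):
+
+        with make_worktree() as (worktree, branch):
+            ...  # run an update script inside the worktree
+    """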
+    with tempfile.TemporaryDirectory() as wt:
+        branch_name = f'update-{os.path.basename(wt)}'
+        target_directory = f'{wt}/nixpkgs'
+
+        subprocess.run(['git', 'worktree', 'add', '-b', branch_name, target_directory])
+        yield (target_directory, branch_name)
+        subprocess.run(['git', 'worktree', 'remove', '--force', target_directory])
+        subprocess.run(['git', 'branch', '-D', branch_name])
+
+async def commit_changes(name: str, merge_lock: asyncio.Lock, worktree: str, branch: str, changes: List[Dict]) -> None:
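+    # Commits are created in the worktree on its own branch and then
+    # cherry-picked into the currently checked-out branch of the main repo.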
+    for change in changes:
+        # Git can only handle a single index operation at a time
+        async with merge_lock:
+            await check_subprocess('git', 'add', *change['files'], cwd=worktree)
+            commit_message = '{attrPath}: {oldVersion} → {newVersion}'.format(**change)
+            if 'commitMessage' in change:
+                commit_message = change['commitMessage']
+            elif 'commitBody' in change:
+                commit_message = commit_message + '\n\n' + change['commitBody']
+            await check_subprocess('git', 'commit', '--quiet', '-m', commit_message, cwd=worktree)
+            await check_subprocess('git', 'cherry-pick', branch)
+
+async def check_changes(package: Dict, worktree: str, update_info: str):
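+    # With the `commit` feature, the update script prints a JSON list of
+    # changes on stdout, roughly (illustrative):
+    #     [{"attrPath": "foo", "oldVersion": "1.0", "newVersion": "1.1",
+    #       "files": ["/path/to/default.nix"]}]
+    # Attributes missing from a single change are filled in below.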
+    if 'commit' in package['supportedFeatures']:
+        changes = json.loads(update_info)
+    else:
+        changes = [{}]
+
+    # Try to fill in missing attributes when there is just a single change.
+    if len(changes) == 1:
+        # Dynamic data from the updater takes precedence over static data from passthru.updateScript.
+        if 'attrPath' not in changes[0]:
+            # update.nix always passes attrPath
+            changes[0]['attrPath'] = package['attrPath']
+
+        if 'oldVersion' not in changes[0]:
+            # update.nix always passes oldVersion
+            changes[0]['oldVersion'] = package['oldVersion']
+
+        if 'newVersion' not in changes[0]:
+            attr_path = changes[0]['attrPath']
+            obtain_new_version_process = await check_subprocess('nix-instantiate', '--expr', f'with import ./. {{}}; lib.getVersion {attr_path}', '--eval', '--strict', '--json', stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, cwd=worktree)
+            changes[0]['newVersion'] = json.loads((await obtain_new_version_process.stdout.read()).decode('utf-8'))
+
+        if 'files' not in changes[0]:
+            changed_files_process = await check_subprocess('git', 'diff', '--name-only', 'HEAD', stdout=asyncio.subprocess.PIPE, cwd=worktree)
+            changed_files = (await changed_files_process.stdout.read()).splitlines()
+            changes[0]['files'] = changed_files
+
+            if len(changed_files) == 0:
+                return []
+
+    return changes
+
+async def merge_changes(merge_lock: asyncio.Lock, package: Dict, update_info: str, temp_dir: Optional[Tuple[str, str]]) -> None:
+    if temp_dir is not None:
+        worktree, branch = temp_dir
+        changes = await check_changes(package, worktree, update_info)
+
+        if len(changes) > 0:
+            await commit_changes(package['name'], merge_lock, worktree, branch, changes)
+        else:
+            eprint(f" - {package['name']}: DONE, no changes.")
+    else:
+        eprint(f" - {package['name']}: DONE.")
+
+async def updater(nixpkgs_root: str, temp_dir: Optional[Tuple[str, str]], merge_lock: asyncio.Lock, packages_to_update: asyncio.Queue[Optional[Dict]], keep_going: bool, commit: bool):
+    while True:
+        package = await packages_to_update.get()
+        if package is None:
+            # A sentinel was received; we are done.
+            return
+
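+        # A worktree is only useful if the result can be committed, which
+        # requires the commit feature or at least an attrPath for detecting
+        # the changed files.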
+        if not ('commit' in package['supportedFeatures'] or 'attrPath' in package):
+            temp_dir = None
+
+        await run_update_script(nixpkgs_root, merge_lock, temp_dir, package, keep_going)
+
+async def start_updates(max_workers: int, keep_going: bool, commit: bool, packages: List[Dict]):
+    merge_lock = asyncio.Lock()
+    packages_to_update: asyncio.Queue[Optional[Dict]] = asyncio.Queue()
+
+    with contextlib.ExitStack() as stack:
+        temp_dirs: List[Optional[Tuple[str, str]]] = []
+
+        # Do not create more workers than there are packages.
+        num_workers = min(max_workers, len(packages))
+
+        nixpkgs_root_process = await check_subprocess('git', 'rev-parse', '--show-toplevel', stdout=asyncio.subprocess.PIPE)
+        nixpkgs_root = (await nixpkgs_root_process.stdout.read()).decode('utf-8').strip()
+
+        # Set up temporary directories when using auto-commit.
+        for i in range(num_workers):
+            temp_dir = stack.enter_context(make_worktree()) if commit else None
+            temp_dirs.append(temp_dir)
+
+        # Fill up the update queue.
+        for package in packages:
+            await packages_to_update.put(package)
+
+        # Add sentinels, one for each worker.
+        # A worker will terminate when it gets a sentinel from the queue.
+        for i in range(num_workers):
+            await packages_to_update.put(None)
+
+        # Prepare an updater worker for each temp_dir.
+        # At most `num_workers` instances of `run_update_script` will be running at one time.
+        updaters = asyncio.gather(*[updater(nixpkgs_root, temp_dir, merge_lock, packages_to_update, keep_going, commit) for temp_dir in temp_dirs])
+
+        try:
+            # Start updater workers.
+            await updaters
+        except asyncio.exceptions.CancelledError as e:
+            # When one worker is cancelled, cancel the others too.
+            updaters.cancel()
+
+def main(max_workers: int, keep_going: bool, commit: bool, packages_path: str) -> None:
+    with open(packages_path) as f:
+        packages = json.load(f)
+
+    eprint()
+    eprint('Going to be running update for the following packages:')
+    for package in packages:
+        eprint(f" - {package['name']}")
+    eprint()
+
+    confirm = input('Press Enter key to continue...')
+    if confirm == '':
+        eprint()
+        eprint('Running update for:')
+
+        asyncio.run(start_updates(max_workers, keep_going, commit, packages))
+
+        eprint()
+        eprint('Packages updated!')
+        sys.exit()
+    else:
+        eprint('Aborting!')
+        sys.exit(130)
+
+parser = argparse.ArgumentParser(description='Update packages')
+parser.add_argument('--max-workers', '-j', dest='max_workers', type=int, help='Number of updates to run concurrently', nargs='?', default=4)
+parser.add_argument('--keep-going', '-k', dest='keep_going', action='store_true', help='Do not stop after first failure')
+parser.add_argument('--commit', '-c', dest='commit', action='store_true', help='Commit the changes')
+parser.add_argument('packages', help='JSON file containing the list of package names and their update scripts')
+
+if __name__ == '__main__':
+    args = parser.parse_args()
+
+    try:
+        main(args.max_workers, args.keep_going, args.commit, args.packages)
+    except KeyboardInterrupt as e:
+        # Let’s cancel outside of the main loop too.
+        sys.exit(130)
diff --git a/maintainers/scripts/vanity-manual-equalities.txt b/maintainers/scripts/vanity-manual-equalities.txt
new file mode 100644
index 00000000000..4a7bc3aea44
--- /dev/null
+++ b/maintainers/scripts/vanity-manual-equalities.txt
@@ -0,0 +1,7 @@
+viric	viriketo@gmail.com
+Pjotr Prins	pjotr.public01@thebird.nl
+Pjotr Prins	pjotr.public05@thebird.nl
+Wouter den Breejen	wbreejen
+MarcWeber	marcweber
+Ricardo Correia	Ricardo M. Correia
+ertesx@gmx.de	ertes
diff --git a/maintainers/scripts/vanity.sh b/maintainers/scripts/vanity.sh
new file mode 100755
index 00000000000..b879488165d
--- /dev/null
+++ b/maintainers/scripts/vanity.sh
@@ -0,0 +1,122 @@
+#! /bin/sh
+
+export LANG=C LC_ALL=C LC_COLLATE=C
+
+# Load git log
+raw_git_log="$(git log)"
+git_data="$(echo "$raw_git_log" | grep 'Author:' |
+  sed -e 's/^ *Author://; s/\\//g; s/^ *//; s/ *$//;
+  s/ @ .*//; s/ *[<]/\t/; s/[>]//')"
+
+# Name - nick - email correspondence from the log and from the maintainer list.
+# There are also a few manual entries.
+maintainers="$(cat "$(dirname "$0")/../maintainer-list.nix" |
+  grep '=' | sed -re 's/\\"/''/g;
+  s/[ 	]*([^ 	=]*)[ 	]*=[ 	]*" *(.*[^ ]) *[<](.*)[>] *".*/\1\t\2\t\3/')"
+git_lines="$( ( echo "$git_data";
+    cat "$(dirname "$0")/vanity-manual-equalities.txt") | sort |uniq)"
+
+emails="$(
+    ( echo "$maintainers" | cut -f 3; echo "$git_data" | cut -f 2 ) |
+    sort | uniq | grep -E ".+@.+[.].+"
+    )"
+
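+# Given an email address, find a commit authored with it and scrape the
+# GitHub commit page for the author's profile link (illustratively,
+# href="/viric" yields the username viric).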
+fetchGithubName () {
+    commitid="$(
+        echo "$raw_git_log" | grep -B3 "Author: .*[<]$1[>]" | head -n 3 |
+            grep '^commit ' | tail -n 1 | sed -e 's/^commit //'
+    )"
+    userid="$(
+        curl https://github.com/NixOS/nixpkgs/commit/"$commitid" 2>/dev/null |
+        grep committed -B10 | grep 'href="/' |
+        sed -re 's@.* href="/@@; s@".*@@' |
+        grep -v "/commit/"
+    )";
+    echo "$userid"
+}
+
+[ -n "$NIXPKGS_GITHUB_NAME_CACHE" ] && {
+    echo "$emails" | while read email; do
+        line="$(grep "$email	" "$NIXPKGS_GITHUB_NAME_CACHE")"
+        [ -z "$line" ] && {
+            echo "$email	$(fetchGithubName "$email")" >> \
+                "$NIXPKGS_GITHUB_NAME_CACHE"
+        }
+    done
+}
+
+# For RDF: percent-encode names so they can be embedded in URIs
+normalize_name () {
+    sed -e 's/%/%25/g; s/ /%20/g; s/'\''/%27/g; s/"/%22/g; s/`/%60/g; s/\^/%5e/g; '
+}
+
+denormalize_name () {
+    sed -e 's/%20/ /g; s/%27/'\''/g; s/%22/"/g; s/%60/`/g; s/%5e/^/g; s/%25/%/g;';
+}
+
+n3="$(mktemp --suffix .n3)"
+
+# «The same person» relation and a sorting hint
+# Full name is something with a space
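+# An emitted triple looks like this (names are illustrative):
+#   <my://name/Jane%20Doe> <my://can-be> <my://name/jdoe>.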
+(
+echo "$git_lines" | sed -re 's@(.*)\t(.*)@<my://name/\1>	<my://can-be>	<my://name/\2>.@'
+echo "$git_lines" | sed -re 's@(.*)\t(.*)@<my://name/\2>	<my://can-be>	<my://name/\1>.@'
+echo "$maintainers" | sed -re 's@(.*)\t(.*)\t(.*)@<my://name/\1>	<my://can-be>	<my://name/\2>.@'
+echo "$maintainers" | sed -re 's@(.*)\t(.*)\t(.*)@<my://name/\2>	<my://can-be>	<my://name/\3>.@'
+echo "$maintainers" | sed -re 's@(.*)\t(.*)\t(.*)@<my://name/\3>	<my://can-be>	<my://name/\1>.@'
+echo "$git_lines" | grep ' ' | cut -f 1 | sed -e 's@.*@<my://name/&>	<my://is-name>	<my://0>.@'
+echo "$git_lines" | grep -v ' ' | cut -f 1 | sed -e 's@.*@<my://name/&>	<my://is-name>	<my://1>.@'
+echo "$maintainers" | cut -f 2 | sed -e 's@.*@<my://name/&>	<my://is-name>	<my://0>.@'
+[ -n "$NIXPKGS_GITHUB_NAME_CACHE" ] && cat "$NIXPKGS_GITHUB_NAME_CACHE" |
+    grep -v "	$" |
+    sed -re 's@(.*)\t(.*)@<my://name/\1>	<my://at-github>	<my://github/\2>.@'
+) | normalize_name | grep -E '<my://[-a-z]+>' | sort | uniq > "$n3"
+
+# Get transitive closure
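+# The SPARQL property path <my://can-be>+ matches one or more hops of the
+# «same person» relation, i.e. its transitive closure.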
+sparql="$(nix-build '<nixpkgs>' -Q -A apache-jena --no-out-link)/bin/sparql"
+name_list="$(
+    "$sparql" --results=TSV --data="$n3" "
+    select ?x ?y ?g where {
+      ?x <my://can-be>+ ?y.
+      ?x <my://is-name> ?g.
+        }
+    " | tail -n +2 |
+    sed -re 's@<my://name/@@g; s@<my://@@g; s@>@@g;' |
+    sort -k 2,3 -t '	'
+)"
+github_name_list="$(
+    "$sparql" --results=TSV --data="$n3" "
+    select ?x ?y where {
+      ?x (<my://can-be>+ / <my://at-github>) ?y.
+        }
+    " | tail -n +2 |
+    sed -re 's@<my://(name|github)/@@g; s@<my://@@g; s@>@@g;'
+)"
+
+# Take the first spelling option for every person
+name_list_canonical="$(echo "$name_list" | cut -f 1,2 | uniq -f1)"
+
+cleaner_script="$(echo "$name_list_canonical" | denormalize_name |
+  sed -re 's/(.*)\t(.*)/s#^\2$#\1#g/g')"
+
+# Add github usernames
+if [ -n "$NIXPKGS_GITHUB_NAME_CACHE" ]; then
+    github_adder_script="$(mktemp)"
+    echo "$github_name_list" |
+        grep -E "$(echo "$name_list_canonical" | cut -f 2 |
+        tr '\n' '|' )" |
+    sort | uniq |
+        sed -re 's/(.*)\t(.*)/s| \1$| \1\t\2|g;/' |
+    denormalize_name > "$github_adder_script"
+else
+    github_adder_script='/dev/null'
+fi
+
+echo "$name_list" | denormalize_name
+
+echo
+
+echo "$git_data" | cut -f 1 |
+    sed -e "$cleaner_script" |
+    sort | uniq -c | sort -k1n | sed -rf "$github_adder_script" |
+    sed -re 's/^ *([0-9]+) /\1\t/'