Diffstat (limited to 'maintainers/scripts')
-rw-r--r--  maintainers/scripts/build.nix | 22
-rw-r--r--  maintainers/scripts/check-hydra-by-maintainer.nix | 68
-rwxr-xr-x  maintainers/scripts/copy-tarballs.pl | 16
-rwxr-xr-x  maintainers/scripts/fetch-kde-qt.sh | 9
-rwxr-xr-x  maintainers/scripts/haskell/hydra-report.hs | 408
-rwxr-xr-x  maintainers/scripts/haskell/mark-broken.sh | 47
-rwxr-xr-x  maintainers/scripts/haskell/regenerate-hackage-packages.sh | 46
-rwxr-xr-x  maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh | 15
-rw-r--r--  maintainers/scripts/haskell/transitive-broken-packages.nix | 16
-rwxr-xr-x  maintainers/scripts/haskell/update-cabal2nix-unstable.sh | 17
-rwxr-xr-x  maintainers/scripts/haskell/update-hackage.sh | 35
-rwxr-xr-x  maintainers/scripts/haskell/update-stackage.sh | 57
-rwxr-xr-x  maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh | 21
-rwxr-xr-x  maintainers/scripts/hydra-eval-failures.py | 2
-rw-r--r--  maintainers/scripts/luarocks-packages.csv | 7
-rw-r--r--  maintainers/scripts/nix-generate-from-cpan.nix | 6
-rwxr-xr-x  maintainers/scripts/nix-generate-from-cpan.pl | 6
-rw-r--r--  maintainers/scripts/nixpkgs-lint.nix | 11
-rwxr-xr-x  maintainers/scripts/nixpkgs-lint.pl | 2
-rw-r--r--  maintainers/scripts/pluginupdate.py | 555
-rwxr-xr-x  maintainers/scripts/update-luarocks-packages | 2
-rw-r--r--  maintainers/scripts/update-luarocks-shell.nix | 7
-rwxr-xr-x  maintainers/scripts/update-redirected-urls.sh | 12
-rwxr-xr-x  maintainers/scripts/update.nix | 139
-rw-r--r--  maintainers/scripts/update.py | 216
25 files changed, 1632 insertions, 110 deletions
diff --git a/maintainers/scripts/build.nix b/maintainers/scripts/build.nix
index c70993cf138..ca401700b4a 100644
--- a/maintainers/scripts/build.nix
+++ b/maintainers/scripts/build.nix
@@ -1,10 +1,18 @@
-{ maintainer }:
+{ maintainer
+, localSystem ? { system = args.system or builtins.currentSystem; }
+, system ? localSystem.system
+, crossSystem ? localSystem
+, ...
+}@args:
 
 # based on update.nix
 # nix-build build.nix --argstr maintainer <yourname>
 
+# to build for aarch64-linux using boot.binfmt.emulatedSystems:
+# nix-build build.nix --argstr maintainer <yourname> --argstr system aarch64-linux
+
 let
-  pkgs = import ./../../default.nix {};
+  pkgs = import ./../../default.nix (removeAttrs args [ "maintainer" ]);
   maintainer_ = pkgs.lib.maintainers.${maintainer};
   packagesWith = cond: return: set:
     (pkgs.lib.flatten
@@ -13,8 +21,12 @@ let
           let
             result = builtins.tryEval
               (
-                if pkgs.lib.isDerivation pkg && cond name pkg
-                then [ (return name pkg) ]
+                if pkgs.lib.isDerivation pkg && cond name pkg then
+                  # Skip packages whose closure fails on evaluation.
+                  # This happens for pkgs like `python27Packages.djangoql`
+                  # that have disabled Python pkgs as dependencies.
+                  builtins.seq pkg.outPath
+                    [ (return name pkg) ]
                 else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
                 then packagesWith cond return pkg
                 else [ ]
@@ -30,7 +42,7 @@ in
 packagesWith
   (name: pkg:
     (
-      if builtins.hasAttr "maintainers" pkg.meta
+      if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta
       then (
         if builtins.isList pkg.meta.maintainers
         then builtins.elem maintainer_ pkg.meta.maintainers
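The `builtins.seq pkg.outPath` pattern above works because tryEval only
catches failures that are forced while evaluating its argument; forcing
outPath surfaces evaluation errors from the package's closure. A minimal
sketch of the idea, using a hypothetical package set:

    let
      pkgs = {
        good = { outPath = "/nix/store/hypothetical-good"; };
        bad = { outPath = throw "python27 dependency is disabled"; };
      };
      # seq forces outPath, so the throw is caught inside tryEval.
      evaluates = pkg: (builtins.tryEval (builtins.seq pkg.outPath true)).success;
    in
      builtins.filter (n: evaluates pkgs.${n}) (builtins.attrNames pkgs)
      # => [ "good" ]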
diff --git a/maintainers/scripts/check-hydra-by-maintainer.nix b/maintainers/scripts/check-hydra-by-maintainer.nix
new file mode 100644
index 00000000000..326aae47f8c
--- /dev/null
+++ b/maintainers/scripts/check-hydra-by-maintainer.nix
@@ -0,0 +1,68 @@
+{ maintainer }:
+let
+  pkgs = import ./../../default.nix { };
+  maintainer_ = pkgs.lib.maintainers.${maintainer};
+  packagesWith = cond: return: prefix: set:
+    (pkgs.lib.flatten
+      (pkgs.lib.mapAttrsToList
+        (name: pkg:
+          let
+            result = builtins.tryEval
+              (
+                if pkgs.lib.isDerivation pkg && cond name pkg then
+                # Skip packages whose closure fails on evaluation.
+                # This happens for pkgs like `python27Packages.djangoql`
+                # that have disabled Python pkgs as dependencies.
+                  builtins.seq pkg.outPath
+                    [ (return "${prefix}${name}") ]
+                else if pkg.recurseForDerivations or false || pkg.recurseForRelease or false
+                # Recurse into package sets, extending the attribute-path prefix.
+                then packagesWith cond return "${name}." pkg
+                else [ ]
+              );
+          in
+          if result.success then result.value
+          else [ ]
+        )
+        set
+      )
+    );
+
+  packages = packagesWith
+    (name: pkg:
+      (
+        if builtins.hasAttr "meta" pkg && builtins.hasAttr "maintainers" pkg.meta
+        then
+          (
+            if builtins.isList pkg.meta.maintainers
+            then builtins.elem maintainer_ pkg.meta.maintainers
+            else maintainer_ == pkg.meta.maintainers
+          )
+        else false
+      )
+    )
+    (name: name)
+    ("")
+    pkgs;
+
+in
+pkgs.stdenv.mkDerivation {
+  name = "nixpkgs-update-script";
+  buildInputs = [ pkgs.hydra-check ];
+  buildCommand = ''
+    echo ""
+    echo "----------------------------------------------------------------"
+    echo ""
+    echo "nix-shell maintainers/scripts/check-hydra-by-maintainer.nix --argstr maintainer SuperSandro2000"
+    echo ""
+    echo "----------------------------------------------------------------"
+    exit 1
+  '';
+  shellHook = ''
+    unset shellHook # do not contaminate nested shells
+    echo "Please stand by"
+    echo nix-shell -p hydra-check --run "hydra-check ${builtins.concatStringsSep " " packages}"
+    nix-shell -p hydra-check --run "hydra-check ${builtins.concatStringsSep " " packages}"
+    exit $?
+  '';
+}
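The maintainer test above accepts both encodings of meta.maintainers: a
list, or (in some older expressions) a single maintainer attribute. A
minimal sketch with hypothetical values:

    let
      isMaintained = maint: pkg:
        if pkg ? meta && pkg.meta ? maintainers then
          (if builtins.isList pkg.meta.maintainers
           then builtins.elem maint pkg.meta.maintainers
           else maint == pkg.meta.maintainers)
        else false;
    in
      isMaintained "alice" { meta.maintainers = [ "alice" "bob" ]; }  # => true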
diff --git a/maintainers/scripts/copy-tarballs.pl b/maintainers/scripts/copy-tarballs.pl
index 59696a4432d..6a08eb88bf8 100755
--- a/maintainers/scripts/copy-tarballs.pl
+++ b/maintainers/scripts/copy-tarballs.pl
@@ -165,6 +165,20 @@ elsif (defined $expr) {
         my $hash = $fetch->{hash};
         my $name = $fetch->{name};
 
+        if ($hash =~ /^([a-z0-9]+)-([A-Za-z0-9+\/=]+)$/) {
+            $algo = $1;
+            $hash = `nix hash to-base16 $hash` or die;
+            chomp $hash;
+        }
+
+        next unless $algo =~ /^[a-z0-9]+$/;
+
+        # Convert non-SRI base-64 to base-16.
+        if ($hash =~ /^[A-Za-z0-9+\/=]+$/) {
+            $hash = `nix hash to-base16 --type '$algo' $hash` or die;
+            chomp $hash;
+        }
+
         if (defined $ENV{DEBUG}) {
             print "$url $algo $hash\n";
             next;
@@ -184,7 +198,7 @@ elsif (defined $expr) {
 
         my $storePath = makeFixedOutputPath(0, $algo, $hash, $name);
 
-        print STDERR "mirroring $url ($storePath)...\n";
+        print STDERR "mirroring $url ($storePath, $algo, $hash)...\n";
 
         if ($dryRun) {
             $mirrored++;
diff --git a/maintainers/scripts/fetch-kde-qt.sh b/maintainers/scripts/fetch-kde-qt.sh
index c6c980dd0cb..22d78151978 100755
--- a/maintainers/scripts/fetch-kde-qt.sh
+++ b/maintainers/scripts/fetch-kde-qt.sh
@@ -14,13 +14,12 @@ fi
 
 tmp=$(mktemp -d)
 pushd $tmp >/dev/null
-wget -nH -r -c --no-parent "${WGET_ARGS[@]}" -A '*.tar.xz.sha256' -A '*.mirrorlist' >/dev/null
-find -type f -name '*.mirrorlist' -delete
+wget -nH -r -c --no-parent "${WGET_ARGS[@]}" >/dev/null
 
 csv=$(mktemp)
 find . -type f | while read src; do
     # Sanitize file name
-    filename=$(gawk '{ print $2 }' "$src" | tr '@' '_')
+    filename=$(basename "$src" | tr '@' '_')
     nameVersion="${filename%.tar.*}"
     name=$(echo "$nameVersion" | sed -e 's,-[[:digit:]].*,,' | sed -e 's,-opensource-src$,,' | sed -e 's,-everywhere-src$,,')
     version=$(echo "$nameVersion" | sed -e 's,^\([[:alpha:]][[:alnum:]]*-\)\+,,')
@@ -40,8 +39,8 @@ gawk -F , "{ print \$1 }" $csv | sort | uniq | while read name; do
     latestVersion=$(echo "$versions" | sort -rV | head -n 1)
     src=$(gawk -F , "/^$name,$latestVersion,/ { print \$3 }" $csv)
     filename=$(gawk -F , "/^$name,$latestVersion,/ { print \$4 }" $csv)
-    url="$(dirname "${src:2}")/$filename"
-    sha256=$(gawk '{ print $1 }' "$src")
+    url="${src:2}"
+    sha256=$(nix-hash --type sha256 --base32 --flat "$src")
     cat >>"$SRCS" <<EOF
   $name = {
     version = "$latestVersion";
diff --git a/maintainers/scripts/haskell/hydra-report.hs b/maintainers/scripts/haskell/hydra-report.hs
new file mode 100755
index 00000000000..fd6430d43c9
--- /dev/null
+++ b/maintainers/scripts/haskell/hydra-report.hs
@@ -0,0 +1,408 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -p "haskellPackages.ghcWithPackages (p: [p.aeson p.req])"
+#! nix-shell -p hydra-unstable
+#! nix-shell -i runhaskell
+
+{-
+
+The purpose of this script is to
+
+1) download the state of the nixpkgs/haskell-updates job from Hydra (with get-report)
+2) print a summary of the state suitable for pasting into a GitHub comment (with ping-maintainers)
+3) print a list of broken packages suitable for pasting into configuration-hackage2nix.yaml (with mark-broken-list)
+
+Because step 1) is quite expensive and takes roughly 5 minutes, the result is cached in a JSON file in XDG_CACHE.
+
+-}
+{-# LANGUAGE BlockArguments #-}
+{-# LANGUAGE DeriveAnyClass #-}
+{-# LANGUAGE DeriveGeneric #-}
+{-# LANGUAGE DerivingStrategies #-}
+{-# LANGUAGE DuplicateRecordFields #-}
+{-# LANGUAGE LambdaCase #-}
+{-# LANGUAGE MultiWayIf #-}
+{-# LANGUAGE NamedFieldPuns #-}
+{-# LANGUAGE OverloadedStrings #-}
+{-# LANGUAGE ScopedTypeVariables #-}
+{-# LANGUAGE TupleSections #-}
+{-# OPTIONS_GHC -Wall #-}
+
+import Control.Monad (forM_, (<=<))
+import Control.Monad.Trans (MonadIO (liftIO))
+import Data.Aeson (
+   FromJSON,
+   ToJSON,
+   decodeFileStrict',
+   eitherDecodeStrict',
+   encodeFile,
+ )
+import Data.Foldable (Foldable (toList), foldl')
+import Data.List.NonEmpty (NonEmpty, nonEmpty)
+import qualified Data.List.NonEmpty as NonEmpty
+import Data.Map.Strict (Map)
+import qualified Data.Map.Strict as Map
+import Data.Maybe (fromMaybe, mapMaybe)
+import Data.Monoid (Sum (Sum, getSum))
+import Data.Sequence (Seq)
+import qualified Data.Sequence as Seq
+import Data.Set (Set)
+import qualified Data.Set as Set
+import Data.Text (Text)
+import qualified Data.Text as Text
+import Data.Text.Encoding (encodeUtf8)
+import Data.Time (defaultTimeLocale, formatTime, getCurrentTime)
+import Data.Time.Clock (UTCTime)
+import GHC.Generics (Generic)
+import Network.HTTP.Req (
+   GET (GET),
+   NoReqBody (NoReqBody),
+   defaultHttpConfig,
+   header,
+   https,
+   jsonResponse,
+   req,
+   responseBody,
+   responseTimeout,
+   runReq,
+   (/:),
+ )
+import System.Directory (XdgDirectory (XdgCache), getXdgDirectory)
+import System.Environment (getArgs)
+import System.Process (readProcess)
+import Prelude hiding (id)
+
+newtype JobsetEvals = JobsetEvals
+   { evals :: Seq Eval
+   }
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+newtype Nixpkgs = Nixpkgs {revision :: Text}
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+newtype JobsetEvalInputs = JobsetEvalInputs {nixpkgs :: Nixpkgs}
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+data Eval = Eval
+   { id :: Int
+   , jobsetevalinputs :: JobsetEvalInputs
+   }
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+data Build = Build
+   { job :: Text
+   , buildstatus :: Maybe Int
+   , finished :: Int
+   , id :: Int
+   , nixname :: Text
+   , system :: Text
+   , jobsetevals :: Seq Int
+   }
+   deriving (Generic, ToJSON, FromJSON, Show)
+
+main :: IO ()
+main = do
+   args <- getArgs
+   case args of
+      ["get-report"] -> getBuildReports
+      ["ping-maintainers"] -> printMaintainerPing
+      ["mark-broken-list"] -> printMarkBrokenList
+      _ -> putStrLn "Usage: get-report | ping-maintainers | mark-broken-list"
+
+reportFileName :: IO FilePath
+reportFileName = getXdgDirectory XdgCache "haskell-updates-build-report.json"
+
+showT :: Show a => a -> Text
+showT = Text.pack . show
+
+getBuildReports :: IO ()
+getBuildReports = runReq defaultHttpConfig do
+   evalMay <- Seq.lookup 0 . evals <$> myReq (https "hydra.nixos.org" /: "jobset" /: "nixpkgs" /: "haskell-updates" /: "evals") mempty
+   eval@Eval{id} <- maybe (liftIO $ fail "No evaluation found") pure evalMay
+   liftIO . putStrLn $ "Fetching evaluation " <> show id <> " from Hydra. This might take a few minutes..."
+   buildReports :: Seq Build <- myReq (https "hydra.nixos.org" /: "eval" /: showT id /: "builds") (responseTimeout 600000000)
+   liftIO do
+      fileName <- reportFileName
+      putStrLn $ "Finished fetching all builds from Hydra, saving report as " <> fileName
+      now <- getCurrentTime
+      encodeFile fileName (eval, now, buildReports)
+  where
+   myReq query option = responseBody <$> req GET query NoReqBody jsonResponse (header "User-Agent" "hydra-report.hs/v1 (nixpkgs;maintainers/scripts/haskell)" <> option)
+
+hydraEvalCommand :: FilePath
+hydraEvalCommand = "hydra-eval-jobs"
+
+hydraEvalParams :: [String]
+hydraEvalParams = ["-I", ".", "pkgs/top-level/release-haskell.nix"]
+
+handlesCommand :: FilePath
+handlesCommand = "nix-instantiate"
+
+handlesParams :: [String]
+handlesParams = ["--eval", "--strict", "--json", "-"]
+
+handlesExpression :: String
+handlesExpression = "with import ./. {}; with lib; zipAttrsWith (_: builtins.head) (mapAttrsToList (_: v: if v ? github then { \"${v.email}\" = v.github; } else {}) (import maintainers/maintainer-list.nix))"
+
+-- | This newtype is used to parse a Hydra job output from @hydra-eval-jobs@.
+-- The only field we are interested in is @maintainers@, which is why this
+-- is just a newtype.
+--
+-- Note that there are occasionally jobs that don't have a maintainers
+-- field, which is why this has to be @Maybe Text@.
+newtype Maintainers = Maintainers { maintainers :: Maybe Text }
+  deriving stock (Generic, Show)
+  deriving anyclass (FromJSON, ToJSON)
+
+-- | This is a 'Map' from Hydra job name to maintainer email addresses.
+--
+-- It has values similar to the following:
+--
+-- @@
+--  fromList
+--    [ ("arion.aarch64-linux", Maintainers (Just "robert@example.com"))
+--    , ("bench.x86_64-linux", Maintainers (Just ""))
+--    , ("conduit.x86_64-linux", Maintainers (Just "snoy@man.com, web@ber.com"))
+--    , ("lens.x86_64-darwin", Maintainers (Just "ek@category.com"))
+--    ]
+-- @@
+--
+-- Note that Hydra jobs without maintainers will have an empty string for the
+-- maintainer list.
+type HydraJobs = Map Text Maintainers
+
+-- | Map of email addresses to GitHub handles.
+-- This is built from the file @../../maintainer-list.nix@.
+--
+-- It has values similar to the following:
+--
+-- @@
+--  fromList
+--    [ ("robert@example.com", "rob22")
+--    , ("ek@category.com", "edkm")
+--    ]
+-- @@
+type EmailToGitHubHandles = Map Text Text
+
+-- | Map of Hydra jobs to maintainer GitHub handles.
+--
+-- It has values similar to the following:
+--
+-- @@
+--  fromList
+--    [ ("arion.aarch64-linux", ["rob22"])
+--    , ("conduit.x86_64-darwin", ["snoyb", "webber"])
+--    ]
+-- @@
+type MaintainerMap = Map Text (NonEmpty Text)
+
+-- | Generate a mapping of Hydra job names to maintainer GitHub handles.
+getMaintainerMap :: IO MaintainerMap
+getMaintainerMap = do
+   hydraJobs :: HydraJobs <-
+      readJSONProcess hydraEvalCommand hydraEvalParams "" "Failed to decode hydra-eval-jobs output: "
+   handlesMap :: EmailToGitHubHandles <-
+      readJSONProcess handlesCommand handlesParams handlesExpression "Failed to decode nix output for lookup of github handles: "
+   pure $ Map.mapMaybe (splitMaintainersToGitHubHandles handlesMap) hydraJobs
+   where
+   -- Split a comma-separated string of Maintainers into a NonEmpty list of
+   -- GitHub handles.
+   splitMaintainersToGitHubHandles
+      :: EmailToGitHubHandles -> Maintainers -> Maybe (NonEmpty Text)
+   splitMaintainersToGitHubHandles handlesMap (Maintainers maint) =
+      nonEmpty . mapMaybe (`Map.lookup` handlesMap) . Text.splitOn ", " $ fromMaybe "" maint
+
+-- | Run a process that produces JSON on stdout and decode the JSON to a
+-- data type.
+--
+-- If the JSON-decoding fails, throw the JSON-decoding error.
+readJSONProcess
+   :: FromJSON a
+   => FilePath -- ^ Filename of executable.
+   -> [String] -- ^ Arguments
+   -> String -- ^ stdin to pass to the process
+   -> String -- ^ String to prefix to JSON-decode error.
+   -> IO a
+readJSONProcess exe args input err = do
+   output <- readProcess exe args input
+   let eitherDecodedOutput = eitherDecodeStrict' . encodeUtf8 . Text.pack $ output
+   case eitherDecodedOutput of
+     Left decodeErr -> error $ err <> decodeErr <> "\nRaw: '" <> take 1000 output <> "'"
+     Right decodedOutput -> pure decodedOutput
+
+-- BuildStates are sorted by subjective importance/concerningness
+data BuildState
+  = Failed
+  | DependencyFailed
+  | OutputLimitExceeded
+  | Unknown (Maybe Int)
+  | TimedOut
+  | Canceled
+  | HydraFailure
+  | Unfinished
+  | Success
+  deriving stock (Show, Eq, Ord)
+
+icon :: BuildState -> Text
+icon = \case
+   Failed -> ":x:"
+   DependencyFailed -> ":heavy_exclamation_mark:"
+   OutputLimitExceeded -> ":warning:"
+   Unknown x -> "unknown code " <> showT x
+   TimedOut -> ":hourglass::no_entry_sign:"
+   Canceled -> ":no_entry_sign:"
+   Unfinished -> ":hourglass_flowing_sand:"
+   HydraFailure -> ":construction:"
+   Success -> ":heavy_check_mark:"
+
+platformIcon :: Platform -> Text
+platformIcon (Platform x) = case x of
+   "x86_64-linux" -> ":penguin:"
+   "aarch64-linux" -> ":iphone:"
+   "x86_64-darwin" -> ":apple:"
+   _ -> x
+
+data BuildResult = BuildResult {state :: BuildState, id :: Int} deriving (Show, Eq, Ord)
+newtype Platform = Platform {platform :: Text} deriving (Show, Eq, Ord)
+newtype Table row col a = Table (Map (row, col) a)
+type StatusSummary = Map Text (Table Text Platform BuildResult, Set Text)
+
+instance (Ord row, Ord col, Semigroup a) => Semigroup (Table row col a) where
+   Table l <> Table r = Table (Map.unionWith (<>) l r)
+instance (Ord row, Ord col, Semigroup a) => Monoid (Table row col a) where
+   mempty = Table Map.empty
+instance Functor (Table row col) where
+   fmap f (Table a) = Table (fmap f a)
+instance Foldable (Table row col) where
+   foldMap f (Table a) = foldMap f a
+
+buildSummary :: MaintainerMap -> Seq Build -> StatusSummary
+buildSummary maintainerMap = foldl (Map.unionWith unionSummary) Map.empty . fmap toSummary
+  where
+   unionSummary (Table l, l') (Table r, r') = (Table $ Map.union l r, l' <> r')
+   toSummary Build{finished, buildstatus, job, id, system} = Map.singleton name (Table (Map.singleton (set, Platform system) (BuildResult state id)), maintainers)
+     where
+      state :: BuildState
+      state = case (finished, buildstatus) of
+         (0, _) -> Unfinished
+         (_, Just 0) -> Success
+         (_, Just 1) -> Failed
+         (_, Just 2) -> DependencyFailed
+         (_, Just 3) -> HydraFailure
+         (_, Just 4) -> Canceled
+         (_, Just 7) -> TimedOut
+         (_, Just 11) -> OutputLimitExceeded
+         (_, i) -> Unknown i
+      packageName = fromMaybe job (Text.stripSuffix ("." <> system) job)
+      splitted = nonEmpty $ Text.splitOn "." packageName
+      name = maybe packageName NonEmpty.last splitted
+      set = maybe "" (Text.intercalate "." . NonEmpty.init) splitted
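+      -- e.g. job "haskellPackages.lens.x86_64-linux" yields name "lens"
+      -- and set "haskellPackages".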
+      maintainers = maybe mempty (Set.fromList . toList) (Map.lookup job maintainerMap)
+
+readBuildReports :: IO (Eval, UTCTime, Seq Build)
+readBuildReports = do
+   file <- reportFileName
+   fromMaybe (error $ "Could not decode " <> file) <$> decodeFileStrict' file
+
+sep :: Text
+sep = " | "
+joinTable :: [Text] -> Text
+joinTable t = sep <> Text.intercalate sep t <> sep
+
+type NumSummary = Table Platform BuildState Int
+
+printTable :: (Ord rows, Ord cols) => Text -> (rows -> Text) -> (cols -> Text) -> (entries -> Text) -> Table rows cols entries -> [Text]
+printTable name showR showC showE (Table mapping) = joinTable <$> (name : map showC cols) : replicate (length cols + sepsInName + 1) "---" : map printRow rows
+  where
+   sepsInName = Text.count "|" name
+   printRow row = showR row : map (\col -> maybe "" showE (Map.lookup (row, col) mapping)) cols
+   rows = toList $ Set.fromList (fst <$> Map.keys mapping)
+   cols = toList $ Set.fromList (snd <$> Map.keys mapping)
+
+printJob :: Int -> Text -> (Table Text Platform BuildResult, Text) -> [Text]
+printJob evalId name (Table mapping, maintainers) =
+   if length sets <= 1
+      then map printSingleRow sets
+      else ["- [ ] " <> makeJobSearchLink "" name <> " " <> maintainers] <> map printRow sets
+  where
+   printRow set = "  - " <> printState set <> " " <> makeJobSearchLink set (if Text.null set then "toplevel" else set)
+   printSingleRow set = "- [ ] " <> printState set <> " " <> makeJobSearchLink set (makePkgName set) <> " " <> maintainers
+   makePkgName set = (if Text.null set then "" else set <> ".") <> name
+   printState set = Text.intercalate " " $ map (\pf -> maybe "" (label pf) $ Map.lookup (set, pf) mapping) platforms
+   makeJobSearchLink set linkLabel = makeSearchLink evalId linkLabel (makePkgName set)
+   sets = toList $ Set.fromList (fst <$> Map.keys mapping)
+   platforms = toList $ Set.fromList (snd <$> Map.keys mapping)
+   label pf (BuildResult s i) = "[[" <> platformIcon pf <> icon s <> "]](https://hydra.nixos.org/build/" <> showT i <> ")"
+
+makeSearchLink :: Int -> Text -> Text -> Text
+makeSearchLink evalId linkLabel query = "[" <> linkLabel <> "](" <> "https://hydra.nixos.org/eval/" <> showT evalId <> "?filter=" <> query <> ")"
+
+statusToNumSummary :: StatusSummary -> NumSummary
+statusToNumSummary = fmap getSum . foldMap (fmap Sum . jobTotals)
+
+jobTotals :: (Table Text Platform BuildResult, a) -> Table Platform BuildState Int
+jobTotals (Table mapping, _) = getSum <$> Table (Map.foldMapWithKey (\(_, platform) (BuildResult buildstate _) -> Map.singleton (platform, buildstate) (Sum 1)) mapping)
+
+details :: Text -> [Text] -> [Text]
+details summary content = ["<details><summary>" <> summary <> " </summary>", ""] <> content <> ["</details>", ""]
+
+printBuildSummary :: Eval -> UTCTime -> StatusSummary -> Text
+printBuildSummary
+   Eval{id, jobsetevalinputs = JobsetEvalInputs{nixpkgs = Nixpkgs{revision}}}
+   fetchTime
+   summary =
+      Text.unlines $
+         headline <> totals
+            <> optionalList "#### Maintained packages with build failure" (maintainedList fails)
+            <> optionalList "#### Maintained packages with failed dependency" (maintainedList failedDeps)
+            <> optionalList "#### Maintained packages with unknown error" (maintainedList unknownErr)
+            <> optionalHideableList "#### Unmaintained packages with build failure" (unmaintainedList fails)
+            <> optionalHideableList "#### Unmaintained packages with failed dependency" (unmaintainedList failedDeps)
+            <> optionalHideableList "#### Unmaintained packages with unknown error" (unmaintainedList unknownErr)
+            <> footer
+     where
+      footer = ["*Report generated with [maintainers/scripts/haskell/hydra-report.hs](https://github.com/NixOS/nixpkgs/blob/haskell-updates/maintainers/scripts/haskell/hydra-report.sh)*"]
+      totals =
+         [ "#### Build summary"
+         , ""
+         ]
+            <> printTable "Platform" (\x -> makeSearchLink id (platform x <> " " <> platformIcon x) ("." <> platform x)) (\x -> showT x <> " " <> icon x) showT (statusToNumSummary summary)
+      headline =
+         [ "### [haskell-updates build report from hydra](https://hydra.nixos.org/jobset/nixpkgs/haskell-updates)"
+         , "*evaluation ["
+            <> showT id
+            <> "](https://hydra.nixos.org/eval/"
+            <> showT id
+            <> ") of nixpkgs commit ["
+            <> Text.take 7 revision
+            <> "](https://github.com/NixOS/nixpkgs/commits/"
+            <> revision
+            <> ") as of "
+            <> Text.pack (formatTime defaultTimeLocale "%Y-%m-%d %H:%M UTC" fetchTime)
+            <> "*"
+         ]
+      jobsByState predicate = Map.filter (predicate . foldl' min Success . fmap state . fst) summary
+      fails = jobsByState (== Failed)
+      failedDeps = jobsByState (== DependencyFailed)
+      unknownErr = jobsByState (\x -> x > DependencyFailed && x < TimedOut)
+      withMaintainer = Map.mapMaybe (\(x, m) -> (x,) <$> nonEmpty (Set.toList m))
+      withoutMaintainer = Map.mapMaybe (\(x, m) -> if Set.null m then Just x else Nothing)
+      optionalList heading list = if null list then mempty else [heading] <> list
+      optionalHideableList heading list = if null list then mempty else [heading] <> details (showT (length list) <> " job(s)") list
+      maintainedList = showMaintainedBuild <=< Map.toList . withMaintainer
+      unmaintainedList = showBuild <=< Map.toList . withoutMaintainer
+      showBuild (name, table) = printJob id name (table, "")
+      showMaintainedBuild (name, (table, maintainers)) = printJob id name (table, Text.intercalate " " (fmap ("@" <>) (toList maintainers)))
+
+printMaintainerPing :: IO ()
+printMaintainerPing = do
+   maintainerMap <- getMaintainerMap
+   (eval, fetchTime, buildReport) <- readBuildReports
+   putStrLn (Text.unpack (printBuildSummary eval fetchTime (buildSummary maintainerMap buildReport)))
+
+printMarkBrokenList :: IO ()
+printMarkBrokenList = do
+   (_, _, buildReport) <- readBuildReports
+   forM_ buildReport \Build{buildstatus, job} ->
+      case (buildstatus, Text.splitOn "." job) of
+         (Just 1, ["haskellPackages", name, "x86_64-linux"]) -> putStrLn $ "  - " <> Text.unpack name
+         _ -> pure ()
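For readability, the Nix expression embedded in handlesExpression above,
which builds the email-to-GitHub-handle map from maintainer-list.nix,
reformatted:

    with import ./. { };
    with lib;
    zipAttrsWith (_: builtins.head) (
      mapAttrsToList
        (_: v: if v ? github then { "${v.email}" = v.github; } else { })
        (import maintainers/maintainer-list.nix)
    )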
diff --git a/maintainers/scripts/haskell/mark-broken.sh b/maintainers/scripts/haskell/mark-broken.sh
new file mode 100755
index 00000000000..97dd5be8aaa
--- /dev/null
+++ b/maintainers/scripts/haskell/mark-broken.sh
@@ -0,0 +1,47 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils git -I nixpkgs=.
+
+# This script uses the data pulled with
+# maintainers/scripts/haskell/hydra-report.hs get-report to produce a list of
+# failing builds that get written to the hackage2nix config. Then
+# hackage-packages.nix gets regenerated and transitive-broken packages get
+# marked as dont-distribute in the config as well.
+# This should disable builds for most failing jobs in the haskell-updates jobset.
+
+set -euo pipefail
+
+broken_config="pkgs/development/haskell-modules/configuration-hackage2nix/broken.yaml"
+
+tmpfile=$(mktemp)
+trap "rm ${tmpfile}" 0
+
+echo "Remember that you need to manually run 'maintainers/scripts/haskell/hydra-report.hs get-report' sometime before running this script."
+echo "Generating a list of broken builds and displaying for manual confirmation ..."
+maintainers/scripts/haskell/hydra-report.hs mark-broken-list | sort -i > "$tmpfile"
+
+$EDITOR "$tmpfile"
+
+tail -n +3 "$broken_config" >> "$tmpfile"
+
+cat > "$broken_config" << EOF
+broken-packages:
+  # These packages don't compile.
+EOF
+
+sort -iu "$tmpfile" >> "$broken_config"
+# Clear the environment so user settings (e.g. allowing broken builds) can't leak in.
+clear="env -u HOME -u NIXPKGS_CONFIG"
+$clear maintainers/scripts/haskell/regenerate-hackage-packages.sh
+$clear maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
+$clear maintainers/scripts/haskell/regenerate-hackage-packages.sh
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add $broken_config
+git add pkgs/development/haskell-modules/configuration-hackage2nix/transitive-broken.yaml
+git add pkgs/development/haskell-modules/hackage-packages.nix
+git commit -F - << EOF
+haskellPackages: mark builds failing on hydra as broken
+
+This commit has been generated by maintainers/scripts/haskell/mark-broken.sh
+EOF
+fi
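A typical run of the workflow described in the header comment, using the
commands mentioned above:

    maintainers/scripts/haskell/hydra-report.hs get-report
    maintainers/scripts/haskell/mark-broken.sh --do-commit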
diff --git a/maintainers/scripts/haskell/regenerate-hackage-packages.sh b/maintainers/scripts/haskell/regenerate-hackage-packages.sh
new file mode 100755
index 00000000000..285f6ed7cf9
--- /dev/null
+++ b/maintainers/scripts/haskell/regenerate-hackage-packages.sh
@@ -0,0 +1,46 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils haskellPackages.cabal2nix-unstable git nix -I nixpkgs=.
+
+# This script is used to regenerate nixpkgs' Haskell package set, using the
+# tool hackage2nix from the nixos/cabal2nix repo. hackage2nix looks at the
+# config files in pkgs/development/haskell-modules/configuration-hackage2nix
+# and generates a Nix expression for the package versions specified there, using the
+# Cabal files from the Hackage database (available under all-cabal-hashes) and
+# its companion tool cabal2nix.
+#
+# Related scripts are update-hackage.sh, for updating the snapshot of the
+# Hackage database used by hackage2nix, and update-cabal2nix-unstable.sh,
+# for updating the version of hackage2nix used to perform this task.
+
+set -euo pipefail
+
+HACKAGE2NIX="${HACKAGE2NIX:-hackage2nix}"
+
+# Prevent hackage2nix from failing because of encoding issues.
+# See: https://github.com/NixOS/nixpkgs/pull/122023
+export LC_ALL=C.UTF-8
+
+extraction_derivation='with import ./. {}; runCommand "unpacked-cabal-hashes" { } "tar xf ${all-cabal-hashes} --strip-components=1 --one-top-level=$out"'
+unpacked_hackage="$(nix-build -E "$extraction_derivation" --no-out-link)"
+config_dir=pkgs/development/haskell-modules/configuration-hackage2nix
+
+echo "Starting hackage2nix to regenerate pkgs/development/haskell-modules/hackage-packages.nix ..."
+"$HACKAGE2NIX" \
+   --hackage "$unpacked_hackage" \
+   --preferred-versions <(for n in "$unpacked_hackage"/*/preferred-versions; do cat "$n"; echo; done) \
+   --nixpkgs "$PWD" \
+   --config "$config_dir/main.yaml" \
+   --config "$config_dir/stackage.yaml" \
+   --config "$config_dir/broken.yaml" \
+   --config "$config_dir/transitive-broken.yaml"
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add pkgs/development/haskell-modules/hackage-packages.nix
+git commit -F - << EOF
+haskellPackages: regenerate package set based on current config
+
+This commit has been generated by maintainers/scripts/haskell/regenerate-hackage-packages.sh
+EOF
+fi
+
+echo "Regeneration of hackage-packages.nix finished."
diff --git a/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh b/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
new file mode 100755
index 00000000000..94104e00edb
--- /dev/null
+++ b/maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
@@ -0,0 +1,15 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils nix gnused -I nixpkgs=.
+
+config_file=pkgs/development/haskell-modules/configuration-hackage2nix/transitive-broken.yaml
+
+cat > $config_file << EOF
+# This file is automatically generated by
+# maintainers/scripts/haskell/regenerate-transitive-broken-packages.sh
+# It is supposed to list all haskellPackages that cannot evaluate because they
+# depend on a dependency marked as broken.
+dont-distribute-packages:
+EOF
+
+echo "Regenerating list of transitive broken packages ..."
+echo -e $(nix-instantiate --eval --strict maintainers/scripts/haskell/transitive-broken-packages.nix) | sed 's/\"//' | LC_ALL=C.UTF-8 sort -i >> $config_file
diff --git a/maintainers/scripts/haskell/transitive-broken-packages.nix b/maintainers/scripts/haskell/transitive-broken-packages.nix
new file mode 100644
index 00000000000..d4ddaa95765
--- /dev/null
+++ b/maintainers/scripts/haskell/transitive-broken-packages.nix
@@ -0,0 +1,16 @@
+let
+  nixpkgs = import ../../..;
+  inherit (nixpkgs {}) pkgs lib;
+  getEvaluating = x:
+    builtins.attrNames (
+      lib.filterAttrs (
+        _: v: (builtins.tryEval (v.outPath or null)).success && lib.isDerivation v && !v.meta.broken
+      ) x
+    );
+  brokenDeps = lib.subtractLists
+    (getEvaluating pkgs.haskellPackages)
+    (getEvaluating (nixpkgs { config.allowBroken = true; }).haskellPackages);
+in
+''
+  ${lib.concatMapStringsSep "\n" (x: "  - ${x}") brokenDeps}
+''
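The list subtraction above in miniature: any package that evaluates with
allowBroken = true but not without it must depend on something marked
broken. With hypothetical package names:

    lib.subtractLists [ "a" "c" ] [ "a" "b" "c" ]  # => [ "b" ]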
diff --git a/maintainers/scripts/haskell/update-cabal2nix-unstable.sh b/maintainers/scripts/haskell/update-cabal2nix-unstable.sh
new file mode 100755
index 00000000000..41583704560
--- /dev/null
+++ b/maintainers/scripts/haskell/update-cabal2nix-unstable.sh
@@ -0,0 +1,17 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p coreutils curl jq gnused haskellPackages.cabal2nix-unstable -I nixpkgs=.
+
+# Updates cabal2nix-unstable to the latest master of the nixos/cabal2nix repository.
+# See regenerate-hackage-packages.sh for details on the purpose of this script.
+
+set -euo pipefail
+
+# fetch current master HEAD from GitHub
+head_info="$(curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/NixOS/cabal2nix/branches/master)"
+# extract commit hash
+commit="$(jq -r .commit.sha <<< "$head_info")"
+# extract commit timestamp and convert to date
+date="$(date "--date=$(jq -r .commit.commit.committer.date <<< "$head_info")" +%F)"
+# generate nix expression from cabal file, replacing the version with the commit date
+echo '# This file defines cabal2nix-unstable, used by maintainers/scripts/haskell/regenerate-hackage-packages.sh.' > pkgs/development/haskell-modules/cabal2nix-unstable.nix
+cabal2nix "https://github.com/NixOS/cabal2nix/archive/$commit.tar.gz" | sed -e 's/version = ".*"/version = "'"unstable-$date"'"/' >> pkgs/development/haskell-modules/cabal2nix-unstable.nix
diff --git a/maintainers/scripts/haskell/update-hackage.sh b/maintainers/scripts/haskell/update-hackage.sh
new file mode 100755
index 00000000000..a7cfecbbb0f
--- /dev/null
+++ b/maintainers/scripts/haskell/update-hackage.sh
@@ -0,0 +1,35 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix curl jq nix-prefetch-github git gnused -I nixpkgs=.
+
+# See regenerate-hackage-packages.sh for details on the purpose of this script.
+
+set -euo pipefail
+
+pin_file=pkgs/data/misc/hackage/pin.json
+current_commit="$(jq -r .commit $pin_file)"
+old_date="$(jq -r .msg $pin_file | sed 's/Update from Hackage at //')"
+git_info="$(curl -H "Accept: application/vnd.github.v3+json" https://api.github.com/repos/commercialhaskell/all-cabal-hashes/branches/hackage)"
+head_commit="$(echo "$git_info" | jq -r .commit.sha)"
+commit_msg="$(echo "$git_info" | jq -r .commit.commit.message)"
+new_date="$(echo "$commit_msg" | sed 's/Update from Hackage at //')"
+
+if [ "$current_commit" != "$head_commit" ]; then
+   url="https://github.com/commercialhaskell/all-cabal-hashes/archive/$head_commit.tar.gz"
+   hash="$(nix-prefetch-url "$url")"
+   jq -n \
+     --arg commit "$head_commit" \
+     --arg hash "$hash" \
+     --arg url "$url" \
+     --arg commit_msg "$commit_msg" \
+     '{commit: $commit, url: $url, sha256: $hash, msg: $commit_msg}' \
+     > $pin_file
+fi
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add pkgs/data/misc/hackage/pin.json
+git commit -F - << EOF
+all-cabal-hashes: $old_date -> $new_date
+
+This commit has been generated by maintainers/scripts/haskell/update-hackage.sh
+EOF
+fi
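The pin file written by the jq invocation above has the following shape
(all values hypothetical):

    {
      "commit": "d67c6b1f0e53c0d70bdd1fc1ac956c6d029a6a4d",
      "url": "https://github.com/commercialhaskell/all-cabal-hashes/archive/d67c6b1f0e53c0d70bdd1fc1ac956c6d029a6a4d.tar.gz",
      "sha256": "0l7ac5rjy9bh2kvs2gwwvbbvrwdfzcgvhwsjhv2g0rq0amhh1gm4",
      "msg": "Update from Hackage at 2021-05-01T12:00:00Z"
    }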
diff --git a/maintainers/scripts/haskell/update-stackage.sh b/maintainers/scripts/haskell/update-stackage.sh
new file mode 100755
index 00000000000..ecf38dc4b90
--- /dev/null
+++ b/maintainers/scripts/haskell/update-stackage.sh
@@ -0,0 +1,57 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix curl jq nix-prefetch-github git gnused gnugrep -I nixpkgs=.
+
+set -eu -o pipefail
+
+tmpfile=$(mktemp "update-stackage.XXXXXXX")
+
+stackage_config="pkgs/development/haskell-modules/configuration-hackage2nix/stackage.yaml"
+
+# shellcheck disable=SC2064
+trap "rm ${tmpfile} ${tmpfile}.new" 0
+touch "$tmpfile" "$tmpfile.new" # Create the files up front so the trap cannot fail.
+
+curl -L -s "https://stackage.org/lts/cabal.config" >"$tmpfile"
+old_version=$(grep "# Stackage" $stackage_config | sed -E 's/.*([0-9]{2}\.[0-9]+)/\1/')
+version=$(sed -rn "s/^--.*http:..(www.)?stackage.org.snapshot.lts-//p" "$tmpfile")
+
+if [[ "$old_version" == "$version" ]]; then
+   echo "No new stackage version"
+   exit 0 # Nothing to do
+fi
+
+echo "Updating Stackage LTS from $old_version to $version."
+
+# Create a simple yaml version of the file.
+sed -r \
+    -e '/^--/d' \
+    -e 's|^constraints:||' \
+    -e 's|^ +|  - |' \
+    -e 's|,$||' \
+    -e '/installed$/d' \
+    -e '/^$/d' \
+    < "${tmpfile}" | sort --ignore-case >"${tmpfile}.new"
+
+cat > $stackage_config << EOF
+# Stackage LTS $version
+# This file is auto-generated by
+# maintainers/scripts/haskell/update-stackage.sh
+default-package-overrides:
+EOF
+
+# Drop restrictions on some tools where we always want the latest version.
+sed -r \
+    -e '/ cabal2nix /d' \
+    -e '/ distribution-nixpkgs /d' \
+    -e '/ jailbreak-cabal /d' \
+    -e '/ language-nix /d' \
+    < "${tmpfile}.new" >> $stackage_config
+
+if [[ "${1:-}" == "--do-commit" ]]; then
+git add $stackage_config
+git commit -F - << EOF
+haskellPackages: stackage-lts $old_version -> $version
+
+This commit has been generated by maintainers/scripts/haskell/update-stackage.sh
+EOF
+fi
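The sed pipeline above turns Stackage's cabal.config into the YAML list
appended to stackage.yaml; roughly, a hypothetical excerpt like

    constraints: aeson ==1.5.6.0,
                 base installed,
                 conduit ==1.3.4.1,

becomes

      - aeson ==1.5.6.0
      - conduit ==1.3.4.1

with comment lines and lines ending in "installed" dropped.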
diff --git a/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh b/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh
new file mode 100755
index 00000000000..c49ea68fbfb
--- /dev/null
+++ b/maintainers/scripts/haskell/upload-nixos-package-list-to-hackage.sh
@@ -0,0 +1,21 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i bash -p nix curl gnused -I nixpkgs=.
+
+# On Hackage every package description shows a category "Distributions" which
+# lists a "NixOS" version.
+# This script uploads a CSV to Hackage which updates the displayed versions
+# based on the current versions in nixpkgs. This happens with a simple HTTP
+# request.
+
+# For authorization you just need any valid Hackage account. This
+# script uses the `username` and `password-command` fields from your
+# ~/.cabal/config file.
+
+# e.g. username: maralorn
+#      password-command: pass hackage.haskell.org (this can be any command, but not an arbitrary shell expression)
+# Those fields are specified under `upload` in the `cabal` man page.
+
+package_list="$(nix-build -A haskell.package-list)/nixos-hackage-packages.csv"
+username=$(grep "^username:" ~/.cabal/config | sed "s/^username: //")
+password_command=$(grep "^password-command:" ~/.cabal/config | sed "s/^password-command: //")
+curl -u "$username:$($password_command)" --digest -H "Content-type: text/csv" -T "$package_list" http://hackage.haskell.org/distro/NixOS/packages.csv
diff --git a/maintainers/scripts/hydra-eval-failures.py b/maintainers/scripts/hydra-eval-failures.py
index 0f738c5427b..b7518b12857 100755
--- a/maintainers/scripts/hydra-eval-failures.py
+++ b/maintainers/scripts/hydra-eval-failures.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env nix-shell
-#!nix-shell -i python3 -p 'python3.withPackages(ps: with ps; [ requests pyquery click ])'
+#!nix-shell -i python3 -p "python3.withPackages(ps: with ps; [ requests pyquery click ])"
 
 # To use, just execute this script with --help to display help.
 
diff --git a/maintainers/scripts/luarocks-packages.csv b/maintainers/scripts/luarocks-packages.csv
index 4ccb8483430..a387430245a 100644
--- a/maintainers/scripts/luarocks-packages.csv
+++ b/maintainers/scripts/luarocks-packages.csv
@@ -6,7 +6,7 @@ basexx,,,,,
 binaryheap,,,,,vcunat
 bit32,,,,lua5_1,lblasc
 busted,,,,,
-cassowary,,,,,marsam
+cassowary,,,,,marsam alerque
 cjson,lua-cjson,,,,
 compat53,,,,,vcunat
 cosmo,,,,,marsam
@@ -35,6 +35,11 @@ lua-cmsgpack,,,,,
 lua-iconv,,,,,
 lua-lsp,,http://luarocks.org/dev,,,
 lua-messagepack,,,,,
+lua-resty-http,,,,,
+lua-resty-jwt,,,,,
+lua-resty-openidc,,,,,
+lua-resty-openssl,,,,,
+lua-resty-session,,,,,
 lua-term,,,,,
 lua-toml,,,,,
 lua-zlib,,,,,koral
diff --git a/maintainers/scripts/nix-generate-from-cpan.nix b/maintainers/scripts/nix-generate-from-cpan.nix
index 5c4cf0f6c55..fecca7f0c73 100644
--- a/maintainers/scripts/nix-generate-from-cpan.nix
+++ b/maintainers/scripts/nix-generate-from-cpan.nix
@@ -1,4 +1,4 @@
-{ stdenv, makeWrapper, perl, perlPackages }:
+{ stdenv, lib, makeWrapper, perl, perlPackages }:
 
 stdenv.mkDerivation {
   name = "nix-generate-from-cpan-3";
@@ -18,8 +18,8 @@ stdenv.mkDerivation {
     '';
 
   meta = {
-    maintainers = with stdenv.lib.maintainers; [ eelco rycee ];
+    maintainers = with lib.maintainers; [ eelco ];
     description = "Utility to generate a Nix expression for a Perl package from CPAN";
-    platforms = stdenv.lib.platforms.unix;
+    platforms = lib.platforms.unix;
   };
 }
diff --git a/maintainers/scripts/nix-generate-from-cpan.pl b/maintainers/scripts/nix-generate-from-cpan.pl
index f02af4ea669..6494acb50da 100755
--- a/maintainers/scripts/nix-generate-from-cpan.pl
+++ b/maintainers/scripts/nix-generate-from-cpan.pl
@@ -309,7 +309,7 @@ sub render_license {
     # "GPL v2" or to "GPL v2 or later".
     my $amb = 0;
 
-    # Whether the license is available inside `stdenv.lib.licenses`.
+    # Whether the license is available inside `lib.licenses`.
     my $in_set = 1;
 
     my $nix_license = $LICENSE_MAP{$cpan_license};
@@ -331,7 +331,7 @@ sub render_license {
         # Avoid defining the license line.
     }
     elsif ($in_set) {
-        my $lic = 'stdenv.lib.licenses';
+        my $lic = 'lib.licenses';
         if ( @$licenses == 1 ) {
             $license_line = "$lic.$licenses->[0]";
         }
@@ -449,7 +449,7 @@ print <<EOF;
     meta = {
 EOF
 print <<EOF if defined $homepage;
-      homepage = $homepage;
+      homepage = "$homepage";
 EOF
 print <<EOF if defined $description && $description ne "Unknown";
       description = "$description";
diff --git a/maintainers/scripts/nixpkgs-lint.nix b/maintainers/scripts/nixpkgs-lint.nix
index 6d99c94bf33..873905373af 100644
--- a/maintainers/scripts/nixpkgs-lint.nix
+++ b/maintainers/scripts/nixpkgs-lint.nix
@@ -1,9 +1,10 @@
-{ stdenv, makeWrapper, perl, perlPackages }:
+{ stdenv, lib, makeWrapper, perl, perlPackages }:
 
 stdenv.mkDerivation {
   name = "nixpkgs-lint-1";
 
-  buildInputs = [ makeWrapper perl perlPackages.XMLSimple ];
+  nativeBuildInputs = [ makeWrapper ];
+  buildInputs = [ perl perlPackages.XMLSimple ];
 
   dontUnpack = true;
   buildPhase = "true";
@@ -15,9 +16,9 @@ stdenv.mkDerivation {
       wrapProgram $out/bin/nixpkgs-lint --set PERL5LIB $PERL5LIB
     '';
 
-  meta = {
-    maintainers = [ stdenv.lib.maintainers.eelco ];
+  meta = with lib; {
+    maintainers = [ maintainers.eelco ];
     description = "A utility for Nixpkgs contributors to check Nixpkgs for common errors";
-    platforms = stdenv.lib.platforms.unix;
+    platforms = platforms.unix;
   };
 }
diff --git a/maintainers/scripts/nixpkgs-lint.pl b/maintainers/scripts/nixpkgs-lint.pl
index 638d1b2aaa1..43fb3941361 100755
--- a/maintainers/scripts/nixpkgs-lint.pl
+++ b/maintainers/scripts/nixpkgs-lint.pl
@@ -35,7 +35,7 @@ GetOptions("package|p=s" => \$filter,
     ) or exit 1;
 
 # Evaluate Nixpkgs into an XML representation.
-my $xml = `nix-env -f '$path' -qa '$filter' --xml --meta --drv-path`;
+my $xml = `nix-env -f '$path' --arg overlays '[]' -qa '$filter' --xml --meta --drv-path`;
 die "$0: evaluation of ‘$path’ failed\n" if $? != 0;
 
 my $info = XMLin($xml, KeyAttr => { 'item' => '+attrPath', 'meta' => 'name' }, ForceArray => 1, SuppressEmpty => '' ) or die "cannot parse XML output";
diff --git a/maintainers/scripts/pluginupdate.py b/maintainers/scripts/pluginupdate.py
new file mode 100644
index 00000000000..91c5214d153
--- /dev/null
+++ b/maintainers/scripts/pluginupdate.py
@@ -0,0 +1,555 @@
+# Used by pkgs/misc/vim-plugins/update.py and pkgs/applications/editors/kakoune/plugins/update.py
+
+# format:
+# $ nix run nixpkgs.python3Packages.black -c black update.py
+# type-check:
+# $ nix run nixpkgs.python3Packages.mypy -c mypy update.py
+# linted:
+# $ nix run nixpkgs.python3Packages.flake8 -c flake8 --ignore E501,E265 update.py
+
+import argparse
+import functools
+import http
+import json
+import os
+import subprocess
+import logging
+import sys
+import time
+import traceback
+import urllib.error
+import urllib.parse
+import urllib.request
+import xml.etree.ElementTree as ET
+from datetime import datetime
+from functools import wraps
+from multiprocessing.dummy import Pool
+from pathlib import Path
+from typing import Dict, List, Optional, Tuple, Union, Any, Callable
+from urllib.parse import urljoin, urlparse
+from tempfile import NamedTemporaryFile
+
+import git
+
+ATOM_ENTRY = "{http://www.w3.org/2005/Atom}entry"  # " vim gets confused here
+ATOM_LINK = "{http://www.w3.org/2005/Atom}link"  # "
+ATOM_UPDATED = "{http://www.w3.org/2005/Atom}updated"  # "
+
+LOG_LEVELS = {
+    logging.getLevelName(level): level for level in [
+        logging.DEBUG, logging.INFO, logging.WARN, logging.ERROR ]
+}
+
+log = logging.getLogger()
+log.addHandler(logging.StreamHandler())
+
+
+def retry(ExceptionToCheck: Any, tries: int = 4, delay: float = 3, backoff: float = 2):
+    """Retry calling the decorated function using an exponential backoff.
+    http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
+    original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
+    (BSD licensed)
+    :param ExceptionToCheck: the exception on which to retry
+    :param tries: number of times to try (not retry) before giving up
+    :param delay: initial delay between retries in seconds
+    :param backoff: backoff multiplier e.g. value of 2 will double the delay
+        each retry
+    """
+
+    def deco_retry(f: Callable) -> Callable:
+        @wraps(f)
+        def f_retry(*args: Any, **kwargs: Any) -> Any:
+            mtries, mdelay = tries, delay
+            while mtries > 1:
+                try:
+                    return f(*args, **kwargs)
+                except ExceptionToCheck as e:
+                    print(f"{str(e)}, Retrying in {mdelay} seconds...")
+                    time.sleep(mdelay)
+                    mtries -= 1
+                    mdelay *= backoff
+            return f(*args, **kwargs)
+
+        return f_retry  # true decorator
+
+    return deco_retry
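+
+# Example use of the retry decorator above (hypothetical function; mirrors
+# the decorated methods further below):
+#
+#   @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+#   def fetch_feed(url: str) -> bytes:
+#       with urllib.request.urlopen(url, timeout=10) as resp:
+#           return resp.read()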
+
+
+def make_request(url: str) -> urllib.request.Request:
+    token = os.getenv("GITHUB_API_TOKEN")
+    headers = {}
+    if token is not None:
+        headers["Authorization"] = f"token {token}"
+    return urllib.request.Request(url, headers=headers)
+
+
+class Repo:
+    def __init__(
+        self, owner: str, name: str, branch: str, alias: Optional[str]
+    ) -> None:
+        self.owner = owner
+        self.name = name
+        self.branch = branch
+        self.alias = alias
+        self.redirect: Dict[str, str] = {}
+
+    def url(self, path: str) -> str:
+        return urljoin(f"https://github.com/{self.owner}/{self.name}/", path)
+
+    def __repr__(self) -> str:
+        return f"Repo({self.owner}, {self.name})"
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def has_submodules(self) -> bool:
+        try:
+            req = make_request(self.url(f"blob/{self.branch}/.gitmodules"))
+            urllib.request.urlopen(req, timeout=10).close()
+        except urllib.error.HTTPError as e:
+            if e.code == 404:
+                return False
+            else:
+                raise
+        return True
+
+    @retry(urllib.error.URLError, tries=4, delay=3, backoff=2)
+    def latest_commit(self) -> Tuple[str, datetime]:
+        commit_url = self.url(f"commits/{self.branch}.atom")
+        commit_req = make_request(commit_url)
+        with urllib.request.urlopen(commit_req, timeout=10) as req:
+            self.check_for_redirect(commit_url, req)
+            xml = req.read()
+            root = ET.fromstring(xml)
+            latest_entry = root.find(ATOM_ENTRY)
+            assert latest_entry is not None, f"No commits found in repository {self}"
+            commit_link = latest_entry.find(ATOM_LINK)
+            assert commit_link is not None, f"No link tag found in feed entry {xml}"
+            url = urlparse(commit_link.get("href"))
+            updated_tag = latest_entry.find(ATOM_UPDATED)
+            assert (
+                updated_tag is not None and updated_tag.text is not None
+            ), f"No updated tag found in feed entry {xml}"
+            updated = datetime.strptime(updated_tag.text, "%Y-%m-%dT%H:%M:%SZ")
+            return Path(str(url.path)).name, updated
+
+    def check_for_redirect(self, url: str, req: http.client.HTTPResponse):
+        response_url = req.geturl()
+        if url != response_url:
+            new_owner, new_name = (
+                urllib.parse.urlsplit(response_url).path.strip("/").split("/")[:2]
+            )
+            end_line = "\n" if self.alias is None else f" as {self.alias}\n"
+            plugin_line = "{owner}/{name}" + end_line
+
+            old_plugin = plugin_line.format(owner=self.owner, name=self.name)
+            new_plugin = plugin_line.format(owner=new_owner, name=new_name)
+            self.redirect[old_plugin] = new_plugin
+
+    def prefetch_git(self, ref: str) -> str:
+        data = subprocess.check_output(
+            ["nix-prefetch-git", "--fetch-submodules", self.url(""), ref]
+        )
+        return json.loads(data)["sha256"]
+
+    def prefetch_github(self, ref: str) -> str:
+        data = subprocess.check_output(
+            ["nix-prefetch-url", "--unpack", self.url(f"archive/{ref}.tar.gz")]
+        )
+        return data.strip().decode("utf-8")
+
+
+class Plugin:
+    def __init__(
+        self,
+        name: str,
+        commit: str,
+        has_submodules: bool,
+        sha256: str,
+        date: Optional[datetime] = None,
+    ) -> None:
+        self.name = name
+        self.commit = commit
+        self.has_submodules = has_submodules
+        self.sha256 = sha256
+        self.date = date
+
+    @property
+    def normalized_name(self) -> str:
+        return self.name.replace(".", "-")
+
+    @property
+    def version(self) -> str:
+        assert self.date is not None
+        return self.date.strftime("%Y-%m-%d")
+
+    def as_json(self) -> Dict[str, str]:
+        copy = self.__dict__.copy()
+        del copy["date"]
+        return copy
+
+
+class Editor:
+    """The configuration of the update script."""
+
+    def __init__(
+        self,
+        name: str,
+        root: Path,
+        get_plugins: str,
+        generate_nix: Callable[[List[Tuple[str, str, Plugin]], str], None],
+        default_in: Optional[Path] = None,
+        default_out: Optional[Path] = None,
+        deprecated: Optional[Path] = None,
+        cache_file: Optional[str] = None,
+    ):
+        self.name = name
+        self.root = root
+        self.get_plugins = get_plugins
+        self.generate_nix = generate_nix
+        self.default_in = default_in or root.joinpath(f"{name}-plugin-names")
+        self.default_out = default_out or root.joinpath("generated.nix")
+        self.deprecated = deprecated or root.joinpath("deprecated.json")
+        self.cache_file = cache_file or f"{name}-plugin-cache.json"
+
+
+class CleanEnvironment(object):
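+    """Run nix commands in a clean environment: NIX_PATH points at this
+    nixpkgs checkout and NIXPKGS_CONFIG at an empty config file, so user
+    settings cannot leak into the evaluation."""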
+    def __enter__(self) -> None:
+        self.old_environ = os.environ.copy()
+        local_pkgs = str(Path(__file__).parent.parent.parent)
+        os.environ["NIX_PATH"] = f"localpkgs={local_pkgs}"
+        self.empty_config = NamedTemporaryFile()
+        self.empty_config.write(b"{}")
+        self.empty_config.flush()
+        os.environ["NIXPKGS_CONFIG"] = self.empty_config.name
+
+    def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
+        os.environ.update(self.old_environ)
+        self.empty_config.close()
+
+
+def get_current_plugins(editor: Editor) -> List[Plugin]:
+    with CleanEnvironment():
+        out = subprocess.check_output(["nix", "eval", "--json", editor.get_plugins])
+    data = json.loads(out)
+    plugins = []
+    for name, attr in data.items():
+        p = Plugin(name, attr["rev"], attr["submodules"], attr["sha256"])
+        plugins.append(p)
+    return plugins
+
+
+def prefetch_plugin(
+    user: str,
+    repo_name: str,
+    branch: str,
+    alias: Optional[str],
+    cache: "Optional[Cache]" = None,
+) -> Tuple[Plugin, Dict[str, str]]:
+    log.info("Prefetching plugin %s", repo_name)
+    repo = Repo(user, repo_name, branch, alias)
+    commit, date = repo.latest_commit()
+    has_submodules = repo.has_submodules()
+    cached_plugin = cache[commit] if cache else None
+    if cached_plugin is not None:
+        cached_plugin.name = alias or repo_name
+        cached_plugin.date = date
+        return cached_plugin, repo.redirect
+
+    print(f"prefetch {user}/{repo_name}")
+    if has_submodules:
+        sha256 = repo.prefetch_git(commit)
+    else:
+        sha256 = repo.prefetch_github(commit)
+
+    return (
+        Plugin(alias or repo_name, commit, has_submodules, sha256, date=date),
+        repo.redirect,
+    )
+
+
+def fetch_plugin_from_pluginline(plugin_line: str) -> Plugin:
+    plugin, _ = prefetch_plugin(*parse_plugin_line(plugin_line))
+    return plugin
+
+
+def print_download_error(plugin: str, ex: Exception):
+    print(f"{plugin}: {ex}", file=sys.stderr)
+    ex_traceback = ex.__traceback__
+    tb_lines = [
+        line.rstrip("\n")
+        for line in traceback.format_exception(ex.__class__, ex, ex_traceback)
+    ]
+    print("\n".join(tb_lines))
+
+
+def check_results(
+    results: List[Tuple[str, str, Union[Exception, Plugin], Dict[str, str]]]
+) -> Tuple[List[Tuple[str, str, Plugin]], Dict[str, str]]:
+    failures: List[Tuple[str, Exception]] = []
+    plugins = []
+    redirects: Dict[str, str] = {}
+    for (owner, name, result, redirect) in results:
+        if isinstance(result, Exception):
+            failures.append((name, result))
+        else:
+            plugins.append((owner, name, result))
+            redirects.update(redirect)
+
+    print(f"{len(results) - len(failures)} plugins were checked", end="")
+    if len(failures) == 0:
+        print()
+        return plugins, redirects
+    else:
+        print(f", {len(failures)} plugin(s) could not be downloaded:\n")
+
+        for (plugin, exception) in failures:
+            print_download_error(plugin, exception)
+
+        sys.exit(1)
+
+
+def parse_plugin_line(line: str) -> Tuple[str, str, str, Optional[str]]:
+    branch = "master"
+    alias = None
+    name, repo = line.split("/")
+    if " as " in repo:
+        repo, alias = repo.split(" as ")
+        alias = alias.strip()
+    if "@" in repo:
+        repo, branch = repo.split("@")
+
+    return (name.strip(), repo.strip(), branch.strip(), alias)
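+
+# Line formats accepted by parse_plugin_line:
+#   owner/repo
+#   owner/repo@branch
+#   owner/repo as alias
+#   owner/repo@branch as alias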
+
+
+def load_plugin_spec(plugin_file: str) -> List[Tuple[str, str, str, Optional[str]]]:
+    plugins = []
+    with open(plugin_file) as f:
+        for line in f:
+            plugin = parse_plugin_line(line)
+            if not plugin[0]:
+                msg = f"Invalid repository {line}, must be in the format owner/repo[ as alias]"
+                print(msg, file=sys.stderr)
+                sys.exit(1)
+            plugins.append(plugin)
+    return plugins
+
+
+def get_cache_path(cache_file_name: str) -> Optional[Path]:
+    xdg_cache = os.environ.get("XDG_CACHE_HOME", None)
+    if xdg_cache is None:
+        home = os.environ.get("HOME", None)
+        if home is None:
+            return None
+        xdg_cache = str(Path(home, ".cache"))
+
+    return Path(xdg_cache, cache_file_name)
+
+
+class Cache:
+    def __init__(self, initial_plugins: List[Plugin], cache_file_name: str) -> None:
+        self.cache_file = get_cache_path(cache_file_name)
+
+        downloads = {}
+        for plugin in initial_plugins:
+            downloads[plugin.commit] = plugin
+        downloads.update(self.load())
+        self.downloads = downloads
+
+    def load(self) -> Dict[str, Plugin]:
+        if self.cache_file is None or not self.cache_file.exists():
+            return {}
+
+        downloads: Dict[str, Plugin] = {}
+        with open(self.cache_file) as f:
+            data = json.load(f)
+            for attr in data.values():
+                p = Plugin(
+                    attr["name"], attr["commit"], attr["has_submodules"], attr["sha256"]
+                )
+                downloads[attr["commit"]] = p
+        return downloads
+
+    def store(self) -> None:
+        if self.cache_file is None:
+            return
+
+        os.makedirs(self.cache_file.parent, exist_ok=True)
+        with open(self.cache_file, "w+") as f:
+            data = {}
+            for name, attr in self.downloads.items():
+                data[name] = attr.as_json()
+            json.dump(data, f, indent=4, sort_keys=True)
+
+    def __getitem__(self, key: str) -> Optional[Plugin]:
+        return self.downloads.get(key, None)
+
+    def __setitem__(self, key: str, value: Plugin) -> None:
+        self.downloads[key] = value
+
+
+def prefetch(
+    args: Tuple[str, str, str, Optional[str]], cache: Cache
+) -> Tuple[str, str, Union[Exception, Plugin], dict]:
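+    """Prefetch a single plugin and record it in the cache. Exceptions are
+    returned rather than raised so that a worker pool can collect them."""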
+    assert len(args) == 4
+    owner, repo, branch, alias = args
+    try:
+        plugin, redirect = prefetch_plugin(owner, repo, branch, alias, cache)
+        cache[plugin.commit] = plugin
+        return (owner, repo, plugin, redirect)
+    except Exception as e:
+        return (owner, repo, e, {})
+
+
+def rewrite_input(
+    input_file: Path,
+    deprecated: Path,
+    redirects: Optional[Dict[str, str]] = None,
+    append: Tuple = (),
+):
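+    """Rewrite the plugin spec file: append new entries, apply repository
+    redirects (recording renames in the deprecations file), and keep the
+    list sorted case-insensitively."""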
+    with open(input_file, "r") as f:
+        lines = f.readlines()
+
+    lines.extend(append)
+
+    if redirects:
+        lines = [redirects.get(line, line) for line in lines]
+
+        cur_date_iso = datetime.now().strftime("%Y-%m-%d")
+        with open(deprecated, "r") as f:
+            deprecations = json.load(f)
+        for old, new in redirects.items():
+            old_plugin = fetch_plugin_from_pluginline(old)
+            new_plugin = fetch_plugin_from_pluginline(new)
+            if old_plugin.normalized_name != new_plugin.normalized_name:
+                deprecations[old_plugin.normalized_name] = {
+                    "new": new_plugin.normalized_name,
+                    "date": cur_date_iso,
+                }
+        with open(deprecated, "w") as f:
+            json.dump(deprecations, f, indent=4, sort_keys=True)
+            f.write("\n")
+
+    lines = sorted(lines, key=str.casefold)
+
+    with open(input_file, "w") as f:
+        f.writelines(lines)
+
+
+def parse_args(editor: Editor):
+    parser = argparse.ArgumentParser(
+        description=(
+            f"Updates nix derivations for {editor.name} plugins"
+            f"By default from {editor.default_in} to {editor.default_out}"
+        )
+    )
+    parser.add_argument(
+        "--add",
+        dest="add_plugins",
+        default=[],
+        action="append",
+        help=f"Plugin to add to {editor.name}Plugins from Github in the form owner/repo",
+    )
+    parser.add_argument(
+        "--input-names",
+        "-i",
+        dest="input_file",
+        default=editor.default_in,
+        help="A list of plugins in the form owner/repo",
+    )
+    parser.add_argument(
+        "--out",
+        "-o",
+        dest="outfile",
+        default=editor.default_out,
+        help="Filename to save generated nix code",
+    )
+    parser.add_argument(
+        "--proc",
+        "-p",
+        dest="proc",
+        type=int,
+        default=30,
+        help="Number of concurrent processes to spawn.",
+    )
+    parser.add_argument(
+        "--no-commit", "-n", action="store_true", default=False,
+        help="Whether to autocommit changes"
+    )
+    parser.add_argument(
+        "--debug", "-d", choices=LOG_LEVELS.keys(),
+        default=logging.getLevelName(logging.WARNING),
+        help="Adjust log level"
+    )
+    return parser.parse_args()
+
+
+def commit(repo: git.Repo, message: str, files: List[Path]) -> None:
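+    """Stage the given files and create a commit, unless staging them changes nothing relative to HEAD."""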
+    repo.index.add([str(f.resolve()) for f in files])
+
+    if repo.index.diff("HEAD"):
+        print(f'committing to nixpkgs "{message}"')
+        repo.index.commit(message)
+    else:
+        print("no changes in working tree to commit")
+
+
+def get_update(input_file: str, outfile: str, proc: int, editor: Editor):
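+    """Build the update closure: prefetch all plugins from the spec in
+    parallel (persisting the cache even on failure), check the results and
+    regenerate the Nix expression; returns any repository redirects found."""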
+    cache: Cache = Cache(get_current_plugins(editor), editor.cache_file)
+    _prefetch = functools.partial(prefetch, cache=cache)
+
+    def update() -> dict:
+        plugin_names = load_plugin_spec(input_file)
+
+        try:
+            pool = Pool(processes=proc)
+            results = pool.map(_prefetch, plugin_names)
+        finally:
+            cache.store()
+
+        plugins, redirects = check_results(results)
+
+        editor.generate_nix(plugins, outfile)
+
+        return redirects
+
+    return update
+
+
+def update_plugins(editor: Editor):
+    """The main entry function of this module. All input arguments are grouped in the `Editor`."""
+
+    args = parse_args(editor)
+    log.setLevel(LOG_LEVELS[args.debug])
+
+    log.info("Start updating plugins")
+    nixpkgs_repo = git.Repo(editor.root, search_parent_directories=True)
+    update = get_update(args.input_file, args.outfile, args.proc, editor)
+
+    redirects = update()
+    rewrite_input(args.input_file, editor.deprecated, redirects)
+
+    autocommit = not args.no_commit
+
+    if autocommit:
+        commit(nixpkgs_repo, f"{editor.name}Plugins: update", [args.outfile])
+
+    if redirects:
+        update()
+        if autocommit:
+            commit(
+                nixpkgs_repo,
+                f"{editor.name}Plugins: resolve github repository redirects",
+                [args.outfile, args.input_file, editor.deprecated],
+            )
+
+    for plugin_line in args.add_plugins:
+        rewrite_input(args.input_file, editor.deprecated, append=(plugin_line + "\n",))
+        update()
+        plugin = fetch_plugin_from_pluginline(plugin_line)
+        if autocommit:
+            commit(
+                nixpkgs_repo,
+                "{editor}Plugins.{name}: init at {version}".format(
+                    editor=editor.name, name=plugin.normalized_name, version=plugin.version
+                ),
+                [args.outfile, args.input_file],
+            )
diff --git a/maintainers/scripts/update-luarocks-packages b/maintainers/scripts/update-luarocks-packages
index 5c42080745e..da4d224bd33 100755
--- a/maintainers/scripts/update-luarocks-packages
+++ b/maintainers/scripts/update-luarocks-packages
@@ -66,7 +66,7 @@ nixpkgs$ ${0} ${GENERATED_NIXFILE}
 
 These packages are manually refined in lua-overrides.nix
 */
-{ self, stdenv, fetchurl, fetchgit, pkgs, ... } @ args:
+{ self, stdenv, lib, fetchurl, fetchgit, pkgs, ... } @ args:
 self: super:
 with self;
 {
diff --git a/maintainers/scripts/update-luarocks-shell.nix b/maintainers/scripts/update-luarocks-shell.nix
index 23a940b3691..d3f342b07a9 100644
--- a/maintainers/scripts/update-luarocks-shell.nix
+++ b/maintainers/scripts/update-luarocks-shell.nix
@@ -2,8 +2,11 @@
 }:
 with nixpkgs;
 mkShell {
-  buildInputs = [
-    bash luarocks-nix nix-prefetch-scripts parallel
+  packages = [
+    bash
+    luarocks-nix
+    nix-prefetch-scripts
+    parallel
   ];
   LUAROCKS_NIXPKGS_PATH = toString nixpkgs.path;
 }
diff --git a/maintainers/scripts/update-redirected-urls.sh b/maintainers/scripts/update-redirected-urls.sh
new file mode 100755
index 00000000000..5ffa9aca5f6
--- /dev/null
+++ b/maintainers/scripts/update-redirected-urls.sh
@@ -0,0 +1,12 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -p bash curl ripgrep jq -i bash
+
+set -euxo pipefail
+
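+# Rewrite package homepages whose redirect to HTTPS is permanent, according to
+# Repology's problem report for the nix_unstable repository.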
+# Possibly also handle non-HTTPS redirects; there were none of those when this
+# script was first written, so there was nothing to test against. Feel free to
+# add that when it becomes relevant.
+curl https://repology.org/api/v1/repository/nix_unstable/problems \
+   | jq -r '.[] | select(.type == "homepage_permanent_https_redirect") | .data | "s@\(.url)@\(.target)@"' \
+   | sort | uniq | tee script.sed
+find . -name '*.nix' -print0 | xargs -0 -P4 -- sed -f script.sed -i
+rm script.sed
diff --git a/maintainers/scripts/update.nix b/maintainers/scripts/update.nix
index 9568c6cbbcc..7435cf64425 100755
--- a/maintainers/scripts/update.nix
+++ b/maintainers/scripts/update.nix
@@ -1,9 +1,11 @@
 { package ? null
 , maintainer ? null
+, predicate ? null
 , path ? null
 , max-workers ? null
 , include-overlays ? false
 , keep-going ? null
+, commit ? null
 }:
 
 # TODO: add assert statements
@@ -31,30 +33,50 @@ let
       in
         [x] ++ nubOn f xs;
 
-  packagesWithPath = relativePath: cond: return: pathContent:
-    let
-      result = builtins.tryEval pathContent;
+  /* Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
 
-      dedupResults = lst: nubOn (pkg: pkg.updateScript) (lib.concatLists lst);
-    in
-      if result.success then
+    Type: packagesWithPath :: AttrPath → (AttrPath → derivation → bool) → AttrSet → List<AttrSet{attrPath :: str; package :: derivation; }>
+          AttrPath :: [str]
+
+    The packages will be returned as a list of named pairs consisting of:
+      - attrPath: stringified attribute path (based on `rootPath`)
+      - package: corresponding derivation
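+
+    Example (illustrative):
+      packagesWithPath [ "python3Packages" ] (path: pkg: pkg ? updateScript) pkgs.python3Packages
+      => [ { attrPath = "python3Packages.<name>"; package = <derivation>; } … ]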
+   */
+  packagesWithPath = rootPath: cond: pkgs:
+    let
+      packagesWithPathInner = path: pathContent:
         let
-          pathContent = result.value;
+          result = builtins.tryEval pathContent;
+
+          dedupResults = lst: nubOn ({ package, attrPath }: package.updateScript) (lib.concatLists lst);
         in
-          if lib.isDerivation pathContent then
-            lib.optional (cond relativePath pathContent) (return relativePath pathContent)
-          else if lib.isAttrs pathContent then
-            # If user explicitly points to an attrSet or it is marked for recursion, we recur.
-            if relativePath == [] || pathContent.recurseForDerivations or false || pathContent.recurseForRelease or false then
-              dedupResults (lib.mapAttrsToList (name: elem: packagesWithPath (relativePath ++ [name]) cond return elem) pathContent)
-            else []
-          else if lib.isList pathContent then
-            dedupResults (lib.imap0 (i: elem: packagesWithPath (relativePath ++ [i]) cond return elem) pathContent)
-          else []
-      else [];
+          if result.success then
+            let
+              evaluatedPathContent = result.value;
+            in
+              if lib.isDerivation evaluatedPathContent then
+                lib.optional (cond path evaluatedPathContent) { attrPath = lib.concatStringsSep "." path; package = evaluatedPathContent; }
+              else if lib.isAttrs evaluatedPathContent then
+                # If user explicitly points to an attrSet or it is marked for recursion, we recur.
+                if path == rootPath || evaluatedPathContent.recurseForDerivations or false || evaluatedPathContent.recurseForRelease or false then
+                  dedupResults (lib.mapAttrsToList (name: elem: packagesWithPathInner (path ++ [name]) elem) evaluatedPathContent)
+                else []
+              else []
+          else [];
+    in
+      packagesWithPathInner rootPath pkgs;
 
+  /* Recursively find all packages (derivations) in `pkgs` matching `cond` predicate.
+   */
   packagesWith = packagesWithPath [];
 
+  /* Recursively find all packages in `pkgs` with updateScript matching given predicate.
+   */
+  packagesWithUpdateScriptMatchingPredicate = cond:
+    packagesWith (path: pkg: builtins.hasAttr "updateScript" pkg && cond path pkg);
+
+  /* Recursively find all packages in `pkgs` with updateScript by given maintainer.
+   */
   packagesWithUpdateScriptAndMaintainer = maintainer':
     let
       maintainer =
@@ -63,47 +85,53 @@ let
         else
           builtins.getAttr maintainer' lib.maintainers;
     in
-      packagesWith (relativePath: pkg: builtins.hasAttr "updateScript" pkg &&
-                                 (if builtins.hasAttr "maintainers" pkg.meta
-                                   then (if builtins.isList pkg.meta.maintainers
-                                           then builtins.elem maintainer pkg.meta.maintainers
-                                           else maintainer == pkg.meta.maintainers
-                                        )
-                                   else false
-                                 )
-                   )
-                   (relativePath: pkg: pkg)
-                   pkgs;
-
-  packagesWithUpdateScript = path:
+      packagesWithUpdateScriptMatchingPredicate (path: pkg:
+                         (if builtins.hasAttr "maintainers" pkg.meta
+                           then (if builtins.isList pkg.meta.maintainers
+                                   then builtins.elem maintainer pkg.meta.maintainers
+                                   else maintainer == pkg.meta.maintainers
+                                )
+                           else false
+                         )
+                   );
+
+  /* Recursively find all packages under `path` in `pkgs` with updateScript.
+   */
+  packagesWithUpdateScript = path: pkgs:
     let
-      pathContent = lib.attrByPath (lib.splitString "." path) null pkgs;
+      prefix = lib.splitString "." path;
+      pathContent = lib.attrByPath prefix null pkgs;
     in
       if pathContent == null then
         builtins.throw "Attribute path `${path}` does not exists."
       else
-        packagesWith (relativePath: pkg: builtins.hasAttr "updateScript" pkg)
-                       (relativePath: pkg: pkg)
+        packagesWithPath prefix (path: pkg: builtins.hasAttr "updateScript" pkg)
                        pathContent;
 
-  packageByName = name:
+  /* Find a package under `path` in `pkgs` and require that it has an updateScript.
+   */
+  packageByName = path: pkgs:
     let
-        package = lib.attrByPath (lib.splitString "." name) null pkgs;
+        package = lib.attrByPath (lib.splitString "." path) null pkgs;
     in
       if package == null then
-        builtins.throw "Package with an attribute name `${name}` does not exists."
+        builtins.throw "Package with an attribute name `${path}` does not exists."
       else if ! builtins.hasAttr "updateScript" package then
-        builtins.throw "Package with an attribute name `${name}` does not have a `passthru.updateScript` attribute defined."
+        builtins.throw "Package with an attribute name `${path}` does not have a `passthru.updateScript` attribute defined."
       else
-        package;
+        { attrPath = path; inherit package; };
 
+  /* List of packages matched based on the CLI arguments.
+   */
   packages =
     if package != null then
-      [ (packageByName package) ]
+      [ (packageByName package pkgs) ]
+    else if predicate != null then
+      packagesWithUpdateScriptMatchingPredicate predicate pkgs
     else if maintainer != null then
-      packagesWithUpdateScriptAndMaintainer maintainer
+      packagesWithUpdateScriptAndMaintainer maintainer pkgs
     else if path != null then
-      packagesWithUpdateScript path
+      packagesWithUpdateScript path pkgs
     else
       builtins.throw "No arguments provided.\n\n${helpText}";
 
@@ -115,11 +143,15 @@ let
     to run all update scripts for all packages that lists \`garbas\` as a maintainer
     and have \`updateScript\` defined, or:
 
-        % nix-shell maintainers/scripts/update.nix --argstr package gnome3.nautilus
+        % nix-shell maintainers/scripts/update.nix --argstr package gnome.nautilus
 
     to run update script for specific package, or
 
-        % nix-shell maintainers/scripts/update.nix --argstr path gnome3
+        % nix-shell maintainers/scripts/update.nix --arg predicate '(path: pkg: builtins.isList pkg.updateScript && builtins.length pkg.updateScript >= 1 && (let script = builtins.head pkg.updateScript; in builtins.isAttrs script && script.name == "gnome-update-script"))'
+
+    to run update script for all packages matching given predicate, or
+
+        % nix-shell maintainers/scripts/update.nix --argstr path gnome
 
     to run update script for all package under an attribute path.
 
@@ -132,19 +164,32 @@ let
         --argstr keep-going true
 
     to continue running when a single update fails.
+
+    You can also make the updater automatically commit on your behalf, for update
+    scripts that support it, by adding
+
+        --argstr commit true
   '';
 
-  packageData = package: {
+  /* Transform a matched package into an object for update.py.
+   */
+  packageData = { package, attrPath }: {
     name = package.name;
     pname = lib.getName package;
-    updateScript = map builtins.toString (lib.toList package.updateScript);
+    oldVersion = lib.getVersion package;
+    updateScript = map builtins.toString (lib.toList (package.updateScript.command or package.updateScript));
+    supportedFeatures = package.updateScript.supportedFeatures or [];
+    attrPath = package.updateScript.attrPath or attrPath;
   };
 
+  /* JSON file with data for update.py.
+   */
   packagesJson = pkgs.writeText "packages.json" (builtins.toJSON (map packageData packages));
 
   optionalArgs =
     lib.optional (max-workers != null) "--max-workers=${max-workers}"
-    ++ lib.optional (keep-going == "true") "--keep-going";
+    ++ lib.optional (keep-going == "true") "--keep-going"
+    ++ lib.optional (commit == "true") "--commit";
 
   args = [ packagesJson ] ++ optionalArgs;
 
diff --git a/maintainers/scripts/update.py b/maintainers/scripts/update.py
index eb7d0ef2647..eb26a472e92 100644
--- a/maintainers/scripts/update.py
+++ b/maintainers/scripts/update.py
@@ -1,23 +1,192 @@
+from __future__ import annotations
+from typing import Dict, Generator, List, Optional, Tuple
 import argparse
-import concurrent.futures
+import asyncio
+import contextlib
 import json
 import os
+import re
 import subprocess
 import sys
+import tempfile
 
-updates = {}
+class CalledProcessError(Exception):
+    process: asyncio.subprocess.Process
 
 def eprint(*args, **kwargs):
     print(*args, file=sys.stderr, **kwargs)
 
-def run_update_script(package):
+async def check_subprocess(*args, **kwargs):
+    """
+    Emulate the `check=True` behavior of `subprocess.run`.
+    """
+    process = await asyncio.create_subprocess_exec(*args, **kwargs)
+    returncode = await process.wait()
+
+    if returncode != 0:
+        error = CalledProcessError()
+        error.process = process
+
+        raise error
+
+    return process
+
+async def run_update_script(nixpkgs_root: str, merge_lock: asyncio.Lock, temp_dir: Optional[Tuple[str, str]], package: Dict, keep_going: bool):
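+    """Run one package's update script, inside the git worktree when
+    auto-committing, and merge any resulting changes; on failure, dump the
+    script's stderr to a log file."""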
+    worktree: Optional[str] = None
+
+    update_script_command = package['updateScript']
+
+    if temp_dir is not None:
+        worktree, _branch = temp_dir
+
+        # Ensure the worktree is clean before update.
+        await check_subprocess('git', 'reset', '--hard', '--quiet', 'HEAD', cwd=worktree)
+
+        # Update scripts can use $(dirname $0) to get their location, but we want
+        # to run their copies in the git worktree rather than in the main nixpkgs
+        # repo, so rewrite that prefix to point at the worktree.
+        update_script_command = map(lambda arg: re.sub(r'^{0}'.format(re.escape(nixpkgs_root)), worktree, arg), update_script_command)
+
     eprint(f" - {package['name']}: UPDATING ...")
 
-    subprocess.run(package['updateScript'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, check=True)
+    try:
+        update_process = await check_subprocess('env', f"UPDATE_NIX_ATTR_PATH={package['attrPath']}", *update_script_command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, cwd=worktree)
+        update_info = await update_process.stdout.read()
+
+        await merge_changes(merge_lock, package, update_info, temp_dir)
+    except KeyboardInterrupt:
+        eprint('Cancelling…')
+        raise asyncio.exceptions.CancelledError()
+    except CalledProcessError as e:
+        eprint(f" - {package['name']}: ERROR")
+        eprint()
+        eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")
+        eprint()
+        stderr = await e.process.stderr.read()
+        eprint(stderr.decode('utf-8'))
+        with open(f"{package['pname']}.log", 'wb') as logfile:
+            logfile.write(stderr)
+        eprint()
+        eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")
+
+        if not keep_going:
+            raise asyncio.exceptions.CancelledError()
+
+@contextlib.contextmanager
+def make_worktree() -> Generator[Tuple[str, str], None, None]:
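+    """Create a temporary git worktree on a fresh branch and tear both down
+    when the context exits."""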
+    with tempfile.TemporaryDirectory() as wt:
+        branch_name = f'update-{os.path.basename(wt)}'
+        target_directory = f'{wt}/nixpkgs'
+
+        subprocess.run(['git', 'worktree', 'add', '-b', branch_name, target_directory], check=True)
+        try:
+            yield (target_directory, branch_name)
+        finally:
+            # Clean up the worktree and its branch even when the body fails.
+            subprocess.run(['git', 'worktree', 'remove', '--force', target_directory])
+            subprocess.run(['git', 'branch', '-D', branch_name])
+
+async def commit_changes(name: str, merge_lock: asyncio.Lock, worktree: str, branch: str, changes: List[Dict]) -> None:
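+    """Commit each change inside the worktree and cherry-pick it into the
+    current branch, serialized through merge_lock."""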
+    for change in changes:
+        # Git can only handle a single index operation at a time
+        async with merge_lock:
+            await check_subprocess('git', 'add', *change['files'], cwd=worktree)
+            commit_message = '{attrPath}: {oldVersion} → {newVersion}'.format(**change)
+            await check_subprocess('git', 'commit', '--quiet', '-m', commit_message, cwd=worktree)
+            await check_subprocess('git', 'cherry-pick', branch)
+
+async def check_changes(package: Dict, worktree: str, update_info: bytes):
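+    """Normalize the update script's output into a list of change dicts,
+    filling in attrPath, oldVersion, newVersion and files when there is a
+    single change; an empty list means there is nothing to commit."""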
+    if 'commit' in package['supportedFeatures']:
+        changes = json.loads(update_info)
+    else:
+        changes = [{}]
+
+    # Try to fill in missing attributes when there is just a single change.
+    if len(changes) == 1:
+        # Dynamic data from the updater takes precedence over static data from passthru.updateScript.
+        if 'attrPath' not in changes[0]:
+            # update.nix is always passing attrPath
+            changes[0]['attrPath'] = package['attrPath']
+
+        if 'oldVersion' not in changes[0]:
+            # update.nix is always passing oldVersion
+            changes[0]['oldVersion'] = package['oldVersion']
+
+        if 'newVersion' not in changes[0]:
+            attr_path = changes[0]['attrPath']
+            obtain_new_version_process = await check_subprocess('nix-instantiate', '--expr', f'with import ./. {{}}; lib.getVersion {attr_path}', '--eval', '--strict', '--json', stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, cwd=worktree)
+            changes[0]['newVersion'] = json.loads((await obtain_new_version_process.stdout.read()).decode('utf-8'))
+
+        if 'files' not in changes[0]:
+            changed_files_process = await check_subprocess('git', 'diff', '--name-only', stdout=asyncio.subprocess.PIPE, cwd=worktree)
+            changed_files = (await changed_files_process.stdout.read()).splitlines()
+            changes[0]['files'] = changed_files
+
+            if len(changed_files) == 0:
+                return []
+
+    return changes
+
+async def merge_changes(merge_lock: asyncio.Lock, package: Dict, update_info: bytes, temp_dir: Optional[Tuple[str, str]]) -> None:
+    if temp_dir is not None:
+        worktree, branch = temp_dir
+        changes = await check_changes(package, worktree, update_info)
+
+        if len(changes) > 0:
+            await commit_changes(package['name'], merge_lock, worktree, branch, changes)
+        else:
+            eprint(f" - {package['name']}: DONE, no changes.")
+    else:
+        eprint(f" - {package['name']}: DONE.")
+
+async def updater(nixpkgs_root: str, temp_dir: Optional[Tuple[str, str]], merge_lock: asyncio.Lock, packages_to_update: asyncio.Queue[Optional[Dict]], keep_going: bool, commit: bool):
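+    """Worker loop: keep taking packages off the queue and updating them
+    until a sentinel (None) is received."""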
+    while True:
+        package = await packages_to_update.get()
+        if package is None:
+            # A sentinel received, we are done.
+            return
+
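+        # Packages whose update script neither supports committing nor carries
+        # an attrPath cannot be auto-committed, so their scripts run outside
+        # the worktree.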
+        if 'commit' in package['supportedFeatures'] or 'attrPath' in package:
+            package_temp_dir = temp_dir
+        else:
+            # Use a per-package variable so that one such package does not
+            # disable the worktree for the rest of this worker's queue.
+            package_temp_dir = None
+
+        await run_update_script(nixpkgs_root, merge_lock, package_temp_dir, package, keep_going)
+
+async def start_updates(max_workers: int, keep_going: bool, commit: bool, packages: List[Dict]):
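+    """Spawn up to max_workers updater tasks, each with its own git worktree
+    when committing, all fed from a shared queue of packages."""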
+    merge_lock = asyncio.Lock()
+    packages_to_update: asyncio.Queue[Optional[Dict]] = asyncio.Queue()
+
+    with contextlib.ExitStack() as stack:
+        temp_dirs: List[Optional[Tuple[str, str]]] = []
+
+        # Do not create more workers than there are packages.
+        num_workers = min(max_workers, len(packages))
 
+        nixpkgs_root_process = await check_subprocess('git', 'rev-parse', '--show-toplevel', stdout=asyncio.subprocess.PIPE)
+        nixpkgs_root = (await nixpkgs_root_process.stdout.read()).decode('utf-8').strip()
 
-def main(max_workers, keep_going, packages):
-    with open(sys.argv[1]) as f:
+        # Set up temporary directories when using auto-commit.
+        for i in range(num_workers):
+            temp_dir = stack.enter_context(make_worktree()) if commit else None
+            temp_dirs.append(temp_dir)
+
+        # Fill up the update queue.
+        for package in packages:
+            await packages_to_update.put(package)
+
+        # Add sentinels, one for each worker.
+        # A worker will terminate when it receives a sentinel from the queue.
+        for i in range(num_workers):
+            await packages_to_update.put(None)
+
+        # Prepare one updater worker per temporary directory.
+        # At most `num_workers` instances of `run_update_script` will be running at one time.
+        updaters = asyncio.gather(*[updater(nixpkgs_root, temp_dir, merge_lock, packages_to_update, keep_going, commit) for temp_dir in temp_dirs])
+
+        try:
+            # Start updater workers.
+            await updaters
+        except asyncio.exceptions.CancelledError:
+            # When one worker is cancelled, cancel the others too.
+            updaters.cancel()
+
+def main(max_workers: int, keep_going: bool, commit: bool, packages_path: str) -> None:
+    with open(packages_path) as f:
         packages = json.load(f)
 
     eprint()
@@ -31,29 +200,7 @@ def main(max_workers, keep_going, packages):
         eprint()
         eprint('Running update for:')
 
-        with concurrent.futures.ProcessPoolExecutor(max_workers=max_workers) as executor:
-            for package in packages:
-                updates[executor.submit(run_update_script, package)] = package
-
-            for future in concurrent.futures.as_completed(updates):
-                package = updates[future]
-
-                try:
-                    future.result()
-                    eprint(f" - {package['name']}: DONE.")
-                except subprocess.CalledProcessError as e:
-                    eprint(f" - {package['name']}: ERROR")
-                    eprint()
-                    eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")
-                    eprint()
-                    eprint(e.stdout.decode('utf-8'))
-                    with open(f"{package['pname']}.log", 'wb') as f:
-                        f.write(e.stdout)
-                    eprint()
-                    eprint(f"--- SHOWING ERROR LOG FOR {package['name']} ----------------------")
-
-                    if not keep_going:
-                        sys.exit(1)
+        asyncio.run(start_updates(max_workers, keep_going, commit, packages))
 
         eprint()
         eprint('Packages updated!')
@@ -65,15 +212,14 @@ def main(max_workers, keep_going, packages):
 parser = argparse.ArgumentParser(description='Update packages')
 parser.add_argument('--max-workers', '-j', dest='max_workers', type=int, help='Number of updates to run concurrently', nargs='?', default=4)
 parser.add_argument('--keep-going', '-k', dest='keep_going', action='store_true', help='Do not stop after first failure')
+parser.add_argument('--commit', '-c', dest='commit', action='store_true', help='Commit the changes')
 parser.add_argument('packages', help='JSON file containing the list of package names and their update scripts')
 
 if __name__ == '__main__':
     args = parser.parse_args()
 
     try:
-        main(args.max_workers, args.keep_going, args.packages)
-    except (KeyboardInterrupt, SystemExit) as e:
-        for update in updates:
-            update.cancel()
-
-        sys.exit(e.code if isinstance(e, SystemExit) else 130)
+        main(args.max_workers, args.keep_going, args.commit, args.packages)
+    except KeyboardInterrupt:
+        # Let’s cancel outside of the main loop too.
+        sys.exit(130)