author     hacker1024 <hacker1024@users.sourceforge.net>  2023-04-25 12:51:56 +1000
committer  hacker1024 <hacker1024@users.sourceforge.net>  2023-04-25 12:51:56 +1000
commit     c4cbf526e9b7838bec1cc6c92226367787d336d0 (patch)
tree       7c1d2b11d0beac9dce9c5a818f466ce4f9bfb13a /pkgs/build-support
parent     552e3fe49817599c2127a8ab32b72b8e6bf9f724 (diff)
parent     671e29a924fa788191597e8bdbee5385d1ff42c5 (diff)
Merge remote-tracking branch 'NixOS/master' into feature/futter-linux-desktop
Diffstat (limited to 'pkgs/build-support')
-rw-r--r--  pkgs/build-support/appimage/default.nix | 4
-rw-r--r--  pkgs/build-support/bintools-wrapper/default.nix | 4
-rw-r--r--  pkgs/build-support/build-fhsenv-bubblewrap/buildFHSEnv.nix (renamed from pkgs/build-support/build-fhs-userenv-bubblewrap/buildFHSEnv.nix) | 2
-rw-r--r--  pkgs/build-support/build-fhsenv-bubblewrap/default.nix (renamed from pkgs/build-support/build-fhs-userenv-bubblewrap/default.nix) | 4
-rw-r--r--  pkgs/build-support/build-fhsenv-chroot/chrootenv/default.nix (renamed from pkgs/build-support/build-fhs-userenv/chrootenv/default.nix) | 0
-rw-r--r--  pkgs/build-support/build-fhsenv-chroot/chrootenv/src/chrootenv.c (renamed from pkgs/build-support/build-fhs-userenv/chrootenv/src/chrootenv.c) | 0
-rw-r--r--  pkgs/build-support/build-fhsenv-chroot/chrootenv/src/meson.build (renamed from pkgs/build-support/build-fhs-userenv/chrootenv/src/meson.build) | 0
-rw-r--r--  pkgs/build-support/build-fhsenv-chroot/default.nix (renamed from pkgs/build-support/build-fhs-userenv/default.nix) | 0
-rw-r--r--  pkgs/build-support/build-fhsenv-chroot/env.nix (renamed from pkgs/build-support/build-fhs-userenv/env.nix) | 0
-rw-r--r--  pkgs/build-support/cc-wrapper/default.nix | 11
-rw-r--r--  pkgs/build-support/dotnet/build-dotnet-module/default.nix | 6
-rw-r--r--  pkgs/build-support/fetchgit/default.nix | 2
-rw-r--r--  pkgs/build-support/go/module.nix | 6
-rw-r--r--  pkgs/build-support/kernel/make-initrd-ng.nix | 4
-rw-r--r--  pkgs/build-support/node/build-npm-package/default.nix | 5
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/default.nix | 11
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/src/cacache.rs | 2
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/src/main.rs | 425
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs | 191
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs | 370
-rw-r--r--  pkgs/build-support/node/fetch-npm-deps/src/tests.rs | 141
-rw-r--r--  pkgs/build-support/rust/build-rust-package/default.nix | 4
-rw-r--r--  pkgs/build-support/rust/default-crate-overrides.nix | 6
-rw-r--r--  pkgs/build-support/setup-hooks/move-build-tree.sh | 12
-rw-r--r--  pkgs/build-support/trivial-builders.nix | 13
25 files changed, 705 insertions(+), 518 deletions(-)
diff --git a/pkgs/build-support/appimage/default.nix b/pkgs/build-support/appimage/default.nix
index bcda40c800f..0637964ca62 100644
--- a/pkgs/build-support/appimage/default.nix
+++ b/pkgs/build-support/appimage/default.nix
@@ -6,7 +6,7 @@
 , libarchive
 , pv
 , squashfsTools
-, buildFHSUserEnv
+, buildFHSEnv
 , pkgs
 }:
 
@@ -43,7 +43,7 @@ rec {
     extraPkgs,
     meta ? {},
     ...
-  }: buildFHSUserEnv
+  }: buildFHSEnv
     (defaultFhsEnvArgs // {
       inherit name;
 
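Note: the rename above only changes which wrapper appimageTools pulls in; callers keep using the appimageTools interface. A minimal sketch of that interface, assuming the usual wrapType2 arguments (the package name, URL, and hashes below are placeholders, not from this commit):

  { lib, appimageTools, fetchurl }:

  appimageTools.wrapType2 {
    name = "example-app";                                 # hypothetical package
    src = fetchurl {
      url = "https://example.org/example-app.AppImage";   # placeholder URL
      hash = lib.fakeHash;                                # replace with the real hash
    };
    # Extra packages end up in the FHS environment, which is now built
    # with buildFHSEnv instead of buildFHSUserEnv.
    extraPkgs = pkgs: [ ];
  }
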
diff --git a/pkgs/build-support/bintools-wrapper/default.nix b/pkgs/build-support/bintools-wrapper/default.nix
index 00375db220f..0a4ea1ebf26 100644
--- a/pkgs/build-support/bintools-wrapper/default.nix
+++ b/pkgs/build-support/bintools-wrapper/default.nix
@@ -102,7 +102,7 @@ in
 stdenv.mkDerivation {
   pname = targetPrefix
     + (if name != "" then name else "${bintoolsName}-wrapper");
-  version = if bintools == null then null else bintoolsVersion;
+  version = if bintools == null then "" else bintoolsVersion;
 
   preferLocalBuild = true;
 
@@ -166,7 +166,7 @@ stdenv.mkDerivation {
 
     # If we are asked to wrap `gas` and this bintools has it,
     # then symlink it (`as` will be symlinked next).
-    # This is mainly for the wrapped gnatboot on x86-64 Darwin,
+    # This is mainly for the wrapped gnat-bootstrap on x86-64 Darwin,
     # as it must have both the GNU assembler from cctools (installed as `gas`)
     # and the Clang integrated assembler (installed as `as`).
     # See pkgs/os-specific/darwin/binutils/default.nix for details.
diff --git a/pkgs/build-support/build-fhs-userenv-bubblewrap/buildFHSEnv.nix b/pkgs/build-support/build-fhsenv-bubblewrap/buildFHSEnv.nix
index 78da74c8284..305a959a35d 100644
--- a/pkgs/build-support/build-fhs-userenv-bubblewrap/buildFHSEnv.nix
+++ b/pkgs/build-support/build-fhsenv-bubblewrap/buildFHSEnv.nix
@@ -64,7 +64,7 @@ let
 
   ldconfig = writeShellScriptBin "ldconfig" ''
     # due to a glibc bug, 64-bit ldconfig complains about patchelf'd 32-bit libraries, so we're using 32-bit ldconfig
-    exec ${pkgsi686Linux.glibc.bin}/bin/ldconfig -f /etc/ld.so.conf -C /etc/ld.so.cache "$@"
+    exec ${if stdenv.isx86_64 && stdenv.isLinux then pkgsi686Linux.glibc.bin else pkgs.glibc.bin}/bin/ldconfig -f /etc/ld.so.conf -C /etc/ld.so.cache "$@"
   '';
   etcProfile = writeText "profile" ''
     export PS1='${name}-chrootenv:\u@\h:\w\$ '
diff --git a/pkgs/build-support/build-fhs-userenv-bubblewrap/default.nix b/pkgs/build-support/build-fhsenv-bubblewrap/default.nix
index 5d94c309fe0..6c9b71624c2 100644
--- a/pkgs/build-support/build-fhs-userenv-bubblewrap/default.nix
+++ b/pkgs/build-support/build-fhsenv-bubblewrap/default.nix
@@ -1,4 +1,5 @@
 { lib
+, stdenv
 , callPackage
 , runCommandLocal
 , writeShellScript
@@ -113,6 +114,7 @@ let
     exec ${run} "$@"
   '';
 
+  indentLines = str: lib.concatLines (map (s: "  " + s) (lib.filter (s: s != "") (lib.splitString "\n" str)));
   bwrapCmd = { initArgs ? "" }: ''
     ignored=(/nix /dev /proc /etc)
     ro_mounts=()
@@ -202,11 +204,13 @@ let
       --symlink /etc/ld.so.cache ${glibc}/etc/ld.so.cache \
       --ro-bind ${glibc}/etc/rpc ${glibc}/etc/rpc \
       --remount-ro ${glibc}/etc \
+  '' + lib.optionalString (stdenv.isx86_64 && stdenv.isLinux) (indentLines ''
       --tmpfs ${pkgsi686Linux.glibc}/etc \
       --symlink /etc/ld.so.conf ${pkgsi686Linux.glibc}/etc/ld.so.conf \
       --symlink /etc/ld.so.cache ${pkgsi686Linux.glibc}/etc/ld.so.cache \
       --ro-bind ${pkgsi686Linux.glibc}/etc/rpc ${pkgsi686Linux.glibc}/etc/rpc \
       --remount-ro ${pkgsi686Linux.glibc}/etc \
+  '') + ''
       "''${ro_mounts[@]}"
       "''${symlinks[@]}"
       "''${auto_mounts[@]}"
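Note: the indentLines helper introduced in this file re-indents the optional 32-bit glibc mount flags so they line up with the surrounding bwrap arguments. A small illustration of what it evaluates to, assuming the definition shown above (the repl output is my own expectation, not captured from the source):

  nix-repl> indentLines "--tmpfs /x\n--symlink /a /b\n"
  "  --tmpfs /x\n  --symlink /a /b\n"
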
diff --git a/pkgs/build-support/build-fhs-userenv/chrootenv/default.nix b/pkgs/build-support/build-fhsenv-chroot/chrootenv/default.nix
index 32ac43d4155..32ac43d4155 100644
--- a/pkgs/build-support/build-fhs-userenv/chrootenv/default.nix
+++ b/pkgs/build-support/build-fhsenv-chroot/chrootenv/default.nix
diff --git a/pkgs/build-support/build-fhs-userenv/chrootenv/src/chrootenv.c b/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/chrootenv.c
index c109d7297e1..c109d7297e1 100644
--- a/pkgs/build-support/build-fhs-userenv/chrootenv/src/chrootenv.c
+++ b/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/chrootenv.c
diff --git a/pkgs/build-support/build-fhs-userenv/chrootenv/src/meson.build b/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/meson.build
index 6d0770a0dc4..6d0770a0dc4 100644
--- a/pkgs/build-support/build-fhs-userenv/chrootenv/src/meson.build
+++ b/pkgs/build-support/build-fhsenv-chroot/chrootenv/src/meson.build
diff --git a/pkgs/build-support/build-fhs-userenv/default.nix b/pkgs/build-support/build-fhsenv-chroot/default.nix
index 6f0adfb4e08..6f0adfb4e08 100644
--- a/pkgs/build-support/build-fhs-userenv/default.nix
+++ b/pkgs/build-support/build-fhsenv-chroot/default.nix
diff --git a/pkgs/build-support/build-fhs-userenv/env.nix b/pkgs/build-support/build-fhsenv-chroot/env.nix
index fbc50c2dc25..fbc50c2dc25 100644
--- a/pkgs/build-support/build-fhs-userenv/env.nix
+++ b/pkgs/build-support/build-fhsenv-chroot/env.nix
diff --git a/pkgs/build-support/cc-wrapper/default.nix b/pkgs/build-support/cc-wrapper/default.nix
index 1f1d7489a98..e0ee3dae41f 100644
--- a/pkgs/build-support/cc-wrapper/default.nix
+++ b/pkgs/build-support/cc-wrapper/default.nix
@@ -14,7 +14,7 @@
 , propagateDoc ? cc != null && cc ? man
 , extraTools ? [], extraPackages ? [], extraBuildCommands ? ""
 , nixSupport ? {}
-, isGNU ? false, isClang ? cc.isClang or false, gnugrep ? null
+, isGNU ? false, isClang ? cc.isClang or false, isCcache ? cc.isCcache or false, gnugrep ? null
 , buildPackages ? {}
 , libcxx ? null
 , grossHackForStagingNext ? false
@@ -161,7 +161,7 @@ assert nativePrefix == bintools.nativePrefix;
 stdenv.mkDerivation {
   pname = targetPrefix
     + (if name != "" then name else "${ccName}-wrapper");
-  version = if cc == null then null else ccVersion;
+  version = if cc == null then "" else ccVersion;
 
   preferLocalBuild = true;
 
@@ -208,7 +208,7 @@ stdenv.mkDerivation {
         local dst="$1"
         local wrapper="$2"
         export prog="$3"
-        export use_response_file_by_default=${if isClang then "1" else "0"}
+        export use_response_file_by_default=${if isClang && !isCcache then "1" else "0"}
         substituteAll "$wrapper" "$out/bin/$dst"
         chmod +x "$out/bin/$dst"
       }
@@ -598,8 +598,11 @@ stdenv.mkDerivation {
     expandResponseParams = "${expand-response-params}/bin/expand-response-params";
     shell = getBin shell + shell.shellPath or "";
     gnugrep_bin = if nativeTools then "" else gnugrep;
+    # stdenv.cc.cc should not be null, and we have nothing better for now.
+    # Once the native impure bootstrap is gone, this can become `inherit cc;` again.
+    cc = if nativeTools then "" else cc;
     wrapperName = "CC_WRAPPER";
-    inherit suffixSalt coreutils_bin bintools cc;
+    inherit suffixSalt coreutils_bin bintools;
     inherit libc_bin libc_dev libc_lib;
     inherit darwinPlatformForCC darwinMinVersion darwinMinVersionVariable;
   };
diff --git a/pkgs/build-support/dotnet/build-dotnet-module/default.nix b/pkgs/build-support/dotnet/build-dotnet-module/default.nix
index 8858d4e9877..bae71482836 100644
--- a/pkgs/build-support/dotnet/build-dotnet-module/default.nix
+++ b/pkgs/build-support/dotnet/build-dotnet-module/default.nix
@@ -284,4 +284,8 @@ stdenvNoCC.mkDerivation (args // {
   } // args.passthru or { };
 
   meta = (args.meta or { }) // { inherit platforms; };
-})
+}
+  # ICU tries to unconditionally load files from /usr/share/icu on Darwin, which makes builds fail
+  # in the sandbox, so disable ICU on Darwin. This, as far as I know, shouldn't cause any built packages
+  # to behave differently, just the dotnet build tool.
+  // lib.optionalAttrs stdenvNoCC.isDarwin { DOTNET_SYSTEM_GLOBALIZATION_INVARIANT = 1; })
diff --git a/pkgs/build-support/fetchgit/default.nix b/pkgs/build-support/fetchgit/default.nix
index 56973f5d367..e920355f460 100644
--- a/pkgs/build-support/fetchgit/default.nix
+++ b/pkgs/build-support/fetchgit/default.nix
@@ -66,7 +66,7 @@ lib.warnIf (builtins.isString sparseCheckout)
 stdenvNoCC.mkDerivation {
   inherit name;
   builder = ./builder.sh;
-  fetcher = ./nix-prefetch-git;  # This must be a string to ensure it's called with bash.
+  fetcher = ./nix-prefetch-git;
 
   nativeBuildInputs = [ git ]
     ++ lib.optionals fetchLFS [ git-lfs ];
diff --git a/pkgs/build-support/go/module.nix b/pkgs/build-support/go/module.nix
index 045ce11fe01..2c5d1827cde 100644
--- a/pkgs/build-support/go/module.nix
+++ b/pkgs/build-support/go/module.nix
@@ -83,12 +83,16 @@ let
     inherit (args) src;
     inherit (go) GOOS GOARCH;
 
+    # The following inheritance behavior is not obvious, and some may argue
+    # it's not ideal. Changing it may break vendor hashes in Nixpkgs and out
+    # in the wild. In any case, it's documented in:
+    # doc/languages-frameworks/go.section.md
     prePatch = args.prePatch or "";
     patches = args.patches or [];
     patchFlags = args.patchFlags or [];
     postPatch = args.postPatch or "";
     preBuild = args.preBuild or "";
-    postBuild = args.postBuild or "";
+    postBuild = args.modPostBuild or "";
     sourceRoot = args.sourceRoot or "";
 
     GO111MODULE = "on";
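Note: the postBuild → modPostBuild rename above is a user-facing change: hooks meant for the vendored-dependencies derivation now have their own attribute instead of reusing postBuild. A minimal sketch of the resulting interface, assuming standard buildGoModule arguments (the package name, source, and hashes are placeholders):

  { lib, buildGoModule, fetchFromGitHub }:

  buildGoModule {
    pname = "example-tool";                  # hypothetical package
    version = "0.1.0";
    src = fetchFromGitHub {
      owner = "example";                     # placeholder source
      repo = "example-tool";
      rev = "v0.1.0";
      hash = lib.fakeHash;
    };
    vendorHash = lib.fakeHash;

    # Runs while building the goModules (vendor) derivation and therefore
    # affects the vendor hash; a plain postBuild no longer leaks into that
    # derivation after this change.
    modPostBuild = ''
      rm -rf vendor/github.com/example/unused   # hypothetical cleanup
    '';
  }
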
diff --git a/pkgs/build-support/kernel/make-initrd-ng.nix b/pkgs/build-support/kernel/make-initrd-ng.nix
index dc0e9b87db2..f3cf3d59f92 100644
--- a/pkgs/build-support/kernel/make-initrd-ng.nix
+++ b/pkgs/build-support/kernel/make-initrd-ng.nix
@@ -78,14 +78,14 @@ in
 
   STRIP = if strip then "${pkgsBuildHost.binutils.targetPrefix}strip" else null;
 }) ''
-  mkdir ./root
+  mkdir -p ./root/var/empty
   make-initrd-ng "$contentsPath" ./root
   mkdir "$out"
   (cd root && find * .[^.*] -exec touch -h -d '@1' '{}' +)
   for PREP in $prepend; do
     cat $PREP >> $out/initrd
   done
-  (cd root && find * .[^.*] -print0 | sort -z | cpio -o -H newc -R +0:+0 --reproducible --null | eval -- $compress >> "$out/initrd")
+  (cd root && find . -print0 | sort -z | cpio -o -H newc -R +0:+0 --reproducible --null | eval -- $compress >> "$out/initrd")
 
   if [ -n "$makeUInitrd" ]; then
       mkimage -A "$uInitrdArch" -O linux -T ramdisk -C "$uInitrdCompression" -d "$out/initrd" $out/initrd.img
diff --git a/pkgs/build-support/node/build-npm-package/default.nix b/pkgs/build-support/node/build-npm-package/default.nix
index 26cc678c571..1c3fb6a74ef 100644
--- a/pkgs/build-support/node/build-npm-package/default.nix
+++ b/pkgs/build-support/node/build-npm-package/default.nix
@@ -12,6 +12,9 @@
   # The output hash of the dependencies for this project.
   # Can be calculated in advance with prefetch-npm-deps.
 , npmDepsHash ? ""
+  # Whether to force the usage of Git dependencies that have install scripts, but not a lockfile.
+  # Use with care.
+, forceGitDeps ? false
   # Whether to make the cache writable prior to installing dependencies.
   # Don't set this unless npm tries to write to the cache directory, as it can slow down the build.
 , makeCacheWritable ? false
@@ -32,7 +35,7 @@
 
 let
   npmDeps = fetchNpmDeps {
-    inherit src srcs sourceRoot prePatch patches postPatch;
+    inherit forceGitDeps src srcs sourceRoot prePatch patches postPatch;
     name = "${name}-npm-deps";
     hash = npmDepsHash;
   };
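Note: with the plumbing above, buildNpmPackage forwards forceGitDeps to fetchNpmDeps. A minimal sketch of a caller opting in, assuming the usual buildNpmPackage arguments (names, source, and hashes are placeholders):

  { lib, buildNpmPackage, fetchFromGitHub }:

  buildNpmPackage {
    pname = "example-app";                   # hypothetical package
    version = "1.0.0";
    src = fetchFromGitHub {
      owner = "example";                     # placeholder source
      repo = "example-app";
      rev = "v1.0.0";
      hash = lib.fakeHash;
    };
    npmDepsHash = lib.fakeHash;              # replace with the real hash

    # Allow Git dependencies that ship install scripts but no lockfile;
    # without this, the prefetcher refuses them (see fetch-npm-deps below).
    forceGitDeps = true;
  }
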
diff --git a/pkgs/build-support/node/fetch-npm-deps/default.nix b/pkgs/build-support/node/fetch-npm-deps/default.nix
index d87071d8559..41cad9d12ee 100644
--- a/pkgs/build-support/node/fetch-npm-deps/default.nix
+++ b/pkgs/build-support/node/fetch-npm-deps/default.nix
@@ -36,8 +36,8 @@
           '';
         };
 
-        makeTest = { name, src, hash }: testers.invalidateFetcherByDrvHash fetchNpmDeps {
-          inherit name hash;
+        makeTest = { name, src, hash, forceGitDeps ? false }: testers.invalidateFetcherByDrvHash fetchNpmDeps {
+          inherit name hash forceGitDeps;
 
           src = makeTestSrc { inherit name src; };
         };
@@ -108,6 +108,8 @@
           };
 
           hash = "sha256-+KA8/orSBJ4EhuSyQO8IKSxsN/FAsYU3lOzq+awuxNQ=";
+
+          forceGitDeps = true;
         };
       };
 
@@ -121,6 +123,7 @@
   fetchNpmDeps =
     { name ? "npm-deps"
     , hash ? ""
+    , forceGitDeps ? false
     , ...
     } @ args:
     let
@@ -131,6 +134,8 @@
           outputHash = "";
           outputHashAlgo = "sha256";
         };
+
+      forceGitDeps_ = lib.optionalAttrs forceGitDeps { FORCE_GIT_DEPS = true; };
     in
     stdenvNoCC.mkDerivation (args // {
       inherit name;
@@ -161,5 +166,5 @@
       dontInstall = true;
 
       outputHashMode = "recursive";
-    } // hash_);
+    } // hash_ // forceGitDeps_);
 }
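Note: when fetchNpmDeps is used directly, the new argument simply exports FORCE_GIT_DEPS into the fixed-output derivation, which the Rust prefetcher checks via env::var. A hedged sketch (the name and hash are placeholders):

  { lib, fetchNpmDeps }:

  fetchNpmDeps {
    name = "example-npm-deps";               # hypothetical name
    src = ./.;                               # tree containing package-lock.json
    hash = lib.fakeHash;                     # replace with the real output hash
    forceGitDeps = true;                     # becomes FORCE_GIT_DEPS in the env
  }
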
diff --git a/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs b/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs
index 715e115e723..5326c3e858b 100644
--- a/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs
+++ b/pkgs/build-support/node/fetch-npm-deps/src/cacache.rs
@@ -72,7 +72,7 @@ impl Cache {
                 &mut p,
                 &hash
                     .into_iter()
-                    .map(|x| format!("{:02x}", x))
+                    .map(|n| format!("{n:02x}"))
                     .collect::<String>(),
             );
 
diff --git a/pkgs/build-support/node/fetch-npm-deps/src/main.rs b/pkgs/build-support/node/fetch-npm-deps/src/main.rs
index 3d2204071a6..57725a922df 100644
--- a/pkgs/build-support/node/fetch-npm-deps/src/main.rs
+++ b/pkgs/build-support/node/fetch-npm-deps/src/main.rs
@@ -1,250 +1,18 @@
 #![warn(clippy::pedantic)]
 
 use crate::cacache::Cache;
-use anyhow::{anyhow, Context};
+use anyhow::anyhow;
 use rayon::prelude::*;
-use serde::Deserialize;
 use serde_json::{Map, Value};
 use std::{
-    collections::{HashMap, HashSet},
-    env, fmt, fs, io,
+    env, fs,
     path::Path,
-    process::{self, Command, Stdio},
+    process::{self, Command},
 };
 use tempfile::tempdir;
-use url::Url;
 
 mod cacache;
-#[cfg(test)]
-mod tests;
-
-#[derive(Deserialize)]
-struct PackageLock {
-    #[serde(rename = "lockfileVersion")]
-    version: u8,
-    dependencies: Option<HashMap<String, OldPackage>>,
-    packages: Option<HashMap<String, Package>>,
-}
-
-#[derive(Deserialize)]
-struct OldPackage {
-    version: UrlOrString,
-    #[serde(default)]
-    bundled: bool,
-    resolved: Option<UrlOrString>,
-    integrity: Option<String>,
-    dependencies: Option<HashMap<String, OldPackage>>,
-}
-
-#[derive(Debug, Deserialize, PartialEq, Eq)]
-struct Package {
-    resolved: Option<UrlOrString>,
-    integrity: Option<String>,
-}
-
-#[derive(Debug, Deserialize, PartialEq, Eq)]
-#[serde(untagged)]
-enum UrlOrString {
-    Url(Url),
-    String(String),
-}
-
-impl fmt::Display for UrlOrString {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        match self {
-            UrlOrString::Url(url) => url.fmt(f),
-            UrlOrString::String(string) => string.fmt(f),
-        }
-    }
-}
-
-#[allow(clippy::case_sensitive_file_extension_comparisons)]
-fn to_new_packages(
-    old_packages: HashMap<String, OldPackage>,
-    initial_url: &Url,
-) -> anyhow::Result<HashMap<String, Package>> {
-    let mut new = HashMap::new();
-
-    for (name, mut package) in old_packages {
-        // In some cases, a bundled dependency happens to have the same version as a non-bundled one, causing
-        // the bundled one without a URL to override the entry for the non-bundled instance, which prevents the
-        // dependency from being downloaded.
-        if package.bundled {
-            continue;
-        }
-
-        if let UrlOrString::Url(v) = &package.version {
-            for (scheme, host) in [
-                ("github", "github.com"),
-                ("bitbucket", "bitbucket.org"),
-                ("gitlab", "gitlab.com"),
-            ] {
-                if v.scheme() == scheme {
-                    package.version = {
-                        let mut new_url = initial_url.clone();
-
-                        new_url.set_host(Some(host))?;
-
-                        if v.path().ends_with(".git") {
-                            new_url.set_path(v.path());
-                        } else {
-                            new_url.set_path(&format!("{}.git", v.path()));
-                        }
-
-                        new_url.set_fragment(v.fragment());
-
-                        UrlOrString::Url(new_url)
-                    };
-
-                    break;
-                }
-            }
-        }
-
-        new.insert(
-            format!("{name}-{}", package.version),
-            Package {
-                resolved: if matches!(package.version, UrlOrString::Url(_)) {
-                    Some(package.version)
-                } else {
-                    package.resolved
-                },
-                integrity: package.integrity,
-            },
-        );
-
-        if let Some(dependencies) = package.dependencies {
-            new.extend(to_new_packages(dependencies, initial_url)?);
-        }
-    }
-
-    Ok(new)
-}
-
-#[allow(clippy::case_sensitive_file_extension_comparisons)]
-fn get_hosted_git_url(url: &Url) -> Option<Url> {
-    if ["git", "http", "git+ssh", "git+https", "ssh", "https"].contains(&url.scheme()) {
-        let mut s = url.path_segments()?;
-
-        match url.host_str()? {
-            "github.com" => {
-                let user = s.next()?;
-                let mut project = s.next()?;
-                let typ = s.next();
-                let mut commit = s.next();
-
-                if typ.is_none() {
-                    commit = url.fragment();
-                } else if typ.is_some() && typ != Some("tree") {
-                    return None;
-                }
-
-                if project.ends_with(".git") {
-                    project = project.strip_suffix(".git")?;
-                }
-
-                let commit = commit.unwrap();
-
-                Some(
-                    Url::parse(&format!(
-                        "https://codeload.github.com/{user}/{project}/tar.gz/{commit}"
-                    ))
-                    .ok()?,
-                )
-            }
-            "bitbucket.org" => {
-                let user = s.next()?;
-                let mut project = s.next()?;
-                let aux = s.next();
-
-                if aux == Some("get") {
-                    return None;
-                }
-
-                if project.ends_with(".git") {
-                    project = project.strip_suffix(".git")?;
-                }
-
-                let commit = url.fragment()?;
-
-                Some(
-                    Url::parse(&format!(
-                        "https://bitbucket.org/{user}/{project}/get/{commit}.tar.gz"
-                    ))
-                    .ok()?,
-                )
-            }
-            "gitlab.com" => {
-                let path = &url.path()[1..];
-
-                if path.contains("/~/") || path.contains("/archive.tar.gz") {
-                    return None;
-                }
-
-                let user = s.next()?;
-                let mut project = s.next()?;
-
-                if project.ends_with(".git") {
-                    project = project.strip_suffix(".git")?;
-                }
-
-                let commit = url.fragment()?;
-
-                Some(
-                    Url::parse(&format!(
-                    "https://gitlab.com/{user}/{project}/repository/archive.tar.gz?ref={commit}"
-                ))
-                    .ok()?,
-                )
-            }
-            "git.sr.ht" => {
-                let user = s.next()?;
-                let mut project = s.next()?;
-                let aux = s.next();
-
-                if aux == Some("archive") {
-                    return None;
-                }
-
-                if project.ends_with(".git") {
-                    project = project.strip_suffix(".git")?;
-                }
-
-                let commit = url.fragment()?;
-
-                Some(
-                    Url::parse(&format!(
-                        "https://git.sr.ht/{user}/{project}/archive/{commit}.tar.gz"
-                    ))
-                    .ok()?,
-                )
-            }
-            _ => None,
-        }
-    } else {
-        None
-    }
-}
-
-fn get_ideal_hash(integrity: &str) -> anyhow::Result<&str> {
-    let split: Vec<_> = integrity.split_ascii_whitespace().collect();
-
-    if split.len() == 1 {
-        Ok(split[0])
-    } else {
-        for hash in ["sha512-", "sha1-"] {
-            if let Some(h) = split.iter().find(|s| s.starts_with(hash)) {
-                return Ok(h);
-            }
-        }
-
-        Err(anyhow!("not sure which hash to select out of {split:?}"))
-    }
-}
-
-fn get_initial_url() -> anyhow::Result<Url> {
-    Url::parse("git+ssh://git@a.b").context("initial url should be valid")
-}
+mod parse;
 
 /// `fixup_lockfile` removes the `integrity` field from Git dependencies.
 ///
@@ -294,7 +62,6 @@ fn fixup_lockfile(mut lock: Map<String, Value>) -> anyhow::Result<Option<Map<Str
     }
 }
 
-#[allow(clippy::too_many_lines)]
 fn main() -> anyhow::Result<()> {
     let args = env::args().collect::<Vec<_>>();
 
@@ -319,7 +86,6 @@ fn main() -> anyhow::Result<()> {
     }
 
     let lock_content = fs::read_to_string(&args[1])?;
-    let lock: PackageLock = serde_json::from_str(&lock_content)?;
 
     let out_tempdir;
 
@@ -331,137 +97,92 @@ fn main() -> anyhow::Result<()> {
         (out_tempdir.path(), true)
     };
 
-    let agent = ureq::agent();
-
-    eprintln!("lockfile version: {}", lock.version);
-
-    let packages = match lock.version {
-        1 => {
-            let initial_url = get_initial_url()?;
-
-            lock.dependencies
-                .map(|p| to_new_packages(p, &initial_url))
-                .transpose()?
-        }
-        2 | 3 => lock.packages,
-        _ => panic!(
-            "We don't support lockfile version {}, please file an issue.",
-            lock.version
-        ),
-    };
-
-    if packages.is_none() {
-        return Ok(());
-    }
-
-    let packages = {
-        let mut seen = HashSet::new();
-        let mut new_packages = HashMap::new();
-
-        for (dep, package) in packages.unwrap().drain() {
-            if let (false, Some(UrlOrString::Url(resolved))) = (dep.is_empty(), &package.resolved) {
-                if !seen.contains(resolved) {
-                    seen.insert(resolved.clone());
-                    new_packages.insert(dep, package);
-                }
-            }
-        }
-
-        new_packages
-    };
+    let packages = parse::lockfile(&lock_content, env::var("FORCE_GIT_DEPS").is_ok())?;
 
     let cache = Cache::new(out.join("_cacache"));
 
-    packages
-        .into_par_iter()
-        .try_for_each(|(dep, mut package)| {
-            eprintln!("{dep}");
+    packages.into_par_iter().try_for_each(|package| {
+        eprintln!("{}", package.name);
 
-            let mut resolved = match package.resolved {
-                Some(UrlOrString::Url(url)) => url,
-                _ => unreachable!(),
-            };
+        let tarball = package.tarball()?;
+        let integrity = package.integrity();
 
-            let mut hosted = false;
+        cache
+            .put(
+                format!("make-fetch-happen:request-cache:{}", package.url),
+                package.url,
+                &tarball,
+                integrity,
+            )
+            .map_err(|e| anyhow!("couldn't insert cache entry for {}: {e:?}", package.name))?;
 
-            if let Some(hosted_git_url) = get_hosted_git_url(&resolved) {
-                resolved = hosted_git_url;
-                package.integrity = None;
-                hosted = true;
-            }
+        Ok::<_, anyhow::Error>(())
+    })?;
 
-            let mut data = Vec::new();
-
-            let mut body = agent.get(resolved.as_str()).call()?.into_reader();
-
-            if hosted {
-                let workdir = tempdir()?;
+    fs::write(out.join("package-lock.json"), lock_content)?;
 
-                let tar_path = workdir.path().join("package");
+    if print_hash {
+        Command::new("nix")
+            .args(["--experimental-features", "nix-command", "hash", "path"])
+            .arg(out.as_os_str())
+            .status()?;
+    }
 
-                fs::create_dir(&tar_path)?;
+    Ok(())
+}
 
-                let mut cmd = Command::new("tar")
-                    .args(["--extract", "--gzip", "--strip-components=1", "-C"])
-                    .arg(&tar_path)
-                    .stdin(Stdio::piped())
-                    .spawn()?;
+#[cfg(test)]
+mod tests {
+    use super::fixup_lockfile;
+    use serde_json::json;
+
+    #[test]
+    fn lockfile_fixup() -> anyhow::Result<()> {
+        let input = json!({
+            "lockfileVersion": 2,
+            "name": "foo",
+            "packages": {
+                "": {
 
-                io::copy(&mut body, &mut cmd.stdin.take().unwrap())?;
+                },
+                "foo": {
+                    "resolved": "https://github.com/NixOS/nixpkgs",
+                    "integrity": "aaa"
+                },
+                "bar": {
+                    "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
+                    "integrity": "bbb"
+                }
+            }
+        });
 
-                let exit = cmd.wait()?;
+        let expected = json!({
+            "lockfileVersion": 2,
+            "name": "foo",
+            "packages": {
+                "": {
 
-                if !exit.success() {
-                    return Err(anyhow!(
-                        "failed to extract tarball for {dep}: tar exited with status code {}",
-                        exit.code().unwrap()
-                    ));
+                },
+                "foo": {
+                    "resolved": "https://github.com/NixOS/nixpkgs",
+                    "integrity": "aaa"
+                },
+                "bar": {
+                    "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
                 }
-
-                data = Command::new("tar")
-                    .args([
-                        "--sort=name",
-                        "--mtime=@0",
-                        "--owner=0",
-                        "--group=0",
-                        "--numeric-owner",
-                        "--format=gnu",
-                        "-I",
-                        "gzip -n -9",
-                        "--create",
-                        "-C",
-                    ])
-                    .arg(workdir.path())
-                    .arg("package")
-                    .output()?
-                    .stdout;
-            } else {
-                body.read_to_end(&mut data)?;
             }
+        });
 
-            cache
-                .put(
-                    format!("make-fetch-happen:request-cache:{resolved}"),
-                    resolved,
-                    &data,
-                    package
-                        .integrity
-                        .map(|i| Ok::<String, anyhow::Error>(get_ideal_hash(&i)?.to_string()))
-                        .transpose()?,
-                )
-                .map_err(|e| anyhow!("couldn't insert cache entry for {dep}: {e:?}"))?;
-
-            Ok::<_, anyhow::Error>(())
-        })?;
+        assert_eq!(
+            fixup_lockfile(input.as_object().unwrap().clone())?,
+            Some(expected.as_object().unwrap().clone())
+        );
 
-    fs::write(out.join("package-lock.json"), lock_content)?;
+        assert_eq!(
+            fixup_lockfile(json!({"lockfileVersion": 1}).as_object().unwrap().clone())?,
+            None
+        );
 
-    if print_hash {
-        Command::new("nix")
-            .args(["--experimental-features", "nix-command", "hash", "path"])
-            .arg(out.as_os_str())
-            .status()?;
+        Ok(())
     }
-
-    Ok(())
 }
diff --git a/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs b/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs
new file mode 100644
index 00000000000..99bd3020b52
--- /dev/null
+++ b/pkgs/build-support/node/fetch-npm-deps/src/parse/lock.rs
@@ -0,0 +1,191 @@
+use anyhow::{bail, Context};
+use rayon::slice::ParallelSliceMut;
+use serde::Deserialize;
+use std::{collections::HashMap, fmt};
+use url::Url;
+
+pub(super) fn packages(content: &str) -> anyhow::Result<Vec<Package>> {
+    let lockfile: Lockfile = serde_json::from_str(content)?;
+
+    let mut packages = match lockfile.version {
+        1 => {
+            let initial_url = get_initial_url()?;
+
+            lockfile
+                .dependencies
+                .map(|p| to_new_packages(p, &initial_url))
+                .transpose()?
+        }
+        2 | 3 => lockfile.packages.map(|pkgs| {
+            pkgs.into_iter()
+                .filter(|(n, p)| !n.is_empty() && matches!(p.resolved, Some(UrlOrString::Url(_))))
+                .map(|(n, p)| Package { name: Some(n), ..p })
+                .collect()
+        }),
+        _ => bail!(
+            "We don't support lockfile version {}, please file an issue.",
+            lockfile.version
+        ),
+    }
+    .expect("lockfile should have packages");
+
+    packages.par_sort_by(|x, y| {
+        x.resolved
+            .partial_cmp(&y.resolved)
+            .expect("resolved should be comparable")
+    });
+
+    packages.dedup_by(|x, y| x.resolved == y.resolved);
+
+    Ok(packages)
+}
+
+#[derive(Deserialize)]
+struct Lockfile {
+    #[serde(rename = "lockfileVersion")]
+    version: u8,
+    dependencies: Option<HashMap<String, OldPackage>>,
+    packages: Option<HashMap<String, Package>>,
+}
+
+#[derive(Deserialize)]
+struct OldPackage {
+    version: UrlOrString,
+    #[serde(default)]
+    bundled: bool,
+    resolved: Option<UrlOrString>,
+    integrity: Option<String>,
+    dependencies: Option<HashMap<String, OldPackage>>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Eq)]
+pub(super) struct Package {
+    #[serde(default)]
+    pub(super) name: Option<String>,
+    pub(super) resolved: Option<UrlOrString>,
+    pub(super) integrity: Option<String>,
+}
+
+#[derive(Debug, Deserialize, PartialEq, Eq, PartialOrd, Ord)]
+#[serde(untagged)]
+pub(super) enum UrlOrString {
+    Url(Url),
+    String(String),
+}
+
+impl fmt::Display for UrlOrString {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            UrlOrString::Url(url) => url.fmt(f),
+            UrlOrString::String(string) => string.fmt(f),
+        }
+    }
+}
+
+#[allow(clippy::case_sensitive_file_extension_comparisons)]
+fn to_new_packages(
+    old_packages: HashMap<String, OldPackage>,
+    initial_url: &Url,
+) -> anyhow::Result<Vec<Package>> {
+    let mut new = Vec::new();
+
+    for (name, mut package) in old_packages {
+        // In some cases, a bundled dependency happens to have the same version as a non-bundled one, causing
+        // the bundled one without a URL to override the entry for the non-bundled instance, which prevents the
+        // dependency from being downloaded.
+        if package.bundled {
+            continue;
+        }
+
+        if let UrlOrString::Url(v) = &package.version {
+            for (scheme, host) in [
+                ("github", "github.com"),
+                ("bitbucket", "bitbucket.org"),
+                ("gitlab", "gitlab.com"),
+            ] {
+                if v.scheme() == scheme {
+                    package.version = {
+                        let mut new_url = initial_url.clone();
+
+                        new_url.set_host(Some(host))?;
+
+                        if v.path().ends_with(".git") {
+                            new_url.set_path(v.path());
+                        } else {
+                            new_url.set_path(&format!("{}.git", v.path()));
+                        }
+
+                        new_url.set_fragment(v.fragment());
+
+                        UrlOrString::Url(new_url)
+                    };
+
+                    break;
+                }
+            }
+        }
+
+        new.push(Package {
+            name: Some(name),
+            resolved: if matches!(package.version, UrlOrString::Url(_)) {
+                Some(package.version)
+            } else {
+                package.resolved
+            },
+            integrity: package.integrity,
+        });
+
+        if let Some(dependencies) = package.dependencies {
+            new.append(&mut to_new_packages(dependencies, initial_url)?);
+        }
+    }
+
+    Ok(new)
+}
+
+fn get_initial_url() -> anyhow::Result<Url> {
+    Url::parse("git+ssh://git@a.b").context("initial url should be valid")
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{get_initial_url, to_new_packages, OldPackage, Package, UrlOrString};
+    use std::collections::HashMap;
+    use url::Url;
+
+    #[test]
+    fn git_shorthand_v1() -> anyhow::Result<()> {
+        let old = {
+            let mut o = HashMap::new();
+            o.insert(
+                String::from("sqlite3"),
+                OldPackage {
+                    version: UrlOrString::Url(
+                        Url::parse(
+                            "github:mapbox/node-sqlite3#593c9d498be2510d286349134537e3bf89401c4a",
+                        )
+                        .unwrap(),
+                    ),
+                    bundled: false,
+                    resolved: None,
+                    integrity: None,
+                    dependencies: None,
+                },
+            );
+            o
+        };
+
+        let initial_url = get_initial_url()?;
+
+        let new = to_new_packages(old, &initial_url)?;
+
+        assert_eq!(new.len(), 1, "new packages map should contain 1 value");
+        assert_eq!(new[0], Package {
+            name: Some(String::from("sqlite3")),
+            resolved: Some(UrlOrString::Url(Url::parse("git+ssh://git@github.com/mapbox/node-sqlite3.git#593c9d498be2510d286349134537e3bf89401c4a").unwrap())),
+            integrity: None
+        });
+
+        Ok(())
+    }
+}
diff --git a/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs b/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs
new file mode 100644
index 00000000000..387b3add7ec
--- /dev/null
+++ b/pkgs/build-support/node/fetch-npm-deps/src/parse/mod.rs
@@ -0,0 +1,370 @@
+use anyhow::{anyhow, bail, Context};
+use lock::UrlOrString;
+use rayon::prelude::*;
+use serde_json::{Map, Value};
+use std::{
+    fs, io,
+    process::{Command, Stdio},
+};
+use tempfile::{tempdir, TempDir};
+use url::Url;
+
+mod lock;
+
+pub fn lockfile(content: &str, force_git_deps: bool) -> anyhow::Result<Vec<Package>> {
+    let mut packages = lock::packages(content)
+        .context("failed to extract packages from lockfile")?
+        .into_par_iter()
+        .map(|p| {
+            let n = p.name.clone().unwrap();
+
+            Package::from_lock(p).with_context(|| format!("failed to parse data for {n}"))
+        })
+        .collect::<anyhow::Result<Vec<_>>>()?;
+
+    let mut new = Vec::new();
+
+    for pkg in packages
+        .iter()
+        .filter(|p| matches!(p.specifics, Specifics::Git { .. }))
+    {
+        let dir = match &pkg.specifics {
+            Specifics::Git { workdir } => workdir,
+            Specifics::Registry { .. } => unimplemented!(),
+        };
+
+        let path = dir.path().join("package");
+
+        let lockfile_contents = fs::read_to_string(path.join("package-lock.json"));
+
+        let package_json_path = path.join("package.json");
+        let mut package_json: Map<String, Value> =
+            serde_json::from_str(&fs::read_to_string(package_json_path)?)?;
+
+        if let Some(scripts) = package_json
+            .get_mut("scripts")
+            .and_then(Value::as_object_mut)
+        {
+            // https://github.com/npm/pacote/blob/272edc1bac06991fc5f95d06342334bbacfbaa4b/lib/git.js#L166-L172
+            for typ in [
+                "postinstall",
+                "build",
+                "preinstall",
+                "install",
+                "prepack",
+                "prepare",
+            ] {
+                if scripts.contains_key(typ) && lockfile_contents.is_err() && !force_git_deps {
+                    bail!("Git dependency {} contains install scripts, but has no lockfile, which is something that will probably break. Open an issue if you can't feasibly patch this dependency out, and we'll come up with a workaround.\nIf you'd like to attempt to use this dependency anyway, set `forceGitDeps = true`.", pkg.name);
+                }
+            }
+        }
+
+        if let Ok(lockfile_contents) = lockfile_contents {
+            new.append(&mut lockfile(&lockfile_contents, force_git_deps)?);
+        }
+    }
+
+    packages.append(&mut new);
+
+    packages.par_sort_by(|x, y| {
+        x.url
+            .partial_cmp(&y.url)
+            .expect("resolved should be comparable")
+    });
+
+    packages.dedup_by(|x, y| x.url == y.url);
+
+    Ok(packages)
+}
+
+#[derive(Debug)]
+pub struct Package {
+    pub name: String,
+    pub url: Url,
+    specifics: Specifics,
+}
+
+#[derive(Debug)]
+enum Specifics {
+    Registry { integrity: String },
+    Git { workdir: TempDir },
+}
+
+impl Package {
+    fn from_lock(pkg: lock::Package) -> anyhow::Result<Package> {
+        let mut resolved = match pkg
+            .resolved
+            .expect("at this point, packages should have URLs")
+        {
+            UrlOrString::Url(u) => u,
+            UrlOrString::String(_) => panic!("at this point, all packages should have URLs"),
+        };
+
+        let specifics = match get_hosted_git_url(&resolved)? {
+            Some(hosted) => {
+                let mut body = ureq::get(hosted.as_str()).call()?.into_reader();
+
+                let workdir = tempdir()?;
+
+                let tar_path = workdir.path().join("package");
+
+                fs::create_dir(&tar_path)?;
+
+                let mut cmd = Command::new("tar")
+                    .args(["--extract", "--gzip", "--strip-components=1", "-C"])
+                    .arg(&tar_path)
+                    .stdin(Stdio::piped())
+                    .spawn()?;
+
+                io::copy(&mut body, &mut cmd.stdin.take().unwrap())?;
+
+                let exit = cmd.wait()?;
+
+                if !exit.success() {
+                    bail!(
+                        "failed to extract tarball for {}: tar exited with status code {}",
+                        pkg.name.unwrap(),
+                        exit.code().unwrap()
+                    );
+                }
+
+                resolved = hosted;
+
+                Specifics::Git { workdir }
+            }
+            None => Specifics::Registry {
+                integrity: get_ideal_hash(
+                    &pkg.integrity
+                        .expect("non-git dependencies should have associated integrity"),
+                )?
+                .to_string(),
+            },
+        };
+
+        Ok(Package {
+            name: pkg.name.unwrap(),
+            url: resolved,
+            specifics,
+        })
+    }
+
+    pub fn tarball(&self) -> anyhow::Result<Vec<u8>> {
+        match &self.specifics {
+            Specifics::Registry { .. } => {
+                let mut body = Vec::new();
+
+                ureq::get(self.url.as_str())
+                    .call()?
+                    .into_reader()
+                    .read_to_end(&mut body)?;
+
+                Ok(body)
+            }
+            Specifics::Git { workdir } => Ok(Command::new("tar")
+                .args([
+                    "--sort=name",
+                    "--mtime=@0",
+                    "--owner=0",
+                    "--group=0",
+                    "--numeric-owner",
+                    "--format=gnu",
+                    "-I",
+                    "gzip -n -9",
+                    "--create",
+                    "-C",
+                ])
+                .arg(workdir.path())
+                .arg("package")
+                .output()?
+                .stdout),
+        }
+    }
+
+    pub fn integrity(&self) -> Option<String> {
+        match &self.specifics {
+            Specifics::Registry { integrity } => Some(integrity.clone()),
+            Specifics::Git { .. } => None,
+        }
+    }
+}
+
+#[allow(clippy::case_sensitive_file_extension_comparisons)]
+fn get_hosted_git_url(url: &Url) -> anyhow::Result<Option<Url>> {
+    if ["git", "git+ssh", "git+https", "ssh"].contains(&url.scheme()) {
+        let mut s = url
+            .path_segments()
+            .ok_or_else(|| anyhow!("bad URL: {url}"))?;
+
+        let mut get_url = || match url.host_str()? {
+            "github.com" => {
+                let user = s.next()?;
+                let mut project = s.next()?;
+                let typ = s.next();
+                let mut commit = s.next();
+
+                if typ.is_none() {
+                    commit = url.fragment();
+                } else if typ.is_some() && typ != Some("tree") {
+                    return None;
+                }
+
+                if project.ends_with(".git") {
+                    project = project.strip_suffix(".git")?;
+                }
+
+                let commit = commit.unwrap();
+
+                Some(
+                    Url::parse(&format!(
+                        "https://codeload.github.com/{user}/{project}/tar.gz/{commit}"
+                    ))
+                    .ok()?,
+                )
+            }
+            "bitbucket.org" => {
+                let user = s.next()?;
+                let mut project = s.next()?;
+                let aux = s.next();
+
+                if aux == Some("get") {
+                    return None;
+                }
+
+                if project.ends_with(".git") {
+                    project = project.strip_suffix(".git")?;
+                }
+
+                let commit = url.fragment()?;
+
+                Some(
+                    Url::parse(&format!(
+                        "https://bitbucket.org/{user}/{project}/get/{commit}.tar.gz"
+                    ))
+                    .ok()?,
+                )
+            }
+            "gitlab.com" => {
+                /* let path = &url.path()[1..];
+
+                if path.contains("/~/") || path.contains("/archive.tar.gz") {
+                    return None;
+                }
+
+                let user = s.next()?;
+                let mut project = s.next()?;
+
+                if project.ends_with(".git") {
+                    project = project.strip_suffix(".git")?;
+                }
+
+                let commit = url.fragment()?;
+
+                Some(
+                    Url::parse(&format!(
+                    "https://gitlab.com/{user}/{project}/repository/archive.tar.gz?ref={commit}"
+                ))
+                    .ok()?,
+                ) */
+
+                // lmao: https://github.com/npm/hosted-git-info/pull/109
+                None
+            }
+            "git.sr.ht" => {
+                let user = s.next()?;
+                let mut project = s.next()?;
+                let aux = s.next();
+
+                if aux == Some("archive") {
+                    return None;
+                }
+
+                if project.ends_with(".git") {
+                    project = project.strip_suffix(".git")?;
+                }
+
+                let commit = url.fragment()?;
+
+                Some(
+                    Url::parse(&format!(
+                        "https://git.sr.ht/{user}/{project}/archive/{commit}.tar.gz"
+                    ))
+                    .ok()?,
+                )
+            }
+            _ => None,
+        };
+
+        match get_url() {
+            Some(u) => Ok(Some(u)),
+            None => Err(anyhow!("This lockfile either contains a Git dependency with an unsupported host, or a malformed URL in the lockfile: {url}"))
+        }
+    } else {
+        Ok(None)
+    }
+}
+
+fn get_ideal_hash(integrity: &str) -> anyhow::Result<&str> {
+    let split: Vec<_> = integrity.split_ascii_whitespace().collect();
+
+    if split.len() == 1 {
+        Ok(split[0])
+    } else {
+        for hash in ["sha512-", "sha1-"] {
+            if let Some(h) = split.iter().find(|s| s.starts_with(hash)) {
+                return Ok(h);
+            }
+        }
+
+        Err(anyhow!("not sure which hash to select out of {split:?}"))
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{get_hosted_git_url, get_ideal_hash};
+    use url::Url;
+
+    #[test]
+    fn hosted_git_urls() {
+        for (input, expected) in [
+            (
+                "git+ssh://git@github.com/castlabs/electron-releases.git#fc5f78d046e8d7cdeb66345a2633c383ab41f525",
+                Some("https://codeload.github.com/castlabs/electron-releases/tar.gz/fc5f78d046e8d7cdeb66345a2633c383ab41f525"),
+            ),
+            (
+                "git+ssh://bitbucket.org/foo/bar#branch",
+                Some("https://bitbucket.org/foo/bar/get/branch.tar.gz")
+            ),
+            (
+                "git+ssh://git.sr.ht/~foo/bar#branch",
+                Some("https://git.sr.ht/~foo/bar/archive/branch.tar.gz")
+            ),
+        ] {
+            assert_eq!(
+                get_hosted_git_url(&Url::parse(input).unwrap()).unwrap(),
+                expected.map(|u| Url::parse(u).unwrap())
+            );
+        }
+
+        assert!(
+            get_hosted_git_url(&Url::parse("ssh://git@gitlab.com/foo/bar.git#fix/bug").unwrap())
+                .is_err(),
+            "GitLab URLs should be marked as invalid (lol)"
+        );
+    }
+
+    #[test]
+    fn ideal_hashes() {
+        for (input, expected) in [
+            ("sha512-foo sha1-bar", Some("sha512-foo")),
+            ("sha1-bar md5-foo", Some("sha1-bar")),
+            ("sha1-bar", Some("sha1-bar")),
+            ("sha512-foo", Some("sha512-foo")),
+            ("foo-bar sha1-bar", Some("sha1-bar")),
+            ("foo-bar baz-foo", None),
+        ] {
+            assert_eq!(get_ideal_hash(input).ok(), expected);
+        }
+    }
+}
diff --git a/pkgs/build-support/node/fetch-npm-deps/src/tests.rs b/pkgs/build-support/node/fetch-npm-deps/src/tests.rs
deleted file mode 100644
index a3317207c42..00000000000
--- a/pkgs/build-support/node/fetch-npm-deps/src/tests.rs
+++ /dev/null
@@ -1,141 +0,0 @@
-use super::{
-    fixup_lockfile, get_hosted_git_url, get_ideal_hash, get_initial_url, to_new_packages,
-    OldPackage, Package, UrlOrString,
-};
-use serde_json::json;
-use std::collections::HashMap;
-use url::Url;
-
-#[test]
-fn hosted_git_urls() {
-    for (input, expected) in [
-        (
-            "git+ssh://git@github.com/castlabs/electron-releases.git#fc5f78d046e8d7cdeb66345a2633c383ab41f525",
-            Some("https://codeload.github.com/castlabs/electron-releases/tar.gz/fc5f78d046e8d7cdeb66345a2633c383ab41f525"),
-        ),
-        (
-            "https://user@github.com/foo/bar#fix/bug",
-            Some("https://codeload.github.com/foo/bar/tar.gz/fix/bug")
-        ),
-        (
-            "https://github.com/eligrey/classList.js/archive/1.2.20180112.tar.gz",
-            None
-        ),
-        (
-            "git+ssh://bitbucket.org/foo/bar#branch",
-            Some("https://bitbucket.org/foo/bar/get/branch.tar.gz")
-        ),
-        (
-            "ssh://git@gitlab.com/foo/bar.git#fix/bug",
-            Some("https://gitlab.com/foo/bar/repository/archive.tar.gz?ref=fix/bug")
-        ),
-        (
-            "git+ssh://git.sr.ht/~foo/bar#branch",
-            Some("https://git.sr.ht/~foo/bar/archive/branch.tar.gz")
-        ),
-    ] {
-        assert_eq!(
-            get_hosted_git_url(&Url::parse(input).unwrap()),
-            expected.map(|u| Url::parse(u).unwrap())
-        );
-    }
-}
-
-#[test]
-fn ideal_hashes() {
-    for (input, expected) in [
-        ("sha512-foo sha1-bar", Some("sha512-foo")),
-        ("sha1-bar md5-foo", Some("sha1-bar")),
-        ("sha1-bar", Some("sha1-bar")),
-        ("sha512-foo", Some("sha512-foo")),
-        ("foo-bar sha1-bar", Some("sha1-bar")),
-        ("foo-bar baz-foo", None),
-    ] {
-        assert_eq!(get_ideal_hash(input).ok(), expected);
-    }
-}
-
-#[test]
-fn git_shorthand_v1() -> anyhow::Result<()> {
-    let old = {
-        let mut o = HashMap::new();
-        o.insert(
-            String::from("sqlite3"),
-            OldPackage {
-                version: UrlOrString::Url(
-                    Url::parse(
-                        "github:mapbox/node-sqlite3#593c9d498be2510d286349134537e3bf89401c4a",
-                    )
-                    .unwrap(),
-                ),
-                bundled: false,
-                resolved: None,
-                integrity: None,
-                dependencies: None,
-            },
-        );
-        o
-    };
-
-    let initial_url = get_initial_url()?;
-
-    let new = to_new_packages(old, &initial_url)?;
-
-    assert_eq!(new.len(), 1, "new packages map should contain 1 value");
-    assert_eq!(new.into_values().next().unwrap(), Package {
-        resolved: Some(UrlOrString::Url(Url::parse("git+ssh://git@github.com/mapbox/node-sqlite3.git#593c9d498be2510d286349134537e3bf89401c4a").unwrap())),
-        integrity: None
-    });
-
-    Ok(())
-}
-
-#[test]
-fn lockfile_fixup() -> anyhow::Result<()> {
-    let input = json!({
-        "lockfileVersion": 2,
-        "name": "foo",
-        "packages": {
-            "": {
-
-            },
-            "foo": {
-                "resolved": "https://github.com/NixOS/nixpkgs",
-                "integrity": "aaa"
-            },
-            "bar": {
-                "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
-                "integrity": "bbb"
-            }
-        }
-    });
-
-    let expected = json!({
-        "lockfileVersion": 2,
-        "name": "foo",
-        "packages": {
-            "": {
-
-            },
-            "foo": {
-                "resolved": "https://github.com/NixOS/nixpkgs",
-                "integrity": "aaa"
-            },
-            "bar": {
-                "resolved": "git+ssh://git@github.com/NixOS/nixpkgs.git",
-            }
-        }
-    });
-
-    assert_eq!(
-        fixup_lockfile(input.as_object().unwrap().clone())?,
-        Some(expected.as_object().unwrap().clone())
-    );
-
-    assert_eq!(
-        fixup_lockfile(json!({"lockfileVersion": 1}).as_object().unwrap().clone())?,
-        None
-    );
-
-    Ok(())
-}
diff --git a/pkgs/build-support/rust/build-rust-package/default.nix b/pkgs/build-support/rust/build-rust-package/default.nix
index a1bddeb6c49..2cd30af56b7 100644
--- a/pkgs/build-support/rust/build-rust-package/default.nix
+++ b/pkgs/build-support/rust/build-rust-package/default.nix
@@ -11,7 +11,7 @@
 , cargoSetupHook
 , cargo
 , cargo-auditable
-, cargo-auditable-cargo-wrapper
+, buildPackages
 , rustc
 , libiconv
 , windows
@@ -121,7 +121,7 @@ stdenv.mkDerivation ((removeAttrs args [ "depsExtraArgs" "cargoUpdateHook" "carg
   patchRegistryDeps = ./patch-registry-deps;
 
   nativeBuildInputs = nativeBuildInputs ++ lib.optionals auditable [
-    (cargo-auditable-cargo-wrapper.override {
+    (buildPackages.cargo-auditable-cargo-wrapper.override {
       inherit cargo cargo-auditable;
     })
   ] ++ [
diff --git a/pkgs/build-support/rust/default-crate-overrides.nix b/pkgs/build-support/rust/default-crate-overrides.nix
index ce8217b403e..e4db2c8a057 100644
--- a/pkgs/build-support/rust/default-crate-overrides.nix
+++ b/pkgs/build-support/rust/default-crate-overrides.nix
@@ -5,6 +5,7 @@
 , curl
 , darwin
 , libgit2
+, gtk3
 , libssh2
 , openssl
 , sqlite
@@ -145,6 +146,11 @@ in
     buildInputs = [ gdk-pixbuf ];
   };
 
+  gtk-sys = attrs: {
+    buildInputs = [ gtk3 ];
+    nativeBuildInputs = [ pkg-config ];
+  };
+
   gtk4-sys = attrs: {
     buildInputs = [ gtk4 ];
     nativeBuildInputs = [ pkg-config ];
diff --git a/pkgs/build-support/setup-hooks/move-build-tree.sh b/pkgs/build-support/setup-hooks/move-build-tree.sh
new file mode 100644
index 00000000000..2718070f393
--- /dev/null
+++ b/pkgs/build-support/setup-hooks/move-build-tree.sh
@@ -0,0 +1,12 @@
+prePhases+=" moveBuildDir"
+
+moveBuildDir() {
+    mkdir -p $out/.build
+    cd $out/.build
+}
+
+postPhases+=" removeBuildDir"
+
+removeBuildDir() {
+    rm -rf $out/.build
+}
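Note: the commit does not show how this hook gets registered, so the wiring below is only a guess at the conventional pattern: setup-hook scripts in this directory are normally wrapped with makeSetupHook and added to nativeBuildInputs.

  { makeSetupHook, hello }:

  let
    moveBuildTreeHook = makeSetupHook { name = "move-build-tree"; }
      ./move-build-tree.sh;                  # path assumed relative to this file
  in
  hello.overrideAttrs (old: {
    # Builds inside $out/.build (prePhases) and removes it afterwards
    # (postPhases), as defined by the hook above.
    nativeBuildInputs = (old.nativeBuildInputs or [ ]) ++ [ moveBuildTreeHook ];
  })
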
diff --git a/pkgs/build-support/trivial-builders.nix b/pkgs/build-support/trivial-builders.nix
index 8694c602a3b..e90d0a6d202 100644
--- a/pkgs/build-support/trivial-builders.nix
+++ b/pkgs/build-support/trivial-builders.nix
@@ -785,12 +785,13 @@ rec {
   requireFile = { name ? null
                 , sha256 ? null
                 , sha1 ? null
+                , hash ? null
                 , url ? null
                 , message ? null
                 , hashMode ? "flat"
                 } :
     assert (message != null) || (url != null);
-    assert (sha256 != null) || (sha1 != null);
+    assert (sha256 != null) || (sha1 != null) || (hash != null);
     assert (name != null) || (url != null);
     let msg =
       if message != null then message
@@ -802,15 +803,19 @@ rec {
         or
           nix-prefetch-url --type ${hashAlgo} file:///path/to/${name_}
       '';
-      hashAlgo = if sha256 != null then "sha256" else "sha1";
-      hash = if sha256 != null then sha256 else sha1;
+      hashAlgo = if hash != null then ""
+            else if sha256 != null then "sha256"
+            else "sha1";
+      hash_ = if hash != null then hash
+         else if sha256 != null then sha256
+         else sha1;
       name_ = if name == null then baseNameOf (toString url) else name;
     in
     stdenvNoCC.mkDerivation {
       name = name_;
       outputHashMode = hashMode;
       outputHashAlgo = hashAlgo;
-      outputHash = hash;
+      outputHash = hash_;
       preferLocalBuild = true;
       allowSubstitutes = false;
       builder = writeScript "restrict-message" ''
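Note: with the change above, requireFile also accepts an SRI-style `hash`, in which case outputHashAlgo is left empty and the algorithm is taken from the hash itself. A minimal sketch (file name, URL, and hash are placeholders):

  { lib, requireFile }:

  requireFile {
    name = "vendor-sdk.tar.gz";              # hypothetical file
    url = "https://example.com/vendor-sdk.tar.gz";
    hash = lib.fakeHash;                     # replace with the real SRI hash
  }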