Diffstat (limited to 'pkgs/build-support')
-rw-r--r--  pkgs/build-support/autonix/default.nix | 136
-rwxr-xr-x  pkgs/build-support/autonix/manifest.sh | 44
-rw-r--r--  pkgs/build-support/build-fhs-userenv/default.nix | 4
-rwxr-xr-x  pkgs/build-support/buildenv/builder.pl | 41
-rw-r--r--  pkgs/build-support/buildenv/default.nix | 8
-rw-r--r--  pkgs/build-support/cc-wrapper/cc-wrapper.sh | 5
-rw-r--r--  pkgs/build-support/cc-wrapper/default.nix | 12
-rw-r--r--  pkgs/build-support/cc-wrapper/gnat-wrapper.sh | 5
-rw-r--r--  pkgs/build-support/cc-wrapper/ld-wrapper.sh | 5
-rw-r--r--  pkgs/build-support/docker/detjson.py | 4
-rw-r--r--  pkgs/build-support/docker/pull.nix | 9
-rw-r--r--  pkgs/build-support/docker/pull.sh | 53
-rw-r--r--  pkgs/build-support/emacs/elpa.nix | 30
-rw-r--r--  pkgs/build-support/emacs/elpa2nix.el | 30
-rw-r--r--  pkgs/build-support/emacs/generic.nix | 7
-rw-r--r--  pkgs/build-support/emacs/melpa.nix | 40
-rw-r--r--  pkgs/build-support/emacs/melpa2nix.el | 23
-rw-r--r--  pkgs/build-support/fetchgit/default.nix | 2
-rwxr-xr-x  pkgs/build-support/fetchgit/nix-prefetch-git | 34
-rw-r--r--  pkgs/build-support/fetchurl/builder.sh | 5
-rw-r--r--  pkgs/build-support/fetchurl/default.nix | 7
-rw-r--r--  pkgs/build-support/grsecurity/default.nix | 3
-rw-r--r--  pkgs/build-support/release/source-tarball.nix | 2
-rw-r--r--  pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh | 4
24 files changed, 231 insertions, 282 deletions
diff --git a/pkgs/build-support/autonix/default.nix b/pkgs/build-support/autonix/default.nix
deleted file mode 100644
index 1f71d2cbb3b..00000000000
--- a/pkgs/build-support/autonix/default.nix
+++ /dev/null
@@ -1,136 +0,0 @@
-{ pkgs }:
-
-let inherit (pkgs) bash coreutils findutils nix wget;
-    inherit (pkgs) callPackage fetchurl runCommand stdenv substituteAll writeText;
-in
-
-/* autonix is a collection of tools to automate packaging large collections
- * of software, particularly KDE. It consists of three components:
- *   1. a script (manifest) to download and hash the packages
- *   2. a dependency scanner (autonix-deps) written in Haskell that examines
- *      the package sources and tries to guess their dependencies
- *   3. a library of Nix routines (generateCollection) to generate Nix
- *      expressions from the output of the previous steps.
- */
-
-let inherit (stdenv) lib; in
-
-let
-
-  resolveDeps = scope: deps:
-    let resolve = dep:
-          let res = scope."${dep}" or [];
-          in if lib.isList res then res else [res];
-    in lib.concatMap resolve deps;
-
-in rec {
-
-  /* Download the packages into the Nix store, compute their hashes,
-   * and generate a package manifest in ./manifest.nix.
-   */
-  manifest =
-    let
-      script =
-        substituteAll
-          {
-            src = ./manifest.sh;
-            inherit bash coreutils findutils nix wget;
-          };
-    in
-      runCommand "autonix-manifest" {}
-        ''
-          cp ${script} $out
-          chmod +x $out
-        '';
-
-  mkPackage = callPackage: defaultOverride: name: pkg: let drv =
-    { mkDerivation, fetchurl, scope }:
-
-    mkDerivation (defaultOverride {
-      inherit (pkg) name;
-
-      src = fetchurl pkg.src;
-
-      buildInputs = resolveDeps scope pkg.buildInputs;
-      nativeBuildInputs = resolveDeps scope pkg.nativeBuildInputs;
-      propagatedBuildInputs = resolveDeps scope pkg.propagatedBuildInputs;
-      propagatedNativeBuildInputs =
-        resolveDeps scope pkg.propagatedNativeBuildInputs;
-      propagatedUserEnvPkgs = resolveDeps scope pkg.propagatedUserEnvPkgs;
-
-      enableParallelBuilding = true;
-    });
-  in callPackage drv {};
-
-  renameDeps = renames: lib.mapAttrs (name: pkg:
-    let breakCycles = lib.filter (dep: dep != name);
-        rename = dep: renames."${dep}" or dep;
-    in pkg // {
-      buildInputs = breakCycles (map rename pkg.buildInputs);
-      nativeBuildInputs = breakCycles (map rename pkg.nativeBuildInputs);
-      propagatedBuildInputs = breakCycles (map rename pkg.propagatedBuildInputs);
-      propagatedNativeBuildInputs =
-        breakCycles (map rename pkg.propagatedNativeBuildInputs);
-      propagatedUserEnvPkgs = breakCycles (map rename pkg.propagatedUserEnvPkgs);
-    });
-
-  propagateDeps = propagated: lib.mapAttrs (name: pkg:
-    let isPropagated = dep: lib.elem dep propagated;
-        isNotPropagated = dep: !(isPropagated dep);
-    in pkg // {
-      buildInputs = lib.filter isNotPropagated pkg.buildInputs;
-      nativeBuildInputs = lib.filter isNotPropagated pkg.nativeBuildInputs;
-      propagatedBuildInputs =
-        pkg.propagatedBuildInputs
-        ++ lib.filter isPropagated pkg.buildInputs;
-      propagatedNativeBuildInputs =
-        pkg.propagatedNativeBuildInputs
-        ++ lib.filter isPropagated pkg.nativeBuildInputs;
-    });
-
-  nativeDeps = native: lib.mapAttrs (name: pkg:
-    let isNative = dep: lib.elem dep native;
-        isNotNative = dep: !(isNative dep);
-    in pkg // {
-      buildInputs = lib.filter isNotNative pkg.buildInputs;
-      nativeBuildInputs =
-        pkg.nativeBuildInputs
-        ++ lib.filter isNative pkg.buildInputs;
-      propagatedBuildInputs = lib.filter isNotNative pkg.propagatedBuildInputs;
-      propagatedNativeBuildInputs =
-        pkg.propagatedNativeBuildInputs
-        ++ lib.filter isNative pkg.propagatedBuildInputs;
-    });
-
-  userEnvDeps = user: lib.mapAttrs (name: pkg:
-    let allDeps = with pkg; lib.concatLists [
-          buildInputs
-          nativeBuildInputs
-          propagatedBuildInputs
-          propagatedNativeBuildInputs
-        ];
-    in assert (lib.isList allDeps); pkg // {
-      propagatedUserEnvPkgs = lib.filter (dep: lib.elem dep user) allDeps;
-    });
-
-  overrideDerivation = pkg: f: pkg.override (super: super // {
-    mkDerivation = drv: super.mkDerivation (drv // f drv);
-  });
-
-  extendDerivation = pkg: attrs:
-    let mergeAttrBy = lib.mergeAttrBy // {
-          propagatedNativeBuildInputs = a: b: a ++ b;
-          NIX_CFLAGS_COMPILE = a: b: "${a} ${b}";
-          cmakeFlags = a: b: a ++ b;
-        };
-        mergeAttrsByFunc = sets:
-          let merged = lib.foldl lib.mergeAttrByFunc { inherit mergeAttrBy; } sets;
-          in builtins.removeAttrs merged ["mergeAttrBy"];
-    in overrideDerivation pkg (drv: mergeAttrsByFunc [ drv attrs ]);
-
-  overrideScope = pkg: fnOrSet: pkg.override (super: super // {
-    scope = if builtins.isFunction fnOrSet
-              then super.scope // fnOrSet super.scope
-            else super.scope // fnOrSet;
-  });
-}
diff --git a/pkgs/build-support/autonix/manifest.sh b/pkgs/build-support/autonix/manifest.sh
deleted file mode 100755
index 5be69cc6175..00000000000
--- a/pkgs/build-support/autonix/manifest.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!@bash@/bin/bash
-
-@coreutils@/bin/mkdir tmp; cd tmp
-
-@wget@/bin/wget -nH -r -c --no-parent $*
-
-cat >../manifest.json <<EOF
-[
-EOF
-
-workdir=$(pwd)
-sep=""
-
-@findutils@/bin/find . | while read path; do
-    if [[ -f "${path}" ]]; then
-        [[ -n "${sep}" ]] && echo "$sep" >>../manifest.json
-        url="${path:2}"
-        # Sanitize file name
-        filename=$(@coreutils@/bin/basename "${path}" | tr '@' '_')
-        nameversion="${filename%.tar.*}"
-        name="${nameversion%-*}"
-        dirname=$(@coreutils@/bin/dirname "${path}")
-        mv "${workdir}/${path}" "${workdir}/${dirname}/${filename}"
-        # Prefetch and hash source file
-        sha256=$(@nix@/bin/nix-prefetch-url "file://${workdir}/${dirname}/${filename}")
-        store=$(@nix@/bin/nix-store --print-fixed-path sha256 "$sha256" "$filename")
-        cat >>../manifest.json <<EOF
-  {
-    "name": "${nameversion}",
-    "store": "${store}",
-    "src": {
-      "url": "${url}",
-      "sha256": "${sha256}",
-      "name": "${filename}"
-    }
-  }
-EOF
-        sep=","
-    fi
-done
-
-echo "]" >>../manifest.json
-
-cd ..
diff --git a/pkgs/build-support/build-fhs-userenv/default.nix b/pkgs/build-support/build-fhs-userenv/default.nix
index 5db0d98b79a..4177846c433 100644
--- a/pkgs/build-support/build-fhs-userenv/default.nix
+++ b/pkgs/build-support/build-fhs-userenv/default.nix
@@ -12,6 +12,8 @@ let
   '';
 
   init = run: writeText "${name}-init" ''
+    source /etc/profile
+
     # Make /tmp directory
     mkdir -m 1777 /tmp
 
@@ -44,7 +46,7 @@ in runCommand name {
   cat <<EOF >$out/bin/${name}
   #! ${stdenv.shell}
   export CHROOTENV_EXTRA_BINDS="${lib.concatStringsSep ":" extraBindMounts}:\$CHROOTENV_EXTRA_BINDS"
-  exec ${chroot-user}/bin/chroot-user ${env} ${bash'} -l ${init runScript} "\$(pwd)" "\$@"
+  exec ${chroot-user}/bin/chroot-user ${env} ${bash'} ${init runScript} "\$(pwd)" "\$@"
   EOF
   chmod +x $out/bin/${name}
   ${extraInstallCommands}
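
Note on the build-fhs-userenv change: profile initialization now happens inside the generated init script (which sources /etc/profile) rather than by starting bash as a login shell, so the -l flag is dropped from the chroot-user invocation. For orientation, a minimal usage sketch of this builder follows; it is not part of the diff, and apart from extraBindMounts (visible above) the argument names are assumptions based on the builder's conventional interface:

    # Hedged sketch, not part of this change: a small FHS user environment.
    { pkgs ? import <nixpkgs> {} }:

    pkgs.buildFHSUserEnv {
      name = "example-fhs-env";            # produces $out/bin/example-fhs-env
      targetPkgs = pkgs: [ pkgs.zlib pkgs.curl ];
      runScript = "bash";                  # started by the init script patched above
      extraBindMounts = [ "/mnt/data" ];   # exported via CHROOTENV_EXTRA_BINDS
    }
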
diff --git a/pkgs/build-support/buildenv/builder.pl b/pkgs/build-support/buildenv/builder.pl
index 155af314397..f6cfe52dc31 100755
--- a/pkgs/build-support/buildenv/builder.pl
+++ b/pkgs/build-support/buildenv/builder.pl
@@ -5,6 +5,7 @@ use Cwd 'abs_path';
 use IO::Handle;
 use File::Path;
 use File::Basename;
+use File::Compare;
 use JSON::PP;
 
 STDOUT->autoflush(1);
@@ -38,7 +39,7 @@ for my $p (@pathsToLink) {
 sub findFiles;
 
 sub findFilesInDir {
-    my ($relName, $target, $ignoreCollisions, $priority) = @_;
+    my ($relName, $target, $ignoreCollisions, $checkCollisionContents, $priority) = @_;
 
     opendir DIR, "$target" or die "cannot open `$target': $!";
     my @names = readdir DIR or die;
@@ -46,12 +47,28 @@ sub findFilesInDir {
 
     foreach my $name (@names) {
         next if $name eq "." || $name eq "..";
-        findFiles("$relName/$name", "$target/$name", $name, $ignoreCollisions, $priority);
+        findFiles("$relName/$name", "$target/$name", $name, $ignoreCollisions, $checkCollisionContents, $priority);
     }
 }
 
+sub checkCollision {
+    my ($path1, $path2) = @_;
+
+    my $stat1 = (stat($path1))[2];
+    my $stat2 = (stat($path2))[2];
+
+    if ($stat1 != $stat2) {
+        warn "different permissions in `$path1' and `$path2': "
+           . sprintf("%04o", $stat1 & 07777) . " <-> "
+           . sprintf("%04o", $stat2 & 07777);
+        return 0;
+    }
+
+    return compare($path1, $path2) == 0;
+}
+
 sub findFiles {
-    my ($relName, $target, $baseName, $ignoreCollisions, $priority) = @_;
+    my ($relName, $target, $baseName, $ignoreCollisions, $checkCollisionContents, $priority) = @_;
 
     # Urgh, hacky...
     return if
@@ -82,13 +99,15 @@ sub findFiles {
         if ($ignoreCollisions) {
             warn "collision between `$target' and `$oldTarget'\n" if $ignoreCollisions == 1;
             return;
+        } elsif ($checkCollisionContents && checkCollision($oldTarget, $target)) {
+            return;
         } else {
             die "collision between `$target' and `$oldTarget'\n";
         }
     }
 
-    findFilesInDir($relName, $oldTarget, $ignoreCollisions, $oldPriority) unless $oldTarget eq "";
-    findFilesInDir($relName, $target, $ignoreCollisions, $priority);
+    findFilesInDir($relName, $oldTarget, $ignoreCollisions, $checkCollisionContents, $oldPriority) unless $oldTarget eq "";
+    findFilesInDir($relName, $target, $ignoreCollisions, $checkCollisionContents, $priority);
 
     $symlinks{$relName} = ["", $priority]; # denotes directory
 }
@@ -98,12 +117,12 @@ my %done;
 my %postponed;
 
 sub addPkg {
-    my ($pkgDir, $ignoreCollisions, $priority)  = @_;
+    my ($pkgDir, $ignoreCollisions, $checkCollisionContents, $priority)  = @_;
 
     return if (defined $done{$pkgDir});
     $done{$pkgDir} = 1;
 
-    findFiles("", $pkgDir, "", $ignoreCollisions, $priority);
+    findFiles("", $pkgDir, "", $ignoreCollisions, $checkCollisionContents, $priority);
 
     my $propagatedFN = "$pkgDir/nix-support/propagated-user-env-packages";
     if (-e $propagatedFN) {
@@ -132,7 +151,11 @@ if (exists $ENV{"pkgsPath"}) {
 # user.
 for my $pkg (@{decode_json $pkgs}) {
     for my $path (@{$pkg->{paths}}) {
-        addPkg($path, $ENV{"ignoreCollisions"} eq "1", $pkg->{priority}) if -e $path;
+        addPkg($path,
+               $ENV{"ignoreCollisions"} eq "1",
+               $ENV{"checkCollisionContents"} eq "1",
+               $pkg->{priority})
+           if -e $path;
     }
 }
 
@@ -146,7 +169,7 @@ while (scalar(keys %postponed) > 0) {
     my @pkgDirs = keys %postponed;
     %postponed = ();
     foreach my $pkgDir (sort @pkgDirs) {
-        addPkg($pkgDir, 2, $priorityCounter++);
+        addPkg($pkgDir, 2, $ENV{"checkCollisionContents"} eq "1", $priorityCounter++);
     }
 }
 
diff --git a/pkgs/build-support/buildenv/default.nix b/pkgs/build-support/buildenv/default.nix
index 1a0726d1543..5de02c8ed25 100644
--- a/pkgs/build-support/buildenv/default.nix
+++ b/pkgs/build-support/buildenv/default.nix
@@ -16,6 +16,10 @@
 , # Whether to ignore collisions or abort.
   ignoreCollisions ? false
 
+, # If there is a collision, check whether the contents and permissions match
+  # and only if not, throw a collision error.
+  checkCollisionContents ? true
+
 , # The paths (relative to each element of `paths') that we want to
   # symlink (e.g., ["/bin"]).  Any file not inside any of the
   # directories in the list is not symlinked.
@@ -35,7 +39,9 @@
 }:
 
 runCommand name
-  rec { inherit manifest ignoreCollisions passthru meta pathsToLink extraPrefix postBuild buildInputs;
+  rec {
+    inherit manifest ignoreCollisions checkCollisionContents passthru
+            meta pathsToLink extraPrefix postBuild buildInputs;
     pkgs = builtins.toJSON (map (drv: {
       paths = [ drv ];
       priority = drv.meta.priority or 5;
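
Note on the buildenv changes: builder.pl gains a checkCollisionContents mode, enabled by default in default.nix. On a path collision it now compares the two files' permission bits and, via File::Compare, their contents; only genuinely different files still abort the build, while byte-identical duplicates are tolerated. A hedged usage sketch (the new option name comes straight from the diff, the package choices are illustrative):

    # Hedged sketch: buildEnv with the new collision handling.
    { pkgs ? import <nixpkgs> {} }:

    pkgs.buildEnv {
      name = "example-profile";
      paths = [ pkgs.coreutils pkgs.findutils ];
      pathsToLink = [ "/bin" "/share/man" ];
      ignoreCollisions = false;       # still fail on real collisions ...
      checkCollisionContents = true;  # ... but accept byte-identical duplicates (new)
    }
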
diff --git a/pkgs/build-support/cc-wrapper/cc-wrapper.sh b/pkgs/build-support/cc-wrapper/cc-wrapper.sh
index 5bd59f8c585..6e12a0d8bc8 100644
--- a/pkgs/build-support/cc-wrapper/cc-wrapper.sh
+++ b/pkgs/build-support/cc-wrapper/cc-wrapper.sh
@@ -1,4 +1,8 @@
 #! @shell@ -e
+path_backup="$PATH"
+if [ -n "@coreutils@" ]; then
+  PATH="@coreutils@/bin:@gnugrep@/bin"
+fi
 
 if [ -n "$NIX_CC_WRAPPER_START_HOOK" ]; then
     source "$NIX_CC_WRAPPER_START_HOOK"
@@ -141,4 +145,5 @@ if [ -n "$NIX_CC_WRAPPER_EXEC_HOOK" ]; then
     source "$NIX_CC_WRAPPER_EXEC_HOOK"
 fi
 
+PATH="$path_backup"
 exec @prog@ ${extraBefore[@]} "${params[@]}" "${extraAfter[@]}"
diff --git a/pkgs/build-support/cc-wrapper/default.nix b/pkgs/build-support/cc-wrapper/default.nix
index 2eadb89e0c1..0458eedc62b 100644
--- a/pkgs/build-support/cc-wrapper/default.nix
+++ b/pkgs/build-support/cc-wrapper/default.nix
@@ -9,13 +9,14 @@
 , cc ? null, libc ? null, binutils ? null, coreutils ? null, shell ? stdenv.shell
 , zlib ? null, extraPackages ? [], extraBuildCommands ? ""
 , dyld ? null # TODO: should this be a setup-hook on dyld?
-, isGNU ? false, isClang ? cc.isClang or false
+, isGNU ? false, isClang ? cc.isClang or false, gnugrep ? null
 }:
 
 with stdenv.lib;
 
 assert nativeTools -> nativePrefix != "";
-assert !nativeTools -> cc != null && binutils != null && coreutils != null;
+assert !nativeTools ->
+  cc != null && binutils != null && coreutils != null && gnugrep != null;
 assert !nativeLibc -> libc != null;
 
 # For ghdl (the vhdl language provider to gcc) we need zlib in the wrapper.
@@ -30,9 +31,9 @@ let
   libc_dev = if nativeLibc then null else libc.dev or libc;
   libc_lib = if nativeLibc then null else libc.out or libc;
   cc_solib = cc.lib or cc;
-  binutils_bin = if nativeTools then "$binutils" else binutils.bin or binutils;
-  # The wrapper scripts use 'cat', so we may need coreutils.
-  coreutils_bin = if nativeTools then null else coreutils.bin or coreutils;
+  binutils_bin = if nativeTools then "" else binutils.bin or binutils;
+  # The wrapper scripts use 'cat' and 'grep', so we may need coreutils.
+  coreutils_bin = if nativeTools then "" else coreutils.bin or coreutils;
 in
 
 stdenv.mkDerivation {
@@ -43,6 +44,7 @@ stdenv.mkDerivation {
   preferLocalBuild = true;
 
   inherit cc shell libc_bin libc_dev libc_lib binutils_bin coreutils_bin;
+  gnugrep = if nativeTools then "" else gnugrep;
 
   passthru = { inherit libc nativeTools nativeLibc nativePrefix isGNU isClang; };
 
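
Note on the cc-wrapper changes: each wrapper script now saves the caller's PATH, pins PATH to coreutils (plus gnugrep for cc-wrapper.sh) while the wrapper runs, and restores it just before exec'ing the real tool; default.nix therefore takes a gnugrep argument and asserts it is set whenever nativeTools is false. A hedged sketch of a call site (the real call site lives in the stdenv bootstrap and is not shown here; the compiler and libc choices are illustrative):

    # Hedged sketch: instantiating cc-wrapper with the new gnugrep argument.
    with import <nixpkgs> {};

    callPackage ../build-support/cc-wrapper {
      nativeTools = false;
      nativeLibc  = false;
      cc        = gcc.cc;       # an unwrapped compiler (illustrative)
      libc      = glibc;
      binutils  = binutils;
      coreutils = coreutils;
      gnugrep   = gnugrep;      # required by the new assertion when !nativeTools
    }
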
diff --git a/pkgs/build-support/cc-wrapper/gnat-wrapper.sh b/pkgs/build-support/cc-wrapper/gnat-wrapper.sh
index 3514ccd6732..ae46b40ac63 100644
--- a/pkgs/build-support/cc-wrapper/gnat-wrapper.sh
+++ b/pkgs/build-support/cc-wrapper/gnat-wrapper.sh
@@ -1,4 +1,8 @@
 #! @shell@ -e
+path_backup="$PATH"
+if [ -n "@coreutils@" ]; then
+  PATH="@coreutils@/bin"
+fi
 
 if [ -n "$NIX_GNAT_WRAPPER_START_HOOK" ]; then
     source "$NIX_GNAT_WRAPPER_START_HOOK"
@@ -100,4 +104,5 @@ if [ -n "$NIX_GNAT_WRAPPER_EXEC_HOOK" ]; then
     source "$NIX_GNAT_WRAPPER_EXEC_HOOK"
 fi
 
+PATH="$path_backup"
 exec @prog@ ${extraBefore[@]} "${params[@]}" ${extraAfter[@]}
diff --git a/pkgs/build-support/cc-wrapper/ld-wrapper.sh b/pkgs/build-support/cc-wrapper/ld-wrapper.sh
index 30c531b7647..6ef06eb7034 100644
--- a/pkgs/build-support/cc-wrapper/ld-wrapper.sh
+++ b/pkgs/build-support/cc-wrapper/ld-wrapper.sh
@@ -1,4 +1,8 @@
 #! @shell@ -e
+path_backup="$PATH"
+if [ -n "@coreutils@" ]; then
+  PATH="@coreutils@/bin"
+fi
 
 if [ -n "$NIX_LD_WRAPPER_START_HOOK" ]; then
     source "$NIX_LD_WRAPPER_START_HOOK"
@@ -163,4 +167,5 @@ if [ -n "$NIX_LD_WRAPPER_EXEC_HOOK" ]; then
     source "$NIX_LD_WRAPPER_EXEC_HOOK"
 fi
 
+PATH="$path_backup"
 exec @prog@ ${extraBefore[@]} "${params[@]}" ${extra[@]}
diff --git a/pkgs/build-support/docker/detjson.py b/pkgs/build-support/docker/detjson.py
index ba2c20a475a..439c2131387 100644
--- a/pkgs/build-support/docker/detjson.py
+++ b/pkgs/build-support/docker/detjson.py
@@ -24,9 +24,11 @@ SAFEDELS["container_config"] = SAFEDELS["config"]
 
 def makedet(j, safedels):
     for k,v in safedels.items():
+        if k not in j:
+            continue
         if type(v) == dict:
             makedet(j[k], v)
-        elif k in j and j[k] == v:
+        elif j[k] == v:
             del j[k]
 
 def main():
diff --git a/pkgs/build-support/docker/pull.nix b/pkgs/build-support/docker/pull.nix
index 7115a83df42..a5e7acaf159 100644
--- a/pkgs/build-support/docker/pull.nix
+++ b/pkgs/build-support/docker/pull.nix
@@ -8,13 +8,14 @@
 { imageName, imageTag ? "latest", imageId ? null
 , sha256, name ? "${imageName}-${imageTag}"
 , indexUrl ? "https://index.docker.io"
-, registryUrl ? "https://registry-1.docker.io"
 , registryVersion ? "v1"
 , curlOpts ? "" }:
 
+assert registryVersion == "v1";
+
 let layer = stdenv.mkDerivation {
   inherit name imageName imageTag imageId
-          indexUrl registryUrl registryVersion curlOpts;
+          indexUrl registryVersion curlOpts;
 
   builder = ./pull.sh;
   detjson = ./detjson.py;
@@ -34,10 +35,6 @@ let layer = stdenv.mkDerivation {
 
     # This variable allows the user to pass additional options to curl
     "NIX_CURL_FLAGS"
-
-    # This variable allows overriding the timeout for connecting to
-    # the hashed mirrors.
-    "NIX_CONNECT_TIMEOUT"
   ];
   
   # Doing the download on a remote machine just duplicates network
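
Note on the docker fetcher: pull.nix no longer takes a registryUrl. The registry endpoint is discovered at fetch time from the index server's X-Docker-Endpoints header (see pull.sh below), and only the v1 registry protocol is supported, which the new assertion makes explicit. A hedged usage sketch, assuming this expression is exposed as dockerTools.pullImage:

    # Hedged sketch: pulling an image with the revised fetcher.
    # No registryUrl any more; the endpoint comes from the index server.
    pkgs.dockerTools.pullImage {
      imageName = "library/busybox";   # illustrative image
      imageTag  = "latest";
      sha256    = "0000000000000000000000000000000000000000000000000000";  # placeholder
    }
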
diff --git a/pkgs/build-support/docker/pull.sh b/pkgs/build-support/docker/pull.sh
index 8a0782780af..7ba146e9de0 100644
--- a/pkgs/build-support/docker/pull.sh
+++ b/pkgs/build-support/docker/pull.sh
@@ -6,17 +6,20 @@ source $stdenv/setup
 # servers to need them during redirects, and work on SSL without a
 # certificate (this isn't a security problem because we check the
 # cryptographic hash of the output anyway).
-curl="curl \
- --location --max-redirs 20 \
- --retry 3 \
- --fail \
- --disable-epsv \
- --cookie-jar cookies \
- --insecure \
- $curlOpts \
- $NIX_CURL_FLAGS"
-
-baseUrl="$registryUrl/$registryVersion"
+curl=$(command -v curl)
+curl() {
+  [[ -n ${token:-} ]] && set -- -H "Authorization: Token $token" "$@"
+  $curl \
+    --location --max-redirs 20 \
+    --retry 3 \
+    --fail \
+    --disable-epsv \
+    --cookie-jar cookies \
+    --insecure \
+    $curlOpts \
+    $NIX_CURL_FLAGS \
+    "$@"
+}
 
 fetchLayer() {
     local url="$1"
@@ -26,7 +29,7 @@ fetchLayer() {
     # if we get error code 18, resume partial download
     while [ $curlexit -eq 18 ]; do
         # keep this inside an if statement, since on failure it doesn't abort the script
-        if $curl -H "Authorization: Token $token" "$url" --output "$dest"; then
+        if curl -C - "$url" --output "$dest"; then
             return 0
         else
             curlexit=$?;
@@ -36,17 +39,25 @@ fetchLayer() {
     return $curlexit
 }
 
-token="$($curl -o /dev/null -D- -H 'X-Docker-Token: true' "$indexUrl/$registryVersion/repositories/$imageName/images" | grep X-Docker-Token | tr -d '\r' | cut -d ' ' -f 2)"
+headers=$(curl -o /dev/null -D- -H 'X-Docker-Token: true' \
+          "$indexUrl/$registryVersion/repositories/$imageName/images")
+
+header() {
+  grep $1 <<< "$headers" | tr -d '\r' | cut -d ' ' -f 2
+}
 
-if [ -z "$token" ]; then
-    echo "error: registry returned no token"
-    exit 1
+# this only takes the first endpoint, more may be provided
+# https://docs.docker.com/v1.6/reference/api/docker-io_api/
+if ! registryUrl=$(header X-Docker-Endpoints); then
+  echo "error: index returned no endpoint"
+  exit 1
 fi
+baseUrl="https://$registryUrl/$registryVersion"
 
-# token="${token//\"/\\\"}"
+token="$(header X-Docker-Token || true)";
 
 if [ -z "$imageId" ]; then
-    imageId="$($curl -H "Authorization: Token $token" "$baseUrl/repositories/$imageName/tags/$imageTag")"
+    imageId="$(curl "$baseUrl/repositories/$imageName/tags/$imageTag")"
     imageId="${imageId//\"/}"
     if [ -z "$imageId" ]; then
         echo "error: no image ID found for ${imageName}:${imageTag}"
@@ -62,7 +73,7 @@ jshon -n object \
   -n object -s "$imageId" -i "$imageTag" \
   -i "$imageName" > $out/repositories
 
-$curl -H "Authorization: Token $token" "$baseUrl/images/$imageId/ancestry" -o ancestry.json
+curl "$baseUrl/images/$imageId/ancestry" -o ancestry.json
 
 layerIds=$(jshon -a -u < ancestry.json)
 for layerId in $layerIds; do
@@ -70,6 +81,6 @@ for layerId in $layerIds; do
     
     mkdir "$out/$layerId"
     echo '1.0' > "$out/$layerId/VERSION"
-    $curl -H "Authorization: Token $token" "$baseUrl/images/$layerId/json" | python $detjson > "$out/$layerId/json"
+    curl "$baseUrl/images/$layerId/json" | python $detjson > "$out/$layerId/json"
     fetchLayer "$baseUrl/images/$layerId/layer" "$out/$layerId/layer.tar"
-done
\ No newline at end of file
+done
diff --git a/pkgs/build-support/emacs/elpa.nix b/pkgs/build-support/emacs/elpa.nix
new file mode 100644
index 00000000000..79a26abcb83
--- /dev/null
+++ b/pkgs/build-support/emacs/elpa.nix
@@ -0,0 +1,30 @@
+# builder for Emacs packages built for packages.el
+
+{ lib, stdenv, fetchurl, emacs, texinfo }:
+
+with lib;
+
+{ pname
+, version
+, src
+, ...
+}@args:
+
+import ./generic.nix { inherit lib stdenv emacs texinfo; } ({
+
+  phases = "installPhase fixupPhase distPhase";
+
+  installPhase = ''
+    runHook preInstall
+
+    emacs --batch -Q -l ${./elpa2nix.el} \
+        -f elpa2nix-install-package \
+        "${src}" "$out/share/emacs/site-lisp/elpa"
+
+    runHook postInstall
+  '';
+}
+
+// removeAttrs args [ "files" "fileSpecs"
+                      "meta"
+                    ])
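
elpa.nix is a new builder for packages distributed through package.el archives such as GNU ELPA: it skips the usual unpack and build phases and installs the .el or .tar file directly via the elpa2nix-install-package helper below. A hedged sketch of how it might be invoked; the elpaBuild binding and the package are illustrative, only pname/version/src come from the builder's signature:

    # Hedged sketch: calling the new ELPA builder directly.
    with import <nixpkgs> {};

    let
      elpaBuild = callPackage ../build-support/emacs/elpa.nix { };   # wiring is hypothetical
    in elpaBuild {
      pname   = "example";
      version = "1.0";
      src = fetchurl {
        url    = "https://elpa.gnu.org/packages/example-1.0.el";     # illustrative URL
        sha256 = "0000000000000000000000000000000000000000000000000000";  # placeholder
      };
    }
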
diff --git a/pkgs/build-support/emacs/elpa2nix.el b/pkgs/build-support/emacs/elpa2nix.el
new file mode 100644
index 00000000000..7eef81b9e7a
--- /dev/null
+++ b/pkgs/build-support/emacs/elpa2nix.el
@@ -0,0 +1,30 @@
+(require 'package)
+(package-initialize)
+
+(defun elpa2nix-install-package ()
+  (if (not noninteractive)
+      (error "`elpa2nix-install-package' is to be used only with -batch"))
+  (pcase command-line-args-left
+    (`(,archive ,elpa)
+     (progn (setq package-user-dir elpa)
+            (elpa2nix-install-file archive)))))
+
+(defun elpa2nix-install-from-buffer ()
+  "Install a package from the current buffer."
+  (let ((pkg-desc (if (derived-mode-p 'tar-mode)
+                      (package-tar-file-info)
+                    (package-buffer-info))))
+    ;; Install the package itself.
+    (package-unpack pkg-desc)
+    pkg-desc))
+
+(defun elpa2nix-install-file (file)
+  "Install a package from a file.
+The file can either be a tar file or an Emacs Lisp file."
+  (let ((is-tar (string-match "\\.tar\\'" file)))
+    (with-temp-buffer
+      (if is-tar
+          (insert-file-contents-literally file)
+        (insert-file-contents file))
+      (when is-tar (tar-mode))
+      (elpa2nix-install-from-buffer))))
diff --git a/pkgs/build-support/emacs/generic.nix b/pkgs/build-support/emacs/generic.nix
index d41f90ebd05..6fa27f09839 100644
--- a/pkgs/build-support/emacs/generic.nix
+++ b/pkgs/build-support/emacs/generic.nix
@@ -32,8 +32,11 @@ stdenv.mkDerivation ({
   unpackCmd = ''
     case "$curSrc" in
       *.el)
-        cp $curSrc $pname.el
-        chmod +w $pname.el
+        # keep original source filename without the hash
+        local filename=$(basename "$curSrc")
+        filename="''${filename:33}"
+        cp $curSrc $filename
+        chmod +w $filename
         sourceRoot="."
         ;;
       *)
diff --git a/pkgs/build-support/emacs/melpa.nix b/pkgs/build-support/emacs/melpa.nix
index 8fd2a00b50a..3b8a23d8c2a 100644
--- a/pkgs/build-support/emacs/melpa.nix
+++ b/pkgs/build-support/emacs/melpa.nix
@@ -28,8 +28,6 @@ let
     sha256 = "1biwg2pqmmdz5iwqbjdszljazqymvgyyjcnc255nr6qz8mhnx67j";
   };
 
-  fname = "${pname}-${version}";
-
   targets = concatStringsSep " " (if files == null then fileSpecs else files);
 
   defaultMeta = {
@@ -41,31 +39,33 @@ in
 import ./generic.nix { inherit lib stdenv emacs texinfo; } ({
   inherit packageBuild;
 
-  buildPhase = ''
-    runHook preBuild
+  buildPhase =
+    if recipeFile == null
+      then ''
+        runHook preBuild
 
-    emacs --batch -Q -l $packageBuild -l ${./melpa2nix.el} \
-    ${if recipeFile == null
-      then
-      ''
-      -f melpa2nix-build-package \
-      ${pname} ${version} ${targets}
-      ''
-      else
+        export archive=$(emacs --batch -Q -l $packageBuild -l ${./melpa2nix.el} \
+            -f melpa2nix-build-package \
+            ${pname} ${version} ${targets})
+
+        runHook postBuild
       ''
-      -f melpa2nix-build-package-from-recipe \
-      ${pname} ${version} ${recipeFile}
-      ''}
+      else ''
+        runHook preBuild
 
-    runHook postBuild
-  '';
+        export archive=$(emacs --batch -Q -l $packageBuild -l ${./melpa2nix.el} \
+            -f melpa2nix-build-package-from-recipe \
+            ${recipeFile} ${version})
+
+        runHook postBuild
+      '';
 
   installPhase = ''
     runHook preInstall
 
-    emacs --batch -Q -l $packageBuild -l ${./melpa2nix.el} \
-      -f melpa2nix-install-package \
-      ${fname}.* $out/share/emacs/site-lisp/elpa
+    emacs --batch -Q -l ${./elpa2nix.el} \
+        -f elpa2nix-install-package \
+        "$archive" "$out/share/emacs/site-lisp/elpa"
 
     runHook postInstall
   '';
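
Note on the MELPA builder: buildPhase now captures, in $archive, the archive file name that melpa2nix prints on stdout (see melpa2nix.el below), and installation is delegated to the same elpa2nix-install-package used by elpa.nix, so the old fname guess derived from pname and version is gone. A hedged sketch of a package definition; the melpaBuild binding and the recipe are illustrative, while pname, version and recipeFile appear in the builder's arguments above:

    # Hedged sketch: building a package from a MELPA-style recipe.
    with import <nixpkgs> {};

    let
      melpaBuild = callPackage ../build-support/emacs/melpa.nix { };  # wiring is hypothetical
    in melpaBuild {
      pname   = "example-mode";
      version = "20160101";
      src = fetchFromGitHub {
        owner  = "example";
        repo   = "example-mode";
        rev    = "0000000000000000000000000000000000000000";              # placeholder
        sha256 = "0000000000000000000000000000000000000000000000000000";  # placeholder
      };
      recipeFile = writeText "recipe" ''
        (example-mode :repo "example/example-mode" :fetcher github)
      '';
    }
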
diff --git a/pkgs/build-support/emacs/melpa2nix.el b/pkgs/build-support/emacs/melpa2nix.el
index f1309fc0d57..3cd5bbdb954 100644
--- a/pkgs/build-support/emacs/melpa2nix.el
+++ b/pkgs/build-support/emacs/melpa2nix.el
@@ -6,14 +6,6 @@
 (setq package-build-working-dir (expand-file-name ".")
       package-build-archive-dir (expand-file-name "."))
 
-(defun melpa2nix-install-package ()
-  (if (not noninteractive)
-      (error "`melpa2nix-install-package' is to be used only with -batch"))
-  (pcase command-line-args-left
-    (`(,archive ,elpa)
-     (progn (setq package-user-dir elpa)
-            (package-install-file archive)))))
-
 (defun melpa2nix-build-package ()
   (if (not noninteractive)
       (error "`melpa2nix-build-package' is to be used only with -batch"))
@@ -25,8 +17,10 @@
   (if (not noninteractive)
       (error "`melpa2nix-build-package' is to be used only with -batch"))
   (pcase command-line-args-left
-    (`(,package ,version ,recipe-file)
-     (let* ((rcp (cdr (package-build--read-from-file recipe-file)))
+    (`(,recipe-file ,version)
+     (let* ((recipe (package-build--read-from-file recipe-file))
+            (rcp (cdr recipe))
+            (package (car recipe))
             (files (package-build--config-file-list rcp)))
        (melpa2nix-package-build-archive package version files)))))
 
@@ -38,8 +32,11 @@
                                                version
                                                files
                                                package-build-working-dir
-                                               package-build-archive-dir)))
+                                               package-build-archive-dir))
+         (archive-file (package-build--archive-file-name archive-entry)))
 
-    (package-build--message "Built in %.3fs, finished at %s"
+    (progn
+      (package-build--message "Built in %.3fs, finished at %s"
                             (time-to-seconds (time-since start-time))
-                            (current-time-string))))
+                            (current-time-string))
+      (princ (format "%s\n" archive-file)))))
diff --git a/pkgs/build-support/fetchgit/default.nix b/pkgs/build-support/fetchgit/default.nix
index 127693d42f2..7f98c97fc55 100644
--- a/pkgs/build-support/fetchgit/default.nix
+++ b/pkgs/build-support/fetchgit/default.nix
@@ -61,7 +61,7 @@ stdenv.mkDerivation {
     # easy proxy configuration.  This is impure, but a fixed-output
     # derivation like fetchurl is allowed to do so since its result is
     # by definition pure.
-    "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy"
+    "http_proxy" "https_proxy" "ftp_proxy" "all_proxy" "no_proxy" "GIT_PROXY_COMMAND" "SOCKS_SERVER"
     ];
 
   preferLocalBuild = true;
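
Note on fetchgit: GIT_PROXY_COMMAND and SOCKS_SERVER are added to the impure environment variables passed through to the fixed-output derivation, so git's proxy helpers keep working during the fetch. The calling interface is unchanged; for reference, a minimal call with illustrative values:

    # Hedged sketch: an ordinary fetchgit call. Proxy configuration is taken
    # from the caller's environment (http_proxy, GIT_PROXY_COMMAND, SOCKS_SERVER, ...).
    pkgs.fetchgit {
      url    = "https://example.org/some-repo.git";   # illustrative repository
      rev    = "refs/tags/v1.0";
      sha256 = "0000000000000000000000000000000000000000000000000000";  # placeholder
      fetchSubmodules = false;
    }
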
diff --git a/pkgs/build-support/fetchgit/nix-prefetch-git b/pkgs/build-support/fetchgit/nix-prefetch-git
index fbefba5ccc0..6cf694dd2b1 100755
--- a/pkgs/build-support/fetchgit/nix-prefetch-git
+++ b/pkgs/build-support/fetchgit/nix-prefetch-git
@@ -24,6 +24,22 @@ else
     leaveDotGit=true
 fi
 
+usage(){
+    echo  >&2 "syntax: nix-prefetch-git [options] [URL [REVISION [EXPECTED-HASH]]]
+
+Options:
+      --out path      Path where the output would be stored.
+      --url url       Any url understand by 'git clone'.
+      --rev ref       Any sha1 or references (such as refs/heads/master)
+      --hash h        Expected hash.
+      --deepClone     Clone submodules recursively.
+      --no-deepClone  Do not clone submodules.
+      --leave-dotGit  Keep the .git directories.
+      --fetch-submodules Fetch submodules.
+      --builder       Clone as fetchgit does, but url, rev, and out option are mandatory.
+"
+    exit 1
+}
 
 argi=0
 argfun=""
@@ -40,6 +56,7 @@ for arg; do
             --leave-dotGit) leaveDotGit=true;;
             --fetch-submodules) fetchSubmodules=true;;
             --builder) builder=true;;
+            --help) usage; exit;;
             *)
                 argi=$(($argi + 1))
                 case $argi in
@@ -61,23 +78,6 @@ for arg; do
     fi
 done
 
-usage(){
-    echo  >&2 "syntax: nix-prefetch-git [options] [URL [REVISION [EXPECTED-HASH]]]
-
-Options:
-      --out path      Path where the output would be stored.
-      --url url       Any url understand by 'git clone'.
-      --rev ref       Any sha1 or references (such as refs/heads/master)
-      --hash h        Expected hash.
-      --deepClone     Clone submodules recursively.
-      --no-deepClone  Do not clone submodules.
-      --leave-dotGit  Keep the .git directories.
-      --fetch-submodules Fetch submodules.
-      --builder       Clone as fetchgit does, but url, rev, and out option are mandatory.
-"
-    exit 1
-}
-
 if test -z "$url"; then
     usage
 fi
diff --git a/pkgs/build-support/fetchurl/builder.sh b/pkgs/build-support/fetchurl/builder.sh
index 29565d7cdb9..c4fd18e46ca 100644
--- a/pkgs/build-support/fetchurl/builder.sh
+++ b/pkgs/build-support/fetchurl/builder.sh
@@ -45,6 +45,11 @@ tryDownload() {
 
 finish() {
     set +o noglob
+
+    if [[ $executable == "1" ]]; then
+      chmod +x $downloadedFile
+    fi
+
     runHook postFetch
     stopNest
     exit 0
diff --git a/pkgs/build-support/fetchurl/default.nix b/pkgs/build-support/fetchurl/default.nix
index b1dc6e7be31..804974954d1 100644
--- a/pkgs/build-support/fetchurl/default.nix
+++ b/pkgs/build-support/fetchurl/default.nix
@@ -73,6 +73,9 @@ in
   # is communicated to postFetch via $downloadedFile.
   downloadToTemp ? false
 
+, # If true, set executable bit on downloaded file
+  executable ? false
+
 , # If set, don't download the file, but write a list of all possible
   # URLs (resulting from resolving mirror:// URLs) to $out.
   showURLs ? false
@@ -116,9 +119,9 @@ if (!hasHash) then throw "Specify hash for fetchurl fixed-output derivation: ${s
   outputHash = if outputHash != "" then outputHash else
       if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
 
-  outputHashMode = if recursiveHash then "recursive" else "flat";
+  outputHashMode = if (recursiveHash || executable) then "recursive" else "flat";
 
-  inherit curlOpts showURLs mirrorsFile impureEnvVars postFetch downloadToTemp;
+  inherit curlOpts showURLs mirrorsFile impureEnvVars postFetch downloadToTemp executable;
 
   # Doing the download on a remote machine just duplicates network
   # traffic, so don't do that.
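
Note on fetchurl: the new executable flag makes builder.sh chmod +x the downloaded file, and because a flat-hash fixed-output derivation cannot record an executable bit, default.nix switches to recursive (NAR) hashing whenever the flag is set. A minimal usage sketch with placeholder values:

    # Hedged sketch: fetching a file that should end up executable.
    # executable = true implies outputHashMode = "recursive", so the
    # expected hash is a NAR hash rather than a flat file hash.
    pkgs.fetchurl {
      url        = "https://example.org/downloads/some-tool";   # illustrative URL
      sha256     = "0000000000000000000000000000000000000000000000000000";  # placeholder
      executable = true;
    }
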
diff --git a/pkgs/build-support/grsecurity/default.nix b/pkgs/build-support/grsecurity/default.nix
index e3b257a82d0..474dbfaff4d 100644
--- a/pkgs/build-support/grsecurity/default.nix
+++ b/pkgs/build-support/grsecurity/default.nix
@@ -33,7 +33,7 @@ let
 
     grKernel = if cfg.stable
                then mkKernel pkgs.linux_3_14 stable-patch
-               else mkKernel pkgs.linux_4_2 test-patch;
+               else mkKernel pkgs.linux_4_3 test-patch;
 
     ## -- grsecurity configuration ---------------------------------------------
 
@@ -142,6 +142,7 @@ let
         };
         extraConfig = grsecConfig;
         features.grsecurity = true;
+        ignoreConfigErrors = true; # Too lazy to model the config options that work with grsecurity and don't for now
       })) (args: grsecurityOverrider args grkern));
 
     mkGrsecPkg = grkern: pkgs.linuxPackagesFor grkern (mkGrsecPkg grkern);
diff --git a/pkgs/build-support/release/source-tarball.nix b/pkgs/build-support/release/source-tarball.nix
index b50077975c1..376cd0e1d15 100644
--- a/pkgs/build-support/release/source-tarball.nix
+++ b/pkgs/build-support/release/source-tarball.nix
@@ -66,7 +66,7 @@ stdenv.mkDerivation (
               KEEPBUILDDIR="$out/`basename $TMPDIR`"
               header "Copying build directory to $KEEPBUILDDIR"
               mkdir -p $KEEPBUILDDIR
-              cp -R $TMPDIR/* $KEEPBUILDDIR
+              cp -R "$TMPDIR/"* $KEEPBUILDDIR
               stopNest
           fi
       fi
diff --git a/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh b/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh
index 9e325106f82..fe3458cd21e 100644
--- a/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh
+++ b/pkgs/build-support/setup-hooks/set-source-date-epoch-to-latest.sh
@@ -27,5 +27,7 @@ updateSourceDateEpoch() {
 postUnpackHooks+=(_updateSourceDateEpochFromSourceRoot)
 
 _updateSourceDateEpochFromSourceRoot() {
-    updateSourceDateEpoch "$sourceRoot"
+    if [ -n "$sourceRoot" ]; then
+        updateSourceDateEpoch "$sourceRoot"
+    fi
 }