author     Eelco Dolstra <eelco.dolstra@logicblox.com>  2014-05-08 14:57:20 +0200
committer  Eelco Dolstra <eelco.dolstra@logicblox.com>  2014-05-08 15:30:17 +0200
commit     c8df88885891bdb6d1f207cf203e4b0af48d5486
tree       1578f69f8f46789733fa03c21050eae7b6091b19 /pkgs/build-support
parent     2a43a4163a237b963bd68629735b7303e077ec94
Add a function "fetchzip"
This function downloads and unpacks a file in one fixed-output
derivation. This is primarily useful for dynamically generated zip
files, such as GitHub's /archive URLs, where the unpacked content of
the zip file doesn't change, but the zip file itself may (e.g. due to
minor changes in the compression algorithm, or changes in timestamps).
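
As a minimal sketch of the intended use, assuming fetchzip is in scope
(e.g. brought in via callPackage); the URL is a hypothetical example and
the hash is a placeholder, and note that because of recursiveHash the
hash covers the unpacked tree rather than the zip file itself:

    fetchzip {
      # Any GitHub /archive URL works the same way.
      url = "https://github.com/NixOS/patchelf/archive/0.8.zip";
      # Placeholder; the real value is the hash of the unpacked tree.
      sha256 = "0000000000000000000000000000000000000000000000000000";
    }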

Fetchzip is implemented by extending fetchurl with a "postFetch" hook
that is executed after the file has been downloaded. This hook can
thus perform arbitrary checks or transformations on the downloaded
file.
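
The hook is also usable from fetchurl directly. A sketch under assumed
inputs (hypothetical URL, placeholder hash): downloadToTemp keeps the raw
download out of $out, so postFetch decides what the output contains.

    fetchurl {
      url = "http://example.org/data.txt";
      sha256 = "0000000000000000000000000000000000000000000000000000";
      downloadToTemp = true;
      postFetch = ''
        # $downloadedFile points at the temporary download.
        # Reject downloads that lack an expected marker (illustrative
        # check), then install the file as the derivation output.
        grep -q "BEGIN DATA" "$downloadedFile"
        mv "$downloadedFile" "$out"
      '';
    }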
Diffstat (limited to 'pkgs/build-support')
-rw-r--r--  pkgs/build-support/fetchurl/builder.sh  |  8
-rw-r--r--  pkgs/build-support/fetchurl/default.nix | 22
-rw-r--r--  pkgs/build-support/fetchzip/default.nix | 42
3 files changed, 68 insertions(+), 4 deletions(-)
diff --git a/pkgs/build-support/fetchurl/builder.sh b/pkgs/build-support/fetchurl/builder.sh
index 402fe0cba5b..0c6e16551b4 100644
--- a/pkgs/build-support/fetchurl/builder.sh
+++ b/pkgs/build-support/fetchurl/builder.sh
@@ -17,12 +17,16 @@ curl="curl \
  $NIX_CURL_FLAGS"
 
 
+downloadedFile="$out"
+if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
+
+
 tryDownload() {
     local url="$1"
     echo
     header "trying $url"
     success=
-    if $curl --fail "$url" --output "$out"; then
+    if $curl --fail "$url" --output "$downloadedFile"; then
         success=1
     fi
     stopNest
@@ -30,6 +34,8 @@ tryDownload() {
 
 
 finish() {
+    set +o noglob
+    runHook postFetch
     stopNest
     exit 0
 }
diff --git a/pkgs/build-support/fetchurl/default.nix b/pkgs/build-support/fetchurl/default.nix
index af4a6700153..eac38a773c1 100644
--- a/pkgs/build-support/fetchurl/default.nix
+++ b/pkgs/build-support/fetchurl/default.nix
@@ -54,6 +54,9 @@ in
   # first element of `urls').
   name ? ""
 
+, # A string to be appended to the name, if the name is derived from `url'.
+  nameSuffix ? ""
+
   # Different ways of specifying the hash.
 , outputHash ? ""
 , outputHashAlgo ? ""
@@ -61,6 +64,17 @@ in
 , sha1 ? ""
 , sha256 ? ""
 
+, recursiveHash ? false
+
+, # Shell code executed after the file has been fetched
+  # successfully. This can do things like check or transform the file.
+  postFetch ? ""
+
+, # Whether to download to a temporary path rather than $out. Useful
+  # in conjunction with postFetch. The location of the temporary file
+  # is communicated to postFetch via $downloadedFile.
+  downloadToTemp ? false
+
 , # If set, don't download the file, but write a list of all possible
   # URLs (resulting from resolving mirror:// URLs) to $out.
   showURLs ? false
@@ -83,11 +97,11 @@ stdenv.mkDerivation {
   name =
     if showURLs then "urls"
     else if name != "" then name
-    else baseNameOf (toString (builtins.head urls_));
+    else baseNameOf (toString (builtins.head urls_)) + nameSuffix;
 
   builder = ./builder.sh;
 
-  buildInputs = [curl];
+  buildInputs = [ curl ];
 
   urls = urls_;
 
@@ -101,7 +115,9 @@ stdenv.mkDerivation {
   outputHash = if outputHash != "" then outputHash else
       if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
 
-  inherit curlOpts showURLs mirrorsFile impureEnvVars;
+  outputHashMode = if recursiveHash then "recursive" else "flat";
+
+  inherit curlOpts showURLs mirrorsFile impureEnvVars postFetch downloadToTemp;
 
   # Doing the download on a remote machine just duplicates network
   # traffic, so don't do that.
diff --git a/pkgs/build-support/fetchzip/default.nix b/pkgs/build-support/fetchzip/default.nix
new file mode 100644
index 00000000000..6b77b6474ef
--- /dev/null
+++ b/pkgs/build-support/fetchzip/default.nix
@@ -0,0 +1,42 @@
+# This function downloads and unpacks a zip file. This is primarily
+# useful for dynamically generated zip files, such as GitHub's
+# /archive URLs, where the unpacked content of the zip file doesn't
+# change, but the zip file itself may (e.g. due to minor changes in
+# the compression algorithm, or changes in timestamps).
+
+{ lib, fetchurl, unzip }:
+
+{ # Optionally move the contents of the unpacked tree up one level.
+  stripRoot ? true
+, ... } @ args:
+
+fetchurl (args // {
+  # Apply a suffix to the name. Otherwise, unpackPhase will get
+  # confused by the .zip extension.
+  nameSuffix = "-unpacked";
+
+  recursiveHash = true;
+
+  downloadToTemp = true;
+
+  postFetch =
+    ''
+      export PATH=${unzip}/bin:$PATH
+      mkdir $out
+      cd $out
+      renamed="$TMPDIR/''${name%-unpacked}"
+      mv "$downloadedFile" "$renamed"
+      unpackFile "$renamed"
+    ''
+    # FIXME: handle zip files that contain a single regular file.
+    + lib.optionalString stripRoot ''
+      shopt -s dotglob
+      if [ "$(ls -d $out/* | wc -l)" != 1 ]; then
+        echo "error: zip file must contain a single directory."
+        exit 1
+      fi
+      fn=$(cd "$out" && echo *)
+      mv $out/$fn/* "$out/"
+      rmdir "$out/$fn"
+    '';
+})
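
Note that stripRoot defaults to true, and the hook above fails unless the
unpacked tree contains exactly one top-level directory. An archive with
several top-level entries would therefore need stripRoot = false; a
sketch with a hypothetical URL and placeholder hash:

    fetchzip {
      url = "https://example.org/multi-toplevel.zip";
      sha256 = "0000000000000000000000000000000000000000000000000000";
      stripRoot = false;  # keep the archive's own top-level layout
    }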