summary refs log tree commit diff
diff options
context:
space:
mode:
-rw-r--r--pkgs/build-support/fetchurl/builder.sh8
-rw-r--r--pkgs/build-support/fetchurl/default.nix22
-rw-r--r--pkgs/build-support/fetchzip/default.nix42
-rw-r--r--pkgs/tools/networking/dd-agent/default.nix6
-rw-r--r--pkgs/top-level/all-packages.nix2
5 files changed, 73 insertions, 7 deletions
diff --git a/pkgs/build-support/fetchurl/builder.sh b/pkgs/build-support/fetchurl/builder.sh
index 402fe0cba5b..0c6e16551b4 100644
--- a/pkgs/build-support/fetchurl/builder.sh
+++ b/pkgs/build-support/fetchurl/builder.sh
@@ -17,12 +17,16 @@ curl="curl \
  $NIX_CURL_FLAGS"
 
 
+downloadedFile="$out"
+if [ -n "$downloadToTemp" ]; then downloadedFile="$TMPDIR/file"; fi
+
+
 tryDownload() {
     local url="$1"
     echo
     header "trying $url"
     success=
-    if $curl --fail "$url" --output "$out"; then
+    if $curl --fail "$url" --output "$downloadedFile"; then
         success=1
     fi
     stopNest
@@ -30,6 +34,8 @@ tryDownload() {
 
 
 finish() {
+    set +o noglob
+    runHook postFetch
     stopNest
     exit 0
 }
diff --git a/pkgs/build-support/fetchurl/default.nix b/pkgs/build-support/fetchurl/default.nix
index af4a6700153..eac38a773c1 100644
--- a/pkgs/build-support/fetchurl/default.nix
+++ b/pkgs/build-support/fetchurl/default.nix
@@ -54,6 +54,9 @@ in
   # first element of `urls').
   name ? ""
 
+, # A string to be appended to the name, if the name is derived from `url'.
+  nameSuffix ? ""
+
   # Different ways of specifying the hash.
 , outputHash ? ""
 , outputHashAlgo ? ""
@@ -61,6 +64,17 @@ in
 , sha1 ? ""
 , sha256 ? ""
 
+, recursiveHash ? false
+
+, # Shell code executed after the file has been fetched
+  # successfully. This can do things like check or transform the file.
+  postFetch ? ""
+
+, # Whether to download to a temporary path rather than $out. Useful
+  # in conjunction with postFetch. The location of the temporary file
+  # is communicated to postFetch via $downloadedFile.
+  downloadToTemp ? false
+
 , # If set, don't download the file, but write a list of all possible
   # URLs (resulting from resolving mirror:// URLs) to $out.
   showURLs ? false
@@ -83,11 +97,11 @@ stdenv.mkDerivation {
   name =
     if showURLs then "urls"
     else if name != "" then name
-    else baseNameOf (toString (builtins.head urls_));
+    else baseNameOf (toString (builtins.head urls_)) + nameSuffix;
 
   builder = ./builder.sh;
 
-  buildInputs = [curl];
+  buildInputs = [ curl ];
 
   urls = urls_;
 
@@ -101,7 +115,9 @@ stdenv.mkDerivation {
   outputHash = if outputHash != "" then outputHash else
       if sha256 != "" then sha256 else if sha1 != "" then sha1 else md5;
 
-  inherit curlOpts showURLs mirrorsFile impureEnvVars;
+  outputHashMode = if recursiveHash then "recursive" else "flat";
+
+  inherit curlOpts showURLs mirrorsFile impureEnvVars postFetch downloadToTemp;
 
   # Doing the download on a remote machine just duplicates network
   # traffic, so don't do that.
diff --git a/pkgs/build-support/fetchzip/default.nix b/pkgs/build-support/fetchzip/default.nix
new file mode 100644
index 00000000000..6b77b6474ef
--- /dev/null
+++ b/pkgs/build-support/fetchzip/default.nix
@@ -0,0 +1,42 @@
+# This function downloads and unpacks a zip file. This is primarily
+# useful for dynamically generated zip files, such as GitHub's
+# /archive URLs, where the unpacked content of the zip file doesn't
+# change, but the zip file itself may (e.g. due to minor changes in
+# the compression algorithm, or changes in timestamps).
+
+{ lib, fetchurl, unzip }:
+
+{ # Optionally move the contents of the unpacked tree up one level.
+  stripRoot ? true
+, ... } @ args:
+
+fetchurl (args // {
+  # Apply a suffix to the name. Otherwise, unpackPhase will get
+  # confused by the .zip extension.
+  nameSuffix = "-unpacked";
+
+  recursiveHash = true;
+
+  downloadToTemp = true;
+
+  postFetch =
+    ''
+      export PATH=${unzip}/bin:$PATH
+      mkdir $out
+      cd $out
+      renamed="$TMPDIR/''${name%-unpacked}"
+      mv "$downloadedFile" "$renamed"
+      unpackFile "$renamed"
+    ''
+    # FIXME: handle zip files that contain a single regular file.
+    + lib.optionalString stripRoot ''
+      shopt -s dotglob
+      if [ "$(ls -d $out/* | wc -l)" != 1 ]; then
+        echo "error: zip file must contain a single directory."
+        exit 1
+      fi
+      fn=$(cd "$out" && echo *)
+      mv $out/$fn/* "$out/"
+      rmdir "$out/$fn"
+    '';
+})
diff --git a/pkgs/tools/networking/dd-agent/default.nix b/pkgs/tools/networking/dd-agent/default.nix
index 3f06f2d1354..ca240f76d84 100644
--- a/pkgs/tools/networking/dd-agent/default.nix
+++ b/pkgs/tools/networking/dd-agent/default.nix
@@ -1,13 +1,13 @@
-{ stdenv, fetchurl, python, pythonPackages, sysstat, unzip, tornado
+{ stdenv, fetchzip, python, pythonPackages, sysstat, unzip, tornado
 , makeWrapper }:
 
 stdenv.mkDerivation rec {
   version = "4.2.1";
   name = "dd-agent-${version}";
 
-  src = fetchurl {
+  src = fetchzip {
     url = "https://github.com/DataDog/dd-agent/archive/${version}.zip";
-    sha256 = "0s1lg7rqx86z0y111105gwkknzplq149cxd7v3yg30l22wn68dmv";
+    sha256 = "06f9nkvnpfzs2nw75cac2y9wnp2bay4sg94zz0wjm8886rigjjjm";
   };
 
   buildInputs = [ python unzip makeWrapper pythonPackages.psycopg2 ];
diff --git a/pkgs/top-level/all-packages.nix b/pkgs/top-level/all-packages.nix
index ea44c4934bb..f402eb50f48 100644
--- a/pkgs/top-level/all-packages.nix
+++ b/pkgs/top-level/all-packages.nix
@@ -338,6 +338,8 @@ let
   # linked curl in the case of stdenv-linux).
   fetchurlBoot = stdenv.fetchurlBoot;
 
+  fetchzip = import ../build-support/fetchzip { inherit lib fetchurl unzip; };
+
   resolveMirrorURLs = {url}: fetchurl {
     showURLs = true;
     inherit url;