author     Eelco Dolstra <eelco.dolstra@logicblox.com>    2015-10-07 12:40 +0200
committer  Eelco Dolstra <eelco.dolstra@logicblox.com>    2015-10-07 12:40 +0200
commit     b54f447df9def487317bfcf78628ebf207879c3a
tree       44320d8cfde11b2a50f0aea670d942f95ce3d6d1
parent     1abda8e17311fbfc27b751b90aff4f691f496b0f
nix-prefetch-url: Support unpacking tarballs
This allows nix-prefetch-url to prefetch the output of fetchzip and
its wrappers (like fetchFromGitHub). For example:

  $ nix-prefetch-url --unpack https://github.com/NixOS/patchelf/archive/0.8.tar.gz

or from a Nix expression:

  $ nix-prefetch-url -A nix-repl.src

In the latter case, --unpack can be omitted because nix-repl.src is a
fetchFromGitHub derivation and thus has "outputHashMode" set to
"recursive".
-rw-r--r--  doc/manual/command-ref/nix-prefetch-url.xml | 17
-rw-r--r--  src/nix-prefetch-url/nix-prefetch-url.cc    | 62
2 files changed, 63 insertions(+), 16 deletions(-)
diff --git a/doc/manual/command-ref/nix-prefetch-url.xml b/doc/manual/command-ref/nix-prefetch-url.xml
index 9cbaa42a1b1f..e1f0c8e5447c 100644
--- a/doc/manual/command-ref/nix-prefetch-url.xml
+++ b/doc/manual/command-ref/nix-prefetch-url.xml
@@ -81,6 +81,16 @@ downloaded file in the Nix store is also printed.</para>
 
   </varlistentry>
 
+  <varlistentry><term><option>--unpack</option></term>
+
+    <listitem><para>Unpack the archive (which must be a tarball or zip
+    file) and add the result to the Nix store. The resulting hash can
+    be used with functions such as Nixpkgs’s
+    <varname>fetchzip</varname> or
+    <varname>fetchFromGitHub</varname>.</para></listitem>
+
+  </varlistentry>
+
 </variablelist>
 
 </refsection>
@@ -94,7 +104,12 @@ $ nix-prefetch-url ftp://ftp.gnu.org/pub/gnu/hello/hello-2.10.tar.gz
 
 $ nix-prefetch-url --print-path mirror://gnu/hello/hello-2.10.tar.gz
 0ssi1wpaf7plaswqqjwigppsg5fyh99vdlb9kzl7c9lng89ndq1i
-/nix/store/3x7dwzq014bblazs7kq20p9hyzz0qh8g-hello-2.10.tar.gz</screen>
+/nix/store/3x7dwzq014bblazs7kq20p9hyzz0qh8g-hello-2.10.tar.gz
+
+$ nix-prefetch-url --unpack --print-path https://github.com/NixOS/patchelf/archive/0.8.tar.gz
+079agjlv0hrv7fxnx9ngipx14gyncbkllxrp9cccnh3a50fxcmy7
+/nix/store/19zrmhm3m40xxaw81c8cqm6aljgrnwj2-0.8.tar.gz
+</screen>
 
 </refsection>
 
diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc
index 112d303e0c16..5da8d968b9f9 100644
--- a/src/nix-prefetch-url/nix-prefetch-url.cc
+++ b/src/nix-prefetch-url/nix-prefetch-url.cc
@@ -53,6 +53,7 @@ int main(int argc, char * * argv)
         bool fromExpr = false;
         string attrPath;
         std::map<string, string> autoArgs_;
+        bool unpack = false;
 
         parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) {
             if (*arg == "--help")
@@ -71,6 +72,8 @@ int main(int argc, char * * argv)
                 fromExpr = true;
                 attrPath = getArg(*arg, arg, end);
             }
+            else if (*arg == "--unpack")
+                unpack = true;
             else if (parseAutoArgs(arg, end, autoArgs_))
                 ;
             else if (parseSearchPathArg(arg, end, searchPath))
@@ -103,13 +106,22 @@ int main(int argc, char * * argv)
             state.evalFile(path, vRoot);
             Value & v(*findAlongAttrPath(state, attrPath, autoArgs, vRoot));
             state.forceAttrs(v);
-            auto urls = v.attrs->find(state.symbols.create("urls"));
-            if (urls == v.attrs->end())
+
+            /* Extract the URI. */
+            auto attr = v.attrs->find(state.symbols.create("urls"));
+            if (attr == v.attrs->end())
                 throw Error("attribute set does not contain a ‘urls’ attribute");
-            state.forceList(*urls->value);
-            if (urls->value->listSize() < 1)
+            state.forceList(*attr->value);
+            if (attr->value->listSize() < 1)
                 throw Error("‘urls’ list is empty");
-            uri = state.forceString(*urls->value->listElems()[0]);
+            uri = state.forceString(*attr->value->listElems()[0]);
+
+            /* Extract the hash mode. */
+            attr = v.attrs->find(state.symbols.create("outputHashMode"));
+            if (attr == v.attrs->end())
+                printMsg(lvlInfo, "warning: this does not look like a fetchurl call");
+            else
+                unpack = state.forceString(*attr->value) == "recursive";
         }
 
         /* Figure out a name in the Nix store. */
@@ -123,7 +135,7 @@ int main(int argc, char * * argv)
         Path storePath;
         if (args.size() == 2) {
             expectedHash = parseHash16or32(ht, args[1]);
-            storePath = makeFixedOutputPath(false, ht, expectedHash, name);
+            storePath = makeFixedOutputPath(unpack, ht, expectedHash, name);
             if (store->isValidPath(storePath))
                 hash = expectedHash;
             else
@@ -134,28 +146,48 @@ int main(int argc, char * * argv)
 
             auto actualUri = resolveMirrorUri(state, uri);
 
-            if (uri != actualUri)
-                printMsg(lvlInfo, format("‘%1%’ expands to ‘%2%’") % uri % actualUri);
-
             /* Download the file. */
+            printMsg(lvlInfo, format("downloading ‘%1%’...") % actualUri);
             auto result = downloadFile(actualUri);
 
-            /* Copy the file to the Nix store. FIXME: if RemoteStore
-               implemented addToStoreFromDump() and downloadFile()
-               supported a sink, we could stream the download directly
-               into the Nix store. */
             AutoDelete tmpDir(createTempDir(), true);
             Path tmpFile = (Path) tmpDir + "/tmp";
             writeFile(tmpFile, result.data);
 
+            /* Optionally unpack the file. */
+            if (unpack) {
+                printMsg(lvlInfo, "unpacking...");
+                Path unpacked = (Path) tmpDir + "/unpacked";
+                createDirs(unpacked);
+                if (hasSuffix(baseNameOf(uri), ".zip"))
+                    runProgram("unzip", true, {"-qq", tmpFile, "-d", unpacked}, "");
+                else
+                    // FIXME: this requires GNU tar for decompression.
+                    runProgram("tar", true, {"xf", tmpFile, "-C", unpacked}, "");
+
+                /* If the archive unpacks to a single file/directory, then use
+                   that as the top-level. */
+                auto entries = readDirectory(unpacked);
+                if (entries.size() == 1)
+                    tmpFile = unpacked + "/" + entries[0].name;
+                else
+                    tmpFile = unpacked;
+            }
+
             /* FIXME: inefficient; addToStore() will also hash
                this. */
-            hash = hashString(ht, result.data);
+            hash = unpack ? hashPath(ht, tmpFile).first : hashString(ht, result.data);
 
             if (expectedHash != Hash(ht) && expectedHash != hash)
                 throw Error(format("hash mismatch for ‘%1%’") % uri);
 
-            storePath = store->addToStore(name, tmpFile, false, ht);
+            /* Copy the file to the Nix store. FIXME: if RemoteStore
+               implemented addToStoreFromDump() and downloadFile()
+               supported a sink, we could stream the download directly
+               into the Nix store. */
+            storePath = store->addToStore(name, tmpFile, unpack, ht);
+
+            assert(storePath == makeFixedOutputPath(unpack, ht, hash, name));
         }
 
         if (!printPath)