-rw-r--r--   .gitignore                                    1
-rw-r--r--   configure.ac                                 12
-rw-r--r--   corepkgs/config.nix.in                        3
-rw-r--r--   corepkgs/nar.nix                             21
-rw-r--r--   doc/manual/conf-file.xml                     20
-rw-r--r--   doc/manual/release-notes.xml                  5
-rw-r--r--   perl/lib/Nix/Config.pm.in                    12
-rw-r--r--   release.nix                                   3
-rw-r--r--   scripts/Makefile.am                           8
-rwxr-xr-x   scripts/copy-from-other-stores.pl.in         61
-rw-r--r--   scripts/download-from-binary-cache.pl.in    478
-rwxr-xr-x   scripts/download-using-manifests.pl.in       78
-rwxr-xr-x   scripts/nix-build.in                          5
-rwxr-xr-x   scripts/nix-channel.in                        5
-rwxr-xr-x   scripts/nix-push.in                         293
-rw-r--r--   src/libexpr/get-drvs.hh                       7
-rw-r--r--   src/libexpr/primops.cc                        6
-rw-r--r--   src/libstore/build.cc                        46
-rw-r--r--   src/libstore/globals.cc                       1
-rw-r--r--   src/libstore/local-store.cc                  83
-rw-r--r--   src/libstore/local-store.hh                  16
-rw-r--r--   src/libstore/misc.cc                        106
-rw-r--r--   src/libstore/optimise-store.cc                2
-rw-r--r--   src/libstore/remote-store.cc                 98
-rw-r--r--   src/libstore/remote-store.hh                 10
-rw-r--r--   src/libstore/store-api.hh                    32
-rw-r--r--   src/libstore/worker-protocol.hh               7
-rw-r--r--   src/nix-env/nix-env.cc                       28
-rw-r--r--   src/nix-store/nix-store.cc                    2
-rw-r--r--   src/nix-worker/nix-worker.cc                 62
-rw-r--r--   substitute.mk                                 1
31 files changed, 1120 insertions, 392 deletions
diff --git a/.gitignore b/.gitignore
index 3fb96f124991..9160fe43d1ec 100644
--- a/.gitignore
+++ b/.gitignore
@@ -60,6 +60,7 @@ Makefile.in
 /scripts/GeneratePatches.pm
 /scripts/download-using-manifests.pl
 /scripts/copy-from-other-stores.pl
+/scripts/download-from-binary-cache.pl
 /scripts/find-runtime-roots.pl
 /scripts/build-remote.pl
 /scripts/nix-reduce-build
diff --git a/configure.ac b/configure.ac
index f920b8c10e75..21a87194f477 100644
--- a/configure.ac
+++ b/configure.ac
@@ -178,6 +178,7 @@ NEED_PROG(perl, perl)
 NEED_PROG(sed, sed)
 NEED_PROG(tar, tar)
 NEED_PROG(bzip2, bzip2)
+NEED_PROG(xz, xz)
 AC_PATH_PROG(dot, dot)
 AC_PATH_PROG(dblatex, dblatex)
 AC_PATH_PROG(gzip, gzip)
@@ -277,6 +278,10 @@ AC_ARG_WITH(dbd-sqlite, AC_HELP_STRING([--with-dbd-sqlite=PATH],
   [prefix of the Perl DBD::SQLite library]),
   perlFlags="$perlFlags -I$withval")
 
+AC_ARG_WITH(www-curl, AC_HELP_STRING([--with-www-curl=PATH],
+  [prefix of the Perl WWW::Curl library]),
+  perlFlags="$perlFlags -I$withval")
+
 AC_MSG_CHECKING([whether DBD::SQLite works])
 if ! $perl $perlFlags -e 'use DBI; use DBD::SQLite;' 2>&5; then
     AC_MSG_RESULT(no)
@@ -284,6 +289,13 @@ if ! $perl $perlFlags -e 'use DBI; use DBD::SQLite;' 2>&5; then
 fi
 AC_MSG_RESULT(yes)
   
+AC_MSG_CHECKING([whether WWW::Curl works])
+if ! $perl $perlFlags -e 'use WWW::Curl;' 2>&5; then
+    AC_MSG_RESULT(no)
+    AC_MSG_FAILURE([The Perl module WWW::Curl is missing.])
+fi
+AC_MSG_RESULT(yes)
+  
 AC_SUBST(perlFlags)
 
 
diff --git a/corepkgs/config.nix.in b/corepkgs/config.nix.in
index 98924c4ef779..26e821d0e2d7 100644
--- a/corepkgs/config.nix.in
+++ b/corepkgs/config.nix.in
@@ -6,7 +6,8 @@ in {
   perl = "@perl@";
   shell = "@shell@";
   coreutils = "@coreutils@";
-  bzip2 = fromEnv "NIX_BZIP2" "@bzip2@";
+  bzip2 = "@bzip2@";
+  xz = "@xz@";
   tar = "@tar@";
   tr = "@tr@";
   curl = "@curl@";
diff --git a/corepkgs/nar.nix b/corepkgs/nar.nix
index 4747dc31def2..fc9687af776c 100644
--- a/corepkgs/nar.nix
+++ b/corepkgs/nar.nix
@@ -6,28 +6,37 @@ let
     ''
       export PATH=${nixBinDir}:${coreutils}
 
+      if [ $compressionType = "xz" ]; then
+        ext=xz
+        compressor="${xz} -9"
+      else
+        ext=bz2
+        compressor="${bzip2}"
+      fi
+
       echo "packing ‘$storePath’..."
       mkdir $out
-      dst=$out/tmp.nar.bz2
+      dst=$out/tmp.nar.$ext
 
       set -o pipefail
-      nix-store --dump "$storePath" | ${bzip2} > $dst
+      nix-store --dump "$storePath" | $compressor > $dst
 
-      nix-hash --flat --type $hashAlgo --base32 $dst > $out/narbz2-hash
+      hash=$(nix-hash --flat --type $hashAlgo --base32 $dst)
+      echo -n $hash > $out/nar-compressed-hash
 
-      mv $out/tmp.nar.bz2 $out/$(cat $out/narbz2-hash).nar.bz2
+      mv $dst $out/$hash.nar.$ext
     '';
 
 in
 
-{ storePath, hashAlgo }:
+{ storePath, hashAlgo, compressionType }:
 
 derivation {
   name = "nar";
   system = builtins.currentSystem;
   builder = shell;
   args = [ "-e" builder ];
-  inherit storePath hashAlgo;
+  inherit storePath hashAlgo compressionType;
 
   # Don't build in a chroot because Nix's dependencies may not be there.
   __noChroot = true;
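
The nar.nix derivation above is what the rewritten nix-push (later in this commit) instantiates once per store path; a sketch of such a call, with a placeholder store path, is:

    import <nix/nar.nix> {
      storePath = builtins.storePath "/nix/store/<hash>-example";  # placeholder
      hashAlgo = "sha256";
      compressionType = "xz";  # any other value falls back to bzip2
    }

The builder dumps the path with nix-store --dump, pipes it through xz -9 or bzip2, names the result <compressed-hash>.nar.xz (or .nar.bz2), and stores the base-32 hash of the compressed archive in nar-compressed-hash.
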
diff --git a/doc/manual/conf-file.xml b/doc/manual/conf-file.xml
index c095a001c169..cccee8d46202 100644
--- a/doc/manual/conf-file.xml
+++ b/doc/manual/conf-file.xml
@@ -307,6 +307,26 @@ build-use-chroot = /dev /proc /bin</programlisting>
   </varlistentry>
 
 
+  <varlistentry><term><literal>binary-caches</literal></term>
+
+    <listitem><para>A list of URLs of binary caches, separated by
+    whitespace.  It can be overridden through the environment variable
+    <envar>NIX_BINARY_CACHES</envar>.  The default is
+    <literal>http://nixos.org/binary-cache</literal>.</para></listitem>
+
+  </varlistentry>
+
+
+  <varlistentry><term><literal>binary-caches-parallel-connections</literal></term>
+
+    <listitem><para>The maximum number of parallel HTTP connections
+    used by the binary cache substituter to get NAR info files.  This
+    number should be high to minimise latency.  It defaults to
+    150.</para></listitem>
+
+  </varlistentry>
+
+
   <varlistentry><term><literal>system</literal></term>
 
     <listitem><para>This option specifies the canonical Nix system
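
In nix.conf the two options documented above are written like any other setting; a minimal example (the second cache URL is a hypothetical private mirror):

    binary-caches = http://nixos.org/binary-cache http://cache.example.org
    binary-caches-parallel-connections = 150

Setting the NIX_BINARY_CACHES environment variable overrides the binary-caches line.
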
diff --git a/doc/manual/release-notes.xml b/doc/manual/release-notes.xml
index ed06b638adae..7c78122f0267 100644
--- a/doc/manual/release-notes.xml
+++ b/doc/manual/release-notes.xml
@@ -94,6 +94,11 @@
     <citerefentry><refentrytitle>nix.conf</refentrytitle><manvolnum>5</manvolnum></citerefentry>.</para>
   </listitem>
 
+  <listitem>
+    <para>When using the Nix daemon, the <option>-s</option> flag in
+    <command>nix-env -qa</command> is now much faster.</para>
+  </listitem>
+    
 </itemizedlist>
 
 </section>
diff --git a/perl/lib/Nix/Config.pm.in b/perl/lib/Nix/Config.pm.in
index b6d346651807..ed197821e89f 100644
--- a/perl/lib/Nix/Config.pm.in
+++ b/perl/lib/Nix/Config.pm.in
@@ -1,27 +1,31 @@
 package Nix::Config;
 
+$version = "@version@";
+
 $binDir = $ENV{"NIX_BIN_DIR"} || "@bindir@";
 $libexecDir = $ENV{"NIX_LIBEXEC_DIR"} || "@libexecdir@";
 $stateDir = $ENV{"NIX_STATE_DIR"} || "@localstatedir@/nix";
 $manifestDir = $ENV{"NIX_MANIFESTS_DIR"} || "@localstatedir@/nix/manifests";
 $logDir = $ENV{"NIX_LOG_DIR"} || "@localstatedir@/log/nix";
 $confDir = $ENV{"NIX_CONF_DIR"} || "@sysconfdir@/nix";
+$storeDir = $ENV{"NIX_STORE_DIR"} || "@storedir@";
 
-$bzip2 = $ENV{"NIX_BZIP2"} || "@bzip2@";
+$bzip2 = "@bzip2@";
+$xz = "@xz@";
 $curl = "@curl@";
 
 $useBindings = "@perlbindings@" eq "yes";
 
+%config = ();
+
 sub readConfig {
-    my %config;
-    my $config = "@sysconfdir@/nix/nix.conf";
+    my $config = "$confDir/nix.conf";
     return unless -f $config;
     
     open CONFIG, "<$config" or die "cannot open `$config'";
     while (<CONFIG>) {
         /^\s*([\w|-]+)\s*=\s*(.*)$/ or next;
         $config{$1} = $2;
-        print "|$1| -> |$2|\n";
     }
     close CONFIG;
 }
diff --git a/release.nix b/release.nix
index 47d296c622c6..0b382fff31b2 100644
--- a/release.nix
+++ b/release.nix
@@ -29,6 +29,7 @@ let
           --with-xml-flags=--nonet
           --with-dbi=${perlPackages.DBI}/${perl.libPrefix}
           --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}
+          --with-www-curl=${perlPackages.WWWCurl}/${perl.libPrefix}
         '';
 
         postUnpack = ''
@@ -77,6 +78,7 @@ let
           --disable-init-state
           --with-dbi=${perlPackages.DBI}/${perl.libPrefix}
           --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}
+          --with-www-curl=${perlPackages.WWWCurl}/${perl.libPrefix}
           --enable-gc
         '';
 
@@ -134,6 +136,7 @@ let
           --disable-init-state
           --with-dbi=${perlPackages.DBI}/${perl.libPrefix}
           --with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}
+          --with-www-curl=${perlPackages.WWWCurl}/${perl.libPrefix}
         '';
 
         dontInstall = false;
diff --git a/scripts/Makefile.am b/scripts/Makefile.am
index 18a59dbdb6f6..506b1aeb454f 100644
--- a/scripts/Makefile.am
+++ b/scripts/Makefile.am
@@ -7,17 +7,14 @@ noinst_SCRIPTS = nix-profile.sh \
   find-runtime-roots.pl build-remote.pl nix-reduce-build \
   copy-from-other-stores.pl nix-http-export.cgi
 
-nix-pull nix-push: download-using-manifests.pl
-
-install-exec-local: download-using-manifests.pl copy-from-other-stores.pl find-runtime-roots.pl
+install-exec-local: download-using-manifests.pl copy-from-other-stores.pl download-from-binary-cache.pl find-runtime-roots.pl
 	$(INSTALL) -d $(DESTDIR)$(sysconfdir)/profile.d
 	$(INSTALL_DATA) nix-profile.sh $(DESTDIR)$(sysconfdir)/profile.d/nix.sh
 	$(INSTALL) -d $(DESTDIR)$(libexecdir)/nix
 	$(INSTALL_PROGRAM) find-runtime-roots.pl $(DESTDIR)$(libexecdir)/nix 
 	$(INSTALL_PROGRAM) build-remote.pl $(DESTDIR)$(libexecdir)/nix 
 	$(INSTALL) -d $(DESTDIR)$(libexecdir)/nix/substituters
-	$(INSTALL_PROGRAM) download-using-manifests.pl $(DESTDIR)$(libexecdir)/nix/substituters
-	$(INSTALL_PROGRAM) copy-from-other-stores.pl $(DESTDIR)$(libexecdir)/nix/substituters
+	$(INSTALL_PROGRAM) download-using-manifests.pl copy-from-other-stores.pl download-from-binary-cache.pl $(DESTDIR)$(libexecdir)/nix/substituters
 	$(INSTALL) -d $(DESTDIR)$(sysconfdir)/nix
 
 include ../substitute.mk
@@ -29,6 +26,7 @@ EXTRA_DIST = nix-collect-garbage.in \
   nix-build.in \
   download-using-manifests.pl.in \
   copy-from-other-stores.pl.in \
+  download-from-binary-cache.pl.in \
   nix-copy-closure.in \
   find-runtime-roots.pl.in \
   build-remote.pl.in \
diff --git a/scripts/copy-from-other-stores.pl.in b/scripts/copy-from-other-stores.pl.in
index b930b720725e..92869ee7a107 100755
--- a/scripts/copy-from-other-stores.pl.in
+++ b/scripts/copy-from-other-stores.pl.in
@@ -36,42 +36,45 @@ sub findStorePath {
 if ($ARGV[0] eq "--query") {
 
     while (<STDIN>) {
-        my $cmd = $_; chomp $cmd;
+        chomp;
+        my ($cmd, @args) = split " ", $_;
 
         if ($cmd eq "have") {
-            my $storePath = <STDIN>; chomp $storePath;
-            print STDOUT (defined findStorePath($storePath) ? "1\n" : "0\n");
+            foreach my $storePath (@args) {
+                print "$storePath\n" if defined findStorePath($storePath);
+            }
+            print "\n";
         }
 
         elsif ($cmd eq "info") {
-            my $storePath = <STDIN>; chomp $storePath;
-            my ($store, $sourcePath) = findStorePath($storePath);
-            if (!defined $store) {
-                print "0\n";
-                next; # not an error
-            }
-            print "1\n";
+            foreach my $storePath (@args) {
+                my ($store, $sourcePath) = findStorePath($storePath);
+                next unless defined $store;
 
-            $ENV{"NIX_DB_DIR"} = "$store/var/nix/db";
+                $ENV{"NIX_DB_DIR"} = "$store/var/nix/db";
             
-            my $deriver = `@bindir@/nix-store --query --deriver $storePath`;
-            die "cannot query deriver of `$storePath'" if $? != 0;
-            chomp $deriver;
-            $deriver = "" if $deriver eq "unknown-deriver";
-
-            my @references = split "\n",
-                `@bindir@/nix-store --query --references $storePath`;
-            die "cannot query references of `$storePath'" if $? != 0;
-
-            my $narSize = `@bindir@/nix-store --query --size $storePath`;
-            die "cannot query size of `$storePath'" if $? != 0;
-            chomp $narSize;
-
-            print "$deriver\n";
-            print scalar @references, "\n";
-            print "$_\n" foreach @references;
-            print "$narSize\n";
-            print "$narSize\n";
+                my $deriver = `@bindir@/nix-store --query --deriver $storePath`;
+                die "cannot query deriver of `$storePath'" if $? != 0;
+                chomp $deriver;
+                $deriver = "" if $deriver eq "unknown-deriver";
+
+                my @references = split "\n",
+                    `@bindir@/nix-store --query --references $storePath`;
+                die "cannot query references of `$storePath'" if $? != 0;
+
+                my $narSize = `@bindir@/nix-store --query --size $storePath`;
+                die "cannot query size of `$storePath'" if $? != 0;
+                chomp $narSize;
+
+                print "$storePath\n";
+                print "$deriver\n";
+                print scalar @references, "\n";
+                print "$_\n" foreach @references;
+                print "$narSize\n";
+                print "$narSize\n";
+            }
+
+            print "\n";
         }
 
         else { die "unknown command `$cmd'"; }
diff --git a/scripts/download-from-binary-cache.pl.in b/scripts/download-from-binary-cache.pl.in
new file mode 100644
index 000000000000..5fb0419f181a
--- /dev/null
+++ b/scripts/download-from-binary-cache.pl.in
@@ -0,0 +1,478 @@
+#! @perl@ -w @perlFlags@
+
+use DBI;
+use File::Basename;
+use IO::Select;
+use Nix::Config;
+use Nix::Store;
+use WWW::Curl::Easy;
+use WWW::Curl::Multi;
+use strict;
+
+
+Nix::Config::readConfig;
+
+my @binaryCacheUrls = map { s/\/+$//; $_ } split(/ /,
+    ($ENV{"NIX_BINARY_CACHES"} // $Nix::Config::config{"binary-caches"} // "http://nixos.org/binary-cache"));
+
+my $maxParallelRequests = int($Nix::Config::config{"binary-caches-parallel-connections"} // 150);
+$maxParallelRequests = 1 if $maxParallelRequests < 1;
+
+my $debug = ($ENV{"NIX_DEBUG_SUBST"} // "") eq 1;
+
+my ($dbh, $insertNAR, $queryNAR, $insertNARExistence, $queryNARExistence);
+my %cacheIds;
+
+my $curlm = WWW::Curl::Multi->new;
+my $activeRequests = 0;
+my $curlIdCount = 1;
+my %requests;
+my %scheduled;
+my $caBundle = $ENV{"CURL_CA_BUNDLE"} // $ENV{"OPENSSL_X509_CERT_FILE"};
+
+
+sub addRequest {
+    my ($storePath, $url, $head) = @_;
+    
+    my $curl = WWW::Curl::Easy->new;
+    my $curlId = $curlIdCount++;
+    $requests{$curlId} = { storePath => $storePath, url => $url, handle => $curl, content => "", type => $head ? "HEAD" : "GET" };
+
+    $curl->setopt(CURLOPT_PRIVATE, $curlId);
+    $curl->setopt(CURLOPT_URL, $url);
+    $curl->setopt(CURLOPT_WRITEDATA, \$requests{$curlId}->{content});
+    $curl->setopt(CURLOPT_FOLLOWLOCATION, 1);
+    $curl->setopt(CURLOPT_CAINFO, $caBundle) if defined $caBundle;
+    $curl->setopt(CURLOPT_USERAGENT, "Nix/$Nix::Config::version");
+    $curl->setopt(CURLOPT_NOBODY, 1) if $head;
+
+    if ($activeRequests >= $maxParallelRequests) {
+        $scheduled{$curlId} = 1;
+    } else {
+        $curlm->add_handle($curl);
+        $activeRequests++;
+    }
+
+    return $requests{$curlId};
+}
+
+
+sub processRequests {
+    while ($activeRequests) {
+        my ($rfds, $wfds, $efds) = $curlm->fdset();
+        #print STDERR "R = @{$rfds}, W = @{$wfds}, E = @{$efds}\n";
+
+        # Sleep until we can read or write some data.
+        if (scalar @{$rfds} + scalar @{$wfds} + scalar @{$efds} > 0) {
+            IO::Select->select(IO::Select->new(@{$rfds}), IO::Select->new(@{$wfds}), IO::Select->new(@{$efds}), 0.1);
+        }
+        
+        if ($curlm->perform() != $activeRequests) {
+            while (my ($id, $result) = $curlm->info_read) {
+                if ($id) {
+                    my $request = $requests{$id} or die;
+                    my $handle = $request->{handle};
+                    $request->{result} = $result;
+                    $request->{httpStatus} = $handle->getinfo(CURLINFO_HTTP_CODE);
+                    
+                    print STDERR "$request->{type} on $request->{url} [$request->{result}, $request->{httpStatus}]\n" if $debug;
+                    
+                    $activeRequests--;
+                    delete $request->{handle};
+
+                    if (scalar(keys %scheduled) > 0) {
+                        my $id2 = (keys %scheduled)[0];
+                        $curlm->add_handle($requests{$id2}->{handle});
+                        $activeRequests++;
+                        delete $scheduled{$id2};
+                    }
+                }
+            }
+        }
+    }
+}
+
+
+sub initCache {
+    my $dbPath = "$Nix::Config::stateDir/binary-cache-v1.sqlite";
+
+    # Open/create the database.
+    $dbh = DBI->connect("dbi:SQLite:dbname=$dbPath", "", "")
+        or die "cannot open database `$dbPath'";
+    $dbh->{RaiseError} = 1;
+    $dbh->{PrintError} = 0;
+
+    $dbh->do("pragma synchronous = off"); # we can always reproduce the cache
+    $dbh->do("pragma journal_mode = truncate");
+
+    # Initialise the database schema, if necessary.
+    $dbh->do(<<EOF);
+        create table if not exists BinaryCaches (
+            id        integer primary key autoincrement not null,
+            url       text unique not null
+        );
+EOF
+    
+    $dbh->do(<<EOF);
+        create table if not exists NARs (
+            cache            integer not null,
+            storePath        text not null,
+            url              text not null,
+            compression      text not null,
+            fileHash         text,
+            fileSize         integer,
+            narHash          text,
+            narSize          integer,
+            refs             text,
+            deriver          text,
+            system           text,
+            timestamp        integer not null,
+            primary key (cache, storePath),
+            foreign key (cache) references BinaryCaches(id) on delete cascade
+        );
+EOF
+
+    $dbh->do(<<EOF);
+        create table if not exists NARExistence (
+            cache            integer not null,
+            storePath        text not null,
+            exist            integer not null,
+            timestamp        integer not null,
+            primary key (cache, storePath),
+            foreign key (cache) references BinaryCaches(id) on delete cascade
+        );
+EOF
+
+    $insertNAR = $dbh->prepare(
+        "insert or replace into NARs(cache, storePath, url, compression, fileHash, fileSize, narHash, " .
+        "narSize, refs, deriver, system, timestamp) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)") or die;
+
+    $queryNAR = $dbh->prepare("select * from NARs where cache = ? and storePath = ?") or die;
+
+    $insertNARExistence = $dbh->prepare(
+        "insert or replace into NARExistence(cache, storePath, exist, timestamp) values (?, ?, ?, ?)") or die;
+
+    $queryNARExistence = $dbh->prepare("select exist from NARExistence where cache = ? and storePath = ?") or die;
+}
+
+
+
+sub negativeHit {
+    my ($storePath, $binaryCacheUrl) = @_;
+    $queryNARExistence->execute(getCacheId($binaryCacheUrl), basename($storePath));
+    my $res = $queryNARExistence->fetchrow_hashref();
+    return defined $res && $res->{exist} == 0;
+}
+
+
+sub positiveHit {
+    my ($storePath, $binaryCacheUrl) = @_;
+    return 1 if defined getCachedInfoFrom($storePath, $binaryCacheUrl);
+    $queryNARExistence->execute(getCacheId($binaryCacheUrl), basename($storePath));
+    my $res = $queryNARExistence->fetchrow_hashref();
+    return defined $res && $res->{exist} == 1;
+}
+
+
+sub processNARInfo {
+    my ($storePath, $binaryCacheUrl, $request) = @_;
+    
+    my $cacheId = getCacheId($binaryCacheUrl);
+
+    if ($request->{result} != 0 || $request->{httpStatus} != 200) {
+        if ($request->{httpStatus} != 404) {
+            print STDERR "could not download ‘$request->{url}’ (" .
+                ($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
+        } else {
+            $insertNARExistence->execute($cacheId, basename($storePath), 0, time());
+        }
+        return undef;
+    }
+    
+    my ($storePath2, $url, $fileHash, $fileSize, $narHash, $narSize, $deriver, $system);
+    my $compression = "bzip2";
+    my @refs;
+    foreach my $line (split "\n", $request->{content}) {
+        unless ($line =~ /^(.*): (.*)$/) {
+            print STDERR "bad NAR info file ‘$request->{url}’\n";
+            return undef;
+        }
+        if ($1 eq "StorePath") { $storePath2 = $2; }
+        elsif ($1 eq "URL") { $url = $2; }
+        elsif ($1 eq "Compression") { $compression = $2; }
+        elsif ($1 eq "FileHash") { $fileHash = $2; }
+        elsif ($1 eq "FileSize") { $fileSize = int($2); }
+        elsif ($1 eq "NarHash") { $narHash = $2; }
+        elsif ($1 eq "NarSize") { $narSize = int($2); }
+        elsif ($1 eq "References") { @refs = split / /, $2; }
+        elsif ($1 eq "Deriver") { $deriver = $2; }
+        elsif ($1 eq "System") { $system = $2; }
+    }
+    return undef if $storePath ne $storePath2;
+    if ($storePath ne $storePath2 || !defined $url || !defined $narHash) {
+        print STDERR "bad NAR info file ‘$request->{url}’\n";
+        return undef;
+    }
+
+    # FIXME: validate $url etc. for security.
+    
+    # Cache the result.
+    $insertNAR->execute(
+        $cacheId, basename($storePath), $url, $compression, $fileHash, $fileSize,
+        $narHash, $narSize, join(" ", @refs), $deriver, $system, time());
+    
+    return
+        { url => $url
+        , compression => $compression
+        , fileHash => $fileHash
+        , fileSize => $fileSize
+        , narHash => $narHash
+        , narSize => $narSize
+        , refs => [ @refs ]
+        , deriver => $deriver
+        , system => $system
+        };
+}
+
+
+sub getCacheId {
+    my ($binaryCacheUrl) = @_;
+    
+    my $cacheId = $cacheIds{$binaryCacheUrl};
+    return $cacheId if defined $cacheId;
+    
+    # FIXME: not atomic.
+    my @res = @{$dbh->selectcol_arrayref("select id from BinaryCaches where url = ?", {}, $binaryCacheUrl)};
+    if (scalar @res == 1) {
+        $cacheId = $res[0];
+    } else {
+        $dbh->do("insert into BinaryCaches(url) values (?)",
+                 {}, $binaryCacheUrl);
+        $cacheId = $dbh->last_insert_id("", "", "", "");
+    }
+
+    $cacheIds{$binaryCacheUrl} = $cacheId;
+    return $cacheId;
+}
+
+
+sub getCachedInfoFrom {
+    my ($storePath, $binaryCacheUrl) = @_;
+
+    $queryNAR->execute(getCacheId($binaryCacheUrl), basename($storePath));
+    my $res = $queryNAR->fetchrow_hashref();
+    return undef unless defined $res;
+    
+    return 
+        { url => $res->{url}
+        , compression => $res->{compression}
+        , fileHash => $res->{fileHash}
+        , fileSize => $res->{fileSize}
+        , narHash => $res->{narHash}
+        , narSize => $res->{narSize}
+        , refs => [ split " ", $res->{refs} ]
+        , deriver => $res->{deriver}
+        } if defined $res;
+}
+
+
+sub printInfo {
+    my ($storePath, $info) = @_;
+    print "$storePath\n";
+    print $info->{deriver} ? "$Nix::Config::storeDir/$info->{deriver}" : "", "\n";
+    print scalar @{$info->{refs}}, "\n";
+    print "$Nix::Config::storeDir/$_\n" foreach @{$info->{refs}};
+    print $info->{fileSize} || 0, "\n";
+    print $info->{narSize} || 0, "\n";
+}
+
+
+sub infoUrl {
+    my ($binaryCacheUrl, $storePath) = @_;
+    my $pathHash = substr(basename($storePath), 0, 32);
+    my $infoUrl = "$binaryCacheUrl/$pathHash.narinfo";
+}
+
+
+sub printInfoParallel {
+    my @paths = @_;
+
+    # First print all paths for which we have cached info.
+    my @left;
+    foreach my $storePath (@paths) {
+        my $found = 0;
+        foreach my $binaryCacheUrl (@binaryCacheUrls) {
+            my $info = getCachedInfoFrom($storePath, $binaryCacheUrl);
+            if (defined $info) {
+                printInfo($storePath, $info);
+                $found = 1;
+                last;
+            }
+        }
+        push @left, $storePath if !$found;
+    }
+
+    return if scalar @left == 0;
+
+    foreach my $binaryCacheUrl (@binaryCacheUrls) {
+
+        my @left2;
+        %requests = ();
+        foreach my $storePath (@left) {
+            if (negativeHit($storePath, $binaryCacheUrl)) {
+                push @left2, $storePath;
+                next;
+            }
+            addRequest($storePath, infoUrl($binaryCacheUrl, $storePath));
+        }
+
+        processRequests;
+
+        foreach my $request (values %requests) {
+            my $info = processNARInfo($request->{storePath}, $binaryCacheUrl, $request);
+            if (defined $info) {
+                printInfo($request->{storePath}, $info);
+            } else {
+                push @left2, $request->{storePath};
+            }
+        }
+
+        @left = @left2;
+    }
+}
+
+
+sub printSubstitutablePaths {
+    my @paths = @_;
+
+    # First look for paths that have cached info.
+    my @left;
+    foreach my $storePath (@paths) {
+        my $found = 0;
+        foreach my $binaryCacheUrl (@binaryCacheUrls) {
+            if (positiveHit($storePath, $binaryCacheUrl)) {
+                print "$storePath\n";
+                $found = 1;
+                last;
+            }
+        }
+        push @left, $storePath if !$found;
+    }
+
+    return if scalar @left == 0;
+
+    # For remaining paths, do HEAD requests.
+    foreach my $binaryCacheUrl (@binaryCacheUrls) {
+        my $cacheId = getCacheId($binaryCacheUrl);
+
+        my @left2;
+        %requests = ();
+        foreach my $storePath (@left) {
+            if (negativeHit($storePath, $binaryCacheUrl)) {
+                push @left2, $storePath;
+                next;
+            }
+            addRequest($storePath, infoUrl($binaryCacheUrl, $storePath), 1);
+        }
+        
+        processRequests;
+
+        foreach my $request (values %requests) {
+            if ($request->{result} != 0 || $request->{httpStatus} != 200) {
+                if ($request->{httpStatus} != 404) {
+                    print STDERR "could not check ‘$request->{url}’ (" .
+                        ($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
+                } else {
+                    $insertNARExistence->execute($cacheId, basename($request->{storePath}), 0, time());
+                }
+                push @left2, $request->{storePath};
+            } else {
+                $insertNARExistence->execute($cacheId, basename($request->{storePath}), 1, time());
+                print "$request->{storePath}\n";
+            }
+        }
+
+        @left = @left2;
+    }
+}
+
+
+sub downloadBinary {
+    my ($storePath) = @_;
+    
+    foreach my $binaryCacheUrl (@binaryCacheUrls) {
+        my $info = getCachedInfoFrom($storePath, $binaryCacheUrl);
+
+        unless (defined $info) {
+            next if negativeHit($storePath, $binaryCacheUrl);
+            my $request = addRequest($storePath, infoUrl($binaryCacheUrl, $storePath));
+            processRequests;
+            $info = processNARInfo($storePath, $binaryCacheUrl, $request);
+        }
+
+        next unless defined $info;
+        
+        my $decompressor;
+        if ($info->{compression} eq "bzip2") { $decompressor = "$Nix::Config::bzip2 -d"; }
+        elsif ($info->{compression} eq "xz") { $decompressor = "$Nix::Config::xz -d"; }
+        else {
+            print STDERR "unknown compression method ‘$info->{compression}’\n";
+            next;
+        }
+        my $url = "$binaryCacheUrl/$info->{url}"; # FIXME: handle non-relative URLs
+        print STDERR "\n*** Downloading ‘$url’ into ‘$storePath’...\n";
+        if (system("$Nix::Config::curl --fail --location '$url' | $decompressor | $Nix::Config::binDir/nix-store --restore $storePath") != 0) {
+            die "download of `$info->{url}' failed" . ($! ? ": $!" : "") . "\n" unless $? == 0;
+            next;
+        }
+        # The hash in the manifest can be either in base-16 or
+        # base-32.  Handle both.
+        $info->{narHash} =~ /^sha256:(.*)$/ or die "invalid hash";
+        my $hash = $1;
+        my $hash2 = hashPath("sha256", 1, $storePath);
+        die "hash mismatch in downloaded path ‘$storePath’; expected $hash, got $hash2\n"
+            if $hash ne $hash2;
+        print STDERR "\n";
+        return 1;
+    }
+
+    return 0;
+}
+
+
+initCache();
+
+
+if ($ARGV[0] eq "--query") {
+
+    while (<STDIN>) {
+        chomp;
+        my ($cmd, @args) = split " ", $_;
+        
+        if ($cmd eq "have") {
+            printSubstitutablePaths(@args);
+            print "\n";
+        }
+
+        elsif ($cmd eq "info") {
+            printInfoParallel(@args);
+            print "\n";
+        }
+
+        else { die "unknown command `$cmd'"; }
+
+        flush STDOUT;
+    }
+
+}
+
+elsif ($ARGV[0] eq "--substitute") {
+    my $storePath = $ARGV[1] or die;
+    if (!downloadBinary($storePath)) {
+        print STDERR "could not download ‘$storePath’ from any binary cache\n";
+    }
+}
+
+else {
+    die;
+}
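
The .narinfo files fetched by this script (and written by the new nix-push below) live at <cache-url>/<hash-part>.narinfo, where <hash-part> is the first 32 characters of the store path's base name, and use a plain "Key: value" format. A representative file, with placeholder hashes and sizes, might read:

    StorePath: /nix/store/<hash>-hello-2.8
    URL: <file-hash>.nar.xz
    Compression: xz
    FileHash: sha256:<file-hash>
    FileSize: 51348
    NarHash: sha256:<nar-hash>
    NarSize: 196040
    References: <hash>-glibc-2.13 <hash>-hello-2.8
    Deriver: <hash>-hello-2.8.drv
    System: x86_64-linux

StorePath, URL and NarHash are required by the parser; Compression defaults to bzip2 when absent, and URL is resolved relative to the cache URL.
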
diff --git a/scripts/download-using-manifests.pl.in b/scripts/download-using-manifests.pl.in
index ef663dabb1ef..f00debc68546 100755
--- a/scripts/download-using-manifests.pl.in
+++ b/scripts/download-using-manifests.pl.in
@@ -173,56 +173,54 @@ sub computeSmallestDownload {
 if ($ARGV[0] eq "--query") {
 
     while (<STDIN>) {
-        my $cmd = $_; chomp $cmd;
+        chomp;
+        my ($cmd, @args) = split " ", $_;
 
         if ($cmd eq "have") {
-            my $storePath = <STDIN>; chomp $storePath;
-            print STDOUT (
-                scalar @{$dbh->selectcol_arrayref("select 1 from NARs where storePath = ?", {}, $storePath)} > 0
-                ? "1\n" : "0\n");
+            foreach my $storePath (@args) {
+                print "$storePath\n" if scalar @{$dbh->selectcol_arrayref("select 1 from NARs where storePath = ?", {}, $storePath)} > 0;
+            }
+            print "\n";
         }
 
         elsif ($cmd eq "info") {
-            my $storePath = <STDIN>; chomp $storePath;
+            foreach my $storePath (@args) {
 
-            my $infos = $dbh->selectall_arrayref(
-                "select * from NARs where storePath = ?",
-                { Slice => {} }, $storePath);
+                my $infos = $dbh->selectall_arrayref(
+                    "select * from NARs where storePath = ?",
+                    { Slice => {} }, $storePath);
             
-            my $info;
-            if (scalar @{$infos} > 0) {
-                $info = @{$infos}[0];
-            }
-            else {
-                print "0\n";
-                next; # not an error
-            }
-
-            print "1\n";
-            print "$info->{deriver}\n";
-            my @references = split " ", $info->{refs};
-            print scalar @references, "\n";
-            print "$_\n" foreach @references;
-
-            my @path = computeSmallestDownload $storePath;
-
-            my $downloadSize = 0;
-            while (scalar @path > 0) {
-                my $edge = pop @path;
-                my $u = $edge->{start};
-                my $v = $edge->{end};
-                if ($edge->{type} eq "patch") {
-                    $downloadSize += $edge->{info}->{size} || 0;
-                }
-                elsif ($edge->{type} eq "narfile") {
-                    $downloadSize += $edge->{info}->{size} || 0;
+                next unless scalar @{$infos} > 0;
+                my $info = @{$infos}[0];
+
+                print "$storePath\n";
+                print "$info->{deriver}\n";
+                my @references = split " ", $info->{refs};
+                print scalar @references, "\n";
+                print "$_\n" foreach @references;
+
+                my @path = computeSmallestDownload $storePath;
+
+                my $downloadSize = 0;
+                while (scalar @path > 0) {
+                    my $edge = pop @path;
+                    my $u = $edge->{start};
+                    my $v = $edge->{end};
+                    if ($edge->{type} eq "patch") {
+                        $downloadSize += $edge->{info}->{size} || 0;
+                    }
+                    elsif ($edge->{type} eq "narfile") {
+                        $downloadSize += $edge->{info}->{size} || 0;
+                    }
                 }
-            }
 
-            print "$downloadSize\n";
+                print "$downloadSize\n";
             
-            my $narSize = $info->{narSize} || 0;
-            print "$narSize\n";
+                my $narSize = $info->{narSize} || 0;
+                print "$narSize\n";
+            }
+
+            print "\n";
         }
         
         else { die "unknown command `$cmd'"; }
diff --git a/scripts/nix-build.in b/scripts/nix-build.in
index afe0679a47d3..aa3f4661aeee 100755
--- a/scripts/nix-build.in
+++ b/scripts/nix-build.in
@@ -58,6 +58,11 @@ EOF
         # '` hack
     }
 
+    elsif ($arg eq "--version") {
+        print "nix-build (Nix) $Nix::Config::version\n";
+        exit 0;
+    }
+    
     elsif ($arg eq "--add-drv-link") {
         $drvLink = "./derivation";
     }
diff --git a/scripts/nix-channel.in b/scripts/nix-channel.in
index 6883ffd18db2..e7a4b0900e86 100755
--- a/scripts/nix-channel.in
+++ b/scripts/nix-channel.in
@@ -194,6 +194,11 @@ while (scalar @ARGV) {
         usageError;
     }
 
+    elsif ($arg eq "--version") {
+        print "nix-channel (Nix) $Nix::Config::version\n";
+        exit 0;
+    }
+    
     else {
         die "unknown argument `$arg'; try `--help'";
     }
diff --git a/scripts/nix-push.in b/scripts/nix-push.in
index a1c02190bd6c..35ad43d283bb 100755
--- a/scripts/nix-push.in
+++ b/scripts/nix-push.in
@@ -1,10 +1,13 @@
 #! @perl@ -w @perlFlags@
 
 use strict;
+use File::Basename;
 use File::Temp qw(tempdir);
+use File::Path qw(mkpath);
 use File::stat;
+use File::Copy;
 use Nix::Config;
-use Nix::Manifest;
+use Nix::Store;
 
 my $hashAlgo = "sha256";
 
@@ -12,7 +15,6 @@ my $tmpDir = tempdir("nix-push.XXXXXX", CLEANUP => 1, TMPDIR => 1)
     or die "cannot create a temporary directory";
 
 my $nixExpr = "$tmpDir/create-nars.nix";
-my $manifest = "$tmpDir/MANIFEST";
 
 my $curl = "$Nix::Config::curl --fail --silent";
 my $extraCurlFlags = ${ENV{'CURL_FLAGS'}};
@@ -20,20 +22,21 @@ $curl = "$curl $extraCurlFlags" if defined $extraCurlFlags;
 
 
 # Parse the command line.
+my $compressionType = "xz";
+my $force = 0;
+
 my $localCopy;
 my $localArchivesDir;
-my $localManifestFile;
-
-my $targetArchivesUrl;
 
 my $archivesPutURL;
 my $archivesGetURL;
-my $manifestPutURL;
+
+my @roots;
 
 sub showSyntax {
     print STDERR <<EOF
-Usage: nix-push --copy ARCHIVES_DIR MANIFEST_FILE PATHS...
-   or: nix-push ARCHIVES_PUT_URL ARCHIVES_GET_URL MANIFEST_PUT_URL PATHS...
+Usage: nix-push --copy ARCHIVES_DIR PATHS...
+   or: nix-push --upload ARCHIVES_PUT_URL ARCHIVES_GET_URL PATHS...
 
 `nix-push' copies or uploads the closure of PATHS to the given
 destination.
@@ -42,36 +45,42 @@ EOF
     exit 1;
 }
 
-showSyntax if scalar @ARGV < 1;
-
-if ($ARGV[0] eq "--copy") {
-    showSyntax if scalar @ARGV < 3;
-    $localCopy = 1;
-    shift @ARGV;
-    $localArchivesDir = shift @ARGV;
-    $localManifestFile = shift @ARGV;
-    if ($ARGV[0] eq "--target") {
-       shift @ARGV;
-       $targetArchivesUrl = shift @ARGV;
-    }
-    else {
-       $targetArchivesUrl = "file://$localArchivesDir";
+for (my $n = 0; $n < scalar @ARGV; $n++) {
+    my $arg = $ARGV[$n];
+
+    if ($arg eq "--help") {
+        showSyntax;
+    } elsif ($arg eq "--bzip2") {
+        $compressionType = "bzip2";
+    } elsif ($arg eq "--force") {
+        $force = 1;
+    } elsif ($arg eq "--copy") {
+        $n++;
+        die "$0: `$arg' requires an argument\n" unless $n < scalar @ARGV;
+        $localCopy = 1;
+        $localArchivesDir = $ARGV[$n];
+        mkpath($localArchivesDir, 0, 0755);
+    } elsif ($arg eq "--upload") {
+        die "$0: `$arg' requires two arguments\n" unless $n + 2 < scalar @ARGV;
+        $localCopy = 0;
+        $archivesPutURL = $ARGV[$n + 1];
+        $archivesGetURL = $ARGV[$n + 2];
+        $n++;
+    } elsif (substr($arg, 0, 1) eq "-") {
+        showSyntax;
+    } else {
+        push @roots, $arg;
     }
 }
-else {
-    showSyntax if scalar @ARGV < 3;
-    $localCopy = 0;
-    $archivesPutURL = shift @ARGV;
-    $archivesGetURL = shift @ARGV;
-    $manifestPutURL = shift @ARGV;
-}
+        
+showSyntax if !defined $localCopy;
 
 
 # From the given store paths, determine the set of requisite store
 # paths, i.e, the paths required to realise them.
 my %storePaths;
 
-foreach my $path (@ARGV) {
+foreach my $path (@roots) {
     die unless $path =~ /^\//;
 
     # Get all paths referenced by the normalisation of the given 
@@ -92,8 +101,8 @@ foreach my $path (@ARGV) {
 my @storePaths = keys %storePaths;
 
 
-# For each path, create a Nix expression that turns the path into
-# a Nix archive.
+# Create a list of Nix derivations that turn each path into a Nix
+# archive.
 open NIX, ">$nixExpr";
 print NIX "[";
 
@@ -103,7 +112,7 @@ foreach my $storePath (@storePaths) {
     # Construct a Nix expression that creates a Nix archive.
     my $nixexpr = 
         "(import <nix/nar.nix> " .
-        "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"$hashAlgo\"; }) ";
+        "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"$hashAlgo\"; compressionType = \"$compressionType\"; }) ";
     
     print NIX $nixexpr;
 }
@@ -112,172 +121,118 @@ print NIX "]";
 close NIX;
 
 
-# Instantiate store derivations from the Nix expression.
-my @storeExprs;
-print STDERR "instantiating store derivations...\n";
-my $pid = open(READ, "$Nix::Config::binDir/nix-instantiate $nixExpr|")
-    or die "cannot run nix-instantiate";
+# Build the Nix expression.
+print STDERR "building compressed archives...\n";
+my @narPaths;
+my $pid = open(READ, "$Nix::Config::binDir/nix-build $nixExpr -o $tmpDir/result |")
+    or die "cannot run nix-build";
 while (<READ>) {
     chomp;
     die unless /^\//;
-    push @storeExprs, $_;
+    push @narPaths, $_;
 }
-close READ or die "nix-instantiate failed: $?";
+close READ or die "nix-build failed: $?";
 
 
-# Build the derivations.
-print STDERR "creating archives...\n";
-
-my @narPaths;
-
-my @tmp = @storeExprs;
-while (scalar @tmp > 0) {
-    my $n = scalar @tmp;
-    if ($n > 256) { $n = 256 };
-    my @tmp2 = @tmp[0..$n - 1];
-    @tmp = @tmp[$n..scalar @tmp - 1];
-
-    my $pid = open(READ, "$Nix::Config::binDir/nix-store --realise @tmp2|")
-        or die "cannot run nix-store";
-    while (<READ>) {
-        chomp;
-        die unless (/^\//);
-        push @narPaths, "$_";
-    }
-    close READ or die "nix-store failed: $?";
-}
-
-
-# Create the manifest.
-print STDERR "creating manifest...\n";
+# Upload the archives and the corresponding info files.
+print STDERR "uploading/copying archives...\n";
 
-my %narFiles;
-my %patches;
+my $totalNarSize = 0;
+my $totalCompressedSize = 0;
 
-my @narArchives;
 for (my $n = 0; $n < scalar @storePaths; $n++) {
     my $storePath = $storePaths[$n];
     my $narDir = $narPaths[$n];
-    
-    $storePath =~ /\/([^\/]*)$/;
-    my $basename = $1;
-    defined $basename or die;
-
-    open HASH, "$narDir/narbz2-hash" or die "cannot open narbz2-hash";
-    my $narbz2Hash = <HASH>;
-    chomp $narbz2Hash;
-    $narbz2Hash =~ /^[0-9a-z]+$/ or die "invalid hash";
-    close HASH;
-
-    my $narName = "$narbz2Hash.nar.bz2";
-
-    my $narFile = "$narDir/$narName";
-    (-f $narFile) or die "narfile for $storePath not found";
-    push @narArchives, $narFile;
-
-    my $narbz2Size = stat($narFile)->size;
-
-    my $references = `$Nix::Config::binDir/nix-store --query --references '$storePath'`;
-    die "cannot query references for `$storePath'" if $? != 0;
-    $references = join(" ", split(" ", $references));
-
-    my $deriver = `$Nix::Config::binDir/nix-store --query --deriver '$storePath'`;
-    die "cannot query deriver for `$storePath'" if $? != 0;
-    chomp $deriver;
-    $deriver = "" if $deriver eq "unknown-deriver";
+    my $baseName = basename $storePath;
 
-    my $narHash = `$Nix::Config::binDir/nix-store --query --hash '$storePath'`;
-    die "cannot query hash for `$storePath'" if $? != 0;
-    chomp $narHash;
+    # Get info about the store path.
+    my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($storePath, 1);
 
     # In some exceptional cases (such as VM tests that use the Nix
     # store of the host), the database doesn't contain the hash.  So
     # compute it.
     if ($narHash =~ /^sha256:0*$/) {
-        $narHash = `$Nix::Config::binDir/nix-hash --type sha256 --base32 '$storePath'`;
-        die "cannot hash `$storePath'" if $? != 0;
+        my $nar = "$tmpDir/nar";
+        system("$Nix::Config::binDir/nix-store --dump $storePath > $nar") == 0
+            or die "cannot dump $storePath\n";
+        $narHash = `$Nix::Config::binDir/nix-hash --type sha256 --base32 --flat $nar`;
+        die "cannot hash `$nar'" if $? != 0;
         chomp $narHash;
         $narHash = "sha256:$narHash";
+        $narSize = stat("$nar")->size;
+        unlink $nar or die;
     }
 
-    my $narSize = `$Nix::Config::binDir/nix-store --query --size '$storePath'`;
-    die "cannot query size for `$storePath'" if $? != 0;
-    chomp $narSize;
-
-    my $url;
-    if ($localCopy) {
-        $url = "$targetArchivesUrl/$narName";
-    } else {
-        $url = "$archivesGetURL/$narName";
-    }
-    $narFiles{$storePath} = [
-        { url => $url
-        , hash => "$hashAlgo:$narbz2Hash"
-        , size => $narbz2Size
-        , narHash => "$narHash"
-        , narSize => $narSize
-        , references => $references
-        , deriver => $deriver
-        }
-    ];
-}
-
-writeManifest $manifest, \%narFiles, \%patches;
-
-
-sub copyFile {
-    my $src = shift;
-    my $dst = shift;
-    my $tmp = "$dst.tmp.$$";
-    system("@coreutils@/cp", $src, $tmp) == 0 or die "cannot copy file";
-    rename($tmp, $dst) or die "cannot rename file: $!";
-}
-
+    $totalNarSize += $narSize;
+    
+    # Get info about the compressed NAR.
+    open HASH, "$narDir/nar-compressed-hash" or die "cannot open nar-compressed-hash";
+    my $compressedHash = <HASH>;
+    chomp $compressedHash;
+    $compressedHash =~ /^[0-9a-z]+$/ or die "invalid hash";
+    close HASH;
 
-# Upload/copy the archives.
-print STDERR "uploading/copying archives...\n";
+    my $narName = "$compressedHash.nar." . ($compressionType eq "xz" ? "xz" : "bz2");
 
-sub archiveExists {
-    my $name = shift;
-    print STDERR "  HEAD on $archivesGetURL/$name\n";
-    return system("$curl --head $archivesGetURL/$name > /dev/null") == 0;
-}
+    my $narFile = "$narDir/$narName";
+    (-f $narFile) or die "NAR file for $storePath not found";
 
-foreach my $narArchive (@narArchives) {
+    my $compressedSize = stat($narFile)->size;    
+    $totalCompressedSize += $compressedSize;
 
-    $narArchive =~ /\/([^\/]*)$/;
-    my $basename = $1;
+    printf STDERR "%s [%.2f MiB, %.1f%%]\n", $storePath,
+        $compressedSize / (1024 * 1024), $compressedSize / $narSize * 100;
 
+    # Upload the compressed NAR.
     if ($localCopy) {
-        # Since nix-push creates $dst atomically, if it exists we
-        # don't have to copy again.
-        my $dst = "$localArchivesDir/$basename";
-        if (! -f "$localArchivesDir/$basename") {
-            print STDERR "  $narArchive\n";
-            copyFile $narArchive, $dst;
+        my $dst = "$localArchivesDir/$narName";
+        if (! -f $dst) {
+            my $tmp = "$localArchivesDir/.tmp.$$.$narName";
+            copy($narFile, $tmp) or die "cannot copy $narFile to $tmp: $!\n";
+            rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
         }
+    } else {
+        die "unimplemented";
+        #if (!archiveExists("$basename")) {
+        #    system("$curl --show-error --upload-file " .
+        #           "'$narArchive' '$archivesPutURL/$basename' > /dev/null") == 0 or
+        #           die "curl failed on $narArchive: $?";
+        #}
     }
-    else {
-        if (!archiveExists("$basename")) {
-            print STDERR "  $narArchive\n";
-            system("$curl --show-error --upload-file " .
-                   "'$narArchive' '$archivesPutURL/$basename' > /dev/null") == 0 or
-                   die "curl failed on $narArchive: $?";
+
+    # Upload the info file.
+    my $info;
+    $info .= "StorePath: $storePath\n";
+    $info .= "URL: $narName\n";
+    $info .= "Compression: $compressionType\n";
+    $info .= "FileHash: sha256:$compressedHash\n";
+    $info .= "FileSize: $compressedSize\n";
+    $info .= "NarHash: $narHash\n";
+    $info .= "NarSize: $narSize\n";
+    $info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
+    if (defined $deriver) {
+        $info .= "Deriver: " . basename $deriver . "\n";
+        if (isValidPath($deriver)) {
+            my $drv = derivationFromPath($deriver);
+            $info .= "System: $drv->{platform}\n";
         }
     }
-}
-
 
-# Upload the manifest.
-print STDERR "uploading manifest...\n";
-if ($localCopy) {
-    copyFile $manifest, $localManifestFile;
-    copyFile "$manifest.bz2", "$localManifestFile.bz2";
-} else {
-    system("$curl --show-error --upload-file " .
-           "'$manifest' '$manifestPutURL' > /dev/null") == 0 or
-           die "curl failed on $manifest: $?";
-    system("$curl --show-error --upload-file " .
-           "'$manifest'.bz2 '$manifestPutURL'.bz2 > /dev/null") == 0 or
-           die "curl failed on $manifest: $?";
+    my $pathHash = substr(basename($storePath), 0, 32);
+    
+    if ($localCopy) {
+        my $dst = "$localArchivesDir/$pathHash.narinfo";
+        if ($force || ! -f $dst) {
+            my $tmp = "$localArchivesDir/.tmp.$$.$pathHash.narinfo";
+            open INFO, ">$tmp" or die;
+            print INFO "$info" or die;
+            close INFO or die;
+            rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
+        }
+    } else {
+        die "unimplemented";
+    }
 }
+
+printf STDERR "total compressed size %.2f MiB, %.1f%%\n",
+    $totalCompressedSize / (1024 * 1024), $totalCompressedSize / $totalNarSize * 100;
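
With the manifest machinery gone, populating a local binary cache is a single command; for example (directory and package are illustrative):

    nix-push --copy /var/www/binary-cache /nix/store/<hash>-hello-2.8

This writes one <compressed-hash>.nar.xz and one <hash-part>.narinfo per path in the closure into the target directory; --bzip2 selects bzip2 compression instead of xz, and --force overwrites already existing .narinfo files.
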
diff --git a/src/libexpr/get-drvs.hh b/src/libexpr/get-drvs.hh
index 879dc8dbb45d..25d8baa559b2 100644
--- a/src/libexpr/get-drvs.hh
+++ b/src/libexpr/get-drvs.hh
@@ -31,6 +31,8 @@ private:
 
     bool metaInfoRead;
     MetaInfo meta;
+
+    bool failed; // set if we get an AssertionError
     
 public:
     string name;
@@ -40,7 +42,7 @@ public:
     /* !!! make this private */
     Bindings * attrs;
 
-    DrvInfo() : metaInfoRead(false), attrs(0) { };
+    DrvInfo() : metaInfoRead(false), failed(false), attrs(0) { };
 
     string queryDrvPath(EvalState & state) const;
     string queryOutPath(EvalState & state) const;
@@ -58,6 +60,9 @@ public:
     }
 
     void setMetaInfo(const MetaInfo & meta);
+
+    void setFailed() { failed = true; };
+    bool hasFailed() { return failed; };
 };
 
 
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 0d4efc47e6d6..5c011c43e31c 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -51,6 +51,12 @@ static void prim_import(EvalState & state, Value * * args, Value & v)
                 % path % ctx);
         if (isDerivation(ctx))
             try {
+                /* For performance, prefetch all substitute info. */
+                PathSet willBuild, willSubstitute, unknown;
+                unsigned long long downloadSize, narSize;
+                queryMissing(*store, singleton<PathSet>(ctx),
+                    willBuild, willSubstitute, unknown, downloadSize, narSize);
+                  
                 /* !!! If using a substitute, we only need to fetch
                    the selected output of this derivation. */
                 store->buildPaths(singleton<PathSet>(ctx));
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 290635695e05..c57a63db69dd 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -94,7 +94,7 @@ typedef map<Path, WeakGoalPtr> WeakGoalMap;
 class Goal : public boost::enable_shared_from_this<Goal>
 {
 public:
-    typedef enum {ecBusy, ecSuccess, ecFailed} ExitCode;
+    typedef enum {ecBusy, ecSuccess, ecFailed, ecNoSubstituters} ExitCode;
     
 protected:
     
@@ -111,6 +111,10 @@ protected:
     /* Number of goals we are/were waiting for that have failed. */
     unsigned int nrFailed;
 
+    /* Number of substitution goals we are/were waiting for that
+       failed because there are no substituters. */
+    unsigned int nrNoSubstituters;
+
     /* Name of this goal for debugging purposes. */
     string name;
 
@@ -119,7 +123,7 @@ protected:
 
     Goal(Worker & worker) : worker(worker)
     {
-        nrFailed = 0;
+        nrFailed = nrNoSubstituters = 0;
         exitCode = ecBusy;
     }
 
@@ -306,7 +310,9 @@ void Goal::waiteeDone(GoalPtr waitee, ExitCode result)
     trace(format("waitee `%1%' done; %2% left") %
         waitee->name % waitees.size());
     
-    if (result == ecFailed) ++nrFailed;
+    if (result == ecFailed || result == ecNoSubstituters) ++nrFailed;
+
+    if (result == ecNoSubstituters) ++nrNoSubstituters;
     
     if (waitees.empty() || (result == ecFailed && !keepGoing)) {
 
@@ -330,7 +336,7 @@ void Goal::amDone(ExitCode result)
 {
     trace("done");
     assert(exitCode == ecBusy);
-    assert(result == ecSuccess || result == ecFailed);
+    assert(result == ecSuccess || result == ecFailed || result == ecNoSubstituters);
     exitCode = result;
     foreach (WeakGoals::iterator, i, waiters) {
         GoalPtr goal = i->lock();
@@ -736,6 +742,8 @@ HookInstance::~HookInstance()
 
 typedef enum {rpAccept, rpDecline, rpPostpone} HookReply;
 
+class SubstitutionGoal;
+
 class DerivationGoal : public Goal
 {
 private:
@@ -986,10 +994,8 @@ void DerivationGoal::haveDerivation()
     /* We are first going to try to create the invalid output paths
        through substitutes.  If that doesn't work, we'll build
        them. */
-    foreach (PathSet::iterator, i, invalidOutputs)
-        /* Don't bother creating a substitution goal if there are no
-           substitutes. */
-        if (queryBoolSetting("build-use-substitutes", true) && worker.store.hasSubstitutes(*i))
+    if (queryBoolSetting("build-use-substitutes", true))
+        foreach (PathSet::iterator, i, invalidOutputs)
             addWaitee(worker.makeSubstitutionGoal(*i));
     
     if (waitees.empty()) /* to prevent hang (no wake-up event) */
@@ -1003,10 +1009,10 @@ void DerivationGoal::outputsSubstituted()
 {
     trace("all outputs substituted (maybe)");
 
-    if (nrFailed > 0 && !tryFallback)
+    if (nrFailed > 0 && nrFailed > nrNoSubstituters && !tryFallback)
         throw Error(format("some substitutes for the outputs of derivation `%1%' failed; try `--fallback'") % drvPath);
 
-    nrFailed = 0;
+    nrFailed = nrNoSubstituters = 0;
 
     if (checkPathValidity(false).size() == 0) {
         amDone(ecSuccess);
@@ -2261,6 +2267,9 @@ private:
     /* The current substituter. */
     Path sub;
 
+    /* Whether any substituter can realise this path */
+    bool hasSubstitute;
+
     /* Path info returned by the substituter's query info operation. */
     SubstitutablePathInfo info;
 
@@ -2302,6 +2311,7 @@ public:
 
 SubstitutionGoal::SubstitutionGoal(const Path & storePath, Worker & worker)
     : Goal(worker)
+    , hasSubstitute(false)
 {
     this->storePath = storePath;
     state = &SubstitutionGoal::init;
@@ -2365,17 +2375,23 @@ void SubstitutionGoal::tryNext()
         /* None left.  Terminate this goal and let someone else deal
            with it. */
         debug(format("path `%1%' is required, but there is no substituter that can build it") % storePath);
-        amDone(ecFailed);
+        /* Hack: don't indicate failure if there were no substituters.
+           In that case the calling derivation should just do a
+           build. */
+        amDone(hasSubstitute ? ecFailed : ecNoSubstituters);
         return;
     }
 
     sub = subs.front();
     subs.pop_front();
 
-    if (!worker.store.querySubstitutablePathInfo(sub, storePath, info)) {
-        tryNext();
-        return;
-    }
+    SubstitutablePathInfos infos;
+    PathSet dummy(singleton<PathSet>(storePath));
+    worker.store.querySubstitutablePathInfos(sub, dummy, infos);
+    SubstitutablePathInfos::iterator k = infos.find(storePath);
+    if (k == infos.end()) { tryNext(); return; }
+    info = k->second;
+    hasSubstitute = true;
 
     /* To maintain the closure invariant, we first have to realise the
        paths referenced by this one. */
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index 5c22f1406649..9636bf49d987 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -157,6 +157,7 @@ void setDefaultsFromEnvironment()
     if (subs == "default") {
         substituters.push_back(nixLibexecDir + "/nix/substituters/copy-from-other-stores.pl");
         substituters.push_back(nixLibexecDir + "/nix/substituters/download-using-manifests.pl");
+        substituters.push_back(nixLibexecDir + "/nix/substituters/download-from-binary-cache.pl");
     } else
         substituters = tokenizeString(subs, ":");
 
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 05b2b9c6e542..ebfcc946716a 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -756,7 +756,16 @@ bool LocalStore::isValidPath(const Path & path)
 }
 
 
-PathSet LocalStore::queryValidPaths()
+PathSet LocalStore::queryValidPaths(const PathSet & paths)
+{
+    PathSet res;
+    foreach (PathSet::const_iterator, i, paths)
+        if (isValidPath(*i)) res.insert(*i);
+    return res;
+}
+
+
+PathSet LocalStore::queryAllValidPaths()
 {
     SQLiteStmt stmt;
     stmt.create(db, "select path from ValidPaths");
@@ -955,50 +964,66 @@ template<class T> T getIntLine(int fd)
 }
 
 
-bool LocalStore::hasSubstitutes(const Path & path)
+PathSet LocalStore::querySubstitutablePaths(const PathSet & paths)
 {
+    PathSet res;
     foreach (Paths::iterator, i, substituters) {
+        if (res.size() == paths.size()) break;
         RunningSubstituter & run(runningSubstituters[*i]);
         startSubstituter(*i, run);
-        writeLine(run.to, "have\n" + path);
-        if (getIntLine<int>(run.from)) return true;
+        string s = "have ";
+        foreach (PathSet::const_iterator, i, paths)
+            if (res.find(*i) == res.end()) { s += *i; s += " "; }
+        writeLine(run.to, s);
+        while (true) {
+            Path path = readLine(run.from);
+            if (path == "") break;
+            res.insert(path);
+        }
     }
-
-    return false;
+    return res;
 }
 
 
-bool LocalStore::querySubstitutablePathInfo(const Path & substituter,
-    const Path & path, SubstitutablePathInfo & info)
+void LocalStore::querySubstitutablePathInfos(const Path & substituter,
+    PathSet & paths, SubstitutablePathInfos & infos)
 {
     RunningSubstituter & run(runningSubstituters[substituter]);
     startSubstituter(substituter, run);
 
-    writeLine(run.to, "info\n" + path);
+    string s = "info ";
+    foreach (PathSet::const_iterator, i, paths)
+        if (infos.find(*i) == infos.end()) { s += *i; s += " "; }
+    writeLine(run.to, s);
 
-    if (!getIntLine<int>(run.from)) return false;
-    
-    info.deriver = readLine(run.from);
-    if (info.deriver != "") assertStorePath(info.deriver);
-    int nrRefs = getIntLine<int>(run.from);
-    while (nrRefs--) {
-        Path p = readLine(run.from);
-        assertStorePath(p);
-        info.references.insert(p);
+    while (true) {
+        Path path = readLine(run.from);
+        if (path == "") break;
+        assert(paths.find(path) != paths.end());
+        paths.erase(path);
+        SubstitutablePathInfo & info(infos[path]);
+        info.deriver = readLine(run.from);
+        if (info.deriver != "") assertStorePath(info.deriver);
+        int nrRefs = getIntLine<int>(run.from);
+        while (nrRefs--) {
+            Path p = readLine(run.from);
+            assertStorePath(p);
+            info.references.insert(p);
+        }
+        info.downloadSize = getIntLine<long long>(run.from);
+        info.narSize = getIntLine<long long>(run.from);
     }
-    info.downloadSize = getIntLine<long long>(run.from);
-    info.narSize = getIntLine<long long>(run.from);
-    
-    return true;
 }
 
 
-bool LocalStore::querySubstitutablePathInfo(const Path & path,
-    SubstitutablePathInfo & info)
+void LocalStore::querySubstitutablePathInfos(const PathSet & paths,
+    SubstitutablePathInfos & infos)
 {
-    foreach (Paths::iterator, i, substituters)
-        if (querySubstitutablePathInfo(*i, path, info)) return true;
-    return false;
+    PathSet todo = paths;
+    foreach (Paths::iterator, i, substituters) {
+        if (todo.empty()) break;
+        querySubstitutablePathInfos(*i, todo, infos);
+    }
 }
 
 
@@ -1144,7 +1169,7 @@ Path LocalStore::addToStore(const Path & _srcPath,
        method for very large paths, but `copyPath' is mainly used for
        small files. */
     StringSink sink;
-    if (recursive) 
+    if (recursive)
         dumpPath(srcPath, sink, filter);
     else
         sink.s = readFile(srcPath);
@@ -1479,7 +1504,7 @@ void LocalStore::verifyStore(bool checkContents)
     /* Check whether all valid paths actually exist. */
     printMsg(lvlInfo, "checking path existence...");
 
-    PathSet validPaths2 = queryValidPaths(), validPaths, done;
+    PathSet validPaths2 = queryAllValidPaths(), validPaths, done;
 
     foreach (PathSet::iterator, i, validPaths2)
         verifyPath(*i, store, done, validPaths);
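
The rewritten LocalStore methods above batch the substituter line protocol: instead of one "have\n<path>" or "info\n<path>" request per path, the store now writes a single space-separated "have <p1> <p2> ..." (or "info ...") line and reads answers until an empty line. The standalone sketch below (standard C++ only; the availability test is a made-up stand-in, not part of the patch) shows the shape of the substituter side of that "have" exchange:

    // Sketch of a substituter answering the batched "have" request: reply
    // with every requested path that is available, then an empty line as
    // the terminator expected by LocalStore::querySubstitutablePaths().
    #include <iostream>
    #include <sstream>
    #include <string>

    // Stand-in for a real availability check (e.g. a lookup in a manifest
    // or a binary cache); purely illustrative.
    static bool haveSubstitute(const std::string & path)
    {
        return path.find("-hello-") != std::string::npos;
    }

    int main()
    {
        std::string line;
        while (std::getline(std::cin, line)) {
            std::istringstream req(line);
            std::string cmd;
            req >> cmd;
            if (cmd == "have") {
                std::string path;
                while (req >> path)
                    if (haveSubstitute(path)) std::cout << path << "\n";
                std::cout << "\n" << std::flush;   // empty line ends the reply
            }
        }
        return 0;
    }
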
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 50910f353ad1..15dff1d02052 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -100,7 +100,9 @@ public:
     
     bool isValidPath(const Path & path);
 
-    PathSet queryValidPaths();
+    PathSet queryValidPaths(const PathSet & paths);
+    
+    PathSet queryAllValidPaths();
     
     ValidPathInfo queryPathInfo(const Path & path);
 
@@ -124,15 +126,13 @@ public:
     
     Path queryPathFromHashPart(const string & hashPart);
     
-    PathSet querySubstitutablePaths();
-    
-    bool hasSubstitutes(const Path & path);
+    PathSet querySubstitutablePaths(const PathSet & paths);
 
-    bool querySubstitutablePathInfo(const Path & path,
-        SubstitutablePathInfo & info);
+    void querySubstitutablePathInfos(const Path & substituter,
+        PathSet & paths, SubstitutablePathInfos & infos);
     
-    bool querySubstitutablePathInfo(const Path & substituter,
-        const Path & path, SubstitutablePathInfo & info);
+    void querySubstitutablePathInfos(const PathSet & paths,
+        SubstitutablePathInfos & infos);
     
     Path addToStore(const Path & srcPath,
         bool recursive = true, HashType hashAlgo = htSHA256,
diff --git a/src/libstore/misc.cc b/src/libstore/misc.cc
index 093499936349..aa5f6ff727c9 100644
--- a/src/libstore/misc.cc
+++ b/src/libstore/misc.cc
@@ -55,45 +55,97 @@ void queryMissing(StoreAPI & store, const PathSet & targets,
     
     PathSet todo(targets.begin(), targets.end()), done;
 
+    bool useSubstitutes = queryBoolSetting("build-use-substitutes", true);
+
+    /* Getting substitute info has high latency when using the binary
+       cache substituter.  Thus it's essential to do substitute
+       queries in parallel as much as possible.  To accomplish this
+       we do the following:
+
+       - For all paths still to be processed (‘todo’), we add all
+         paths for which we need info to the set ‘query’.  For an
+         unbuilt derivation this is the output paths; otherwise, it's
+         the path itself.
+
+       - We get info about all paths in ‘query’ in parallel.
+
+       - We process the results and add new items to ‘todo’ if
+         necessary.  E.g. if a path is substitutable, then we need to
+         get info on its references.
+
+       - Repeat until ‘todo’ is empty.
+    */
+
     while (!todo.empty()) {
-        Path p = *(todo.begin());
-        todo.erase(p);
-        if (done.find(p) != done.end()) continue;
-        done.insert(p);
-
-        if (isDerivation(p)) {
-            if (!store.isValidPath(p)) {
-                unknown.insert(p);
-                continue;
+              
+        PathSet query, todoDrv, todoNonDrv;
+
+        foreach (PathSet::iterator, i, todo) {
+            if (done.find(*i) != done.end()) continue;
+            done.insert(*i);
+
+            if (isDerivation(*i)) {
+                if (!store.isValidPath(*i)) {
+                    // FIXME: we could try to substitute p.
+                    unknown.insert(*i);
+                    continue;
+                }
+                Derivation drv = derivationFromPath(store, *i);
+
+                PathSet invalid;
+                foreach (DerivationOutputs::iterator, j, drv.outputs)
+                    if (!store.isValidPath(j->second.path)) invalid.insert(j->second.path);
+                if (invalid.empty()) continue;
+                
+                todoDrv.insert(*i);
+                if (useSubstitutes) query.insert(invalid.begin(), invalid.end());
+            }
+
+            else {
+                if (store.isValidPath(*i)) continue;
+                query.insert(*i);
+                todoNonDrv.insert(*i);
             }
-            Derivation drv = derivationFromPath(store, p);
+        }
+
+        todo.clear();
+        
+        SubstitutablePathInfos infos;
+        store.querySubstitutablePathInfos(query, infos);
+
+        foreach (PathSet::iterator, i, todoDrv) {
+            // FIXME: cache this
+            Derivation drv = derivationFromPath(store, *i);
 
             bool mustBuild = false;
-            foreach (DerivationOutputs::iterator, i, drv.outputs)
-                if (!store.isValidPath(i->second.path) &&
-                    !(queryBoolSetting("build-use-substitutes", true) && store.hasSubstitutes(i->second.path)))
-                    mustBuild = true;
+            if (useSubstitutes) {
+                foreach (DerivationOutputs::iterator, j, drv.outputs)
+                    if (!store.isValidPath(j->second.path) &&
+                        infos.find(j->second.path) == infos.end())
+                        mustBuild = true;
+            } else
+                mustBuild = true;
 
             if (mustBuild) {
-                willBuild.insert(p);
+                willBuild.insert(*i);
                 todo.insert(drv.inputSrcs.begin(), drv.inputSrcs.end());
                 foreach (DerivationInputs::iterator, i, drv.inputDrvs)
                     todo.insert(i->first);
-            } else 
+            } else
                 foreach (DerivationOutputs::iterator, i, drv.outputs)
-                    todo.insert(i->second.path);
+                    todoNonDrv.insert(i->second.path);
         }
-
-        else {
-            if (store.isValidPath(p)) continue;
-            SubstitutablePathInfo info;
-            if (store.querySubstitutablePathInfo(p, info)) {
-                willSubstitute.insert(p);
-                downloadSize += info.downloadSize;
-                narSize += info.narSize;
-                todo.insert(info.references.begin(), info.references.end());
+        
+        foreach (PathSet::iterator, i, todoNonDrv) {
+            done.insert(*i);
+            SubstitutablePathInfos::iterator info = infos.find(*i);
+            if (info != infos.end()) {
+                willSubstitute.insert(*i);
+                downloadSize += info->second.downloadSize;
+                narSize += info->second.narSize;
+                todo.insert(info->second.references.begin(), info->second.references.end());
             } else
-                unknown.insert(p);
+                unknown.insert(*i);
         }
     }
 }
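
The comment block added to queryMissing above describes the batching strategy: gather a whole frontier of still-unknown paths, resolve it with one bulk substituter query, and only then expand the newly discovered references. A minimal standalone illustration of that traversal pattern (standard C++ only; the path names and the reference table are invented for the example and are unrelated to real store paths):

    // Toy version of the batched traversal: one bulk lookup per level of
    // the reference graph instead of one lookup per path.
    #include <iostream>
    #include <map>
    #include <set>
    #include <string>

    typedef std::set<std::string> PathSet;

    // Stand-in for store.querySubstitutablePathInfos(): answers a whole
    // batch at once; paths it knows nothing about are simply omitted.
    static std::map<std::string, PathSet> bulkQuery(const PathSet & query)
    {
        std::map<std::string, PathSet> table;
        table["a"].insert("b"); table["a"].insert("c");
        table["b"].insert("c");
        table["c"];  // no references
        std::map<std::string, PathSet> res;
        for (PathSet::const_iterator i = query.begin(); i != query.end(); ++i)
            if (table.count(*i)) res[*i] = table[*i];
        return res;
    }

    int main()
    {
        PathSet todo, done, substitutable;
        todo.insert("a");
        while (!todo.empty()) {
            PathSet query;
            for (PathSet::iterator i = todo.begin(); i != todo.end(); ++i)
                if (done.insert(*i).second) query.insert(*i);
            todo.clear();
            std::map<std::string, PathSet> infos = bulkQuery(query);  // one round trip per level
            for (std::map<std::string, PathSet>::iterator i = infos.begin(); i != infos.end(); ++i) {
                substitutable.insert(i->first);
                todo.insert(i->second.begin(), i->second.end());
            }
        }
        for (PathSet::iterator i = substitutable.begin(); i != substitutable.end(); ++i)
            std::cout << *i << "\n";   // prints: a b c
        return 0;
    }
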
diff --git a/src/libstore/optimise-store.cc b/src/libstore/optimise-store.cc
index 486538bd29f6..84a72604bba2 100644
--- a/src/libstore/optimise-store.cc
+++ b/src/libstore/optimise-store.cc
@@ -179,7 +179,7 @@ void LocalStore::optimisePath_(OptimiseStats & stats, const Path & path)
 
 void LocalStore::optimiseStore(OptimiseStats & stats)
 {
-    PathSet paths = queryValidPaths();
+    PathSet paths = queryAllValidPaths();
 
     foreach (PathSet::iterator, i, paths) {
         addTempRoot(*i);
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index cbb70b2fd726..35530acab1af 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -217,42 +217,96 @@ bool RemoteStore::isValidPath(const Path & path)
 }
 
 
-PathSet RemoteStore::queryValidPaths()
+PathSet RemoteStore::queryValidPaths(const PathSet & paths)
 {
     openConnection();
-    writeInt(wopQueryValidPaths, to);
+    if (GET_PROTOCOL_MINOR(daemonVersion) < 12) {
+        PathSet res;
+        foreach (PathSet::const_iterator, i, paths)
+            if (isValidPath(*i)) res.insert(*i);
+        return res;
+    } else {
+        writeInt(wopQueryValidPaths, to);
+        writeStrings(paths, to);
+        processStderr();
+        return readStorePaths<PathSet>(from);
+    }
+}
+
+
+PathSet RemoteStore::queryAllValidPaths()
+{
+    openConnection();
+    writeInt(wopQueryAllValidPaths, to);
     processStderr();
     return readStorePaths<PathSet>(from);
 }
 
 
-bool RemoteStore::hasSubstitutes(const Path & path)
+PathSet RemoteStore::querySubstitutablePaths(const PathSet & paths)
 {
     openConnection();
-    writeInt(wopHasSubstitutes, to);
-    writeString(path, to);
-    processStderr();
-    unsigned int reply = readInt(from);
-    return reply != 0;
+    if (GET_PROTOCOL_MINOR(daemonVersion) < 12) {
+        PathSet res;
+        foreach (PathSet::const_iterator, i, paths) {
+            writeInt(wopHasSubstitutes, to);
+            writeString(*i, to);
+            processStderr();
+            if (readInt(from)) res.insert(*i);
+        }
+        return res;
+    } else {
+        writeInt(wopQuerySubstitutablePaths, to);
+        writeStrings(paths, to);
+        processStderr();
+        return readStorePaths<PathSet>(from);
+    }
 }
 
 
-bool RemoteStore::querySubstitutablePathInfo(const Path & path,
-    SubstitutablePathInfo & info)
+void RemoteStore::querySubstitutablePathInfos(const PathSet & paths,
+    SubstitutablePathInfos & infos)
 {
+    if (paths.empty()) return;
+
     openConnection();
-    if (GET_PROTOCOL_MINOR(daemonVersion) < 3) return false;
-    writeInt(wopQuerySubstitutablePathInfo, to);
-    writeString(path, to);
-    processStderr();
-    unsigned int reply = readInt(from);
-    if (reply == 0) return false;
-    info.deriver = readString(from);
-    if (info.deriver != "") assertStorePath(info.deriver);
-    info.references = readStorePaths<PathSet>(from);
-    info.downloadSize = readLongLong(from);
-    info.narSize = GET_PROTOCOL_MINOR(daemonVersion) >= 7 ? readLongLong(from) : 0;
-    return true;
+    
+    if (GET_PROTOCOL_MINOR(daemonVersion) < 3) return;
+    
+    if (GET_PROTOCOL_MINOR(daemonVersion) < 12) {
+        
+        foreach (PathSet::const_iterator, i, paths) {
+            SubstitutablePathInfo info;
+            writeInt(wopQuerySubstitutablePathInfo, to);
+            writeString(*i, to);
+            processStderr();
+            unsigned int reply = readInt(from);
+            if (reply == 0) continue;
+            info.deriver = readString(from);
+            if (info.deriver != "") assertStorePath(info.deriver);
+            info.references = readStorePaths<PathSet>(from);
+            info.downloadSize = readLongLong(from);
+            info.narSize = GET_PROTOCOL_MINOR(daemonVersion) >= 7 ? readLongLong(from) : 0;
+            infos[*i] = info;
+        }
+        
+    } else {
+        
+        writeInt(wopQuerySubstitutablePathInfos, to);
+        writeStrings(paths, to);
+        processStderr();
+        unsigned int count = readInt(from);
+        for (unsigned int n = 0; n < count; n++) {
+            Path path = readStorePath(from);
+            SubstitutablePathInfo & info(infos[path]);
+            info.deriver = readString(from);
+            if (info.deriver != "") assertStorePath(info.deriver);
+            info.references = readStorePaths<PathSet>(from);
+            info.downloadSize = readLongLong(from);
+            info.narSize = readLongLong(from);
+        }
+        
+    }
 }
 
 
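
RemoteStore keeps working against older daemons: below protocol minor version 12 each batched query is emulated with a loop of the pre-existing single-path requests; otherwise one of the new opcodes is sent and a single bulk reply is read. As a compact summary of that bulk reply, the declaration sketch below lists the fields in the order the client loop above decodes them (the struct name is made up for illustration; the real code fills a SubstitutablePathInfo keyed by store path):

    #include <set>
    #include <string>

    // One entry of the wopQuerySubstitutablePathInfos reply, field order only.
    struct SubstitutableReplyEntry
    {
        std::string deriver;                   // readString(); "" if unknown
        std::set<std::string> references;      // readStorePaths<PathSet>()
        unsigned long long downloadSize;       // readLongLong()
        unsigned long long narSize;            // readLongLong()
    };
    // The reply starts with a count (readInt), followed by `count' such
    // entries, each preceded by the store path (readStorePath) that serves
    // as the key in the resulting `infos' map.
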
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index c57b49ce15e4..ae4c48dad6ab 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -26,7 +26,9 @@ public:
     
     bool isValidPath(const Path & path);
 
-    PathSet queryValidPaths();
+    PathSet queryValidPaths(const PathSet & paths);
+    
+    PathSet queryAllValidPaths();
     
     ValidPathInfo queryPathInfo(const Path & path);
 
@@ -44,10 +46,10 @@ public:
 
     Path queryPathFromHashPart(const string & hashPart);
     
-    bool hasSubstitutes(const Path & path);
+    PathSet querySubstitutablePaths(const PathSet & paths);
     
-    bool querySubstitutablePathInfo(const Path & path,
-        SubstitutablePathInfo & info);
+    void querySubstitutablePathInfos(const PathSet & paths,
+        SubstitutablePathInfos & infos);
     
     Path addToStore(const Path & srcPath,
         bool recursive = true, HashType hashAlgo = htSHA256,
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 9ba67852efec..324d802dc450 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -85,6 +85,8 @@ struct SubstitutablePathInfo
     unsigned long long narSize; /* 0 = unknown */
 };
 
+typedef std::map<Path, SubstitutablePathInfo> SubstitutablePathInfos;
+
 
 struct ValidPathInfo 
 {
@@ -107,20 +109,23 @@ public:
 
     virtual ~StoreAPI() { }
 
-    /* Checks whether a path is valid. */ 
+    /* Check whether a path is valid. */ 
     virtual bool isValidPath(const Path & path) = 0;
 
-    /* Query the set of valid paths. */
-    virtual PathSet queryValidPaths() = 0;
+    /* Query which of the given paths is valid. */
+    virtual PathSet queryValidPaths(const PathSet & paths) = 0;
+
+    /* Query the set of all valid paths. */
+    virtual PathSet queryAllValidPaths() = 0;
 
     /* Query information about a valid path. */
     virtual ValidPathInfo queryPathInfo(const Path & path) = 0;
 
-    /* Queries the hash of a valid path. */ 
+    /* Query the hash of a valid path. */ 
     virtual Hash queryPathHash(const Path & path) = 0;
 
-    /* Queries the set of outgoing FS references for a store path.
-       The result is not cleared. */
+    /* Query the set of outgoing FS references for a store path.  The
+       result is not cleared. */
     virtual void queryReferences(const Path & path,
         PathSet & references) = 0;
 
@@ -143,13 +148,14 @@ public:
        path, or "" if the path doesn't exist. */
     virtual Path queryPathFromHashPart(const string & hashPart) = 0;
     
-    /* Query whether a path has substitutes. */
-    virtual bool hasSubstitutes(const Path & path) = 0;
-
-    /* Query the references, deriver and download size of a
-       substitutable path. */
-    virtual bool querySubstitutablePathInfo(const Path & path,
-        SubstitutablePathInfo & info) = 0;
+    /* Query which of the given paths have substitutes. */
+    virtual PathSet querySubstitutablePaths(const PathSet & paths) = 0;
+
+    /* Query substitute info (i.e. references, derivers and download
+       sizes) of a set of paths.  If a path does not have substitute
+       info, it's omitted from the resulting ‘infos’ map. */
+    virtual void querySubstitutablePathInfos(const PathSet & paths,
+        SubstitutablePathInfos & infos) = 0;
     
     /* Copy the contents of a path to the store and register the
        validity the resulting path.  The resulting path is returned.
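
The StoreAPI changes above replace the per-path queries with set-valued ones and introduce SubstitutablePathInfos as a map from store path to substitute info. A small usage fragment (not standalone: it assumes a `store' object implementing the interface and a `paths' PathSet already at hand; the size accounting is purely illustrative):

    // Query substitute info for a whole set of paths in one call; paths
    // without substitutes simply don't appear in the resulting map.
    SubstitutablePathInfos infos;
    store->querySubstitutablePathInfos(paths, infos);

    unsigned long long totalDownload = 0;
    foreach (SubstitutablePathInfos::iterator, i, infos)
        totalDownload += i->second.downloadSize;
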
diff --git a/src/libstore/worker-protocol.hh b/src/libstore/worker-protocol.hh
index 501c0b3db5b9..9677a46c2896 100644
--- a/src/libstore/worker-protocol.hh
+++ b/src/libstore/worker-protocol.hh
@@ -6,7 +6,7 @@ namespace nix {
 #define WORKER_MAGIC_1 0x6e697863
 #define WORKER_MAGIC_2 0x6478696f
 
-#define PROTOCOL_VERSION 0x10b
+#define PROTOCOL_VERSION 0x10c
 #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
 #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)
 
@@ -32,13 +32,16 @@ typedef enum {
     wopCollectGarbage = 20,
     wopQuerySubstitutablePathInfo = 21,
     wopQueryDerivationOutputs = 22,
-    wopQueryValidPaths = 23,
+    wopQueryAllValidPaths = 23,
     wopQueryFailedPaths = 24,
     wopClearFailedPaths = 25,
     wopQueryPathInfo = 26,
     wopImportPaths = 27,
     wopQueryDerivationOutputNames = 28,
     wopQueryPathFromHashPart = 29,
+    wopQuerySubstitutablePathInfos = 30,
+    wopQueryValidPaths = 31,
+    wopQuerySubstitutablePaths = 32,
 } WorkerOp;
 
 
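
The protocol version moves from 0x10b to 0x10c, i.e. the minor version goes from 11 to 12, which is exactly the threshold that the GET_PROTOCOL_MINOR(daemonVersion) < 12 checks in remote-store.cc test before using the new batched opcodes. A quick standalone check of that arithmetic:

    // PROTOCOL_VERSION 0x10c has major 0x100 and minor 12; daemons at
    // minor 11 and below still get the per-path fallback code paths.
    #include <cassert>

    #define GET_PROTOCOL_MAJOR(x) ((x) & 0xff00)
    #define GET_PROTOCOL_MINOR(x) ((x) & 0x00ff)

    int main()
    {
        assert(GET_PROTOCOL_MAJOR(0x10c) == 0x100);
        assert(GET_PROTOCOL_MINOR(0x10c) == 12);
        assert(GET_PROTOCOL_MINOR(0x10b) == 11);
        return 0;
    }

The old single-path opcodes are kept in the enum (and handled in nix-worker.cc below), so clients speaking the older protocol keep working against a daemon built from this commit.
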
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 7aa6276e3a0a..f06f23dad523 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -211,9 +211,12 @@ static int comparePriorities(EvalState & state,
 
 static bool isPrebuilt(EvalState & state, const DrvInfo & elem)
 {
+    assert(false);
+#if 0
     return
         store->isValidPath(elem.queryOutPath(state)) ||
         store->hasSubstitutes(elem.queryOutPath(state));
+#endif
 }
 
 
@@ -929,6 +932,22 @@ static void opQuery(Globals & globals,
             installed.insert(i->queryOutPath(globals.state));
     }
 
+
+    /* Query which paths have substitutes. */
+    PathSet validPaths, substitutablePaths;
+    if (printStatus) {
+        PathSet paths;
+        foreach (vector<DrvInfo>::iterator, i, elems2)
+            try {
+                paths.insert(i->queryOutPath(globals.state));
+            } catch (AssertionError & e) {
+                printMsg(lvlTalkative, format("skipping derivation named `%1%' which gives an assertion failure") % i->name);
+                i->setFailed();
+            }
+        validPaths = store->queryValidPaths(paths);
+        substitutablePaths = store->querySubstitutablePaths(paths);
+    }
+
     
     /* Print the desired columns, or XML output. */
     Table table;
@@ -938,6 +957,8 @@ static void opQuery(Globals & globals,
     
     foreach (vector<DrvInfo>::iterator, i, elems2) {
         try {
+            if (i->hasFailed()) continue;
+            
             startNest(nest, lvlDebug, format("outputting query result `%1%'") % i->attrPath);
 
             if (globals.prebuiltOnly && !isPrebuilt(globals.state, *i)) continue;
@@ -949,9 +970,10 @@ static void opQuery(Globals & globals,
             XMLAttrs attrs;
 
             if (printStatus) {
-                bool hasSubs = store->hasSubstitutes(i->queryOutPath(globals.state));
-                bool isInstalled = installed.find(i->queryOutPath(globals.state)) != installed.end();
-                bool isValid = store->isValidPath(i->queryOutPath(globals.state));
+                Path outPath = i->queryOutPath(globals.state);
+                bool hasSubs = substitutablePaths.find(outPath) != substitutablePaths.end();
+                bool isInstalled = installed.find(outPath) != installed.end();
+                bool isValid = validPaths.find(outPath) != validPaths.end();
                 if (xmlOutput) {
                     attrs["installed"] = isInstalled ? "1" : "0";
                     attrs["valid"] = isValid ? "1" : "0";
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 82e08fecf22a..941301d2e7a1 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -465,7 +465,7 @@ static void opDumpDB(Strings opFlags, Strings opArgs)
     if (!opFlags.empty()) throw UsageError("unknown flag");
     if (!opArgs.empty())
         throw UsageError("no arguments expected");
-    PathSet validPaths = store->queryValidPaths();
+    PathSet validPaths = store->queryAllValidPaths();
     foreach (PathSet::iterator, i, validPaths)
         cout << store->makeValidityRegistration(singleton<PathSet>(*i), true, true);
 }
diff --git a/src/nix-worker/nix-worker.cc b/src/nix-worker/nix-worker.cc
index 74a619c71d0a..f2ca0a89233e 100644
--- a/src/nix-worker/nix-worker.cc
+++ b/src/nix-worker/nix-worker.cc
@@ -297,15 +297,33 @@ static void performOp(unsigned int clientVersion,
         break;
     }
 
+    case wopQueryValidPaths: {
+        PathSet paths = readStorePaths<PathSet>(from);
+        startWork();
+        PathSet res = store->queryValidPaths(paths);
+        stopWork();
+        writeStrings(res, to);
+        break;
+    }
+
     case wopHasSubstitutes: {
         Path path = readStorePath(from);
         startWork();
-        bool result = store->hasSubstitutes(path);
+        PathSet res = store->querySubstitutablePaths(singleton<PathSet>(path));
         stopWork();
-        writeInt(result, to);
+        writeInt(res.find(path) != res.end(), to);
         break;
     }
 
+    case wopQuerySubstitutablePaths: {
+        PathSet paths = readStorePaths<PathSet>(from);
+        startWork();
+        PathSet res = store->querySubstitutablePaths(paths);
+        stopWork();
+        writeStrings(res, to);
+        break;
+    }
+        
     case wopQueryPathHash: {
         Path path = readStorePath(from);
         startWork();
@@ -538,23 +556,43 @@ static void performOp(unsigned int clientVersion,
     case wopQuerySubstitutablePathInfo: {
         Path path = absPath(readString(from));
         startWork();
-        SubstitutablePathInfo info;
-        bool res = store->querySubstitutablePathInfo(path, info);
+        SubstitutablePathInfos infos;
+        store->querySubstitutablePathInfos(singleton<PathSet>(path), infos);
         stopWork();
-        writeInt(res ? 1 : 0, to);
-        if (res) {
-            writeString(info.deriver, to);
-            writeStrings(info.references, to);
-            writeLongLong(info.downloadSize, to);
+        SubstitutablePathInfos::iterator i = infos.find(path);
+        if (i == infos.end())
+            writeInt(0, to);
+        else {
+            writeInt(1, to);
+            writeString(i->second.deriver, to);
+            writeStrings(i->second.references, to);
+            writeLongLong(i->second.downloadSize, to);
             if (GET_PROTOCOL_MINOR(clientVersion) >= 7)
-                writeLongLong(info.narSize, to);
+                writeLongLong(i->second.narSize, to);
         }
         break;
     }
             
-    case wopQueryValidPaths: {
+    case wopQuerySubstitutablePathInfos: {
+        PathSet paths = readStorePaths<PathSet>(from);
+        startWork();
+        SubstitutablePathInfos infos;
+        store->querySubstitutablePathInfos(paths, infos);
+        stopWork();
+        writeInt(infos.size(), to);
+        foreach (SubstitutablePathInfos::iterator, i, infos) {
+            writeString(i->first, to);
+            writeString(i->second.deriver, to);
+            writeStrings(i->second.references, to);
+            writeLongLong(i->second.downloadSize, to);
+            writeLongLong(i->second.narSize, to);
+        }
+        break;
+    }
+            
+    case wopQueryAllValidPaths: {
         startWork();
-        PathSet paths = store->queryValidPaths();
+        PathSet paths = store->queryAllValidPaths();
         stopWork();
         writeStrings(paths, to);
         break;
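
In the worker above, the old single-path wopHasSubstitutes request is kept for existing clients but is now answered by wrapping the path in a one-element set, calling the batched querySubstitutablePaths, and testing membership of the result. A standalone sketch of that adapter pattern (standard C++ only; the availability test is a made-up stand-in, not part of the patch):

    // Legacy single-path query implemented on top of a batched one, as in
    // the wopHasSubstitutes case above.
    #include <iostream>
    #include <set>
    #include <string>

    typedef std::set<std::string> PathSet;

    // Stand-in for store->querySubstitutablePaths(); purely illustrative.
    static PathSet querySubstitutablePaths(const PathSet & paths)
    {
        PathSet res;
        for (PathSet::const_iterator i = paths.begin(); i != paths.end(); ++i)
            if (i->find("-firefox-") != std::string::npos) res.insert(*i);
        return res;
    }

    static bool hasSubstitutes(const std::string & path)
    {
        PathSet single;
        single.insert(path);
        PathSet res = querySubstitutablePaths(single);
        return res.find(path) != res.end();
    }

    int main()
    {
        std::cout << hasSubstitutes("/nix/store/aaaa-firefox-10.0") << "\n";  // 1
        std::cout << hasSubstitutes("/nix/store/bbbb-hello-2.8") << "\n";     // 0
        return 0;
    }
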
diff --git a/substitute.mk b/substitute.mk
index eb489c97a509..77c5afc28117 100644
--- a/substitute.mk
+++ b/substitute.mk
@@ -16,6 +16,7 @@
 	 -e "s^@shell\@^$(bash)^g" \
 	 -e "s^@curl\@^$(curl)^g" \
 	 -e "s^@bzip2\@^$(bzip2)^g" \
+	 -e "s^@xz\@^$(xz)^g" \
 	 -e "s^@perl\@^$(perl)^g" \
 	 -e "s^@perlFlags\@^$(perlFlags)^g" \
 	 -e "s^@coreutils\@^$(coreutils)^g" \