-rw-r--r--  .gitignore | 9
-rw-r--r--  Makefile | 3
-rw-r--r--  configure.ac | 17
-rw-r--r--  corepkgs/buildenv.nix | 3
-rw-r--r--  corepkgs/buildenv.pl | 168
-rw-r--r--  corepkgs/config.nix.in | 2
-rw-r--r--  corepkgs/fetchurl.nix | 2
-rw-r--r--  corepkgs/local.mk | 2
-rw-r--r--  corepkgs/nar.nix | 48
-rw-r--r--  doc/manual/advanced-topics/distributed-builds.xml | 8
-rw-r--r--  doc/manual/command-ref/conf-file.xml | 6
-rw-r--r--  doc/manual/command-ref/env-common.xml | 9
-rw-r--r--  doc/manual/command-ref/nix-channel.xml | 4
-rw-r--r--  doc/manual/command-ref/nix-install-package.xml | 208
-rw-r--r--  doc/manual/command-ref/nix-push.xml | 449
-rw-r--r--  doc/manual/command-ref/nix-store.xml | 3
-rw-r--r--  doc/manual/command-ref/utilities.xml | 2
-rw-r--r--  doc/manual/expressions/builtins.xml | 11
-rw-r--r--  doc/manual/expressions/derivations.xml | 2
-rw-r--r--  doc/manual/expressions/language-constructs.xml | 18
-rw-r--r--  doc/manual/expressions/language-values.xml | 17
-rw-r--r--  doc/manual/introduction/quick-start.xml | 12
-rw-r--r--  doc/manual/local.mk | 4
-rw-r--r--  doc/manual/packages/one-click.xml | 37
-rw-r--r--  doc/manual/packages/package-management.xml | 1
-rwxr-xr-x  maintainers/upload-release.pl | 117
-rw-r--r--  misc/docker/Dockerfile | 4
-rw-r--r--  misc/launchd/org.nixos.nix-daemon.plist.in | 5
-rw-r--r--  misc/systemd/nix-daemon.service.in | 1
-rw-r--r--  nix.spec.in | 2
-rw-r--r--  release.nix | 13
-rw-r--r--  scripts/download-from-binary-cache.pl.in | 632
-rw-r--r--  scripts/local.mk | 10
-rwxr-xr-x  scripts/nix-channel.in | 228
-rwxr-xr-x  scripts/nix-install-package.in | 127
-rwxr-xr-x  scripts/nix-push.in | 296
-rwxr-xr-x  scripts/resolve-system-dependencies.pl.in | 122
-rwxr-xr-x  scripts/show-duplication.pl | 73
-rw-r--r--  src/buildenv/buildenv.cc | 186
-rw-r--r--  src/buildenv/local.mk | 9
-rw-r--r--  src/libexpr/eval.cc | 28
-rw-r--r--  src/libexpr/eval.hh | 15
-rw-r--r--  src/libexpr/get-drvs.cc | 2
-rw-r--r--  src/libexpr/json-to-value.cc | 9
-rw-r--r--  src/libexpr/primops.cc | 101
-rw-r--r--  src/libexpr/value-to-json.cc | 55
-rw-r--r--  src/libexpr/value-to-json.hh | 71
-rw-r--r--  src/libexpr/value.hh | 17
-rw-r--r--  src/libmain/shared.cc | 1
-rw-r--r--  src/libstore/binary-cache-store.cc | 2
-rw-r--r--  src/libstore/build.cc | 50
-rw-r--r--  src/libstore/derivations.cc | 7
-rw-r--r--  src/libstore/derivations.hh | 2
-rw-r--r--  src/libstore/download.cc | 68
-rw-r--r--  src/libstore/download.hh | 17
-rw-r--r--  src/libstore/globals.cc | 3
-rw-r--r--  src/libstore/globals.hh | 3
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 3
-rw-r--r--  src/libstore/local-fs-store.cc | 8
-rw-r--r--  src/libstore/local-store.cc | 56
-rw-r--r--  src/libstore/local-store.hh | 4
-rw-r--r--  src/libstore/nar-accessor.cc | 2
-rw-r--r--  src/libstore/nar-info-disk-cache.cc | 13
-rw-r--r--  src/libstore/nar-info.cc | 7
-rw-r--r--  src/libstore/remote-store.cc | 35
-rw-r--r--  src/libstore/schema.sql | 3
-rw-r--r--  src/libstore/sqlite.cc | 13
-rw-r--r--  src/libstore/sqlite.hh | 10
-rw-r--r--  src/libstore/store-api.cc | 60
-rw-r--r--  src/libstore/store-api.hh | 75
-rw-r--r--  src/libutil/json.cc | 176
-rw-r--r--  src/libutil/json.hh | 184
-rw-r--r--  src/nix-channel/local.mk | 7
-rwxr-xr-x  src/nix-channel/nix-channel.cc | 270
-rw-r--r--  src/nix-daemon/nix-daemon.cc | 46
-rw-r--r--  src/nix-env/nix-env.cc | 18
-rw-r--r--  src/nix-store/nix-store.cc | 8
-rw-r--r--  src/nix/installables.cc | 42
-rw-r--r--  src/nix/installables.hh | 12
-rw-r--r--  src/nix/path-info.cc | 99
-rw-r--r--  src/nix/verify.cc | 6
-rw-r--r--  src/resolve-system-dependencies/local.mk | 11
-rw-r--r--  src/resolve-system-dependencies/resolve-system-dependencies.cc | 194
-rw-r--r--  tests/binary-cache.sh | 6
-rw-r--r--  tests/common.sh.in | 7
-rw-r--r--  tests/config.nix | 2
-rw-r--r--  tests/dump-db.sh | 3
-rw-r--r--  tests/init.sh | 5
-rw-r--r--  tests/install-package.sh | 20
-rw-r--r--  tests/lang/eval-okay-partition.exp | 1
-rw-r--r--  tests/lang/eval-okay-partition.nix | 5
-rw-r--r--  tests/local.mk | 7
-rw-r--r--  tests/nix-channel.sh | 2
-rw-r--r--  tests/nix-push.sh | 12
-rw-r--r--  tests/placeholders.sh | 22
-rw-r--r--  tests/referrers.sh | 2
-rw-r--r--  tests/remote-store.sh | 2
-rw-r--r--  tests/repair.sh | 2
-rw-r--r--  tests/user-envs.sh | 5
99 files changed, 1875 insertions, 2911 deletions
diff --git a/.gitignore b/.gitignore
index 2252e512af8c..04dd791964f2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -34,12 +34,9 @@ Makefile.config
 
 # /scripts/
 /scripts/nix-profile.sh
-/scripts/nix-push
 /scripts/nix-switch
 /scripts/nix-collect-garbage
 /scripts/nix-prefetch-url
-/scripts/nix-install-package
-/scripts/nix-channel
 /scripts/nix-copy-closure
 /scripts/NixConfig.pm
 /scripts/NixManifest.pm
@@ -73,9 +70,15 @@ Makefile.config
 # /src/nix-daemon/
 /src/nix-daemon/nix-daemon
 
+# /src/nix-channel/
+/src/nix-channel/nix-channel
+
 # /src/download-via-ssh/
 /src/download-via-ssh/download-via-ssh
 
+# /src/buildenv/
+/src/buildenv/buildenv
+
 # /src/nix-build/
 /src/nix-build/nix-build
 
diff --git a/Makefile b/Makefile
index 52286b1fa673..2ee40b56b1d3 100644
--- a/Makefile
+++ b/Makefile
@@ -12,6 +12,9 @@ makefiles = \
   src/nix-daemon/local.mk \
   src/nix-collect-garbage/local.mk \
   src/nix-prefetch-url/local.mk \
+  src/buildenv/local.mk \
+  src/resolve-system-dependencies/local.mk \
+  src/nix-channel/local.mk \
   src/nix-build/local.mk \
   perl/local.mk \
   scripts/local.mk \
diff --git a/configure.ac b/configure.ac
index a9e6b4313644..1ae0e782d3b5 100644
--- a/configure.ac
+++ b/configure.ac
@@ -246,23 +246,6 @@ AC_MSG_RESULT(yes)
 AC_SUBST(perlFlags)
 
 
-# Check for otool, an optional dependency on Darwin.
-AC_PATH_PROG(otool, otool)
-AC_MSG_CHECKING([that otool works])
-case $host_os in
-  darwin*)
-    if test -z "$otool" || ! $otool --version 2>/dev/null; then
-      AC_MSG_RESULT(no)
-      AC_MSG_ERROR([Can't get version from otool; do you need to install developer tools?])
-    fi
-    AC_MSG_RESULT(yes)
-    ;;
-  *)
-    AC_MSG_RESULT(not needed)
-    ;;
-esac
-
-
 # Whether to build the Perl bindings
 AC_MSG_CHECKING([whether to build the Perl bindings])
 AC_ARG_ENABLE(perl-bindings, AC_HELP_STRING([--enable-perl-bindings],
diff --git a/corepkgs/buildenv.nix b/corepkgs/buildenv.nix
index 70981a752c3c..5e7b40eaa0cb 100644
--- a/corepkgs/buildenv.nix
+++ b/corepkgs/buildenv.nix
@@ -5,8 +5,7 @@ with import <nix/config.nix>;
 derivation {
   name = "user-environment";
   system = builtins.currentSystem;
-  builder = perl;
-  args = [ "-w" ./buildenv.pl ];
+  builder = nixLibexecDir + "/nix/buildenv";
 
   inherit manifest;
 
diff --git a/corepkgs/buildenv.pl b/corepkgs/buildenv.pl
deleted file mode 100644
index dacc53701a01..000000000000
--- a/corepkgs/buildenv.pl
+++ /dev/null
@@ -1,168 +0,0 @@
-use strict;
-use Cwd;
-use IO::Handle;
-use utf8;
-
-STDOUT->autoflush(1);
-
-my $out = $ENV{"out"};
-mkdir "$out", 0755 || die "error creating $out";
-
-
-my $symlinks = 0;
-
-my %priorities;
-
-
-# For each activated package, create symlinks.
-
-sub createLinks {
-    my $srcDir = shift;
-    my $dstDir = shift;
-    my $priority = shift;
-
-    my @srcFiles = glob("$srcDir/*");
-
-    foreach my $srcFile (@srcFiles) {
-        my $baseName = $srcFile;
-        $baseName =~ s/^.*\///g; # strip directory
-        my $dstFile = "$dstDir/$baseName";
-
-        # The files below are special-cased so that they don't show up
-        # in user profiles, either because they are useless, or
-        # because they would cause pointless collisions (e.g., each
-        # Python package brings its own
-        # `$out/lib/pythonX.Y/site-packages/easy-install.pth'.)
-        # Urgh, hacky...
-        if ($srcFile =~ /\/propagated-build-inputs$/ ||
-            $srcFile =~ /\/nix-support$/ ||
-            $srcFile =~ /\/perllocal.pod$/ ||
-            $srcFile =~ /\/info\/dir$/ ||
-            $srcFile =~ /\/log$/)
-        {
-            # Do nothing.
-        }
-
-        elsif (-d $srcFile) {
-
-            lstat $dstFile;
-
-            if (-d _) {
-                createLinks($srcFile, $dstFile, $priority);
-            }
-
-            elsif (-l _) {
-                my $target = readlink $dstFile or die;
-                if (!-d $target) {
-                    die "collision between directory ‘$srcFile’ and non-directory ‘$target’";
-                }
-                unlink $dstFile or die "error unlinking ‘$dstFile’: $!";
-                mkdir $dstFile, 0755 ||
-                    die "error creating directory ‘$dstFile’: $!";
-                createLinks($target, $dstFile, $priorities{$dstFile});
-                createLinks($srcFile, $dstFile, $priority);
-            }
-
-            else {
-                symlink($srcFile, $dstFile) ||
-                    die "error creating link ‘$dstFile’: $!";
-                $priorities{$dstFile} = $priority;
-                $symlinks++;
-            }
-        }
-
-        else {
-
-            if (-l $dstFile) {
-                my $target = readlink $dstFile;
-                my $prevPriority = $priorities{$dstFile};
-                die("collision between ‘$srcFile’ and ‘$target’; " .
-                    "use ‘nix-env --set-flag priority NUMBER PKGNAME’ " .
-                    "to change the priority of one of the conflicting packages\n")
-                    if $prevPriority == $priority;
-                next if $prevPriority < $priority;
-                unlink $dstFile or die;
-            }
-
-            symlink($srcFile, $dstFile) ||
-                die "error creating link ‘$dstFile’: $!";
-            $priorities{$dstFile} = $priority;
-            $symlinks++;
-        }
-    }
-}
-
-
-my %done;
-my %postponed;
-
-sub addPkg;
-sub addPkg {
-    my $pkgDir = shift;
-    my $priority = shift;
-
-    return if (defined $done{$pkgDir});
-    $done{$pkgDir} = 1;
-
-#    print "symlinking $pkgDir\n";
-    createLinks("$pkgDir", "$out", $priority);
-
-    my $propagatedFN = "$pkgDir/nix-support/propagated-user-env-packages";
-    if (-e $propagatedFN) {
-        open PROP, "<$propagatedFN" or die;
-        my $propagated = <PROP>;
-        close PROP;
-        my @propagated = split ' ', $propagated;
-        foreach my $p (@propagated) {
-            $postponed{$p} = 1 unless defined $done{$p};
-        }
-    }
-}
-
-
-# Convert the stuff we get from the environment back into a coherent
-# data type.
-my @pkgs;
-my @derivations = split ' ', $ENV{"derivations"};
-while (scalar @derivations) {
-    my $active = shift @derivations;
-    my $priority = shift @derivations;
-    my $outputs = shift @derivations;
-    for (my $n = 0; $n < $outputs; $n++) {
-        my $path = shift @derivations;
-        push @pkgs,
-            { path => $path
-            , active => $active ne "false"
-            , priority => int($priority) };
-    }
-}
-
-
-# Symlink to the packages that have been installed explicitly by the
-# user.  Process in priority order to reduce unnecessary
-# symlink/unlink steps.
-@pkgs = sort { $a->{priority} <=> $b->{priority} || $a->{path} cmp $b->{path} } @pkgs;
-foreach my $pkg (@pkgs) {
-    #print $pkg, " ", $pkgs{$pkg}->{priority}, "\n";
-    addPkg($pkg->{path}, $pkg->{priority}) if $pkg->{active};
-}
-
-
-# Symlink to the packages that have been "propagated" by packages
-# installed by the user (i.e., package X declares that it wants Y
-# installed as well).  We do these later because they have a lower
-# priority in case of collisions.
-my $priorityCounter = 1000; # don't care about collisions
-while (scalar(keys %postponed) > 0) {
-    my @pkgDirs = keys %postponed;
-    %postponed = ();
-    foreach my $pkgDir (sort @pkgDirs) {
-        addPkg($pkgDir, $priorityCounter++);
-    }
-}
-
-
-print STDERR "created $symlinks symlinks in user environment\n";
-
-
-symlink($ENV{"manifest"}, "$out/manifest.nix") or die "cannot create manifest";
diff --git a/corepkgs/config.nix.in b/corepkgs/config.nix.in
index 90e8edbea833..f0f4890a32fd 100644
--- a/corepkgs/config.nix.in
+++ b/corepkgs/config.nix.in
@@ -3,7 +3,6 @@ let
     let val = builtins.getEnv var; in
     if val != "" then val else def;
 in rec {
-  perl = "@perl@";
   shell = "@bash@";
   coreutils = "@coreutils@";
   bzip2 = "@bzip2@";
@@ -14,6 +13,7 @@ in rec {
   tr = "@tr@";
   nixBinDir = fromEnv "NIX_BIN_DIR" "@bindir@";
   nixPrefix = "@prefix@";
+  nixLibexecDir = fromEnv "NIX_LIBEXEC_DIR" "@libexecdir@";
 
   # If Nix is installed in the Nix store, then automatically add it as
   # a dependency to the core packages. This ensures that they work
diff --git a/corepkgs/fetchurl.nix b/corepkgs/fetchurl.nix
index 5e0ad9da3c68..613c25364cc3 100644
--- a/corepkgs/fetchurl.nix
+++ b/corepkgs/fetchurl.nix
@@ -1,5 +1,3 @@
-with import <nix/config.nix>;
-
 { system ? builtins.currentSystem
 , url
 , outputHash ? ""
diff --git a/corepkgs/local.mk b/corepkgs/local.mk
index 19c1d06962c0..362c8eb612eb 100644
--- a/corepkgs/local.mk
+++ b/corepkgs/local.mk
@@ -1,4 +1,4 @@
-corepkgs_FILES = nar.nix buildenv.nix buildenv.pl unpack-channel.nix derivation.nix fetchurl.nix imported-drv-to-derivation.nix
+corepkgs_FILES = buildenv.nix unpack-channel.nix derivation.nix fetchurl.nix imported-drv-to-derivation.nix
 
 $(foreach file,config.nix $(corepkgs_FILES),$(eval $(call install-data-in,$(d)/$(file),$(datadir)/nix/corepkgs)))
 
diff --git a/corepkgs/nar.nix b/corepkgs/nar.nix
deleted file mode 100644
index 61b3fc6772c4..000000000000
--- a/corepkgs/nar.nix
+++ /dev/null
@@ -1,48 +0,0 @@
-with import <nix/config.nix>;
-
-let
-
-  builder = builtins.toFile "nar.sh"
-    ''
-      export PATH=${nixBinDir}:${coreutils}
-
-      if [ $compressionType = xz ]; then
-        ext=.xz
-        compressor="| ${xz} -7"
-      elif [ $compressionType = bzip2 ]; then
-        ext=.bz2
-        compressor="| ${bzip2}"
-      else
-        ext=
-        compressor=
-      fi
-
-      echo "packing ‘$storePath’..."
-      mkdir $out
-      dst=$out/tmp.nar$ext
-
-      set -o pipefail
-      eval "nix-store --dump \"$storePath\" $compressor > $dst"
-
-      hash=$(nix-hash --flat --type $hashAlgo --base32 $dst)
-      echo -n $hash > $out/nar-compressed-hash
-
-      mv $dst $out/$hash.nar$ext
-    '';
-
-in
-
-{ storePath, hashAlgo, compressionType }:
-
-derivation {
-  name = "nar";
-  system = builtins.currentSystem;
-  builder = shell;
-  args = [ "-e" builder ];
-  inherit storePath hashAlgo compressionType;
-
-  # Remote machines may not have ${nixBinDir} or ${coreutils} in the same prefixes
-  preferLocalBuild = true;
-
-  inherit chrootDeps;
-}
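
Note: the deleted corepkgs/nar.nix built compressed NAR files by shelling out to nix-store and nix-hash from a derivation. For reference, a minimal shell sketch of the same packing step, using only commands that appear in the removed builder (the store path, sha256, and xz are illustrative choices):

    # Pack a store path into an xz-compressed NAR and name it after its hash,
    # mirroring what the removed nar.nix builder script did.
    storePath=/nix/store/...-example          # hypothetical store path
    nix-store --dump "$storePath" | xz -7 > tmp.nar.xz
    hash=$(nix-hash --flat --type sha256 --base32 tmp.nar.xz)
    mv tmp.nar.xz "$hash.nar.xz"
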
diff --git a/doc/manual/advanced-topics/distributed-builds.xml b/doc/manual/advanced-topics/distributed-builds.xml
index 1f8d98f5d8fe..f8583700393c 100644
--- a/doc/manual/advanced-topics/distributed-builds.xml
+++ b/doc/manual/advanced-topics/distributed-builds.xml
@@ -11,9 +11,9 @@ forward Nix builds to other machines over the network.  This allows
 multiple builds to be performed in parallel (thus improving
 performance) and allows Nix to perform multi-platform builds in a
 semi-transparent way.  For instance, if you perform a build for a
-<literal>powerpc-darwin</literal> on an <literal>i686-linux</literal>
+<literal>x86_64-darwin</literal> on an <literal>i686-linux</literal>
 machine, Nix can automatically forward the build to a
-<literal>powerpc-darwin</literal> machine, if available.</para>
+<literal>x86_64-darwin</literal> machine, if available.</para>
 
 <para>You can enable distributed builds by setting the environment
 variable <envar>NIX_BUILD_HOOK</envar> to point to a program that Nix
@@ -30,7 +30,7 @@ variable</link>.</para>
 <example xml:id='ex-remote-systems'><title>Remote machine configuration:
 <filename>remote-systems.conf</filename></title>
 <programlisting>
-nix@mcflurry.labs.cs.uu.nl  powerpc-darwin  /home/nix/.ssh/id_quarterpounder_auto  2
+nix@mcflurry.labs.cs.uu.nl  x86_64-darwin   /home/nix/.ssh/id_quarterpounder_auto  2
 nix@scratchy.labs.cs.uu.nl  i686-linux      /home/nix/.ssh/id_scratchy_auto        8 1 kvm
 nix@itchy.labs.cs.uu.nl     i686-linux      /home/nix/.ssh/id_scratchy_auto        8 2
 nix@poochie.labs.cs.uu.nl   i686-linux      /home/nix/.ssh/id_scratchy_auto        8 2 kvm perf
@@ -59,7 +59,7 @@ bits of information:
   <filename>~/.ssh/config</filename>.</para></listitem>
 
   <listitem><para>A comma-separated list of Nix platform type
-  identifiers, such as <literal>powerpc-darwin</literal>.  It is
+  identifiers, such as <literal>x86_64-darwin</literal>.  It is
   possible for a machine to support multiple platform types, e.g.,
   <literal>i686-linux,x86_64-linux</literal>.</para></listitem>
 
diff --git a/doc/manual/command-ref/conf-file.xml b/doc/manual/command-ref/conf-file.xml
index 4c8f3d9d3809..fb7bdf573397 100644
--- a/doc/manual/command-ref/conf-file.xml
+++ b/doc/manual/command-ref/conf-file.xml
@@ -430,15 +430,15 @@ flag, e.g. <literal>--option gc-keep-outputs false</literal>.</para>
     <listitem><para>This option specifies the canonical Nix system
     name of the current installation, such as
     <literal>i686-linux</literal> or
-    <literal>powerpc-darwin</literal>.  Nix can only build derivations
+    <literal>x86_64-darwin</literal>.  Nix can only build derivations
     whose <literal>system</literal> attribute equals the value
     specified here.  In general, it never makes sense to modify this
     value from its default, since you can use it to ‘lie’ about the
     platform you are building on (e.g., perform a Mac OS build on a
     Linux machine; the result would obviously be wrong).  It only
     makes sense if the Nix binaries can run on multiple platforms,
-    e.g., ‘universal binaries’ that run on <literal>powerpc-darwin</literal> and
-    <literal>i686-darwin</literal>.</para>
+    e.g., ‘universal binaries’ that run on <literal>x86_64-linux</literal> and
+    <literal>i686-linux</literal>.</para>
 
     <para>It defaults to the canonical Nix system name detected by
     <filename>configure</filename> at build time.</para></listitem>
diff --git a/doc/manual/command-ref/env-common.xml b/doc/manual/command-ref/env-common.xml
index 27efef945f15..c757cb17bd10 100644
--- a/doc/manual/command-ref/env-common.xml
+++ b/doc/manual/command-ref/env-common.xml
@@ -129,15 +129,6 @@ $ mount -o bind /mnt/otherdisk/nix /nix</screen>
 </varlistentry>
 
 
-<varlistentry><term><envar>NIX_DB_DIR</envar></term>
-
-  <listitem><para>Overrides the location of the Nix database (default
-  <filename><replaceable>$NIX_STATE_DIR</replaceable>/db</filename>, i.e.,
-  <filename><replaceable>prefix</replaceable>/var/nix/db</filename>).</para></listitem>
-
-</varlistentry>
-
-
 <varlistentry><term><envar>NIX_CONF_DIR</envar></term>
 
   <listitem><para>Overrides the location of the Nix configuration
diff --git a/doc/manual/command-ref/nix-channel.xml b/doc/manual/command-ref/nix-channel.xml
index 0a1f2a8b722d..9acf44e52984 100644
--- a/doc/manual/command-ref/nix-channel.xml
+++ b/doc/manual/command-ref/nix-channel.xml
@@ -180,9 +180,7 @@ following files:</para>
     sufficient rights to add binary caches. For instance, in a
     multi-user Nix setup, the binary caches provided by the channels
     of the root user are used automatically, but caches corresponding
-    to the channels of non-root users are ignored. Binary caches can
-    be created and maintained using
-    <command>nix-push</command>.</para></listitem>
+    to the channels of non-root users are ignored.</para></listitem>
 
   </varlistentry>
 
diff --git a/doc/manual/command-ref/nix-install-package.xml b/doc/manual/command-ref/nix-install-package.xml
deleted file mode 100644
index e17166caaaf3..000000000000
--- a/doc/manual/command-ref/nix-install-package.xml
+++ /dev/null
@@ -1,208 +0,0 @@
-<refentry xmlns="http://docbook.org/ns/docbook"
-      xmlns:xlink="http://www.w3.org/1999/xlink"
-      xmlns:xi="http://www.w3.org/2001/XInclude"
-      version="5.0"
-      xml:id="sec-nix-install-package">
-
-<refmeta>
-  <refentrytitle>nix-install-package</refentrytitle>
-  <manvolnum>1</manvolnum>
-  <refmiscinfo class="source">Nix</refmiscinfo>
-  <refmiscinfo class="version"><xi:include href="../version.txt" parse="text"/></refmiscinfo>
-</refmeta>
-
-<refnamediv>
-  <refname>nix-install-package</refname>
-  <refpurpose>install a Nix Package file</refpurpose>
-</refnamediv>
-
-<refsynopsisdiv>
-  <cmdsynopsis>
-    <command>nix-install-package</command>
-    <arg><option>--non-interactive</option></arg>
-    <arg>
-      <group choice='req'>
-        <arg choice='plain'><option>--profile</option></arg>
-        <arg choice='plain'><option>-p</option></arg>
-      </group>
-      <replaceable>path</replaceable>
-    </arg>
-    <arg><option>--set</option></arg>
-    <sbr />
-    <group choice='req'>
-      <arg choice='req'>
-        <option>--url</option>
-        <arg choice='plain'><replaceable>url</replaceable></arg>
-      </arg>
-      <arg choice='req'>
-        <arg choice='plain'><replaceable>file</replaceable></arg>
-      </arg>
-    </group>
-  </cmdsynopsis>
-</refsynopsisdiv>
-
-
-<refsection><title>Description</title>
-
-<para>The command <command>nix-install-package</command> interactively
-installs a Nix Package file (<filename>*.nixpkg</filename>), which is
-a small file that contains a store path to be installed along with the
-URL of a binary cache.  The Nix Package file is either
-<replaceable>file</replaceable>, or automatically downloaded from
-<replaceable>url</replaceable> if the <option>--url</option> switch is
-used.</para>
-
-<para><command>nix-install-package</command> is used in <link
-linkend="sec-one-click">one-click installs</link> to download and
-install pre-built binary packages with all necessary dependencies.
-<command>nix-install-package</command> is intended to be associated
-with the MIME type <literal>application/nix-package</literal> in a web
-browser so that it is invoked automatically when you click on
-<filename>*.nixpkg</filename> files.  When invoked, it restarts itself
-in a terminal window (since otherwise it would be invisible when run
-from a browser), asks the user to confirm whether to install the
-package, and if so downloads and installs the package into the user’s
-current profile.</para>
-
-<para>To obtain a window, <command>nix-install-package</command> tries
-to restart itself with <command>xterm</command>,
-<command>konsole</command> and
-<command>gnome-terminal</command>.</para>
-
-</refsection>
-
-
-<refsection><title>Options</title>
-
-<variablelist>
-
-  <varlistentry><term><option>--non-interactive</option></term>
-
-    <listitem><para>Do not open a new terminal window and do not ask
-    for confirmation.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--profile</option></term>
-    <term><option>-p</option></term>
-
-    <listitem><para>Install the package into the specified profile
-    rather than the user’s current profile.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--set</option></term>
-
-    <listitem><para>Install the package as the profile so that the
-    profile contains exactly the contents of the package.</para></listitem>
-
-  </varlistentry>
-
-</variablelist>
-
-</refsection>
-
-
-<refsection><title>Examples</title>
-
-<para>To install <filename>subversion-1.4.0.nixpkg</filename> into the
-user’s current profile, without any prompting:
-
-<screen>
-$ nix-install-package --non-interactive subversion-1.4.0.nixpkg</screen>
-
-</para>
-
-<para>To install the same package from some URL into a different
-profile:
-
-<screen>
-$ nix-install-package --non-interactive -p /nix/var/nix/profiles/eelco \
-    --url http://nix.cs.uu.nl/dist/nix/nixpkgs-0.10pre6622/pkgs/subversion-1.4.0-i686-linux.nixpkg</screen>
-
-</para>
-
-</refsection>
-
-
-<refsection><title>Format of <literal>nixpkg</literal> files</title>
-
-<para>A Nix Package file consists of a single line with the following
-format:
-
-<screen>
-NIXPKG1 <replaceable>manifestURL</replaceable> <replaceable>name</replaceable> <replaceable>system</replaceable> <replaceable>drvPath</replaceable> <replaceable>outPath</replaceable></screen>
-
-The elements are as follows:
-
-<variablelist>
-
-  <varlistentry><term><literal>NIXPKG1</literal></term>
-
-    <listitem><para>The version of the Nix Package
-    file.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><replaceable>manifestURL</replaceable></term>
-
-    <listitem><para>Obsolete.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><replaceable>name</replaceable></term>
-
-    <listitem><para>The symbolic name and version of the
-    package.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><replaceable>system</replaceable></term>
-
-    <listitem><para>The platform identifier of the platform for which
-    this binary package is intended.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><replaceable>drvPath</replaceable></term>
-
-    <listitem><para>The path in the Nix store of the derivation from
-    which <replaceable>outPath</replaceable> was built.  Not currently
-    used.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><replaceable>outPath</replaceable></term>
-
-    <listitem><para>The path in the Nix store of the
-    package.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><replaceable>binaryCacheURL</replaceable></term>
-
-    <listitem><para>The URL of a binary cache containing the closure
-    of <replaceable>outPath</replaceable>.</para></listitem>
-
-  </varlistentry>
-
-</variablelist>
-
-</para>
-
-<para>An example follows:
-
-<screen>
-NIXPKG1 http://.../nixpkgs-0.10pre6622/MANIFEST subversion-1.4.0 i686-darwin \
-  /nix/store/4kh60jkp...-subversion-1.4.0.drv \
-  /nix/store/nkw7wpgb...-subversion-1.4.0</screen>
-
-(The line breaks (<literal>\</literal>) are for presentation purposes
-and not part of the actual file.)
-
-</para>
-
-</refsection>
-
-
-</refentry>
diff --git a/doc/manual/command-ref/nix-push.xml b/doc/manual/command-ref/nix-push.xml
deleted file mode 100644
index 0749824a0ad4..000000000000
--- a/doc/manual/command-ref/nix-push.xml
+++ /dev/null
@@ -1,449 +0,0 @@
-<refentry xmlns="http://docbook.org/ns/docbook"
-      xmlns:xlink="http://www.w3.org/1999/xlink"
-      xmlns:xi="http://www.w3.org/2001/XInclude"
-      version="5.0"
-      xml:id="sec-nix-push">
-
-<refmeta>
-  <refentrytitle>nix-push</refentrytitle>
-  <manvolnum>1</manvolnum>
-  <refmiscinfo class="source">Nix</refmiscinfo>
-  <refmiscinfo class="version"><xi:include href="../version.txt" parse="text"/></refmiscinfo>
-</refmeta>
-
-<refnamediv>
-  <refname>nix-push</refname>
-  <refpurpose>generate a binary cache</refpurpose>
-</refnamediv>
-
-<refsynopsisdiv>
-  <cmdsynopsis>
-    <command>nix-push</command>
-    <arg choice='plain'><option>--dest</option> <replaceable>dest-dir</replaceable></arg>
-    <arg><option>--bzip2</option></arg>
-    <arg><option>--none</option></arg>
-    <arg><option>--force</option></arg>
-    <arg><option>--link</option></arg>
-    <arg><option>--manifest</option></arg>
-    <arg><option>--manifest-path</option> <replaceable>filename</replaceable></arg>
-    <arg><option>--url-prefix</option> <replaceable>url</replaceable></arg>
-    <arg><option>--key-file</option> <replaceable>path</replaceable></arg>
-    <arg choice='plain' rep='repeat'><replaceable>paths</replaceable></arg>
-  </cmdsynopsis>
-</refsynopsisdiv>
-
-
-<refsection><title>Description</title>
-
-<para>The command <command>nix-push</command> produces a
-<emphasis>binary cache</emphasis>, a directory containing compressed
-Nix archives (NARs) plus some metadata of the closure of the specified
-store paths.  This directory can then be made available through a web
-server to other Nix installations, allowing them to skip building from
-source and instead download binaries from the cache
-automatically.</para>
-
-<para><command>nix-push</command> performs the following actions.
-
-<orderedlist>
-
-  <listitem><para>Each path in <replaceable>paths</replaceable> is
-  built (using <link
-  linkend='rsec-nix-store-realise'><command>nix-store
-  --realise</command></link>).</para></listitem>
-
-  <listitem><para>All paths in the closure of
-  <replaceable>paths</replaceable> are determined (using
-  <command>nix-store --query --requisites
-  --include-outputs</command>).  Note that since the
-  <option>--include-outputs</option> flag is used, if
-  <replaceable>paths</replaceable> includes a store derivation, you
-  get a combined source/binary distribution (e.g., source tarballs
-  will be included).</para></listitem>
-
-  <listitem><para>All store paths determined in the previous step are
-  packaged into a NAR (using <command>nix-store --dump</command>) and
-  compressed using <command>xz</command> or <command>bzip2</command>.
-  The resulting files have the extension <filename>.nar.xz</filename>
-  or <filename>.nar.bz2</filename>.  Also for each store path, Nix
-  generates a file with extension <filename>.narinfo</filename>
-  containing metadata such as the references, cryptographic hash and
-  size of each path.</para></listitem>
-
-  <listitem><para>Optionally, a single <emphasis>manifest</emphasis>
-  file is created that contains the same metadata as the
-  <filename>.narinfo</filename> files.  This is for compatibility with
-  Nix versions prior to 1.2.</para></listitem>
-
-  <listitem><para>A file named <option>nix-cache-info</option> is
-  placed in the destination directory.  The existence of this file
-  marks the directory as a binary cache.</para></listitem>
-
-</orderedlist>
-
-</para>
-
-</refsection>
-
-
-<refsection><title>Options</title>
-
-<variablelist>
-
-  <varlistentry><term><option>--dest</option> <replaceable>dest-dir</replaceable></term>
-
-    <listitem><para>Set the destination directory to
-    <replaceable>dir</replaceable>, which is created if it does not
-    exist.  This flag is required.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--bzip2</option></term>
-
-    <listitem><para>Compress NARs using <command>bzip2</command>
-    instead of <command>xz</command>.  The latter compresses about 30%
-    better on typical archives, decompresses about twice as fast, but
-    compresses a lot slower and is not supported by Nix prior to
-    version 1.2.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--none</option></term>
-
-    <listitem><para>Do not compress NARs.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--force</option></term>
-
-    <listitem><para>Overwrite <filename>.narinfo</filename> files if
-    they already exist.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--link</option></term>
-
-    <listitem><para>By default, NARs are generated in the Nix store
-    and then copied to <replaceable>dest-dir</replaceable>.  If this
-    option is given, hard links are used instead.  This only works if
-    <replaceable>dest-dir</replaceable> is on the same filesystem as
-    the Nix store.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--manifest</option></term>
-
-    <listitem><para>Force the generation of a manifest suitable for
-    use by old versions of Nix.  The manifest is stored as
-    <filename><replaceable>dest-dir</replaceable>/MANIFEST</filename>.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--manifest-path</option> <replaceable>filename</replaceable></term>
-
-    <listitem><para>Like <option>--manifest</option>, but store the
-    manifest in <replaceable>filename</replaceable>.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--url-prefix</option> <replaceable>url</replaceable></term>
-
-    <listitem><para>Manifests are expected to contain the absolute
-    URLs of NARs.  For generating these URLs, the prefix
-    <replaceable>url</replaceable> is used.  It defaults to
-    <uri>file://<replaceable>dest-dir</replaceable></uri>.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><option>--key-file</option> <replaceable>path</replaceable></term>
-
-    <listitem><para>Sign the binary cache using the secret key stored
-    in <replaceable>path</replaceable>. This secret key must have been
-    created using <command
-    linkend="rsec-nix-store-generate-binary-cache-key">nix-store
-    --generate-binary-cache-key</command>. Users of this binary cache
-    should add the corresponding public key to the option
-    <option>binary-cache-public-keys</option> in
-    <filename>nix.conf</filename>.</para></listitem>
-
-  </varlistentry>
-
-</variablelist>
-
-</refsection>
-
-
-<refsection><title>Examples</title>
-
-<para>To add the closure of Thunderbird to a binary cache:
-
-<screen>
-$ nix-push --dest /tmp/cache $(nix-build -A thunderbird)
-</screen>
-
-Assuming that <filename>/tmp/cache</filename> is exported by a web
-server as <uri>http://example.org/cache</uri>, you can then use this
-cache on another machine to speed up the installation of Thunderbird:
-
-<screen>
-$ nix-build -A thunderbird --option binary-caches http://example.org/cache
-</screen>
-
-Alternatively, you could add <literal>binary-caches =
-http://example.org/cache</literal> to
-<filename>nix.conf</filename>.</para>
-
-<para>To also include build-time dependencies (such as source
-tarballs):
-
-<screen>
-$ nix-push --dest /tmp/cache $(nix-instantiate -A thunderbird)
-</screen>
-
-</para>
-
-<para>To generate a signed binary cache, you must first generate a key
-pair, in this example called <literal>cache.example.org-1</literal>,
-storing the secret key in <filename>./sk</filename> and the public key
-in <filename>./pk</filename>:
-
-<screen>
-$ nix-store --generate-binary-cache-key cache.example.org-1 sk pk
-
-$ cat sk
-cache.example.org-1:jcMRQYFo8pQKzTtimpQLIPeHkMYZjfhB24hGfwF+u9PuX8H8FO7q564+X3G/JDlqqIqGar3OXRRwS9N3Wh3vbw==
-
-$ cat pk
-cache.example.org-1:7l/B/BTu6ueuPl9xvyQ5aqiKhmq9zl0UcEvTd1od728=
-</screen>
-
-You can then generate a binary cache signed with the secret key:
-
-<screen>
-$ nix-push --dest /tmp/cache --key-file ./sk $(type -p firefox)
-</screen>
-
-Users who wish to verify the integrity of binaries downloaded from
-your cache would add the following to their
-<filename>nix.conf</filename>:
-
-<programlisting>
-binary-caches = http://cache.example.org
-signed-binary-caches = *
-binary-cache-public-keys = cache.example.org-1:7l/B/BTu6ueuPl9xvyQ5aqiKhmq9zl0UcEvTd1od728=
-</programlisting>
-
-Nix will then ignore any binary that has a missing, incorrect or
-unrecognised signature.</para>
-
-</refsection>
-
-
-<refsection><title>Binary cache format and operation</title>
-
-<para>A binary cache with URL <replaceable>url</replaceable> only
-denotes a valid binary cache if the file
-<uri><replaceable>url</replaceable>/nix-cache-info</uri> exists.  If
-this file does not exist (or cannot be downloaded), the cache is
-ignored.  If it does exist, it must be a text file containing cache
-properties.  Here’s an example:
-
-<screen>
-StoreDir: /nix/store
-WantMassQuery: 1
-Priority: 10
-</screen>
-
-The properties that are currently supported are:
-
-<variablelist>
-
-  <varlistentry><term><literal>StoreDir</literal></term>
-
-    <listitem><para>The path of the Nix store to which this binary
-    cache applies.  Binaries are not relocatable — a binary built for
-    <filename>/nix/store</filename> won’t generally work in
-    <filename>/home/alice/store</filename> — so to prevent binaries
-    from being used in a wrong store, a binary cache is only used if
-    its <literal>StoreDir</literal> matches the local Nix
-    configuration.  The default is
-    <filename>/nix/store</filename>.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>WantMassQuery</literal></term>
-
-    <listitem><para>Query operations such as <command>nix-env
-    -qas</command> can cause thousands of cache queries, and thus
-    thousands of HTTP requests, to determine which packages are
-    available in binary form.  While these requests are small, not
-    every server may appreciate a potential onslaught of queries.  If
-    <literal>WantMassQuery</literal> is set to <literal>0</literal>
-    (default), “mass queries” such as <command>nix-env -qas</command>
-    will skip this cache.  Thus a package may appear not to have a
-    binary substitute.  However, the binary will still be used when
-    you actually install the package.  If
-    <literal>WantMassQuery</literal> is set to <literal>1</literal>,
-    mass queries will use this cache.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>Priority</literal></term>
-
-    <listitem><para>Each binary cache has a priority (defaulting to
-    50).  Binary caches are checked for binaries in order of ascending
-    priority; thus a higher number denotes a lower priority.  The
-    binary cache <uri>https://cache.nixos.org</uri> has priority
-    40.</para></listitem>
-
-  </varlistentry>
-
-</variablelist>
-
-</para>
-
-<para>Every time Nix needs to build some store path
-<replaceable>p</replaceable>, it will check each configured binary
-cache to see if it has a NAR file for <replaceable>p</replaceable>,
-until it finds one.  If no cache has a NAR, Nix will fall back to
-building the path from source (if applicable).  To see if a cache with
-URL <replaceable>url</replaceable> has a binary for
-<replaceable>p</replaceable>, Nix fetches
-<replaceable>url/h</replaceable>, where <replaceable>h</replaceable>
-is the hash part of <replaceable>p</replaceable>.  Thus, if we have a
-cache <uri>https://cache.nixos.org</uri> and we want to obtain the
-store path
-<screen>
-/nix/store/a8922c0h87iilxzzvwn2hmv8x210aqb9-glibc-2.7
-</screen>
-then Nix will attempt to fetch
-<screen>
-https://cache.nixos.org/a8922c0h87iilxzzvwn2hmv8x210aqb9.narinfo
-</screen>
-(Commands such as <command>nix-env -qas</command> will issue an HTTP
-HEAD request, since it only needs to know if the
-<filename>.narinfo</filename> file exists.)  The
-<filename>.narinfo</filename> file is a simple text file that looks
-like this:
-
-<screen>
-StorePath: /nix/store/a8922c0h87iilxzzvwn2hmv8x210aqb9-glibc-2.7
-URL: nar/0zzjpdz46mdn74v09m053yczlz4am038g8r74iy8w43gx8801h70.nar.bz2
-Compression: bzip2
-FileHash: sha256:0zzjpdz46mdn74v09m053yczlz4am038g8r74iy8w43gx8801h70
-FileSize: 24473768
-NarHash: sha256:0s491y1h9hxj5ghiizlxk7ax6jwbha00zwn7lpyd5xg5bhf60vzg
-NarSize: 109521136
-References: 2ma2k0ys8knh4an48n28vigcmc2z8773-linux-headers-2.6.23.16 ...
-Deriver: 7akyyc87ka32xwmqza9dvyg5pwx3j212-glibc-2.7.drv
-Sig: cache.example.org-1:WepnSp2UT0odDpR3NRjPVhJBHmdBgSBSTbHpdh4SCz92nGXwFY82bkPEmISoC0hGqBXDXEmB6y3Ohgna3mMgDg==
-</screen>
-
-The fields are as follows:
-
-<variablelist>
-
-  <varlistentry><term><literal>StorePath</literal></term>
-
-    <listitem><para>The full store path, including the name part
-    (e.g., <literal>glibc-2.7</literal>).  It must match the
-    requested store path.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>URL</literal></term>
-
-    <listitem><para>The URL of the NAR, relative to the binary cache
-    URL.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>Compression</literal></term>
-
-    <listitem><para>The compression method; either
-    <literal>xz</literal> or
-    <literal>bzip2</literal>.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>FileHash</literal></term>
-
-    <listitem><para>The SHA-256 hash of the compressed
-    NAR.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>FileSize</literal></term>
-
-    <listitem><para>The size of the compressed NAR.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>NarHash</literal></term>
-
-    <listitem><para>The SHA-256 hash of the uncompressed NAR.  This is
-    equal to the hash of the store path as returned by
-    <command>nix-store -q --hash
-    <replaceable>p</replaceable></command>.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>NarSize</literal></term>
-
-    <listitem><para>The size of the uncompressed NAR.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>References</literal></term>
-
-    <listitem><para>The references of the store path, without the Nix
-    store prefix.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>Deriver</literal></term>
-
-    <listitem><para>The deriver of the store path, without the Nix
-    store prefix.  This field is optional.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>System</literal></term>
-
-    <listitem><para>The Nix platform type of this binary, if known.
-    This field is optional.</para></listitem>
-
-  </varlistentry>
-
-  <varlistentry><term><literal>Sig</literal></term>
-
-    <listitem><para>A signature of the the form
-    <literal><replaceable>key-name</replaceable>:<replaceable>sig</replaceable></literal>,
-    where <replaceable>key-name</replaceable> is the symbolic name of
-    the key pair used to sign and verify the cache
-    (e.g. <literal>cache.example.org-1</literal>), and
-    <replaceable>sig</replaceable> is the actual signature, computed
-    over the <varname>StorePath</varname>, <varname>NarHash</varname>,
-    <varname>NarSize</varname> and <varname>References</varname>
-    fields using the <link
-    xlink:href="http://ed25519.cr.yp.to/">Ed25519 public-key signature
-    system</link>.</para></listitem>
-
-  </varlistentry>
-
-</variablelist>
-
-</para>
-
-<para>Thus, in our example, after recursively ensuring that the
-references exist (e.g.,
-<filename>/nix/store/2ma2k0ys8knh4an48n28vigcmc2z8773-linux-headers-2.6.23.16</filename>),
-Nix will fetch <screen>
-https://cache.nixos.org/nar/0zzjpdz46mdn74v09m053yczlz4am038g8r74iy8w43gx8801h70.nar.bz2
-</screen> and decompress and unpack it to
-<filename>/nix/store/a8922c0h87iilxzzvwn2hmv8x210aqb9-glibc-2.7</filename>.</para>
-
-</refsection>
-
-
-</refentry>
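
Note: the removed nix-push manual page above also documents the binary cache lookup protocol, which is still useful context: for a store path, a client fetches <cache-url>/<hash-part>.narinfo and, if it exists, downloads the NAR named in its URL field. A hedged shell sketch of that lookup, reusing the example cache and store path from the removed text (curl is assumed to be available; in practice the NAR URL is read from the .narinfo):

    # Hash part of /nix/store/a8922c0h87iilxzzvwn2hmv8x210aqb9-glibc-2.7
    hashPart=a8922c0h87iilxzzvwn2hmv8x210aqb9
    # Fetch the metadata; a 404 means the cache has no binary for this path.
    curl -f "https://cache.nixos.org/$hashPart.narinfo"
    # The URL field of the .narinfo names the compressed NAR, e.g.:
    curl -fO "https://cache.nixos.org/nar/0zzjpdz46mdn74v09m053yczlz4am038g8r74iy8w43gx8801h70.nar.bz2"
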
diff --git a/doc/manual/command-ref/nix-store.xml b/doc/manual/command-ref/nix-store.xml
index 346718588b77..0f6172defb38 100644
--- a/doc/manual/command-ref/nix-store.xml
+++ b/doc/manual/command-ref/nix-store.xml
@@ -1390,8 +1390,7 @@ parameters:
 
 </orderedlist>
 
-For an example, see the manual page for <command
-linkend="sec-nix-push">nix-push</command>.</para>
+</para>
 
 </refsection>
 
diff --git a/doc/manual/command-ref/utilities.xml b/doc/manual/command-ref/utilities.xml
index 25e457e4e554..893f5b5b5260 100644
--- a/doc/manual/command-ref/utilities.xml
+++ b/doc/manual/command-ref/utilities.xml
@@ -14,9 +14,7 @@ work with Nix.</para>
 <xi:include href="nix-copy-closure.xml" />
 <xi:include href="nix-daemon.xml" />
 <xi:include href="nix-hash.xml" />
-<xi:include href="nix-install-package.xml" />
 <xi:include href="nix-instantiate.xml" />
 <xi:include href="nix-prefetch-url.xml" />
-<xi:include href="nix-push.xml" />
 
 </chapter>
diff --git a/doc/manual/expressions/builtins.xml b/doc/manual/expressions/builtins.xml
index eae5f5a029bf..6ac802343236 100644
--- a/doc/manual/expressions/builtins.xml
+++ b/doc/manual/expressions/builtins.xml
@@ -142,7 +142,7 @@ if builtins ? getEnv then builtins.getEnv "PATH" else ""</programlisting>
     evaluates to the Nix platform identifier for the Nix installation
     on which the expression is being evaluated, such as
     <literal>"i686-linux"</literal> or
-    <literal>"powerpc-darwin"</literal>.</para></listitem>
+    <literal>"x86_64-darwin"</literal>.</para></listitem>
 
   </varlistentry>
 
@@ -853,7 +853,14 @@ builtins.sort builtins.lessThan [ 483 249 526 147 42 77 ]
     len</replaceable> lies beyond the end of the string, only the
     substring up to the end of the string is returned.
     <replaceable>start</replaceable> must be
-    non-negative.</para></listitem>
+    non-negative. For example,
+
+<programlisting>
+builtins.substring 0 3 "nixos"
+</programlisting>
+
+   evaluates to <literal>"nix"</literal>.
+   </para></listitem>
 
   </varlistentry>
 
diff --git a/doc/manual/expressions/derivations.xml b/doc/manual/expressions/derivations.xml
index f2a73dccfe18..5efe2213e370 100644
--- a/doc/manual/expressions/derivations.xml
+++ b/doc/manual/expressions/derivations.xml
@@ -16,7 +16,7 @@ of which specify the inputs of the build.</para>
   <listitem xml:id="attr-system"><para>There must be an attribute named
   <varname>system</varname> whose value must be a string specifying a
   Nix platform identifier, such as <literal>"i686-linux"</literal> or
-  <literal>"powerpc-darwin"</literal><footnote><para>To figure out
+  <literal>"x86_64-darwin"</literal><footnote><para>To figure out
   your platform identifier, look at the line <quote>Checking for the
   canonical Nix system name</quote> in the output of Nix's
   <filename>configure</filename> script.</para></footnote> The build
diff --git a/doc/manual/expressions/language-constructs.xml b/doc/manual/expressions/language-constructs.xml
index 7535e64ea9a5..fe69dba837a1 100644
--- a/doc/manual/expressions/language-constructs.xml
+++ b/doc/manual/expressions/language-constructs.xml
@@ -202,24 +202,6 @@ in concat { x = "foo"; y = "bar"; }</programlisting>
 
 </para>
 
-<para>A set that has a <literal>__functor</literal> attribute whose value
-is callable (i.e. is itself a function or a set with a
-<literal>__functor</literal> attribute whose value is callable) can be
-applied as if it were a function, with the set itself passed in first
-, e.g.,
-
-<programlisting>
-let add = { __functor = self: x: x + self.x; };
-    inc = add // { x = 1; };
-in inc 1
-</programlisting>
-
-evaluates to <literal>2</literal>. This can be used to attach metadata to a
-function without the caller needing to treat it specially, or to implement
-a form of object-oriented programming, for example.
-
-</para>
-
 </simplesect>
 
 
diff --git a/doc/manual/expressions/language-values.xml b/doc/manual/expressions/language-values.xml
index f1174ecb5d8d..b90baac5054c 100644
--- a/doc/manual/expressions/language-values.xml
+++ b/doc/manual/expressions/language-values.xml
@@ -276,6 +276,23 @@ added to the set:
 This will evaluate to <literal>{}</literal> if <literal>foo</literal>
 evaluates to <literal>false</literal>.</para>
 
+<para>A set that has a <literal>__functor</literal> attribute whose value
+is callable (i.e. is itself a function or a set with a
+<literal>__functor</literal> attribute whose value is callable) can be
+applied as if it were a function, with the set itself passed in first
+, e.g.,
+
+<programlisting>
+let add = { __functor = self: x: x + self.x; };
+    inc = add // { x = 1; };
+in inc 1
+</programlisting>
+
+evaluates to <literal>2</literal>. This can be used to attach metadata to a
+function without the caller needing to treat it specially, or to implement
+a form of object-oriented programming, for example.
+
+</para>
 
 </simplesect>
 
diff --git a/doc/manual/introduction/quick-start.xml b/doc/manual/introduction/quick-start.xml
index 0d13651e0ab3..5ae9f6ad543b 100644
--- a/doc/manual/introduction/quick-start.xml
+++ b/doc/manual/introduction/quick-start.xml
@@ -95,18 +95,6 @@ The latter command will upgrade each installed package for which there
 is a “newer” version (as determined by comparing the version
 numbers).</para></step>
 
-<!--
-<step><para>You can also install specific packages directly from
-your web browser.  For instance, you can go to <link
-xlink:href="http://hydra.nixos.org/jobset/nixpkgs/trunk/channel/latest"
-/> and click on any link for the individual packages for your
-platform.  Associate <literal>application/nix-package</literal> with
-the program <command>nix-install-package</command>.  A window should
-appear asking you whether it’s okay to install the package.  Say
-<literal>Y</literal>.  The package and all its dependencies will be
-installed.</para></step>
--->
-
 <step><para>If you're unhappy with the result of a
 <command>nix-env</command> action (e.g., an upgraded package turned
 out not to work properly), you can go back:
diff --git a/doc/manual/local.mk b/doc/manual/local.mk
index 3c4fc52dfd67..d89555899a70 100644
--- a/doc/manual/local.mk
+++ b/doc/manual/local.mk
@@ -39,9 +39,9 @@ dist-files += $(d)/manual.xmli $(d)/version.txt $(d)/manual.is-valid
 # Generate man pages.
 man-pages := $(foreach n, \
   nix-env.1 nix-build.1 nix-shell.1 nix-store.1 nix-instantiate.1 \
-  nix-collect-garbage.1 nix-push.1 \
+  nix-collect-garbage.1 \
   nix-prefetch-url.1 nix-channel.1 \
-  nix-install-package.1 nix-hash.1 nix-copy-closure.1 \
+  nix-hash.1 nix-copy-closure.1 \
   nix.conf.5 nix-daemon.8, \
   $(d)/$(n))
 
diff --git a/doc/manual/packages/one-click.xml b/doc/manual/packages/one-click.xml
deleted file mode 100644
index cef9a2bbff4e..000000000000
--- a/doc/manual/packages/one-click.xml
+++ /dev/null
@@ -1,37 +0,0 @@
-<chapter xmlns="http://docbook.org/ns/docbook"
-      xmlns:xlink="http://www.w3.org/1999/xlink"
-      xmlns:xi="http://www.w3.org/2001/XInclude"
-      version="5.0"
-      xml:id="sec-one-click">
-
-<title>One-Click Installation</title>
-
-<para>Often, when you want to install a specific package (e.g., from
-the <link
-xlink:href="http://nixos.org/nixpkgs/">Nix
-Packages collection</link>), subscribing to a channel is a bit
-cumbersome.  And channels don’t help you at all if you want to install
-an older version of a package than the one provided by the current
-contents of the channel, or a package that has been removed from the
-channel.  That’s when <emphasis>one-click installs</emphasis> come in
-handy: you can just go to the web page that contains the package,
-click on it, and it will be installed with all the necessary
-dependencies.</para>
-
-<para>For instance, you can go to <link
-xlink:href="http://hydra.nixos.org/jobset/nixpkgs/trunk/channel/latest"
-/> and click on any link for the individual packages for your
-platform.  The first time you do this, your browser will ask what to
-do with <literal>application/nix-package</literal> files.  You should
-open them with <filename>/nix/bin/nix-install-package</filename>.
-This will open a window that asks you to confirm that you want to
-install the package.  When you answer <literal>Y</literal>, the
-package and all its dependencies will be installed.  This is a binary
-deployment mechanism — you get packages pre-compiled for the selected
-platform type.</para>
-
-<para>You can also install <literal>application/nix-package</literal>
-files from the command line directly.  See <xref
-linkend='sec-nix-install-package' /> for details.</para>
-
-</chapter>
\ No newline at end of file
diff --git a/doc/manual/packages/package-management.xml b/doc/manual/packages/package-management.xml
index 5cc5c381bb43..61e55faeb311 100644
--- a/doc/manual/packages/package-management.xml
+++ b/doc/manual/packages/package-management.xml
@@ -18,7 +18,6 @@ who want to <emphasis>create</emphasis> packages should consult
 <xi:include href="profiles.xml" />
 <xi:include href="garbage-collection.xml" />
 <xi:include href="channels.xml" />
-<xi:include href="one-click.xml" />
 <xi:include href="sharing-packages.xml" />
 
 </part>
diff --git a/maintainers/upload-release.pl b/maintainers/upload-release.pl
new file mode 100755
index 000000000000..5c6bb5685fcc
--- /dev/null
+++ b/maintainers/upload-release.pl
@@ -0,0 +1,117 @@
+#! /usr/bin/env nix-shell
+#! nix-shell -i perl -p perl perlPackages.LWPUserAgent perlPackages.LWPProtocolHttps perlPackages.FileSlurp gnupg1
+
+use strict;
+use Data::Dumper;
+use File::Basename;
+use File::Path;
+use File::Slurp;
+use JSON::PP;
+use LWP::UserAgent;
+
+my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n";
+
+my $releasesDir = "/home/eelco/mnt/releases";
+
+# FIXME: cut&paste from nixos-channel-scripts.
+sub fetch {
+    my ($url, $type) = @_;
+
+    my $ua = LWP::UserAgent->new;
+    $ua->default_header('Accept', $type) if defined $type;
+
+    my $response = $ua->get($url);
+    die "could not download $url: ", $response->status_line, "\n" unless $response->is_success;
+
+    return $response->decoded_content;
+}
+
+my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
+my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
+#print Dumper($evalInfo);
+
+my $nixRev = $evalInfo->{jobsetevalinputs}->{nix}->{revision} or die;
+
+my $tarballInfo = decode_json(fetch("$evalUrl/job/tarball", 'application/json'));
+
+my $releaseName = $tarballInfo->{releasename};
+$releaseName =~ /nix-(.*)$/ or die;
+my $version = $1;
+
+print STDERR "Nix revision is $nixRev, version is $version\n";
+
+File::Path::make_path($releasesDir);
+if (system("mountpoint -q $releasesDir") != 0) {
+    system("sshfs hydra-mirror:/releases $releasesDir") == 0 or die;
+}
+
+my $releaseDir = "$releasesDir/nix/$releaseName";
+File::Path::make_path($releaseDir);
+
+sub downloadFile {
+    my ($jobName, $productNr, $dstName) = @_;
+
+    my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
+
+    my $srcFile = $buildInfo->{buildproducts}->{$productNr}->{path} or die;
+    $dstName //= basename($srcFile);
+    my $dstFile = "$releaseDir/" . $dstName;
+
+    if (! -e $dstFile) {
+        print STDERR "downloading $srcFile to $dstFile...\n";
+        system("NIX_REMOTE=https://cache.nixos.org/ nix cat-store '$srcFile' > '$dstFile.tmp'") == 0
+            or die "unable to fetch $srcFile\n";
+        rename("$dstFile.tmp", $dstFile) or die;
+    }
+
+    my $sha256_expected = $buildInfo->{buildproducts}->{$productNr}->{sha256hash} or die;
+    my $sha256_actual = `nix hash-file --type sha256 '$dstFile'`;
+    chomp $sha256_actual;
+    if ($sha256_expected ne $sha256_actual) {
+        print STDERR "file $dstFile is corrupt\n";
+        exit 1;
+    }
+
+    write_file("$dstFile.sha256", $sha256_expected);
+
+    return ($dstFile, $sha256_expected);
+}
+
+downloadFile("tarball", "2"); # PDF
+downloadFile("tarball", "3"); # .tar.bz2
+my ($tarball, $tarballHash) = downloadFile("tarball", "4"); # .tar.xz
+my ($tarball_i686_linux, $tarball_i686_linux_hash) = downloadFile("binaryTarball.i686-linux", "1");
+my ($tarball_x86_64_linux, $tarball_x86_64_linux_hash) = downloadFile("binaryTarball.x86_64-linux", "1");
+my ($tarball_x86_64_darwin, $tarball_x86_64_darwin_hash) = downloadFile("binaryTarball.x86_64-darwin", "1");
+
+# Extract the HTML manual.
+File::Path::make_path("$releaseDir/manual");
+
+system("tar xvf $tarball --strip-components=3 -C $releaseDir/manual --wildcards '*/doc/manual/*.html' '*/doc/manual/*.css' '*/doc/manual/*.gif' '*/doc/manual/*.png'") == 0 or die;
+
+if (! -e "$releaseDir/manual/index.html") {
+    symlink("manual.html", "$releaseDir/manual/index.html") or die;
+}
+
+# Update the "latest" symlink.
+symlink("$releaseName", "$releasesDir/nix/latest-tmp") or die;
+rename("$releasesDir/nix/latest-tmp", "$releasesDir/nix/latest") or die;
+
+# Tag the release in Git.
+chdir("/home/eelco/Dev/nix-pristine") or die;
+system("git remote update origin") == 0 or die;
+system("git tag --force --sign $version $nixRev -m 'Tagging release $version'") == 0 or die;
+
+# Update the website.
+my $siteDir = "/home/eelco/Dev/nixos-homepage-pristine";
+write_file("$siteDir/nix-release.tt",
+           "[%-\n" .
+           "latestNixVersion = \"$version\"\n" .
+           "nix_hash_i686_linux = \"$tarball_i686_linux_hash\"\n" .
+           "nix_hash_x86_64_linux = \"$tarball_x86_64_linux_hash\"\n" .
+           "nix_hash_x86_64_darwin = \"$tarball_x86_64_darwin_hash\"\n" .
+           "-%]\n");
+
+system("cd $siteDir && nix-shell --run 'make nix/install nix/install.sig'") == 0 or die;
+
+system("cd $siteDir && git commit -a -m 'Nix $version released'") == 0 or die;
diff --git a/misc/docker/Dockerfile b/misc/docker/Dockerfile
index 098633fc3c86..20171d250207 100644
--- a/misc/docker/Dockerfile
+++ b/misc/docker/Dockerfile
@@ -5,7 +5,9 @@ RUN wget -O- http://nixos.org/releases/nix/nix-1.11.2/nix-1.11.2-x86_64-linux.ta
     && for i in $(seq 1 30); do echo "nixbld$i:x:$((30000 + $i)):30000:::" >> /etc/passwd; done \
     && mkdir -m 0755 /nix && USER=root sh nix-*-x86_64-linux/install \
     && echo ". /root/.nix-profile/etc/profile.d/nix.sh" >> /etc/profile \
-    && rm -r /nix-*-x86_64-linux
+    && rm -r /nix-*-x86_64-linux \
+    && apk --update add bash tar \
+    && rm -rf /var/cache/apk/*
 
 ONBUILD ENV \
     ENV=/etc/profile \
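
Note: the Dockerfile change adds bash and tar to the Alpine-based image and clears the apk cache. A hedged sketch of building and smoke-testing the image with standard Docker commands (the tag name is illustrative):

    # Build the image defined in misc/docker/Dockerfile and check that Nix works.
    docker build -t nix-docker-test misc/docker
    docker run --rm nix-docker-test sh -lc 'nix-env --version'
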
diff --git a/misc/launchd/org.nixos.nix-daemon.plist.in b/misc/launchd/org.nixos.nix-daemon.plist.in
index 66fcd155ee9b..0dd665db635f 100644
--- a/misc/launchd/org.nixos.nix-daemon.plist.in
+++ b/misc/launchd/org.nixos.nix-daemon.plist.in
@@ -12,5 +12,10 @@
     <string>/var/log/nix-daemon.log</string>
     <key>StandardOutPath</key>
     <string>/dev/null</string>
+    <key>EnvironmentVariables</key>
+    <dict>
+      <key>SSL_CERT_FILE</key>
+      <string>/nix/var/nix/profiles/default/etc/ssl/certs/ca-bundle.crt</string>
+    </dict>
   </dict>
 </plist>
diff --git a/misc/systemd/nix-daemon.service.in b/misc/systemd/nix-daemon.service.in
index 5fc04a3f5713..fcd799e177d0 100644
--- a/misc/systemd/nix-daemon.service.in
+++ b/misc/systemd/nix-daemon.service.in
@@ -7,3 +7,4 @@ ConditionPathIsReadWrite=@localstatedir@/nix/daemon-socket
 [Service]
 ExecStart=@@bindir@/nix-daemon nix-daemon --daemon
 KillMode=process
+Environment=XDG_CACHE_HOME=/root/.cache
diff --git a/nix.spec.in b/nix.spec.in
index 61da404a3844..edbc12d8f05c 100644
--- a/nix.spec.in
+++ b/nix.spec.in
@@ -10,7 +10,7 @@ License: LGPLv2+
 Group: Applications/System
 %endif
 URL: http://nixos.org/
-Source0: %{name}-%{version}.tar.xz
+Source0: %{name}-%{version}.tar.bz2
 %if 0%{?el5}
 BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
 %endif
diff --git a/release.nix b/release.nix
index 1da34443dd4f..d236cdae9bf0 100644
--- a/release.nix
+++ b/release.nix
@@ -172,10 +172,6 @@ let
       };
 
 
-    rpm_fedora19i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora19i386) [];
-    rpm_fedora19x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora19x86_64) [];
-    rpm_fedora20i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora20i386) [];
-    rpm_fedora20x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora20x86_64) [];
     rpm_fedora21i386 = makeRPM_i686 (diskImageFuns: diskImageFuns.fedora21i386) [ "libsodium-devel" ];
     rpm_fedora21x86_64 = makeRPM_x86_64 (diskImageFunsFun: diskImageFunsFun.fedora21x86_64) [ "libsodium-devel" ];
 
@@ -183,10 +179,6 @@ let
     deb_debian8i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.debian8i386) [ "libsodium-dev" ] [ "libsodium13" ];
     deb_debian8x86_64 = makeDeb_x86_64 (diskImageFunsFun: diskImageFunsFun.debian8x86_64) [ "libsodium-dev" ] [ "libsodium13" ];
 
-    deb_ubuntu1310i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1310i386) [] [];
-    deb_ubuntu1310x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1310x86_64) [] [];
-    deb_ubuntu1404i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1404i386) [] [];
-    deb_ubuntu1404x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1404x86_64) [] [];
     deb_ubuntu1410i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1410i386) [] [];
     deb_ubuntu1410x86_64 = makeDeb_x86_64 (diskImageFuns: diskImageFuns.ubuntu1410x86_64) [] [];
     deb_ubuntu1504i386 = makeDeb_i686 (diskImageFuns: diskImageFuns.ubuntu1504i386) [ "libsodium-dev" ] [ "libsodium13" ];
@@ -236,7 +228,6 @@ let
     tests.evalNixOS =
       pkgs.runCommand "eval-nixos" { buildInputs = [ build.x86_64-linux ]; }
         ''
-          export NIX_DB_DIR=$TMPDIR
           export NIX_STATE_DIR=$TMPDIR
           nix-store --init
 
@@ -264,12 +255,8 @@ let
           binaryTarball.x86_64-linux
           deb_debian8i386
           deb_debian8x86_64
-          deb_ubuntu1404i386 # LTS
-          deb_ubuntu1404x86_64 # LTS
           deb_ubuntu1504i386
           deb_ubuntu1504x86_64
-          rpm_fedora20i386
-          rpm_fedora20x86_64
           rpm_fedora21i386
           rpm_fedora21x86_64
           tests.remoteBuilds
diff --git a/scripts/download-from-binary-cache.pl.in b/scripts/download-from-binary-cache.pl.in
deleted file mode 100644
index a4f858610aca..000000000000
--- a/scripts/download-from-binary-cache.pl.in
+++ /dev/null
@@ -1,632 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use DBI;
-use DBD::SQLite;
-use File::Basename;
-use IO::Select;
-use Nix::Config;
-use Nix::Store;
-use Nix::Utils;
-use Nix::Manifest;
-use WWW::Curl::Easy;
-use WWW::Curl::Multi;
-use strict;
-
-STDERR->autoflush(1);
-binmode STDERR, ":encoding(utf8)";
-
-Nix::Config::readConfig;
-
-my @caches;
-my $gotCaches = 0;
-
-my $maxParallelRequests = int($Nix::Config::config{"binary-caches-parallel-connections"} // 25);
-$maxParallelRequests = 1 if $maxParallelRequests < 1;
-
-my $ttlNegative = 24 * 3600; # when to purge negative lookups from the database
-my $ttlNegativeUse = 3600; # how long negative lookups are valid for non-"have" lookups
-my $didExpiration = 0;
-
-my $showAfter = 5; # show that we're waiting for a request after this many seconds
-
-my $debug = ($Nix::Config::config{"debug-subst"} // "") eq 1 || ($Nix::Config::config{"untrusted-debug-subst"} // "") eq 1;
-
-my $cacheFileURLs = ($ENV{"_NIX_CACHE_FILE_URLS"} // "") eq 1; # for testing
-
-my ($dbh, $queryCache, $insertNAR, $queryNAR, $insertNARExistence, $queryNARExistence, $expireNARExistence);
-
-my $curlm = WWW::Curl::Multi->new;
-my $activeRequests = 0;
-my $curlIdCount = 1;
-my %requests;
-my %scheduled;
-my $caBundle = $ENV{"SSL_CERT_FILE"} // $ENV{"CURL_CA_BUNDLE"} // $ENV{"OPENSSL_X509_CERT_FILE"};
-$caBundle = "/etc/ssl/certs/ca-bundle.crt" if !$caBundle && -f "/etc/ssl/certs/ca-bundle.crt";
-$caBundle = "/etc/ssl/certs/ca-certificates.crt" if !$caBundle && -f "/etc/ssl/certs/ca-certificates.crt";
-
-my $userName = getpwuid($<) || $ENV{"USER"} or die "cannot figure out user name";
-
-my $userAgent = "Nix/$Nix::Config::version";
-
-sub isTrue {
-    my ($x) = @_;
-    return $x eq "true" || $x eq "1";
-}
-
-# FIXME: this should be cache URLs required to have valid signatures,
-# or "*" to require signatures on all binary caches.
-# FIXME: should binary caches using a key in
-# ‘binary-cache-public-keys’ be trusted by default?
-my $requireSignedBinaryCaches = ($Nix::Config::config{"signed-binary-caches"} // "0") ne "0";
-
-my $curlConnectTimeout = int(
-    $Nix::Config::config{"untrusted-connect-timeout"} //
-    $Nix::Config::config{"connect-timeout"} //
-    $ENV{"NIX_CONNECT_TIMEOUT"} // 0);
-
-
-sub addRequest {
-    my ($storePath, $url, $head) = @_;
-
-    my $curl = WWW::Curl::Easy->new;
-    my $curlId = $curlIdCount++;
-    $requests{$curlId} = { storePath => $storePath, url => $url, handle => $curl, content => "", type => $head ? "HEAD" : "GET"
-                         , shown => 0, started => time() };
-
-    $curl->setopt(CURLOPT_PRIVATE, $curlId);
-    $curl->setopt(CURLOPT_URL, $url);
-    open (my $fh, ">", \$requests{$curlId}->{content});
-    $curl->setopt(CURLOPT_WRITEDATA, $fh);
-    $curl->setopt(CURLOPT_FOLLOWLOCATION, 1);
-    $curl->setopt(CURLOPT_CAINFO, $caBundle) if defined $caBundle;
-
-    unless (isTrue($Nix::Config::config{"verify-https-binary-caches"} // "1")) {
-        $curl->setopt(CURLOPT_SSL_VERIFYPEER, 0);
-        $curl->setopt(CURLOPT_SSL_VERIFYHOST, 0);
-    }
-
-    $curl->setopt(CURLOPT_USERAGENT, $userAgent);
-    $curl->setopt(CURLOPT_NOBODY, 1) if $head;
-    $curl->setopt(CURLOPT_FAILONERROR, 1);
-    $curl->setopt(CURLOPT_CONNECTTIMEOUT, $curlConnectTimeout);
-    $curl->setopt(CURLOPT_TIMEOUT, 20 * 60);
-
-    if ($activeRequests >= $maxParallelRequests) {
-        $scheduled{$curlId} = 1;
-    } else {
-        $curlm->add_handle($curl);
-        $activeRequests++;
-    }
-
-    return $requests{$curlId};
-}
-
-
-sub processRequests {
-    while ($activeRequests) {
-        my ($rfds, $wfds, $efds) = $curlm->fdset();
-        #print STDERR "R = @{$rfds}, W = @{$wfds}, E = @{$efds}\n";
-
-        # Sleep until we can read or write some data.
-        if (scalar @{$rfds} + scalar @{$wfds} + scalar @{$efds} > 0) {
-            IO::Select->select(IO::Select->new(@{$rfds}), IO::Select->new(@{$wfds}), IO::Select->new(@{$efds}), 1.0);
-        }
-
-        if ($curlm->perform() != $activeRequests) {
-            while (my ($id, $result) = $curlm->info_read) {
-                if ($id) {
-                    my $request = $requests{$id} or die;
-                    my $handle = $request->{handle};
-                    $request->{result} = $result;
-                    $request->{httpStatus} = $handle->getinfo(CURLINFO_RESPONSE_CODE);
-
-                    print STDERR "$request->{type} on $request->{url} [$request->{result}, $request->{httpStatus}]\n" if $debug;
-
-                    $activeRequests--;
-                    delete $request->{handle};
-
-                    if (scalar(keys %scheduled) > 0) {
-                        my $id2 = (keys %scheduled)[0];
-                        $curlm->add_handle($requests{$id2}->{handle});
-                        $activeRequests++;
-                        delete $scheduled{$id2};
-                    }
-                }
-            }
-        }
-
-        my $time = time();
-        while (my ($key, $request) = each %requests) {
-            next unless defined $request->{handle};
-            next if $request->{shown};
-            if ($time > $request->{started} + $showAfter) {
-                print STDERR "still waiting for ‘$request->{url}’ after $showAfter seconds...\n";
-                $request->{shown} = 1;
-            }
-        }
-    }
-}
-
-
-sub initCache {
-    my $dbPath = "$Nix::Config::stateDir/binary-cache-v3.sqlite";
-
-    unlink "$Nix::Config::stateDir/binary-cache-v1.sqlite";
-    unlink "$Nix::Config::stateDir/binary-cache-v2.sqlite";
-
-    # Open/create the database.
-    $dbh = DBI->connect("dbi:SQLite:dbname=$dbPath", "", "")
-        or die "cannot open database ‘$dbPath’";
-    $dbh->{RaiseError} = 1;
-    $dbh->{PrintError} = 0;
-
-    $dbh->sqlite_busy_timeout(60 * 60 * 1000);
-
-    $dbh->do("pragma synchronous = off"); # we can always reproduce the cache
-    $dbh->do("pragma journal_mode = truncate");
-
-    # Initialise the database schema, if necessary.
-    $dbh->do(<<EOF);
-        create table if not exists BinaryCaches (
-            id        integer primary key autoincrement not null,
-            url       text unique not null,
-            timestamp integer not null,
-            storeDir  text not null,
-            wantMassQuery integer not null,
-            priority  integer not null
-        );
-EOF
-
-    $dbh->do(<<EOF);
-        create table if not exists NARs (
-            cache            integer not null,
-            storePath        text not null,
-            url              text not null,
-            compression      text not null,
-            fileHash         text,
-            fileSize         integer,
-            narHash          text,
-            narSize          integer,
-            refs             text,
-            deriver          text,
-            signedBy         text,
-            timestamp        integer not null,
-            primary key (cache, storePath),
-            foreign key (cache) references BinaryCaches(id) on delete cascade
-        );
-EOF
-
-    $dbh->do(<<EOF);
-        create table if not exists NARExistence (
-            cache            integer not null,
-            storePath        text not null,
-            exist            integer not null,
-            timestamp        integer not null,
-            primary key (cache, storePath),
-            foreign key (cache) references BinaryCaches(id) on delete cascade
-        );
-EOF
-
-    $dbh->do("create index if not exists NARExistenceByExistTimestamp on NARExistence (exist, timestamp)");
-
-    $queryCache = $dbh->prepare("select id, storeDir, wantMassQuery, priority from BinaryCaches where url = ?") or die;
-
-    $insertNAR = $dbh->prepare(
-        "insert or replace into NARs(cache, storePath, url, compression, fileHash, fileSize, narHash, " .
-        "narSize, refs, deriver, signedBy, timestamp) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)") or die;
-
-    $queryNAR = $dbh->prepare("select * from NARs where cache = ? and storePath = ?") or die;
-
-    $insertNARExistence = $dbh->prepare(
-        "insert or replace into NARExistence(cache, storePath, exist, timestamp) values (?, ?, ?, ?)") or die;
-
-    $queryNARExistence = $dbh->prepare("select exist, timestamp from NARExistence where cache = ? and storePath = ?") or die;
-
-    $expireNARExistence = $dbh->prepare("delete from NARExistence where exist = ? and timestamp < ?") or die;
-}
-
-
-sub getAvailableCaches {
-    return if $gotCaches;
-    $gotCaches = 1;
-
-    sub strToList {
-        my ($s) = @_;
-        return map { s/\/+$//; $_ } split(/ /, $s);
-    }
-
-    my @urls = strToList($Nix::Config::config{"binary-caches"} //
-        ($Nix::Config::storeDir eq "/nix/store" ? "https://cache.nixos.org" : ""));
-
-    my $urlsFiles = $Nix::Config::config{"binary-cache-files"}
-        // "$Nix::Config::stateDir/profiles/per-user/$userName/channels/binary-caches/*";
-    foreach my $urlFile (glob $urlsFiles) {
-        next unless -f $urlFile;
-        open FILE, "<$urlFile" or die "cannot open ‘$urlFile’\n";
-        my $url = <FILE>; chomp $url;
-        close FILE;
-        push @urls, strToList($url);
-    }
-
-    push @urls, strToList($Nix::Config::config{"extra-binary-caches"} // "");
-
-    # Allow Nix daemon users to override the binary caches to a subset
-    # of those listed in the config file.  Note that ‘untrusted-*’
-    # denotes options passed by the client.
-    my @trustedUrls = uniq(@urls, strToList($Nix::Config::config{"trusted-binary-caches"} // ""));
-
-    if (defined $Nix::Config::config{"untrusted-binary-caches"}) {
-        my @untrustedUrls = strToList $Nix::Config::config{"untrusted-binary-caches"};
-        @urls = ();
-        foreach my $url (@untrustedUrls) {
-            die "binary cache ‘$url’ is not trusted (please add it to ‘trusted-binary-caches’ in $Nix::Config::confDir/nix.conf)\n"
-                unless scalar(grep { $url eq $_ } @trustedUrls) > 0;
-            push @urls, $url;
-        }
-    }
-
-    my @untrustedUrls = strToList $Nix::Config::config{"untrusted-extra-binary-caches"} // "";
-    foreach my $url (@untrustedUrls) {
-        unless (scalar(grep { $url eq $_ } @trustedUrls) > 0) {
-            warn "binary cache ‘$url’ is not trusted (please add it to ‘trusted-binary-caches’ in $Nix::Config::confDir/nix.conf)\n";
-            next;
-        }
-        push @urls, $url;
-    }
-
-    foreach my $url (uniq @urls) {
-
-        # FIXME: not atomic.
-        $queryCache->execute($url);
-        my $res = $queryCache->fetchrow_hashref();
-        if (defined $res) {
-            next if $res->{storeDir} ne $Nix::Config::storeDir;
-            push @caches, { id => $res->{id}, url => $url, wantMassQuery => $res->{wantMassQuery}, priority => $res->{priority} };
-            next;
-        }
-
-        # Get the cache info file.
-        my $request = addRequest(undef, $url . "/nix-cache-info");
-        processRequests;
-
-        if ($request->{result} != 0) {
-            print STDERR "could not download ‘$request->{url}’ (" .
-                ($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
-            next;
-        }
-
-        my $storeDir = "/nix/store";
-        my $wantMassQuery = 0;
-        my $priority = 50;
-        foreach my $line (split "\n", $request->{content}) {
-            unless ($line =~ /^(.*): (.*)$/) {
-                print STDERR "bad cache info file ‘$request->{url}’\n";
-                return undef;
-            }
-            if ($1 eq "StoreDir") { $storeDir = $2; }
-            elsif ($1 eq "WantMassQuery") { $wantMassQuery = int($2); }
-            elsif ($1 eq "Priority") { $priority = int($2); }
-        }
-
-        $dbh->do("insert or replace into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?, ?, ?, ?, ?)",
-                 {}, $url, time(), $storeDir, $wantMassQuery, $priority);
-        $queryCache->execute($url);
-        $res = $queryCache->fetchrow_hashref() or die;
-        next if $storeDir ne $Nix::Config::storeDir;
-        push @caches, { id => $res->{id}, url => $url, wantMassQuery => $wantMassQuery, priority => $priority };
-    }
-
-    @caches = sort { $a->{priority} <=> $b->{priority} } @caches;
-
-    expireNegative();
-}
-
-
-sub shouldCache {
-    my ($url) = @_;
-    return $cacheFileURLs || $url !~ /^file:/;
-}
-
-
-sub processNARInfo {
-    my ($storePath, $cache, $request) = @_;
-
-    if ($request->{result} != 0) {
-        if ($request->{result} != 37 && $request->{httpStatus} != 404 && $request->{httpStatus} != 403) {
-            print STDERR "could not download ‘$request->{url}’ (" .
-                ($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
-        } else {
-            $insertNARExistence->execute($cache->{id}, basename($storePath), 0, time())
-                if shouldCache $request->{url};
-        }
-        return undef;
-    }
-
-    my $narInfo = parseNARInfo($storePath, $request->{content}, $requireSignedBinaryCaches, $request->{url});
-    return undef unless defined $narInfo;
-
-    die if $requireSignedBinaryCaches && !defined $narInfo->{signedBy};
-
-    # Cache the result.
-    $insertNAR->execute(
-        $cache->{id}, basename($storePath), $narInfo->{url}, $narInfo->{compression},
-        $narInfo->{fileHash}, $narInfo->{fileSize}, $narInfo->{narHash}, $narInfo->{narSize},
-        join(" ", @{$narInfo->{refs}}), $narInfo->{deriver}, $narInfo->{signedBy}, time())
-        if shouldCache $request->{url};
-
-    return $narInfo;
-}
-
-
-sub getCachedInfoFrom {
-    my ($storePath, $cache) = @_;
-
-    $queryNAR->execute($cache->{id}, basename($storePath));
-    my $res = $queryNAR->fetchrow_hashref();
-    return undef unless defined $res;
-
-    # We may previously have cached this info when signature checking
-    # was disabled.  In that case, ignore the cached info.
-    return undef if $requireSignedBinaryCaches && !defined $res->{signedBy};
-
-    return
-        { url => $res->{url}
-        , compression => $res->{compression}
-        , fileHash => $res->{fileHash}
-        , fileSize => $res->{fileSize}
-        , narHash => $res->{narHash}
-        , narSize => $res->{narSize}
-        , refs => [ split " ", $res->{refs} ]
-        , deriver => $res->{deriver}
-        , signedBy => $res->{signedBy}
-        } if defined $res;
-}
-
-
-sub negativeHit {
-    my ($storePath, $cache) = @_;
-    $queryNARExistence->execute($cache->{id}, basename($storePath));
-    my $res = $queryNARExistence->fetchrow_hashref();
-    return defined $res && $res->{exist} == 0 && time() - $res->{timestamp} < $ttlNegativeUse;
-}
-
-
-sub positiveHit {
-    my ($storePath, $cache) = @_;
-    return 1 if defined getCachedInfoFrom($storePath, $cache);
-    $queryNARExistence->execute($cache->{id}, basename($storePath));
-    my $res = $queryNARExistence->fetchrow_hashref();
-    return defined $res && $res->{exist} == 1;
-}
-
-
-sub expireNegative {
-    return if $didExpiration;
-    $didExpiration = 1;
-    my $time = time();
-    # Round up to the next multiple of the TTL to ensure that we do
-    # expiration only once per time interval.  E.g. if $ttlNegative ==
-    # 3600, we expire entries at most once per hour.  This is
-    # presumably faster than expiring a few entries per request (and
-    # thus doing a transaction).
-    my $limit = (int($time / $ttlNegative) - 1) * $ttlNegative;
-    $expireNARExistence->execute($limit, 0);
-    print STDERR "expired ", $expireNARExistence->rows, " negative entries\n" if $debug;
-}
-
-
-sub printInfo {
-    my ($storePath, $info) = @_;
-    print "$storePath\n";
-    print $info->{deriver} ? "$Nix::Config::storeDir/$info->{deriver}" : "", "\n";
-    print scalar @{$info->{refs}}, "\n";
-    print "$Nix::Config::storeDir/$_\n" foreach @{$info->{refs}};
-    print $info->{fileSize} || 0, "\n";
-    print $info->{narSize} || 0, "\n";
-}
-
-
-sub infoUrl {
-    my ($binaryCacheUrl, $storePath) = @_;
-    my $pathHash = substr(basename($storePath), 0, 32);
-    my $infoUrl = "$binaryCacheUrl/$pathHash.narinfo";
-}
-
-
-sub printInfoParallel {
-    my @paths = @_;
-
-    # First print all paths for which we have cached info.
-    my @left;
-    foreach my $storePath (@paths) {
-        my $found = 0;
-        foreach my $cache (@caches) {
-            my $info = getCachedInfoFrom($storePath, $cache);
-            if (defined $info) {
-                printInfo($storePath, $info);
-                $found = 1;
-                last;
-            }
-        }
-        push @left, $storePath if !$found;
-    }
-
-    return if scalar @left == 0;
-
-    foreach my $cache (@caches) {
-
-        my @left2;
-        %requests = ();
-        foreach my $storePath (@left) {
-            if (negativeHit($storePath, $cache)) {
-                push @left2, $storePath;
-                next;
-            }
-            addRequest($storePath, infoUrl($cache->{url}, $storePath));
-        }
-
-        processRequests;
-
-        foreach my $request (values %requests) {
-            my $info = processNARInfo($request->{storePath}, $cache, $request);
-            if (defined $info) {
-                printInfo($request->{storePath}, $info);
-            } else {
-                push @left2, $request->{storePath};
-            }
-        }
-
-        @left = @left2;
-    }
-}
-
-
-sub printSubstitutablePaths {
-    my @paths = @_;
-
-    # First look for paths that have cached info.
-    my @left;
-    foreach my $storePath (@paths) {
-        my $found = 0;
-        foreach my $cache (@caches) {
-            next unless $cache->{wantMassQuery};
-            if (positiveHit($storePath, $cache)) {
-                print "$storePath\n";
-                $found = 1;
-                last;
-            }
-        }
-        push @left, $storePath if !$found;
-    }
-
-    return if scalar @left == 0;
-
-    # For remaining paths, do HEAD requests.
-    foreach my $cache (@caches) {
-        next unless $cache->{wantMassQuery};
-        my @left2;
-        %requests = ();
-        foreach my $storePath (@left) {
-            if (negativeHit($storePath, $cache)) {
-                push @left2, $storePath;
-                next;
-            }
-            addRequest($storePath, infoUrl($cache->{url}, $storePath), 1);
-        }
-
-        processRequests;
-
-        foreach my $request (values %requests) {
-            if ($request->{result} != 0) {
-                if ($request->{result} != 37 && $request->{httpStatus} != 404 && $request->{httpStatus} != 403) {
-                    print STDERR "could not check ‘$request->{url}’ (" .
-                        ($request->{result} != 0 ? "Curl error $request->{result}" : "HTTP status $request->{httpStatus}") . ")\n";
-                } else {
-                    $insertNARExistence->execute($cache->{id}, basename($request->{storePath}), 0, time())
-                        if shouldCache $request->{url};
-                }
-                push @left2, $request->{storePath};
-            } else {
-                $insertNARExistence->execute($cache->{id}, basename($request->{storePath}), 1, time())
-                    if shouldCache $request->{url};
-                print "$request->{storePath}\n";
-            }
-        }
-
-        @left = @left2;
-    }
-}
-
-
-sub downloadBinary {
-    my ($storePath, $destPath) = @_;
-
-    foreach my $cache (@caches) {
-        my $info = getCachedInfoFrom($storePath, $cache);
-
-        unless (defined $info) {
-            next if negativeHit($storePath, $cache);
-            my $request = addRequest($storePath, infoUrl($cache->{url}, $storePath));
-            processRequests;
-            $info = processNARInfo($storePath, $cache, $request);
-        }
-
-        next unless defined $info;
-
-        my $decompressor;
-        if ($info->{compression} eq "bzip2") { $decompressor = "| $Nix::Config::bzip2 -d"; }
-        elsif ($info->{compression} eq "xz") { $decompressor = "| $Nix::Config::xz -d"; }
-        elsif ($info->{compression} eq "none") { $decompressor = ""; }
-        else {
-            print STDERR "unknown compression method ‘$info->{compression}’\n";
-            next;
-        }
-        my $url = "$cache->{url}/$info->{url}"; # FIXME: handle non-relative URLs
-        die if $requireSignedBinaryCaches && !defined $info->{signedBy};
-        print STDERR "\n*** Downloading ‘$url’ ", ($requireSignedBinaryCaches ? "(signed by ‘$info->{signedBy}’) " : ""), "to ‘$storePath’...\n";
-        checkURL $url;
-        if (system("$Nix::Config::curl --fail --location --connect-timeout $curlConnectTimeout -A '$userAgent' '$url' $decompressor | $Nix::Config::binDir/nix-store --restore $destPath") != 0) {
-            warn "download of ‘$url’ failed" . ($! ? ": $!" : "") . "\n";
-            next;
-        }
-
-        # Tell Nix about the expected hash so it can verify it.
-        die unless defined $info->{narHash} && $info->{narHash} ne "";
-        print "$info->{narHash}\n";
-
-        print STDERR "\n";
-        return;
-    }
-
-    print STDERR "could not download ‘$storePath’ from any binary cache\n";
-    exit 1;
-}
-
-
-# Bail out right away if binary caches are disabled.
-exit 0 if
-    ($Nix::Config::config{"use-binary-caches"} // "true") eq "false" ||
-    ($Nix::Config::config{"untrusted-use-binary-caches"} // "true") eq "false";
-print "\n";
-flush STDOUT;
-
-initCache();
-
-
-if ($ARGV[0] eq "--query") {
-
-    while (<STDIN>) {
-        getAvailableCaches;
-        chomp;
-        my ($cmd, @args) = split " ", $_;
-
-        if ($cmd eq "have") {
-            print STDERR "checking binary caches for existence of @args\n" if $debug;
-            printSubstitutablePaths(@args);
-            print "\n";
-        }
-
-        elsif ($cmd eq "info") {
-            print STDERR "checking binary caches for info on @args\n" if $debug;
-            printInfoParallel(@args);
-            print "\n";
-        }
-
-        else { die "unknown command ‘$cmd’"; }
-
-        flush STDOUT;
-    }
-
-}
-
-elsif ($ARGV[0] eq "--substitute") {
-    my $storePath = $ARGV[1] or die;
-    my $destPath = $ARGV[2] or die;
-    getAvailableCaches;
-    downloadBinary($storePath, $destPath);
-}
-
-else {
-    die;
-}
diff --git a/scripts/local.mk b/scripts/local.mk
index 142188ad5f47..ee8ae6845dc1 100644
--- a/scripts/local.mk
+++ b/scripts/local.mk
@@ -1,8 +1,5 @@
 nix_bin_scripts := \
-  $(d)/nix-channel \
   $(d)/nix-copy-closure \
-  $(d)/nix-install-package \
-  $(d)/nix-push
 
 bin-scripts += $(nix_bin_scripts)
 
@@ -12,18 +9,11 @@ nix_noinst_scripts := \
   $(d)/nix-profile.sh \
   $(d)/nix-reduce-build
 
-ifeq ($(OS), Darwin)
-  nix_noinst_scripts += $(d)/resolve-system-dependencies.pl
-endif
-
 noinst-scripts += $(nix_noinst_scripts)
 
 profiledir = $(sysconfdir)/profile.d
 
 $(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
 $(eval $(call install-program-in, $(d)/build-remote.pl, $(libexecdir)/nix))
-ifeq ($(OS), Darwin)
-  $(eval $(call install-program-in, $(d)/resolve-system-dependencies.pl, $(libexecdir)/nix))
-endif
 
 clean-files += $(nix_bin_scripts) $(nix_noinst_scripts)
diff --git a/scripts/nix-channel.in b/scripts/nix-channel.in
deleted file mode 100755
index 65084ff1f34a..000000000000
--- a/scripts/nix-channel.in
+++ /dev/null
@@ -1,228 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use File::Basename;
-use File::Path qw(mkpath);
-use Nix::Config;
-use Nix::Manifest;
-use File::Temp qw(tempdir);
-
-binmode STDERR, ":encoding(utf8)";
-
-Nix::Config::readConfig;
-
-
-# Turn on caching in nix-prefetch-url.
-my $channelCache = "$Nix::Config::stateDir/channel-cache";
-mkdir $channelCache, 0755 unless -e $channelCache;
-$ENV{'NIX_DOWNLOAD_CACHE'} = $channelCache if -W $channelCache;
-
-# Figure out the name of the `.nix-channels' file to use.
-my $home = $ENV{"HOME"} or die '$HOME not set\n';
-my $channelsList = "$home/.nix-channels";
-my $nixDefExpr = "$home/.nix-defexpr";
-
-# Figure out the name of the channels profile.
-my $userName = getpwuid($<) || $ENV{"USER"} or die "cannot figure out user name";
-my $profile = "$Nix::Config::stateDir/profiles/per-user/$userName/channels";
-mkpath(dirname $profile, 0, 0755);
-
-my %channels;
-
-
-# Reads the list of channels.
-sub readChannels {
-    return if (!-f $channelsList);
-    open CHANNELS, "<$channelsList" or die "cannot open ‘$channelsList’: $!";
-    while (<CHANNELS>) {
-        chomp;
-        next if /^\s*\#/;
-        my ($url, $name) = split ' ', $_;
-        $url =~ s/\/*$//; # remove trailing slashes
-        $name = basename $url unless defined $name;
-        $channels{$name} = $url;
-    }
-    close CHANNELS;
-}
-
-
-# Writes the list of channels.
-sub writeChannels {
-    open CHANNELS, ">$channelsList" or die "cannot open ‘$channelsList’: $!";
-    foreach my $name (keys %channels) {
-        print CHANNELS "$channels{$name} $name\n";
-    }
-    close CHANNELS;
-}
-
-
-# Adds a channel.
-sub addChannel {
-    my ($url, $name) = @_;
-    die "invalid channel URL ‘$url’" unless $url =~ /^(file|http|https):\/\//;
-    die "invalid channel identifier ‘$name’" unless $name =~ /^[a-zA-Z0-9_][a-zA-Z0-9_\-\.]*$/;
-    readChannels;
-    $channels{$name} = $url;
-    writeChannels;
-}
-
-
-# Remove a channel.
-sub removeChannel {
-    my ($name) = @_;
-    readChannels;
-    my $url = $channels{$name};
-    delete $channels{$name};
-    writeChannels;
-
-    system("$Nix::Config::binDir/nix-env --profile '$profile' -e '$name'") == 0
-        or die "cannot remove channel ‘$name’\n";
-}
-
-
-# Fetch Nix expressions and binary cache URLs from the subscribed channels.
-sub update {
-    my @channelNames = @_;
-
-    readChannels;
-
-    # Download each channel.
-    my $exprs = "";
-    foreach my $name (keys %channels) {
-        next if scalar @channelNames > 0 && ! grep { $_ eq $name } @{channelNames};
-
-        my $url = $channels{$name};
-
-        # We want to download the url to a file to see if it's a tarball while also checking if we
-        # got redirected in the process, so that we can grab the various parts of a nix channel
-        # definition from a consistent location if the redirect changes mid-download.
-        my $tmpdir = tempdir( CLEANUP => 1 );
-        my $filename;
-        ($url, $filename) = `cd $tmpdir && $Nix::Config::curl --silent --write-out '%{url_effective}\n%{filename_effective}' -L '$url' -O`;
-        chomp $url;
-        die "$0: unable to check ‘$url’\n" if $? != 0;
-
-        # If the URL contains a version number, append it to the name
-        # attribute (so that "nix-env -q" on the channels profile
-        # shows something useful).
-        my $cname = $name;
-        $cname .= $1 if basename($url) =~ /(-\d.*)$/;
-
-        my $path;
-        my $ret = -1;
-        if (-e "$tmpdir/$filename" && $filename =~ /\.tar\.(gz|bz2|xz)$/) {
-            # Get our temporary download into the store.
-            (my $hash, $path) = `PRINT_PATH=1 QUIET=1 $Nix::Config::binDir/nix-prefetch-url 'file://$tmpdir/$filename'`;
-            chomp $path;
-
-            # Try unpacking the expressions to see if they'll be valid for us to process later.
-            # Like anything in nix, this will cache the result so we don't do it again outside of the loop below.
-            $ret = system("$Nix::Config::binDir/nix-build --no-out-link -E 'import <nix/unpack-channel.nix> " .
-                          "{ name = \"$cname\"; channelName = \"$name\"; src = builtins.storePath \"$path\"; }'");
-        }
-
-        # The URL doesn't unpack directly, so let's try treating it like a full channel folder with files in it
-        my $extraAttrs = "";
-        if ($ret != 0) {
-            # Check if the channel advertises a binary cache.
-            my $binaryCacheURL = `$Nix::Config::curl --silent '$url'/binary-cache-url`;
-            $extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; "
-                if $? == 0 && $binaryCacheURL ne "";
-
-            # Download the channel tarball.
-            my $fullURL = "$url/nixexprs.tar.xz";
-            system("$Nix::Config::curl --fail --silent --head '$fullURL' > /dev/null") == 0 or
-                $fullURL = "$url/nixexprs.tar.bz2";
-            print STDERR "downloading Nix expressions from ‘$fullURL’...\n";
-            (my $hash, $path) = `PRINT_PATH=1 QUIET=1 $Nix::Config::binDir/nix-prefetch-url '$fullURL'`;
-            die "cannot fetch ‘$fullURL’\n" if $? != 0;
-            chomp $path;
-        }
-
-        # Regardless of where it came from, add the expression representing this channel to accumulated expression
-        $exprs .= "'f: f { name = \"$cname\"; channelName = \"$name\"; src = builtins.storePath \"$path\"; $extraAttrs }' ";
-    }
-
-    # Unpack the channel tarballs into the Nix store and install them
-    # into the channels profile.
-    print STDERR "unpacking channels...\n";
-    system("$Nix::Config::binDir/nix-env --profile '$profile' " .
-           "-f '<nix/unpack-channel.nix>' -i -E $exprs --quiet") == 0
-           or die "cannot unpack the channels";
-
-    # Make the channels appear in nix-env.
-    unlink $nixDefExpr if -l $nixDefExpr; # old-skool ~/.nix-defexpr
-    mkdir $nixDefExpr or die "cannot create directory ‘$nixDefExpr’" if !-e $nixDefExpr;
-    my $channelLink = "$nixDefExpr/channels";
-    unlink $channelLink; # !!! not atomic
-    symlink($profile, $channelLink) or die "cannot symlink ‘$channelLink’ to ‘$profile’";
-}
-
-
-die "$0: argument expected\n" if scalar @ARGV == 0;
-
-
-while (scalar @ARGV) {
-    my $arg = shift @ARGV;
-
-    if ($arg eq "--add") {
-        die "$0: ‘--add’ requires one or two arguments\n" if scalar @ARGV < 1 || scalar @ARGV > 2;
-        my $url = shift @ARGV;
-        my $name = shift @ARGV;
-        unless (defined $name) {
-            $name = basename $url;
-            $name =~ s/-unstable//;
-            $name =~ s/-stable//;
-        }
-        addChannel($url, $name);
-        last;
-    }
-
-    if ($arg eq "--remove") {
-        die "$0: ‘--remove’ requires one argument\n" if scalar @ARGV != 1;
-        removeChannel(shift @ARGV);
-        last;
-    }
-
-    if ($arg eq "--list") {
-        die "$0: ‘--list’ requires one argument\n" if scalar @ARGV != 0;
-        readChannels;
-        foreach my $name (keys %channels) {
-            print "$name $channels{$name}\n";
-        }
-        last;
-    }
-
-    elsif ($arg eq "--update") {
-        update(@ARGV);
-        last;
-    }
-
-    elsif ($arg eq "--rollback") {
-        die "$0: ‘--rollback’ has at most one argument\n" if scalar @ARGV > 1;
-        my $generation = shift @ARGV;
-        my @args = ("$Nix::Config::binDir/nix-env", "--profile", $profile);
-        if (defined $generation) {
-            die "invalid channel generation number ‘$generation’" unless $generation =~ /^[0-9]+$/;
-            push @args, "--switch-generation", $generation;
-        } else {
-            push @args, "--rollback";
-        }
-        system(@args) == 0 or exit 1;
-        last;
-    }
-
-    elsif ($arg eq "--help") {
-        exec "man nix-channel" or die;
-    }
-
-    elsif ($arg eq "--version") {
-        print "nix-channel (Nix) $Nix::Config::version\n";
-        exit 0;
-    }
-
-    else {
-        die "unknown argument ‘$arg’; try ‘--help’\n";
-    }
-}
diff --git a/scripts/nix-install-package.in b/scripts/nix-install-package.in
deleted file mode 100755
index ba349774af54..000000000000
--- a/scripts/nix-install-package.in
+++ /dev/null
@@ -1,127 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use Nix::Config;
-use Nix::Utils;
-
-binmode STDERR, ":encoding(utf8)";
-
-
-# Parse the command line arguments.
-my @args = @ARGV;
-
-my $source;
-my $fromURL = 0;
-my @extraNixEnvArgs = ();
-my $interactive = 1;
-my $op = "--install";
-
-while (scalar @args) {
-    my $arg = shift @args;
-    if ($arg eq "--help") {
-        exec "man nix-install-package" or die;
-    }
-    elsif ($arg eq "--url") {
-        $fromURL = 1;
-    }
-    elsif ($arg eq "--profile" || $arg eq "-p") {
-        my $profile = shift @args;
-        die "$0: ‘--profile’ requires an argument\n" if !defined $profile;
-        push @extraNixEnvArgs, "-p", $profile;
-    }
-    elsif ($arg eq "--set") {
-        $op = "--set";
-    }
-    elsif ($arg eq "--non-interactive") {
-        $interactive = 0;
-    }
-    else {
-        $source = $arg;
-    }
-}
-
-die "$0: please specify a .nixpkg file or URL\n" unless defined $source;
-
-
-# Re-execute in a terminal, if necessary, so that if we're executed
-# from a web browser, the user gets to see us.
-if ($interactive && !defined $ENV{"NIX_HAVE_TERMINAL"}) {
-    $ENV{"NIX_HAVE_TERMINAL"} = "1";
-    $ENV{"LD_LIBRARY_PATH"} = "";
-    foreach my $term ("xterm", "konsole", "gnome-terminal", "xterm") {
-        exec($term, "-e", "$Nix::Config::binDir/nix-install-package", @ARGV);
-    }
-    die "cannot execute ‘xterm’";
-}
-
-
-my $tmpDir = mkTempDir("nix-install-package");
-
-
-sub barf {
-    my $msg = shift;
-    print "\nInstallation failed: $msg\n";
-    <STDIN> if $interactive;
-    exit 1;
-}
-
-
-# Download the package description, if necessary.
-my $pkgFile = $source;
-if ($fromURL) {
-    $pkgFile = "$tmpDir/tmp.nixpkg";
-    system("@curl@", "-L", "--silent", $source, "-o", $pkgFile) == 0
-        or barf "curl failed: $?";
-}
-
-
-# Read and parse the package file.
-open PKGFILE, "<$pkgFile" or barf "cannot open ‘$pkgFile’: $!";
-my $contents = <PKGFILE>;
-close PKGFILE;
-
-my $nameRE = "(?: [A-Za-z0-9\+\-\.\_\?\=]+ )"; # see checkStoreName()
-my $systemRE = "(?: [A-Za-z0-9\+\-\_]+ )";
-my $pathRE = "(?: \/ [\/A-Za-z0-9\+\-\.\_\?\=]* )";
-
-# Note: $pathRE doesn't check that whether we're looking at a valid
-# store path.  We'll let nix-env do that.
-
-$contents =~
-    / ^ \s* (\S+) \s+ (\S+) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) ( \s+ ($Nix::Utils::urlRE) )?  /x
-    or barf "invalid package contents";
-my $version = $1;
-my $manifestURL = $2;
-my $drvName = $3;
-my $system = $4;
-my $drvPath = $5;
-my $outPath = $6;
-my $binaryCacheURL = $8;
-
-barf "invalid package version ‘$version’" unless $version eq "NIXPKG1";
-
-
-if ($interactive) {
-    # Ask confirmation.
-    print "Do you want to install ‘$drvName’ (Y/N)? ";
-    my $reply = <STDIN>;
-    chomp $reply;
-    exit if $reply ne "y" && $reply ne "Y";
-}
-
-
-die "$0: package does not supply a binary cache\n" unless defined $binaryCacheURL;
-
-push @extraNixEnvArgs, "--option", "extra-binary-caches", $binaryCacheURL;
-
-
-print "\nInstalling package...\n";
-system("$Nix::Config::binDir/nix-env", $op, $outPath, "--force-name", $drvName, @extraNixEnvArgs) == 0
-    or barf "nix-env failed: $?";
-
-
-if ($interactive) {
-    print "\nInstallation succeeded! Press Enter to continue.\n";
-    <STDIN>;
-}
diff --git a/scripts/nix-push.in b/scripts/nix-push.in
deleted file mode 100755
index 54456ac9512e..000000000000
--- a/scripts/nix-push.in
+++ /dev/null
@@ -1,296 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use File::Basename;
-use File::Path qw(mkpath);
-use File::stat;
-use File::Copy;
-use MIME::Base64;
-use Nix::Config;
-use Nix::Store;
-use Nix::Manifest;
-use Nix::Utils;
-
-binmode STDERR, ":encoding(utf8)";
-
-my $tmpDir = mkTempDir("nix-push");
-
-my $nixExpr = "$tmpDir/create-nars.nix";
-
-
-# Parse the command line.
-my $compressionType = "xz";
-my $force = 0;
-my $destDir;
-my $writeManifest = 0;
-my $manifestPath;
-my $archivesURL;
-my $link = 0;
-my $secretKeyFile;
-my @roots;
-my @buildArgs;
-
-for (my $n = 0; $n < scalar @ARGV; $n++) {
-    my $arg = $ARGV[$n];
-
-    if ($arg eq "--help") {
-        exec "man nix-push" or die;
-    } elsif ($arg eq "--bzip2") {
-        $compressionType = "bzip2";
-    } elsif ($arg eq "--none") {
-        $compressionType = "none";
-    } elsif ($arg eq "--force") {
-        $force = 1;
-    } elsif ($arg eq "--dest") {
-        $n++;
-        die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
-        $destDir = $ARGV[$n];
-        mkpath($destDir, 0, 0755);
-    } elsif ($arg eq "--manifest") {
-        $writeManifest = 1;
-    } elsif ($arg eq "--manifest-path") {
-        $n++;
-        die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
-        $manifestPath = $ARGV[$n];
-        $writeManifest = 1;
-        mkpath(dirname($manifestPath), 0, 0755);
-    } elsif ($arg eq "--url-prefix") {
-        $n++;
-        die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
-        $archivesURL = $ARGV[$n];
-    } elsif ($arg eq "--link") {
-        $link = 1;
-    } elsif ($arg eq "--key-file") {
-        $n++;
-        die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
-        $secretKeyFile = $ARGV[$n];
-    } elsif ($arg eq "--max-jobs" || $arg eq "-j") {
-        $n++;
-        die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
-        push @buildArgs, ($arg, $ARGV[$n]);
-    } elsif (substr($arg, 0, 1) eq "-") {
-        die "$0: unknown flag ‘$arg’\n";
-    } else {
-        push @roots, $arg;
-    }
-}
-
-die "$0: please specify a destination directory\n" if !defined $destDir;
-
-$archivesURL = "file://$destDir" unless defined $archivesURL;
-
-
-# From the given store paths, determine the set of requisite store
-# paths, i.e, the paths required to realise them.
-my %storePaths;
-
-foreach my $path (@roots) {
-    # Get all paths referenced by the normalisation of the given
-    # Nix expression.
-    my $pid = open(READ,
-        "$Nix::Config::binDir/nix-store --query --requisites --force-realise " .
-        "--include-outputs '$path'|") or die;
-
-    while (<READ>) {
-        chomp;
-        die "bad: $_" unless /^\//;
-        $storePaths{$_} = "";
-    }
-
-    close READ or die "nix-store failed: $?";
-}
-
-my @storePaths = keys %storePaths;
-
-
-# Don't create archives for files that are already in the binary cache.
-my @storePaths2;
-my %narFiles;
-foreach my $storePath (@storePaths) {
-    my $pathHash = substr(basename($storePath), 0, 32);
-    my $narInfoFile = "$destDir/$pathHash.narinfo";
-    if (!$force && -e $narInfoFile) {
-        my $narInfo = parseNARInfo($storePath, readFile($narInfoFile), 0, $narInfoFile) or die "cannot read ‘$narInfoFile’\n";
-        my $narFile = "$destDir/$narInfo->{url}";
-        if (-e $narFile) {
-            print STDERR "skipping existing $storePath\n";
-            # Add the NAR info to $narFiles if we're writing a
-            # manifest.
-            $narFiles{$storePath} = [
-                { url => ("$archivesURL/" . basename $narInfo->{url})
-                  , hash => $narInfo->{fileHash}
-                  , size => $narInfo->{fileSize}
-                  , compressionType => $narInfo->{compression}
-                  , narHash => $narInfo->{narHash}
-                  , narSize => $narInfo->{narSize}
-                  , references => join(" ", map { "$Nix::Config::storeDir/$_" } @{$narInfo->{refs}})
-                  , deriver => $narInfo->{deriver} ? "$Nix::Config::storeDir/$narInfo->{deriver}" : undef
-                  }
-            ] if $writeManifest;
-            next;
-        }
-    }
-    push @storePaths2, $storePath;
-}
-
-
-# Create a list of Nix derivations that turn each path into a Nix
-# archive.
-open NIX, ">$nixExpr";
-print NIX "[";
-
-foreach my $storePath (@storePaths2) {
-    die unless ($storePath =~ /\/[0-9a-z]{32}[^\"\\\$]*$/);
-
-    # Construct a Nix expression that creates a Nix archive.
-    my $nixexpr =
-        "(import <nix/nar.nix> " .
-        "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"sha256\"; compressionType = \"$compressionType\"; }) ";
-
-    print NIX $nixexpr;
-}
-
-print NIX "]";
-close NIX;
-
-
-# Build the Nix expression.
-print STDERR "building compressed archives...\n";
-my @narPaths;
-my $pid = open(READ, "-|", "$Nix::Config::binDir/nix-build", $nixExpr, "-o", "$tmpDir/result", @buildArgs)
-    or die "cannot run nix-build";
-while (<READ>) {
-    chomp;
-    die unless /^\//;
-    push @narPaths, $_;
-}
-close READ or die "nix-build failed: $?";
-
-
-# Write the cache info file.
-my $cacheInfoFile = "$destDir/nix-cache-info";
-if (! -e $cacheInfoFile) {
-    open FILE, ">$cacheInfoFile" or die "cannot create $cacheInfoFile: $!";
-    print FILE "StoreDir: $Nix::Config::storeDir\n";
-    print FILE "WantMassQuery: 0\n"; # by default, don't hit this cache for "nix-env -qas"
-    close FILE;
-}
-
-
-# Copy the archives and the corresponding NAR info files.
-print STDERR "copying archives...\n";
-
-my $totalNarSize = 0;
-my $totalCompressedSize = 0;
-
-for (my $n = 0; $n < scalar @storePaths2; $n++) {
-    my $storePath = $storePaths2[$n];
-    my $narDir = $narPaths[$n];
-    my $baseName = basename $storePath;
-
-    # Get info about the store path.
-    my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($storePath, 1);
-
-    # In some exceptional cases (such as VM tests that use the Nix
-    # store of the host), the database doesn't contain the hash.  So
-    # compute it.
-    if ($narHash =~ /^sha256:0*$/) {
-        my $nar = "$tmpDir/nar";
-        system("$Nix::Config::binDir/nix-store --dump $storePath > $nar") == 0
-            or die "cannot dump $storePath\n";
-        $narHash = `$Nix::Config::binDir/nix-hash --type sha256 --base32 --flat $nar`;
-        die "cannot hash ‘$nar’" if $? != 0;
-        chomp $narHash;
-        $narHash = "sha256:$narHash";
-        $narSize = stat("$nar")->size;
-        unlink $nar or die;
-    }
-
-    $totalNarSize += $narSize;
-
-    # Get info about the compressed NAR.
-    open HASH, "$narDir/nar-compressed-hash" or die "cannot open nar-compressed-hash";
-    my $compressedHash = <HASH>;
-    chomp $compressedHash;
-    $compressedHash =~ /^[0-9a-z]+$/ or die "invalid hash";
-    close HASH;
-
-    my $narName = "$compressedHash.nar" . ($compressionType eq "xz" ? ".xz" : $compressionType eq "bzip2" ? ".bz2" : "");
-
-    my $narFile = "$narDir/$narName";
-    (-f $narFile) or die "NAR file for $storePath not found";
-
-    my $compressedSize = stat($narFile)->size;
-    $totalCompressedSize += $compressedSize;
-
-    printf STDERR "%s [%.2f MiB, %.1f%%]\n", $storePath,
-        $compressedSize / (1024 * 1024), $compressedSize / $narSize * 100;
-
-    # Copy the compressed NAR.
-    my $dst = "$destDir/$narName";
-    if (! -f $dst) {
-        my $tmp = "$destDir/.tmp.$$.$narName";
-        if ($link) {
-            link($narFile, $tmp) or die "cannot link $tmp to $narFile: $!\n";
-        } else {
-            copy($narFile, $tmp) or die "cannot copy $narFile to $tmp: $!\n";
-        }
-        rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
-    }
-
-    # Write the info file.
-    my $info;
-    $info .= "StorePath: $storePath\n";
-    $info .= "URL: $narName\n";
-    $info .= "Compression: $compressionType\n";
-    $info .= "FileHash: sha256:$compressedHash\n";
-    $info .= "FileSize: $compressedSize\n";
-    $info .= "NarHash: $narHash\n";
-    $info .= "NarSize: $narSize\n";
-    $info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
-    if (defined $deriver) {
-        $info .= "Deriver: " . basename $deriver . "\n";
-        if (isValidPath($deriver)) {
-            my $drv = derivationFromPath($deriver);
-            $info .= "System: $drv->{platform}\n";
-        }
-    }
-
-    if (defined $secretKeyFile) {
-        my $secretKey = readFile $secretKeyFile;
-        my $fingerprint = fingerprintPath($storePath, $narHash, $narSize, $refs);
-        my $sig = signString($secretKey, $fingerprint);
-        $info .= "Sig: $sig\n";
-    }
-
-    my $pathHash = substr(basename($storePath), 0, 32);
-
-    $dst = "$destDir/$pathHash.narinfo";
-    if ($force || ! -f $dst) {
-        my $tmp = "$destDir/.tmp.$$.$pathHash.narinfo";
-        open INFO, ">$tmp" or die;
-        print INFO "$info" or die;
-        close INFO or die;
-        rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
-    }
-
-    $narFiles{$storePath} = [
-        { url => "$archivesURL/$narName"
-        , hash => "sha256:$compressedHash"
-        , size => $compressedSize
-        , compressionType => $compressionType
-        , narHash => "$narHash"
-        , narSize => $narSize
-        , references => join(" ", @{$refs})
-        , deriver => $deriver
-        }
-    ] if $writeManifest;
-}
-
-printf STDERR "total compressed size %.2f MiB, %.1f%%\n",
-    $totalCompressedSize / (1024 * 1024), $totalCompressedSize / ($totalNarSize || 1) * 100;
-
-
-# Optionally write a manifest.
-writeManifest($manifestPath // "$destDir/MANIFEST", \%narFiles, \()) if $writeManifest;
diff --git a/scripts/resolve-system-dependencies.pl.in b/scripts/resolve-system-dependencies.pl.in
deleted file mode 100755
index a20f0dc020fe..000000000000
--- a/scripts/resolve-system-dependencies.pl.in
+++ /dev/null
@@ -1,122 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use warnings;
-use Cwd qw(realpath);
-use Errno;
-use File::Basename qw(dirname);
-use File::Path qw(make_path);
-use File::Spec::Functions qw(catfile);
-use List::Util qw(reduce);
-use IPC::Open3;
-use Nix::Config;
-use Nix::Store qw(derivationFromPath);
-use POSIX qw(uname);
-use Storable qw(lock_retrieve lock_store);
-
-my ($sysname, undef, $version, undef, $machine) = uname;
-$sysname =~ /Darwin/ or die "This tool is only meant to be used on Darwin systems.";
-
-my $cache = "$Nix::Config::stateDir/dependency-maps/$machine-$sysname-$version.map";
-
-make_path dirname($cache);
-
-our $DEPS;
-eval {
-  $DEPS = lock_retrieve($cache);
-};
-
-if($!{ENOENT}) {
-  lock_store {}, $cache;
-  $DEPS = {};
-} elsif($@) {
-  die "Unable to obtain a lock on dependency-map file $cache: $@";
-}
-
-sub mkset(@) {
-  my %set;
-  @set{@_} = ();
-  \%set
-}
-
-sub union($$) {
-  my ($set1, $set2) = @_;
-  my %new = (%$set1, %$set2);
-  \%new
-}
-
-sub cache_filepath($) {
-  my $fp = shift;
-  $fp =~ s/-/--/g;
-  $fp =~ s/\//-/g;
-  $fp =~ s/^-//g;
-  catfile $cache, $fp
-}
-
-sub resolve_tree {
-  sub resolve_tree_inner {
-    my ($lib, $TREE) = @_;
-    return if (defined $TREE->{$lib});
-    $TREE->{$lib} = mkset(@{cache_get($lib)});
-    foreach my $dep (keys %{$TREE->{$lib}}) {
-      resolve_tree_inner($dep, $TREE);
-    }
-    values %$TREE
-  }
-
-  reduce { union($a, $b) } {}, resolve_tree_inner(@_)
-}
-
-sub cache_get {
-  my $key = shift;
-  if (defined $DEPS->{$key}) {
-    $DEPS->{$key}
-  } else {
-    cache_insert($key);
-    cache_get($key)
-  }
-}
-
-sub cache_insert($) {
-  my $key = shift;
-  print STDERR "Finding dependencies for $key...\n";
-  my @deps = find_deps($key);
-  $DEPS->{$key} = \@deps;
-}
-
-sub find_deps($) {
-  my $lib = shift;
-  my($chld_in, $chld_out, $chld_err);
-  my $pid = open3($chld_in, $chld_out, $chld_err, "@otool@", "-L", "-arch", "x86_64", $lib);
-  waitpid($pid, 0);
-  my $line = readline $chld_out;
-  if($? == 0 and $line !~ /not an object file/) {
-    my @libs;
-    while(<$chld_out>) {
-      my $dep = (split /\s+/)[1];
-      push @libs, $dep unless $dep eq $lib or $dep =~ /\@rpath/;
-    }
-    @libs
-  } elsif (-l $lib) {
-    (realpath($lib))
-  } else {
-    ()
-  }
-}
-
-if (defined $ARGV[0]) {
-  my $deps = derivationFromPath($ARGV[0])->{"env"}->{"__impureHostDeps"};
-  if (defined $deps) {
-    my @files = split(/\s+/, $deps);
-    my $depcache = {};
-    my $depset = reduce { union($a, $b) } (map { resolve_tree($_, $depcache) } @files);
-    print "extra-chroot-dirs\n";
-    print join("\n", keys %$depset);
-    print "\n";
-  }
-  lock_store($DEPS, $cache);
-} else {
-  print STDERR "Usage: $0 path/to/derivation.drv\n";
-  exit 1
-}
diff --git a/scripts/show-duplication.pl b/scripts/show-duplication.pl
deleted file mode 100755
index 0604c6696c7a..000000000000
--- a/scripts/show-duplication.pl
+++ /dev/null
@@ -1,73 +0,0 @@
-#! /usr/bin/perl -w
-
-if (scalar @ARGV != 1) {
-    print "syntax: show-duplication.pl PATH\n";
-    exit 1;
-}
-
-my $root = $ARGV[0];
-
-
-my $nameRE = "(?:(?:[A-Za-z0-9\+\_]|(?:-[^0-9]))+)";
-my $versionRE = "(?:[A-Za-z0-9\.\-]+)";
-
-
-my %pkgInstances;
-
-
-my $pid = open(PATHS, "-|") || exec "nix-store", "-qR", $root;
-while (<PATHS>) {
-    chomp;
-    /^.*\/[0-9a-z]*-(.*)$/;
-    my $nameVersion = $1;
-    $nameVersion =~ /^($nameRE)(-($versionRE))?$/;
-    $name = $1;
-    $version = $3;
-    $version = "(unnumbered)" unless defined $version;
-#    print "$nameVersion $name $version\n";
-    push @{$pkgInstances{$name}}, {version => $version, path => $_};
-}
-close PATHS or exit 1;
-
-
-sub pathSize {
-    my $path = shift;
-    my @st = lstat $path or die;
-
-    my $size = $st[7];
-
-    if (-d $path) {
-        opendir DIR, $path or die;
-        foreach my $name (readdir DIR) {
-            next if $name eq "." || $name eq "..";
-            $size += pathSize("$path/$name");
-        }
-    }
-    
-    return $size;
-}
-
-
-my $totalPaths = 0;
-my $totalSize = 0, $totalWaste = 0;
-
-foreach my $name (sort {scalar @{$pkgInstances{$b}} <=> scalar @{$pkgInstances{$a}}} (keys %pkgInstances)) {
-    print "$name ", scalar @{$pkgInstances{$name}}, "\n";
-    my $allSize = 0;
-    foreach my $x (sort {$a->{version} cmp $b->{version}} @{$pkgInstances{$name}}) {
-        $totalPaths++;
-        my $size = pathSize $x->{path};
-        $allSize += $size;
-        print "    $x->{version} $size\n";
-    }
-    my $avgSize = int($allSize / scalar @{$pkgInstances{$name}});
-    my $waste = $allSize - $avgSize;
-    $totalSize += $allSize;
-    $totalWaste += $waste;
-    print "    average $avgSize, waste $waste\n";
-}
-
-
-my $avgDupl = $totalPaths / scalar (keys %pkgInstances);
-my $wasteFactor = ($totalWaste / $totalSize) * 100;
-print "average package duplication $avgDupl, total size $totalSize, total waste $totalWaste, $wasteFactor% wasted\n";
diff --git a/src/buildenv/buildenv.cc b/src/buildenv/buildenv.cc
new file mode 100644
index 000000000000..f997096eddbb
--- /dev/null
+++ b/src/buildenv/buildenv.cc
@@ -0,0 +1,186 @@
+#include "shared.hh"
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <fcntl.h>
+#include <algorithm>
+
+using namespace nix;
+
+typedef std::map<Path,int> Priorities;
+
+static bool isDirectory(const Path & path)
+{
+    struct stat st;
+    if (stat(path.c_str(), &st) == -1)
+        throw SysError(format("getting status of ‘%1%’") % path);
+    return S_ISDIR(st.st_mode);
+}
+
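+/* Maps each symlink created in the environment to the priority of the
+   package that produced it, so that later collisions can be resolved. */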
+static auto priorities = Priorities{};
+
+static auto symlinks = 0;
+
+/* For each activated package, create symlinks */
+static void createLinks(const Path & srcDir, const Path & dstDir, int priority)
+{
+    auto srcFiles = readDirectory(srcDir);
+    for (const auto & ent : srcFiles) {
+        if (ent.name[0] == '.')
+            /* not matched by glob */
+            continue;
+        const auto & srcFile = srcDir + "/" + ent.name;
+        auto dstFile = dstDir + "/" + ent.name;
+
+        /* The files below are special-cased so that they don't show up
+         * in user profiles, either because they are useless, or
+         * because they would cause pointless collisions (e.g., each
+         * Python package brings its own
+         * `$out/lib/pythonX.Y/site-packages/easy-install.pth'.)
+         */
+        if (hasSuffix(srcFile, "/propagated-build-inputs") ||
+            hasSuffix(srcFile, "/nix-support") ||
+            hasSuffix(srcFile, "/perllocal.pod") ||
+            hasSuffix(srcFile, "/info/dir") ||
+            hasSuffix(srcFile, "/log")) {
+            continue;
+        } else if (isDirectory(srcFile)) {
+            struct stat dstSt;
+            auto res = lstat(dstFile.c_str(), &dstSt);
+            if (res == 0) {
+                if (S_ISDIR(dstSt.st_mode)) {
+                    createLinks(srcFile, dstFile, priority);
+                    continue;
+                } else if (S_ISLNK(dstSt.st_mode)) {
+                    auto target = readLink(dstFile);
+                    if (!isDirectory(target))
+                        throw Error(format("collision between ‘%1%’ and non-directory ‘%2%’")
+                            % srcFile % target);
+                    if (unlink(dstFile.c_str()) == -1)
+                        throw SysError(format("unlinking ‘%1%’") % dstFile);
+                    if (mkdir(dstFile.c_str(), 0755) == -1)
+                        throw SysError(format("creating directory ‘%1%’") % dstFile);
+                    createLinks(target, dstFile, priorities[dstFile]);
+                    createLinks(srcFile, dstFile, priority);
+                    continue;
+                }
+            } else if (errno != ENOENT)
+                throw SysError(format("getting status of ‘%1%’") % dstFile);
+        } else {
+            struct stat dstSt;
+            auto res = lstat(dstFile.c_str(), &dstSt);
+            if (res == 0) {
+                if (S_ISLNK(dstSt.st_mode)) {
+                    auto target = readLink(dstFile);
+                    auto prevPriority = priorities[dstFile];
+                    if (prevPriority == priority)
+                        throw Error(format(
+                                "collision between ‘%1%’ and ‘%2%’; "
+                                "use ‘nix-env --set-flag priority NUMBER PKGNAME’ "
+                                "to change the priority of one of the conflicting packages"
+                                ) % srcFile % target);
+                    if (prevPriority < priority)
+                        continue;
+                    if (unlink(dstFile.c_str()) == -1)
+                        throw SysError(format("unlinking ‘%1%’") % dstFile);
+                }
+            } else if (errno != ENOENT)
+                throw SysError(format("getting status of ‘%1%’") % dstFile);
+        }
+        createSymlink(srcFile, dstFile);
+        priorities[dstFile] = priority;
+        symlinks++;
+    }
+}
+
+typedef std::set<Path> FileProp;
+
+static auto done = FileProp{};
+static auto postponed = FileProp{};
+
+static auto out = string{};
+
+static void addPkg(const Path & pkgDir, int priority)
+{
+    if (done.find(pkgDir) != done.end())
+        return;
+    done.insert(pkgDir);
+    createLinks(pkgDir, out, priority);
+    auto propagatedFN = pkgDir + "/nix-support/propagated-user-env-packages";
+    auto propagated = string{};
+    {
+        AutoCloseFD fd = open(propagatedFN.c_str(), O_RDONLY | O_CLOEXEC);
+        if (!fd) {
+            if (errno == ENOENT)
+                return;
+            throw SysError(format("opening ‘%1%’") % propagatedFN);
+        }
+        propagated = readLine(fd.get());
+    }
+    for (const auto & p : tokenizeString<std::vector<string>>(propagated, " "))
+        if (done.find(p) == done.end())
+            postponed.insert(p);
+}
+
+struct Package {
+    Path path;
+    bool active;
+    int priority;
+    Package(Path path, bool active, int priority) : path{std::move(path)}, active{active}, priority{priority} {}
+};
+
+typedef std::vector<Package> Packages;
+
+int main(int argc, char ** argv)
+{
+    return handleExceptions(argv[0], [&]() {
+        initNix();
+        out = getEnv("out");
+        if (mkdir(out.c_str(), 0755) == -1)
+            throw SysError(format("creating %1%") % out);
+
+        /* Convert the stuff we get from the environment back into a coherent
+         * data type.
+         */
+        auto pkgs = Packages{};
+        auto derivations = tokenizeString<Strings>(getEnv("derivations"));
+        while (!derivations.empty()) {
+            /* !!! We're trusting the caller to structure the ‘derivations’ env var correctly */
+            auto active = derivations.front(); derivations.pop_front();
+            auto priority = stoi(derivations.front()); derivations.pop_front();
+            auto outputs = stoi(derivations.front()); derivations.pop_front();
+            for (auto n = 0; n < outputs; n++) {
+                auto path = derivations.front(); derivations.pop_front();
+                pkgs.emplace_back(path, active != "false", priority);
+            }
+        }
+
+        /* Symlink to the packages that have been installed explicitly by the
+         * user. Process in priority order to reduce unnecessary
+         * symlink/unlink steps.
+         */
+        std::sort(pkgs.begin(), pkgs.end(), [](const Package & a, const Package & b) {
+            return a.priority < b.priority || (a.priority == b.priority && a.path < b.path);
+        });
+        for (const auto & pkg : pkgs)
+            if (pkg.active)
+                addPkg(pkg.path, pkg.priority);
+
+        /* Symlink to the packages that have been "propagated" by packages
+         * installed by the user (i.e., package X declares that it wants Y
+         * installed as well). We do these later because they have a lower
+         * priority in case of collisions.
+         */
+        auto priorityCounter = 1000;
+        while (!postponed.empty()) {
+            auto pkgDirs = postponed;
+            postponed = FileProp{};
+            for (const auto & pkgDir : pkgDirs)
+                addPkg(pkgDir, priorityCounter++);
+        }
+
+        std::cerr << "created " << symlinks << " symlinks in user environment\n";
+
+        createSymlink(getEnv("manifest"), out + "/manifest.nix");
+    });
+}
+
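The builder above receives its package list through a single flattened ‘derivations’ environment variable: for each derivation the Nix side passes an active flag, a priority, the number of outputs, and then that many store paths. The following stand-alone sketch parses the same encoding with plain standard-library types; the store paths and values are invented for illustration, and the real input comes from the corresponding Nix expression.

    // Illustrative only: two hypothetical packages encoded the way main()
    // above expects them (active flag, priority, number of outputs, paths).
    #include <iostream>
    #include <list>
    #include <sstream>
    #include <string>

    int main()
    {
        std::string derivations =
            "true 5 1 /nix/store/aaaa-hello-2.10 "
            "false 10 2 /nix/store/bbbb-foo /nix/store/bbbb-foo-dev";

        std::list<std::string> tokens;
        std::istringstream in(derivations);
        for (std::string t; in >> t; ) tokens.push_back(t);

        while (!tokens.empty()) {
            auto active = tokens.front(); tokens.pop_front();
            auto priority = std::stoi(tokens.front()); tokens.pop_front();
            auto outputs = std::stoi(tokens.front()); tokens.pop_front();
            for (int n = 0; n < outputs; n++) {
                auto path = tokens.front(); tokens.pop_front();
                std::cout << path << " active=" << (active != "false")
                          << " priority=" << priority << "\n";
            }
        }
    }
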
diff --git a/src/buildenv/local.mk b/src/buildenv/local.mk
new file mode 100644
index 000000000000..17ec13b235f4
--- /dev/null
+++ b/src/buildenv/local.mk
@@ -0,0 +1,9 @@
+programs += buildenv
+
+buildenv_DIR := $(d)
+
+buildenv_INSTALL_DIR := $(libexecdir)/nix
+
+buildenv_LIBS = libmain libstore libutil libformat
+
+buildenv_SOURCES := $(d)/buildenv.cc
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index 0833603b2a9e..64f3874db614 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -293,6 +293,8 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
     , sColumn(symbols.create("column"))
     , sFunctor(symbols.create("__functor"))
     , sToString(symbols.create("__toString"))
+    , sRight(symbols.create("right"))
+    , sWrong(symbols.create("wrong"))
     , store(store)
     , baseEnv(allocEnv(128))
     , staticBaseEnv(false, 0)
@@ -379,9 +381,9 @@ void EvalState::addPrimOp(const string & name,
 }
 
 
-void EvalState::getBuiltin(const string & name, Value & v)
+Value & EvalState::getBuiltin(const string & name)
 {
-    v = *baseEnv.values[0]->attrs->find(symbols.create(name))->value;
+    return *baseEnv.values[0]->attrs->find(symbols.create(name))->value;
 }
 
 
@@ -462,7 +464,7 @@ void mkString(Value & v, const char * s)
 }
 
 
-void mkString(Value & v, const string & s, const PathSet & context)
+Value & mkString(Value & v, const string & s, const PathSet & context)
 {
     mkString(v, s.c_str());
     if (!context.empty()) {
@@ -473,6 +475,7 @@ void mkString(Value & v, const string & s, const PathSet & context)
             v.string.context[n++] = dupString(i.c_str());
         v.string.context[n] = 0;
     }
+    return v;
 }
 
 
@@ -993,11 +996,18 @@ void EvalState::callFunction(Value & fun, Value & arg, Value & v, const Pos & po
     if (fun.type == tAttrs) {
       auto found = fun.attrs->find(sFunctor);
       if (found != fun.attrs->end()) {
+        /* fun may be allocated on the stack of the calling function,
+         * but for functors we may keep a reference, so heap-allocate
+         * a copy and use that instead.
+         */
+        auto & fun2 = *allocValue();
+        fun2 = fun;
+        /* !!! Should we use the attr pos here? */
         forceValue(*found->value, pos);
-        Value * v2 = allocValue();
-        callFunction(*found->value, fun, *v2, pos);
-        forceValue(*v2, pos);
-        return callFunction(*v2, arg, v, pos);
+        Value v2;
+        callFunction(*found->value, fun2, v2, pos);
+        forceValue(v2, pos);
+        return callFunction(v2, arg, v, pos);
       }
     }
 
@@ -1368,11 +1378,11 @@ NixFloat EvalState::forceFloat(Value & v, const Pos & pos)
 }
 
 
-bool EvalState::forceBool(Value & v)
+bool EvalState::forceBool(Value & v, const Pos & pos)
 {
     forceValue(v);
     if (v.type != tBool)
-        throwTypeError("value is %1% while a Boolean was expected", v);
+        throwTypeError("value is %1% while a Boolean was expected, at %2%", v, pos);
     return v.boolean;
 }
 
diff --git a/src/libexpr/eval.hh b/src/libexpr/eval.hh
index 80e369f2d68f..195cb0db3acc 100644
--- a/src/libexpr/eval.hh
+++ b/src/libexpr/eval.hh
@@ -8,10 +8,6 @@
 
 #include <map>
 
-#if HAVE_BOEHMGC
-#include <gc/gc_allocator.h>
-#endif
-
 
 namespace nix {
 
@@ -43,7 +39,7 @@ struct Env
 };
 
 
-void mkString(Value & v, const string & s, const PathSet & context = PathSet());
+Value & mkString(Value & v, const string & s, const PathSet & context = PathSet());
 
 void copyContext(const Value & v, PathSet & context);
 
@@ -71,7 +67,8 @@ public:
 
     const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
         sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
-        sFile, sLine, sColumn, sFunctor, sToString;
+        sFile, sLine, sColumn, sFunctor, sToString,
+        sRight, sWrong;
     Symbol sDerivationNix;
 
     /* If set, force copying files to the Nix store even if they
@@ -108,6 +105,8 @@ public:
 
     void addToSearchPath(const string & s);
 
+    SearchPath getSearchPath() { return searchPath; }
+
     Path checkSourcePath(const Path & path);
 
     /* Parse a Nix expression from the specified file. */
@@ -154,7 +153,7 @@ public:
     /* Force `v', and then verify that it has the expected type. */
     NixInt forceInt(Value & v, const Pos & pos);
     NixFloat forceFloat(Value & v, const Pos & pos);
-    bool forceBool(Value & v);
+    bool forceBool(Value & v, const Pos & pos);
     inline void forceAttrs(Value & v);
     inline void forceAttrs(Value & v, const Pos & pos);
     inline void forceList(Value & v);
@@ -204,7 +203,7 @@ private:
 
 public:
 
-    void getBuiltin(const string & name, Value & v);
+    Value & getBuiltin(const string & name);
 
 private:
 
diff --git a/src/libexpr/get-drvs.cc b/src/libexpr/get-drvs.cc
index b06c539de0fb..dc5def911ca0 100644
--- a/src/libexpr/get-drvs.cc
+++ b/src/libexpr/get-drvs.cc
@@ -301,7 +301,7 @@ static void getDerivations(EvalState & state, Value & vIn,
                    `recurseForDerivations = true' attribute. */
                 if (v2.type == tAttrs) {
                     Bindings::iterator j = v2.attrs->find(state.symbols.create("recurseForDerivations"));
-                    if (j != v2.attrs->end() && state.forceBool(*j->value))
+                    if (j != v2.attrs->end() && state.forceBool(*j->value, *j->pos))
                         getDerivations(state, v2, pathPrefix2, autoArgs, drvs, done, ignoreAssertionFailures);
                 }
             }
diff --git a/src/libexpr/json-to-value.cc b/src/libexpr/json-to-value.cc
index 1daf84600dca..f671802bcc24 100644
--- a/src/libexpr/json-to-value.cc
+++ b/src/libexpr/json-to-value.cc
@@ -12,15 +12,6 @@ static void skipWhitespace(const char * & s)
 }
 
 
-#if HAVE_BOEHMGC
-typedef std::vector<Value *, gc_allocator<Value *> > ValueVector;
-typedef std::map<Symbol, Value *, std::less<Symbol>, gc_allocator<Value *> > ValueMap;
-#else
-typedef std::vector<Value *> ValueVector;
-typedef std::map<Symbol, Value *> ValueMap;
-#endif
-
-
 static string parseJSONString(const char * & s)
 {
     string res;
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index c456e9b96a53..3b965f209bb2 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -477,7 +477,7 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
     bool ignoreNulls = false;
     attr = args[0]->attrs->find(state.sIgnoreNulls);
     if (attr != args[0]->attrs->end())
-        ignoreNulls = state.forceBool(*attr->value);
+        ignoreNulls = state.forceBool(*attr->value, pos);
 
     /* Build the derivation expression by processing the attributes. */
     Derivation drv;
@@ -673,6 +673,19 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
 }
 
 
+/* Return a placeholder string for the specified output that will be
+   substituted by the corresponding output path at build time. For
+   example, ‘placeholder "out"’ returns the string
+   /1rz4g4znpzjwh1xymhjpm42vipw92pr73vdgl6xs1hycac8kf2n9. At build
+   time, any occurrence of this string in a derivation attribute will
+   be replaced with the concrete path in the Nix store of the output
+   ‘out’. */
+static void prim_placeholder(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+    mkString(v, hashPlaceholder(state.forceStringNoCtx(*args[0], pos)));
+}
+
+
 /*************************************************************
  * Paths
  *************************************************************/
@@ -912,9 +925,10 @@ struct FilterFromExpr : PathFilter
 {
     EvalState & state;
     Value & filter;
+    Pos pos;
 
-    FilterFromExpr(EvalState & state, Value & filter)
-        : state(state), filter(filter)
+    FilterFromExpr(EvalState & state, Value & filter, const Pos & pos)
+        : state(state), filter(filter), pos(pos)
     {
     }
 
@@ -942,7 +956,7 @@ struct FilterFromExpr : PathFilter
         Value res;
         state.callFunction(fun2, arg2, res, noPos);
 
-        return state.forceBool(res);
+        return state.forceBool(res, pos);
     }
 };
 
@@ -958,7 +972,7 @@ static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args
     if (args[0]->type != tLambda)
         throw TypeError(format("first argument in call to ‘filterSource’ is not a function but %1%, at %2%") % showType(*args[0]) % pos);
 
-    FilterFromExpr filter(state, *args[0]);
+    FilterFromExpr filter(state, *args[0], pos);
 
     path = state.checkSourcePath(path);
 
@@ -1278,7 +1292,7 @@ static void prim_filter(EvalState & state, const Pos & pos, Value * * args, Valu
     for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
         Value res;
         state.callFunction(*args[0], *args[1]->listElems()[n], res, noPos);
-        if (state.forceBool(res))
+        if (state.forceBool(res, pos))
             vs[k++] = args[1]->listElems()[n];
         else
             same = false;
@@ -1354,7 +1368,7 @@ static void anyOrAll(bool any, EvalState & state, const Pos & pos, Value * * arg
     Value vTmp;
     for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
         state.callFunction(*args[0], *args[1]->listElems()[n], vTmp, pos);
-        bool res = state.forceBool(vTmp);
+        bool res = state.forceBool(vTmp, pos);
         if (res == any) {
             mkBool(v, any);
             return;
@@ -1420,7 +1434,7 @@ static void prim_sort(EvalState & state, const Pos & pos, Value * * args, Value
         Value vTmp1, vTmp2;
         state.callFunction(*args[0], *a, vTmp1, pos);
         state.callFunction(vTmp1, *b, vTmp2, pos);
-        return state.forceBool(vTmp2);
+        return state.forceBool(vTmp2, pos);
     };
 
     /* FIXME: std::sort can segfault if the comparator is not a strict
@@ -1430,6 +1444,40 @@ static void prim_sort(EvalState & state, const Pos & pos, Value * * args, Value
 }
 
 
+static void prim_partition(EvalState & state, const Pos & pos, Value * * args, Value & v)
+{
+    state.forceFunction(*args[0], pos);
+    state.forceList(*args[1], pos);
+
+    auto len = args[1]->listSize();
+
+    ValueVector right, wrong;
+
+    for (unsigned int n = 0; n < len; ++n) {
+        auto vElem = args[1]->listElems()[n];
+        state.forceValue(*vElem);
+        Value res;
+        state.callFunction(*args[0], *vElem, res, pos);
+        if (state.forceBool(res, pos))
+            right.push_back(vElem);
+        else
+            wrong.push_back(vElem);
+    }
+
+    state.mkAttrs(v, 2);
+
+    Value * vRight = state.allocAttr(v, state.sRight);
+    state.mkList(*vRight, right.size());
+    memcpy(vRight->listElems(), right.data(), sizeof(Value *) * right.size());
+
+    Value * vWrong = state.allocAttr(v, state.sWrong);
+    state.mkList(*vWrong, wrong.size());
+    memcpy(vWrong->listElems(), wrong.data(), sizeof(Value *) * wrong.size());
+
+    v.attrs->sort();
+}
+
+
 /*************************************************************
  * Integer arithmetic
  *************************************************************/
@@ -1620,13 +1668,18 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
     if (args[0]->listSize() != args[1]->listSize())
         throw EvalError(format("‘from’ and ‘to’ arguments to ‘replaceStrings’ have different lengths, at %1%") % pos);
 
-    Strings from;
+    vector<string> from;
+    from.reserve(args[0]->listSize());
     for (unsigned int n = 0; n < args[0]->listSize(); ++n)
-        from.push_back(state.forceStringNoCtx(*args[0]->listElems()[n], pos));
+        from.push_back(state.forceString(*args[0]->listElems()[n], pos));
 
-    Strings to;
-    for (unsigned int n = 0; n < args[1]->listSize(); ++n)
-        to.push_back(state.forceStringNoCtx(*args[1]->listElems()[n], pos));
+    vector<std::pair<string, PathSet>> to;
+    to.reserve(args[1]->listSize());
+    for (unsigned int n = 0; n < args[1]->listSize(); ++n) {
+        PathSet ctx;
+        auto s = state.forceString(*args[1]->listElems()[n], ctx, pos);
+        to.push_back(std::make_pair(std::move(s), std::move(ctx)));
+    }
 
     PathSet context;
     auto s = state.forceString(*args[2], context, pos);
@@ -1634,11 +1687,16 @@ static void prim_replaceStrings(EvalState & state, const Pos & pos, Value * * ar
     string res;
     for (size_t p = 0; p < s.size(); ) {
         bool found = false;
-        for (auto i = from.begin(), j = to.begin(); i != from.end(); ++i, ++j)
+        auto i = from.begin();
+        auto j = to.begin();
+        for (; i != from.end(); ++i, ++j)
             if (s.compare(p, i->size(), *i) == 0) {
                 found = true;
                 p += i->size();
-                res += *j;
+                res += j->first;
+                for (auto& path : j->second)
+                    context.insert(path);
+                j->second.clear();
                 break;
             }
         if (!found) res += s[p++];
@@ -1682,6 +1740,7 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
 {
     string url;
     Hash expectedHash;
+    string name;
 
     state.forceValue(*args[0]);
 
@@ -1690,11 +1749,13 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
         state.forceAttrs(*args[0], pos);
 
         for (auto & attr : *args[0]->attrs) {
-            string name(attr.name);
-            if (name == "url")
+            string n(attr.name);
+            if (n == "url")
                 url = state.forceStringNoCtx(*attr.value, *attr.pos);
-            else if (name == "sha256")
+            else if (n == "sha256")
                 expectedHash = parseHash16or32(htSHA256, state.forceStringNoCtx(*attr.value, *attr.pos));
+            else if (n == "name")
+                name = state.forceStringNoCtx(*attr.value, *attr.pos);
             else
                 throw EvalError(format("unsupported argument ‘%1%’ to ‘%2%’, at %3%") % attr.name % who % attr.pos);
         }
@@ -1708,7 +1769,7 @@ void fetch(EvalState & state, const Pos & pos, Value * * args, Value & v,
     if (state.restricted && !expectedHash)
         throw Error(format("‘%1%’ is not allowed in restricted mode") % who);
 
-    Path res = makeDownloader()->downloadCached(state.store, url, unpack, expectedHash);
+    Path res = makeDownloader()->downloadCached(state.store, url, unpack, name, expectedHash);
     mkString(v, res, PathSet({res}));
 }
 
@@ -1855,6 +1916,7 @@ void EvalState::createBaseEnv()
     addPrimOp("__all", 2, prim_all);
     addPrimOp("__genList", 2, prim_genList);
     addPrimOp("__sort", 2, prim_sort);
+    addPrimOp("__partition", 2, prim_partition);
 
     // Integer arithmetic
     addPrimOp("__add", 2, prim_add);
@@ -1880,6 +1942,7 @@ void EvalState::createBaseEnv()
 
     // Derivations
     addPrimOp("derivationStrict", 1, prim_derivationStrict);
+    addPrimOp("placeholder", 1, prim_placeholder);
 
     // Networking
     addPrimOp("__fetchurl", 1, prim_fetchurl);
diff --git a/src/libexpr/value-to-json.cc b/src/libexpr/value-to-json.cc
index 47ee324a6e4f..72e413e4491e 100644
--- a/src/libexpr/value-to-json.cc
+++ b/src/libexpr/value-to-json.cc
@@ -1,4 +1,5 @@
 #include "value-to-json.hh"
+#include "json.hh"
 #include "eval-inline.hh"
 #include "util.hh"
 
@@ -8,24 +9,8 @@
 
 namespace nix {
 
-
-void escapeJSON(std::ostream & str, const string & s)
-{
-    str << "\"";
-    for (auto & i : s)
-        if (i == '\"' || i == '\\') str << "\\" << i;
-        else if (i == '\n') str << "\\n";
-        else if (i == '\r') str << "\\r";
-        else if (i == '\t') str << "\\t";
-        else if (i >= 0 && i < 32)
-            str << "\\u" << std::setfill('0') << std::setw(4) << std::hex << (uint16_t) i << std::dec;
-        else str << i;
-    str << "\"";
-}
-
-
 void printValueAsJSON(EvalState & state, bool strict,
-    Value & v, std::ostream & str, PathSet & context)
+    Value & v, JSONPlaceholder & out, PathSet & context)
 {
     checkInterrupt();
 
@@ -34,58 +19,58 @@ void printValueAsJSON(EvalState & state, bool strict,
     switch (v.type) {
 
         case tInt:
-            str << v.integer;
+            out.write(v.integer);
             break;
 
         case tBool:
-            str << (v.boolean ? "true" : "false");
+            out.write(v.boolean);
             break;
 
         case tString:
             copyContext(v, context);
-            escapeJSON(str, v.string.s);
+            out.write(v.string.s);
             break;
 
         case tPath:
-            escapeJSON(str, state.copyPathToStore(context, v.path));
+            out.write(state.copyPathToStore(context, v.path));
             break;
 
         case tNull:
-            str << "null";
+            out.write(nullptr);
             break;
 
         case tAttrs: {
             Bindings::iterator i = v.attrs->find(state.sOutPath);
             if (i == v.attrs->end()) {
-                JSONObject json(str);
+                auto obj(out.object());
                 StringSet names;
                 for (auto & j : *v.attrs)
                     names.insert(j.name);
                 for (auto & j : names) {
                     Attr & a(*v.attrs->find(state.symbols.create(j)));
-                    json.attr(j);
-                    printValueAsJSON(state, strict, *a.value, str, context);
+                    auto placeholder(obj.placeholder(j));
+                    printValueAsJSON(state, strict, *a.value, placeholder, context);
                 }
             } else
-                printValueAsJSON(state, strict, *i->value, str, context);
+                printValueAsJSON(state, strict, *i->value, out, context);
             break;
         }
 
         case tList1: case tList2: case tListN: {
-            JSONList json(str);
+            auto list(out.list());
             for (unsigned int n = 0; n < v.listSize(); ++n) {
-                json.elem();
-                printValueAsJSON(state, strict, *v.listElems()[n], str, context);
+                auto placeholder(list.placeholder());
+                printValueAsJSON(state, strict, *v.listElems()[n], placeholder, context);
             }
             break;
         }
 
         case tExternal:
-            v.external->printValueAsJSON(state, strict, str, context);
+            v.external->printValueAsJSON(state, strict, out, context);
             break;
 
         case tFloat:
-            str << v.fpoint;
+            out.write(v.fpoint);
             break;
 
         default:
@@ -93,9 +78,15 @@ void printValueAsJSON(EvalState & state, bool strict,
     }
 }
 
+void printValueAsJSON(EvalState & state, bool strict,
+    Value & v, std::ostream & str, PathSet & context)
+{
+    JSONPlaceholder out(str);
+    printValueAsJSON(state, strict, v, out, context);
+}
 
 void ExternalValueBase::printValueAsJSON(EvalState & state, bool strict,
-      std::ostream & str, PathSet & context) const
+    JSONPlaceholder & out, PathSet & context) const
 {
     throw TypeError(format("cannot convert %1% to JSON") % showType());
 }
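printValueAsJSON now streams through a placeholder-based writer instead of printing braces and commas by hand, so separators and closing brackets are emitted by the writer objects themselves when they go out of scope. The json.hh interface is not part of this hunk; the snippet below is only a minimal sketch of that destructor-driven style, not the actual JSONPlaceholder API.

    #include <iostream>
    #include <string>

    // Minimal destructor-driven JSON object writer (no escaping, sketch only).
    struct MiniObject
    {
        std::ostream & str;
        bool first = true;
        MiniObject(std::ostream & str) : str(str) { str << "{"; }
        ~MiniObject() { str << "}"; }
        void attr(const std::string & k, const std::string & v)
        {
            if (!first) str << ","; else first = false;
            str << "\"" << k << "\":\"" << v << "\"";
        }
    };

    int main()
    {
        {
            MiniObject obj(std::cout);
            obj.attr("name", "example");
            obj.attr("system", "x86_64-linux");
        } // the closing brace is written here, when obj is destroyed
        std::cout << "\n";
    }
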
diff --git a/src/libexpr/value-to-json.hh b/src/libexpr/value-to-json.hh
index c59caf5641bc..67fed6487dd9 100644
--- a/src/libexpr/value-to-json.hh
+++ b/src/libexpr/value-to-json.hh
@@ -8,73 +8,12 @@
 
 namespace nix {
 
-void printValueAsJSON(EvalState & state, bool strict,
-    Value & v, std::ostream & out, PathSet & context);
-
-void escapeJSON(std::ostream & str, const string & s);
+class JSONPlaceholder;
 
-struct JSONObject
-{
-    std::ostream & str;
-    bool first;
-    JSONObject(std::ostream & str) : str(str), first(true)
-    {
-        str << "{";
-    }
-    ~JSONObject()
-    {
-        str << "}";
-    }
-    void attr(const string & s)
-    {
-        if (!first) str << ","; else first = false;
-        escapeJSON(str, s);
-        str << ":";
-    }
-    void attr(const string & s, const string & t)
-    {
-        attr(s);
-        escapeJSON(str, t);
-    }
-    void attr(const string & s, const char * t)
-    {
-        attr(s);
-        escapeJSON(str, t);
-    }
-    void attr(const string & s, bool b)
-    {
-        attr(s);
-        str << (b ? "true" : "false");
-    }
-    template<typename T>
-    void attr(const string & s, const T & n)
-    {
-        attr(s);
-        str << n;
-    }
-};
+void printValueAsJSON(EvalState & state, bool strict,
+    Value & v, JSONPlaceholder & out, PathSet & context);
 
-struct JSONList
-{
-    std::ostream & str;
-    bool first;
-    JSONList(std::ostream & str) : str(str), first(true)
-    {
-        str << "[";
-    }
-    ~JSONList()
-    {
-        str << "]";
-    }
-    void elem()
-    {
-        if (!first) str << ","; else first = false;
-    }
-    void elem(const string & s)
-    {
-        elem();
-        escapeJSON(str, s);
-    }
-};
+void printValueAsJSON(EvalState & state, bool strict,
+    Value & v, std::ostream & str, PathSet & context);
 
 }
diff --git a/src/libexpr/value.hh b/src/libexpr/value.hh
index 62bdd9281f08..271e6a1b24a2 100644
--- a/src/libexpr/value.hh
+++ b/src/libexpr/value.hh
@@ -1,7 +1,12 @@
 #pragma once
 
+#include "config.h"
 #include "symbol-table.hh"
 
+#if HAVE_BOEHMGC
+#include <gc/gc_allocator.h>
+#endif
+
 namespace nix {
 
 
@@ -36,6 +41,7 @@ class Symbol;
 struct Pos;
 class EvalState;
 class XMLWriter;
+class JSONPlaceholder;
 
 
 typedef long NixInt;
@@ -73,7 +79,7 @@ class ExternalValueBase
 
     /* Print the value as JSON. Defaults to unconvertable, i.e. throws an error */
     virtual void printValueAsJSON(EvalState & state, bool strict,
-        std::ostream & str, PathSet & context) const;
+        JSONPlaceholder & out, PathSet & context) const;
 
     /* Print the value as XML. Defaults to unevaluated */
     virtual void printValueAsXML(EvalState & state, bool strict, bool location,
@@ -249,4 +255,13 @@ void mkPath(Value & v, const char * s);
 size_t valueSize(Value & v);
 
 
+#if HAVE_BOEHMGC
+typedef std::vector<Value *, gc_allocator<Value *> > ValueVector;
+typedef std::map<Symbol, Value *, std::less<Symbol>, gc_allocator<Value *> > ValueMap;
+#else
+typedef std::vector<Value *> ValueVector;
+typedef std::map<Symbol, Value *> ValueMap;
+#endif
+
+
 }
diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc
index 515d80091de3..5af7c46e86da 100644
--- a/src/libmain/shared.cc
+++ b/src/libmain/shared.cc
@@ -252,7 +252,6 @@ void printVersion(const string & programName)
         std::cout << "Configuration file: " << settings.nixConfDir + "/nix.conf" << "\n";
         std::cout << "Store directory: " << settings.nixStore << "\n";
         std::cout << "State directory: " << settings.nixStateDir << "\n";
-        std::cout << "Database directory: " << settings.nixDBPath << "\n";
     }
     throw Exit();
 }
diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc
index 801ecd368a6b..e71ea6a57a34 100644
--- a/src/libstore/binary-cache-store.cc
+++ b/src/libstore/binary-cache-store.cc
@@ -254,7 +254,7 @@ struct BinaryCacheStoreAccessor : public FSAccessor
         std::string restPath = std::string(path, storePath.size());
 
         if (!store->isValidPath(storePath))
-            throw Error(format("path ‘%1%’ is not a valid store path") % storePath);
+            throw InvalidPath(format("path ‘%1%’ is not a valid store path") % storePath);
 
         auto i = nars.find(storePath);
         if (i != nars.end()) return {i->second, restPath};
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index 10ae574f9e49..e0eb702a4f82 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -193,6 +193,7 @@ bool CompareGoalPtrs::operator() (const GoalPtr & a, const GoalPtr & b) {
 struct Child
 {
     WeakGoalPtr goal;
+    Goal * goal2; // ugly hackery
     set<int> fds;
     bool respectTimeouts;
     bool inBuildSlot;
@@ -284,7 +285,7 @@ public:
        false if there is no sense in waking up goals that are sleeping
        because they can't run yet (e.g., there is no free build slot,
        or the hook would still say `postpone'). */
-    void childTerminated(GoalPtr goal, bool wakeSleepers = true);
+    void childTerminated(Goal * goal, bool wakeSleepers = true);
 
     /* Put `goal' to sleep until a build slot becomes available (which
        might be right away). */
@@ -652,18 +653,15 @@ HookInstance::~HookInstance()
 //////////////////////////////////////////////////////////////////////
 
 
-typedef map<string, string> HashRewrites;
+typedef map<std::string, std::string> StringRewrites;
 
 
-string rewriteHashes(string s, const HashRewrites & rewrites)
+std::string rewriteStrings(std::string s, const StringRewrites & rewrites)
 {
     for (auto & i : rewrites) {
-        assert(i.first.size() == i.second.size());
         size_t j = 0;
-        while ((j = s.find(i.first, j)) != string::npos) {
-            debug(format("rewriting @ %1%") % j);
-            s.replace(j, i.second.size(), i.second);
-        }
+        while ((j = s.find(i.first, j)) != string::npos)
+            s.replace(j, i.first.size(), i.second);
     }
     return s;
 }
@@ -782,7 +780,7 @@ private:
 #endif
 
     /* Hash rewriting. */
-    HashRewrites rewritesToTmp, rewritesFromTmp;
+    StringRewrites inputRewrites, outputRewrites;
     typedef map<Path, Path> RedirectedOutputs;
     RedirectedOutputs redirectedOutputs;
 
@@ -938,7 +936,7 @@ DerivationGoal::~DerivationGoal()
 void DerivationGoal::killChild()
 {
     if (pid != -1) {
-        worker.childTerminated(shared_from_this());
+        worker.childTerminated(this);
 
         if (buildUser.enabled()) {
             /* If we're using a build user, then there is a tricky
@@ -1412,7 +1410,7 @@ void DerivationGoal::buildDone()
     debug(format("builder process for ‘%1%’ finished") % drvPath);
 
     /* So the child is gone now. */
-    worker.childTerminated(shared_from_this());
+    worker.childTerminated(this);
 
     /* Close the read side of the logger pipe. */
     if (hook) {
@@ -1774,6 +1772,10 @@ void DerivationGoal::startBuilder()
         for (auto & i : varNames) env[i] = getEnv(i);
     }
 
+    /* Substitute output placeholders with the actual output paths. */
+    for (auto & output : drv->outputs)
+        inputRewrites[hashPlaceholder(output.first)] = output.second.path;
+
     /* The `exportReferencesGraph' feature allows the references graph
        to be passed to a builder.  This attribute should be a list of
        pairs [name1 path1 name2 path2 ...].  The references graph of
@@ -2418,7 +2420,7 @@ void DerivationGoal::runChild()
         /* Fill in the environment. */
         Strings envStrs;
         for (auto & i : env)
-            envStrs.push_back(rewriteHashes(i.first + "=" + i.second, rewritesToTmp));
+            envStrs.push_back(rewriteStrings(i.first + "=" + i.second, inputRewrites));
 
         /* If we are running in `build-users' mode, then switch to the
            user we allocated above.  Make sure that we drop all root
@@ -2560,7 +2562,7 @@ void DerivationGoal::runChild()
         }
 
         for (auto & i : drv->args)
-            args.push_back(rewriteHashes(i, rewritesToTmp));
+            args.push_back(rewriteStrings(i, inputRewrites));
 
         restoreSIGPIPE();
 
@@ -2682,7 +2684,7 @@ void DerivationGoal::registerOutputs()
 
         /* Apply hash rewriting if necessary. */
         bool rewritten = false;
-        if (!rewritesFromTmp.empty()) {
+        if (!outputRewrites.empty()) {
             printMsg(lvlError, format("warning: rewriting hashes in ‘%1%’; cross fingers") % path);
 
             /* Canonicalise first.  This ensures that the path we're
@@ -2694,7 +2696,7 @@ void DerivationGoal::registerOutputs()
             StringSink sink;
             dumpPath(actualPath, sink);
             deletePath(actualPath);
-            sink.s = make_ref<std::string>(rewriteHashes(*sink.s, rewritesFromTmp));
+            sink.s = make_ref<std::string>(rewriteStrings(*sink.s, outputRewrites));
             StringSource source(*sink.s);
             restorePath(actualPath, source);
 
@@ -2910,7 +2912,7 @@ Path DerivationGoal::openLogFile()
     string baseName = baseNameOf(drvPath);
 
     /* Create a log file. */
-    Path dir = (format("%1%/%2%/%3%/") % settings.nixLogDir % drvsLogDir % string(baseName, 0, 2)).str();
+    Path dir = (format("%1%/%2%/%3%/") % worker.store.logDir % drvsLogDir % string(baseName, 0, 2)).str();
     createDirs(dir);
 
     Path logFileName = (format("%1%/%2%%3%")
@@ -3033,8 +3035,8 @@ Path DerivationGoal::addHashRewrite(const Path & path)
     Path p = worker.store.storeDir + "/" + h2 + string(path, worker.store.storeDir.size() + 33);
     deletePath(p);
     assert(path.size() == p.size());
-    rewritesToTmp[h1] = h2;
-    rewritesFromTmp[h2] = h1;
+    inputRewrites[h1] = h2;
+    outputRewrites[h2] = h1;
     redirectedOutputs[path] = p;
     return p;
 }
@@ -3140,8 +3142,9 @@ SubstitutionGoal::~SubstitutionGoal()
 {
     try {
         if (thr.joinable()) {
+            // FIXME: signal worker thread to quit.
             thr.join();
-            //worker.childTerminated(shared_from_this()); // FIXME
+            worker.childTerminated(this);
         }
     } catch (...) {
         ignoreException();
@@ -3213,7 +3216,7 @@ void SubstitutionGoal::tryNext()
     /* Bail out early if this substituter lacks a valid
        signature. LocalStore::addToStore() also checks for this, but
        only after we've downloaded the path. */
-    if (worker.store.requireSigs && !info->checkSignatures(worker.store.publicKeys)) {
+    if (worker.store.requireSigs && !info->checkSignatures(worker.store, worker.store.publicKeys)) {
         printMsg(lvlInfo, format("warning: substituter ‘%s’ does not have a valid signature for path ‘%s’")
             % sub->getUri() % storePath);
         tryNext();
@@ -3296,7 +3299,7 @@ void SubstitutionGoal::finished()
     trace("substitute finished");
 
     thr.join();
-    worker.childTerminated(shared_from_this());
+    worker.childTerminated(this);
 
     try {
         promise.get_future().get();
@@ -3449,6 +3452,7 @@ void Worker::childStarted(GoalPtr goal, const set<int> & fds,
 {
     Child child;
     child.goal = goal;
+    child.goal2 = goal.get();
     child.fds = fds;
     child.timeStarted = child.lastOutput = time(0);
     child.inBuildSlot = inBuildSlot;
@@ -3458,10 +3462,10 @@ void Worker::childStarted(GoalPtr goal, const set<int> & fds,
 }
 
 
-void Worker::childTerminated(GoalPtr goal, bool wakeSleepers)
+void Worker::childTerminated(Goal * goal, bool wakeSleepers)
 {
     auto i = std::find_if(children.begin(), children.end(),
-        [&](const Child & child) { return child.goal.lock() == goal; });
+        [&](const Child & child) { return child.goal2 == goal; });
     assert(i != children.end());
 
     if (i->inBuildSlot) {
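Worker::childTerminated() now takes a raw Goal * (kept alongside the weak pointer as ‘goal2’) because it is also called from goal destructors, such as ~SubstitutionGoal above, where shared_from_this() can no longer produce a shared_ptr. A small stand-alone demonstration of that restriction, with made-up class names:

    #include <iostream>
    #include <memory>

    struct DemoGoal : std::enable_shared_from_this<DemoGoal>
    {
        ~DemoGoal()
        {
            try {
                shared_from_this(); // the control block has already expired here
            } catch (const std::bad_weak_ptr &) {
                std::cout << "shared_from_this() is unusable in a destructor\n";
            }
            // A plain 'this' pointer, as childTerminated(Goal *) now takes,
            // is still valid at this point.
        }
    };

    int main()
    {
        auto g = std::make_shared<DemoGoal>();
        g.reset(); // runs the destructor; shared_from_this() throws (C++17)
    }
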
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 7dcf71d468df..f051f10bd018 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -390,4 +390,11 @@ Sink & operator << (Sink & out, const BasicDerivation & drv)
 }
 
 
+std::string hashPlaceholder(const std::string & outputName)
+{
+    // FIXME: memoize?
+    return "/" + printHash32(hashString(htSHA256, "nix-output:" + outputName));
+}
+
+
 }
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 974de78c58d5..9717a81e469c 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -117,4 +117,6 @@ struct Sink;
 Source & readDerivation(Source & in, Store & store, BasicDerivation & drv);
 Sink & operator << (Sink & out, const BasicDerivation & drv);
 
+std::string hashPlaceholder(const std::string & outputName);
+
 }
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index cf3929cadd65..ed7e124d25f4 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -8,6 +8,7 @@
 #include <curl/curl.h>
 
 #include <iostream>
+#include <thread>
 
 
 namespace nix {
@@ -31,7 +32,7 @@ struct CurlDownloader : public Downloader
 {
     CURL * curl;
     ref<std::string> data;
-    string etag, status, expectedETag;
+    string etag, status, expectedETag, effectiveUrl;
 
     struct curl_slist * requestHeaders;
 
@@ -194,11 +195,24 @@ struct CurlDownloader : public Downloader
         if (res != CURLE_OK) {
             Error err =
                 httpStatus == 404 ? NotFound :
-                httpStatus == 403 ? Forbidden : Misc;
-            throw DownloadError(err, format("unable to download ‘%1%’: %2% (%3%)")
-                % url % curl_easy_strerror(res) % res);
+                httpStatus == 403 ? Forbidden :
+                (httpStatus == 408 || httpStatus == 500 || httpStatus == 503
+                 || httpStatus == 504  || httpStatus == 522 || httpStatus == 524
+                 || res == CURLE_COULDNT_RESOLVE_HOST) ? Transient :
+                Misc;
+            if (res == CURLE_HTTP_RETURNED_ERROR && httpStatus != -1)
+                throw DownloadError(err, format("unable to download ‘%s’: HTTP error %d")
+                    % url % httpStatus);
+            else
+                throw DownloadError(err, format("unable to download ‘%s’: %s (%d)")
+                    % url % curl_easy_strerror(res) % res);
         }
 
+        char *effectiveUrlCStr;
+        curl_easy_getinfo(curl, CURLINFO_EFFECTIVE_URL, &effectiveUrlCStr);
+        if (effectiveUrlCStr)
+            effectiveUrl = effectiveUrlCStr;
+
         if (httpStatus == 304) return false;
 
         return true;
@@ -206,14 +220,27 @@ struct CurlDownloader : public Downloader
 
     DownloadResult download(string url, const DownloadOptions & options) override
     {
-        DownloadResult res;
-        if (fetch(resolveUri(url), options)) {
-            res.cached = false;
-            res.data = data;
-        } else
-            res.cached = true;
-        res.etag = etag;
-        return res;
+        size_t attempt = 0;
+
+        while (true) {
+            try {
+                DownloadResult res;
+                if (fetch(resolveUri(url), options)) {
+                    res.cached = false;
+                    res.data = data;
+                } else
+                    res.cached = true;
+                res.effectiveUrl = effectiveUrl;
+                res.etag = etag;
+                return res;
+            } catch (DownloadError & e) {
+                attempt++;
+                if (e.error != Transient || attempt >= options.tries) throw;
+                auto ms = options.baseRetryTimeMs * (1 << (attempt - 1));
+                printMsg(lvlError, format("warning: %s; retrying in %d ms") % e.what() % ms);
+                std::this_thread::sleep_for(std::chrono::milliseconds(ms));
+            }
+        }
     }
 };
 
@@ -222,13 +249,14 @@ ref<Downloader> makeDownloader()
     return make_ref<CurlDownloader>();
 }
 
-Path Downloader::downloadCached(ref<Store> store, const string & url_, bool unpack, const Hash & expectedHash)
+Path Downloader::downloadCached(ref<Store> store, const string & url_, bool unpack, string name, const Hash & expectedHash, string * effectiveUrl)
 {
     auto url = resolveUri(url_);
 
-    string name;
-    auto p = url.rfind('/');
-    if (p != string::npos) name = string(url, p + 1);
+    if (name == "") {
+        auto p = url.rfind('/');
+        if (p != string::npos) name = string(url, p + 1);
+    }
 
     Path expectedStorePath;
     if (expectedHash) {
@@ -259,9 +287,11 @@ Path Downloader::downloadCached(ref<Store> store, const string & url_, bool unpa
             auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
             if (ss.size() >= 3 && ss[0] == url) {
                 time_t lastChecked;
-                if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0))
+                if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0)) {
                     skip = true;
-                else if (!ss[1].empty()) {
+                    if (effectiveUrl)
+                        *effectiveUrl = url_;
+                } else if (!ss[1].empty()) {
                     printMsg(lvlDebug, format("verifying previous ETag ‘%1%’") % ss[1]);
                     expectedETag = ss[1];
                 }
@@ -276,6 +306,8 @@ Path Downloader::downloadCached(ref<Store> store, const string & url_, bool unpa
             DownloadOptions options;
             options.expectedETag = expectedETag;
             auto res = download(url, options);
+            if (effectiveUrl)
+                *effectiveUrl = res.effectiveUrl;
 
             if (!res.cached) {
                 ValidPathInfo info;
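The retry loop above only retries errors classified as Transient, sleeps baseRetryTimeMs·2^(attempt−1) milliseconds between attempts, and gives up after options.tries attempts. A quick sketch of the resulting delay schedule, using the values the HTTP binary-cache store sets later in this commit:

    #include <iostream>

    int main()
    {
        unsigned int baseRetryTimeMs = 1000; // as set for HTTP binary caches
        size_t tries = 5;
        for (size_t attempt = 1; attempt < tries; ++attempt)
            std::cout << "retry " << attempt << " after "
                      << baseRetryTimeMs * (1 << (attempt - 1)) << " ms\n";
        // prints 1000, 2000, 4000, 8000 ms; the fifth failure is rethrown
    }
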
diff --git a/src/libstore/download.hh b/src/libstore/download.hh
index efddc55281fe..011b85f4721b 100644
--- a/src/libstore/download.hh
+++ b/src/libstore/download.hh
@@ -9,16 +9,19 @@ namespace nix {
 
 struct DownloadOptions
 {
-    string expectedETag;
-    bool verifyTLS{true};
-    enum { yes, no, automatic } showProgress{yes};
-    bool head{false};
+    std::string expectedETag;
+    bool verifyTLS = true;
+    enum { yes, no, automatic } showProgress = yes;
+    bool head = false;
+    size_t tries = 1;
+    unsigned int baseRetryTimeMs = 100;
 };
 
 struct DownloadResult
 {
     bool cached;
     string etag;
+    string effectiveUrl;
     std::shared_ptr<std::string> data;
 };
 
@@ -28,10 +31,10 @@ struct Downloader
 {
     virtual DownloadResult download(string url, const DownloadOptions & options) = 0;
 
-    Path downloadCached(ref<Store> store, const string & url, bool unpack,
-        const Hash & expectedHash = Hash());
+    Path downloadCached(ref<Store> store, const string & url, bool unpack, string name = "",
+        const Hash & expectedHash = Hash(), string * effectiveUrl = nullptr);
 
-    enum Error { NotFound, Forbidden, Misc };
+    enum Error { NotFound, Forbidden, Misc, Transient };
 };
 
 ref<Downloader> makeDownloader();
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index c12178e4028a..ecf81e8eb38e 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -69,7 +69,6 @@ void Settings::processEnvironment()
     nixDataDir = canonPath(getEnv("NIX_DATA_DIR", NIX_DATA_DIR));
     nixLogDir = canonPath(getEnv("NIX_LOG_DIR", NIX_LOG_DIR));
     nixStateDir = canonPath(getEnv("NIX_STATE_DIR", NIX_STATE_DIR));
-    nixDBPath = getEnv("NIX_DB_DIR", nixStateDir + "/db");
     nixConfDir = canonPath(getEnv("NIX_CONF_DIR", NIX_CONF_DIR));
     nixLibexecDir = canonPath(getEnv("NIX_LIBEXEC_DIR", NIX_LIBEXEC_DIR));
     nixBinDir = canonPath(getEnv("NIX_BIN_DIR", NIX_BIN_DIR));
@@ -77,7 +76,7 @@ void Settings::processEnvironment()
 
     // should be set with the other config options, but depends on nixLibexecDir
 #ifdef __APPLE__
-    preBuildHook = nixLibexecDir + "/nix/resolve-system-dependencies.pl";
+    preBuildHook = nixLibexecDir + "/nix/resolve-system-dependencies";
 #endif
 }
 
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index 65f763ace3c7..3194193bc842 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -51,9 +51,6 @@ struct Settings {
     /* The directory where state is stored. */
     Path nixStateDir;
 
-    /* The directory where we keep the SQLite database. */
-    Path nixDBPath;
-
     /* The directory where configuration files are stored. */
     Path nixConfDir;
 
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index da80b636c76c..bdcd2fd3998b 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -58,6 +58,7 @@ protected:
             DownloadOptions options;
             options.showProgress = DownloadOptions::no;
             options.head = true;
+            options.tries = 5;
             downloader->download(cacheUri + "/" + path, options);
             return true;
         } catch (DownloadError & e) {
@@ -79,6 +80,8 @@ protected:
         auto downloader(downloaders.get());
         DownloadOptions options;
         options.showProgress = DownloadOptions::no;
+        options.tries = 5;
+        options.baseRetryTimeMs = 1000;
         try {
             return downloader->download(cacheUri + "/" + path, options).data;
         } catch (DownloadError & e) {
diff --git a/src/libstore/local-fs-store.cc b/src/libstore/local-fs-store.cc
index b1b9dc29e40d..4571a2211cd2 100644
--- a/src/libstore/local-fs-store.cc
+++ b/src/libstore/local-fs-store.cc
@@ -7,7 +7,9 @@ namespace nix {
 
 LocalFSStore::LocalFSStore(const Params & params)
     : Store(params)
-    , stateDir(get(params, "state", settings.nixStateDir))
+    , rootDir(get(params, "root"))
+    , stateDir(canonPath(get(params, "state", rootDir != "" ? rootDir + "/nix/var/nix" : settings.nixStateDir)))
+    , logDir(canonPath(get(params, "log", rootDir != "" ? rootDir + "/nix/var/log/nix" : settings.nixLogDir)))
 {
 }
 
@@ -21,7 +23,7 @@ struct LocalStoreAccessor : public FSAccessor
     {
         Path storePath = store->toStorePath(path);
         if (!store->isValidPath(storePath))
-            throw Error(format("path ‘%1%’ is not a valid store path") % storePath);
+            throw InvalidPath(format("path ‘%1%’ is not a valid store path") % storePath);
         return store->getRealStoreDir() + std::string(path, store->storeDir.size());
     }
 
@@ -79,7 +81,7 @@ void LocalFSStore::narFromPath(const Path & path, Sink & sink)
 {
     if (!isValidPath(path))
         throw Error(format("path ‘%s’ is not valid") % path);
-    dumpPath(path, sink);
+    dumpPath(getRealStoreDir() + std::string(path, storeDir.size()), sink);
 }
 
 }
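LocalFSStore now accepts a ‘root’ parameter from which the state and log directories are derived unless they are overridden individually, which is what allows a complete store to live under an arbitrary prefix. A sketch of that precedence; get() here is a local stand-in for the helper used above, and the fallback directories are hypothetical defaults, not the configured settings.

    #include <iostream>
    #include <map>
    #include <string>

    using Params = std::map<std::string, std::string>;

    // Local stand-in for the get(params, key, default) helper.
    static std::string get(const Params & params, const std::string & key,
        const std::string & def = "")
    {
        auto i = params.find(key);
        return i == params.end() ? def : i->second;
    }

    int main()
    {
        Params params{{"root", "/tmp/teststore"}}; // hypothetical parameter
        auto rootDir = get(params, "root");
        auto stateDir = get(params, "state",
            rootDir != "" ? rootDir + "/nix/var/nix" : "/nix/var/nix");
        auto logDir = get(params, "log",
            rootDir != "" ? rootDir + "/nix/var/log/nix" : "/nix/var/log/nix");
        std::cout << stateDir << "\n" << logDir << "\n";
    }
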
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index 96ce6a0d893b..10056f2f1fd8 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -38,8 +38,8 @@ namespace nix {
 
 LocalStore::LocalStore(const Params & params)
     : LocalFSStore(params)
-    , realStoreDir(get(params, "real", storeDir))
-    , dbDir(get(params, "state", "") != "" ? get(params, "state", "") + "/db" : settings.nixDBPath)
+    , realStoreDir(get(params, "real", rootDir != "" ? rootDir + "/nix/store" : storeDir))
+    , dbDir(stateDir + "/db")
     , linksDir(realStoreDir + "/.links")
     , reservedPath(dbDir + "/reserved")
     , schemaPath(dbDir + "/schema")
@@ -181,17 +181,20 @@ LocalStore::LocalStore(const Params & params)
 
         if (curSchema < 8) {
             SQLiteTxn txn(state->db);
-            if (sqlite3_exec(state->db, "alter table ValidPaths add column ultimate integer", 0, 0, 0) != SQLITE_OK)
-                throwSQLiteError(state->db, "upgrading database schema");
-            if (sqlite3_exec(state->db, "alter table ValidPaths add column sigs text", 0, 0, 0) != SQLITE_OK)
-                throwSQLiteError(state->db, "upgrading database schema");
+            state->db.exec("alter table ValidPaths add column ultimate integer");
+            state->db.exec("alter table ValidPaths add column sigs text");
             txn.commit();
         }
 
         if (curSchema < 9) {
             SQLiteTxn txn(state->db);
-            if (sqlite3_exec(state->db, "drop table FailedPaths", 0, 0, 0) != SQLITE_OK)
-                throwSQLiteError(state->db, "upgrading database schema");
+            state->db.exec("drop table FailedPaths");
+            txn.commit();
+        }
+
+        if (curSchema < 10) {
+            SQLiteTxn txn(state->db);
+            state->db.exec("alter table ValidPaths add column ca text");
             txn.commit();
         }
 
@@ -204,13 +207,13 @@ LocalStore::LocalStore(const Params & params)
 
     /* Prepare SQL statements. */
     state->stmtRegisterValidPath.create(state->db,
-        "insert into ValidPaths (path, hash, registrationTime, deriver, narSize, ultimate, sigs) values (?, ?, ?, ?, ?, ?, ?);");
+        "insert into ValidPaths (path, hash, registrationTime, deriver, narSize, ultimate, sigs, ca) values (?, ?, ?, ?, ?, ?, ?, ?);");
     state->stmtUpdatePathInfo.create(state->db,
-        "update ValidPaths set narSize = ?, hash = ?, ultimate = ?, sigs = ? where path = ?;");
+        "update ValidPaths set narSize = ?, hash = ?, ultimate = ?, sigs = ?, ca = ? where path = ?;");
     state->stmtAddReference.create(state->db,
         "insert or replace into Refs (referrer, reference) values (?, ?);");
     state->stmtQueryPathInfo.create(state->db,
-        "select id, hash, registrationTime, deriver, narSize, ultimate, sigs from ValidPaths where path = ?;");
+        "select id, hash, registrationTime, deriver, narSize, ultimate, sigs, ca from ValidPaths where path = ?;");
     state->stmtQueryReferences.create(state->db,
         "select path from Refs join ValidPaths on reference = id where referrer = ?;");
     state->stmtQueryReferrers.create(state->db,
@@ -279,8 +282,7 @@ void LocalStore::openDB(State & state, bool create)
     if (sqlite3_busy_timeout(db, 60 * 60 * 1000) != SQLITE_OK)
         throwSQLiteError(db, "setting timeout");
 
-    if (sqlite3_exec(db, "pragma foreign_keys = 1;", 0, 0, 0) != SQLITE_OK)
-        throwSQLiteError(db, "enabling foreign keys");
+    db.exec("pragma foreign_keys = 1");
 
     /* !!! check whether sqlite has been built with foreign key
        support */
@@ -290,8 +292,7 @@ void LocalStore::openDB(State & state, bool create)
        all.  This can cause database corruption if the system
        crashes. */
     string syncMode = settings.fsyncMetadata ? "normal" : "off";
-    if (sqlite3_exec(db, ("pragma synchronous = " + syncMode + ";").c_str(), 0, 0, 0) != SQLITE_OK)
-        throwSQLiteError(db, "setting synchronous mode");
+    db.exec("pragma synchronous = " + syncMode);
 
     /* Set the SQLite journal mode.  WAL mode is fastest, so it's the
        default. */
@@ -319,8 +320,7 @@ void LocalStore::openDB(State & state, bool create)
         const char * schema =
 #include "schema.sql.hh"
             ;
-        if (sqlite3_exec(db, (const char *) schema, 0, 0, 0) != SQLITE_OK)
-            throwSQLiteError(db, "initialising database schema");
+        db.exec(schema);
     }
 }
 
@@ -527,6 +527,7 @@ uint64_t LocalStore::addValidPath(State & state,
         (info.narSize, info.narSize != 0)
         (info.ultimate ? 1 : 0, info.ultimate)
         (concatStringsSep(" ", info.sigs), !info.sigs.empty())
+        (info.ca, !info.ca.empty())
         .exec();
     uint64_t id = sqlite3_last_insert_rowid(state.db);
 
@@ -609,6 +610,9 @@ std::shared_ptr<ValidPathInfo> LocalStore::queryPathInfoUncached(const Path & pa
         s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 6);
         if (s) info->sigs = tokenizeString<StringSet>(s, " ");
 
+        s = (const char *) sqlite3_column_text(state->stmtQueryPathInfo, 7);
+        if (s) info->ca = s;
+
         /* Get the references. */
         auto useQueryReferences(state->stmtQueryReferences.use()(info->id));
 
@@ -628,6 +632,7 @@ void LocalStore::updatePathInfo(State & state, const ValidPathInfo & info)
         ("sha256:" + printHash(info.narHash))
         (info.ultimate ? 1 : 0, info.ultimate)
         (concatStringsSep(" ", info.sigs), !info.sigs.empty())
+        (info.ca, !info.ca.empty())
         (info.path)
         .exec();
 }
@@ -755,7 +760,7 @@ Path LocalStore::queryPathFromHashPart(const string & hashPart)
 
     Path prefix = storeDir + "/" + hashPart;
 
-    return retrySQLite<Path>([&]() {
+    return retrySQLite<Path>([&]() -> std::string {
         auto state(_state.lock());
 
         auto useQueryPathFromHashPart(state->stmtQueryPathFromHashPart.use()(prefix));
@@ -898,7 +903,7 @@ void LocalStore::addToStore(const ValidPathInfo & info, const std::string & nar,
         throw Error(format("hash mismatch importing path ‘%s’; expected hash ‘%s’, got ‘%s’") %
             info.path % info.narHash.to_string() % h.to_string());
 
-    if (requireSigs && !dontCheckSigs && !info.checkSignatures(publicKeys))
+    if (requireSigs && !dontCheckSigs && !info.checkSignatures(*this, publicKeys))
         throw Error(format("cannot import path ‘%s’ because it lacks a valid signature") % info.path);
 
     addTempRoot(info.path);
@@ -983,6 +988,7 @@ Path LocalStore::addToStoreFromDump(const string & dump, const string & name,
             info.narHash = hash.first;
             info.narSize = hash.second;
             info.ultimate = true;
+            info.ca = "fixed:" + (recursive ? (std::string) "r:" : "") + h.to_string();
             registerValidPath(info);
         }
 
@@ -1014,7 +1020,8 @@ Path LocalStore::addToStore(const string & name, const Path & _srcPath,
 Path LocalStore::addTextToStore(const string & name, const string & s,
     const PathSet & references, bool repair)
 {
-    Path dstPath = computeStorePathForText(name, s, references);
+    auto hash = hashString(htSHA256, s);
+    auto dstPath = makeTextPath(name, hash, references);
 
     addTempRoot(dstPath);
 
@@ -1034,16 +1041,17 @@ Path LocalStore::addTextToStore(const string & name, const string & s,
 
             StringSink sink;
             dumpString(s, sink);
-            auto hash = hashString(htSHA256, *sink.s);
+            auto narHash = hashString(htSHA256, *sink.s);
 
             optimisePath(realPath);
 
             ValidPathInfo info;
             info.path = dstPath;
-            info.narHash = hash;
+            info.narHash = narHash;
             info.narSize = sink.s->size();
             info.references = references;
             info.ultimate = true;
+            info.ca = "text:" + hash.to_string();
             registerValidPath(info);
         }
 
@@ -1282,9 +1290,7 @@ void LocalStore::upgradeStore7()
 void LocalStore::vacuumDB()
 {
     auto state(_state.lock());
-
-    if (sqlite3_exec(state->db, "vacuum;", 0, 0, 0) != SQLITE_OK)
-        throwSQLiteError(state->db, "vacuuming SQLite database");
+    state->db.exec("vacuum");
 }
 
 
diff --git a/src/libstore/local-store.hh b/src/libstore/local-store.hh
index 7bfc4ad34c3f..5b5960cf245f 100644
--- a/src/libstore/local-store.hh
+++ b/src/libstore/local-store.hh
@@ -17,8 +17,8 @@ namespace nix {
 /* Nix store and database schema version.  Version 1 (or 0) was Nix <=
    0.7.  Version 2 was Nix 0.8 and 0.9.  Version 3 is Nix 0.10.
    Version 4 is Nix 0.11.  Version 5 is Nix 0.12-0.16.  Version 6 is
-   Nix 1.0.  Version 7 is Nix 1.3. Version 9 is 1.12. */
-const int nixSchemaVersion = 9;
+   Nix 1.0.  Version 7 is Nix 1.3. Version 10 is 1.12. */
+const int nixSchemaVersion = 10;
 
 
 extern string drvsLogDir;
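Schema version 10 adds the ‘ca’ column populated in local-store.cc above: addToStoreFromDump() records ‘fixed:’ (with an extra ‘r:’ for recursive hashing) followed by the content hash, and addTextToStore() records ‘text:’ followed by the text hash. A trivial sketch of the two string formats, with abbreviated, invented hashes:

    #include <iostream>
    #include <string>

    int main()
    {
        // Invented, abbreviated hashes purely for illustration.
        std::string fileHash = "sha256:0c4dlhl...";
        std::string textHash = "sha256:1a2bk9q...";

        bool recursive = true;
        std::string caFixed =
            "fixed:" + std::string(recursive ? "r:" : "") + fileHash;
        std::string caText = "text:" + textHash;

        std::cout << caFixed << "\n" << caText << "\n";
    }
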
diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc
index 8896862be149..ded19c05d2cd 100644
--- a/src/libstore/nar-accessor.cc
+++ b/src/libstore/nar-accessor.cc
@@ -27,7 +27,7 @@ struct NarIndexer : ParseSink, StringSource
 
     Path currentPath;
     std::string currentStart;
-    bool isExec;
+    bool isExec = false;
 
     NarIndexer(const std::string & nar) : StringSource(nar)
     {
diff --git a/src/libstore/nar-info-disk-cache.cc b/src/libstore/nar-info-disk-cache.cc
index 172a918ff453..d28ff42c7f23 100644
--- a/src/libstore/nar-info-disk-cache.cc
+++ b/src/libstore/nar-info-disk-cache.cc
@@ -78,21 +78,16 @@ public:
         Path dbPath = getCacheDir() + "/nix/binary-cache-v5.sqlite";
         createDirs(dirOf(dbPath));
 
-        if (sqlite3_open_v2(dbPath.c_str(), &state->db.db,
-                SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE, 0) != SQLITE_OK)
-            throw Error(format("cannot open store cache ‘%s’") % dbPath);
+        state->db = SQLite(dbPath);
 
         if (sqlite3_busy_timeout(state->db, 60 * 60 * 1000) != SQLITE_OK)
             throwSQLiteError(state->db, "setting timeout");
 
         // We can always reproduce the cache.
-        if (sqlite3_exec(state->db, "pragma synchronous = off", 0, 0, 0) != SQLITE_OK)
-            throwSQLiteError(state->db, "making database asynchronous");
-        if (sqlite3_exec(state->db, "pragma main.journal_mode = truncate", 0, 0, 0) != SQLITE_OK)
-            throwSQLiteError(state->db, "setting journal mode");
+        state->db.exec("pragma synchronous = off");
+        state->db.exec("pragma main.journal_mode = truncate");
 
-        if (sqlite3_exec(state->db, schema, 0, 0, 0) != SQLITE_OK)
-            throwSQLiteError(state->db, "initialising database schema");
+        state->db.exec(schema);
 
         state->insertCache.create(state->db,
             "insert or replace into BinaryCaches(url, timestamp, storeDir, wantMassQuery, priority) values (?, ?, ?, ?, ?)");
diff --git a/src/libstore/nar-info.cc b/src/libstore/nar-info.cc
index b0a8d77c2fba..201cac671a55 100644
--- a/src/libstore/nar-info.cc
+++ b/src/libstore/nar-info.cc
@@ -67,6 +67,10 @@ NarInfo::NarInfo(const Store & store, const std::string & s, const std::string &
             system = value;
         else if (name == "Sig")
             sigs.insert(value);
+        else if (name == "CA") {
+            if (!ca.empty()) corrupt();
+            ca = value;
+        }
 
         pos = eol + 1;
     }
@@ -101,6 +105,9 @@ std::string NarInfo::to_string() const
     for (auto sig : sigs)
         res += "Sig: " + sig + "\n";
 
+    if (!ca.empty())
+        res += "CA: " + ca + "\n";
+
     return res;
 }
 
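With the ‘CA’ field serialised as above, the .narinfo of a content-addressed path simply carries one extra line; for a fixed-output path hashed recursively it would look like this (hash elided, illustrative only):

    CA: fixed:r:sha256:<hash of the NAR dump>

The format follows NarInfo::to_string() above and the ‘ca’ forms documented in store-api.hh further down.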
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index ab05c3844289..94075f3b9b39 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -94,6 +94,8 @@ ref<RemoteStore::Connection> RemoteStore::openConnection()
         conn->daemonVersion = readInt(conn->from);
         if (GET_PROTOCOL_MAJOR(conn->daemonVersion) != GET_PROTOCOL_MAJOR(PROTOCOL_VERSION))
             throw Error("Nix daemon protocol version not supported");
+        if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 10)
+            throw Error("the Nix daemon version is too old");
         conn->to << PROTOCOL_VERSION;
 
         if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 14) {
@@ -127,17 +129,13 @@ void RemoteStore::setOptions(ref<Connection> conn)
        << settings.tryFallback
        << verbosity
        << settings.maxBuildJobs
-       << settings.maxSilentTime;
-    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 2)
-        conn->to << settings.useBuildHook;
-    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 4)
-        conn->to << (settings.verboseBuild ? lvlError : lvlVomit)
-                 << 0 // obsolete log type
-                 << 0 /* obsolete print build trace */;
-    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 6)
-        conn->to << settings.buildCores;
-    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 10)
-        conn->to << settings.useSubstitutes;
+       << settings.maxSilentTime
+       << settings.useBuildHook
+       << (settings.verboseBuild ? lvlError : lvlVomit)
+       << 0 // obsolete log type
+       << 0 /* obsolete print build trace */
+       << settings.buildCores
+       << settings.useSubstitutes;
 
     if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 12) {
         Settings::SettingsMap overrides = settings.getOverrides();
@@ -213,8 +211,6 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths,
 
     auto conn(connections->get());
 
-    if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 3) return;
-
     if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
 
         for (auto & i : paths) {
@@ -227,7 +223,7 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths,
             if (info.deriver != "") assertStorePath(info.deriver);
             info.references = readStorePaths<PathSet>(*this, conn->from);
             info.downloadSize = readLongLong(conn->from);
-            info.narSize = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 7 ? readLongLong(conn->from) : 0;
+            info.narSize = readLongLong(conn->from);
             infos[i] = info;
         }
 
@@ -277,6 +273,7 @@ std::shared_ptr<ValidPathInfo> RemoteStore::queryPathInfoUncached(const Path & p
     if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 16) {
         info->ultimate = readInt(conn->from) != 0;
         info->sigs = readStrings<StringSet>(conn->from);
+        info->ca = readString(conn->from);
     }
     return info;
 }
@@ -481,11 +478,11 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
 {
     auto conn(connections->get());
 
-    conn->to << wopCollectGarbage << options.action << options.pathsToDelete << options.ignoreLiveness
-       << options.maxFreed << 0;
-    if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 5)
+    conn->to
+        << wopCollectGarbage << options.action << options.pathsToDelete << options.ignoreLiveness
+        << options.maxFreed
         /* removed options */
-        conn->to << 0 << 0;
+        << 0 << 0 << 0;
 
     conn->processStderr();
 
@@ -562,7 +559,7 @@ void RemoteStore::Connection::processStderr(Sink * sink, Source * source)
     }
     if (msg == STDERR_ERROR) {
         string error = readString(from);
-        unsigned int status = GET_PROTOCOL_MINOR(daemonVersion) >= 8 ? readInt(from) : 1;
+        unsigned int status = readInt(from);
         throw Error(format("%1%") % error, status);
     }
     else if (msg != STDERR_LAST)
diff --git a/src/libstore/schema.sql b/src/libstore/schema.sql
index 91878af1580d..09c71a2b8dd7 100644
--- a/src/libstore/schema.sql
+++ b/src/libstore/schema.sql
@@ -6,7 +6,8 @@ create table if not exists ValidPaths (
     deriver          text,
     narSize          integer,
     ultimate         integer, -- null implies "false"
-    sigs             text -- space-separated
+    sigs             text, -- space-separated
+    ca               text -- if not null, an assertion that the path is content-addressed; see ValidPathInfo
 );
 
 create table if not exists Refs (
diff --git a/src/libstore/sqlite.cc b/src/libstore/sqlite.cc
index 816f9984d6eb..ea0b843f5752 100644
--- a/src/libstore/sqlite.cc
+++ b/src/libstore/sqlite.cc
@@ -35,6 +35,13 @@ namespace nix {
         throw SQLiteError(format("%1%: %2%") % f.str() % sqlite3_errmsg(db));
 }
 
+SQLite::SQLite(const Path & path)
+{
+    if (sqlite3_open_v2(path.c_str(), &db,
+            SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE, 0) != SQLITE_OK)
+        throw Error(format("cannot open SQLite database ‘%s’") % path);
+}
+
 SQLite::~SQLite()
 {
     try {
@@ -45,6 +52,12 @@ SQLite::~SQLite()
     }
 }
 
+void SQLite::exec(const std::string & stmt)
+{
+    if (sqlite3_exec(db, stmt.c_str(), 0, 0, 0) != SQLITE_OK)
+        throwSQLiteError(db, format("executing SQLite statement ‘%s’") % stmt);
+}
+
 void SQLiteStmt::create(sqlite3 * db, const string & s)
 {
     checkInterrupt();
diff --git a/src/libstore/sqlite.hh b/src/libstore/sqlite.hh
index d6b4a8d9117b..7c1ed538215c 100644
--- a/src/libstore/sqlite.hh
+++ b/src/libstore/sqlite.hh
@@ -13,10 +13,16 @@ namespace nix {
 /* RAII wrapper to close a SQLite database automatically. */
 struct SQLite
 {
-    sqlite3 * db;
-    SQLite() { db = 0; }
+    sqlite3 * db = 0;
+    SQLite() { }
+    SQLite(const Path & path);
+    SQLite(const SQLite & from) = delete;
+    SQLite& operator = (const SQLite & from) = delete;
+    SQLite& operator = (SQLite && from) { db = from.db; from.db = 0; return *this; }
     ~SQLite();
     operator sqlite3 * () { return db; }
+
+    void exec(const std::string & stmt);
 };
 
 /* RAII wrapper to create and destroy SQLite prepared statements. */
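A minimal usage sketch of the extended wrapper (illustrative only, not part of this patch; the database path is made up):

    #include "sqlite.hh"

    using namespace nix;

    void sqliteExample()
    {
        SQLite db("/tmp/example.sqlite");           // opened by the new constructor, closed by ~SQLite()
        db.exec("pragma journal_mode = truncate");  // throws SQLiteError on failure
        db.exec("create table if not exists Foo (x integer)");

        SQLiteStmt stmt;
        stmt.create(db, "insert into Foo (x) values (?)");  // ‘db’ converts to sqlite3 * implicitly
    }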
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index af002dcc8c33..5dd56f905d57 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -202,6 +202,22 @@ Path Store::makeFixedOutputPath(bool recursive,
 }
 
 
+Path Store::makeTextPath(const string & name, const Hash & hash,
+    const PathSet & references) const
+{
+    assert(hash.type == htSHA256);
+    /* Stuff the references (if any) into the type.  This is a bit
+       hacky, but we can't put them in `s' since that would be
+       ambiguous. */
+    string type = "text";
+    for (auto & i : references) {
+        type += ":";
+        type += i;
+    }
+    return makeStorePath(type, hash, name);
+}
+
+
 std::pair<Path, Hash> Store::computeStorePathForPath(const Path & srcPath,
     bool recursive, HashType hashAlgo, PathFilter & filter) const
 {
@@ -215,16 +231,7 @@ std::pair<Path, Hash> Store::computeStorePathForPath(const Path & srcPath,
 Path Store::computeStorePathForText(const string & name, const string & s,
     const PathSet & references) const
 {
-    Hash hash = hashString(htSHA256, s);
-    /* Stuff the references (if any) into the type.  This is a bit
-       hacky, but we can't put them in `s' since that would be
-       ambiguous. */
-    string type = "text";
-    for (auto & i : references) {
-        type += ":";
-        type += i;
-    }
-    return makeStorePath(type, hash, name);
+    return makeTextPath(name, hashString(htSHA256, s), references);
 }
 
 
@@ -432,9 +439,38 @@ void ValidPathInfo::sign(const SecretKey & secretKey)
 }
 
 
-unsigned int ValidPathInfo::checkSignatures(const PublicKeys & publicKeys) const
+bool ValidPathInfo::isContentAddressed(const Store & store) const
+{
+    auto warn = [&]() {
+        printMsg(lvlError, format("warning: path ‘%s’ claims to be content-addressed but isn't") % path);
+    };
+
+    if (hasPrefix(ca, "text:")) {
+        auto hash = parseHash(std::string(ca, 5));
+        if (store.makeTextPath(storePathToName(path), hash, references) == path)
+            return true;
+        else
+            warn();
+    }
+
+    else if (hasPrefix(ca, "fixed:")) {
+        bool recursive = ca.compare(6, 2, "r:") == 0;
+        auto hash = parseHash(std::string(ca, recursive ? 8 : 6));
+        if (store.makeFixedOutputPath(recursive, hash, storePathToName(path)) == path)
+            return true;
+        else
+            warn();
+    }
+
+    return false;
+}
+
+
+size_t ValidPathInfo::checkSignatures(const Store & store, const PublicKeys & publicKeys) const
 {
-    unsigned int good = 0;
+    if (isContentAddressed(store)) return maxSigs;
+
+    size_t good = 0;
     for (auto & sig : sigs)
         if (checkSignature(publicKeys, sig))
             good++;
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index 0b80312d6307..41fc58fc48e2 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -16,6 +16,13 @@
 namespace nix {
 
 
+struct BasicDerivation;
+struct Derivation;
+class FSAccessor;
+class NarInfoDiskCache;
+class Store;
+
+
 /* Size of the hash part of store paths, in base-32 characters. */
 const size_t storePathHashLen = 32; // i.e. 160 bits
 
@@ -109,6 +116,34 @@ struct ValidPathInfo
 
     StringSet sigs; // note: not necessarily verified
 
+    /* If non-empty, an assertion that the path is content-addressed,
+       i.e., that the store path is computed from a cryptographic hash
+       of the contents of the path, plus some other bits of data like
+       the "name" part of the path. Such a path doesn't need
+       signatures, since we don't have to trust anybody's claim that
+       the path is the output of a particular derivation. (In the
+       extensional store model, we have to trust that the *contents*
+       of an output path of a derivation were actually produced by
+       that derivation. In the intensional model, we have to trust
+       that a particular output path was produced by a derivation; the
+       path name then implies the contents.)
+
+       Ideally, the content-addressability assertion would just be a
+       Boolean, and the store path would be computed from
+       ‘storePathToName(path)’, ‘narHash’ and ‘references’. However,
+       1) we've accumulated several types of content-addressed paths
+       over the years; and 2) fixed-output derivations support
+       multiple hash algorithms and serialisation methods (flat file
+       vs NAR). Thus, ‘ca’ has one of the following forms:
+
+       * ‘text:sha256:<sha256 hash of file contents>’: For paths
+         computed by makeTextPath() / addTextToStore().
+
+       * ‘fixed:<r?>:<ht>:<h>’: For paths computed by
+         makeFixedOutputPath() / addToStore().
+    */
+    std::string ca;
+
     bool operator == (const ValidPathInfo & i) const
     {
         return
@@ -117,19 +152,25 @@ struct ValidPathInfo
             && references == i.references;
     }
 
-    /*  Return a fingerprint of the store path to be used in binary
-        cache signatures. It contains the store path, the base-32
-        SHA-256 hash of the NAR serialisation of the path, the size of
-        the NAR, and the sorted references. The size field is strictly
-        speaking superfluous, but might prevent endless/excessive data
-        attacks. */
+    /* Return a fingerprint of the store path to be used in binary
+       cache signatures. It contains the store path, the base-32
+       SHA-256 hash of the NAR serialisation of the path, the size of
+       the NAR, and the sorted references. The size field is strictly
+       speaking superfluous, but might prevent endless/excessive data
+       attacks. */
     std::string fingerprint() const;
 
     void sign(const SecretKey & secretKey);
 
+    /* Return true iff the path is verifiably content-addressed. */
+    bool isContentAddressed(const Store & store) const;
+
+    static const size_t maxSigs = std::numeric_limits<size_t>::max();
+
     /* Return the number of signatures on this .narinfo that were
-       produced by one of the specified keys. */
-    unsigned int checkSignatures(const PublicKeys & publicKeys) const;
+       produced by one of the specified keys, or maxSigs if the path
+       is content-addressed. */
+    size_t checkSignatures(const Store & store, const PublicKeys & publicKeys) const;
 
     /* Verify a single signature. */
     bool checkSignature(const PublicKeys & publicKeys, const std::string & sig) const;
@@ -169,12 +210,6 @@ struct BuildResult
 };
 
 
-struct BasicDerivation;
-struct Derivation;
-class FSAccessor;
-class NarInfoDiskCache;
-
-
 class Store : public std::enable_shared_from_this<Store>
 {
 public:
@@ -234,10 +269,12 @@ public:
     Path makeFixedOutputPath(bool recursive,
         const Hash & hash, const string & name) const;
 
-    /* This is the preparatory part of addToStore() and
-       addToStoreFixed(); it computes the store path to which srcPath
-       is to be copied.  Returns the store path and the cryptographic
-       hash of the contents of srcPath. */
+    Path makeTextPath(const string & name, const Hash & hash,
+        const PathSet & references) const;
+
+    /* This is the preparatory part of addToStore(); it computes the
+       store path to which srcPath is to be copied.  Returns the store
+       path and the cryptographic hash of the contents of srcPath. */
     std::pair<Path, Hash> computeStorePathForPath(const Path & srcPath,
         bool recursive = true, HashType hashAlgo = htSHA256,
         PathFilter & filter = defaultPathFilter) const;
@@ -491,7 +528,9 @@ protected:
 class LocalFSStore : public Store
 {
 public:
+    const Path rootDir;
     const Path stateDir;
+    const Path logDir;
 
     LocalFSStore(const Params & params);
 
diff --git a/src/libutil/json.cc b/src/libutil/json.cc
new file mode 100644
index 000000000000..ecc3fdfe514e
--- /dev/null
+++ b/src/libutil/json.cc
@@ -0,0 +1,176 @@
+#include "json.hh"
+
+#include <iomanip>
+#include <cstring>
+
+namespace nix {
+
+void toJSON(std::ostream & str, const char * start, const char * end)
+{
+    str << '"';
+    for (auto i = start; i != end; i++)
+        if (*i == '\"' || *i == '\\') str << '\\' << *i;
+        else if (*i == '\n') str << "\\n";
+        else if (*i == '\r') str << "\\r";
+        else if (*i == '\t') str << "\\t";
+        else if (*i >= 0 && *i < 32)
+            str << "\\u" << std::setfill('0') << std::setw(4) << std::hex << (uint16_t) *i << std::dec;
+        else str << *i;
+    str << '"';
+}
+
+void toJSON(std::ostream & str, const std::string & s)
+{
+    toJSON(str, s.c_str(), s.c_str() + s.size());
+}
+
+void toJSON(std::ostream & str, const char * s)
+{
+    if (!s) str << "null"; else toJSON(str, s, s + strlen(s));
+}
+
+void toJSON(std::ostream & str, unsigned long long n)
+{
+    str << n;
+}
+
+void toJSON(std::ostream & str, unsigned long n)
+{
+    str << n;
+}
+
+void toJSON(std::ostream & str, long n)
+{
+    str << n;
+}
+
+void toJSON(std::ostream & str, double f)
+{
+    str << f;
+}
+
+void toJSON(std::ostream & str, bool b)
+{
+    str << (b ? "true" : "false");
+}
+
+JSONWriter::JSONWriter(std::ostream & str, bool indent)
+    : state(new JSONState(str, indent))
+{
+    state->stack.push_back(this);
+}
+
+JSONWriter::JSONWriter(JSONState * state)
+    : state(state)
+{
+    state->stack.push_back(this);
+}
+
+JSONWriter::~JSONWriter()
+{
+    assertActive();
+    state->stack.pop_back();
+    if (state->stack.empty()) delete state;
+}
+
+void JSONWriter::comma()
+{
+    assertActive();
+    if (first) {
+        first = false;
+    } else {
+        state->str << ',';
+    }
+    if (state->indent) indent();
+}
+
+void JSONWriter::indent()
+{
+    state->str << '\n' << std::string(state->depth * 2, ' ');
+}
+
+void JSONList::open()
+{
+    state->depth++;
+    state->str << '[';
+}
+
+JSONList::~JSONList()
+{
+    state->depth--;
+    if (state->indent && !first) indent();
+    state->str << "]";
+}
+
+JSONList JSONList::list()
+{
+    comma();
+    return JSONList(state);
+}
+
+JSONObject JSONList::object()
+{
+    comma();
+    return JSONObject(state);
+}
+
+JSONPlaceholder JSONList::placeholder()
+{
+    comma();
+    return JSONPlaceholder(state);
+}
+
+void JSONObject::open()
+{
+    state->depth++;
+    state->str << '{';
+}
+
+JSONObject::~JSONObject()
+{
+    state->depth--;
+    if (state->indent && !first) indent();
+    state->str << "}";
+}
+
+void JSONObject::attr(const std::string & s)
+{
+    comma();
+    toJSON(state->str, s);
+    state->str << ':';
+    if (state->indent) state->str << ' ';
+}
+
+JSONList JSONObject::list(const std::string & name)
+{
+    attr(name);
+    return JSONList(state);
+}
+
+JSONObject JSONObject::object(const std::string & name)
+{
+    attr(name);
+    return JSONObject(state);
+}
+
+JSONPlaceholder JSONObject::placeholder(const std::string & name)
+{
+    attr(name);
+    return JSONPlaceholder(state);
+}
+
+JSONList JSONPlaceholder::list()
+{
+    assertValid();
+    first = false;
+    return JSONList(state);
+}
+
+JSONObject JSONPlaceholder::object()
+{
+    assertValid();
+    first = false;
+    return JSONObject(state);
+}
+
+}
diff --git a/src/libutil/json.hh b/src/libutil/json.hh
new file mode 100644
index 000000000000..aec456845056
--- /dev/null
+++ b/src/libutil/json.hh
@@ -0,0 +1,184 @@
+#pragma once
+
+#include <iostream>
+#include <vector>
+#include <cassert>
+
+namespace nix {
+
+void toJSON(std::ostream & str, const char * start, const char * end);
+void toJSON(std::ostream & str, const std::string & s);
+void toJSON(std::ostream & str, const char * s);
+void toJSON(std::ostream & str, unsigned long long n);
+void toJSON(std::ostream & str, unsigned long n);
+void toJSON(std::ostream & str, long n);
+void toJSON(std::ostream & str, double f);
+void toJSON(std::ostream & str, bool b);
+
+class JSONWriter
+{
+protected:
+
+    struct JSONState
+    {
+        std::ostream & str;
+        bool indent;
+        size_t depth = 0;
+        std::vector<JSONWriter *> stack;
+        JSONState(std::ostream & str, bool indent) : str(str), indent(indent) { }
+        ~JSONState()
+        {
+            assert(stack.empty());
+        }
+    };
+
+    JSONState * state;
+
+    bool first = true;
+
+    JSONWriter(std::ostream & str, bool indent);
+
+    JSONWriter(JSONState * state);
+
+    ~JSONWriter();
+
+    void assertActive()
+    {
+        assert(!state->stack.empty() && state->stack.back() == this);
+    }
+
+    void comma();
+
+    void indent();
+};
+
+class JSONObject;
+class JSONPlaceholder;
+
+class JSONList : JSONWriter
+{
+private:
+
+    friend class JSONObject;
+    friend class JSONPlaceholder;
+
+    void open();
+
+    JSONList(JSONState * state)
+        : JSONWriter(state)
+    {
+        open();
+    }
+
+public:
+
+    JSONList(std::ostream & str, bool indent = false)
+        : JSONWriter(str, indent)
+    {
+        open();
+    }
+
+    ~JSONList();
+
+    template<typename T>
+    JSONList & elem(const T & v)
+    {
+        comma();
+        toJSON(state->str, v);
+        return *this;
+    }
+
+    JSONList list();
+
+    JSONObject object();
+
+    JSONPlaceholder placeholder();
+};
+
+class JSONObject : JSONWriter
+{
+private:
+
+    friend class JSONList;
+    friend class JSONPlaceholder;
+
+    void open();
+
+    JSONObject(JSONState * state)
+        : JSONWriter(state)
+    {
+        open();
+    }
+
+    void attr(const std::string & s);
+
+public:
+
+    JSONObject(std::ostream & str, bool indent = false)
+        : JSONWriter(str, indent)
+    {
+        open();
+    }
+
+    ~JSONObject();
+
+    template<typename T>
+    JSONObject & attr(const std::string & name, const T & v)
+    {
+        attr(name);
+        toJSON(state->str, v);
+        return *this;
+    }
+
+    JSONList list(const std::string & name);
+
+    JSONObject object(const std::string & name);
+
+    JSONPlaceholder placeholder(const std::string & name);
+};
+
+class JSONPlaceholder : JSONWriter
+{
+
+private:
+
+    friend class JSONList;
+    friend class JSONObject;
+
+    JSONPlaceholder(JSONState * state)
+        : JSONWriter(state)
+    {
+    }
+
+    void assertValid()
+    {
+        assertActive();
+        assert(first);
+    }
+
+public:
+
+    JSONPlaceholder(std::ostream & str, bool indent = false)
+        : JSONWriter(str, indent)
+    {
+    }
+
+    ~JSONPlaceholder()
+    {
+        assert(!first || std::uncaught_exception());
+    }
+
+    template<typename T>
+    void write(const T & v)
+    {
+        assertValid();
+        first = false;
+        toJSON(state->str, v);
+    }
+
+    JSONList list();
+
+    JSONObject object();
+};
+
+}
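A minimal usage sketch of the new JSON writer (illustrative, not part of the patch); nested writers must be destroyed before their parent is used again, and the destructors emit the closing brackets:

    #include "json.hh"
    #include <iostream>

    using namespace nix;

    void jsonExample()
    {
        JSONObject root(std::cout, true);         // ‘true’ enables indentation
        root.attr("name", "example");
        root.attr("narSize", 1234ULL);
        {
            auto refs = root.list("references");  // emits ‘"references": [’
            refs.elem("/nix/store/...-foo");
            refs.elem("/nix/store/...-bar");
        }                                         // ~JSONList() emits ‘]’
    }                                             // ~JSONObject() emits the final ‘}’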
diff --git a/src/nix-channel/local.mk b/src/nix-channel/local.mk
new file mode 100644
index 000000000000..49fc105c6f79
--- /dev/null
+++ b/src/nix-channel/local.mk
@@ -0,0 +1,7 @@
+programs += nix-channel
+
+nix-channel_DIR := $(d)
+
+nix-channel_LIBS = libmain libutil libformat libstore
+
+nix-channel_SOURCES := $(d)/nix-channel.cc
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
new file mode 100755
index 000000000000..0f7858aa53a5
--- /dev/null
+++ b/src/nix-channel/nix-channel.cc
@@ -0,0 +1,270 @@
+#include "shared.hh"
+#include "globals.hh"
+#include "download.hh"
+#include <fcntl.h>
+#include <regex>
+#include "store-api.hh"
+#include <pwd.h>
+
+using namespace nix;
+
+typedef std::map<string,string> Channels;
+
+static auto channels = Channels{};
+static auto channelsList = Path{};
+
+// Reads the list of channels.
+static void readChannels()
+{
+    if (!pathExists(channelsList)) return;
+    auto channelsFile = readFile(channelsList);
+
+    for (const auto & rawLine : tokenizeString<std::vector<string>>(channelsFile, "\n")) {
+        auto line = chomp(rawLine);
+        if (std::regex_search(line, std::regex("^\\s*\\#")))
+            continue;
+        auto split = tokenizeString<std::vector<string>>(line, " ");
+        auto url = std::regex_replace(split[0], std::regex("/*$"), "");
+        auto name = split.size() > 1 ? split[1] : baseNameOf(url);
+        channels[name] = url;
+    }
+}
+
+// Writes the list of channels.
+static void writeChannels()
+{
+    auto channelsFD = AutoCloseFD{open(channelsList.c_str(), O_WRONLY | O_CLOEXEC | O_CREAT | O_TRUNC, 0644)};
+    if (!channelsFD)
+        throw SysError(format("opening ‘%1%’ for writing") % channelsList);
+    for (const auto & channel : channels)
+        writeFull(channelsFD.get(), channel.second + " " + channel.first + "\n");
+}
+
+// Adds a channel.
+static void addChannel(const string & url, const string & name)
+{
+    if (!regex_search(url, std::regex("^(file|http|https)://")))
+        throw Error(format("invalid channel URL ‘%1%’") % url);
+    if (!regex_search(name, std::regex("^[a-zA-Z0-9_][a-zA-Z0-9_\\.-]*$")))
+        throw Error(format("invalid channel identifier ‘%1%’") % name);
+    readChannels();
+    channels[name] = url;
+    writeChannels();
+}
+
+static auto profile = Path{};
+
+// Remove a channel.
+static void removeChannel(const string & name)
+{
+    readChannels();
+    channels.erase(name);
+    writeChannels();
+
+    runProgram(settings.nixBinDir + "/nix-env", true, { "--profile", profile, "--uninstall", name });
+}
+
+static auto nixDefExpr = Path{};
+
+// Fetch Nix expressions and binary cache URLs from the subscribed channels.
+static void update(const StringSet & channelNames)
+{
+    readChannels();
+
+    auto store = openStore();
+
+    // Download each channel.
+    auto exprs = Strings{};
+    for (const auto & channel : channels) {
+        if (!channelNames.empty() && channelNames.find(channel.first) == channelNames.end())
+            continue;
+        auto name = channel.first;
+        auto url = channel.second;
+
+        // We want to download the url to a file to see if it's a tarball while also checking if we
+        // got redirected in the process, so that we can grab the various parts of a nix channel
+        // definition from a consistent location if the redirect changes mid-download.
+        auto effectiveUrl = string{};
+        auto dl = makeDownloader();
+        auto filename = dl->downloadCached(store, url, false, "", Hash(), &effectiveUrl);
+        url = chomp(std::move(effectiveUrl));
+
+        // If the URL contains a version number, append it to the name
+        // attribute (so that "nix-env -q" on the channels profile
+        // shows something useful).
+        auto cname = name;
+        std::smatch match;
+        auto urlBase = baseNameOf(url);
+        if (std::regex_search(urlBase, match, std::regex("(-\\d.*)$"))) {
+            cname = cname + (string) match[1];
+        }
+
+        auto extraAttrs = string{};
+
+        auto unpacked = false;
+        if (std::regex_search(filename, std::regex("\\.tar\\.(gz|bz2|xz)$"))) {
+            try {
+                runProgram(settings.nixBinDir + "/nix-build", false, { "--no-out-link", "--expr", "import <nix/unpack-channel.nix> "
+                            "{ name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; }" });
+                unpacked = true;
+            } catch (ExecError & e) {
+            }
+        }
+
+        if (!unpacked) {
+            // The URL doesn't unpack directly, so let's try treating it like a full channel folder with files in it
+            // Check if the channel advertises a binary cache.
+            DownloadOptions opts;
+            opts.showProgress = DownloadOptions::no;
+            try {
+                auto dlRes = dl->download(url + "/binary-cache-url", opts);
+                extraAttrs = "binaryCacheURL = \"" + *dlRes.data + "\";";
+            } catch (DownloadError & e) {
+            }
+
+            // Download the channel tarball.
+            auto fullURL = url + "/nixexprs.tar.xz";
+            try {
+                filename = dl->downloadCached(store, fullURL, false);
+            } catch (DownloadError & e) {
+                fullURL = url + "/nixexprs.tar.bz2";
+                filename = dl->downloadCached(store, fullURL, false);
+            }
+            filename = chomp(filename);
+        }
+
+        // Regardless of where it came from, add the expression representing this channel to the accumulated expressions.
+        exprs.push_back("f: f { name = \"" + cname + "\"; channelName = \"" + name + "\"; src = builtins.storePath \"" + filename + "\"; " + extraAttrs + " }");
+    }
+
+    // Unpack the channel tarballs into the Nix store and install them
+    // into the channels profile.
+    std::cerr << "unpacking channels...\n";
+    auto envArgs = Strings{ "--profile", profile, "--file", "<nix/unpack-channel.nix>", "--install", "--from-expression" };
+    for (auto & expr : exprs)
+        envArgs.push_back(std::move(expr));
+    envArgs.push_back("--quiet");
+    runProgram(settings.nixBinDir + "/nix-env", false, envArgs);
+
+    // Make the channels appear in nix-env.
+    struct stat st;
+    if (lstat(nixDefExpr.c_str(), &st) == 0) {
+        if (S_ISLNK(st.st_mode))
+            // old-skool ~/.nix-defexpr
+            if (unlink(nixDefExpr.c_str()) == -1)
+                throw SysError(format("unlinking %1%") % nixDefExpr);
+    } else if (errno != ENOENT) {
+        throw SysError(format("getting status of %1%") % nixDefExpr);
+    }
+    createDirs(nixDefExpr);
+    auto channelLink = nixDefExpr + "/channels";
+    replaceSymlink(profile, channelLink);
+}
+
+int main(int argc, char ** argv)
+{
+    return handleExceptions(argv[0], [&]() {
+        initNix();
+
+        // Turn on caching in nix-prefetch-url.
+        auto channelCache = settings.nixStateDir + "/channel-cache";
+        createDirs(channelCache);
+        setenv("NIX_DOWNLOAD_CACHE", channelCache.c_str(), 1);
+
+        // Figure out the name of the `.nix-channels' file to use
+        auto home = getEnv("HOME");
+        if (home.empty())
+            throw Error("$HOME not set");
+        channelsList = home + "/.nix-channels";
+        nixDefExpr = home + "/.nix-defexpr";
+
+        // Figure out the name of the channels profile.
+        auto name = string{};
+        auto pw = getpwuid(getuid());
+        if (!pw)
+            name = getEnv("USER", "");
+        else
+            name = pw->pw_name;
+        if (name.empty())
+            throw Error("cannot figure out user name");
+        profile = settings.nixStateDir + "/profiles/per-user/" + name + "/channels";
+        createDirs(dirOf(profile));
+
+        enum {
+            cNone,
+            cAdd,
+            cRemove,
+            cList,
+            cUpdate,
+            cRollback
+        } cmd = cNone;
+        auto args = std::vector<string>{};
+        parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) {
+            if (*arg == "--help") {
+                showManPage("nix-channel");
+            } else if (*arg == "--version") {
+                printVersion("nix-channel");
+            } else if (*arg == "--add") {
+                cmd = cAdd;
+            } else if (*arg == "--remove") {
+                cmd = cRemove;
+            } else if (*arg == "--list") {
+                cmd = cList;
+            } else if (*arg == "--update") {
+                cmd = cUpdate;
+            } else if (*arg == "--rollback") {
+                cmd = cRollback;
+            } else {
+                args.push_back(std::move(*arg));
+            }
+            return true;
+        });
+        switch (cmd) {
+            case cNone:
+                throw UsageError("no command specified");
+            case cAdd:
+                if (args.size() < 1 || args.size() > 2)
+                    throw UsageError("‘--add’ requires one or two arguments");
+                {
+                auto url = args[0];
+                auto name = string{};
+                if (args.size() == 2) {
+                    name = args[1];
+                } else {
+                    name = baseNameOf(url);
+                    name = std::regex_replace(name, std::regex("-unstable$"), "");
+                    name = std::regex_replace(name, std::regex("-stable$"), "");
+                }
+                addChannel(url, name);
+                }
+                break;
+            case cRemove:
+                if (args.size() != 1)
+                    throw UsageError("‘--remove’ requires one argument");
+                removeChannel(args[0]);
+                break;
+            case cList:
+                if (!args.empty())
+                    throw UsageError("‘--list’ expects no arguments");
+                readChannels();
+                for (const auto & channel : channels)
+                    std::cout << channel.first << ' ' << channel.second << '\n';
+                break;
+            case cUpdate:
+                update(StringSet(args.begin(), args.end()));
+                break;
+            case cRollback:
+                if (args.size() > 1)
+                    throw UsageError("‘--rollback’ has at most one argument");
+                auto envArgs = Strings{"--profile", profile};
+                if (args.size() == 1) {
+                    envArgs.push_back("--switch-generation");
+                    envArgs.push_back(args[0]);
+                } else {
+                    envArgs.push_back("--rollback");
+                }
+                runProgram(settings.nixBinDir + "/nix-env", false, envArgs);
+                break;
+        }
+    });
+}
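For reference, writeChannels() above stores one channel per line in ~/.nix-channels, URL first and name second, e.g.:

    https://nixos.org/channels/nixpkgs-unstable nixpkgs

(the name defaults to the base name of the URL, with a ‘-stable’ or ‘-unstable’ suffix stripped, when ‘--add’ is given a single argument).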
diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc
index 6e0d869f4c87..f2b59c84a858 100644
--- a/src/nix-daemon/nix-daemon.cc
+++ b/src/nix-daemon/nix-daemon.cc
@@ -413,12 +413,10 @@ static void performOp(ref<LocalStore> store, bool trusted, unsigned int clientVe
         options.pathsToDelete = readStorePaths<PathSet>(*store, from);
         options.ignoreLiveness = readInt(from);
         options.maxFreed = readLongLong(from);
-        readInt(from); // obsolete field
-        if (GET_PROTOCOL_MINOR(clientVersion) >= 5) {
-            /* removed options */
-            readInt(from);
-            readInt(from);
-        }
+        // obsolete fields
+        readInt(from);
+        readInt(from);
+        readInt(from);
 
         GCResults results;
 
@@ -440,17 +438,12 @@ static void performOp(ref<LocalStore> store, bool trusted, unsigned int clientVe
         verbosity = (Verbosity) readInt(from);
         settings.set("build-max-jobs", std::to_string(readInt(from)));
         settings.set("build-max-silent-time", std::to_string(readInt(from)));
-        if (GET_PROTOCOL_MINOR(clientVersion) >= 2)
-            settings.useBuildHook = readInt(from) != 0;
-        if (GET_PROTOCOL_MINOR(clientVersion) >= 4) {
-            settings.verboseBuild = lvlError == (Verbosity) readInt(from);
-            readInt(from); // obsolete logType
-            readInt(from); // obsolete printBuildTrace
-        }
-        if (GET_PROTOCOL_MINOR(clientVersion) >= 6)
-            settings.set("build-cores", std::to_string(readInt(from)));
-        if (GET_PROTOCOL_MINOR(clientVersion) >= 10)
-            settings.set("build-use-substitutes", readInt(from) ? "true" : "false");
+        settings.useBuildHook = readInt(from) != 0;
+        settings.verboseBuild = lvlError == (Verbosity) readInt(from);
+        readInt(from); // obsolete logType
+        readInt(from); // obsolete printBuildTrace
+        settings.set("build-cores", std::to_string(readInt(from)));
+        settings.set("build-use-substitutes", readInt(from) ? "true" : "false");
         if (GET_PROTOCOL_MINOR(clientVersion) >= 12) {
             unsigned int n = readInt(from);
             for (unsigned int i = 0; i < n; i++) {
@@ -478,9 +471,7 @@ static void performOp(ref<LocalStore> store, bool trusted, unsigned int clientVe
         if (i == infos.end())
             to << 0;
         else {
-            to << 1 << i->second.deriver << i->second.references << i->second.downloadSize;
-            if (GET_PROTOCOL_MINOR(clientVersion) >= 7)
-                to << i->second.narSize;
+            to << 1 << i->second.deriver << i->second.references << i->second.downloadSize << i->second.narSize;
         }
         break;
     }
@@ -524,7 +515,8 @@ static void performOp(ref<LocalStore> store, bool trusted, unsigned int clientVe
                << info->registrationTime << info->narSize;
             if (GET_PROTOCOL_MINOR(clientVersion) >= 16) {
                 to << info->ultimate
-                   << info->sigs;
+                   << info->sigs
+                   << info->ca;
             }
         } else {
             assert(GET_PROTOCOL_MINOR(clientVersion) >= 17);
@@ -585,11 +577,13 @@ static void processConnection(bool trusted)
     to.flush();
     unsigned int clientVersion = readInt(from);
 
+    if (clientVersion < 0x10a)
+        throw Error("the Nix client version is too old");
+
     if (GET_PROTOCOL_MINOR(clientVersion) >= 14 && readInt(from))
         setAffinityTo(readInt(from));
 
-    if (GET_PROTOCOL_MINOR(clientVersion) >= 11)
-        readInt(from); // obsolete reserveSpace
+    readInt(from); // obsolete reserveSpace
 
     /* Send startup error messages to the client. */
     startWork();
@@ -636,10 +630,10 @@ static void processConnection(bool trusted)
                    during addTextToStore() / importPath().  If that
                    happens, just send the error message and exit. */
                 bool errorAllowed = canSendStderr;
-                stopWork(false, e.msg(), GET_PROTOCOL_MINOR(clientVersion) >= 8 ? e.status : 0);
+                stopWork(false, e.msg(), e.status);
                 if (!errorAllowed) throw;
             } catch (std::bad_alloc & e) {
-                stopWork(false, "Nix daemon out of memory", GET_PROTOCOL_MINOR(clientVersion) >= 8 ? 1 : 0);
+                stopWork(false, "Nix daemon out of memory", 1);
                 throw;
             }
 
@@ -653,7 +647,7 @@ static void processConnection(bool trusted)
         printMsg(lvlDebug, format("%1% operations") % opCount);
 
     } catch (Error & e) {
-        stopWork(false, e.msg(), GET_PROTOCOL_MINOR(clientVersion) >= 8 ? 1 : 0);
+        stopWork(false, e.msg(), 1);
         to.flush();
         return;
     }
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index 3f0486bb6541..6a557e8ac9db 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -10,6 +10,7 @@
 #include "store-api.hh"
 #include "user-env.hh"
 #include "util.hh"
+#include "json.hh"
 #include "value-to-json.hh"
 #include "xml-writer.hh"
 
@@ -128,9 +129,8 @@ static void getAllExprs(EvalState & state,
             }
             attrs.insert(attrName);
             /* Load the expression on demand. */
-            Value & vFun(*state.allocValue());
+            Value & vFun = state.getBuiltin("import");
             Value & vArg(*state.allocValue());
-            state.getBuiltin("import", vFun);
             mkString(vArg, path2);
             if (v.attrs->size() == v.attrs->capacity())
                 throw Error(format("too many Nix expressions in directory ‘%1%’") % path);
@@ -861,26 +861,24 @@ static VersionDiff compareVersionAgainstSet(
 
 static void queryJSON(Globals & globals, vector<DrvInfo> & elems)
 {
-    JSONObject topObj(cout);
+    JSONObject topObj(cout, true);
     for (auto & i : elems) {
-        topObj.attr(i.attrPath);
-        JSONObject pkgObj(cout);
+        JSONObject pkgObj = topObj.object(i.attrPath);
 
         pkgObj.attr("name", i.name);
         pkgObj.attr("system", i.system);
 
-        pkgObj.attr("meta");
-        JSONObject metaObj(cout);
+        JSONObject metaObj = pkgObj.object("meta");
         StringSet metaNames = i.queryMetaNames();
         for (auto & j : metaNames) {
-            metaObj.attr(j);
+            auto placeholder = metaObj.placeholder(j);
             Value * v = i.queryMeta(j);
             if (!v) {
                 printMsg(lvlError, format("derivation ‘%1%’ has invalid meta attribute ‘%2%’") % i.name % j);
-                cout << "null";
+                placeholder.write(nullptr);
             } else {
                 PathSet context;
-                printValueAsJSON(*globals.state, true, *v, cout, context);
+                printValueAsJSON(*globals.state, true, *v, placeholder, context);
             }
         }
     }
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index 8532101a174d..e8b56f929b13 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -483,6 +483,10 @@ static void opReadLog(Strings opFlags, Strings opArgs)
 
     RunPager pager;
 
+    // FIXME: move getting logs into Store.
+    auto store2 = std::dynamic_pointer_cast<LocalFSStore>(store);
+    if (!store2) throw Error(format("store ‘%s’ does not support reading logs") % store->getUri());
+
     for (auto & i : opArgs) {
         Path path = useDeriver(store->followLinksToStorePath(i));
 
@@ -493,8 +497,8 @@ static void opReadLog(Strings opFlags, Strings opArgs)
 
             Path logPath =
                 j == 0
-                ? (format("%1%/%2%/%3%/%4%") % settings.nixLogDir % drvsLogDir % string(baseName, 0, 2) % string(baseName, 2)).str()
-                : (format("%1%/%2%/%3%") % settings.nixLogDir % drvsLogDir % baseName).str();
+                ? (format("%1%/%2%/%3%/%4%") % store2->logDir % drvsLogDir % string(baseName, 0, 2) % string(baseName, 2)).str()
+                : (format("%1%/%2%/%3%") % store2->logDir % drvsLogDir % baseName).str();
             Path logBz2Path = logPath + ".bz2";
 
             if (pathExists(logPath)) {
diff --git a/src/nix/installables.cc b/src/nix/installables.cc
index 6257c7679af9..8341bbc5a3a4 100644
--- a/src/nix/installables.cc
+++ b/src/nix/installables.cc
@@ -9,6 +9,41 @@
 
 namespace nix {
 
+Value * MixInstallables::buildSourceExpr(EvalState & state)
+{
+    Value * vRoot = state.allocValue();
+
+    if (file != "") {
+        Expr * e = state.parseExprFromFile(resolveExprPath(lookupFileArg(state, file)));
+        state.eval(e, *vRoot);
+    }
+
+    else {
+
+        /* Construct the installation source from $NIX_PATH. */
+
+        auto searchPath = state.getSearchPath();
+
+        state.mkAttrs(*vRoot, searchPath.size());
+
+        std::unordered_set<std::string> seen;
+
+        for (auto & i : searchPath) {
+            if (i.first == "") continue;
+            if (seen.count(i.first)) continue;
+            seen.insert(i.first);
+            if (!pathExists(i.second)) continue;
+            mkApp(*state.allocAttr(*vRoot, state.symbols.create(i.first)),
+                state.getBuiltin("import"),
+                mkString(*state.allocValue(), i.second));
+        }
+
+        vRoot->attrs->sort();
+    }
+
+    return vRoot;
+}
+
 UserEnvElems MixInstallables::evalInstallables(ref<Store> store)
 {
     UserEnvElems res;
@@ -46,15 +81,12 @@ UserEnvElems MixInstallables::evalInstallables(ref<Store> store)
 
             EvalState state({}, store);
 
-            Expr * e = state.parseExprFromFile(resolveExprPath(lookupFileArg(state, file)));
-
-            Value vRoot;
-            state.eval(e, vRoot);
+            auto vRoot = buildSourceExpr(state);
 
             std::map<string, string> autoArgs_;
             Bindings & autoArgs(*evalAutoArgs(state, autoArgs_));
 
-            Value & v(*findAlongAttrPath(state, installable, autoArgs, vRoot));
+            Value & v(*findAlongAttrPath(state, installable, autoArgs, *vRoot));
             state.forceValue(v);
 
             DrvInfos drvs;
diff --git a/src/nix/installables.hh b/src/nix/installables.hh
index 5eb897d46148..a58f7dc59bb4 100644
--- a/src/nix/installables.hh
+++ b/src/nix/installables.hh
@@ -21,10 +21,13 @@ struct UserEnvElem
 
 typedef std::vector<UserEnvElem> UserEnvElems;
 
+struct Value;
+class EvalState;
+
 struct MixInstallables : virtual Args
 {
     Strings installables;
-    Path file = "<nixpkgs>";
+    Path file;
 
     MixInstallables()
     {
@@ -33,6 +36,13 @@ struct MixInstallables : virtual Args
     }
 
     UserEnvElems evalInstallables(ref<Store> store);
+
+    /* Return a value representing the Nix expression from which we
+       are installing. This is either the file specified by ‘--file’,
+       or an attribute set constructed from $NIX_PATH, e.g. ‘{ nixpkgs
+       = import ...; bla = import ...; }’. */
+    Value * buildSourceExpr(EvalState & state);
+
 };
 
 }
diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc
index c61fe7ff1e00..d8b6232d47ca 100644
--- a/src/nix/path-info.cc
+++ b/src/nix/path-info.cc
@@ -1,6 +1,7 @@
 #include "command.hh"
 #include "shared.hh"
 #include "store-api.hh"
+#include "json.hh"
 
 #include <iomanip>
 #include <algorithm>
@@ -12,12 +13,14 @@ struct CmdPathInfo : StorePathsCommand
     bool showSize = false;
     bool showClosureSize = false;
     bool showSigs = false;
+    bool json = false;
 
     CmdPathInfo()
     {
         mkFlag('s', "size", "print size of the NAR dump of each path", &showSize);
         mkFlag('S', "closure-size", "print sum size of the NAR dumps of the closure of each path", &showClosureSize);
         mkFlag(0, "sigs", "show signatures", &showSigs);
+        mkFlag(0, "json", "produce JSON output", &json);
     }
 
     std::string name() override
@@ -41,6 +44,10 @@ struct CmdPathInfo : StorePathsCommand
                 "To check the existence of a path in a binary cache:",
                 "nix path-info -r /nix/store/7qvk5c91...-geeqie-1.1 --store https://cache.nixos.org/"
             },
+            Example{
+                "To print the 10 most recently added paths (using --json and the jq(1) command):",
+                "nix path-info --all --json | jq -r 'sort_by(.registrationTime)[-11:-1][].path'"
+            },
         };
     }
 
@@ -50,35 +57,85 @@ struct CmdPathInfo : StorePathsCommand
         for (auto & storePath : storePaths)
             pathLen = std::max(pathLen, storePath.size());
 
-        for (auto storePath : storePaths) {
-            auto info = store->queryPathInfo(storePath);
-            storePath = info->path; // FIXME: screws up padding
+        auto getClosureSize = [&](const Path & storePath) -> unsigned long long {
+            unsigned long long totalSize = 0;
+            PathSet closure;
+            store->computeFSClosure(storePath, closure, false, false);
+            for (auto & p : closure)
+                totalSize += store->queryPathInfo(p)->narSize;
+            return totalSize;
+        };
 
-            std::cout << storePath << std::string(std::max(0, (int) pathLen - (int) storePath.size()), ' ');
+        if (json) {
+            JSONList jsonRoot(std::cout, true);
 
-            if (showSize) {
-                std::cout << '\t' << std::setw(11) << info->narSize;
-            }
+            for (auto storePath : storePaths) {
+                auto info = store->queryPathInfo(storePath);
+                storePath = info->path;
+
+                auto jsonPath = jsonRoot.object();
+                jsonPath
+                    .attr("path", storePath)
+                    .attr("narHash", info->narHash.to_string())
+                    .attr("narSize", info->narSize);
+
+                if (showClosureSize)
+                    jsonPath.attr("closureSize", getClosureSize(storePath));
+
+                if (info->deriver != "")
+                    jsonPath.attr("deriver", info->deriver);
+
+                {
+                    auto jsonRefs = jsonPath.list("references");
+                    for (auto & ref : info->references)
+                        jsonRefs.elem(ref);
+                }
+
+                if (info->registrationTime)
+                    jsonPath.attr("registrationTime", info->registrationTime);
+
+                if (info->ultimate)
+                    jsonPath.attr("ultimate", info->ultimate);
 
-            if (showClosureSize) {
-                size_t totalSize = 0;
-                PathSet closure;
-                store->computeFSClosure(storePath, closure, false, false);
-                for (auto & p : closure)
-                    totalSize += store->queryPathInfo(p)->narSize;
-                std::cout << '\t' << std::setw(11) << totalSize;
+                if (info->ca != "")
+                    jsonPath.attr("ca", info->ca);
+
+                if (!info->sigs.empty()) {
+                    auto jsonSigs = jsonPath.list("signatures");
+                    for (auto & sig : info->sigs)
+                        jsonSigs.elem(sig);
+                }
             }
+        }
+
+        else {
+
+            for (auto storePath : storePaths) {
+                auto info = store->queryPathInfo(storePath);
+                storePath = info->path; // FIXME: screws up padding
+
+                std::cout << storePath << std::string(std::max(0, (int) pathLen - (int) storePath.size()), ' ');
 
-            if (showSigs) {
-                std::cout << '\t';
-                Strings ss;
-                if (info->ultimate) ss.push_back("ultimate");
-                for (auto & sig : info->sigs) ss.push_back(sig);
-                std::cout << concatStringsSep(" ", ss);
+                if (showSize)
+                    std::cout << '\t' << std::setw(11) << info->narSize;
+
+                if (showClosureSize)
+                    std::cout << '\t' << std::setw(11) << getClosureSize(storePath);
+
+                if (showSigs) {
+                    std::cout << '\t';
+                    Strings ss;
+                    if (info->ultimate) ss.push_back("ultimate");
+                    if (info->ca != "") ss.push_back("ca:" + info->ca);
+                    for (auto & sig : info->sigs) ss.push_back(sig);
+                    std::cout << concatStringsSep(" ", ss);
+                }
+
+                std::cout << std::endl;
             }
 
-            std::cout << std::endl;
         }
+
     }
 };
 
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index fd904f465687..f2b6acdfbf0b 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -116,12 +116,16 @@ struct CmdVerify : StorePathsCommand
                             }
                         };
 
+                        if (info->isContentAddressed(*store)) validSigs = ValidPathInfo::maxSigs;
+
                         doSigs(info->sigs);
 
                         for (auto & store2 : substituters) {
                             if (validSigs >= actualSigsNeeded) break;
                             try {
-                                doSigs(store2->queryPathInfo(info->path)->sigs);
+                                auto info2 = store2->queryPathInfo(info->path);
+                                if (info2->isContentAddressed(*store)) validSigs = ValidPathInfo::maxSigs;
+                                doSigs(info2->sigs);
                             } catch (InvalidPath &) {
                             } catch (Error & e) {
                                 printMsg(lvlError, format(ANSI_RED "error:" ANSI_NORMAL " %s") % e.what());
diff --git a/src/resolve-system-dependencies/local.mk b/src/resolve-system-dependencies/local.mk
new file mode 100644
index 000000000000..8792a4a252fa
--- /dev/null
+++ b/src/resolve-system-dependencies/local.mk
@@ -0,0 +1,11 @@
+ifeq ($(OS), Darwin)
+  programs += resolve-system-dependencies
+endif
+
+resolve-system-dependencies_DIR := $(d)
+
+resolve-system-dependencies_INSTALL_DIR := $(libexecdir)/nix
+
+resolve-system-dependencies_LIBS := libstore libmain libutil libformat
+
+resolve-system-dependencies_SOURCES := $(d)/resolve-system-dependencies.cc
diff --git a/src/resolve-system-dependencies/resolve-system-dependencies.cc b/src/resolve-system-dependencies/resolve-system-dependencies.cc
new file mode 100644
index 000000000000..a5f0cd7b3b70
--- /dev/null
+++ b/src/resolve-system-dependencies/resolve-system-dependencies.cc
@@ -0,0 +1,194 @@
+#include "derivations.hh"
+#include "globals.hh"
+#include "shared.hh"
+#include "store-api.hh"
+#include <sys/utsname.h>
+#include <algorithm>
+#include <iostream>
+#include <fstream>
+#include <sys/mman.h>
+#include <fcntl.h>
+#include <mach-o/loader.h>
+#include <mach-o/swap.h>
+
+#define DO_SWAP(x, y) ((x) ? OSSwapInt32(y) : (y))
+
+using namespace nix;
+
+static auto cacheDir = Path{};
+
+Path resolveCacheFile(Path lib) {
+    std::replace(lib.begin(), lib.end(), '/', '%');
+    return cacheDir + "/" + lib;
+}
+
+std::set<string> readCacheFile(const Path & file) {
+    return tokenizeString<set<string>>(readFile(file), "\n");
+}
+
+void writeCacheFile(const Path & file, std::set<string> & deps) {
+    std::ofstream fp;
+    fp.open(file);
+    for (auto & d : deps) {
+        fp << d << "\n";
+    }
+    fp.close();
+}
+
+std::string findDylibName(bool should_swap, ptrdiff_t dylib_command_start) {
+    struct dylib_command *dylc = (struct dylib_command*)dylib_command_start;
+    return std::string((char*)(dylib_command_start + DO_SWAP(should_swap, dylc->dylib.name.offset)));
+}
+
+std::set<std::string> runResolver(const Path & filename) {
+    int fd = open(filename.c_str(), O_RDONLY);
+    struct stat s;
+    fstat(fd, &s);
+    void *obj = mmap(NULL, s.st_size, PROT_READ, MAP_SHARED, fd, 0);
+
+    ptrdiff_t mach64_offset = 0;
+
+    uint32_t magic = ((struct mach_header_64*)obj)->magic;
+    if(magic == FAT_CIGAM || magic == FAT_MAGIC) {
+        bool should_swap = magic == FAT_CIGAM;
+        uint32_t narches = DO_SWAP(should_swap, ((struct fat_header*)obj)->nfat_arch);
+
+        for(uint32_t iter = 0; iter < narches; iter++) {
+            ptrdiff_t header_offset = (ptrdiff_t)obj + sizeof(struct fat_header) + sizeof(struct fat_arch) * iter;
+            struct fat_arch* arch = (struct fat_arch*)header_offset;
+            if(DO_SWAP(should_swap, arch->cputype) == CPU_TYPE_X86_64) {
+                mach64_offset = (ptrdiff_t)DO_SWAP(should_swap, arch->offset);
+                break;
+            }
+        }
+        if (mach64_offset == 0) {
+            printMsg(lvlError, format("Could not find any mach64 blobs in file ‘%1%’, continuing...") % filename);
+            return std::set<string>();
+        }
+    } else if (magic == MH_MAGIC_64 || magic == MH_CIGAM_64) {
+        mach64_offset = 0;
+    } else {
+        printMsg(lvlError, format("Object file has unknown magic number ‘%1%’, skipping it...") % magic);
+        return std::set<string>();
+    }
+
+    ptrdiff_t mach_header_offset = (ptrdiff_t)obj + mach64_offset;
+    struct mach_header_64 *m_header = (struct mach_header_64 *)mach_header_offset;
+
+    bool should_swap = magic == MH_CIGAM_64;
+    ptrdiff_t cmd_offset = mach_header_offset + sizeof(struct mach_header_64);
+
+    std::set<string> libs;
+    for(uint32_t i = 0; i < DO_SWAP(should_swap, m_header->ncmds); i++) {
+        struct load_command *cmd = (struct load_command*)cmd_offset;
+        switch(DO_SWAP(should_swap, cmd->cmd)) {
+            case LC_LOAD_UPWARD_DYLIB:
+            case LC_LOAD_DYLIB:
+            case LC_REEXPORT_DYLIB:
+                libs.insert(findDylibName(should_swap, cmd_offset));
+                break;
+        }
+        cmd_offset += DO_SWAP(should_swap, cmd->cmdsize);
+    }
+
+    return libs;
+}
+
+bool isSymlink(const Path & path) {
+    struct stat st;
+    if(lstat(path.c_str(), &st))
+        throw SysError(format("getting attributes of path ‘%1%’") % path);
+
+    return S_ISLNK(st.st_mode);
+}
+
+Path resolveSymlink(const Path & path) {
+    char buf[PATH_MAX];
+    ssize_t len = readlink(path.c_str(), buf, sizeof(buf) - 1);
+    if(len != -1) {
+        buf[len] = 0;
+        return Path(buf);
+    } else {
+        throw SysError(format("readlink('%1%')") % path);
+    }
+}
+
+std::set<string> resolveTree(const Path & path, PathSet & deps) {
+    std::set<string> results;
+    if(deps.find(path) != deps.end()) {
+        return std::set<string>();
+    }
+    deps.insert(path);
+    for (auto & lib : runResolver(path)) {
+        results.insert(lib);
+        for (auto & p : resolveTree(lib, deps)) {
+            results.insert(p);
+        }
+    }
+    return results;
+}
+
+std::set<string> getPath(const Path & path) {
+    Path cacheFile = resolveCacheFile(path);
+    if(pathExists(cacheFile)) {
+        return readCacheFile(cacheFile);
+    }
+
+    std::set<string> deps;
+    std::set<string> paths;
+    paths.insert(path);
+
+    Path next_path = Path(path);
+    while(isSymlink(next_path)) {
+        next_path = resolveSymlink(next_path);
+        paths.insert(next_path);
+    }
+
+    for(auto & t : resolveTree(next_path, deps)) {
+        paths.insert(t);
+    }
+
+    writeCacheFile(cacheFile, paths);
+
+    return paths;
+}
+
+int main(int argc, char ** argv) {
+    return handleExceptions(argv[0], [&]() {
+        initNix();
+
+        struct utsname _uname;
+
+        uname(&_uname);
+
+        auto cacheParentDir = (format("%1%/dependency-maps") % settings.nixStateDir).str();
+
+        cacheDir = (format("%1%/%2%-%3%-%4%")
+                % cacheParentDir
+                % _uname.machine
+                % _uname.sysname
+                % _uname.release).str();
+
+        mkdir(cacheParentDir.c_str(), 0755);
+        mkdir(cacheDir.c_str(), 0755);
+
+        auto store = openStore();
+
+        auto drv = store->derivationFromPath(Path(argv[1]));
+        Strings impurePaths = tokenizeString<Strings>(get(drv.env, "__impureHostDeps"));
+
+        std::set<string> all_paths;
+
+        for (auto & path : impurePaths) {
+            for(auto & p : getPath(path)) {
+                all_paths.insert(p);
+            }
+        }
+
+        std::cout << "extra-chroot-dirs" << std::endl;
+        for(auto & path : all_paths) {
+            std::cout << path << std::endl;
+        }
+        std::cout << std::endl;
+    });
+}
diff --git a/tests/binary-cache.sh b/tests/binary-cache.sh
index 96cab6ad4269..4ce428f643e5 100644
--- a/tests/binary-cache.sh
+++ b/tests/binary-cache.sh
@@ -6,7 +6,7 @@ clearCache
 # Create the binary cache.
 outPath=$(nix-build dependencies.nix --no-out-link)
 
-nix-push --dest $cacheDir $outPath
+nix copy --recursive --to file://$cacheDir $outPath
 
 
 basicTests() {
@@ -58,7 +58,7 @@ unset _NIX_FORCE_HTTP_BINARY_CACHE_STORE
 # Test whether Nix notices if the NAR doesn't match the hash in the NAR info.
 clearStore
 
-nar=$(ls $cacheDir/*.nar.xz | head -n1)
+nar=$(ls $cacheDir/nar/*.nar.xz | head -n1)
 mv $nar $nar.good
 mkdir -p $TEST_ROOT/empty
 nix-store --dump $TEST_ROOT/empty | xz > $nar
@@ -117,7 +117,7 @@ badKey="$(cat $TEST_ROOT/pk2)"
 res=($(nix-store --generate-binary-cache-key foo.nixos.org-1 $TEST_ROOT/sk3 $TEST_ROOT/pk3))
 otherKey="$(cat $TEST_ROOT/pk3)"
 
-nix-push --dest $cacheDir --key-file $TEST_ROOT/sk1 $outPath
+nix copy --recursive --to file://$cacheDir?secret-key=$TEST_ROOT/sk1 $outPath
 
 
 # Downloading should fail if we don't provide a key.
diff --git a/tests/common.sh.in b/tests/common.sh.in
index 097d94bb1eea..316d5f6896bb 100644
--- a/tests/common.sh.in
+++ b/tests/common.sh.in
@@ -12,7 +12,6 @@ fi
 export NIX_LOCALSTATE_DIR=$TEST_ROOT/var
 export NIX_LOG_DIR=$TEST_ROOT/var/log/nix
 export NIX_STATE_DIR=$TEST_ROOT/var/nix
-export NIX_DB_DIR=$TEST_ROOT/db
 export NIX_CONF_DIR=$TEST_ROOT/etc
 export NIX_MANIFESTS_DIR=$TEST_ROOT/var/nix/manifests
 export _NIX_TEST_SHARED=$TEST_ROOT/shared
@@ -51,12 +50,10 @@ clearStore() {
     chmod -R +w "$NIX_STORE_DIR"
     rm -rf "$NIX_STORE_DIR"
     mkdir "$NIX_STORE_DIR"
-    rm -rf "$NIX_DB_DIR"
-    mkdir "$NIX_DB_DIR"
+    rm -rf "$NIX_STATE_DIR"
+    mkdir "$NIX_STATE_DIR"
     nix-store --init
     clearProfiles
-    rm -f "$NIX_STATE_DIR"/gcroots/auto/*
-    rm -f "$NIX_STATE_DIR"/gcroots/ref
 }
 
 clearCache() {
diff --git a/tests/config.nix b/tests/config.nix
index 6244a15fa48a..76388fdd5b95 100644
--- a/tests/config.nix
+++ b/tests/config.nix
@@ -13,7 +13,7 @@ rec {
     derivation ({
       inherit system;
       builder = shell;
-      args = ["-e" args.builder];
+      args = ["-e" args.builder or (builtins.toFile "builder.sh" "eval \"$buildCommand\"")];
       PATH = path;
     } // removeAttrs args ["builder" "meta"])
     // { meta = args.meta or {}; };
diff --git a/tests/dump-db.sh b/tests/dump-db.sh
index 57c8c401600d..d6eea42aa04e 100644
--- a/tests/dump-db.sh
+++ b/tests/dump-db.sh
@@ -8,8 +8,7 @@ deps="$(nix-store -qR $TEST_ROOT/result)"
 
 nix-store --dump-db > $TEST_ROOT/dump
 
-rm -rf $NIX_DB_DIR
-mkdir $NIX_DB_DIR
+rm -rf $NIX_STATE_DIR/db
 
 nix-store --load-db < $TEST_ROOT/dump
 
diff --git a/tests/init.sh b/tests/init.sh
index 5be999e4b1be..4571b75b859e 100644
--- a/tests/init.sh
+++ b/tests/init.sh
@@ -1,7 +1,5 @@
 source common.sh
 
-echo "NIX_STORE_DIR=$NIX_STORE_DIR NIX_DB_DIR=$NIX_DB_DIR"
-
 test -n "$TEST_ROOT"
 if test -d "$TEST_ROOT"; then
     chmod -R u+w "$TEST_ROOT"
@@ -13,7 +11,6 @@ mkdir "$NIX_STORE_DIR"
 mkdir "$NIX_LOCALSTATE_DIR"
 mkdir -p "$NIX_LOG_DIR"/drvs
 mkdir "$NIX_STATE_DIR"
-mkdir "$NIX_DB_DIR"
 mkdir "$NIX_CONF_DIR"
 
 cat > "$NIX_CONF_DIR"/nix.conf <<EOF
@@ -28,6 +25,6 @@ EOF
 nix-store --init
 
 # Did anything happen?
-test -e "$NIX_DB_DIR"/db.sqlite
+test -e "$NIX_STATE_DIR"/db/db.sqlite
 
 echo 'Hello World' > ./dummy
diff --git a/tests/install-package.sh b/tests/install-package.sh
deleted file mode 100644
index 1916f72713e2..000000000000
--- a/tests/install-package.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-source common.sh
-
-drvPath=$(nix-instantiate ./dependencies.nix)
-outPath=$(nix-store -r $drvPath)
-nix-push --dest $cacheDir $outPath
-
-clearStore
-clearProfiles
-
-cat > $TEST_ROOT/foo.nixpkg <<EOF
-NIXPKG1 - simple $system $drvPath $outPath file://$cacheDir
-EOF
-
-nix-install-package --non-interactive -p $profiles/test $TEST_ROOT/foo.nixpkg
-test "$(nix-env -p $profiles/test -q '*' | wc -l)" -eq 1
-
-clearProfiles
-
-nix-install-package --non-interactive -p $profiles/test --url file://$TEST_ROOT/foo.nixpkg
-test "$(nix-env -p $profiles/test -q '*' | wc -l)" -eq 1
diff --git a/tests/lang/eval-okay-partition.exp b/tests/lang/eval-okay-partition.exp
new file mode 100644
index 000000000000..cd8b8b020c05
--- /dev/null
+++ b/tests/lang/eval-okay-partition.exp
@@ -0,0 +1 @@
+{ right = [ 0 2 4 6 8 10 100 102 104 106 108 110 ]; wrong = [ 1 3 5 7 9 101 103 105 107 109 ]; }
diff --git a/tests/lang/eval-okay-partition.nix b/tests/lang/eval-okay-partition.nix
new file mode 100644
index 000000000000..846d2ce49486
--- /dev/null
+++ b/tests/lang/eval-okay-partition.nix
@@ -0,0 +1,5 @@
+with import ./lib.nix;
+
+builtins.partition
+  (x: x / 2 * 2 == x)
+  (builtins.concatLists [ (range 0 10) (range 100 110) ])
diff --git a/tests/local.mk b/tests/local.mk
index 7c5a553d39e0..2ca52144baee 100644
--- a/tests/local.mk
+++ b/tests/local.mk
@@ -3,14 +3,15 @@ check:
 
 nix_tests = \
   init.sh hash.sh lang.sh add.sh simple.sh dependencies.sh \
-  build-hook.sh nix-push.sh gc.sh gc-concurrent.sh \
+  build-hook.sh gc.sh gc-concurrent.sh \
   referrers.sh user-envs.sh logging.sh nix-build.sh misc.sh fixed.sh \
-  gc-runtime.sh install-package.sh check-refs.sh filter-source.sh \
+  gc-runtime.sh check-refs.sh filter-source.sh \
   remote-store.sh export.sh export-graph.sh \
   timeout.sh secure-drv-outputs.sh nix-channel.sh \
   multiple-outputs.sh import-derivation.sh fetchurl.sh optimise-store.sh \
   binary-cache.sh nix-profile.sh repair.sh dump-db.sh case-hack.sh \
-  check-reqs.sh pass-as-file.sh tarball.sh restricted.sh
+  check-reqs.sh pass-as-file.sh tarball.sh restricted.sh \
+  placeholders.sh
   # parallel.sh
 
 install-tests += $(foreach x, $(nix_tests), tests/$(x))
diff --git a/tests/nix-channel.sh b/tests/nix-channel.sh
index 2ec986dd415b..553ada51d9f7 100644
--- a/tests/nix-channel.sh
+++ b/tests/nix-channel.sh
@@ -15,7 +15,7 @@ nix-channel --remove xyzzy
 # Create a channel.
 rm -rf $TEST_ROOT/foo
 mkdir -p $TEST_ROOT/foo
-nix-push --dest $TEST_ROOT/foo --manifest --bzip2 $(nix-store -r $(nix-instantiate dependencies.nix))
+nix copy --recursive --to file://$TEST_ROOT/foo?compression="bzip2" $(nix-store -r $(nix-instantiate dependencies.nix))
 rm -rf $TEST_ROOT/nixexprs
 mkdir -p $TEST_ROOT/nixexprs
 cp config.nix dependencies.nix dependencies.builder*.sh $TEST_ROOT/nixexprs/
diff --git a/tests/nix-push.sh b/tests/nix-push.sh
deleted file mode 100644
index 8ea59516c62c..000000000000
--- a/tests/nix-push.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-source common.sh
-
-clearStore
-
-drvPath=$(nix-instantiate dependencies.nix)
-outPath=$(nix-store -r $drvPath)
-
-echo "pushing $drvPath"
-
-mkdir -p $TEST_ROOT/cache
-
-nix-push --dest $TEST_ROOT/cache --manifest $drvPath --bzip2
diff --git a/tests/placeholders.sh b/tests/placeholders.sh
new file mode 100644
index 000000000000..071cfe2dc893
--- /dev/null
+++ b/tests/placeholders.sh
@@ -0,0 +1,22 @@
+source common.sh
+
+clearStore
+
+nix-build --no-out-link -E '
+  with import ./config.nix;
+
+  mkDerivation {
+    name = "placeholders";
+    outputs = [ "out" "bin" "dev" ];
+    buildCommand = "
+      echo foo1 > $out
+      echo foo2 > $bin
+      echo foo3 > $dev
+      [[ $(cat ${placeholder "out"}) = foo1 ]]
+      [[ $(cat ${placeholder "bin"}) = foo2 ]]
+      [[ $(cat ${placeholder "dev"}) = foo3 ]]
+    ";
+  }
+'
+
+echo XYZZY
diff --git a/tests/referrers.sh b/tests/referrers.sh
index 5c1ef20cfeb4..8ab8e5ddfe87 100644
--- a/tests/referrers.sh
+++ b/tests/referrers.sh
@@ -30,7 +30,7 @@ echo "collecting garbage..."
 ln -sfn $reference "$NIX_STATE_DIR"/gcroots/ref
 nix-store --gc
 
-if [ -n "$(type -p sqlite3)" -a "$(sqlite3 $NIX_DB_DIR/db.sqlite 'select count(*) from Refs')" -ne 0 ]; then
+if [ -n "$(type -p sqlite3)" -a "$(sqlite3 $NIX_STATE_DIR/db/db.sqlite 'select count(*) from Refs')" -ne 0 ]; then
     echo "referrers not cleaned up"
     exit 1
 fi
diff --git a/tests/remote-store.sh b/tests/remote-store.sh
index b3908717a40e..f2f2806d022d 100644
--- a/tests/remote-store.sh
+++ b/tests/remote-store.sh
@@ -4,7 +4,7 @@ clearStore
 
 startDaemon
 
-$SHELL ./user-envs.sh
+storeCleared=1 $SHELL ./user-envs.sh
 
 nix-store --dump-db > $TEST_ROOT/d1
 NIX_REMOTE= nix-store --dump-db > $TEST_ROOT/d2
diff --git a/tests/repair.sh b/tests/repair.sh
index 92f2f8fe60a5..782838704da7 100644
--- a/tests/repair.sh
+++ b/tests/repair.sh
@@ -46,7 +46,7 @@ fi
 # --verify can fix it.
 clearCache
 
-nix-push --dest $cacheDir $path
+nix copy --recursive --to file://$cacheDir $path
 
 chmod u+w $path2
 rm -rf $path2
diff --git a/tests/user-envs.sh b/tests/user-envs.sh
index 526c1267cefe..c4192fdc59b2 100644
--- a/tests/user-envs.sh
+++ b/tests/user-envs.sh
@@ -1,6 +1,9 @@
 source common.sh
 
-clearStore
+if [ -z "$storeCleared" ]; then
+    clearStore
+fi
+
 clearProfiles
 
 # Query installed: should be empty.