Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/build-remote.pl.in                 |  15
-rw-r--r--  scripts/download-from-binary-cache.pl.in   |  11
-rwxr-xr-x  scripts/download-using-manifests.pl.in     | 377
-rw-r--r--  scripts/install-nix-from-closure.sh        |  11
-rw-r--r--  scripts/local.mk                           |  14
-rwxr-xr-x  scripts/nix-build.in                       |  40
-rwxr-xr-x  scripts/nix-channel.in                     |  27
-rwxr-xr-x  scripts/nix-copy-closure.in                |  16
-rwxr-xr-x  scripts/nix-generate-patches.in            |  51
-rwxr-xr-x  scripts/nix-install-package.in             |  22
-rwxr-xr-x  scripts/nix-prefetch-url.in                | 132
-rw-r--r--  scripts/nix-profile.sh.in                  |  78
-rwxr-xr-x  scripts/nix-pull.in                        | 102
-rwxr-xr-x  scripts/nix-push.in                        |   9
-rw-r--r--  scripts/resolve-system-dependencies.pl.in  | 122
15 files changed, 250 insertions(+), 777 deletions(-)
diff --git a/scripts/build-remote.pl.in b/scripts/build-remote.pl.in
index c9b4a502e23d..4bf42941116f 100755
--- a/scripts/build-remote.pl.in
+++ b/scripts/build-remote.pl.in
@@ -16,8 +16,6 @@ binmode STDERR, ":encoding(utf8)";
 
 my $debug = defined $ENV{NIX_DEBUG_HOOK};
 
-setVerbosity(0); # make exportPath() less verbose
-
 
 # General operation:
 #
@@ -55,7 +53,7 @@ sub all { $_ || return 0 for @_; 1 }
 # Initialisation.
 my $loadIncreased = 0;
 
-my ($localSystem, $maxSilentTime, $printBuildTrace, $buildTimeout) = @ARGV;
+my ($localSystem, $maxSilentTime, $buildTimeout) = @ARGV;
 
 my $currentLoad = $ENV{"NIX_CURRENT_LOAD"} // "/run/nix/current-load";
 my $conf = $ENV{"NIX_REMOTE_SYSTEMS"} // "@sysconfdir@/nix/machines";
@@ -225,13 +223,6 @@ my @inputs = split /\s/, readline(STDIN);
 my @outputs = split /\s/, readline(STDIN);
 
 
-print STDERR "@ build-remote $drvPath $hostName\n" if $printBuildTrace;
-
-
-my $maybeSign = "";
-$maybeSign = "--sign" if -e "$Nix::Config::confDir/signing-key.sec";
-
-
 # Copy the derivation and its dependencies to the build machine.  This
 # is guarded by an exclusive lock per machine to prevent multiple
 # build-remote instances from copying to a machine simultaneously.
@@ -255,19 +246,17 @@ if ($@) {
     print STDERR "somebody is hogging $uploadLock, continuing...\n";
     unlink $uploadLock;
 }
-Nix::CopyClosure::copyToOpen($from, $to, $hostName, [ $drvPath, @inputs ], 0, 0, $maybeSign ne "");
+Nix::CopyClosure::copyToOpen($from, $to, $hostName, [ $drvPath, @inputs ], 0, 0);
 close UPLOADLOCK;
 
 
 # Perform the build.
 print STDERR "building ‘$drvPath’ on ‘$hostName’\n";
-print STDERR "@ build-remote-start $drvPath $hostName\n" if $printBuildTrace;
 writeInt(6, $to) or die; # == cmdBuildPaths
 writeStrings([$drvPath], $to);
 writeInt($maxSilentTime, $to);
 writeInt($buildTimeout, $to);
 my $res = readInt($from);
-print STDERR "@ build-remote-done $drvPath $hostName\n" if $printBuildTrace;
 if ($res != 0) {
     my $msg = decode("utf-8", readString($from));
     print STDERR "error: $msg on ‘$hostName’\n";
diff --git a/scripts/download-from-binary-cache.pl.in b/scripts/download-from-binary-cache.pl.in
index bb63eafca522..a4f858610aca 100644
--- a/scripts/download-from-binary-cache.pl.in
+++ b/scripts/download-from-binary-cache.pl.in
@@ -21,7 +21,7 @@ Nix::Config::readConfig;
 my @caches;
 my $gotCaches = 0;
 
-my $maxParallelRequests = int($Nix::Config::config{"binary-caches-parallel-connections"} // 150);
+my $maxParallelRequests = int($Nix::Config::config{"binary-caches-parallel-connections"} // 25);
 $maxParallelRequests = 1 if $maxParallelRequests < 1;
 
 my $ttlNegative = 24 * 3600; # when to purge negative lookups from the database
@@ -80,7 +80,12 @@ sub addRequest {
     $curl->setopt(CURLOPT_WRITEDATA, $fh);
     $curl->setopt(CURLOPT_FOLLOWLOCATION, 1);
     $curl->setopt(CURLOPT_CAINFO, $caBundle) if defined $caBundle;
-    $curl->setopt(CURLOPT_SSL_VERIFYPEER, 0) unless isTrue($Nix::Config::config{"verify-https-binary-caches"} // "1");
+
+    unless (isTrue($Nix::Config::config{"verify-https-binary-caches"} // "1")) {
+        $curl->setopt(CURLOPT_SSL_VERIFYPEER, 0);
+        $curl->setopt(CURLOPT_SSL_VERIFYHOST, 0);
+    }
+
     $curl->setopt(CURLOPT_USERAGENT, $userAgent);
     $curl->setopt(CURLOPT_NOBODY, 1) if $head;
     $curl->setopt(CURLOPT_FAILONERROR, 1);
@@ -561,7 +566,7 @@ sub downloadBinary {
         die if $requireSignedBinaryCaches && !defined $info->{signedBy};
         print STDERR "\n*** Downloading ‘$url’ ", ($requireSignedBinaryCaches ? "(signed by ‘$info->{signedBy}’) " : ""), "to ‘$storePath’...\n";
         checkURL $url;
-        if (system("$Nix::Config::curl --fail --location --insecure --connect-timeout $curlConnectTimeout -A '$userAgent' '$url' $decompressor | $Nix::Config::binDir/nix-store --restore $destPath") != 0) {
+        if (system("$Nix::Config::curl --fail --location --connect-timeout $curlConnectTimeout -A '$userAgent' '$url' $decompressor | $Nix::Config::binDir/nix-store --restore $destPath") != 0) {
             warn "download of ‘$url’ failed" . ($! ? ": $!" : "") . "\n";
             next;
         }
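
Not part of the patch, but a standalone sketch of why the new block sets both options: in libcurl, CURLOPT_SSL_VERIFYPEER controls certificate-chain verification, while CURLOPT_SSL_VERIFYHOST separately checks that the certificate matches the requested host name, so disabling only the former still lets connections to caches with mismatched certificates fail. The cache URL below is a placeholder.

use WWW::Curl::Easy;

my $curl = WWW::Curl::Easy->new;
$curl->setopt(CURLOPT_URL, "https://cache.example.org/nix-cache-info");  # placeholder URL
$curl->setopt(CURLOPT_SSL_VERIFYPEER, 0);  # skip certificate-chain verification
$curl->setopt(CURLOPT_SSL_VERIFYHOST, 0);  # skip host-name verification as well
my $ret = $curl->perform;                  # 0 on success
print STDERR "curl returned $ret\n";
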
diff --git a/scripts/download-using-manifests.pl.in b/scripts/download-using-manifests.pl.in
deleted file mode 100755
index 591cd6b43a3a..000000000000
--- a/scripts/download-using-manifests.pl.in
+++ /dev/null
@@ -1,377 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use Nix::Config;
-use Nix::Manifest;
-use Nix::Store;
-use Nix::Utils;
-use POSIX qw(strftime);
-
-STDOUT->autoflush(1);
-binmode STDERR, ":encoding(utf8)";
-
-my $logFile = "$Nix::Config::logDir/downloads";
-
-# For queries, skip expensive calls to nix-hash etc.  We're just
-# estimating the expected download size.
-my $fast = 1;
-
-# ‘--insecure’ is fine because Nix verifies the hash of the result.
-my $curl = "$Nix::Config::curl --fail --location --insecure";
-
-
-# Open the manifest cache and update it if necessary.
-my $dbh = updateManifestDB();
-exit 0 unless defined $dbh; # exit if there are no manifests
-print "\n";
-
-
-# $hashCache->{$algo}->{$path} yields the $algo-hash of $path.
-my $hashCache;
-
-
-sub parseHash {
-    my $hash = shift;
-    if ($hash =~ /^(.+):(.+)$/) {
-        return ($1, $2);
-    } else {
-        return ("md5", $hash);
-    }
-}
-
-
-# Compute the most efficient sequence of downloads to produce the
-# given path.
-sub computeSmallestDownload {
-    my $targetPath = shift;
-
-    # Build a graph of all store paths that might contribute to the
-    # construction of $targetPath, and the special node "start".  The
-    # edges are either patch operations, or downloads of full NAR
-    # files.  The latter edges only occur between "start" and a store
-    # path.
-    my %graph;
-
-    $graph{"start"} = {d => 0, pred => undef, edges => []};
-
-    my @queue = ();
-    my $queueFront = 0;
-    my %done;
-
-    sub addNode {
-        my $graph = shift;
-        my $u = shift;
-        $$graph{$u} = {d => 999999999999, pred => undef, edges => []}
-            unless defined $$graph{$u};
-    }
-
-    sub addEdge {
-        my $graph = shift;
-        my $u = shift;
-        my $v = shift;
-        my $w = shift;
-        my $type = shift;
-        my $info = shift;
-        addNode $graph, $u;
-        push @{$$graph{$u}->{edges}},
-            {weight => $w, start => $u, end => $v, type => $type, info => $info};
-        my $n = scalar @{$$graph{$u}->{edges}};
-    }
-
-    push @queue, $targetPath;
-
-    while ($queueFront < scalar @queue) {
-        my $u = $queue[$queueFront++];
-        next if defined $done{$u};
-        $done{$u} = 1;
-
-        addNode \%graph, $u;
-
-        # If the path already exists, it has distance 0 from the
-        # "start" node.
-        if (isValidPath($u)) {
-            addEdge \%graph, "start", $u, 0, "present", undef;
-        }
-
-        else {
-
-            # Add patch edges.
-            my $patchList = $dbh->selectall_arrayref(
-                "select * from Patches where storePath = ?",
-                { Slice => {} }, $u);
-
-            foreach my $patch (@{$patchList}) {
-                if (isValidPath($patch->{basePath})) {
-                    my ($baseHashAlgo, $baseHash) = parseHash $patch->{baseHash};
-
-                    my $hash = $hashCache->{$baseHashAlgo}->{$patch->{basePath}};
-                    if (!defined $hash) {
-                        $hash = $fast && $baseHashAlgo eq "sha256"
-                            ? queryPathHash($patch->{basePath})
-                            : hashPath($baseHashAlgo, $baseHashAlgo ne "md5", $patch->{basePath});
-                        $hash =~ s/.*://;
-                        $hashCache->{$baseHashAlgo}->{$patch->{basePath}} = $hash;
-                    }
-
-                    next if $hash ne $baseHash;
-                }
-                push @queue, $patch->{basePath};
-                addEdge \%graph, $patch->{basePath}, $u, $patch->{size}, "patch", $patch;
-            }
-
-            # Add NAR file edges to the start node.
-            my $narFileList = $dbh->selectall_arrayref(
-                "select * from NARs where storePath = ?",
-                { Slice => {} }, $u);
-
-            foreach my $narFile (@{$narFileList}) {
-                # !!! how to handle files whose size is not known in advance?
-                # For now, assume some arbitrary size (1 GB).
-                # This has the side-effect of preferring non-Hydra downloads.
-                addEdge \%graph, "start", $u, ($narFile->{size} || 1000000000), "narfile", $narFile;
-            }
-        }
-    }
-
-
-    # Run Dijkstra's shortest path algorithm to determine the shortest
-    # sequence of download and/or patch actions that will produce
-    # $targetPath.
-
-    my @todo = keys %graph;
-
-    while (scalar @todo > 0) {
-
-        # Remove the closest element from the todo list.
-        # !!! inefficient, use a priority queue
-        @todo = sort { -($graph{$a}->{d} <=> $graph{$b}->{d}) } @todo;
-        my $u = pop @todo;
-
-        my $u_ = $graph{$u};
-
-        foreach my $edge (@{$u_->{edges}}) {
-            my $v_ = $graph{$edge->{end}};
-            if ($v_->{d} > $u_->{d} + $edge->{weight}) {
-                $v_->{d} = $u_->{d} + $edge->{weight};
-                # Store the edge; to edge->start is actually the
-                # predecessor.
-                $v_->{pred} = $edge;
-            }
-        }
-    }
-
-
-    # Retrieve the shortest path from "start" to $targetPath.
-    my @path = ();
-    my $cur = $targetPath;
-    return () unless defined $graph{$targetPath}->{pred};
-    while ($cur ne "start") {
-        push @path, $graph{$cur}->{pred};
-        $cur = $graph{$cur}->{pred}->{start};
-    }
-
-    return @path;
-}
-
-
-# Parse the arguments.
-
-if ($ARGV[0] eq "--query") {
-
-    while (<STDIN>) {
-        chomp;
-        my ($cmd, @args) = split " ", $_;
-
-        if ($cmd eq "have") {
-            foreach my $storePath (@args) {
-                print "$storePath\n" if scalar @{$dbh->selectcol_arrayref("select 1 from NARs where storePath = ?", {}, $storePath)} > 0;
-            }
-            print "\n";
-        }
-
-        elsif ($cmd eq "info") {
-            foreach my $storePath (@args) {
-
-                my $infos = $dbh->selectall_arrayref(
-                    "select * from NARs where storePath = ?",
-                    { Slice => {} }, $storePath);
-
-                next unless scalar @{$infos} > 0;
-                my $info = @{$infos}[0];
-
-                print "$storePath\n";
-                print "$info->{deriver}\n";
-                my @references = split " ", $info->{refs};
-                print scalar @references, "\n";
-                print "$_\n" foreach @references;
-
-                my @path = computeSmallestDownload $storePath;
-
-                my $downloadSize = 0;
-                while (scalar @path > 0) {
-                    my $edge = pop @path;
-                    my $u = $edge->{start};
-                    my $v = $edge->{end};
-                    if ($edge->{type} eq "patch") {
-                        $downloadSize += $edge->{info}->{size} || 0;
-                    }
-                    elsif ($edge->{type} eq "narfile") {
-                        $downloadSize += $edge->{info}->{size} || 0;
-                    }
-                }
-
-                print "$downloadSize\n";
-
-                my $narSize = $info->{narSize} || 0;
-                print "$narSize\n";
-            }
-
-            print "\n";
-        }
-
-        else { die "unknown command ‘$cmd’"; }
-    }
-
-    exit 0;
-}
-
-elsif ($ARGV[0] ne "--substitute") {
-    die;
-}
-
-
-die unless scalar @ARGV == 3;
-my $targetPath = $ARGV[1];
-my $destPath = $ARGV[2];
-$fast = 0;
-
-
-# Create a temporary directory.
-my $tmpDir = mkTempDir("nix-download");
-
-my $tmpNar = "$tmpDir/nar";
-my $tmpNar2 = "$tmpDir/nar2";
-
-
-open LOGFILE, ">>$logFile" or die "cannot open log file $logFile";
-
-my $date = strftime ("%F %H:%M:%S UTC", gmtime (time));
-print LOGFILE "$$ get $targetPath $date\n";
-
-print STDERR "\n*** Trying to download/patch ‘$targetPath’\n";
-
-
-# Compute the shortest path.
-my @path = computeSmallestDownload $targetPath;
-die "don't know how to produce $targetPath\n" if scalar @path == 0;
-
-
-# We don't need the manifest anymore, so close it as an optimisation:
-# if we still have SQLite locks blocking other processes (we
-# shouldn't), this gets rid of them.
-$dbh->disconnect;
-
-
-# Traverse the shortest path, perform the actions described by the
-# edges.
-my $curStep = 1;
-my $maxStep = scalar @path;
-
-my $finalNarHash;
-
-while (scalar @path > 0) {
-    my $edge = pop @path;
-    my $u = $edge->{start};
-    my $v = $edge->{end};
-
-    print STDERR "\n*** Step $curStep/$maxStep: ";
-
-    if ($edge->{type} eq "present") {
-        print STDERR "using already present path ‘$v’\n";
-        print LOGFILE "$$ present $v\n";
-
-        if ($curStep < $maxStep) {
-            # Since this is not the last step, the path will be used
-            # as a base to one or more patches.  So turn the base path
-            # into a NAR archive, to which we can apply the patch.
-            print STDERR "  packing base path...\n";
-            system("$Nix::Config::binDir/nix-store --dump $v > $tmpNar") == 0
-                or die "cannot dump ‘$v’";
-        }
-    }
-
-    elsif ($edge->{type} eq "patch") {
-        my $patch = $edge->{info};
-        print STDERR "applying patch ‘$patch->{url}’ to ‘$u’ to create ‘$v’\n";
-
-        print LOGFILE "$$ patch $patch->{url} $patch->{size} $patch->{baseHash} $u $v\n";
-
-        # Download the patch.
-        print STDERR "  downloading patch...\n";
-        my $patchPath = "$tmpDir/patch";
-        checkURL $patch->{url};
-        system("$curl '$patch->{url}' -o $patchPath") == 0
-            or die "cannot download patch ‘$patch->{url}’\n";
-
-        # Apply the patch to the NAR archive produced in step 1 (for
-        # the already present path) or a later step (for patch sequences).
-        print STDERR "  applying patch...\n";
-        system("$Nix::Config::libexecDir/nix/bspatch $tmpNar $tmpNar2 $patchPath") == 0
-            or die "cannot apply patch ‘$patchPath’ to $tmpNar\n";
-
-        if ($curStep < $maxStep) {
-            # The archive will be used as the base of the next patch.
-            rename "$tmpNar2", "$tmpNar" or die "cannot rename NAR archive: $!";
-        } else {
-            # This was the last patch.  Unpack the final NAR archive
-            # into the target path.
-            print STDERR "  unpacking patched archive...\n";
-            system("$Nix::Config::binDir/nix-store --restore $destPath < $tmpNar2") == 0
-                or die "cannot unpack $tmpNar2 to ‘$v’\n";
-        }
-
-        $finalNarHash = $patch->{narHash};
-    }
-
-    elsif ($edge->{type} eq "narfile") {
-        my $narFile = $edge->{info};
-        print STDERR "downloading ‘$narFile->{url}’ to ‘$v’\n";
-
-        my $size = $narFile->{size} || -1;
-        print LOGFILE "$$ narfile $narFile->{url} $size $v\n";
-
-        checkURL $narFile->{url};
-
-        my $decompressor =
-            $narFile->{compressionType} eq "bzip2" ? "| $Nix::Config::bzip2 -d" :
-            $narFile->{compressionType} eq "xz" ? "| $Nix::Config::xz -d" :
-            $narFile->{compressionType} eq "none" ? "" :
-            die "unknown compression type ‘$narFile->{compressionType}’";
-
-        if ($curStep < $maxStep) {
-            # The archive will be used a base to a patch.
-            system("$curl '$narFile->{url}' $decompressor > $tmpNar") == 0
-                or die "cannot download and unpack ‘$narFile->{url}’ to ‘$v’\n";
-        } else {
-            # Unpack the archive to the target path.
-            system("$curl '$narFile->{url}' $decompressor | $Nix::Config::binDir/nix-store --restore '$destPath'") == 0
-                or die "cannot download and unpack ‘$narFile->{url}’ to ‘$v’\n";
-        }
-
-        $finalNarHash = $narFile->{narHash};
-    }
-
-    $curStep++;
-}
-
-
-# Tell Nix about the expected hash so it can verify it.
-die "cannot check integrity of the downloaded path since its hash is not known\n"
-    unless defined $finalNarHash;
-print "$finalNarHash\n";
-
-
-print STDERR "\n";
-print LOGFILE "$$ success\n";
-close LOGFILE;
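
The script deleted above chose between downloading full NARs and applying chains of binary patches by running Dijkstra's algorithm over a graph whose nodes are store paths (plus a synthetic "start" node) and whose edge weights are download sizes. The following standalone sketch uses a toy three-node graph rather than the manifest database, but keeps the same node/edge shape and the same relaxation loop the deleted code used.

use strict;
use warnings;

my %graph = (
    start => { d => 0,    pred => undef, edges => [
        { weight => 80, start => "start", end => "narA", type => "narfile" },
    ] },
    narA  => { d => 1e12, pred => undef, edges => [
        { weight => 5,  start => "narA",  end => "narB", type => "patch" },
    ] },
    narB  => { d => 1e12, pred => undef, edges => [] },
);

my @todo = keys %graph;
while (@todo) {
    # Remove the unvisited node with the smallest tentative distance.
    @todo = sort { $graph{$b}{d} <=> $graph{$a}{d} } @todo;
    my $u = pop @todo;
    foreach my $edge (@{ $graph{$u}{edges} }) {
        my $v = $graph{ $edge->{end} };
        if ($v->{d} > $graph{$u}{d} + $edge->{weight}) {
            $v->{d}    = $graph{$u}{d} + $edge->{weight};
            $v->{pred} = $edge;    # the edge's start is the predecessor
        }
    }
}

printf "cheapest way to narB costs %d via a %s edge\n",
    $graph{narB}{d}, $graph{narB}{pred}{type};    # 85 via a patch edge
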
diff --git a/scripts/install-nix-from-closure.sh b/scripts/install-nix-from-closure.sh
index 3efe7b38435e..95f69cad84bc 100644
--- a/scripts/install-nix-from-closure.sh
+++ b/scripts/install-nix-from-closure.sh
@@ -1,4 +1,4 @@
-#! /usr/bin/env bash
+#!/bin/sh
 
 set -e
 
@@ -25,7 +25,7 @@ echo "performing a single-user installation of Nix..." >&2
 
 if ! [ -e $dest ]; then
     cmd="mkdir -m 0755 $dest && chown $USER $dest"
-    echo "directory $dest does not exist; creating it by running ‘$cmd’ using sudo" >&2
+    echo "directory $dest does not exist; creating it by running '$cmd' using sudo" >&2
     if ! sudo sh -c "$cmd"; then
         echo "$0: please manually run ‘$cmd’ as root to create $dest" >&2
         exit 1
@@ -33,7 +33,7 @@ if ! [ -e $dest ]; then
 fi
 
 if ! [ -w $dest ]; then
-    echo "$0: directory $dest exists, but is not writable by you; please run ‘chown -R $USER $dest’ as root" >&2
+    echo "$0: directory $dest exists, but is not writable by you. This could indicate that another user has already performed a single-user installation of Nix on this system. If you wish to enable multi-user support see http://nixos.org/nix/manual/#ssec-multi-user. If you wish to continue with a single-user install for $USER please run ‘chown -R $USER $dest’ as root." >&2
     exit 1
 fi
 
@@ -49,7 +49,10 @@ for i in $(cd $self/store >/dev/null && echo *); do
     fi
     if ! [ -e "$dest/store/$i" ]; then
         cp -Rp "$self/store/$i" "$i_tmp"
+        chmod -R a-w "$i_tmp"
+        chmod +w "$i_tmp"
         mv "$i_tmp" "$dest/store/$i"
+        chmod -w "$dest/store/$i"
     fi
 done
 echo "" >&2
@@ -92,7 +95,7 @@ p=$NIX_LINK/etc/profile.d/nix.sh
 added=
 for i in .bash_profile .bash_login .profile; do
     fn="$HOME/$i"
-    if [ -e "$fn" ]; then
+    if [ -w "$fn" ]; then
         if ! grep -q "$p" "$fn"; then
             echo "modifying $fn..." >&2
             echo "if [ -e $p ]; then . $p; fi # added by Nix installer" >> $fn
diff --git a/scripts/local.mk b/scripts/local.mk
index 39e1df611c5c..13b13a86bc6c 100644
--- a/scripts/local.mk
+++ b/scripts/local.mk
@@ -2,26 +2,18 @@ nix_bin_scripts := \
   $(d)/nix-build \
   $(d)/nix-channel \
   $(d)/nix-copy-closure \
-  $(d)/nix-generate-patches \
   $(d)/nix-install-package \
-  $(d)/nix-prefetch-url \
-  $(d)/nix-pull \
   $(d)/nix-push
 
 bin-scripts += $(nix_bin_scripts)
 
-nix_substituters := \
-  $(d)/copy-from-other-stores.pl \
-  $(d)/download-from-binary-cache.pl \
-  $(d)/download-using-manifests.pl
-
 nix_noinst_scripts := \
   $(d)/build-remote.pl \
   $(d)/find-runtime-roots.pl \
+  $(d)/resolve-system-dependencies.pl \
   $(d)/nix-http-export.cgi \
   $(d)/nix-profile.sh \
-  $(d)/nix-reduce-build \
-  $(nix_substituters)
+  $(d)/nix-reduce-build
 
 noinst-scripts += $(nix_noinst_scripts)
 
@@ -30,7 +22,7 @@ profiledir = $(sysconfdir)/profile.d
 $(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
 $(eval $(call install-program-in, $(d)/find-runtime-roots.pl, $(libexecdir)/nix))
 $(eval $(call install-program-in, $(d)/build-remote.pl, $(libexecdir)/nix))
-$(foreach prog, $(nix_substituters), $(eval $(call install-program-in, $(prog), $(libexecdir)/nix/substituters)))
+$(eval $(call install-program-in, $(d)/resolve-system-dependencies.pl, $(libexecdir)/nix))
 $(eval $(call install-symlink, nix-build, $(bindir)/nix-shell))
 
 clean-files += $(nix_bin_scripts) $(nix_noinst_scripts)
diff --git a/scripts/nix-build.in b/scripts/nix-build.in
index 0a4431681cb1..2d45e37c52d6 100755
--- a/scripts/nix-build.in
+++ b/scripts/nix-build.in
@@ -6,6 +6,7 @@ use Nix::Config;
 use Nix::Store;
 use Nix::Utils;
 use File::Basename;
+use Text::ParseWords;
 use Cwd;
 
 binmode STDERR, ":encoding(utf8)";
@@ -56,7 +57,7 @@ if ($runEnv && defined $ARGV[0] && $ARGV[0] !~ /nix-shell/) {
             while (<SCRIPT>) {
                 chomp;
                 if (/^\#\!\s*nix-shell (.*)$/) {
-                    push @ARGV, split(/ /, $1);
+                    push @ARGV, shellwords($1);
                 }
             }
         }
@@ -109,13 +110,6 @@ for (my $n = 0; $n < scalar @ARGV; $n++) {
         $n += 2;
     }
 
-    elsif ($arg eq "--log-type") {
-        $n++;
-        die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
-        push @instArgs, ($arg, $ARGV[$n]);
-        push @buildArgs, ($arg, $ARGV[$n]);
-    }
-
     elsif ($arg eq "--option") {
         die "$0: ‘$arg’ requires two arguments\n" unless $n + 2 < scalar @ARGV;
         push @instArgs, ($arg, $ARGV[$n + 1], $ARGV[$n + 2]);
@@ -123,7 +117,7 @@ for (my $n = 0; $n < scalar @ARGV; $n++) {
         $n += 2;
     }
 
-    elsif ($arg eq "--max-jobs" || $arg eq "-j" || $arg eq "--max-silent-time" || $arg eq "--log-type" || $arg eq "--cores" || $arg eq "--timeout" || $arg eq '--add-root') {
+    elsif ($arg eq "--max-jobs" || $arg eq "-j" || $arg eq "--max-silent-time" || $arg eq "--cores" || $arg eq "--timeout" || $arg eq '--add-root') {
         $n++;
         die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
         push @buildArgs, ($arg, $ARGV[$n]);
@@ -190,17 +184,31 @@ for (my $n = 0; $n < scalar @ARGV; $n++) {
         $n++;
         die "$0: ‘$arg’ requires an argument\n" unless $n < scalar @ARGV;
         my $interpreter = $ARGV[$n];
-        # Überhack to support Perl. Perl examines the shebang and
-        # executes it unless it contains the string "perl" or "indir",
-        # or (undocumented) argv[0] does not contain "perl". Exploit
-        # the latter by doing "exec -a".
-        my $execArgs = $interpreter =~ /perl/ ? "-a PERL" : "";
+        my $execArgs = "";
+
         sub shellEscape {
             my $s = $_;
             $s =~ s/'/'\\''/g;
             return "'" . $s . "'";
         }
-        $envCommand = "exec $execArgs $interpreter $script ${\(join ' ', (map shellEscape, @savedArgs))}";
+
+        # Überhack to support Perl. Perl examines the shebang and
+        # executes it unless it contains the string "perl" or "indir",
+        # or (undocumented) argv[0] does not contain "perl". Exploit
+        # the latter by doing "exec -a".
+        if ($interpreter =~ /perl/) {
+            $execArgs = "-a PERL";
+        }
+
+        if ($interpreter =~ /ruby/) {
+            # Hack for Ruby. Ruby also examines the shebang. It tries to
+            # read the shebang to understand which packages to read from. Since
+            # this is handled via nix-shell -p, we wrap our ruby script execution
+            # in ruby -e 'load' which ignores the shebangs.
+            $envCommand = "exec $execArgs $interpreter -e 'load(\"$script\")' -- ${\(join ' ', (map shellEscape, @savedArgs))}";
+        } else {
+            $envCommand = "exec $execArgs $interpreter $script ${\(join ' ', (map shellEscape, @savedArgs))}";
+        }
     }
 
     elsif (substr($arg, 0, 1) eq "-") {
@@ -269,7 +277,7 @@ foreach my $expr (@exprs) {
         my $tmp = $ENV{"TMPDIR"} // $ENV{"XDG_RUNTIME_DIR"} // "/tmp";
         if ($pure) {
             foreach my $name (keys %ENV) {
-                next if grep { $_ eq $name } ("HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM", "IN_NIX_SHELL", "TZ", "PAGER");
+                next if grep { $_ eq $name } ("HOME", "USER", "LOGNAME", "DISPLAY", "PATH", "TERM", "IN_NIX_SHELL", "TZ", "PAGER", "NIX_BUILD_SHELL");
                 delete $ENV{$name};
             }
             # NixOS hack: prevent /etc/bashrc from sourcing /etc/profile.
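
A standalone sketch (the nixpkgs expression is only an illustration) of what switching from split(/ /, ...) to Text::ParseWords::shellwords changes for "#! nix-shell" shebang lines: quoted arguments now survive as single words instead of being torn apart at every space.

use Text::ParseWords;

my $shebangArgs = q{-i bash -p 'python3.withPackages (ps: [ ps.requests ])'};
my @naive = split / /, $shebangArgs;   # 8 pieces; the quoted expression is shredded
my @words = shellwords($shebangArgs);  # 4 pieces; the quoted expression stays intact
print scalar(@naive), " vs ", scalar(@words), "\n";
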
diff --git a/scripts/nix-channel.in b/scripts/nix-channel.in
index 05716645a780..65084ff1f34a 100755
--- a/scripts/nix-channel.in
+++ b/scripts/nix-channel.in
@@ -12,8 +12,6 @@ binmode STDERR, ":encoding(utf8)";
 
 Nix::Config::readConfig;
 
-my $manifestDir = $Nix::Config::manifestDir;
-
 
 # Turn on caching in nix-prefetch-url.
 my $channelCache = "$Nix::Config::stateDir/channel-cache";
@@ -75,7 +73,6 @@ sub removeChannel {
     my ($name) = @_;
     readChannels;
     my $url = $channels{$name};
-    deleteOldManifests($url . "/MANIFEST", undef) if defined $url;
     delete $channels{$name};
     writeChannels;
 
@@ -84,8 +81,7 @@ sub removeChannel {
 }
 
 
-# Fetch Nix expressions and pull manifests from the subscribed
-# channels.
+# Fetch Nix expressions and binary cache URLs from the subscribed channels.
 sub update {
     my @channelNames = @_;
 
@@ -97,7 +93,6 @@ sub update {
         next if scalar @channelNames > 0 && ! grep { $_ eq $name } @{channelNames};
 
         my $url = $channels{$name};
-        my $origUrl = "$url/MANIFEST";
 
         # We want to download the url to a file to see if it's a tarball while also checking if we
         # got redirected in the process, so that we can grab the various parts of a nix channel
@@ -105,8 +100,8 @@ sub update {
         my $tmpdir = tempdir( CLEANUP => 1 );
         my $filename;
         ($url, $filename) = `cd $tmpdir && $Nix::Config::curl --silent --write-out '%{url_effective}\n%{filename_effective}' -L '$url' -O`;
-        die "$0: unable to check ‘$url’\n" if $? != 0;
         chomp $url;
+        die "$0: unable to check ‘$url’\n" if $? != 0;
 
         # If the URL contains a version number, append it to the name
         # attribute (so that "nix-env -q" on the channels profile
@@ -132,22 +127,8 @@ sub update {
         if ($ret != 0) {
             # Check if the channel advertises a binary cache.
             my $binaryCacheURL = `$Nix::Config::curl --silent '$url'/binary-cache-url`;
-            my $getManifest = ($Nix::Config::config{"force-manifest"} // "false") eq "true";
-            if ($? == 0 && $binaryCacheURL ne "") {
-                $extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; ";
-                deleteOldManifests($origUrl, undef);
-            } else {
-                $getManifest = 1;
-            }
-
-            if ($getManifest) {
-                # No binary cache, so pull the channel manifest.
-                mkdir $manifestDir, 0755 unless -e $manifestDir;
-                die "$0: you do not have write permission to ‘$manifestDir’!\n" unless -W $manifestDir;
-                $ENV{'NIX_ORIG_URL'} = $origUrl;
-                system("$Nix::Config::binDir/nix-pull", "--skip-wrong-store", "$url/MANIFEST") == 0
-                    or die "cannot pull manifest from ‘$url’\n";
-            }
+            $extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; "
+                if $? == 0 && $binaryCacheURL ne "";
 
             # Download the channel tarball.
             my $fullURL = "$url/nixexprs.tar.xz";
diff --git a/scripts/nix-copy-closure.in b/scripts/nix-copy-closure.in
index d7a2c973948b..0078d7267353 100755
--- a/scripts/nix-copy-closure.in
+++ b/scripts/nix-copy-closure.in
@@ -1,5 +1,6 @@
 #! @perl@ -w @perlFlags@
 
+use utf8;
 use strict;
 use Nix::SSH;
 use Nix::Config;
@@ -11,7 +12,7 @@ binmode STDERR, ":encoding(utf8)";
 
 if (scalar @ARGV < 1) {
     print STDERR <<EOF
-Usage: nix-copy-closure [--from | --to] HOSTNAME [--sign] [--gzip] [--bzip2] [--xz] PATHS...
+Usage: nix-copy-closure [--from | --to] HOSTNAME [--gzip] [--bzip2] [--xz] PATHS...
 EOF
     ;
     exit 1;
@@ -20,11 +21,11 @@ EOF
 
 # Get the target host.
 my $sshHost;
-my $sign = 0;
 my $toMode = 1;
 my $includeOutputs = 0;
 my $dryRun = 0;
 my $useSubstitutes = 0;
+my $verbosity = 1;
 
 
 # !!! Copied from nix-pack-closure, should put this in a module.
@@ -36,9 +37,6 @@ while (@ARGV) {
     if ($arg eq "--help") {
         exec "man nix-copy-closure" or die;
     }
-    elsif ($arg eq "--sign") {
-        $sign = 1;
-    }
     elsif ($arg eq "--gzip" || $arg eq "--bzip2" || $arg eq "--xz") {
         warn "$0: ‘$arg’ is not implemented\n" if $arg ne "--gzip";
         push @globalSshOpts, "-C";
@@ -61,6 +59,10 @@ while (@ARGV) {
     elsif ($arg eq "--use-substitutes" || $arg eq "-s") {
         $useSubstitutes = 1;
     }
+    elsif ($arg eq "-v") {
+        $verbosity++;
+        setVerbosity($verbosity);
+    }
     elsif (!defined $sshHost) {
         $sshHost = $arg;
     }
@@ -75,7 +77,7 @@ die "$0: you did not specify a host name\n" unless defined $sshHost;
 if ($toMode) { # Copy TO the remote machine.
     Nix::CopyClosure::copyTo(
         $sshHost, [ @storePaths ],
-        $includeOutputs, $dryRun, $sign, $useSubstitutes);
+        $includeOutputs, $dryRun, $useSubstitutes);
 }
 
 else { # Copy FROM the remote machine.
@@ -93,7 +95,7 @@ else { # Copy FROM the remote machine.
     if (scalar @missing > 0) {
         print STDERR "copying ", scalar @missing, " missing paths from ‘$sshHost’...\n";
         writeInt(5, $to); # == cmdExportPaths
-        writeInt($sign ? 1 : 0, $to);
+        writeInt(0, $to); # obsolete
         writeStrings(\@missing, $to);
         importPaths(fileno($from));
     }
diff --git a/scripts/nix-generate-patches.in b/scripts/nix-generate-patches.in
deleted file mode 100755
index 0a29c0548c1f..000000000000
--- a/scripts/nix-generate-patches.in
+++ /dev/null
@@ -1,51 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use strict;
-use Nix::Manifest;
-use Nix::GeneratePatches;
-use Nix::Utils;
-
-if (scalar @ARGV != 5) {
-    print STDERR <<EOF;
-Usage: nix-generate-patches NAR-DIR PATCH-DIR PATCH-URI OLD-MANIFEST NEW-MANIFEST
-
-This command generates binary patches between NAR files listed in
-OLD-MANIFEST and NEW-MANIFEST.  The patches are written to the
-directory PATCH-DIR, and the prefix PATCH-URI is used to generate URIs
-for the patches.  The patches are added to NEW-MANIFEST.  All NARs are
-required to exist in NAR-DIR.  Patches are generated between
-succeeding versions of packages with the same name.
-EOF
-    exit 1;
-}
-
-my $narPath = $ARGV[0];
-my $patchesPath = $ARGV[1];
-my $patchesURL = $ARGV[2];
-my $srcManifest = $ARGV[3];
-my $dstManifest = $ARGV[4];
-
-my (%srcNarFiles, %srcLocalPaths, %srcPatches);
-readManifest $srcManifest, \%srcNarFiles, \%srcPatches;
-
-my (%dstNarFiles, %dstLocalPaths, %dstPatches);
-readManifest $dstManifest, \%dstNarFiles, \%dstPatches;
-
-my $tmpDir = mkTempDir("nix-generate-patches");
-
-generatePatches \%srcNarFiles, \%dstNarFiles, \%srcPatches, \%dstPatches,
-    $narPath, $patchesPath, $patchesURL, $tmpDir;
-
-propagatePatches \%srcPatches, \%dstNarFiles, \%dstPatches;
-
-# Optionally add all new patches to the manifest in $NIX_ALL_PATCHES.
-my $allPatchesFile = $ENV{"NIX_ALL_PATCHES"};
-if (defined $allPatchesFile) {
-    my (%dummy, %allPatches);
-    readManifest("$patchesPath/all-patches", \%dummy, \%allPatches)
-        if -f $allPatchesFile;
-    copyPatches \%dstPatches, \%allPatches;
-    writeManifest($allPatchesFile, {}, \%allPatches, 0);
-}
-
-writeManifest $dstManifest, \%dstNarFiles, \%dstPatches;
diff --git a/scripts/nix-install-package.in b/scripts/nix-install-package.in
index b442c708b1a2..ba349774af54 100755
--- a/scripts/nix-install-package.in
+++ b/scripts/nix-install-package.in
@@ -89,7 +89,7 @@ my $pathRE = "(?: \/ [\/A-Za-z0-9\+\-\.\_\?\=]* )";
 # store path.  We'll let nix-env do that.
 
 $contents =~
-    / ^ \s* (\S+) \s+ ($Nix::Utils::urlRE) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) ( \s+ ($Nix::Utils::urlRE) )?  /x
+    / ^ \s* (\S+) \s+ (\S+) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) ( \s+ ($Nix::Utils::urlRE) )?  /x
     or barf "invalid package contents";
 my $version = $1;
 my $manifestURL = $2;
@@ -111,25 +111,9 @@ if ($interactive) {
 }
 
 
-if (defined $binaryCacheURL) {
+die "$0: package does not supply a binary cache\n" unless defined $binaryCacheURL;
 
-    push @extraNixEnvArgs, "--option", "extra-binary-caches", $binaryCacheURL;
-
-} else {
-
-    # Store the manifest in the temporary directory so that we don't
-    # pollute /nix/var/nix/manifests.  This also requires that we
-    # don't use the Nix daemon (because otherwise
-    # download-using-manifests won't see our NIX_MANIFESTS_DIRS
-    # environment variable).
-    $ENV{NIX_MANIFESTS_DIR} = $tmpDir;
-    $ENV{NIX_REMOTE} = "";
-
-    print "\nPulling manifests...\n";
-    system("$Nix::Config::binDir/nix-pull", $manifestURL) == 0
-        or barf "nix-pull failed: $?";
-
-}
+push @extraNixEnvArgs, "--option", "extra-binary-caches", $binaryCacheURL;
 
 
 print "\nInstalling package...\n";
diff --git a/scripts/nix-prefetch-url.in b/scripts/nix-prefetch-url.in
deleted file mode 100755
index 6effbe208146..000000000000
--- a/scripts/nix-prefetch-url.in
+++ /dev/null
@@ -1,132 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use File::Basename;
-use File::stat;
-use Nix::Store;
-use Nix::Config;
-use Nix::Utils;
-
-binmode STDERR, ":encoding(utf8)";
-
-
-my $hashType = $ENV{'NIX_HASH_ALGO'} || "sha256"; # obsolete
-my $cacheDir = $ENV{'NIX_DOWNLOAD_CACHE'};
-
-my @args;
-my $arg;
-while ($arg = shift) {
-    if ($arg eq "--help") {
-        exec "man nix-prefetch-url" or die;
-    } elsif ($arg eq "--type") {
-        $hashType = shift;
-        die "$0: ‘$arg’ requires an argument\n" unless defined $hashType;
-    } elsif (substr($arg, 0, 1) eq "-") {
-        die "$0: unknown flag ‘$arg’\n";
-    } else {
-        push @args, $arg;
-    }
-}
-
-my $url = $args[0];
-my $expHash = $args[1];
-
-
-if (!defined $url || $url eq "") {
-    print STDERR <<EOF
-Usage: nix-prefetch-url URL [EXPECTED-HASH]
-EOF
-    ;
-    exit 1;
-}
-
-my $tmpDir = mkTempDir("nix-prefetch-url");
-
-# Hack to support the mirror:// scheme from Nixpkgs.
-if ($url =~ /^mirror:\/\//) {
-    system("$Nix::Config::binDir/nix-build '<nixpkgs>' -A resolveMirrorURLs --argstr url '$url' -o $tmpDir/urls > /dev/null") == 0
-        or die "$0: nix-build failed; maybe \$NIX_PATH is not set properly\n";
-    my @expanded = split ' ', readFile("$tmpDir/urls");
-    die "$0: cannot resolve ‘$url’" unless scalar @expanded > 0;
-    print STDERR "$url expands to $expanded[0]\n";
-    $url = $expanded[0];
-}
-
-# Handle escaped characters in the URI.  `+', `=' and `?' are the only
-# characters that are valid in Nix store path names but have a special
-# meaning in URIs.
-my $name = basename $url;
-die "cannot figure out file name for ‘$url’\n" if $name eq ""; 
-$name =~ s/%2b/+/g;
-$name =~ s/%3d/=/g;
-$name =~ s/%3f/?/g;
-
-my $finalPath;
-my $hash;
-
-# If the hash was given, a file with that hash may already be in the
-# store.
-if (defined $expHash) {
-    $finalPath = makeFixedOutputPath(0, $hashType, $expHash, $name);
-    if (isValidPath($finalPath)) { $hash = $expHash; } else { $finalPath = undef; }
-}
-
-# If we don't know the hash or a file with that hash doesn't exist,
-# download the file and add it to the store.
-if (!defined $finalPath) {
-
-    my $tmpFile = "$tmpDir/$name";
-    
-    # Optionally do timestamp-based caching of the download.
-    # Actually, the only thing that we cache in $NIX_DOWNLOAD_CACHE is
-    # the hash and the timestamp of the file at $url.  The caching of
-    # the file *contents* is done in Nix store, where it can be
-    # garbage-collected independently.
-    my ($cachedTimestampFN, $cachedHashFN, @cacheFlags);
-    if (defined $cacheDir) {
-        my $urlHash = hashString("sha256", 1, $url);
-        writeFile "$cacheDir/$urlHash.url", $url;
-        $cachedHashFN = "$cacheDir/$urlHash.$hashType";
-        $cachedTimestampFN = "$cacheDir/$urlHash.stamp";
-        @cacheFlags = ("--time-cond", $cachedTimestampFN) if -f $cachedHashFN && -f $cachedTimestampFN;
-    }
-    
-    # Perform the download.
-    my @curlFlags = ("curl", $url, "-o", $tmpFile, "--fail", "--location", "--max-redirs", "20", "--disable-epsv", "--cookie-jar", "$tmpDir/cookies", "--remote-time", (split " ", ($ENV{NIX_CURL_FLAGS} || "")));
-    (system $Nix::Config::curl @curlFlags, @cacheFlags) == 0 or die "$0: download of ‘$url’ failed\n";
-
-    if (defined $cacheDir && ! -e $tmpFile) {
-        # Curl didn't create $tmpFile, so apparently there's no newer
-        # file on the server.
-        $hash = readFile $cachedHashFN or die;
-        $finalPath = makeFixedOutputPath(0, $hashType, $hash, $name);
-        unless (isValidPath $finalPath) {
-            print STDERR "cached contents of ‘$url’ disappeared, redownloading...\n";
-            $finalPath = undef;
-            (system $Nix::Config::curl @curlFlags) == 0 or die "$0: download of ‘$url’ failed\n";
-        }
-    }
-
-    if (!defined $finalPath) {
-        
-        # Compute the hash.
-        $hash = hashFile($hashType, $hashType ne "md5", $tmpFile);
-
-        if (defined $cacheDir) {
-            writeFile $cachedHashFN, $hash;
-            my $st = stat($tmpFile) or die;
-            open STAMP, ">$cachedTimestampFN" or die; close STAMP;
-            utime($st->atime, $st->mtime, $cachedTimestampFN) or die;
-        }
-    
-        # Add the downloaded file to the Nix store.
-        $finalPath = addToStore($tmpFile, 0, $hashType);
-    }
-
-    die "$0: hash mismatch for ‘$url’\n" if defined $expHash && $expHash ne $hash;
-}
-
-print STDERR "path is ‘$finalPath’\n" unless $ENV{'QUIET'};
-print "$hash\n";
-print "$finalPath\n" if $ENV{'PRINT_PATH'};
diff --git a/scripts/nix-profile.sh.in b/scripts/nix-profile.sh.in
index 672d1f035f91..5e01de95156c 100644
--- a/scripts/nix-profile.sh.in
+++ b/scripts/nix-profile.sh.in
@@ -1,28 +1,77 @@
-if [ -n "$HOME" ]; then
-    NIX_LINK="$HOME/.nix-profile"
-
-    # Set the default profile.
-    if ! [ -L "$NIX_LINK" ]; then
-        echo "creating $NIX_LINK" >&2
-        _NIX_DEF_LINK=@localstatedir@/nix/profiles/default
-        @coreutils@/ln -s "$_NIX_DEF_LINK" "$NIX_LINK"
+if [ -n "$HOME" ] && [ -n "$USER" ]; then
+    __savedpath="$PATH"
+    export PATH=@coreutils@
+
+    # Set up the per-user profile.
+    # This part should be kept in sync with nixpkgs:nixos/modules/programs/shell.nix
+
+    : ${NIX_LINK:=$HOME/.nix-profile}
+
+    : ${NIX_USER_PROFILE_DIR:=@localstatedir@/nix/profiles/per-user/$USER}
+
+    mkdir -m 0755 -p "$NIX_USER_PROFILE_DIR"
+
+    if [ "$(stat --printf '%u' "$NIX_USER_PROFILE_DIR")" != "$(id -u)" ]; then
+        echo "Nix: WARNING: bad ownership on "$NIX_USER_PROFILE_DIR", should be $(id -u)" >&2
     fi
 
-    export PATH=$NIX_LINK/bin:$NIX_LINK/sbin:$PATH
+    if [ -w "$HOME" ]; then
+        if ! [ -L "$NIX_LINK" ]; then
+            echo "Nix: creating $NIX_LINK" >&2
+            if [ "$USER" != root ]; then
+                if ! ln -s "$NIX_USER_PROFILE_DIR"/profile "$NIX_LINK"; then
+                    echo "Nix: WARNING: could not create $NIX_LINK -> $NIX_USER_PROFILE_DIR/profile" >&2
+                fi
+            else
+                # Root installs in the system-wide profile by default.
+                ln -s @localstatedir@/nix/profiles/default "$NIX_LINK"
+            fi
+        fi
 
-    # Subscribe the user to the Nixpkgs channel by default.
-    if [ ! -e $HOME/.nix-channels ]; then
-        echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > $HOME/.nix-channels
+        # Subscribe the user to the unstable Nixpkgs channel by default.
+        if [ ! -e "$HOME/.nix-channels" ]; then
+            echo "https://nixos.org/channels/nixpkgs-unstable nixpkgs" > "$HOME/.nix-channels"
+        fi
+
+        # Create the per-user garbage collector roots directory.
+        __user_gcroots=@localstatedir@/nix/gcroots/per-user/"$USER"
+        mkdir -m 0755 -p "$__user_gcroots"
+        if [ "$(stat --printf '%u' "$__user_gcroots")" != "$(id -u)" ]; then
+            echo "Nix: WARNING: bad ownership on $__user_gcroots, should be $(id -u)" >&2
+        fi
+        unset __user_gcroots
+
+        # Set up a default Nix expression from which to install stuff.
+        __nix_defexpr="$HOME"/.nix-defexpr
+        [ -L "$__nix_defexpr" ] && rm -f "$__nix_defexpr"
+        mkdir -m 0755 -p "$__nix_defexpr"
+        if [ "$USER" != root ] && [ ! -L "$__nix_defexpr"/channels_root ]; then
+            ln -s @localstatedir@/nix/profiles/per-user/root/channels "$__nix_defexpr"/channels_root
+        fi
+        unset __nix_defexpr
     fi
 
     # Append ~/.nix-defexpr/channels/nixpkgs to $NIX_PATH so that
     # <nixpkgs> paths work when the user has fetched the Nixpkgs
     # channel.
-    export NIX_PATH=${NIX_PATH:+$NIX_PATH:}nixpkgs=$HOME/.nix-defexpr/channels/nixpkgs
+    export NIX_PATH="${NIX_PATH:+$NIX_PATH:}nixpkgs=$HOME/.nix-defexpr/channels/nixpkgs"
+
+    # Set up environment.
+    # This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
+    export NIX_USER_PROFILE_DIR
+    export NIX_PROFILES="@localstatedir@/nix/profiles/default $NIX_USER_PROFILE_DIR"
+
+    for i in $NIX_PROFILES; do
+        if [ -d "$i/lib/aspell" ]; then
+            export ASPELL_CONF="dict-dir $i/lib/aspell"
+        fi
+    done
 
     # Set $SSL_CERT_FILE so that Nixpkgs applications like curl work.
     if [ -e /etc/ssl/certs/ca-certificates.crt ]; then # NixOS, Ubuntu, Debian, Gentoo, Arch
         export SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt
+    elif [ -e /etc/ssl/ca-bundle.pem ]; then # openSUSE Tumbleweed
+        export SSL_CERT_FILE=/etc/ssl/ca-bundle.pem
     elif [ -e /etc/ssl/certs/ca-bundle.crt ]; then # Old NixOS
         export SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt
     elif [ -e /etc/pki/tls/certs/ca-bundle.crt ]; then # Fedora, CentOS
@@ -32,4 +81,7 @@ if [ -n "$HOME" ]; then
     elif [ -e "$NIX_LINK/etc/ca-bundle.crt" ]; then # old cacert in Nix profile
         export SSL_CERT_FILE="$NIX_LINK/etc/ca-bundle.crt"
     fi
+
+    export PATH="$NIX_LINK/bin:$NIX_LINK/sbin:$__savedpath"
+    unset __savedpath
 fi
diff --git a/scripts/nix-pull.in b/scripts/nix-pull.in
deleted file mode 100755
index 995b50935964..000000000000
--- a/scripts/nix-pull.in
+++ /dev/null
@@ -1,102 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use Nix::Config;
-use Nix::Manifest;
-
-binmode STDERR, ":encoding(utf8)";
-
-my $manifestDir = $Nix::Config::manifestDir;
-
-
-# Prevent access problems in shared-stored installations.
-umask 0022;
-
-
-# Create the manifests directory if it doesn't exist.
-if (! -e $manifestDir) {
-    mkdir $manifestDir, 0755 or die "cannot create directory ‘$manifestDir’";
-}
-
-
-# Make sure that the manifests directory is scanned for GC roots.
-my $gcRootsDir = "$Nix::Config::stateDir/gcroots";
-my $manifestDirLink = "$gcRootsDir/manifests";
-if (! -l $manifestDirLink) {
-    symlink($manifestDir, $manifestDirLink) or die "cannot create symlink ‘$manifestDirLink’";
-}
-
-
-# Process the URLs specified on the command line.
-
-sub downloadFile {
-    my $url = shift;
-    $ENV{"PRINT_PATH"} = 1;
-    $ENV{"QUIET"} = 1;
-    my ($dummy, $path) = `$Nix::Config::binDir/nix-prefetch-url '$url'`;
-    die "cannot fetch ‘$url’" if $? != 0;
-    die "nix-prefetch-url did not return a path" unless defined $path;
-    chomp $path;
-    return $path;
-}
-
-sub processURL {
-    my $url = shift;
-
-    $url =~ s/\/$//;
-
-    my $manifest;
-
-    my $origUrl = $ENV{'NIX_ORIG_URL'} || $url;
-
-    # First see if a bzipped manifest is available.
-    if (system("$Nix::Config::curl --fail --silent --location --head '$url'.bz2 > /dev/null") == 0) {
-        print "fetching list of Nix archives at ‘$url.bz2’...\n";
-        $manifest = downloadFile "$url.bz2";
-    }
-
-    # Otherwise, just get the uncompressed manifest.
-    else {
-        print "fetching list of Nix archives at ‘$url’...\n";
-        $manifest = downloadFile $url;
-    }
-
-    my $baseName = "unnamed";
-    if ($url =~ /\/([^\/]+)\/[^\/]+$/) { # get the forelast component
-        $baseName = $1;
-    }
-
-    my $hash = `$Nix::Config::binDir/nix-hash --flat '$manifest'`
-        or die "cannot hash ‘$manifest’";
-    chomp $hash;
-
-    my $urlFile = "$manifestDir/$baseName-$hash.url";
-    open URL, ">$urlFile" or die "cannot create ‘$urlFile’";
-    print URL $origUrl;
-    close URL;
-
-    my $finalPath = "$manifestDir/$baseName-$hash.nixmanifest";
-
-    unlink $finalPath if -e $finalPath;
-
-    symlink("$manifest", "$finalPath")
-        or die "cannot link ‘$finalPath’ to ‘$manifest’";
-
-    deleteOldManifests($origUrl, $urlFile);
-}
-
-while (@ARGV) {
-    my $url = shift @ARGV;
-    if ($url eq "--help") {
-        exec "man nix-pull" or die;
-    } elsif ($url eq "--skip-wrong-store") {
-        # No-op, no longer supported.
-    } else {
-        processURL $url;
-    }
-}
-
-
-# Update the cache.
-updateManifestDB();
diff --git a/scripts/nix-push.in b/scripts/nix-push.in
index 2d9d83f59b92..54456ac9512e 100755
--- a/scripts/nix-push.in
+++ b/scripts/nix-push.in
@@ -258,13 +258,10 @@ for (my $n = 0; $n < scalar @storePaths2; $n++) {
     }
 
     if (defined $secretKeyFile) {
-        my $s = readFile $secretKeyFile;
-        chomp $s;
-        my ($keyName, $secretKey) = split ":", $s;
-        die "invalid secret key file ‘$secretKeyFile’\n" unless defined $keyName && defined $secretKey;
+        my $secretKey = readFile $secretKeyFile;
         my $fingerprint = fingerprintPath($storePath, $narHash, $narSize, $refs);
-        my $sig = encode_base64(signString(decode_base64($secretKey), $fingerprint), "");
-        $info .= "Sig: $keyName:$sig\n";
+        my $sig = signString($secretKey, $fingerprint);
+        $info .= "Sig: $sig\n";
     }
 
     my $pathHash = substr(basename($storePath), 0, 32);
diff --git a/scripts/resolve-system-dependencies.pl.in b/scripts/resolve-system-dependencies.pl.in
new file mode 100755
index 000000000000..a20f0dc020fe
--- /dev/null
+++ b/scripts/resolve-system-dependencies.pl.in
@@ -0,0 +1,122 @@
+#! @perl@ -w @perlFlags@
+
+use utf8;
+use strict;
+use warnings;
+use Cwd qw(realpath);
+use Errno;
+use File::Basename qw(dirname);
+use File::Path qw(make_path);
+use File::Spec::Functions qw(catfile);
+use List::Util qw(reduce);
+use IPC::Open3;
+use Nix::Config;
+use Nix::Store qw(derivationFromPath);
+use POSIX qw(uname);
+use Storable qw(lock_retrieve lock_store);
+
+my ($sysname, undef, $version, undef, $machine) = uname;
+$sysname =~ /Darwin/ or die "This tool is only meant to be used on Darwin systems.";
+
+my $cache = "$Nix::Config::stateDir/dependency-maps/$machine-$sysname-$version.map";
+
+make_path dirname($cache);
+
+our $DEPS;
+eval {
+  $DEPS = lock_retrieve($cache);
+};
+
+if($!{ENOENT}) {
+  lock_store {}, $cache;
+  $DEPS = {};
+} elsif($@) {
+  die "Unable to obtain a lock on dependency-map file $cache: $@";
+}
+
+sub mkset(@) {
+  my %set;
+  @set{@_} = ();
+  \%set
+}
+
+sub union($$) {
+  my ($set1, $set2) = @_;
+  my %new = (%$set1, %$set2);
+  \%new
+}
+
+sub cache_filepath($) {
+  my $fp = shift;
+  $fp =~ s/-/--/g;
+  $fp =~ s/\//-/g;
+  $fp =~ s/^-//g;
+  catfile $cache, $fp
+}
+
+sub resolve_tree {
+  sub resolve_tree_inner {
+    my ($lib, $TREE) = @_;
+    return if (defined $TREE->{$lib});
+    $TREE->{$lib} = mkset(@{cache_get($lib)});
+    foreach my $dep (keys %{$TREE->{$lib}}) {
+      resolve_tree_inner($dep, $TREE);
+    }
+    values %$TREE
+  }
+
+  reduce { union($a, $b) } {}, resolve_tree_inner(@_)
+}
+
+sub cache_get {
+  my $key = shift;
+  if (defined $DEPS->{$key}) {
+    $DEPS->{$key}
+  } else {
+    cache_insert($key);
+    cache_get($key)
+  }
+}
+
+sub cache_insert($) {
+  my $key = shift;
+  print STDERR "Finding dependencies for $key...\n";
+  my @deps = find_deps($key);
+  $DEPS->{$key} = \@deps;
+}
+
+sub find_deps($) {
+  my $lib = shift;
+  my($chld_in, $chld_out, $chld_err);
+  my $pid = open3($chld_in, $chld_out, $chld_err, "@otool@", "-L", "-arch", "x86_64", $lib);
+  waitpid($pid, 0);
+  my $line = readline $chld_out;
+  if($? == 0 and $line !~ /not an object file/) {
+    my @libs;
+    while(<$chld_out>) {
+      my $dep = (split /\s+/)[1];
+      push @libs, $dep unless $dep eq $lib or $dep =~ /\@rpath/;
+    }
+    @libs
+  } elsif (-l $lib) {
+    (realpath($lib))
+  } else {
+    ()
+  }
+}
+
+if (defined $ARGV[0]) {
+  my $deps = derivationFromPath($ARGV[0])->{"env"}->{"__impureHostDeps"};
+  if (defined $deps) {
+    my @files = split(/\s+/, $deps);
+    my $depcache = {};
+    my $depset = reduce { union($a, $b) } (map { resolve_tree($_, $depcache) } @files);
+    print "extra-chroot-dirs\n";
+    print join("\n", keys %$depset);
+    print "\n";
+  }
+  lock_store($DEPS, $cache);
+} else {
+  print STDERR "Usage: $0 path/to/derivation.drv\n";
+  exit 1
+}