author     Eelco Dolstra <e.dolstra@tudelft.nl>   2011-12-16 23:33 +0000
committer  Eelco Dolstra <e.dolstra@tudelft.nl>   2011-12-16 23:33 +0000
commit     194d21f9f63ceb034f3e8294f89aa6bf6a217bc9 (patch)
tree       7eb6da5955482a82f4d34b60dcb10514a4a55f59 /scripts
parent     3c3107da86ff71a08ce44027ee5899acf486796a (diff)
parent     273b288a7e862ac1918064537ff130cc751fa9fd (diff)
* Sync with the trunk.
Diffstat (limited to 'scripts')
 -rwxr-xr-x  scripts/build-remote.pl.in                11
 -rwxr-xr-x  scripts/download-using-manifests.pl.in    66
 -rwxr-xr-x  scripts/nix-copy-closure.in               48
 -rwxr-xr-x  scripts/nix-prefetch-url.in              233
 -rwxr-xr-x  scripts/nix-pull.in                       35
 -rwxr-xr-x  scripts/nix-push.in                        4
6 files changed, 157 insertions, 240 deletions
diff --git a/scripts/build-remote.pl.in b/scripts/build-remote.pl.in
index e8c76086dae2..c805d6740ef3 100755
--- a/scripts/build-remote.pl.in
+++ b/scripts/build-remote.pl.in
@@ -3,7 +3,9 @@
 use Fcntl ':flock';
 use English '-no_match_vars';
 use IO::Handle;
+use Nix::Config;
 use Nix::SSH qw/sshOpts openSSHConnection/;
+use Nix::CopyClosure;
 no warnings('once');
 
 
@@ -208,7 +210,7 @@ print STDERR "@ build-remote $drvPath $hostName\n" if $printBuildTrace;
 
 
 my $maybeSign = "";
-$maybeSign = "--sign" if -e "/nix/etc/nix/signing-key.sec";
+$maybeSign = "--sign" if -e "$Nix::Config::confDir/signing-key.sec";
 
 
 # Register the derivation as a temporary GC root.  Note that $PPID is
@@ -224,8 +226,7 @@ sub removeRoots {
 
 
 # Copy the derivation and its dependencies to the build machine.
-system("NIX_SSHOPTS=\"@sshOpts\" @bindir@/nix-copy-closure $hostName $maybeSign $drvPath @inputs") == 0
-    or die "cannot copy inputs to $hostName: $?";
+Nix::CopyClosure::copyTo($hostName, [ @sshOpts ], [ $drvPath, @inputs ], "", "", 0, 0, $maybeSign ne "");
 
 
 # Perform the build.
@@ -239,7 +240,7 @@ my $buildFlags = "--max-silent-time $maxSilentTime --fallback --add-root $rootsD
 # in which case every child receives SIGHUP; however, `-tt' doesn't
 # work on some platforms when connection sharing is used.)
 pipe STDIN, DUMMY; # make sure we have a readable STDIN
-if (system("ssh $hostName @sshOpts '(read; kill -INT -\$\$) <&0 & nix-store -r $drvPath $buildFlags > /dev/null' 2>&4") != 0) {
+if (system("exec ssh $hostName @sshOpts '(read; kill -INT -\$\$) <&0 & nix-store -r $drvPath $buildFlags > /dev/null' 2>&4") != 0) {
     # Note that if we get exit code 100 from `nix-store -r', it
     # denotes a permanent build failure (as opposed to an SSH problem
     # or a temporary Nix problem).  We propagate this to the caller to
@@ -259,7 +260,7 @@ foreach my $output (@outputs) {
     my $maybeSignRemote = "";
     $maybeSignRemote = "--sign" if $UID != 0;
     
-    system("ssh $hostName @sshOpts 'nix-store --export $maybeSignRemote $output'" .
+    system("exec ssh $hostName @sshOpts 'nix-store --export $maybeSignRemote $output'" .
            "| NIX_HELD_LOCKS=$output @bindir@/nix-store --import > /dev/null") == 0
 	or die "cannot copy $output from $hostName: $?";
 }
diff --git a/scripts/download-using-manifests.pl.in b/scripts/download-using-manifests.pl.in
index a827a995f919..ef663dabb1ef 100755
--- a/scripts/download-using-manifests.pl.in
+++ b/scripts/download-using-manifests.pl.in
@@ -3,6 +3,7 @@
 use strict;
 use Nix::Config;
 use Nix::Manifest;
+use Nix::Store;
 use POSIX qw(strftime);
 use File::Temp qw(tempdir);
 
@@ -19,14 +20,8 @@ my $fast = 1;
 my $dbh = updateManifestDB();
 
 
-sub isValidPath {
-    my $p = shift;
-    if ($fast) {
-        return -e $p;
-    } else {
-        return system("$Nix::Config::binDir/nix-store --check-validity '$p' 2> /dev/null") == 0;
-    }
-}
+# $hashCache->{$algo}->{$path} yields the $algo-hash of $path.
+my $hashCache;
 
 
 sub parseHash {
@@ -101,15 +96,17 @@ sub computeSmallestDownload {
             
             foreach my $patch (@{$patchList}) {
                 if (isValidPath($patch->{basePath})) {
-                    # !!! this should be cached
                     my ($baseHashAlgo, $baseHash) = parseHash $patch->{baseHash};
-                    my $format = "--base32";
-                    $format = "" if $baseHashAlgo eq "md5";
-                    my $hash = $fast && $baseHashAlgo eq "sha256"
-                        ? `$Nix::Config::binDir/nix-store -q --hash "$patch->{basePath}"`
-                        : `$Nix::Config::binDir/nix-hash --type '$baseHashAlgo' $format "$patch->{basePath}"`;
-                    chomp $hash;
-                    $hash =~ s/.*://;
+
+                    my $hash = $hashCache->{$baseHashAlgo}->{$patch->{basePath}};
+                    if (!defined $hash) {
+                        $hash = $fast && $baseHashAlgo eq "sha256"
+                            ? queryPathHash($patch->{basePath})
+                            : hashPath($baseHashAlgo, $baseHashAlgo ne "md5", $patch->{basePath});
+                        $hash =~ s/.*://;
+                        $hashCache->{$baseHashAlgo}->{$patch->{basePath}} = $hash;
+                    }
+                    
                     next if $hash ne $baseHash;
                 }
                 push @queue, $patch->{basePath};
@@ -257,7 +254,7 @@ open LOGFILE, ">>$logFile" or die "cannot open log file $logFile";
 my $date = strftime ("%F %H:%M:%S UTC", gmtime (time));
 print LOGFILE "$$ get $targetPath $date\n";
 
-print "\n*** Trying to download/patch `$targetPath'\n";
+print STDERR "\n*** Trying to download/patch `$targetPath'\n";
 
 
 # Compute the shortest path.
@@ -281,7 +278,7 @@ sub downloadFile {
     $ENV{"PRINT_PATH"} = 1;
     $ENV{"QUIET"} = 1;
     my ($hash, $path) = `$Nix::Config::binDir/nix-prefetch-url '$url'`;
-    die "download of `$url' failed" . ($! ? ": $!" : "") unless $? == 0;
+    die "download of `$url' failed" . ($! ? ": $!" : "") . "\n" unless $? == 0;
     chomp $path;
     return $path;
 }
@@ -293,17 +290,17 @@ while (scalar @path > 0) {
     my $u = $edge->{start};
     my $v = $edge->{end};
 
-    print "\n*** Step $curStep/$maxStep: ";
+    print STDERR "\n*** Step $curStep/$maxStep: ";
 
     if ($edge->{type} eq "present") {
-        print "using already present path `$v'\n";
+        print STDERR "using already present path `$v'\n";
         print LOGFILE "$$ present $v\n";
 
         if ($curStep < $maxStep) {
             # Since this is not the last step, the path will be used
             # as a base to one or more patches.  So turn the base path
             # into a NAR archive, to which we can apply the patch.
-            print "  packing base path...\n";
+            print STDERR "  packing base path...\n";
             system("$Nix::Config::binDir/nix-store --dump $v > $tmpNar") == 0
                 or die "cannot dump `$v'";
         }
@@ -311,17 +308,17 @@ while (scalar @path > 0) {
 
     elsif ($edge->{type} eq "patch") {
         my $patch = $edge->{info};
-        print "applying patch `$patch->{url}' to `$u' to create `$v'\n";
+        print STDERR "applying patch `$patch->{url}' to `$u' to create `$v'\n";
 
         print LOGFILE "$$ patch $patch->{url} $patch->{size} $patch->{baseHash} $u $v\n";
 
         # Download the patch.
-        print "  downloading patch...\n";
+        print STDERR "  downloading patch...\n";
         my $patchPath = downloadFile "$patch->{url}";
 
         # Apply the patch to the NAR archive produced in step 1 (for
         # the already present path) or a later step (for patch sequences).
-        print "  applying patch...\n";
+        print STDERR "  applying patch...\n";
         system("$Nix::Config::libexecDir/bspatch $tmpNar $tmpNar2 $patchPath") == 0
             or die "cannot apply patch `$patchPath' to $tmpNar";
 
@@ -331,7 +328,7 @@ while (scalar @path > 0) {
         } else {
             # This was the last patch.  Unpack the final NAR archive
             # into the target path.
-            print "  unpacking patched archive...\n";
+            print STDERR "  unpacking patched archive...\n";
             system("$Nix::Config::binDir/nix-store --restore $v < $tmpNar2") == 0
                 or die "cannot unpack $tmpNar2 into `$v'";
         }
@@ -341,13 +338,13 @@ while (scalar @path > 0) {
 
     elsif ($edge->{type} eq "narfile") {
         my $narFile = $edge->{info};
-        print "downloading `$narFile->{url}' into `$v'\n";
+        print STDERR "downloading `$narFile->{url}' into `$v'\n";
 
         my $size = $narFile->{size} || -1;
         print LOGFILE "$$ narfile $narFile->{url} $size $v\n";
         
         # Download the archive.
-        print "  downloading archive...\n";
+        print STDERR "  downloading archive...\n";
         my $narFilePath = downloadFile "$narFile->{url}";
 
         if ($curStep < $maxStep) {
@@ -356,7 +353,7 @@ while (scalar @path > 0) {
                 or die "cannot unpack `$narFilePath' into `$v'";
         } else {
             # Unpack the archive into the target path.
-            print "  unpacking archive...\n";
+            print STDERR "  unpacking archive...\n";
             system("$Nix::Config::bzip2 -d < '$narFilePath' | $Nix::Config::binDir/nix-store --restore '$v'") == 0
                 or die "cannot unpack `$narFilePath' into `$v'";
         }
@@ -376,20 +373,15 @@ if (defined $finalNarHash) {
 
     # The hash in the manifest can be either in base-16 or base-32.
     # Handle both.
-    my $extraFlag =
-        ($hashAlgo eq "sha256" && length($hash) != 64)
-        ? "--base32" : "";
-    
-    my $hash2 = `$Nix::Config::binDir/nix-hash --type $hashAlgo $extraFlag $targetPath`
-        or die "cannot compute hash of path `$targetPath'";
-    chomp $hash2;
+    my $hash2 = hashPath($hashAlgo, $hashAlgo eq "sha256" && length($hash) != 64, $targetPath);
     
-    die "hash mismatch in downloaded path $targetPath; expected $hash, got $hash2"
+    die "hash mismatch in downloaded path $targetPath; expected $hash, got $hash2\n"
         if $hash ne $hash2;
 } else {
-    die "cannot check integrity of the downloaded path since its hash is not known";
+    die "cannot check integrity of the downloaded path since its hash is not known\n";
 }
 
 
+print STDERR "\n";
 print LOGFILE "$$ success\n";
 close LOGFILE;
diff --git a/scripts/nix-copy-closure.in b/scripts/nix-copy-closure.in
index 172acd9e7da2..8bb60e920ef2 100755
--- a/scripts/nix-copy-closure.in
+++ b/scripts/nix-copy-closure.in
@@ -3,11 +3,12 @@
 use Nix::SSH;
 use Nix::Config;
 use Nix::Store;
+use Nix::CopyClosure;
 
 
 if (scalar @ARGV < 1) {
     print STDERR <<EOF
-Usage: nix-copy-closure [--from | --to] HOSTNAME [--sign] [--gzip] PATHS...
+Usage: nix-copy-closure [--from | --to] HOSTNAME [--sign] [--gzip] [--bzip2] [--xz] PATHS...
 EOF
     ;
     exit 1;
@@ -39,8 +40,16 @@ while (@ARGV) {
         $sign = 1;
     }
     elsif ($arg eq "--gzip") {
-        $compressor = "| gzip";
-        $decompressor = "gunzip |";
+        $compressor = "gzip";
+        $decompressor = "gunzip";
+    }
+    elsif ($arg eq "--bzip2") {
+        $compressor = "bzip2";
+        $decompressor = "bunzip2";
+    }
+    elsif ($arg eq "--xz") {
+        $compressor = "xz";
+        $decompressor = "xz -d";
     }
     elsif ($arg eq "--from") {
         $toMode = 0;
@@ -67,30 +76,7 @@ openSSHConnection $sshHost or die "$0: unable to start SSH\n";
 
 
 if ($toMode) { # Copy TO the remote machine.
-
-    # Get the closure of this path.
-    my @allStorePaths = reverse(topoSortPaths(computeFSClosure(0, $includeOutputs, map { followLinksToStorePath $_ } @storePaths)));
-
-    # Ask the remote host which paths are invalid.
-    open(READ, "set -f; ssh $sshHost @sshOpts nix-store --check-validity --print-invalid @allStorePaths|");
-    my @missing = ();
-    while (<READ>) {
-        chomp;
-        push @missing, $_;
-    }
-    close READ or die;
-
-    # Export the store paths and import them on the remote machine.
-    if (scalar @missing > 0) {
-        print STDERR "copying these missing paths:\n";
-        print STDERR "  $_\n" foreach @missing;
-        unless ($dryRun) {
-            my $extraOpts = $sign ? "--sign" : "";
-            system("set -f; nix-store --export $extraOpts @missing $compressor | ssh $sshHost @sshOpts '$decompressor nix-store --import'") == 0
-                or die "copying store paths to remote machine `$sshHost' failed: $?";
-        }
-    }
-
+    Nix::CopyClosure::copyTo($sshHost, [ @sshOpts ], [ @storePaths ], $compressor, $decompressor, $includeOutputs, $dryRun, $sign);
 }
 
 else { # Copy FROM the remote machine.
@@ -110,10 +96,12 @@ else { # Copy FROM the remote machine.
 
     close READ or die "nix-store on remote machine `$sshHost' failed: $?";
 
-    # Export the store paths on the remote machine and import them on locally.
+    # Export the store paths on the remote machine and import them locally.
     if (scalar @missing > 0) {
-        print STDERR "copying these missing paths:\n";
-        print STDERR "  $_\n" foreach @missing;
+        print STDERR "copying ", scalar @missing, " missing paths from ‘$sshHost’...\n";
+        #print STDERR "  $_\n" foreach @missing;
+        $compressor = "| $compressor" if $compressor ne "";
+        $decompressor = "$decompressor |" if $decompressor ne "";
         unless ($dryRun) {
             my $extraOpts = $sign ? "--sign" : "";
             system("set -f; ssh $sshHost @sshOpts 'nix-store --export $extraOpts @missing $compressor' | $decompressor $Nix::Config::binDir/nix-store --import") == 0
diff --git a/scripts/nix-prefetch-url.in b/scripts/nix-prefetch-url.in
index 45bad75f3e9f..eea2b814b733 100755
--- a/scripts/nix-prefetch-url.in
+++ b/scripts/nix-prefetch-url.in
@@ -1,165 +1,128 @@
-#! @shell@ -e
-
-url=$1
-expHash=$2
-
-binDir=@bindir@
-if [ -n "$NIX_BIN_DIR" ]; then binDir="$NIX_BIN_DIR"; fi
+#! @perl@ -w @perlFlags@
+
+use strict;
+use File::Basename;
+use File::Temp qw(tempdir);
+use File::stat;
+use Nix::Store;
+use Nix::Config;
+
+my $url = shift;
+my $expHash = shift;
+my $hashType = $ENV{'NIX_HASH_ALGO'} || "sha256";
+my $cacheDir = $ENV{'NIX_DOWNLOAD_CACHE'};
+
+if (!defined $url || $url eq "") {
+    print STDERR <<EOF
+Usage: nix-prefetch-url URL [EXPECTED-HASH]
+EOF
+    ;
+    exit 1;
+}
 
-# needed to make it work on NixOS
-export PATH=$PATH:@coreutils@
+sub writeFile {
+    my ($fn, $s) = @_;
+    open TMP, ">$fn" or die;
+    print TMP "$s" or die;
+    close TMP or die;
+}
 
-hashType=$NIX_HASH_ALGO
-if test -z "$hashType"; then
-    hashType=sha256
-fi
+sub readFile {
+    local $/ = undef;
+    my ($fn) = @_;
+    open TMP, "<$fn" or die;
+    my $s = <TMP>;
+    close TMP or die;
+    return $s;
+}
 
-hashFormat=
-if test "$hashType" != "md5"; then
-    hashFormat=--base32
-fi
+my $tmpDir = tempdir("nix-prefetch-url.XXXXXX", CLEANUP => 1, TMPDIR => 1)
+    or die "cannot create a temporary directory";
 
-if test -z "$url"; then
-    echo "syntax: nix-prefetch-url URL [EXPECTED-HASH]" >&2
-    exit 1
-fi
+# Hack to support the mirror:// scheme from Nixpkgs.
+if ($url =~ /^mirror:\/\//) {
+    system("$Nix::Config::binDir/nix-build '<nixpkgs>' -A resolveMirrorURLs --argstr url '$url' -o $tmpDir/urls > /dev/null") == 0
+        or die "$0: nix-build failed; maybe \$NIX_PATH is not set properly\n";
+    my @expanded = split ' ', readFile("$tmpDir/urls");
+    die "$0: cannot resolve ‘$url’" unless scalar @expanded > 0;
+    print STDERR "$url expands to $expanded[0]\n";
+    $url = $expanded[0];
+}
 
 # Handle escaped characters in the URI.  `+', `=' and `?' are the only
 # characters that are valid in Nix store path names but have a special
 # meaning in URIs.
-name=$(basename "$url" | @sed@ -e 's/%2b/+/g' -e 's/%3d/=/g' -e 's/%3f/\?/g')
-if test -z "$name"; then echo "invalid url"; exit 1; fi
+my $name = basename $url;
+die "cannot figure out file name for ‘$url’\n" if $name eq ""; 
+$name =~ s/%2b/+/g;
+$name =~ s/%3d/=/g;
+$name =~ s/%3f/?/g;
 
+my $finalPath;
+my $hash;
 
 # If the hash was given, a file with that hash may already be in the
 # store.
-if test -n "$expHash"; then
-    finalPath=$($binDir/nix-store --print-fixed-path "$hashType" "$expHash" "$name")
-    if ! $bindir/nix-store --check-validity "$finalPath" 2> /dev/null; then
-        finalPath=
-    fi
-    hash=$expHash
-fi
-
-
-mkTempDir() {
-    if test -n "$tmpPath"; then return; fi
-    local i=0
-    while true; do
-        if test -z "$TMPDIR"; then TMPDIR=/tmp; fi
-        tmpPath=$TMPDIR/nix-prefetch-url-$$-$i
-        if mkdir "$tmpPath"; then break; fi
-        # !!! to bad we can't check for ENOENT in mkdir, so this check
-        # is slightly racy (it bombs out if somebody just removed
-        # $tmpPath...).
-        if ! test -e "$tmpPath"; then exit 1; fi
-        i=$((i + 1))
-    done
-    trap removeTempDir EXIT SIGINT SIGQUIT
+if (defined $expHash) {
+    $finalPath = makeFixedOutputPath(0, $hashType, $expHash, $name);
+    if (isValidPath($finalPath)) { $hash = $expHash; } else { $finalPath = undef; }
 }
 
-removeTempDir() {
-    if test -n "$tmpPath"; then
-        rm -rf "$tmpPath" || true
-    fi
-}
-
-
-doDownload() {
-    @curl@ $cacheFlags --fail --location --max-redirs 20 --disable-epsv \
-        --cookie-jar $tmpPath/cookies "$url" -o $tmpFile
-}
-
-
-# Hack to support the mirror:// scheme from Nixpkgs.
-if test "${url:0:9}" = "mirror://"; then
-    if test -z "$NIXPKGS_ALL"; then
-        echo "Resolving mirror:// URLs requires Nixpkgs.  Please point \$NIXPKGS_ALL at a Nixpkgs tree." >&2
-        exit 1
-    fi
-
-    mkTempDir
-    nix-build "$NIXPKGS_ALL" -A resolveMirrorURLs --argstr url "$url" -o $tmpPath/urls > /dev/null
-
-    expanded=($(cat $tmpPath/urls))
-    if test "${#expanded[*]}" = 0; then
-        echo "$0: cannot resolve $url." >&2
-        exit 1
-    fi
-
-    echo "$url expands to ${expanded[*]} (using ${expanded[0]})" >&2
-    url="${expanded[0]}"
-fi
-
-
 # If we don't know the hash or a file with that hash doesn't exist,
 # download the file and add it to the store.
-if test -z "$finalPath"; then
-
-    mkTempDir
-    tmpFile=$tmpPath/$name
+if (!defined $finalPath) {
 
+    my $tmpFile = "$tmpDir/$name";
+    
     # Optionally do timestamp-based caching of the download.
     # Actually, the only thing that we cache in $NIX_DOWNLOAD_CACHE is
     # the hash and the timestamp of the file at $url.  The caching of
     # the file *contents* is done in Nix store, where it can be
     # garbage-collected independently.
-    if test -n "$NIX_DOWNLOAD_CACHE"; then
-        echo -n "$url" > $tmpPath/url
-        urlHash=$($binDir/nix-hash --type sha256 --base32 --flat $tmpPath/url)
-        echo "$url" > "$NIX_DOWNLOAD_CACHE/$urlHash.url"
-        cachedHashFN="$NIX_DOWNLOAD_CACHE/$urlHash.$hashType"
-        cachedTimestampFN="$NIX_DOWNLOAD_CACHE/$urlHash.stamp"
-        cacheFlags="--remote-time"
-        if test -e "$cachedTimestampFN" -a -e "$cachedHashFN"; then
-            # Only download the file if it is newer than the cached version.
-            cacheFlags="$cacheFlags --time-cond $cachedTimestampFN"
-        fi
-    fi
-
+    my ($cachedTimestampFN, $cachedHashFN, @cacheFlags);
+    if (defined $cacheDir) {
+        my $urlHash = hashString("sha256", 1, $url);
+        writeFile "$cacheDir/$urlHash.url", $url;
+        $cachedHashFN = "$cacheDir/$urlHash.$hashType";
+        $cachedTimestampFN = "$cacheDir/$urlHash.stamp";
+        @cacheFlags = ("--time-cond", $cachedTimestampFN) if -f $cachedHashFN && -f $cachedTimestampFN;
+    }
+    
     # Perform the download.
-    doDownload
+    my @curlFlags = ("curl", $url, "-o", $tmpFile, "--fail", "--location", "--max-redirs", "20", "--disable-epsv", "--cookie-jar", "$tmpDir/cookies", "--remote-time", (split " ", ($ENV{NIX_CURL_FLAGS} || "")));
+    (system $Nix::Config::curl @curlFlags, @cacheFlags) == 0 or die "$0: download of ‘$url’ failed\n";
 
-    if test -n "$NIX_DOWNLOAD_CACHE" -a ! -e $tmpFile; then
+    if (defined $cacheDir && ! -e $tmpFile) {
         # Curl didn't create $tmpFile, so apparently there's no newer
         # file on the server.
-        hash=$(cat $cachedHashFN)
-        finalPath=$($binDir/nix-store --print-fixed-path "$hashType" "$hash" "$name") 
-        if ! $binDir/nix-store --check-validity "$finalPath" 2> /dev/null; then
-            echo "cached contents of \`$url' disappeared, redownloading..." >&2
-            finalPath=
-            cacheFlags="--remote-time"
-            doDownload
-        fi
-    fi
-
-    if test -z "$finalPath"; then
-
+        $hash = readFile $cachedHashFN or die;
+        $finalPath = makeFixedOutputPath(0, $hashType, $hash, $name);
+        unless (isValidPath $finalPath) {
+            print STDERR "cached contents of ‘$url’ disappeared, redownloading...\n";
+            $finalPath = undef;
+            (system $Nix::Config::curl @curlFlags) == 0 or die "$0: download of ‘$url’ failed\n";
+        }
+    }
+
+    if (!defined $finalPath) {
+        
         # Compute the hash.
-        hash=$($binDir/nix-hash --type "$hashType" $hashFormat --flat $tmpFile)
-        if ! test -n "$QUIET"; then echo "hash is $hash" >&2; fi
-
-        if test -n "$NIX_DOWNLOAD_CACHE"; then
-            echo $hash > $cachedHashFN
-            touch -r $tmpFile $cachedTimestampFN
-        fi
-
+        $hash = hashFile($hashType, $hashType ne "md5", $tmpFile);
+
+        if (defined $cacheDir) {
+            writeFile $cachedHashFN, $hash;
+            my $st = stat($tmpFile) or die;
+            open STAMP, ">$cachedTimestampFN" or die; close STAMP;
+            utime($st->atime, $st->mtime, $cachedTimestampFN) or die;
+        }
+    
         # Add the downloaded file to the Nix store.
-        finalPath=$($binDir/nix-store --add-fixed "$hashType" $tmpFile)
-
-        if test -n "$expHash" -a "$expHash" != "$hash"; then
-            echo "hash mismatch for URL \`$url'" >&2
-            exit 1
-        fi
-        
-    fi
-fi
+        $finalPath = addToStore($tmpFile, 0, $hashType);
+    }
 
+    die "$0: hash mismatch for ‘$url’\n" if defined $expHash && $expHash ne $hash;
+}
 
-if ! test -n "$QUIET"; then echo "path is $finalPath" >&2; fi
-
-echo $hash
-
-if test -n "$PRINT_PATH"; then
-    echo $finalPath
-fi
+print STDERR "path is ‘$finalPath’\n" unless $ENV{'QUIET'};
+print "$hash\n";
+print "$finalPath\n" if $ENV{'PRINT_PATH'};
diff --git a/scripts/nix-pull.in b/scripts/nix-pull.in
index f3cba0c02619..74545a35004c 100755
--- a/scripts/nix-pull.in
+++ b/scripts/nix-pull.in
@@ -33,10 +33,6 @@ if (! -l $manifestDirLink) {
 
 
 # Process the URLs specified on the command line.
-my %narFiles;
-my %patches;
-
-my $skipWrongStore = 0;
 
 sub downloadFile {
     my $url = shift;
@@ -59,16 +55,7 @@ sub processURL {
     # First see if a bzipped manifest is available.
     if (system("$Nix::Config::curl --fail --silent --head '$url'.bz2 > /dev/null") == 0) {
         print "fetching list of Nix archives at `$url.bz2'...\n";
-        my $bzipped = downloadFile "$url.bz2";
-
-        $manifest = "$tmpDir/MANIFEST";
-
-        system("$Nix::Config::bzip2 -d < $bzipped > $manifest") == 0
-            or die "cannot decompress manifest";
-
-        $manifest = (`$Nix::Config::binDir/nix-store --add $manifest`
-                     or die "cannot copy $manifest to the store");
-        chomp $manifest;
+        $manifest = downloadFile "$url.bz2";
     }
 
     # Otherwise, just get the uncompressed manifest.
@@ -77,20 +64,6 @@ sub processURL {
         $manifest = downloadFile $url;
     }
 
-    my $version = readManifest($manifest, \%narFiles, \%patches);
-    
-    die "`$url' is not a manifest or it is too old (i.e., for Nix <= 0.7)\n" if $version < 3;
-    die "manifest `$url' is too new\n" if $version >= 5;
-
-    if ($skipWrongStore) {
-        foreach my $path (keys %narFiles) {
-            if (substr($path, 0, length($storeDir) + 1) ne "$storeDir/") {
-                print STDERR "warning: manifest `$url' assumes a Nix store at a different location than $storeDir, skipping...\n";
-                exit 0;
-            }
-        }
-    }
-
     my $baseName = "unnamed";
     if ($url =~ /\/([^\/]+)\/[^\/]+$/) { # get the forelast component
         $baseName = $1;
@@ -129,12 +102,12 @@ sub processURL {
 while (@ARGV) {
     my $url = shift @ARGV;
     if ($url eq "--skip-wrong-store") {
-        $skipWrongStore = 1;
+        # No-op, no longer supported.
     } else {
         processURL $url;
     }
 }
 
 
-my $size = scalar (keys %narFiles);
-print "$size store paths in manifest\n";
+# Update the cache.
+updateManifestDB();
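With the per-manifest bookkeeping removed, nix-pull now just registers each manifest and rebuilds the manifest cache once at the end via updateManifestDB(); --skip-wrong-store is still accepted but ignored. A usage sketch with a placeholder channel URL:

    # Pull a manifest; a bzipped copy at $url.bz2 is preferred when the
    # server offers one, otherwise the uncompressed manifest is fetched.
    nix-pull http://example.org/channels/nixpkgs-unstable/MANIFEST

    # The old flag still parses, but it is a no-op after this change.
    nix-pull --skip-wrong-store http://example.org/channels/nixpkgs-unstable/MANIFEST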
diff --git a/scripts/nix-push.in b/scripts/nix-push.in
index dcdad5721265..cf46d00dfb06 100755
--- a/scripts/nix-push.in
+++ b/scripts/nix-push.in
@@ -198,8 +198,8 @@ for (my $n = 0; $n < scalar @storePaths; $n++) {
     # In some exceptional cases (such as VM tests that use the Nix
     # store of the host), the database doesn't contain the hash.  So
     # compute it.
-    if ($narHash eq "sha256:0000000000000000000000000000000000000000000000000000") {
-        $narHash = `$binDir/nix-hash --type sha256 '$storePath'`;
+    if ($narHash =~ /^sha256:0*$/) {
+        $narHash = `$binDir/nix-hash --type sha256 --base32 '$storePath'`;
         die "cannot hash `$storePath'" if $? != 0;
         chomp $narHash;
         $narHash = "sha256:$narHash";