author     Eelco Dolstra <eelco.dolstra@logicblox.com>  2012-07-27 14:47 -0400
committer  Eelco Dolstra <eelco.dolstra@logicblox.com>  2012-07-27 14:47 -0400
commit     3a8f841612f08b9be11cc5346fa3c025413282d6 (patch)
tree       b0efd25e2a3ec292e5dc9da4aca97bd4d73abe85 /scripts/download-using-manifests.pl.in
parent     b4ea83249b40dd910daa6a8ee32f13e023e9c858 (diff)
download-using-manifests: Don't use nix-prefetch-url
Instead, call curl directly and pipe it into ‘nix-store --restore’.
This saves I/O and prevents creating garbage in the Nix store.
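
The essence of the change is the pipeline sketched below: a minimal Perl fragment with hypothetical $url and $storePath placeholders, reusing the same Nix::Config settings the script already relies on. The real code in the diff additionally handles patch chains, progress logging, and the final NAR hash check.

    use strict;
    use Nix::Config;

    # Hypothetical placeholders for illustration only.
    my $url       = "http://example.org/nar/example.nar.bz2";
    my $storePath = "/nix/store/example-path";

    # ‘--insecure’ is acceptable because the NAR hash from the manifest is
    # verified after unpacking.
    my $curl = "$Nix::Config::curl --fail --location --insecure";

    # Stream the download through bunzip2 straight into ‘nix-store --restore’,
    # so no intermediate file is created in the Nix store.
    system("$curl '$url' | $Nix::Config::bzip2 -d | $Nix::Config::binDir/nix-store --restore '$storePath'") == 0
        or die "cannot download and unpack `$url' into `$storePath'\n";

The ‘--insecure’ flag only disables TLS certificate checking; integrity still rests on the NAR hash comparison at the end of the script.
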
Diffstat
-rwxr-xr-x  scripts/download-using-manifests.pl.in | 70
1 file changed, 29 insertions(+), 41 deletions(-)
diff --git a/scripts/download-using-manifests.pl.in b/scripts/download-using-manifests.pl.in
index f00debc685..ed63e792ea 100755
--- a/scripts/download-using-manifests.pl.in
+++ b/scripts/download-using-manifests.pl.in
@@ -15,6 +15,9 @@ my $logFile = "$Nix::Config::logDir/downloads";
 # estimating the expected download size.
 my $fast = 1;
 
+# ‘--insecure’ is fine because Nix verifies the hash of the result.
+my $curl = "$Nix::Config::curl --fail --location --insecure";
+
 
 # Open the manifest cache and update it if necessary.
 my $dbh = updateManifestDB();
@@ -38,7 +41,7 @@ sub parseHash {
 # given path.
 sub computeSmallestDownload {
     my $targetPath = shift;
-    
+
     # Build a graph of all store paths that might contribute to the
     # construction of $targetPath, and the special node "start".  The
     # edges are either patch operations, or downloads of full NAR
@@ -93,7 +96,7 @@ sub computeSmallestDownload {
             my $patchList = $dbh->selectall_arrayref(
                 "select * from Patches where storePath = ?",
                 { Slice => {} }, $u);
-            
+
             foreach my $patch (@{$patchList}) {
                 if (isValidPath($patch->{basePath})) {
                     my ($baseHashAlgo, $baseHash) = parseHash $patch->{baseHash};
@@ -106,7 +109,7 @@ sub computeSmallestDownload {
                         $hash =~ s/.*://;
                         $hashCache->{$baseHashAlgo}->{$patch->{basePath}} = $hash;
                     }
-                    
+
                     next if $hash ne $baseHash;
                 }
                 push @queue, $patch->{basePath};
@@ -117,7 +120,7 @@ sub computeSmallestDownload {
             my $narFileList = $dbh->selectall_arrayref(
                 "select * from NARs where storePath = ?",
                 { Slice => {} }, $u);
-                
+
             foreach my $narFile (@{$narFileList}) {
                 # !!! how to handle files whose size is not known in advance?
                 # For now, assume some arbitrary size (1 GB).
@@ -189,7 +192,7 @@ if ($ARGV[0] eq "--query") {
                 my $infos = $dbh->selectall_arrayref(
                     "select * from NARs where storePath = ?",
                     { Slice => {} }, $storePath);
-            
+
                 next unless scalar @{$infos} > 0;
                 my $info = @{$infos}[0];
 
@@ -215,14 +218,14 @@ if ($ARGV[0] eq "--query") {
                 }
 
                 print "$downloadSize\n";
-            
+
                 my $narSize = $info->{narSize} || 0;
                 print "$narSize\n";
             }
 
             print "\n";
         }
-        
+
         else { die "unknown command `$cmd'"; }
     }
 
@@ -271,16 +274,6 @@ $dbh->disconnect;
 my $curStep = 1;
 my $maxStep = scalar @path;
 
-sub downloadFile { 
-    my $url = shift; 
-    $ENV{"PRINT_PATH"} = 1;
-    $ENV{"QUIET"} = 1;
-    my ($hash, $path) = `$Nix::Config::binDir/nix-prefetch-url '$url'`;
-    die "download of `$url' failed" . ($! ? ": $!" : "") . "\n" unless $? == 0;
-    chomp $path;
-    return $path;
-}
-
 my $finalNarHash;
 
 while (scalar @path > 0) {
@@ -312,13 +305,15 @@ while (scalar @path > 0) {
 
         # Download the patch.
         print STDERR "  downloading patch...\n";
-        my $patchPath = downloadFile "$patch->{url}";
+        my $patchPath = "$tmpDir/patch";
+        system("$curl '$patch->{url}' -o $patchPath") == 0
+            or die "cannot download patch `$patch->{url}'\n";
 
         # Apply the patch to the NAR archive produced in step 1 (for
         # the already present path) or a later step (for patch sequences).
         print STDERR "  applying patch...\n";
         system("$Nix::Config::libexecDir/bspatch $tmpNar $tmpNar2 $patchPath") == 0
-            or die "cannot apply patch `$patchPath' to $tmpNar";
+            or die "cannot apply patch `$patchPath' to $tmpNar\n";
 
         if ($curStep < $maxStep) {
             # The archive will be used as the base of the next patch.
@@ -328,7 +323,7 @@ while (scalar @path > 0) {
             # into the target path.
             print STDERR "  unpacking patched archive...\n";
             system("$Nix::Config::binDir/nix-store --restore $v < $tmpNar2") == 0
-                or die "cannot unpack $tmpNar2 into `$v'";
+                or die "cannot unpack $tmpNar2 into `$v'\n";
         }
 
         $finalNarHash = $patch->{narHash};
@@ -340,20 +335,15 @@ while (scalar @path > 0) {
 
         my $size = $narFile->{size} || -1;
         print LOGFILE "$$ narfile $narFile->{url} $size $v\n";
-        
-        # Download the archive.
-        print STDERR "  downloading archive...\n";
-        my $narFilePath = downloadFile "$narFile->{url}";
 
         if ($curStep < $maxStep) {
             # The archive will be used a base to a patch.
-            system("$Nix::Config::bzip2 -d < '$narFilePath' > $tmpNar") == 0
-                or die "cannot unpack `$narFilePath' into `$v'";
+            system("$curl '$narFile->{url}' | $Nix::Config::bzip2 -d > $tmpNar") == 0
+                or die "cannot download and unpack `$narFile->{url}' into `$v'\n";
         } else {
             # Unpack the archive into the target path.
-            print STDERR "  unpacking archive...\n";
-            system("$Nix::Config::bzip2 -d < '$narFilePath' | $Nix::Config::binDir/nix-store --restore '$v'") == 0
-                or die "cannot unpack `$narFilePath' into `$v'";
+            system("$curl '$narFile->{url}' | $Nix::Config::bzip2 -d | $Nix::Config::binDir/nix-store --restore '$v'") == 0
+                or die "cannot download and unpack `$narFile->{url}' into `$v'\n";
         }
 
         $finalNarHash = $narFile->{narHash};
@@ -365,19 +355,17 @@ while (scalar @path > 0) {
 
 # Make sure that the hash declared in the manifest matches what we
 # downloaded and unpacked.
+die "cannot check integrity of the downloaded path since its hash is not known\n"
+    unless defined $finalNarHash;
 
-if (defined $finalNarHash) {
-    my ($hashAlgo, $hash) = parseHash $finalNarHash;
-
-    # The hash in the manifest can be either in base-16 or base-32.
-    # Handle both.
-    my $hash2 = hashPath($hashAlgo, $hashAlgo eq "sha256" && length($hash) != 64, $targetPath);
-    
-    die "hash mismatch in downloaded path $targetPath; expected $hash, got $hash2\n"
-        if $hash ne $hash2;
-} else {
-    die "cannot check integrity of the downloaded path since its hash is not known\n";
-}
+my ($hashAlgo, $hash) = parseHash $finalNarHash;
+
+# The hash in the manifest can be either in base-16 or base-32.
+# Handle both.
+my $hash2 = hashPath($hashAlgo, $hashAlgo eq "sha256" && length($hash) != 64, $targetPath);
+
+die "hash mismatch in downloaded path $targetPath; expected $hash, got $hash2\n"
+    if $hash ne $hash2;
 
 
 print STDERR "\n";