#! @perl@ -w @perlFlags@

use strict;
use File::Basename;
use File::stat;
use Nix::Store;
use Nix::Config;
use Nix::Utils;
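
# nix-prefetch-url: download the file at a URL, print its hash, and
# copy it into the Nix store.
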
my $hashType = $ENV{'NIX_HASH_ALGO'} || "sha256"; # obsolete
my $cacheDir = $ENV{'NIX_DOWNLOAD_CACHE'};
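
# Parse the command-line arguments.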
my @args;
my $arg;
while ($arg = shift) {
if ($arg eq "--help") {
exec "man nix-prefetch-url" or die;
} elsif ($arg eq "--type") {
$hashType = shift;
die "$0: ‘$arg’ requires an argument\n" unless defined $hashType;
} elsif (substr($arg, 0, 1) eq "-") {
die "$0: unknown flag ‘$arg’\n";
} else {
push @args, $arg;
}
}
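
# The first argument is the URL to fetch; the optional second argument
# is the expected hash of its contents.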
my $url = $args[0];
my $expHash = $args[1];

if (!defined $url || $url eq "") {
    print STDERR <<EOF;
Usage: nix-prefetch-url URL [EXPECTED-HASH]
EOF
    exit 1;
}
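
# Create a temporary directory to hold the download and other scratch files.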
my $tmpDir = mkTempDir("nix-prefetch-url");

# Hack to support the mirror:// scheme from Nixpkgs.
if ($url =~ /^mirror:\/\//) {
    system("$Nix::Config::binDir/nix-build '<nixpkgs>' -A resolveMirrorURLs --argstr url '$url' -o $tmpDir/urls > /dev/null") == 0
        or die "$0: nix-build failed; maybe \$NIX_PATH is not set properly\n";
    my @expanded = split ' ', readFile("$tmpDir/urls");
    die "$0: cannot resolve ‘$url’\n" unless scalar @expanded > 0;
    print STDERR "$url expands to $expanded[0]\n";
    $url = $expanded[0];
}

# Handle escaped characters in the URI. `+', `=' and `?' are the only
# characters that are valid in Nix store path names but have a special
# meaning in URIs.
my $name = basename $url;
die "cannot figure out file name for ‘$url’\n" if $name eq "";
$name =~ s/%2b/+/g;
$name =~ s/%3d/=/g;
$name =~ s/%3f/?/g;
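
# $finalPath will hold the resulting store path, $hash the hash of the
# file's contents under the requested hash algorithm.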
my $finalPath;
my $hash;

# If the hash was given, a file with that hash may already be in the
# store.
if (defined $expHash) {
    $finalPath = makeFixedOutputPath(0, $hashType, $expHash, $name);
    if (isValidPath($finalPath)) { $hash = $expHash; } else { $finalPath = undef; }
}

# If we don't know the hash or a file with that hash doesn't exist,
# download the file and add it to the store.
if (!defined $finalPath) {

    my $tmpFile = "$tmpDir/$name";

    # Optionally do timestamp-based caching of the download.  The only
    # thing that we cache in $NIX_DOWNLOAD_CACHE is the hash and the
    # timestamp of the file at $url.  The caching of the file *contents*
    # is done in the Nix store, where it can be garbage-collected
    # independently.
    my ($cachedTimestampFN, $cachedHashFN, @cacheFlags);
    if (defined $cacheDir) {
        my $urlHash = hashString("sha256", 1, $url);
        writeFile "$cacheDir/$urlHash.url", $url;
        $cachedHashFN = "$cacheDir/$urlHash.$hashType";
        $cachedTimestampFN = "$cacheDir/$urlHash.stamp";
        @cacheFlags = ("--time-cond", $cachedTimestampFN) if -f $cachedHashFN && -f $cachedTimestampFN;
    }

    # Perform the download.
    my @curlFlags = ("curl", $url, "-o", $tmpFile, "--fail", "--location", "--max-redirs", "20", "--disable-epsv",
        "--cookie-jar", "$tmpDir/cookies", "--remote-time", (split " ", ($ENV{NIX_CURL_FLAGS} || "")));
    (system $Nix::Config::curl @curlFlags, @cacheFlags) == 0 or die "$0: download of ‘$url’ failed\n";

    if (defined $cacheDir && ! -e $tmpFile) {
        # Curl didn't create $tmpFile, so apparently there's no newer
        # file on the server.
        $hash = readFile $cachedHashFN or die;
        $finalPath = makeFixedOutputPath(0, $hashType, $hash, $name);
        unless (isValidPath $finalPath) {
            print STDERR "cached contents of ‘$url’ disappeared, redownloading...\n";
            $finalPath = undef;
            (system $Nix::Config::curl @curlFlags) == 0 or die "$0: download of ‘$url’ failed\n";
        }
    }

    if (!defined $finalPath) {

        # Compute the hash.
        $hash = hashFile($hashType, $hashType ne "md5", $tmpFile);
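
        # Record the hash and the downloaded file's timestamp in the
        # download cache so that a later run can skip the download if
        # the remote file hasn't changed.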
        if (defined $cacheDir) {
            writeFile $cachedHashFN, $hash;
            my $st = stat($tmpFile) or die;
            open STAMP, ">$cachedTimestampFN" or die; close STAMP;
            utime($st->atime, $st->mtime, $cachedTimestampFN) or die;
        }

        # Add the downloaded file to the Nix store.
        $finalPath = addToStore($tmpFile, 0, $hashType);
    }
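
    # If an expected hash was given on the command line, fail when the
    # downloaded file doesn't match it.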
die "$0: hash mismatch for ‘$url’\n" if defined $expHash && $expHash ne $hash;
}
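
# Print the results: the store path on stderr (unless QUIET is set),
# the hash on stdout, and the store path on stdout as well if
# PRINT_PATH is set.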
print STDERR "path is ‘$finalPath’\n" unless $ENV{'QUIET'};
print "$hash\n";
print "$finalPath\n" if $ENV{'PRINT_PATH'};