#! @perl@ -w @perlFlags@
use strict;
use File::Basename;
use File::Temp qw(tempdir);
use File::Path qw(mkpath);
use File::stat;
use File::Copy;
use Nix::Config;
use Nix::Store;
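
# Settings: the hash algorithm used for the NARs, a scratch directory for
# the generated Nix expression, and the curl command used when uploading
# (extra options can be passed via the CURL_FLAGS environment variable).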
my $hashAlgo = "sha256";

my $tmpDir = tempdir("nix-push.XXXXXX", CLEANUP => 1, TMPDIR => 1)
    or die "cannot create a temporary directory";
my $nixExpr = "$tmpDir/create-nars.nix";

my $curl = "$Nix::Config::curl --fail --silent";
my $extraCurlFlags = $ENV{'CURL_FLAGS'};
$curl = "$curl $extraCurlFlags" if defined $extraCurlFlags;

# Parse the command line.
my $compressionType = "xz";
my $force = 0;
my $localCopy;
my $localArchivesDir;
my $archivesPutURL;
my $archivesGetURL;
my @roots;
sub showSyntax {
    print STDERR <<EOF
Usage: nix-push --copy ARCHIVES_DIR PATHS...
   or: nix-push --upload ARCHIVES_PUT_URL ARCHIVES_GET_URL PATHS...

`nix-push' copies or uploads the closure of PATHS to the given
destination.
EOF
    ; # `
    exit 1;
}
for (my $n = 0; $n < scalar @ARGV; $n++) {
    my $arg = $ARGV[$n];

    if ($arg eq "--help") {
        showSyntax;
    } elsif ($arg eq "--bzip2") {
        $compressionType = "bzip2";
    } elsif ($arg eq "--force") {
        $force = 1;
    } elsif ($arg eq "--copy") {
        $n++;
        die "$0: `$arg' requires an argument\n" unless $n < scalar @ARGV;
        $localCopy = 1;
        $localArchivesDir = $ARGV[$n];
        mkpath($localArchivesDir, 0, 0755);
    } elsif ($arg eq "--upload") {
        die "$0: `$arg' requires two arguments\n" unless $n + 2 < scalar @ARGV;
        $localCopy = 0;
        $archivesPutURL = $ARGV[$n + 1];
        $archivesGetURL = $ARGV[$n + 2];
        $n += 2;
    } elsif (substr($arg, 0, 1) eq "-") {
        showSyntax;
    } else {
        push @roots, $arg;
    }
}
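
# Either --copy or --upload must have been specified.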
showSyntax if !defined $localCopy;

# From the given store paths, determine the set of requisite store
# paths, i.e., the paths required to realise them.
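# %storePaths is used as a set: its keys are the paths in the closure.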
my %storePaths;
foreach my $path (@roots) {
    die unless $path =~ /^\//;

    # Get the closure of the given store path, forcing realisation
    # and including the outputs of any derivations.
    my $pid = open(READ,
        "$Nix::Config::binDir/nix-store --query --requisites --force-realise " .
        "--include-outputs '$path'|") or die;

    while (<READ>) {
        chomp;
        die "bad: $_" unless /^\//;
        $storePaths{$_} = "";
    }

    close READ or die "nix-store failed: $?";
}
my @storePaths = keys %storePaths;
# Create a list of Nix derivations that turn each path into a Nix
# archive.
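# <nix/nar.nix> is the helper expression shipped with Nix that packs a store
# path into a compressed NAR; each build produces the archive together with a
# `nar-compressed-hash' file, which is read further down.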
open NIX, ">$nixExpr" or die "cannot create `$nixExpr': $!";
print NIX "[";

foreach my $storePath (@storePaths) {
    die unless ($storePath =~ /\/[0-9a-z]{32}[^\"\\\$]*$/);

    # Construct a Nix expression that creates a Nix archive.
    my $nixexpr =
        "(import <nix/nar.nix> " .
        "{ storePath = builtins.storePath \"$storePath\"; hashAlgo = \"$hashAlgo\"; compressionType = \"$compressionType\"; }) ";

    print NIX $nixexpr;
}

print NIX "]";
close NIX;

# Build the Nix expression.
print STDERR "building compressed archives...\n";
my @narPaths;
my $pid = open(READ, "$Nix::Config::binDir/nix-build $nixExpr -o $tmpDir/result |")
    or die "cannot run nix-build";

while (<READ>) {
    chomp;
    die unless /^\//;
    push @narPaths, $_;
}

close READ or die "nix-build failed: $?";

# Upload the archives and the corresponding info files.
print STDERR "uploading/copying archives...\n";
my $totalNarSize = 0;
my $totalCompressedSize = 0;
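
# The loop below pairs each store path with its NAR directory by index,
# relying on nix-build printing the outputs in the same order as the
# elements of the generated list.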
for (my $n = 0; $n < scalar @storePaths; $n++) {
    my $storePath = $storePaths[$n];
    my $narDir = $narPaths[$n];
    my $baseName = basename $storePath;

    # Get info about the store path.
    my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($storePath);

    # In some exceptional cases (such as VM tests that use the Nix
    # store of the host), the database doesn't contain the hash. So
    # compute it.
    if ($narHash =~ /^sha256:0*$/) {
        my $nar = "$tmpDir/nar";
        system("$Nix::Config::binDir/nix-store --dump $storePath > $nar") == 0
            or die "cannot dump $storePath\n";
        $narHash = `$Nix::Config::binDir/nix-hash --type sha256 --flat $nar`;
        die "cannot hash `$nar'" if $? != 0;
        chomp $narHash;
        $narHash = "sha256:$narHash";
        $narSize = stat("$nar")->size;
        unlink $nar or die;
    }
    $totalNarSize += $narSize;

    # Get info about the compressed NAR.
    open HASH, "$narDir/nar-compressed-hash" or die "cannot open nar-compressed-hash";
    my $compressedHash = <HASH>;
    chomp $compressedHash;
    $compressedHash =~ /^[0-9a-z]+$/ or die "invalid hash";
    close HASH;

    my $narName = "$compressedHash.nar." . ($compressionType eq "xz" ? "xz" : "bz2");
    my $narFile = "$narDir/$narName";
    (-f $narFile) or die "NAR file for $storePath not found";

    my $compressedSize = stat($narFile)->size;
    $totalCompressedSize += $compressedSize;

    printf STDERR "%s [%.2f MiB, %.1f%%]\n", $storePath,
        $compressedSize / (1024 * 1024), $compressedSize / $narSize * 100;

    # Upload the compressed NAR.
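    # In --copy mode the archive is staged as a hidden temporary file and
    # renamed into place, so readers never observe a partially written NAR.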
    if ($localCopy) {
        my $dst = "$localArchivesDir/$narName";
        if (! -f $dst) {
            my $tmp = "$localArchivesDir/.tmp.$$.$narName";
            copy($narFile, $tmp) or die "cannot copy $narFile to $tmp: $!\n";
            rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
        }
    } else {
        die "unimplemented";
        #if (!archiveExists("$basename")) {
        #    system("$curl --show-error --upload-file " .
        #        "'$narArchive' '$archivesPutURL/$basename' > /dev/null") == 0 or
        #        die "curl failed on $narArchive: $?";
        #}
    }
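
    # The .narinfo describes the store path for clients: the NAR's URL and
    # compression, the file and NAR hashes and sizes, the path's references,
    # and (when known) its deriver and platform.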
    # Upload the info file.
    my $info;
    $info .= "StorePath: $storePath\n";
    $info .= "URL: $narName\n";
    $info .= "Compression: $compressionType\n";
    $info .= "FileHash: sha256:$compressedHash\n";
    $info .= "FileSize: $compressedSize\n";
    $info .= "NarHash: $narHash\n";
    $info .= "NarSize: $narSize\n";
    $info .= "References: " . join(" ", map { basename $_ } @{$refs}) . "\n";
    if (defined $deriver) {
        $info .= "Deriver: " . basename $deriver . "\n";
        if (isValidPath($deriver)) {
            my $drv = derivationFromPath($deriver);
            $info .= "System: $drv->{platform}\n";
        }
    }
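
    # The narinfo is written under a name derived from a hash of the store
    # path, using the same temporary-file-plus-rename scheme; --force allows
    # an existing narinfo to be overwritten.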
    my $infoName = hashString("sha256", 1, $storePath);

    if ($localCopy) {
        my $dst = "$localArchivesDir/$infoName.narinfo";
        if ($force || ! -f $dst) {
            my $tmp = "$localArchivesDir/.tmp.$$.$infoName";
            open INFO, ">$tmp" or die;
            print INFO "$info" or die;
            close INFO or die;
            rename($tmp, $dst) or die "cannot rename $tmp to $dst: $!\n";
        }
    } else {
        die "unimplemented";
    }
}

printf STDERR "total compressed size %.2f MiB, %.1f%%\n",
    $totalCompressedSize / (1024 * 1024), $totalCompressedSize / $totalNarSize * 100;