Diffstat (limited to 'perl')
-rw-r--r--  perl/MANIFEST                   |   7
-rw-r--r--  perl/lib/Nix/Config.pm.in       |  42
-rw-r--r--  perl/lib/Nix/CopyClosure.pm     |  57
-rw-r--r--  perl/lib/Nix/Crypto.pm          |  42
-rw-r--r--  perl/lib/Nix/GeneratePatches.pm | 340
-rw-r--r--  perl/lib/Nix/Manifest.pm        | 467
-rw-r--r--  perl/lib/Nix/SSH.pm             |  53
-rw-r--r--  perl/lib/Nix/Store.pm           |  92
-rw-r--r--  perl/lib/Nix/Store.xs           | 281
-rw-r--r--  perl/lib/Nix/Utils.pm           |  38
-rw-r--r--  perl/local.mk                   |  40
11 files changed, 1459 insertions, 0 deletions
diff --git a/perl/MANIFEST b/perl/MANIFEST new file mode 100644 index 000000000000..08897647c978 --- /dev/null +++ b/perl/MANIFEST @@ -0,0 +1,7 @@ +Changes +Makefile.PL +MANIFEST +Nix.xs +README +t/Nix.t +lib/Nix.pm diff --git a/perl/lib/Nix/Config.pm.in b/perl/lib/Nix/Config.pm.in new file mode 100644 index 000000000000..e07d4c08f13f --- /dev/null +++ b/perl/lib/Nix/Config.pm.in @@ -0,0 +1,42 @@ +package Nix::Config; + +$version = "@PACKAGE_VERSION@"; + +$binDir = $ENV{"NIX_BIN_DIR"} || "@bindir@"; +$libexecDir = $ENV{"NIX_LIBEXEC_DIR"} || "@libexecdir@"; +$stateDir = $ENV{"NIX_STATE_DIR"} || "@localstatedir@/nix"; +$manifestDir = $ENV{"NIX_MANIFESTS_DIR"} || "@localstatedir@/nix/manifests"; +$logDir = $ENV{"NIX_LOG_DIR"} || "@localstatedir@/log/nix"; +$confDir = $ENV{"NIX_CONF_DIR"} || "@sysconfdir@/nix"; +$storeDir = $ENV{"NIX_STORE_DIR"} || "@storedir@"; + +$bzip2 = "@bzip2@"; +$xz = "@xz@"; +$curl = "@curl@"; +$openssl = "@openssl@"; + +$useBindings = "@perlbindings@" eq "yes"; + +%config = (); + +sub readConfig { + if (defined $ENV{'_NIX_OPTIONS'}) { + foreach my $s (split '\n', $ENV{'_NIX_OPTIONS'}) { + my ($n, $v) = split '=', $s, 2; + $config{$n} = $v; + } + return; + } + + my $config = "$confDir/nix.conf"; + return unless -f $config; + + open CONFIG, "<$config" or die "cannot open `$config'"; + while (<CONFIG>) { + /^\s*([\w\-\.]+)\s*=\s*(.*)$/ or next; + $config{$1} = $2; + } + close CONFIG; +} + +return 1; diff --git a/perl/lib/Nix/CopyClosure.pm b/perl/lib/Nix/CopyClosure.pm new file mode 100644 index 000000000000..41ceabd85847 --- /dev/null +++ b/perl/lib/Nix/CopyClosure.pm @@ -0,0 +1,57 @@ +package Nix::CopyClosure; + +use strict; +use Nix::Config; +use Nix::Store; +use List::Util qw(sum); + + +sub copyTo { + my ($sshHost, $sshOpts, $storePaths, $compressor, $decompressor, + $includeOutputs, $dryRun, $sign, $progressViewer, $useSubstitutes) = @_; + + # Get the closure of this path. + my @closure = reverse(topoSortPaths(computeFSClosure(0, $includeOutputs, + map { followLinksToStorePath $_ } @{$storePaths}))); + + # Optionally use substitutes on the remote host. + if (!$dryRun && $useSubstitutes) { + system "ssh $sshHost @{$sshOpts} nix-store -r --ignore-unknown @closure"; + # Ignore exit status because this is just an optimisation. + } + + # Ask the remote host which paths are invalid. Because of limits + # to the command line length, do this in chunks. Eventually, + # we'll want to use ‘--from-stdin’, but we can't rely on the + # target having this option yet. + my @missing = (); + my $missingSize = 0; + while (scalar(@closure) > 0) { + my @ps = splice(@closure, 0, 1500); + open(READ, "set -f; ssh $sshHost @{$sshOpts} nix-store --check-validity --print-invalid @ps|"); + while (<READ>) { + chomp; + push @missing, $_; + my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($_, 1); + $missingSize += $narSize; + } + close READ or die; + } + + $compressor = "$compressor |" if $compressor ne ""; + $decompressor = "$decompressor |" if $decompressor ne ""; + $progressViewer = "$progressViewer -s $missingSize |" if $progressViewer ne ""; + + # Export the store paths and import them on the remote machine. 
+ if (scalar @missing > 0) { + print STDERR "copying ", scalar @missing, " missing paths to ‘$sshHost’...\n"; + unless ($dryRun) { + open SSH, "| $progressViewer $compressor ssh $sshHost @{$sshOpts} '$decompressor nix-store --import' > /dev/null" or die; + exportPaths(fileno(SSH), $sign, @missing); + close SSH or die "copying store paths to remote machine `$sshHost' failed: $?"; + } + } +} + + +1; diff --git a/perl/lib/Nix/Crypto.pm b/perl/lib/Nix/Crypto.pm new file mode 100644 index 000000000000..0286e88d3d28 --- /dev/null +++ b/perl/lib/Nix/Crypto.pm @@ -0,0 +1,42 @@ +package Nix::Crypto; + +use strict; +use MIME::Base64; +use Nix::Store; +use Nix::Config; +use IPC::Open2; + +our @ISA = qw(Exporter); +our @EXPORT = qw(signString isValidSignature); + +sub signString { + my ($privateKeyFile, $s) = @_; + my $hash = hashString("sha256", 0, $s); + my ($from, $to); + my $pid = open2($from, $to, $Nix::Config::openssl, "rsautl", "-sign", "-inkey", $privateKeyFile); + print $to $hash; + close $to; + local $/ = undef; + my $sig = <$from>; + close $from; + waitpid($pid, 0); + die "$0: OpenSSL returned exit code $? while signing hash\n" if $? != 0; + my $sig64 = encode_base64($sig, ""); + return $sig64; +} + +sub isValidSignature { + my ($publicKeyFile, $sig64, $s) = @_; + my ($from, $to); + my $pid = open2($from, $to, $Nix::Config::openssl, "rsautl", "-verify", "-inkey", $publicKeyFile, "-pubin"); + print $to decode_base64($sig64); + close $to; + my $decoded = <$from>; + close $from; + waitpid($pid, 0); + return 0 if $? != 0; + my $hash = hashString("sha256", 0, $s); + return $decoded eq $hash; +} + +1; diff --git a/perl/lib/Nix/GeneratePatches.pm b/perl/lib/Nix/GeneratePatches.pm new file mode 100644 index 000000000000..612c8a3a15ba --- /dev/null +++ b/perl/lib/Nix/GeneratePatches.pm @@ -0,0 +1,340 @@ +package Nix::GeneratePatches; + +use strict; +use File::Temp qw(tempdir); +use File::stat; +use Nix::Config; +use Nix::Manifest; + +our @ISA = qw(Exporter); +our @EXPORT = qw(generatePatches propagatePatches copyPatches); + + +# Some patch generations options. + +# Max size of NAR archives to generate patches for. +my $maxNarSize = $ENV{"NIX_MAX_NAR_SIZE"}; +$maxNarSize = 160 * 1024 * 1024 if !defined $maxNarSize; + +# If patch is bigger than this fraction of full archive, reject. +my $maxPatchFraction = $ENV{"NIX_PATCH_FRACTION"}; +$maxPatchFraction = 0.60 if !defined $maxPatchFraction; + +my $timeLimit = $ENV{"NIX_BSDIFF_TIME_LIMIT"}; +$timeLimit = 180 if !defined $timeLimit; + +my $hashAlgo = "sha256"; + + +sub findOutputPaths { + my $narFiles = shift; + + my %outPaths; + + foreach my $p (keys %{$narFiles}) { + + # Ignore derivations. + next if ($p =~ /\.drv$/); + + # Ignore builders (too much ambiguity -- they're all called + # `builder.sh'). + next if ($p =~ /\.sh$/); + next if ($p =~ /\.patch$/); + + # Don't bother including tar files etc. 
+ next if ($p =~ /\.tar$/ || $p =~ /\.tar\.(gz|bz2|Z|lzma|xz)$/ || $p =~ /\.zip$/ || $p =~ /\.bin$/ || $p =~ /\.tgz$/ || $p =~ /\.rpm$/ || $p =~ /cvs-export$/ || $p =~ /fetchhg$/); + + $outPaths{$p} = 1; + } + + return %outPaths; +} + + +sub getNameVersion { + my $p = shift; + $p =~ /\/[0-9a-z]+((?:-[a-zA-Z][^\/-]*)+)([^\/]*)$/; + my $name = $1; + my $version = $2; + return undef unless defined $name && defined $version; + $name =~ s/^-//; + $version =~ s/^-//; + return ($name, $version); +} + + +# A quick hack to get a measure of the `distance' between two +# versions: it's just the position of the first character that differs +# (or 999 if they are the same). +sub versionDiff { + my $s = shift; + my $t = shift; + my $i; + return 999 if $s eq $t; + for ($i = 0; $i < length $s; $i++) { + return $i if $i >= length $t or + substr($s, $i, 1) ne substr($t, $i, 1); + } + return $i; +} + + +sub getNarBz2 { + my $narPath = shift; + my $narFiles = shift; + my $storePath = shift; + + my $narFileList = $$narFiles{$storePath}; + die "missing path $storePath" unless defined $narFileList; + + my $narFile = @{$narFileList}[0]; + die unless defined $narFile; + + $narFile->{url} =~ /\/([^\/]+)$/; + die unless defined $1; + return "$narPath/$1"; +} + + +sub containsPatch { + my $patches = shift; + my $storePath = shift; + my $basePath = shift; + my $patchList = $$patches{$storePath}; + return 0 if !defined $patchList; + my $found = 0; + foreach my $patch (@{$patchList}) { + # !!! baseHash might differ + return 1 if $patch->{basePath} eq $basePath; + } + return 0; +} + + +sub generatePatches { + my ($srcNarFiles, $dstNarFiles, $srcPatches, $dstPatches, $narPath, $patchesPath, $patchesURL, $tmpDir) = @_; + + my %srcOutPaths = findOutputPaths $srcNarFiles; + my %dstOutPaths = findOutputPaths $dstNarFiles; + + # For each output path in the destination, see if we need to / can + # create a patch. + + print STDERR "creating patches...\n"; + + foreach my $p (keys %dstOutPaths) { + + # If exactly the same path already exists in the source, skip it. + next if defined $srcOutPaths{$p}; + + print " $p\n"; + + # If not, then we should find the paths in the source that are + # `most' likely to be present on a system that wants to + # install this path. + + (my $name, my $version) = getNameVersion $p; + next unless defined $name && defined $version; + + my @closest = (); + my $closestVersion; + my $minDist = -1; # actually, larger means closer + + # Find all source paths with the same name. + + foreach my $q (keys %srcOutPaths) { + (my $name2, my $version2) = getNameVersion $q; + next unless defined $name2 && defined $version2; + + if ($name eq $name2) { + + my $srcSystem = @{$$dstNarFiles{$p}}[0]->{system}; + my $dstSystem = @{$$srcNarFiles{$q}}[0]->{system}; + if (defined $srcSystem && defined $dstSystem && $srcSystem ne $dstSystem) { + print " SKIPPING $q due to different systems ($srcSystem vs. $dstSystem)\n"; + next; + } + + # If the sizes differ too much, then skip. This + # disambiguates between, e.g., a real component and a + # wrapper component (cf. Firefox in Nixpkgs). + my $srcSize = @{$$srcNarFiles{$q}}[0]->{size}; + my $dstSize = @{$$dstNarFiles{$p}}[0]->{size}; + my $ratio = $srcSize / $dstSize; + $ratio = 1 / $ratio if $ratio < 1; + # print " SIZE $srcSize $dstSize $ratio $q\n"; + + if ($ratio >= 3) { + print " SKIPPING $q due to size ratio $ratio ($srcSize vs. $dstSize)\n"; + next; + } + + # If there are multiple matching names, include the + # ones with the closest version numbers. 
+ my $dist = versionDiff $version, $version2; + if ($dist > $minDist) { + $minDist = $dist; + @closest = ($q); + $closestVersion = $version2; + } elsif ($dist == $minDist) { + push @closest, $q; + } + } + } + + if (scalar(@closest) == 0) { + print " NO BASE: $p\n"; + next; + } + + foreach my $closest (@closest) { + + # Generate a patch between $closest and $p. + print STDERR " $p <- $closest\n"; + + # If the patch already exists, skip it. + if (containsPatch($srcPatches, $p, $closest) || + containsPatch($dstPatches, $p, $closest)) + { + print " skipping, already exists\n"; + next; + } + + my $srcNarBz2 = getNarBz2 $narPath, $srcNarFiles, $closest; + my $dstNarBz2 = getNarBz2 $narPath, $dstNarFiles, $p; + + if (! -f $srcNarBz2) { + warn "patch source archive $srcNarBz2 is missing\n"; + next; + } + + system("$Nix::Config::bzip2 -d < $srcNarBz2 > $tmpDir/A") == 0 + or die "cannot unpack $srcNarBz2"; + + if (stat("$tmpDir/A")->size >= $maxNarSize) { + print " skipping, source is too large\n"; + next; + } + + system("$Nix::Config::bzip2 -d < $dstNarBz2 > $tmpDir/B") == 0 + or die "cannot unpack $dstNarBz2"; + + if (stat("$tmpDir/B")->size >= $maxNarSize) { + print " skipping, destination is too large\n"; + next; + } + + my $time1 = time(); + my $res = system("ulimit -t $timeLimit; $Nix::Config::libexecDir/nix/bsdiff $tmpDir/A $tmpDir/B $tmpDir/DIFF"); + my $time2 = time(); + if ($res) { + warn "binary diff computation aborted after ", $time2 - $time1, " seconds\n"; + next; + } + + my $baseHash = `$Nix::Config::binDir/nix-hash --flat --type $hashAlgo --base32 $tmpDir/A` or die; + chomp $baseHash; + + my $narHash = `$Nix::Config::binDir/nix-hash --flat --type $hashAlgo --base32 $tmpDir/B` or die; + chomp $narHash; + + my $narDiffHash = `$Nix::Config::binDir/nix-hash --flat --type $hashAlgo --base32 $tmpDir/DIFF` or die; + chomp $narDiffHash; + + my $narDiffSize = stat("$tmpDir/DIFF")->size; + my $dstNarBz2Size = stat($dstNarBz2)->size; + + print " size $narDiffSize; full size $dstNarBz2Size; ", $time2 - $time1, " seconds\n"; + + if ($narDiffSize >= $dstNarBz2Size) { + print " rejecting; patch bigger than full archive\n"; + next; + } + + if ($narDiffSize / $dstNarBz2Size >= $maxPatchFraction) { + print " rejecting; patch too large relative to full archive\n"; + next; + } + + my $finalName = "$narDiffHash.nar-bsdiff"; + + if (-e "$patchesPath/$finalName") { + print " not copying, already exists\n"; + } + + else { + system("cp '$tmpDir/DIFF' '$patchesPath/$finalName.tmp'") == 0 + or die "cannot copy diff"; + rename("$patchesPath/$finalName.tmp", "$patchesPath/$finalName") + or die "cannot rename $patchesPath/$finalName.tmp"; + } + + # Add the patch to the manifest. + addPatch $dstPatches, $p, + { url => "$patchesURL/$finalName", hash => "$hashAlgo:$narDiffHash" + , size => $narDiffSize, basePath => $closest, baseHash => "$hashAlgo:$baseHash" + , narHash => "$hashAlgo:$narHash", patchType => "nar-bsdiff" + }; + } + } +} + + +# Propagate useful patches from $srcPatches to $dstPatches. A patch +# is useful if it produces either paths in the $dstNarFiles or paths +# that can be used as the base for other useful patches. +sub propagatePatches { + my ($srcPatches, $dstNarFiles, $dstPatches) = @_; + + print STDERR "propagating patches...\n"; + + my $changed; + do { + # !!! 
we repeat this to reach the transitive closure; inefficient + $changed = 0; + + print STDERR "loop\n"; + + my %dstBasePaths; + foreach my $q (keys %{$dstPatches}) { + foreach my $patch (@{$$dstPatches{$q}}) { + $dstBasePaths{$patch->{basePath}} = 1; + } + } + + foreach my $p (keys %{$srcPatches}) { + my $patchList = $$srcPatches{$p}; + + my $include = 0; + + # Is path $p included in the destination? If so, include + # patches that produce it. + $include = 1 if defined $$dstNarFiles{$p}; + + # Is path $p a path that serves as a base for paths in the + # destination? If so, include patches that produce it. + # !!! check baseHash + $include = 1 if defined $dstBasePaths{$p}; + + if ($include) { + foreach my $patch (@{$patchList}) { + $changed = 1 if addPatch $dstPatches, $p, $patch; + } + } + + } + + } while $changed; +} + + +# Add all new patches in $srcPatches to $dstPatches. +sub copyPatches { + my ($srcPatches, $dstPatches) = @_; + foreach my $p (keys %{$srcPatches}) { + addPatch $dstPatches, $p, $_ foreach @{$$srcPatches{$p}}; + } +} + + +return 1; diff --git a/perl/lib/Nix/Manifest.pm b/perl/lib/Nix/Manifest.pm new file mode 100644 index 000000000000..015c92835674 --- /dev/null +++ b/perl/lib/Nix/Manifest.pm @@ -0,0 +1,467 @@ +package Nix::Manifest; + +use strict; +use DBI; +use DBD::SQLite; +use Cwd; +use File::stat; +use File::Path; +use Fcntl ':flock'; +use Nix::Config; +use Nix::Crypto; + +our @ISA = qw(Exporter); +our @EXPORT = qw(readManifest writeManifest updateManifestDB addPatch deleteOldManifests parseNARInfo); + + +sub addNAR { + my ($narFiles, $storePath, $info) = @_; + + $$narFiles{$storePath} = [] + unless defined $$narFiles{$storePath}; + + my $narFileList = $$narFiles{$storePath}; + + my $found = 0; + foreach my $narFile (@{$narFileList}) { + $found = 1 if $narFile->{url} eq $info->{url}; + } + + push @{$narFileList}, $info if !$found; +} + + +sub addPatch { + my ($patches, $storePath, $patch) = @_; + + $$patches{$storePath} = [] + unless defined $$patches{$storePath}; + + my $patchList = $$patches{$storePath}; + + my $found = 0; + foreach my $patch2 (@{$patchList}) { + $found = 1 if + $patch2->{url} eq $patch->{url} && + $patch2->{basePath} eq $patch->{basePath}; + } + + push @{$patchList}, $patch if !$found; + + return !$found; +} + + +sub readManifest_ { + my ($manifest, $addNAR, $addPatch) = @_; + + # Decompress the manifest if necessary. 
+ if ($manifest =~ /\.bz2$/) { + open MANIFEST, "$Nix::Config::bzip2 -d < $manifest |" + or die "cannot decompress `$manifest': $!"; + } else { + open MANIFEST, "<$manifest" + or die "cannot open `$manifest': $!"; + } + + my $inside = 0; + my $type; + + my $manifestVersion = 2; + + my ($storePath, $url, $hash, $size, $basePath, $baseHash, $patchType); + my ($narHash, $narSize, $references, $deriver, $copyFrom, $system, $compressionType); + + while (<MANIFEST>) { + chomp; + s/\#.*$//g; + next if (/^$/); + + if (!$inside) { + + if (/^\s*(\w*)\s*\{$/) { + $type = $1; + $type = "narfile" if $type eq ""; + $inside = 1; + undef $storePath; + undef $url; + undef $hash; + undef $size; + undef $narHash; + undef $narSize; + undef $basePath; + undef $baseHash; + undef $patchType; + undef $system; + $references = ""; + $deriver = ""; + $compressionType = "bzip2"; + } + + } else { + + if (/^\}$/) { + $inside = 0; + + if ($type eq "narfile") { + &$addNAR($storePath, + { url => $url, hash => $hash, size => $size + , narHash => $narHash, narSize => $narSize + , references => $references + , deriver => $deriver + , system => $system + , compressionType => $compressionType + }); + } + + elsif ($type eq "patch") { + &$addPatch($storePath, + { url => $url, hash => $hash, size => $size + , basePath => $basePath, baseHash => $baseHash + , narHash => $narHash, narSize => $narSize + , patchType => $patchType + }); + } + + } + + elsif (/^\s*StorePath:\s*(\/\S+)\s*$/) { $storePath = $1; } + elsif (/^\s*CopyFrom:\s*(\/\S+)\s*$/) { $copyFrom = $1; } + elsif (/^\s*Hash:\s*(\S+)\s*$/) { $hash = $1; } + elsif (/^\s*URL:\s*(\S+)\s*$/) { $url = $1; } + elsif (/^\s*Compression:\s*(\S+)\s*$/) { $compressionType = $1; } + elsif (/^\s*Size:\s*(\d+)\s*$/) { $size = $1; } + elsif (/^\s*BasePath:\s*(\/\S+)\s*$/) { $basePath = $1; } + elsif (/^\s*BaseHash:\s*(\S+)\s*$/) { $baseHash = $1; } + elsif (/^\s*Type:\s*(\S+)\s*$/) { $patchType = $1; } + elsif (/^\s*NarHash:\s*(\S+)\s*$/) { $narHash = $1; } + elsif (/^\s*NarSize:\s*(\d+)\s*$/) { $narSize = $1; } + elsif (/^\s*References:\s*(.*)\s*$/) { $references = $1; } + elsif (/^\s*Deriver:\s*(\S+)\s*$/) { $deriver = $1; } + elsif (/^\s*ManifestVersion:\s*(\d+)\s*$/) { $manifestVersion = $1; } + elsif (/^\s*System:\s*(\S+)\s*$/) { $system = $1; } + + # Compatibility; + elsif (/^\s*NarURL:\s*(\S+)\s*$/) { $url = $1; } + elsif (/^\s*MD5:\s*(\S+)\s*$/) { $hash = "md5:$1"; } + + } + } + + close MANIFEST; + + return $manifestVersion; +} + + +sub readManifest { + my ($manifest, $narFiles, $patches) = @_; + readManifest_($manifest, + sub { addNAR($narFiles, @_); }, + sub { addPatch($patches, @_); } ); +} + + +sub writeManifest { + my ($manifest, $narFiles, $patches, $noCompress) = @_; + + open MANIFEST, ">$manifest.tmp"; # !!! 
check exclusive + + print MANIFEST "version {\n"; + print MANIFEST " ManifestVersion: 3\n"; + print MANIFEST "}\n"; + + foreach my $storePath (sort (keys %{$narFiles})) { + my $narFileList = $$narFiles{$storePath}; + foreach my $narFile (@{$narFileList}) { + print MANIFEST "{\n"; + print MANIFEST " StorePath: $storePath\n"; + print MANIFEST " NarURL: $narFile->{url}\n"; + print MANIFEST " Compression: $narFile->{compressionType}\n"; + print MANIFEST " Hash: $narFile->{hash}\n" if defined $narFile->{hash}; + print MANIFEST " Size: $narFile->{size}\n" if defined $narFile->{size}; + print MANIFEST " NarHash: $narFile->{narHash}\n"; + print MANIFEST " NarSize: $narFile->{narSize}\n" if $narFile->{narSize}; + print MANIFEST " References: $narFile->{references}\n" + if defined $narFile->{references} && $narFile->{references} ne ""; + print MANIFEST " Deriver: $narFile->{deriver}\n" + if defined $narFile->{deriver} && $narFile->{deriver} ne ""; + print MANIFEST " System: $narFile->{system}\n" if defined $narFile->{system}; + print MANIFEST "}\n"; + } + } + + foreach my $storePath (sort (keys %{$patches})) { + my $patchList = $$patches{$storePath}; + foreach my $patch (@{$patchList}) { + print MANIFEST "patch {\n"; + print MANIFEST " StorePath: $storePath\n"; + print MANIFEST " NarURL: $patch->{url}\n"; + print MANIFEST " Hash: $patch->{hash}\n"; + print MANIFEST " Size: $patch->{size}\n"; + print MANIFEST " NarHash: $patch->{narHash}\n"; + print MANIFEST " NarSize: $patch->{narSize}\n" if $patch->{narSize}; + print MANIFEST " BasePath: $patch->{basePath}\n"; + print MANIFEST " BaseHash: $patch->{baseHash}\n"; + print MANIFEST " Type: $patch->{patchType}\n"; + print MANIFEST "}\n"; + } + } + + + close MANIFEST; + + rename("$manifest.tmp", $manifest) + or die "cannot rename $manifest.tmp: $!"; + + + # Create a bzipped manifest. + unless (defined $noCompress) { + system("$Nix::Config::bzip2 < $manifest > $manifest.bz2.tmp") == 0 + or die "cannot compress manifest"; + + rename("$manifest.bz2.tmp", "$manifest.bz2") + or die "cannot rename $manifest.bz2.tmp: $!"; + } +} + + +sub updateManifestDB { + my $manifestDir = $Nix::Config::manifestDir; + + my @manifests = glob "$manifestDir/*.nixmanifest"; + return undef if scalar @manifests == 0; + + mkpath($manifestDir); + + unlink "$manifestDir/cache.sqlite"; # remove obsolete cache + my $dbPath = "$manifestDir/cache-v2.sqlite"; + + # Open/create the database. + our $dbh = DBI->connect("dbi:SQLite:dbname=$dbPath", "", "") + or die "cannot open database `$dbPath'"; + $dbh->{RaiseError} = 1; + $dbh->{PrintError} = 0; + + $dbh->do("pragma foreign_keys = on"); + $dbh->do("pragma synchronous = off"); # we can always reproduce the cache + $dbh->do("pragma journal_mode = truncate"); + + # Initialise the database schema, if necessary. 
+ $dbh->do(<<EOF); + create table if not exists Manifests ( + id integer primary key autoincrement not null, + path text unique not null, + timestamp integer not null + ); +EOF + + $dbh->do(<<EOF); + create table if not exists NARs ( + id integer primary key autoincrement not null, + manifest integer not null, + storePath text not null, + url text not null, + compressionType text not null, + hash text, + size integer, + narHash text, + narSize integer, + refs text, + deriver text, + system text, + foreign key (manifest) references Manifests(id) on delete cascade + ); +EOF + + $dbh->do("create index if not exists NARs_storePath on NARs(storePath)"); + + $dbh->do(<<EOF); + create table if not exists Patches ( + id integer primary key autoincrement not null, + manifest integer not null, + storePath text not null, + basePath text not null, + baseHash text not null, + url text not null, + hash text, + size integer, + narHash text, + narSize integer, + patchType text not null, + foreign key (manifest) references Manifests(id) on delete cascade + ); +EOF + + $dbh->do("create index if not exists Patches_storePath on Patches(storePath)"); + + # Acquire an exclusive lock to ensure that only one process + # updates the DB at the same time. This isn't really necessary, + # but it prevents work duplication and lock contention in SQLite. + my $lockFile = "$manifestDir/cache.lock"; + open MAINLOCK, ">>$lockFile" or die "unable to acquire lock ‘$lockFile’: $!\n"; + flock(MAINLOCK, LOCK_EX) or die; + + our $insertNAR = $dbh->prepare( + "insert into NARs(manifest, storePath, url, compressionType, hash, size, narHash, " . + "narSize, refs, deriver, system) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)") or die; + + our $insertPatch = $dbh->prepare( + "insert into Patches(manifest, storePath, basePath, baseHash, url, hash, " . + "size, narHash, narSize, patchType) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"); + + $dbh->begin_work; + + # Read each manifest in $manifestDir and add it to the database, + # unless we've already done so on a previous run. + my %seen; + + for my $manifestLink (@manifests) { + my $manifest = Cwd::abs_path($manifestLink); + next unless -f $manifest; + my $timestamp = lstat($manifest)->mtime; + $seen{$manifest} = 1; + + next if scalar @{$dbh->selectcol_arrayref( + "select 1 from Manifests where path = ? 
and timestamp = ?", + {}, $manifest, $timestamp)} == 1; + + print STDERR "caching $manifest...\n"; + + $dbh->do("delete from Manifests where path = ?", {}, $manifest); + + $dbh->do("insert into Manifests(path, timestamp) values (?, ?)", + {}, $manifest, $timestamp); + + our $id = $dbh->last_insert_id("", "", "", ""); + + sub addNARToDB { + my ($storePath, $narFile) = @_; + $insertNAR->execute( + $id, $storePath, $narFile->{url}, $narFile->{compressionType}, $narFile->{hash}, + $narFile->{size}, $narFile->{narHash}, $narFile->{narSize}, $narFile->{references}, + $narFile->{deriver}, $narFile->{system}); + }; + + sub addPatchToDB { + my ($storePath, $patch) = @_; + $insertPatch->execute( + $id, $storePath, $patch->{basePath}, $patch->{baseHash}, $patch->{url}, + $patch->{hash}, $patch->{size}, $patch->{narHash}, $patch->{narSize}, + $patch->{patchType}); + }; + + my $version = readManifest_($manifest, \&addNARToDB, \&addPatchToDB); + + if ($version < 3) { + die "you have an old-style or corrupt manifest `$manifestLink'; please delete it\n"; + } + if ($version >= 10) { + die "manifest `$manifestLink' is too new; please delete it or upgrade Nix\n"; + } + } + + # Removed cached information for removed manifests from the DB. + foreach my $manifest (@{$dbh->selectcol_arrayref("select path from Manifests")}) { + next if defined $seen{$manifest}; + $dbh->do("delete from Manifests where path = ?", {}, $manifest); + } + + $dbh->commit; + + close MAINLOCK; + + return $dbh; +} + + + +# Delete all old manifests downloaded from a given URL. +sub deleteOldManifests { + my ($url, $curUrlFile) = @_; + for my $urlFile (glob "$Nix::Config::manifestDir/*.url") { + next if defined $curUrlFile && $urlFile eq $curUrlFile; + open URL, "<$urlFile" or die; + my $url2 = <URL>; + chomp $url2; + close URL; + next unless $url eq $url2; + my $base = $urlFile; $base =~ s/.url$//; + unlink "${base}.url"; + unlink "${base}.nixmanifest"; + } +} + + +# Parse a NAR info file. 
+sub parseNARInfo { + my ($storePath, $content, $requireValidSig, $location) = @_; + + my ($storePath2, $url, $fileHash, $fileSize, $narHash, $narSize, $deriver, $system, $sig); + my $signedData = ""; + my $compression = "bzip2"; + my @refs; + + foreach my $line (split "\n", $content) { + return undef unless $line =~ /^(.*): (.*)$/; + if ($1 eq "StorePath") { $storePath2 = $2; } + elsif ($1 eq "URL") { $url = $2; } + elsif ($1 eq "Compression") { $compression = $2; } + elsif ($1 eq "FileHash") { $fileHash = $2; } + elsif ($1 eq "FileSize") { $fileSize = int($2); } + elsif ($1 eq "NarHash") { $narHash = $2; } + elsif ($1 eq "NarSize") { $narSize = int($2); } + elsif ($1 eq "References") { @refs = split / /, $2; } + elsif ($1 eq "Deriver") { $deriver = $2; } + elsif ($1 eq "System") { $system = $2; } + elsif ($1 eq "Signature") { $sig = $2; last; } + $signedData .= "$line\n"; + } + + return undef if $storePath ne $storePath2 || !defined $url || !defined $narHash; + + my $res = + { url => $url + , compression => $compression + , fileHash => $fileHash + , fileSize => $fileSize + , narHash => $narHash + , narSize => $narSize + , refs => [ @refs ] + , deriver => $deriver + , system => $system + }; + + if ($requireValidSig) { + if (!defined $sig) { + warn "NAR info file `$location' lacks a signature; ignoring\n"; + return undef; + } + my ($sigVersion, $keyName, $sig64) = split ";", $sig; + $sigVersion //= 0; + if ($sigVersion != 1) { + warn "NAR info file `$location' has unsupported version $sigVersion; ignoring\n"; + return undef; + } + return undef unless defined $keyName && defined $sig64; + my $publicKeyFile = $Nix::Config::config{"binary-cache-public-key-$keyName"}; + if (!defined $publicKeyFile) { + warn "NAR info file `$location' is signed by unknown key `$keyName'; ignoring\n"; + return undef; + } + if (! -f $publicKeyFile) { + die "binary cache public key file `$publicKeyFile' does not exist\n"; + return undef; + } + if (!isValidSignature($publicKeyFile, $sig64, $signedData)) { + warn "NAR info file `$location' has an invalid signature; ignoring\n"; + return undef; + } + $res->{signedBy} = $keyName; + } + + return $res; +} + + +return 1; diff --git a/perl/lib/Nix/SSH.pm b/perl/lib/Nix/SSH.pm new file mode 100644 index 000000000000..584c44500981 --- /dev/null +++ b/perl/lib/Nix/SSH.pm @@ -0,0 +1,53 @@ +use strict; +use File::Temp qw(tempdir); + +our @sshOpts = split ' ', ($ENV{"NIX_SSHOPTS"} or ""); + +push @sshOpts, "-x"; + +my $sshStarted = 0; +my $sshHost; + +# Open a master SSH connection to `host', unless there already is a +# running master connection (as determined by `-O check'). +sub openSSHConnection { + my ($host) = @_; + die if $sshStarted; + $sshHost = $host; + return 1 if system("ssh $sshHost @sshOpts -O check 2> /dev/null") == 0; + + my $tmpDir = tempdir("nix-ssh.XXXXXX", CLEANUP => 1, TMPDIR => 1) + or die "cannot create a temporary directory"; + + push @sshOpts, "-S", "$tmpDir/control"; + + # Start the master. We can't use the `-f' flag (fork into + # background after establishing the connection) because then the + # child continues to run if we are killed. So instead make SSH + # print "started" when it has established the connection, and wait + # until we see that. + open SSHPIPE, "ssh $sshHost @sshOpts -M -N -o LocalCommand='echo started' -o PermitLocalCommand=yes |" or die; + + while (<SSHPIPE>) { + chomp; + if ($_ eq "started") { + $sshStarted = 1; + return 1; + } + } + + return 0; +} + +# Tell the master SSH client to exit. 
+sub closeSSHConnection { + if ($sshStarted) { + system("ssh $sshHost @sshOpts -O exit 2> /dev/null") == 0 + or warn "unable to stop SSH master: $?"; + $sshStarted = 0; + } +} + +END { my $saved = $?; closeSSHConnection; $? = $saved; } + +return 1; diff --git a/perl/lib/Nix/Store.pm b/perl/lib/Nix/Store.pm new file mode 100644 index 000000000000..191116ee5637 --- /dev/null +++ b/perl/lib/Nix/Store.pm @@ -0,0 +1,92 @@ +package Nix::Store; + +use strict; +use warnings; +use Nix::Config; + +require Exporter; + +our @ISA = qw(Exporter); + +our %EXPORT_TAGS = ( 'all' => [ qw( ) ] ); + +our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } ); + +our @EXPORT = qw( + isValidPath queryReferences queryPathInfo queryDeriver queryPathHash + queryPathFromHashPart + topoSortPaths computeFSClosure followLinksToStorePath exportPaths + hashPath hashFile hashString + addToStore makeFixedOutputPath + derivationFromPath +); + +our $VERSION = '0.15'; + +sub backtick { + open(RES, "-|", @_) or die; + local $/; + my $res = <RES> || ""; + close RES or die; + return $res; +} + +if ($Nix::Config::useBindings) { + require XSLoader; + XSLoader::load('Nix::Store', $VERSION); +} else { + + # Provide slow fallbacks of some functions on platforms that don't + # support the Perl bindings. + + use File::Temp; + use Fcntl qw/F_SETFD/; + + *hashFile = sub { + my ($algo, $base32, $path) = @_; + my $res = backtick("$Nix::Config::binDir/nix-hash", "--flat", $path, "--type", $algo, $base32 ? "--base32" : ()); + chomp $res; + return $res; + }; + + *hashPath = sub { + my ($algo, $base32, $path) = @_; + my $res = backtick("$Nix::Config::binDir/nix-hash", $path, "--type", $algo, $base32 ? "--base32" : ()); + chomp $res; + return $res; + }; + + *hashString = sub { + my ($algo, $base32, $s) = @_; + my $fh = File::Temp->new(); + print $fh $s; + my $res = backtick("$Nix::Config::binDir/nix-hash", $fh->filename, "--type", $algo, $base32 ? "--base32" : ()); + chomp $res; + return $res; + }; + + *addToStore = sub { + my ($srcPath, $recursive, $algo) = @_; + die "not implemented" if $recursive || $algo ne "sha256"; + my $res = backtick("$Nix::Config::binDir/nix-store", "--add", $srcPath); + chomp $res; + return $res; + }; + + *isValidPath = sub { + my ($path) = @_; + my $res = backtick("$Nix::Config::binDir/nix-store", "--check-validity", "--print-invalid", $path); + chomp $res; + return $res ne $path; + }; + + *queryPathHash = sub { + my ($path) = @_; + my $res = backtick("$Nix::Config::binDir/nix-store", "--query", "--hash", $path); + chomp $res; + return $res; + }; +} + +1; +__END__ diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs new file mode 100644 index 000000000000..07ccebe62f51 --- /dev/null +++ b/perl/lib/Nix/Store.xs @@ -0,0 +1,281 @@ +#include "EXTERN.h" +#include "perl.h" +#include "XSUB.h" + +/* Prevent a clash between some Perl and libstdc++ macros. */ +#undef do_open +#undef do_close + +#include <store-api.hh> +#include <globals.hh> +#include <misc.hh> +#include <util.hh> + + +using namespace nix; + + +void doInit() +{ + if (!store) { + try { + settings.processEnvironment(); + settings.loadConfFile(); + settings.update(); + settings.lockCPU = false; + store = openStore(); + } catch (Error & e) { + croak(e.what()); + } + } +} + + +MODULE = Nix::Store PACKAGE = Nix::Store +PROTOTYPES: ENABLE + + +#undef dNOOP // Hack to work around "error: declaration of 'Perl___notused' has a different language linkage" error message on clang. 
+#define dNOOP + + +void init() + CODE: + doInit(); + + +int isValidPath(char * path) + CODE: + try { + doInit(); + RETVAL = store->isValidPath(path); + } catch (Error & e) { + croak(e.what()); + } + OUTPUT: + RETVAL + + +SV * queryReferences(char * path) + PPCODE: + try { + doInit(); + PathSet paths; + store->queryReferences(path, paths); + for (PathSet::iterator i = paths.begin(); i != paths.end(); ++i) + XPUSHs(sv_2mortal(newSVpv(i->c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * queryPathHash(char * path) + PPCODE: + try { + doInit(); + Hash hash = store->queryPathHash(path); + string s = "sha256:" + printHash32(hash); + XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * queryDeriver(char * path) + PPCODE: + try { + doInit(); + Path deriver = store->queryDeriver(path); + if (deriver == "") XSRETURN_UNDEF; + XPUSHs(sv_2mortal(newSVpv(deriver.c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * queryPathInfo(char * path, int base32) + PPCODE: + try { + doInit(); + ValidPathInfo info = store->queryPathInfo(path); + if (info.deriver == "") + XPUSHs(&PL_sv_undef); + else + XPUSHs(sv_2mortal(newSVpv(info.deriver.c_str(), 0))); + string s = "sha256:" + (base32 ? printHash32(info.hash) : printHash(info.hash)); + XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); + mXPUSHi(info.registrationTime); + mXPUSHi(info.narSize); + AV * arr = newAV(); + for (PathSet::iterator i = info.references.begin(); i != info.references.end(); ++i) + av_push(arr, newSVpv(i->c_str(), 0)); + XPUSHs(sv_2mortal(newRV((SV *) arr))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * queryPathFromHashPart(char * hashPart) + PPCODE: + try { + doInit(); + Path path = store->queryPathFromHashPart(hashPart); + XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * computeFSClosure(int flipDirection, int includeOutputs, ...) + PPCODE: + try { + doInit(); + PathSet paths; + for (int n = 2; n < items; ++n) + computeFSClosure(*store, SvPV_nolen(ST(n)), paths, flipDirection, includeOutputs); + for (PathSet::iterator i = paths.begin(); i != paths.end(); ++i) + XPUSHs(sv_2mortal(newSVpv(i->c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * topoSortPaths(...) + PPCODE: + try { + doInit(); + PathSet paths; + for (int n = 0; n < items; ++n) paths.insert(SvPV_nolen(ST(n))); + Paths sorted = topoSortPaths(*store, paths); + for (Paths::iterator i = sorted.begin(); i != sorted.end(); ++i) + XPUSHs(sv_2mortal(newSVpv(i->c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * followLinksToStorePath(char * path) + CODE: + try { + doInit(); + RETVAL = newSVpv(followLinksToStorePath(path).c_str(), 0); + } catch (Error & e) { + croak(e.what()); + } + OUTPUT: + RETVAL + + +void exportPaths(int fd, int sign, ...) + PPCODE: + try { + doInit(); + Paths paths; + for (int n = 2; n < items; ++n) paths.push_back(SvPV_nolen(ST(n))); + FdSink sink(fd); + exportPaths(*store, paths, sign, sink); + } catch (Error & e) { + croak(e.what()); + } + + +SV * hashPath(char * algo, int base32, char * path) + PPCODE: + try { + Hash h = hashPath(parseHashType(algo), path).first; + string s = base32 ? printHash32(h) : printHash(h); + XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * hashFile(char * algo, int base32, char * path) + PPCODE: + try { + Hash h = hashFile(parseHashType(algo), path); + string s = base32 ? 
printHash32(h) : printHash(h); + XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * hashString(char * algo, int base32, char * s) + PPCODE: + try { + Hash h = hashString(parseHashType(algo), s); + string s = base32 ? printHash32(h) : printHash(h); + XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * addToStore(char * srcPath, int recursive, char * algo) + PPCODE: + try { + doInit(); + Path path = store->addToStore(srcPath, recursive, parseHashType(algo)); + XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name) + PPCODE: + try { + doInit(); + HashType ht = parseHashType(algo); + Path path = makeFixedOutputPath(recursive, ht, + parseHash16or32(ht, hash), name); + XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0))); + } catch (Error & e) { + croak(e.what()); + } + + +SV * derivationFromPath(char * drvPath) + PREINIT: + HV *hash; + CODE: + try { + doInit(); + Derivation drv = derivationFromPath(*store, drvPath); + hash = newHV(); + + HV * outputs = newHV(); + for (DerivationOutputs::iterator i = drv.outputs.begin(); i != drv.outputs.end(); ++i) + hv_store(outputs, i->first.c_str(), i->first.size(), newSVpv(i->second.path.c_str(), 0), 0); + hv_stores(hash, "outputs", newRV((SV *) outputs)); + + AV * inputDrvs = newAV(); + for (DerivationInputs::iterator i = drv.inputDrvs.begin(); i != drv.inputDrvs.end(); ++i) + av_push(inputDrvs, newSVpv(i->first.c_str(), 0)); // !!! ignores i->second + hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs)); + + AV * inputSrcs = newAV(); + for (PathSet::iterator i = drv.inputSrcs.begin(); i != drv.inputSrcs.end(); ++i) + av_push(inputSrcs, newSVpv(i->c_str(), 0)); + hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs)); + + hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0)); + hv_stores(hash, "builder", newSVpv(drv.builder.c_str(), 0)); + + AV * args = newAV(); + for (Strings::iterator i = drv.args.begin(); i != drv.args.end(); ++i) + av_push(args, newSVpv(i->c_str(), 0)); + hv_stores(hash, "args", newRV((SV *) args)); + + HV * env = newHV(); + for (StringPairs::iterator i = drv.env.begin(); i != drv.env.end(); ++i) + hv_store(env, i->first.c_str(), i->first.size(), newSVpv(i->second.c_str(), 0), 0); + hv_stores(hash, "env", newRV((SV *) env)); + + RETVAL = newRV_noinc((SV *)hash); + } catch (Error & e) { + croak(e.what()); + } + OUTPUT: + RETVAL diff --git a/perl/lib/Nix/Utils.pm b/perl/lib/Nix/Utils.pm new file mode 100644 index 000000000000..78d3db54270d --- /dev/null +++ b/perl/lib/Nix/Utils.pm @@ -0,0 +1,38 @@ +package Nix::Utils; + +our @ISA = qw(Exporter); +our @EXPORT = qw(checkURL uniq writeFile readFile); + +$urlRE = "(?: [a-zA-Z][a-zA-Z0-9\+\-\.]*\:[a-zA-Z0-9\%\/\?\:\@\&\=\+\$\,\-\_\.\!\~\*]+ )"; + +sub checkURL { + my ($url) = @_; + die "invalid URL ‘$url’\n" unless $url =~ /^ $urlRE $ /x; +} + +sub uniq { + my %seen; + my @res; + foreach my $name (@_) { + next if $seen{$name}; + $seen{$name} = 1; + push @res, $name; + } + return @res; +} + +sub writeFile { + my ($fn, $s) = @_; + open TMP, ">$fn" or die "cannot create file `$fn': $!"; + print TMP "$s" or die; + close TMP or die; +} + +sub readFile { + local $/ = undef; + my ($fn) = @_; + open TMP, "<$fn" or die "cannot open file `$fn': $!"; + my $s = <TMP>; + close TMP or die; + return $s; +} diff --git a/perl/local.mk b/perl/local.mk new file mode 100644 index 
000000000000..74c054e7133f
--- /dev/null
+++ b/perl/local.mk
@@ -0,0 +1,40 @@
+nix_perl_sources := \
+  $(d)/lib/Nix/Store.pm \
+  $(d)/lib/Nix/Manifest.pm \
+  $(d)/lib/Nix/GeneratePatches.pm \
+  $(d)/lib/Nix/SSH.pm \
+  $(d)/lib/Nix/CopyClosure.pm \
+  $(d)/lib/Nix/Config.pm.in \
+  $(d)/lib/Nix/Utils.pm \
+  $(d)/lib/Nix/Crypto.pm
+
+nix_perl_modules := $(nix_perl_sources:.in=)
+
+$(foreach x, $(nix_perl_modules), $(eval $(call install-data-in, $(x), $(perllibdir)/Nix)))
+
+ifeq ($(perlbindings), yes)
+
+  $(d)/lib/Nix/Store.cc: $(d)/lib/Nix/Store.xs
+	$(trace-gen) xsubpp $^ -output $@
+
+  libraries += Store
+
+  Store_DIR := $(d)/lib/Nix
+
+  Store_SOURCES := $(Store_DIR)/Store.cc
+
+  Store_LIBS = libstore
+
+  Store_CXXFLAGS = \
+    -I$(shell $(perl) -e 'use Config; print $$Config{archlibexp};')/CORE \
+    -D_FILE_OFFSET_BITS=64
+
+  Store_ALLOW_UNDEFINED = 1
+
+  Store_FORCE_INSTALL = 1
+
+  Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store
+
+endif
+
+clean-files += $(d)/lib/Nix/Config.pm $(d)/lib/Nix/Store.cc
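
As an illustration of how the modules introduced above fit together, here is a small client sketch. It is not part of this commit; it assumes the XS bindings are built (perlbindings = yes), since the pure-Perl fallbacks in Nix::Store cover only a subset of the exported functions, and that the Nix::* modules are on @INC. The store path to inspect is taken from the command line.

#!/usr/bin/env perl
# Illustrative sketch only: exercises the Nix::Store functions in the same
# way Nix::CopyClosure::copyTo does. Requires the XS bindings.
use strict;
use warnings;
use Nix::Config;
use Nix::Store;

Nix::Config::readConfig();  # populate %Nix::Config::config from nix.conf or _NIX_OPTIONS

die "usage: $0 <store-path>\n" unless @ARGV;
my $path = followLinksToStorePath($ARGV[0]);

die "`$path' is not a valid store path\n" unless isValidPath($path);

# Same return values that Nix::CopyClosure::copyTo destructures above.
my ($deriver, $narHash, $time, $narSize, $refs) = queryPathInfo($path, 1);
print "$path\n  narHash: $narHash\n  narSize: $narSize bytes\n";
print "  references: ", join(" ", @{$refs}), "\n";  # $refs is an array reference (see Store.xs)

# The closure of the path in topological order, exactly as copyTo computes it
# before asking the remote host which paths are missing.
my @closure = reverse(topoSortPaths(computeFSClosure(0, 0, $path)));
print "closure contains ", scalar(@closure), " paths\n";

The reverse(topoSortPaths(computeFSClosure(...))) pattern is the first step of Nix::CopyClosure::copyTo, which then exports the missing paths with exportPaths into an `ssh ... nix-store --import' pipe.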