Diffstat (limited to 'perl/lib/Nix')
-rw-r--r--  perl/lib/Nix/Config.pm.in        |   2
-rw-r--r--  perl/lib/Nix/CopyClosure.pm      |  12
-rw-r--r--  perl/lib/Nix/GeneratePatches.pm  | 340
-rw-r--r--  perl/lib/Nix/Manifest.pm         | 168
-rw-r--r--  perl/lib/Nix/Store.pm            |   1
-rw-r--r--  perl/lib/Nix/Store.xs            | 140
6 files changed, 74 insertions, 589 deletions
diff --git a/perl/lib/Nix/Config.pm.in b/perl/lib/Nix/Config.pm.in
index b0dc71fab377..3575d99cb671 100644
--- a/perl/lib/Nix/Config.pm.in
+++ b/perl/lib/Nix/Config.pm.in
@@ -7,7 +7,6 @@ $version = "@PACKAGE_VERSION@";
 $binDir = $ENV{"NIX_BIN_DIR"} || "@bindir@";
 $libexecDir = $ENV{"NIX_LIBEXEC_DIR"} || "@libexecdir@";
 $stateDir = $ENV{"NIX_STATE_DIR"} || "@localstatedir@/nix";
-$manifestDir = $ENV{"NIX_MANIFESTS_DIR"} || "@localstatedir@/nix/manifests";
 $logDir = $ENV{"NIX_LOG_DIR"} || "@localstatedir@/log/nix";
 $confDir = $ENV{"NIX_CONF_DIR"} || "@sysconfdir@/nix";
 $storeDir = $ENV{"NIX_STORE_DIR"} || "@storedir@";
@@ -15,7 +14,6 @@ $storeDir = $ENV{"NIX_STORE_DIR"} || "@storedir@";
 $bzip2 = "@bzip2@";
 $xz = "@xz@";
 $curl = "@curl@";
-$openssl = "@openssl@";

 $useBindings = "@perlbindings@" eq "yes";

diff --git a/perl/lib/Nix/CopyClosure.pm b/perl/lib/Nix/CopyClosure.pm
index 800feb3b413a..1adce07a4ded 100644
--- a/perl/lib/Nix/CopyClosure.pm
+++ b/perl/lib/Nix/CopyClosure.pm
@@ -10,7 +10,7 @@ use IPC::Open2;


 sub copyToOpen {
-    my ($from, $to, $sshHost, $storePaths, $includeOutputs, $dryRun, $sign, $useSubstitutes) = @_;
+    my ($from, $to, $sshHost, $storePaths, $includeOutputs, $dryRun, $useSubstitutes) = @_;

     $useSubstitutes = 0 if $dryRun || !defined $useSubstitutes;

@@ -41,13 +41,13 @@ sub copyToOpen {

     # Send the "import paths" command.
     syswrite($to, pack("L<x4", 4)) or die;
-    exportPaths(fileno($to), $sign, @missing);
+    exportPaths(fileno($to), @missing);
     readInt($from) == 1 or die "remote machine ‘$sshHost’ failed to import closure\n";
 }


 sub copyTo {
-    my ($sshHost, $storePaths, $includeOutputs, $dryRun, $sign, $useSubstitutes) = @_;
+    my ($sshHost, $storePaths, $includeOutputs, $dryRun, $useSubstitutes) = @_;

     # Connect to the remote host.
     my ($from, $to);
@@ -61,7 +61,7 @@ sub copyTo {
         return oldCopyTo(@_);
     }

-    copyToOpen($from, $to, $sshHost, $storePaths, $includeOutputs, $dryRun, $sign, $useSubstitutes);
+    copyToOpen($from, $to, $sshHost, $storePaths, $includeOutputs, $dryRun, $useSubstitutes);

     close $to;
 }
@@ -70,7 +70,7 @@ sub copyTo {
 # For backwards compatibility with Nix <= 1.7. Will be removed
 # eventually.
 sub oldCopyTo {
-    my ($sshHost, $storePaths, $includeOutputs, $dryRun, $sign, $useSubstitutes) = @_;
+    my ($sshHost, $storePaths, $includeOutputs, $dryRun, $useSubstitutes) = @_;

     # Get the closure of this path.
     my @closure = reverse(topoSortPaths(computeFSClosure(0, $includeOutputs,
@@ -105,7 +105,7 @@ sub oldCopyTo {
     print STDERR "copying ", scalar @missing, " missing paths to ‘$sshHost’...\n";
     unless ($dryRun) {
         open SSH, "| ssh $sshHost @globalSshOpts 'nix-store --import' > /dev/null" or die;
-        exportPaths(fileno(SSH), $sign, @missing);
+        exportPaths(fileno(SSH), @missing);
         close SSH or die "copying store paths to remote machine ‘$sshHost’ failed: $?";
     }
 }
diff --git a/perl/lib/Nix/GeneratePatches.pm b/perl/lib/Nix/GeneratePatches.pm
deleted file mode 100644
index 612c8a3a15ba..000000000000
--- a/perl/lib/Nix/GeneratePatches.pm
+++ /dev/null
@@ -1,340 +0,0 @@
-package Nix::GeneratePatches;
-
-use strict;
-use File::Temp qw(tempdir);
-use File::stat;
-use Nix::Config;
-use Nix::Manifest;
-
-our @ISA = qw(Exporter);
-our @EXPORT = qw(generatePatches propagatePatches copyPatches);
-
-
-# Some patch generations options.
-
-# Max size of NAR archives to generate patches for.
-my $maxNarSize = $ENV{"NIX_MAX_NAR_SIZE"};
-$maxNarSize = 160 * 1024 * 1024 if !defined $maxNarSize;
-
-# If patch is bigger than this fraction of full archive, reject.
-my $maxPatchFraction = $ENV{"NIX_PATCH_FRACTION"};
-$maxPatchFraction = 0.60 if !defined $maxPatchFraction;
-
-my $timeLimit = $ENV{"NIX_BSDIFF_TIME_LIMIT"};
-$timeLimit = 180 if !defined $timeLimit;
-
-my $hashAlgo = "sha256";
-
-
-sub findOutputPaths {
-    my $narFiles = shift;
-
-    my %outPaths;
-
-    foreach my $p (keys %{$narFiles}) {
-
-        # Ignore derivations.
-        next if ($p =~ /\.drv$/);
-
-        # Ignore builders (too much ambiguity -- they're all called
-        # `builder.sh').
-        next if ($p =~ /\.sh$/);
-        next if ($p =~ /\.patch$/);
-
-        # Don't bother including tar files etc.
-        next if ($p =~ /\.tar$/ || $p =~ /\.tar\.(gz|bz2|Z|lzma|xz)$/ || $p =~ /\.zip$/ || $p =~ /\.bin$/ || $p =~ /\.tgz$/ || $p =~ /\.rpm$/ || $p =~ /cvs-export$/ || $p =~ /fetchhg$/);
-
-        $outPaths{$p} = 1;
-    }
-
-    return %outPaths;
-}
-
-
-sub getNameVersion {
-    my $p = shift;
-    $p =~ /\/[0-9a-z]+((?:-[a-zA-Z][^\/-]*)+)([^\/]*)$/;
-    my $name = $1;
-    my $version = $2;
-    return undef unless defined $name && defined $version;
-    $name =~ s/^-//;
-    $version =~ s/^-//;
-    return ($name, $version);
-}
-
-
-# A quick hack to get a measure of the `distance' between two
-# versions: it's just the position of the first character that differs
-# (or 999 if they are the same).
-sub versionDiff {
-    my $s = shift;
-    my $t = shift;
-    my $i;
-    return 999 if $s eq $t;
-    for ($i = 0; $i < length $s; $i++) {
-        return $i if $i >= length $t or
-            substr($s, $i, 1) ne substr($t, $i, 1);
-    }
-    return $i;
-}
-
-
-sub getNarBz2 {
-    my $narPath = shift;
-    my $narFiles = shift;
-    my $storePath = shift;
-
-    my $narFileList = $$narFiles{$storePath};
-    die "missing path $storePath" unless defined $narFileList;
-
-    my $narFile = @{$narFileList}[0];
-    die unless defined $narFile;
-
-    $narFile->{url} =~ /\/([^\/]+)$/;
-    die unless defined $1;
-    return "$narPath/$1";
-}
-
-
-sub containsPatch {
-    my $patches = shift;
-    my $storePath = shift;
-    my $basePath = shift;
-    my $patchList = $$patches{$storePath};
-    return 0 if !defined $patchList;
-    my $found = 0;
-    foreach my $patch (@{$patchList}) {
-        # !!! baseHash might differ
-        return 1 if $patch->{basePath} eq $basePath;
-    }
-    return 0;
-}
-
-
-sub generatePatches {
-    my ($srcNarFiles, $dstNarFiles, $srcPatches, $dstPatches, $narPath, $patchesPath, $patchesURL, $tmpDir) = @_;
-
-    my %srcOutPaths = findOutputPaths $srcNarFiles;
-    my %dstOutPaths = findOutputPaths $dstNarFiles;
-
-    # For each output path in the destination, see if we need to / can
-    # create a patch.
-
-    print STDERR "creating patches...\n";
-
-    foreach my $p (keys %dstOutPaths) {
-
-        # If exactly the same path already exists in the source, skip it.
-        next if defined $srcOutPaths{$p};
-
-        print " $p\n";
-
-        # If not, then we should find the paths in the source that are
-        # `most' likely to be present on a system that wants to
-        # install this path.
-
-        (my $name, my $version) = getNameVersion $p;
-        next unless defined $name && defined $version;
-
-        my @closest = ();
-        my $closestVersion;
-        my $minDist = -1; # actually, larger means closer
-
-        # Find all source paths with the same name.
-
-        foreach my $q (keys %srcOutPaths) {
-            (my $name2, my $version2) = getNameVersion $q;
-            next unless defined $name2 && defined $version2;
-
-            if ($name eq $name2) {
-
-                my $srcSystem = @{$$dstNarFiles{$p}}[0]->{system};
-                my $dstSystem = @{$$srcNarFiles{$q}}[0]->{system};
-                if (defined $srcSystem && defined $dstSystem && $srcSystem ne $dstSystem) {
-                    print " SKIPPING $q due to different systems ($srcSystem vs. $dstSystem)\n";
-                    next;
-                }
-
-                # If the sizes differ too much, then skip. This
-                # disambiguates between, e.g., a real component and a
-                # wrapper component (cf. Firefox in Nixpkgs).
-                my $srcSize = @{$$srcNarFiles{$q}}[0]->{size};
-                my $dstSize = @{$$dstNarFiles{$p}}[0]->{size};
-                my $ratio = $srcSize / $dstSize;
-                $ratio = 1 / $ratio if $ratio < 1;
-                # print " SIZE $srcSize $dstSize $ratio $q\n";
-
-                if ($ratio >= 3) {
-                    print " SKIPPING $q due to size ratio $ratio ($srcSize vs. $dstSize)\n";
-                    next;
-                }
-
-                # If there are multiple matching names, include the
-                # ones with the closest version numbers.
-                my $dist = versionDiff $version, $version2;
-                if ($dist > $minDist) {
-                    $minDist = $dist;
-                    @closest = ($q);
-                    $closestVersion = $version2;
-                } elsif ($dist == $minDist) {
-                    push @closest, $q;
-                }
-            }
-        }
-
-        if (scalar(@closest) == 0) {
-            print " NO BASE: $p\n";
-            next;
-        }
-
-        foreach my $closest (@closest) {
-
-            # Generate a patch between $closest and $p.
-            print STDERR " $p <- $closest\n";
-
-            # If the patch already exists, skip it.
-            if (containsPatch($srcPatches, $p, $closest) ||
-                containsPatch($dstPatches, $p, $closest))
-            {
-                print " skipping, already exists\n";
-                next;
-            }
-
-            my $srcNarBz2 = getNarBz2 $narPath, $srcNarFiles, $closest;
-            my $dstNarBz2 = getNarBz2 $narPath, $dstNarFiles, $p;
-
-            if (! -f $srcNarBz2) {
-                warn "patch source archive $srcNarBz2 is missing\n";
-                next;
-            }
-
-            system("$Nix::Config::bzip2 -d < $srcNarBz2 > $tmpDir/A") == 0
-                or die "cannot unpack $srcNarBz2";
-
-            if (stat("$tmpDir/A")->size >= $maxNarSize) {
-                print " skipping, source is too large\n";
-                next;
-            }
-
-            system("$Nix::Config::bzip2 -d < $dstNarBz2 > $tmpDir/B") == 0
-                or die "cannot unpack $dstNarBz2";
-
-            if (stat("$tmpDir/B")->size >= $maxNarSize) {
-                print " skipping, destination is too large\n";
-                next;
-            }
-
-            my $time1 = time();
-            my $res = system("ulimit -t $timeLimit; $Nix::Config::libexecDir/nix/bsdiff $tmpDir/A $tmpDir/B $tmpDir/DIFF");
-            my $time2 = time();
-            if ($res) {
-                warn "binary diff computation aborted after ", $time2 - $time1, " seconds\n";
-                next;
-            }
-
-            my $baseHash = `$Nix::Config::binDir/nix-hash --flat --type $hashAlgo --base32 $tmpDir/A` or die;
-            chomp $baseHash;
-
-            my $narHash = `$Nix::Config::binDir/nix-hash --flat --type $hashAlgo --base32 $tmpDir/B` or die;
-            chomp $narHash;
-
-            my $narDiffHash = `$Nix::Config::binDir/nix-hash --flat --type $hashAlgo --base32 $tmpDir/DIFF` or die;
-            chomp $narDiffHash;
-
-            my $narDiffSize = stat("$tmpDir/DIFF")->size;
-            my $dstNarBz2Size = stat($dstNarBz2)->size;
-
-            print " size $narDiffSize; full size $dstNarBz2Size; ", $time2 - $time1, " seconds\n";
-
-            if ($narDiffSize >= $dstNarBz2Size) {
-                print " rejecting; patch bigger than full archive\n";
-                next;
-            }
-
-            if ($narDiffSize / $dstNarBz2Size >= $maxPatchFraction) {
-                print " rejecting; patch too large relative to full archive\n";
-                next;
-            }
-
-            my $finalName = "$narDiffHash.nar-bsdiff";
-
-            if (-e "$patchesPath/$finalName") {
-                print " not copying, already exists\n";
-            }
-
-            else {
-                system("cp '$tmpDir/DIFF' '$patchesPath/$finalName.tmp'") == 0
-                    or die "cannot copy diff";
-                rename("$patchesPath/$finalName.tmp", "$patchesPath/$finalName")
-                    or die "cannot rename $patchesPath/$finalName.tmp";
-            }
-
-            # Add the patch to the manifest.
-            addPatch $dstPatches, $p,
-                { url => "$patchesURL/$finalName", hash => "$hashAlgo:$narDiffHash"
-                , size => $narDiffSize, basePath => $closest, baseHash => "$hashAlgo:$baseHash"
-                , narHash => "$hashAlgo:$narHash", patchType => "nar-bsdiff"
-                };
-        }
-    }
-}
-
-
-# Propagate useful patches from $srcPatches to $dstPatches. A patch
-# is useful if it produces either paths in the $dstNarFiles or paths
-# that can be used as the base for other useful patches.
-sub propagatePatches {
-    my ($srcPatches, $dstNarFiles, $dstPatches) = @_;
-
-    print STDERR "propagating patches...\n";
-
-    my $changed;
-    do {
-        # !!! we repeat this to reach the transitive closure; inefficient
-        $changed = 0;
-
-        print STDERR "loop\n";
-
-        my %dstBasePaths;
-        foreach my $q (keys %{$dstPatches}) {
-            foreach my $patch (@{$$dstPatches{$q}}) {
-                $dstBasePaths{$patch->{basePath}} = 1;
-            }
-        }
-
-        foreach my $p (keys %{$srcPatches}) {
-            my $patchList = $$srcPatches{$p};
-
-            my $include = 0;
-
-            # Is path $p included in the destination? If so, include
-            # patches that produce it.
-            $include = 1 if defined $$dstNarFiles{$p};
-
-            # Is path $p a path that serves as a base for paths in the
-            # destination? If so, include patches that produce it.
-            # !!! check baseHash
-            $include = 1 if defined $dstBasePaths{$p};
-
-            if ($include) {
-                foreach my $patch (@{$patchList}) {
-                    $changed = 1 if addPatch $dstPatches, $p, $patch;
-                }
-            }
-
-        }
-
-    } while $changed;
-}
-
-
-# Add all new patches in $srcPatches to $dstPatches.
-sub copyPatches {
-    my ($srcPatches, $dstPatches) = @_;
-    foreach my $p (keys %{$srcPatches}) {
-        addPatch $dstPatches, $p, $_ foreach @{$$srcPatches{$p}};
-    }
-}
-
-
-return 1;
diff --git a/perl/lib/Nix/Manifest.pm b/perl/lib/Nix/Manifest.pm
index 428decf09b54..0da376761201 100644
--- a/perl/lib/Nix/Manifest.pm
+++ b/perl/lib/Nix/Manifest.pm
@@ -13,7 +13,7 @@ use Nix::Config;
 use Nix::Store;

 our @ISA = qw(Exporter);
-our @EXPORT = qw(readManifest writeManifest updateManifestDB addPatch deleteOldManifests parseNARInfo fingerprintPath);
+our @EXPORT = qw(readManifest writeManifest addPatch parseNARInfo fingerprintPath);


 sub addNAR {
@@ -228,172 +228,6 @@ sub writeManifest {
 }


-sub updateManifestDB {
-    my $manifestDir = $Nix::Config::manifestDir;
-
-    my @manifests = glob "$manifestDir/*.nixmanifest";
-    return undef if scalar @manifests == 0;
-
-    mkpath($manifestDir);
-
-    unlink "$manifestDir/cache.sqlite"; # remove obsolete cache
-    my $dbPath = "$manifestDir/cache-v2.sqlite";
-
-    # Open/create the database.
-    our $dbh = DBI->connect("dbi:SQLite:dbname=$dbPath", "", "")
-        or die "cannot open database ‘$dbPath’";
-    $dbh->{RaiseError} = 1;
-    $dbh->{PrintError} = 0;
-
-    $dbh->do("pragma foreign_keys = on");
-    $dbh->do("pragma synchronous = off"); # we can always reproduce the cache
-    $dbh->do("pragma journal_mode = truncate");
-
-    # Initialise the database schema, if necessary.
-    $dbh->do(<<EOF);
-        create table if not exists Manifests (
-            id integer primary key autoincrement not null,
-            path text unique not null,
-            timestamp integer not null
-        );
-EOF
-
-    $dbh->do(<<EOF);
-        create table if not exists NARs (
-            id integer primary key autoincrement not null,
-            manifest integer not null,
-            storePath text not null,
-            url text not null,
-            compressionType text not null,
-            hash text,
-            size integer,
-            narHash text,
-            narSize integer,
-            refs text,
-            deriver text,
-            system text,
-            foreign key (manifest) references Manifests(id) on delete cascade
-        );
-EOF
-
-    $dbh->do("create index if not exists NARs_storePath on NARs(storePath)");
-
-    $dbh->do(<<EOF);
-        create table if not exists Patches (
-            id integer primary key autoincrement not null,
-            manifest integer not null,
-            storePath text not null,
-            basePath text not null,
-            baseHash text not null,
-            url text not null,
-            hash text,
-            size integer,
-            narHash text,
-            narSize integer,
-            patchType text not null,
-            foreign key (manifest) references Manifests(id) on delete cascade
-        );
-EOF
-
-    $dbh->do("create index if not exists Patches_storePath on Patches(storePath)");
-
-    # Acquire an exclusive lock to ensure that only one process
-    # updates the DB at the same time. This isn't really necessary,
-    # but it prevents work duplication and lock contention in SQLite.
-    my $lockFile = "$manifestDir/cache.lock";
-    open MAINLOCK, ">>$lockFile" or die "unable to acquire lock ‘$lockFile’: $!\n";
-    flock(MAINLOCK, LOCK_EX) or die;
-
-    our $insertNAR = $dbh->prepare(
-        "insert into NARs(manifest, storePath, url, compressionType, hash, size, narHash, " .
-        "narSize, refs, deriver, system) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)") or die;
-
-    our $insertPatch = $dbh->prepare(
-        "insert into Patches(manifest, storePath, basePath, baseHash, url, hash, " .
-        "size, narHash, narSize, patchType) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
-
-    $dbh->begin_work;
-
-    # Read each manifest in $manifestDir and add it to the database,
-    # unless we've already done so on a previous run.
-    my %seen;
-
-    for my $manifestLink (@manifests) {
-        my $manifest = Cwd::abs_path($manifestLink);
-        next unless -f $manifest;
-        my $timestamp = lstat($manifest)->mtime;
-        $seen{$manifest} = 1;
-
-        next if scalar @{$dbh->selectcol_arrayref(
-            "select 1 from Manifests where path = ? and timestamp = ?",
-            {}, $manifest, $timestamp)} == 1;
-
-        print STDERR "caching $manifest...\n";
-
-        $dbh->do("delete from Manifests where path = ?", {}, $manifest);
-
-        $dbh->do("insert into Manifests(path, timestamp) values (?, ?)",
-            {}, $manifest, $timestamp);
-
-        our $id = $dbh->last_insert_id("", "", "", "");
-
-        sub addNARToDB {
-            my ($storePath, $narFile) = @_;
-            $insertNAR->execute(
-                $id, $storePath, $narFile->{url}, $narFile->{compressionType}, $narFile->{hash},
-                $narFile->{size}, $narFile->{narHash}, $narFile->{narSize}, $narFile->{references},
-                $narFile->{deriver}, $narFile->{system});
-        };
-
-        sub addPatchToDB {
-            my ($storePath, $patch) = @_;
-            $insertPatch->execute(
-                $id, $storePath, $patch->{basePath}, $patch->{baseHash}, $patch->{url},
-                $patch->{hash}, $patch->{size}, $patch->{narHash}, $patch->{narSize},
-                $patch->{patchType});
-        };
-
-        my $version = readManifest_($manifest, \&addNARToDB, \&addPatchToDB);
-
-        if ($version < 3) {
-            die "you have an old-style or corrupt manifest ‘$manifestLink’; please delete it\n";
-        }
-        if ($version >= 10) {
-            die "manifest ‘$manifestLink’ is too new; please delete it or upgrade Nix\n";
-        }
-    }
-
-    # Removed cached information for removed manifests from the DB.
-    foreach my $manifest (@{$dbh->selectcol_arrayref("select path from Manifests")}) {
-        next if defined $seen{$manifest};
-        $dbh->do("delete from Manifests where path = ?", {}, $manifest);
-    }
-
-    $dbh->commit;
-
-    close MAINLOCK;
-
-    return $dbh;
-}
-
-
-# Delete all old manifests downloaded from a given URL.
-sub deleteOldManifests {
-    my ($url, $curUrlFile) = @_;
-    for my $urlFile (glob "$Nix::Config::manifestDir/*.url") {
-        next if defined $curUrlFile && $urlFile eq $curUrlFile;
-        open URL, "<$urlFile" or die;
-        my $url2 = <URL>;
-        chomp $url2;
-        close URL;
-        next unless $url eq $url2;
-        my $base = $urlFile; $base =~ s/.url$//;
-        unlink "${base}.url";
-        unlink "${base}.nixmanifest";
-    }
-}
-
-
 # Return a fingerprint of a store path to be used in binary cache
 # signatures. It contains the store path, the base-32 SHA-256 hash of
 # the contents of the path, and the references.
diff --git a/perl/lib/Nix/Store.pm b/perl/lib/Nix/Store.pm
index af3d2fa2e719..d226264d4df3 100644
--- a/perl/lib/Nix/Store.pm
+++ b/perl/lib/Nix/Store.pm
@@ -21,6 +21,7 @@ our @EXPORT = qw(
     signString checkSignature
     addToStore makeFixedOutputPath
     derivationFromPath
+    addTempRoot
 );

 our $VERSION = '0.15';
diff --git a/perl/lib/Nix/Store.xs b/perl/lib/Nix/Store.xs
index d3bfa19fd846..ee60ce13011d 100644
--- a/perl/lib/Nix/Store.xs
+++ b/perl/lib/Nix/Store.xs
@@ -6,10 +6,11 @@
 #undef do_open
 #undef do_close

-#include <store-api.hh>
-#include <globals.hh>
-#include <misc.hh>
-#include <util.hh>
+#include "derivations.hh"
+#include "globals.hh"
+#include "store-api.hh"
+#include "util.hh"
+#include "crypto.hh"

 #if HAVE_SODIUM
 #include <sodium.h>
@@ -19,19 +20,22 @@
 using namespace nix;

-void doInit()
+static ref<Store> store()
 {
-    if (!store) {
+    static std::shared_ptr<Store> _store;
+    if (!_store) {
         try {
+            logger = makeDefaultLogger();
             settings.processEnvironment();
             settings.loadConfFile();
             settings.update();
             settings.lockCPU = false;
-            store = openStore();
+            _store = openStore();
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
     }
+    return ref<Store>(_store);
 }
@@ -45,7 +49,7 @@ PROTOTYPES: ENABLE

 void init()
     CODE:
-        doInit();
+        store();


 void setVerbosity(int level)
@@ -56,10 +60,9 @@ void setVerbosity(int level)
 int isValidPath(char * path)
     CODE:
         try {
-            doInit();
-            RETVAL = store->isValidPath(path);
+            RETVAL = store()->isValidPath(path);
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
     OUTPUT:
         RETVAL
@@ -68,134 +71,124 @@ int isValidPath(char * path)
 SV * queryReferences(char * path)
     PPCODE:
         try {
-            doInit();
-            PathSet paths;
-            store->queryReferences(path, paths);
+            PathSet paths = store()->queryPathInfo(path)->references;
             for (PathSet::iterator i = paths.begin(); i != paths.end(); ++i)
                 XPUSHs(sv_2mortal(newSVpv(i->c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


 SV * queryPathHash(char * path)
     PPCODE:
         try {
-            doInit();
-            Hash hash = store->queryPathHash(path);
+            auto hash = store()->queryPathInfo(path)->narHash;
             string s = "sha256:" + printHash32(hash);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


 SV * queryDeriver(char * path)
     PPCODE:
         try {
-            doInit();
-            Path deriver = store->queryDeriver(path);
+            auto deriver = store()->queryPathInfo(path)->deriver;
             if (deriver == "") XSRETURN_UNDEF;
             XPUSHs(sv_2mortal(newSVpv(deriver.c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


 SV * queryPathInfo(char * path, int base32)
     PPCODE:
         try {
-            doInit();
-            ValidPathInfo info = store->queryPathInfo(path);
-            if (info.deriver == "")
+            auto info = store()->queryPathInfo(path);
+            if (info->deriver == "")
                 XPUSHs(&PL_sv_undef);
             else
-                XPUSHs(sv_2mortal(newSVpv(info.deriver.c_str(), 0)));
-            string s = "sha256:" + (base32 ? printHash32(info.hash) : printHash(info.hash));
+                XPUSHs(sv_2mortal(newSVpv(info->deriver.c_str(), 0)));
+            string s = "sha256:" + (base32 ? printHash32(info->narHash) : printHash(info->narHash));
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
-            mXPUSHi(info.registrationTime);
-            mXPUSHi(info.narSize);
+            mXPUSHi(info->registrationTime);
+            mXPUSHi(info->narSize);
             AV * arr = newAV();
-            for (PathSet::iterator i = info.references.begin(); i != info.references.end(); ++i)
+            for (PathSet::iterator i = info->references.begin(); i != info->references.end(); ++i)
                 av_push(arr, newSVpv(i->c_str(), 0));
             XPUSHs(sv_2mortal(newRV((SV *) arr)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


 SV * queryPathFromHashPart(char * hashPart)
     PPCODE:
         try {
-            doInit();
-            Path path = store->queryPathFromHashPart(hashPart);
+            Path path = store()->queryPathFromHashPart(hashPart);
             XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


 SV * computeFSClosure(int flipDirection, int includeOutputs, ...)
     PPCODE:
         try {
-            doInit();
             PathSet paths;
             for (int n = 2; n < items; ++n)
-                computeFSClosure(*store, SvPV_nolen(ST(n)), paths, flipDirection, includeOutputs);
+                store()->computeFSClosure(SvPV_nolen(ST(n)), paths, flipDirection, includeOutputs);
             for (PathSet::iterator i = paths.begin(); i != paths.end(); ++i)
                 XPUSHs(sv_2mortal(newSVpv(i->c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


 SV * topoSortPaths(...)
     PPCODE:
         try {
-            doInit();
             PathSet paths;
             for (int n = 0; n < items; ++n) paths.insert(SvPV_nolen(ST(n)));
-            Paths sorted = topoSortPaths(*store, paths);
+            Paths sorted = store()->topoSortPaths(paths);
             for (Paths::iterator i = sorted.begin(); i != sorted.end(); ++i)
                 XPUSHs(sv_2mortal(newSVpv(i->c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


 SV * followLinksToStorePath(char * path)
     CODE:
         try {
-            doInit();
+            store();
             RETVAL = newSVpv(followLinksToStorePath(path).c_str(), 0);
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
     OUTPUT:
         RETVAL


-void exportPaths(int fd, int sign, ...)
+void exportPaths(int fd, ...)
     PPCODE:
         try {
-            doInit();
             Paths paths;
-            for (int n = 2; n < items; ++n) paths.push_back(SvPV_nolen(ST(n)));
+            for (int n = 1; n < items; ++n) paths.push_back(SvPV_nolen(ST(n)));
             FdSink sink(fd);
-            exportPaths(*store, paths, sign, sink);
+            store()->exportPaths(paths, sink);
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


 void importPaths(int fd)
     PPCODE:
         try {
-            doInit();
             FdSource source(fd);
-            store->importPaths(false, source);
+            store()->importPaths(source, 0);
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
@@ -206,7 +199,7 @@ SV * hashPath(char * algo, int base32, char * path)
             string s = base32 ? printHash32(h) : printHash(h);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
@@ -217,7 +210,7 @@ SV * hashFile(char * algo, int base32, char * path)
             string s = base32 ? printHash32(h) : printHash(h);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
@@ -228,7 +221,7 @@ SV * hashString(char * algo, int base32, char * s)
             string s = base32 ? printHash32(h) : printHash(h);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
@@ -239,28 +232,21 @@ SV * convertHash(char * algo, char * s, int toBase32)
             string s = toBase32 ? printHash32(h) : printHash(h);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


-SV * signString(SV * secretKey_, char * msg)
+SV * signString(char * secretKey_, char * msg)
     PPCODE:
         try {
 #if HAVE_SODIUM
-            STRLEN secretKeyLen;
-            unsigned char * secretKey = (unsigned char *) SvPV(secretKey_, secretKeyLen);
-            if (secretKeyLen != crypto_sign_SECRETKEYBYTES)
-                throw Error("secret key is not valid");
-
-            unsigned char sig[crypto_sign_BYTES];
-            unsigned long long sigLen;
-            crypto_sign_detached(sig, &sigLen, (unsigned char *) msg, strlen(msg), secretKey);
-            XPUSHs(sv_2mortal(newSVpv((char *) sig, sigLen)));
+            auto sig = SecretKey(secretKey_).signDetached(msg);
+            XPUSHs(sv_2mortal(newSVpv(sig.c_str(), sig.size())));
 #else
             throw Error("Nix was not compiled with libsodium, required for signed binary cache support");
 #endif
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
@@ -283,7 +269,7 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg)
             throw Error("Nix was not compiled with libsodium, required for signed binary cache support");
 #endif
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
     OUTPUT:
         RETVAL
@@ -292,24 +278,22 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg)

 SV * addToStore(char * srcPath, int recursive, char * algo)
     PPCODE:
         try {
-            doInit();
-            Path path = store->addToStore(baseNameOf(srcPath), srcPath, recursive, parseHashType(algo));
+            Path path = store()->addToStore(baseNameOf(srcPath), srcPath, recursive, parseHashType(algo));
             XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }


 SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
     PPCODE:
         try {
-            doInit();
             HashType ht = parseHashType(algo);
             Path path = makeFixedOutputPath(recursive, ht, parseHash16or32(ht, hash), name);
             XPUSHs(sv_2mortal(newSVpv(path.c_str(), 0)));
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
@@ -318,8 +302,7 @@ SV * derivationFromPath(char * drvPath)
     HV *hash;
     CODE:
         try {
-            doInit();
-            Derivation drv = derivationFromPath(*store, drvPath);
+            Derivation drv = store()->derivationFromPath(drvPath);
             hash = newHV();

             HV * outputs = newHV();
@@ -352,7 +335,16 @@ SV * derivationFromPath(char * drvPath)

             RETVAL = newRV_noinc((SV *)hash);
         } catch (Error & e) {
-            croak(e.what());
+            croak("%s", e.what());
         }
     OUTPUT:
         RETVAL
+
+
+void addTempRoot(char * storePath)
+    PPCODE:
+        try {
+            store()->addTempRoot(storePath);
+        } catch (Error & e) {
+            croak("%s", e.what());
+        }
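
For orientation, here is a minimal sketch of how a caller might use the Perl bindings after this change. The store path, output file, and host name are placeholders, and error handling is kept to a minimum; treat it as an illustration of the new signatures rather than production code.

    use Nix::Store;
    use Nix::CopyClosure;

    # Hypothetical store path, for illustration only.
    my @paths = ("/nix/store/…-example");

    # New in this change: register a temporary GC root through the bindings.
    addTempRoot($_) foreach @paths;

    # exportPaths() no longer takes a $sign flag; it simply serialises the
    # given paths to the file descriptor in export format.
    open(my $fh, ">", "paths.export") or die "cannot open output: $!";
    exportPaths(fileno($fh), @paths);
    close $fh;

    # copyTo() has likewise lost its $sign argument; the parameters are now
    # ($sshHost, $storePaths, $includeOutputs, $dryRun, $useSubstitutes).
    Nix::CopyClosure::copyTo("remote.example.org", [ @paths ], 0, 0, 1);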