Diffstat (limited to 'scripts')
-rwxr-xr-x  scripts/download-using-manifests.pl.in | 376
-rw-r--r--  scripts/local.mk                       |   5
-rwxr-xr-x  scripts/nix-channel.in                 |  25
-rwxr-xr-x  scripts/nix-generate-patches.in        |  51
-rwxr-xr-x  scripts/nix-install-package.in         |  22
-rwxr-xr-x  scripts/nix-pull.in                    | 102
6 files changed, 7 insertions, 574 deletions
diff --git a/scripts/download-using-manifests.pl.in b/scripts/download-using-manifests.pl.in
deleted file mode 100755
index ffc49f8fffde..000000000000
--- a/scripts/download-using-manifests.pl.in
+++ /dev/null
@@ -1,376 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use Nix::Config;
-use Nix::Manifest;
-use Nix::Store;
-use Nix::Utils;
-use POSIX qw(strftime);
-
-STDOUT->autoflush(1);
-binmode STDERR, ":encoding(utf8)";
-
-my $logFile = "$Nix::Config::logDir/downloads";
-
-# For queries, skip expensive calls to nix-hash etc.  We're just
-# estimating the expected download size.
-my $fast = 1;
-
-my $curl = "$Nix::Config::curl --fail --location";
-
-
-# Open the manifest cache and update it if necessary.
-my $dbh = updateManifestDB();
-exit 0 unless defined $dbh; # exit if there are no manifests
-print "\n";
-
-
-# $hashCache->{$algo}->{$path} yields the $algo-hash of $path.
-my $hashCache;
-
-
-sub parseHash {
-    my $hash = shift;
-    if ($hash =~ /^(.+):(.+)$/) {
-        return ($1, $2);
-    } else {
-        return ("md5", $hash);
-    }
-}
-
-
-# Compute the most efficient sequence of downloads to produce the
-# given path.
-sub computeSmallestDownload {
-    my $targetPath = shift;
-
-    # Build a graph of all store paths that might contribute to the
-    # construction of $targetPath, and the special node "start".  The
-    # edges are either patch operations, or downloads of full NAR
-    # files.  The latter edges only occur between "start" and a store
-    # path.
-    my %graph;
-
-    $graph{"start"} = {d => 0, pred => undef, edges => []};
-
-    my @queue = ();
-    my $queueFront = 0;
-    my %done;
-
-    sub addNode {
-        my $graph = shift;
-        my $u = shift;
-        $$graph{$u} = {d => 999999999999, pred => undef, edges => []}
-            unless defined $$graph{$u};
-    }
-
-    sub addEdge {
-        my $graph = shift;
-        my $u = shift;
-        my $v = shift;
-        my $w = shift;
-        my $type = shift;
-        my $info = shift;
-        addNode $graph, $u;
-        push @{$$graph{$u}->{edges}},
-            {weight => $w, start => $u, end => $v, type => $type, info => $info};
-        my $n = scalar @{$$graph{$u}->{edges}};
-    }
-
-    push @queue, $targetPath;
-
-    while ($queueFront < scalar @queue) {
-        my $u = $queue[$queueFront++];
-        next if defined $done{$u};
-        $done{$u} = 1;
-
-        addNode \%graph, $u;
-
-        # If the path already exists, it has distance 0 from the
-        # "start" node.
-        if (isValidPath($u)) {
-            addEdge \%graph, "start", $u, 0, "present", undef;
-        }
-
-        else {
-
-            # Add patch edges.
-            my $patchList = $dbh->selectall_arrayref(
-                "select * from Patches where storePath = ?",
-                { Slice => {} }, $u);
-
-            foreach my $patch (@{$patchList}) {
-                if (isValidPath($patch->{basePath})) {
-                    my ($baseHashAlgo, $baseHash) = parseHash $patch->{baseHash};
-
-                    my $hash = $hashCache->{$baseHashAlgo}->{$patch->{basePath}};
-                    if (!defined $hash) {
-                        $hash = $fast && $baseHashAlgo eq "sha256"
-                            ? queryPathHash($patch->{basePath})
-                            : hashPath($baseHashAlgo, $baseHashAlgo ne "md5", $patch->{basePath});
-                        $hash =~ s/.*://;
-                        $hashCache->{$baseHashAlgo}->{$patch->{basePath}} = $hash;
-                    }
-
-                    next if $hash ne $baseHash;
-                }
-                push @queue, $patch->{basePath};
-                addEdge \%graph, $patch->{basePath}, $u, $patch->{size}, "patch", $patch;
-            }
-
-            # Add NAR file edges to the start node.
-            my $narFileList = $dbh->selectall_arrayref(
-                "select * from NARs where storePath = ?",
-                { Slice => {} }, $u);
-
-            foreach my $narFile (@{$narFileList}) {
-                # !!! how to handle files whose size is not known in advance?
-                # For now, assume some arbitrary size (1 GB).
-                # This has the side-effect of preferring non-Hydra downloads.
-                addEdge \%graph, "start", $u, ($narFile->{size} || 1000000000), "narfile", $narFile;
-            }
-        }
-    }
-
-
-    # Run Dijkstra's shortest path algorithm to determine the shortest
-    # sequence of download and/or patch actions that will produce
-    # $targetPath.
-
-    my @todo = keys %graph;
-
-    while (scalar @todo > 0) {
-
-        # Remove the closest element from the todo list.
-        # !!! inefficient, use a priority queue
-        @todo = sort { -($graph{$a}->{d} <=> $graph{$b}->{d}) } @todo;
-        my $u = pop @todo;
-
-        my $u_ = $graph{$u};
-
-        foreach my $edge (@{$u_->{edges}}) {
-            my $v_ = $graph{$edge->{end}};
-            if ($v_->{d} > $u_->{d} + $edge->{weight}) {
-                $v_->{d} = $u_->{d} + $edge->{weight};
-                # Store the edge; edge->start is the predecessor.
-                $v_->{pred} = $edge;
-            }
-        }
-    }
-
-
-    # Retrieve the shortest path from "start" to $targetPath.
-    my @path = ();
-    my $cur = $targetPath;
-    return () unless defined $graph{$targetPath}->{pred};
-    while ($cur ne "start") {
-        push @path, $graph{$cur}->{pred};
-        $cur = $graph{$cur}->{pred}->{start};
-    }
-
-    return @path;
-}
-
-
-# Parse the arguments.
-
-if ($ARGV[0] eq "--query") {
-
-    while (<STDIN>) {
-        chomp;
-        my ($cmd, @args) = split " ", $_;
-
-        if ($cmd eq "have") {
-            foreach my $storePath (@args) {
-                print "$storePath\n" if scalar @{$dbh->selectcol_arrayref("select 1 from NARs where storePath = ?", {}, $storePath)} > 0;
-            }
-            print "\n";
-        }
-
-        elsif ($cmd eq "info") {
-            foreach my $storePath (@args) {
-
-                my $infos = $dbh->selectall_arrayref(
-                    "select * from NARs where storePath = ?",
-                    { Slice => {} }, $storePath);
-
-                next unless scalar @{$infos} > 0;
-                my $info = @{$infos}[0];
-
-                print "$storePath\n";
-                print "$info->{deriver}\n";
-                my @references = split " ", $info->{refs};
-                print scalar @references, "\n";
-                print "$_\n" foreach @references;
-
-                my @path = computeSmallestDownload $storePath;
-
-                my $downloadSize = 0;
-                while (scalar @path > 0) {
-                    my $edge = pop @path;
-                    my $u = $edge->{start};
-                    my $v = $edge->{end};
-                    if ($edge->{type} eq "patch") {
-                        $downloadSize += $edge->{info}->{size} || 0;
-                    }
-                    elsif ($edge->{type} eq "narfile") {
-                        $downloadSize += $edge->{info}->{size} || 0;
-                    }
-                }
-
-                print "$downloadSize\n";
-
-                my $narSize = $info->{narSize} || 0;
-                print "$narSize\n";
-            }
-
-            print "\n";
-        }
-
-        else { die "unknown command ‘$cmd’"; }
-    }
-
-    exit 0;
-}
-
-elsif ($ARGV[0] ne "--substitute") {
-    die;
-}
-
-
-die unless scalar @ARGV == 3;
-my $targetPath = $ARGV[1];
-my $destPath = $ARGV[2];
-$fast = 0;
-
-
-# Create a temporary directory.
-my $tmpDir = mkTempDir("nix-download");
-
-my $tmpNar = "$tmpDir/nar";
-my $tmpNar2 = "$tmpDir/nar2";
-
-
-open LOGFILE, ">>$logFile" or die "cannot open log file $logFile";
-
-my $date = strftime ("%F %H:%M:%S UTC", gmtime (time));
-print LOGFILE "$$ get $targetPath $date\n";
-
-print STDERR "\n*** Trying to download/patch ‘$targetPath’\n";
-
-
-# Compute the shortest path.
-my @path = computeSmallestDownload $targetPath;
-die "don't know how to produce $targetPath\n" if scalar @path == 0;
-
-
-# We don't need the manifest anymore, so close it as an optimisation:
-# if we still have SQLite locks blocking other processes (we
-# shouldn't), this gets rid of them.
-$dbh->disconnect;
-
-
-# Traverse the shortest path, perform the actions described by the
-# edges.
-my $curStep = 1;
-my $maxStep = scalar @path;
-
-my $finalNarHash;
-
-while (scalar @path > 0) {
-    my $edge = pop @path;
-    my $u = $edge->{start};
-    my $v = $edge->{end};
-
-    print STDERR "\n*** Step $curStep/$maxStep: ";
-
-    if ($edge->{type} eq "present") {
-        print STDERR "using already present path ‘$v’\n";
-        print LOGFILE "$$ present $v\n";
-
-        if ($curStep < $maxStep) {
-            # Since this is not the last step, the path will be used
-            # as a base to one or more patches.  So turn the base path
-            # into a NAR archive, to which we can apply the patch.
-            print STDERR "  packing base path...\n";
-            system("$Nix::Config::binDir/nix-store --dump $v > $tmpNar") == 0
-                or die "cannot dump ‘$v’";
-        }
-    }
-
-    elsif ($edge->{type} eq "patch") {
-        my $patch = $edge->{info};
-        print STDERR "applying patch ‘$patch->{url}’ to ‘$u’ to create ‘$v’\n";
-
-        print LOGFILE "$$ patch $patch->{url} $patch->{size} $patch->{baseHash} $u $v\n";
-
-        # Download the patch.
-        print STDERR "  downloading patch...\n";
-        my $patchPath = "$tmpDir/patch";
-        checkURL $patch->{url};
-        system("$curl '$patch->{url}' -o $patchPath") == 0
-            or die "cannot download patch ‘$patch->{url}’\n";
-
-        # Apply the patch to the NAR archive produced in step 1 (for
-        # the already present path) or a later step (for patch sequences).
-        print STDERR "  applying patch...\n";
-        system("$Nix::Config::libexecDir/nix/bspatch $tmpNar $tmpNar2 $patchPath") == 0
-            or die "cannot apply patch ‘$patchPath’ to $tmpNar\n";
-
-        if ($curStep < $maxStep) {
-            # The archive will be used as the base of the next patch.
-            rename "$tmpNar2", "$tmpNar" or die "cannot rename NAR archive: $!";
-        } else {
-            # This was the last patch.  Unpack the final NAR archive
-            # into the target path.
-            print STDERR "  unpacking patched archive...\n";
-            system("$Nix::Config::binDir/nix-store --restore $destPath < $tmpNar2") == 0
-                or die "cannot unpack $tmpNar2 to ‘$v’\n";
-        }
-
-        $finalNarHash = $patch->{narHash};
-    }
-
-    elsif ($edge->{type} eq "narfile") {
-        my $narFile = $edge->{info};
-        print STDERR "downloading ‘$narFile->{url}’ to ‘$v’\n";
-
-        my $size = $narFile->{size} || -1;
-        print LOGFILE "$$ narfile $narFile->{url} $size $v\n";
-
-        checkURL $narFile->{url};
-
-        my $decompressor =
-            $narFile->{compressionType} eq "bzip2" ? "| $Nix::Config::bzip2 -d" :
-            $narFile->{compressionType} eq "xz" ? "| $Nix::Config::xz -d" :
-            $narFile->{compressionType} eq "none" ? "" :
-            die "unknown compression type ‘$narFile->{compressionType}’";
-
-        if ($curStep < $maxStep) {
-            # The archive will be used as the base for a patch.
-            system("$curl '$narFile->{url}' $decompressor > $tmpNar") == 0
-                or die "cannot download and unpack ‘$narFile->{url}’ to ‘$v’\n";
-        } else {
-            # Unpack the archive to the target path.
-            system("$curl '$narFile->{url}' $decompressor | $Nix::Config::binDir/nix-store --restore '$destPath'") == 0
-                or die "cannot download and unpack ‘$narFile->{url}’ to ‘$v’\n";
-        }
-
-        $finalNarHash = $narFile->{narHash};
-    }
-
-    $curStep++;
-}
-
-
-# Tell Nix about the expected hash so it can verify it.
-die "cannot check integrity of the downloaded path since its hash is not known\n"
-    unless defined $finalNarHash;
-print "$finalNarHash\n";
-
-
-print STDERR "\n";
-print LOGFILE "$$ success\n";
-close LOGFILE;
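
The heart of the deleted substituter is computeSmallestDownload above: every store path that could contribute becomes a graph node, a synthetic "start" node represents what is already on disk, full NAR downloads and binary patches become edges weighted by their download size, and Dijkstra's algorithm picks the cheapest sequence of actions. A minimal, self-contained Perl sketch of that selection step follows; the store paths and sizes are hypothetical, not taken from any manifest database.

#!/usr/bin/env perl
# Sketch of the plan selection performed by the removed script: the cheapest
# path from "start" to the target path is the download plan.
use strict;
use warnings;

# A full NAR of the target (300 bytes) competes with a smaller base NAR
# (200 bytes) followed by a patch (20 bytes).  All paths are hypothetical.
my %graph = (
    "start" => { d => 0, pred => undef, edges => [
        { end => "/nix/store/aaa-base",   weight => 200, type => "narfile" },
        { end => "/nix/store/bbb-target", weight => 300, type => "narfile" },
    ]},
    "/nix/store/aaa-base" => { d => 999999999999, pred => undef, edges => [
        { end => "/nix/store/bbb-target", weight => 20, type => "patch" },
    ]},
    "/nix/store/bbb-target" => { d => 999999999999, pred => undef, edges => [] },
);

# Dijkstra's algorithm, structured like the deleted code: sort the todo list
# by distance instead of using a priority queue, and record the predecessor
# edge of each node so the path can be reconstructed afterwards.
my @todo = keys %graph;
while (@todo) {
    @todo = sort { $graph{$b}->{d} <=> $graph{$a}->{d} } @todo;
    my $u = pop @todo;                       # node with the smallest distance
    foreach my $edge (@{$graph{$u}->{edges}}) {
        my $v = $graph{$edge->{end}};
        if ($v->{d} > $graph{$u}->{d} + $edge->{weight}) {
            $v->{d} = $graph{$u}->{d} + $edge->{weight};
            $v->{pred} = { %$edge, start => $u };
        }
    }
}

# Walk the predecessor edges back from the target to "start".
my @plan;
my $cur = "/nix/store/bbb-target";
while ($cur ne "start") {
    unshift @plan, $graph{$cur}->{pred};
    $cur = $graph{$cur}->{pred}->{start};
}

# Prints the cheaper plan: fetch the base NAR (200 bytes) and apply the
# 20-byte patch, rather than downloading the full 300-byte NAR.
printf "%s -> %s (%s, %d bytes)\n",
    $_->{start}, $_->{end}, $_->{type}, $_->{weight} for @plan;
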
diff --git a/scripts/local.mk b/scripts/local.mk
index cdac56bf13cb..f6542a4cba2f 100644
--- a/scripts/local.mk
+++ b/scripts/local.mk
@@ -2,17 +2,14 @@ nix_bin_scripts := \
   $(d)/nix-build \
   $(d)/nix-channel \
   $(d)/nix-copy-closure \
-  $(d)/nix-generate-patches \
   $(d)/nix-install-package \
-  $(d)/nix-pull \
   $(d)/nix-push
 
 bin-scripts += $(nix_bin_scripts)
 
 nix_substituters := \
   $(d)/copy-from-other-stores.pl \
-  $(d)/download-from-binary-cache.pl \
-  $(d)/download-using-manifests.pl
+  $(d)/download-from-binary-cache.pl
 
 nix_noinst_scripts := \
   $(d)/build-remote.pl \
diff --git a/scripts/nix-channel.in b/scripts/nix-channel.in
index 5191b5855ae0..65084ff1f34a 100755
--- a/scripts/nix-channel.in
+++ b/scripts/nix-channel.in
@@ -12,8 +12,6 @@ binmode STDERR, ":encoding(utf8)";
 
 Nix::Config::readConfig;
 
-my $manifestDir = $Nix::Config::manifestDir;
-
 
 # Turn on caching in nix-prefetch-url.
 my $channelCache = "$Nix::Config::stateDir/channel-cache";
@@ -75,7 +73,6 @@ sub removeChannel {
     my ($name) = @_;
     readChannels;
     my $url = $channels{$name};
-    deleteOldManifests($url . "/MANIFEST", undef) if defined $url;
     delete $channels{$name};
     writeChannels;
 
@@ -84,8 +81,7 @@ sub removeChannel {
 }
 
 
-# Fetch Nix expressions and pull manifests from the subscribed
-# channels.
+# Fetch Nix expressions and binary cache URLs from the subscribed channels.
 sub update {
     my @channelNames = @_;
 
@@ -97,7 +93,6 @@ sub update {
         next if scalar @channelNames > 0 && ! grep { $_ eq $name } @{channelNames};
 
         my $url = $channels{$name};
-        my $origUrl = "$url/MANIFEST";
 
         # We want to download the url to a file to see if it's a tarball while also checking if we
         # got redirected in the process, so that we can grab the various parts of a nix channel
@@ -132,22 +127,8 @@ sub update {
         if ($ret != 0) {
             # Check if the channel advertises a binary cache.
             my $binaryCacheURL = `$Nix::Config::curl --silent '$url'/binary-cache-url`;
-            my $getManifest = ($Nix::Config::config{"force-manifest"} // "false") eq "true";
-            if ($? == 0 && $binaryCacheURL ne "") {
-                $extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; ";
-                deleteOldManifests($origUrl, undef);
-            } else {
-                $getManifest = 1;
-            }
-
-            if ($getManifest) {
-                # No binary cache, so pull the channel manifest.
-                mkdir $manifestDir, 0755 unless -e $manifestDir;
-                die "$0: you do not have write permission to ‘$manifestDir’!\n" unless -W $manifestDir;
-                $ENV{'NIX_ORIG_URL'} = $origUrl;
-                system("$Nix::Config::binDir/nix-pull", "--skip-wrong-store", "$url/MANIFEST") == 0
-                    or die "cannot pull manifest from ‘$url’\n";
-            }
+            $extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; "
+                if $? == 0 && $binaryCacheURL ne "";
 
             # Download the channel tarball.
             my $fullURL = "$url/nixexprs.tar.xz";
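
After this change nix-channel no longer falls back to pulling a manifest; it only probes the channel for an advertised binary cache. A minimal stand-alone sketch of the surviving probe, with a hypothetical channel URL (the real script interpolates $Nix::Config::curl and splices $extraAttrs into the generated channel expression):

#!/usr/bin/env perl
use strict;
use warnings;

my $url = "https://example.org/channels/some-channel";   # hypothetical

# If <channel>/binary-cache-url exists, its body becomes the binaryCacheURL
# attribute of the channel expression; if the request fails, nothing is added.
my $binaryCacheURL = `curl --silent '$url/binary-cache-url'`;
my $extraAttrs = "";
$extraAttrs .= "binaryCacheURL = \"$binaryCacheURL\"; "
    if $? == 0 && $binaryCacheURL ne "";
print "$extraAttrs\n";
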
diff --git a/scripts/nix-generate-patches.in b/scripts/nix-generate-patches.in
deleted file mode 100755
index 0a29c0548c1f..000000000000
--- a/scripts/nix-generate-patches.in
+++ /dev/null
@@ -1,51 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use strict;
-use Nix::Manifest;
-use Nix::GeneratePatches;
-use Nix::Utils;
-
-if (scalar @ARGV != 5) {
-    print STDERR <<EOF;
-Usage: nix-generate-patches NAR-DIR PATCH-DIR PATCH-URI OLD-MANIFEST NEW-MANIFEST
-
-This command generates binary patches between NAR files listed in
-OLD-MANIFEST and NEW-MANIFEST.  The patches are written to the
-directory PATCH-DIR, and the prefix PATCH-URI is used to generate URIs
-for the patches.  The patches are added to NEW-MANIFEST.  All NARs are
-required to exist in NAR-DIR.  Patches are generated between
-succeeding versions of packages with the same name.
-EOF
-    exit 1;
-}
-
-my $narPath = $ARGV[0];
-my $patchesPath = $ARGV[1];
-my $patchesURL = $ARGV[2];
-my $srcManifest = $ARGV[3];
-my $dstManifest = $ARGV[4];
-
-my (%srcNarFiles, %srcLocalPaths, %srcPatches);
-readManifest $srcManifest, \%srcNarFiles, \%srcPatches;
-
-my (%dstNarFiles, %dstLocalPaths, %dstPatches);
-readManifest $dstManifest, \%dstNarFiles, \%dstPatches;
-
-my $tmpDir = mkTempDir("nix-generate-patches");
-
-generatePatches \%srcNarFiles, \%dstNarFiles, \%srcPatches, \%dstPatches,
-    $narPath, $patchesPath, $patchesURL, $tmpDir;
-
-propagatePatches \%srcPatches, \%dstNarFiles, \%dstPatches;
-
-# Optionally add all new patches to the manifest in $NIX_ALL_PATCHES.
-my $allPatchesFile = $ENV{"NIX_ALL_PATCHES"};
-if (defined $allPatchesFile) {
-    my (%dummy, %allPatches);
-    readManifest("$patchesPath/all-patches", \%dummy, \%allPatches)
-        if -f $allPatchesFile;
-    copyPatches \%dstPatches, \%allPatches;
-    writeManifest($allPatchesFile, {}, \%allPatches, 0);
-}
-
-writeManifest $dstManifest, \%dstNarFiles, \%dstPatches;
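
The usage message above fully describes the removed tool's interface; a hypothetical invocation (directories and URL invented for illustration) would have been:

    nix-generate-patches /data/nars /data/patches http://example.org/patches \
        old/MANIFEST new/MANIFEST

after which new/MANIFEST contains the generated patch entries and, if NIX_ALL_PATCHES names a manifest file, the new patches are written there as well.
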
diff --git a/scripts/nix-install-package.in b/scripts/nix-install-package.in
index b442c708b1a2..ba349774af54 100755
--- a/scripts/nix-install-package.in
+++ b/scripts/nix-install-package.in
@@ -89,7 +89,7 @@ my $pathRE = "(?: \/ [\/A-Za-z0-9\+\-\.\_\?\=]* )";
 # store path.  We'll let nix-env do that.
 
 $contents =~
-    / ^ \s* (\S+) \s+ ($Nix::Utils::urlRE) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) ( \s+ ($Nix::Utils::urlRE) )?  /x
+    / ^ \s* (\S+) \s+ (\S+) \s+ ($nameRE) \s+ ($systemRE) \s+ ($pathRE) \s+ ($pathRE) ( \s+ ($Nix::Utils::urlRE) )?  /x
     or barf "invalid package contents";
 my $version = $1;
 my $manifestURL = $2;

@@ -111,25 +111,9 @@ if ($interactive) {
 }
 
 
-if (defined $binaryCacheURL) {
+die "$0: package does not supply a binary cache\n" unless defined $binaryCacheURL;
 
-    push @extraNixEnvArgs, "--option", "extra-binary-caches", $binaryCacheURL;
-
-} else {
-
-    # Store the manifest in the temporary directory so that we don't
-    # pollute /nix/var/nix/manifests.  This also requires that we
-    # don't use the Nix daemon (because otherwise
-    # download-using-manifests won't see our NIX_MANIFESTS_DIRS
-    # environment variable).
-    $ENV{NIX_MANIFESTS_DIR} = $tmpDir;
-    $ENV{NIX_REMOTE} = "";
-
-    print "\nPulling manifests...\n";
-    system("$Nix::Config::binDir/nix-pull", $manifestURL) == 0
-        or barf "nix-pull failed: $?";
-
-}
+push @extraNixEnvArgs, "--option", "extra-binary-caches", $binaryCacheURL;
 
 
 print "\nInstalling package...\n";
diff --git a/scripts/nix-pull.in b/scripts/nix-pull.in
deleted file mode 100755
index 995b50935964..000000000000
--- a/scripts/nix-pull.in
+++ /dev/null
@@ -1,102 +0,0 @@
-#! @perl@ -w @perlFlags@
-
-use utf8;
-use strict;
-use Nix::Config;
-use Nix::Manifest;
-
-binmode STDERR, ":encoding(utf8)";
-
-my $manifestDir = $Nix::Config::manifestDir;
-
-
-# Prevent access problems in shared-store installations.
-umask 0022;
-
-
-# Create the manifests directory if it doesn't exist.
-if (! -e $manifestDir) {
-    mkdir $manifestDir, 0755 or die "cannot create directory ‘$manifestDir’";
-}
-
-
-# Make sure that the manifests directory is scanned for GC roots.
-my $gcRootsDir = "$Nix::Config::stateDir/gcroots";
-my $manifestDirLink = "$gcRootsDir/manifests";
-if (! -l $manifestDirLink) {
-    symlink($manifestDir, $manifestDirLink) or die "cannot create symlink ‘$manifestDirLink’";
-}
-
-
-# Process the URLs specified on the command line.
-
-sub downloadFile {
-    my $url = shift;
-    $ENV{"PRINT_PATH"} = 1;
-    $ENV{"QUIET"} = 1;
-    my ($dummy, $path) = `$Nix::Config::binDir/nix-prefetch-url '$url'`;
-    die "cannot fetch ‘$url’" if $? != 0;
-    die "nix-prefetch-url did not return a path" unless defined $path;
-    chomp $path;
-    return $path;
-}
-
-sub processURL {
-    my $url = shift;
-
-    $url =~ s/\/$//;
-
-    my $manifest;
-
-    my $origUrl = $ENV{'NIX_ORIG_URL'} || $url;
-
-    # First see if a bzipped manifest is available.
-    if (system("$Nix::Config::curl --fail --silent --location --head '$url'.bz2 > /dev/null") == 0) {
-        print "fetching list of Nix archives at ‘$url.bz2’...\n";
-        $manifest = downloadFile "$url.bz2";
-    }
-
-    # Otherwise, just get the uncompressed manifest.
-    else {
-        print "fetching list of Nix archives at ‘$url’...\n";
-        $manifest = downloadFile $url;
-    }
-
-    my $baseName = "unnamed";
-    if ($url =~ /\/([^\/]+)\/[^\/]+$/) { # get the second-to-last path component
-        $baseName = $1;
-    }
-
-    my $hash = `$Nix::Config::binDir/nix-hash --flat '$manifest'`
-        or die "cannot hash ‘$manifest’";
-    chomp $hash;
-
-    my $urlFile = "$manifestDir/$baseName-$hash.url";
-    open URL, ">$urlFile" or die "cannot create ‘$urlFile’";
-    print URL $origUrl;
-    close URL;
-
-    my $finalPath = "$manifestDir/$baseName-$hash.nixmanifest";
-
-    unlink $finalPath if -e $finalPath;
-
-    symlink("$manifest", "$finalPath")
-        or die "cannot link ‘$finalPath’ to ‘$manifest’";
-
-    deleteOldManifests($origUrl, $urlFile);
-}
-
-while (@ARGV) {
-    my $url = shift @ARGV;
-    if ($url eq "--help") {
-        exec "man nix-pull" or die;
-    } elsif ($url eq "--skip-wrong-store") {
-        # No-op, no longer supported.
-    } else {
-        processURL $url;
-    }
-}
-
-
-# Update the cache.
-updateManifestDB();
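
Finally, the downloadFile helper above leaned on a convention of nix-prefetch-url that is easy to miss: with PRINT_PATH set it prints the hash and then the store path, so the list assignment discards the first line and keeps the second. A small stand-alone sketch of that call, with a hypothetical manifest URL:

#!/usr/bin/env perl
# PRINT_PATH makes nix-prefetch-url emit the store path on the line after the
# hash; QUIET suppresses progress output (the same convention the deleted sub
# relied on).
use strict;
use warnings;

$ENV{"PRINT_PATH"} = 1;
$ENV{"QUIET"} = 1;

my $url = "http://example.org/MANIFEST.bz2";             # hypothetical
my ($hash, $path) = `nix-prefetch-url '$url'`;           # list context: one line each
die "cannot fetch ‘$url’\n" if $? != 0;
die "nix-prefetch-url did not return a path\n" unless defined $path;
chomp $path;
print "manifest downloaded to $path\n";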