Diffstat (limited to 'src')
-rw-r--r--  src/build-remote/build-remote.cc | 19
-rw-r--r--  src/build-remote/local.mk | 9
-rw-r--r--  src/libexpr/eval.cc | 150
-rw-r--r--  src/libexpr/primops.cc | 2
-rw-r--r--  src/libstore/build.cc | 436
-rw-r--r--  src/libstore/builtins/fetchurl.cc | 5
-rw-r--r--  src/libstore/derivations.cc | 14
-rw-r--r--  src/libstore/derivations.hh | 4
-rw-r--r--  src/libstore/download.cc | 47
-rw-r--r--  src/libstore/globals.cc | 15
-rw-r--r--  src/libstore/globals.hh | 12
-rw-r--r--  src/libstore/http-binary-cache-store.cc | 41
-rw-r--r--  src/libstore/local-store.cc | 6
-rw-r--r--  src/libstore/parsed-derivations.cc | 111
-rw-r--r--  src/libstore/parsed-derivations.hh | 35
-rw-r--r--  src/libstore/remote-store.cc | 171
-rw-r--r--  src/libstore/remote-store.hh | 13
-rw-r--r--  src/libstore/s3-binary-cache-store.cc | 28
-rw-r--r--  src/libstore/store-api.cc | 4
-rw-r--r--  src/libstore/store-api.hh | 1
-rw-r--r--  src/libutil/archive.cc | 2
-rw-r--r--  src/libutil/compression.cc | 2
-rw-r--r--  src/libutil/pool.hh | 6
-rw-r--r--  src/libutil/serialise.cc | 31
-rw-r--r--  src/libutil/serialise.hh | 2
-rw-r--r--  src/libutil/util.cc | 4
-rw-r--r--  src/nix-build/local.mk | 9
-rwxr-xr-x  src/nix-build/nix-build.cc | 24
-rw-r--r--  src/nix-channel/local.mk | 7
-rwxr-xr-x  src/nix-channel/nix-channel.cc | 18
-rw-r--r--  src/nix-collect-garbage/local.mk | 7
-rw-r--r--  src/nix-collect-garbage/nix-collect-garbage.cc | 15
-rw-r--r--  src/nix-copy-closure/local.mk | 7
-rwxr-xr-x  src/nix-copy-closure/nix-copy-closure.cc | 13
-rw-r--r--  src/nix-daemon/local.mk | 13
-rw-r--r--  src/nix-daemon/nix-daemon.cc | 20
-rw-r--r--  src/nix-env/local.mk | 7
-rw-r--r--  src/nix-env/nix-env.cc | 22
-rw-r--r--  src/nix-instantiate/local.mk | 7
-rw-r--r--  src/nix-instantiate/nix-instantiate.cc | 16
-rw-r--r--  src/nix-prefetch-url/local.mk | 7
-rw-r--r--  src/nix-prefetch-url/nix-prefetch-url.cc | 14
-rw-r--r--  src/nix-store/graphml.cc | 90
-rw-r--r--  src/nix-store/graphml.hh (renamed from src/nix-store/xmlgraph.hh) | 2
-rw-r--r--  src/nix-store/local.mk | 9
-rw-r--r--  src/nix-store/nix-store.cc | 23
-rw-r--r--  src/nix-store/xmlgraph.cc | 66
-rw-r--r--  src/nix/copy.cc | 8
-rw-r--r--  src/nix/local.mk | 21
-rw-r--r--  src/nix/main.cc | 6
-rw-r--r--  src/nix/repl.cc | 5
-rw-r--r--  src/nix/search.cc | 4
-rw-r--r--  src/nix/verify.cc | 2
53 files changed, 997 insertions, 615 deletions
diff --git a/src/build-remote/build-remote.cc b/src/build-remote/build-remote.cc
index 38dbe3e58b26..abf3669b5b35 100644
--- a/src/build-remote/build-remote.cc
+++ b/src/build-remote/build-remote.cc
@@ -17,6 +17,7 @@
 #include "store-api.hh"
 #include "derivations.hh"
 #include "local-store.hh"
+#include "legacy.hh"
 
 using namespace nix;
 using std::cin;
@@ -37,11 +38,9 @@ static AutoCloseFD openSlotLock(const Machine & m, unsigned long long slot)
     return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true);
 }
 
-int main (int argc, char * * argv)
+static int _main(int argc, char * * argv)
 {
-    return handleExceptions(argv[0], [&]() {
-        initNix();
-
+    {
         logger = makeJSONLogger(*logger);
 
         /* Ensure we don't get any SSH passphrase or host key popups. */
@@ -80,7 +79,7 @@ int main (int argc, char * * argv)
 
         if (machines.empty()) {
             std::cerr << "# decline-permanently\n";
-            return;
+            return 0;
         }
 
         string drvPath;
@@ -90,8 +89,8 @@ int main (int argc, char * * argv)
 
             try {
                 auto s = readString(source);
-                if (s != "try") return;
-            } catch (EndOfFile &) { return; }
+                if (s != "try") return 0;
+            } catch (EndOfFile &) { return 0; }
 
             auto amWilling = readInt(source);
             auto neededSystem = readString(source);
@@ -253,6 +252,8 @@ connected:
             copyPaths(ref<Store>(sshStore), store, missing, NoRepair, NoCheckSigs, NoSubstitute);
         }
 
-        return;
-    });
+        return 0;
+    }
 }
+
+static RegisterLegacyCommand s1("build-remote", _main);
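Note: build-remote's entry point above is renamed to a static _main and registered under the name "build-remote", to be dispatched from a single multi-call binary instead of being installed separately. legacy.hh itself is not part of this diff, so the following is only a minimal sketch, under assumed names, of how such a registration scheme can work (a static object inserts the entry point into a global map at startup):

    #include <functional>
    #include <iostream>
    #include <map>
    #include <string>

    using MainFunction = std::function<int(int argc, char * * argv)>;

    struct RegisterLegacyCommand
    {
        /* Function-local static, so the map exists before any registration
           runs, regardless of translation-unit initialisation order. */
        static std::map<std::string, MainFunction> & commands()
        {
            static std::map<std::string, MainFunction> m;
            return m;
        }

        RegisterLegacyCommand(const std::string & name, MainFunction fun)
        {
            commands()[name] = fun;
        }
    };

    static int buildRemoteMain(int argc, char * * argv)
    {
        std::cout << "build-remote invoked with " << argc << " argument(s)\n";
        return 0;
    }

    static RegisterLegacyCommand s1("build-remote", buildRemoteMain);

    int main(int argc, char * * argv)
    {
        /* A multi-call binary would dispatch on the base name of argv[0]. */
        auto & cmds = RegisterLegacyCommand::commands();
        auto i = cmds.find("build-remote");
        return i == cmds.end() ? 1 : i->second(argc, argv);
    }
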
diff --git a/src/build-remote/local.mk b/src/build-remote/local.mk
deleted file mode 100644
index 50b0409d1886..000000000000
--- a/src/build-remote/local.mk
+++ /dev/null
@@ -1,9 +0,0 @@
-programs += build-remote
-
-build-remote_DIR := $(d)
-
-build-remote_INSTALL_DIR := $(libexecdir)/nix
-
-build-remote_LIBS = libmain libformat libstore libutil
-
-build-remote_SOURCES := $(d)/build-remote.cc
diff --git a/src/libexpr/eval.cc b/src/libexpr/eval.cc
index f41905787f9e..ab407e56907c 100644
--- a/src/libexpr/eval.cc
+++ b/src/libexpr/eval.cc
@@ -6,12 +6,15 @@
 #include "globals.hh"
 #include "eval-inline.hh"
 #include "download.hh"
+#include "json.hh"
 
 #include <algorithm>
 #include <cstring>
 #include <unistd.h>
 #include <sys/time.h>
 #include <sys/resource.h>
+#include <iostream>
+#include <fstream>
 
 #include <sys/time.h>
 #include <sys/resource.h>
@@ -23,7 +26,6 @@
 
 #endif
 
-
 namespace nix {
 
 
@@ -1723,12 +1725,9 @@ bool EvalState::eqValues(Value & v1, Value & v2)
     }
 }
 
-
 void EvalState::printStats()
 {
     bool showStats = getEnv("NIX_SHOW_STATS", "0") != "0";
-    Verbosity v = showStats ? lvlInfo : lvlDebug;
-    printMsg(v, "evaluation statistics:");
 
     struct rusage buf;
     getrusage(RUSAGE_SELF, &buf);
@@ -1739,62 +1738,101 @@ void EvalState::printStats()
     uint64_t bValues = nrValues * sizeof(Value);
     uint64_t bAttrsets = nrAttrsets * sizeof(Bindings) + nrAttrsInAttrsets * sizeof(Attr);
 
-    printMsg(v, format("  time elapsed: %1%") % cpuTime);
-    printMsg(v, format("  size of a value: %1%") % sizeof(Value));
-    printMsg(v, format("  size of an attr: %1%") % sizeof(Attr));
-    printMsg(v, format("  environments allocated count: %1%") % nrEnvs);
-    printMsg(v, format("  environments allocated bytes: %1%") % bEnvs);
-    printMsg(v, format("  list elements count: %1%") % nrListElems);
-    printMsg(v, format("  list elements bytes: %1%") % bLists);
-    printMsg(v, format("  list concatenations: %1%") % nrListConcats);
-    printMsg(v, format("  values allocated count: %1%") % nrValues);
-    printMsg(v, format("  values allocated bytes: %1%") % bValues);
-    printMsg(v, format("  sets allocated: %1% (%2% bytes)") % nrAttrsets % bAttrsets);
-    printMsg(v, format("  right-biased unions: %1%") % nrOpUpdates);
-    printMsg(v, format("  values copied in right-biased unions: %1%") % nrOpUpdateValuesCopied);
-    printMsg(v, format("  symbols in symbol table: %1%") % symbols.size());
-    printMsg(v, format("  size of symbol table: %1%") % symbols.totalSize());
-    printMsg(v, format("  number of thunks: %1%") % nrThunks);
-    printMsg(v, format("  number of thunks avoided: %1%") % nrAvoided);
-    printMsg(v, format("  number of attr lookups: %1%") % nrLookups);
-    printMsg(v, format("  number of primop calls: %1%") % nrPrimOpCalls);
-    printMsg(v, format("  number of function calls: %1%") % nrFunctionCalls);
-    printMsg(v, format("  total allocations: %1% bytes") % (bEnvs + bLists + bValues + bAttrsets));
-
 #if HAVE_BOEHMGC
     GC_word heapSize, totalBytes;
     GC_get_heap_usage_safe(&heapSize, 0, 0, 0, &totalBytes);
-    printMsg(v, format("  current Boehm heap size: %1% bytes") % heapSize);
-    printMsg(v, format("  total Boehm heap allocations: %1% bytes") % totalBytes);
 #endif
-
-    if (countCalls) {
-        v = lvlInfo;
-
-        printMsg(v, format("calls to %1% primops:") % primOpCalls.size());
-        typedef std::multimap<size_t, Symbol> PrimOpCalls_;
-        PrimOpCalls_ primOpCalls_;
-        for (auto & i : primOpCalls)
-            primOpCalls_.insert(std::pair<size_t, Symbol>(i.second, i.first));
-        for (auto i = primOpCalls_.rbegin(); i != primOpCalls_.rend(); ++i)
-            printMsg(v, format("%1$10d %2%") % i->first % i->second);
-
-        printMsg(v, format("calls to %1% functions:") % functionCalls.size());
-        typedef std::multimap<size_t, ExprLambda *> FunctionCalls_;
-        FunctionCalls_ functionCalls_;
-        for (auto & i : functionCalls)
-            functionCalls_.insert(std::pair<size_t, ExprLambda *>(i.second, i.first));
-        for (auto i = functionCalls_.rbegin(); i != functionCalls_.rend(); ++i)
-            printMsg(v, format("%1$10d %2%") % i->first % i->second->showNamePos());
-
-        printMsg(v, format("evaluations of %1% attributes:") % attrSelects.size());
-        typedef std::multimap<size_t, Pos> AttrSelects_;
-        AttrSelects_ attrSelects_;
-        for (auto & i : attrSelects)
-            attrSelects_.insert(std::pair<size_t, Pos>(i.second, i.first));
-        for (auto i = attrSelects_.rbegin(); i != attrSelects_.rend(); ++i)
-            printMsg(v, format("%1$10d %2%") % i->first % i->second);
-
+    if (showStats) {
+        auto outPath = getEnv("NIX_SHOW_STATS_PATH","-");
+        std::fstream fs;
+        if (outPath != "-")
+            fs.open(outPath, std::fstream::out);
+        JSONObject topObj(outPath == "-" ? std::cerr : fs, true);
+        topObj.attr("cpuTime",cpuTime);
+        {
+            auto envs = topObj.object("envs");
+            envs.attr("number", nrEnvs);
+            envs.attr("elements", nrValuesInEnvs);
+            envs.attr("bytes", bEnvs);
+        }
+        {
+            auto lists = topObj.object("list");
+            lists.attr("elements", nrListElems);
+            lists.attr("bytes", bLists);
+            lists.attr("concats", nrListConcats);
+        }
+        {
+            auto values = topObj.object("values");
+            values.attr("number", nrValues);
+            values.attr("bytes", bValues);
+        }
+        {
+            auto syms = topObj.object("symbols");
+            syms.attr("number", symbols.size());
+            syms.attr("bytes", symbols.totalSize());
+        }
+        {
+            auto sets = topObj.object("sets");
+            sets.attr("number", nrAttrsets);
+            sets.attr("bytes", bAttrsets);
+            sets.attr("elements", nrAttrsInAttrsets);
+        }
+        {
+            auto sizes = topObj.object("sizes");
+            sizes.attr("Env", sizeof(Env));
+            sizes.attr("Value", sizeof(Value));
+            sizes.attr("Bindings", sizeof(Bindings));
+            sizes.attr("Attr", sizeof(Attr));
+        }
+        topObj.attr("nrOpUpdates", nrOpUpdates);
+        topObj.attr("nrOpUpdateValuesCopied", nrOpUpdateValuesCopied);
+        topObj.attr("nrThunks", nrThunks);
+        topObj.attr("nrAvoided", nrAvoided);
+        topObj.attr("nrLookups", nrLookups);
+        topObj.attr("nrPrimOpCalls", nrPrimOpCalls);
+        topObj.attr("nrFunctionCalls", nrFunctionCalls);
+#if HAVE_BOEHMGC
+        {
+            auto gc = topObj.object("gc");
+            gc.attr("heapSize", heapSize);
+            gc.attr("totalBytes", totalBytes);
+        }
+#endif
+        if (countCalls) {
+            {
+                auto obj = topObj.object("primops");
+                for (auto & i : primOpCalls)
+                    obj.attr(i.first, i.second);
+            }
+            {
+                auto list = topObj.list("functions");
+                for (auto & i : functionCalls) {
+                    auto obj = list.object();
+                    if (i.first->name.set())
+                        obj.attr("name", (const string &) i.first->name);
+                    else
+                        obj.attr("name", nullptr);
+                    if (i.first->pos) {
+                        obj.attr("file", (const string &) i.first->pos.file);
+                        obj.attr("line", i.first->pos.line);
+                        obj.attr("column", i.first->pos.column);
+                    }
+                    obj.attr("count", i.second);
+                }
+            }
+            {
+                auto list = topObj.list("attributes");
+                for (auto & i : attrSelects) {
+                    auto obj = list.object();
+                    if (i.first) {
+                        obj.attr("file", (const string &) i.first.file);
+                        obj.attr("line", i.first.line);
+                        obj.attr("column", i.first.column);
+                    }
+                    obj.attr("count", i.second);
+                }
+            }
+        }
     }
 }
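Note: printStats() now emits the evaluator statistics as one JSON document, written to stderr or to the file named by NIX_SHOW_STATS_PATH when NIX_SHOW_STATS is set. Below is a hedged sketch of a consumer using nlohmann::json (already a dependency of this tree); the field names are taken from the code above, while stats.json is just an example file name:

    #include <nlohmann/json.hpp>
    #include <cstdint>
    #include <fstream>
    #include <iostream>

    int main()
    {
        /* E.g. produced with NIX_SHOW_STATS=1 NIX_SHOW_STATS_PATH=stats.json. */
        std::ifstream fs("stats.json");
        auto stats = nlohmann::json::parse(fs);

        std::cout << "cpu time:         " << stats["cpuTime"].get<double>() << " s\n";
        std::cout << "values allocated: " << stats["values"]["number"].get<uint64_t>() << "\n";
        std::cout << "sets allocated:   " << stats["sets"]["number"].get<uint64_t>() << "\n";
        std::cout << "thunks:           " << stats["nrThunks"].get<uint64_t>() << "\n";

        /* "primops", "functions" and "attributes" are only present when call
           counting is enabled (the countCalls flag above). */
        if (stats.count("primops"))
            std::cout << stats["primops"].size() << " distinct primops called\n";
    }
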
 
diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc
index 6f82c6c404f2..7372134e2c9b 100644
--- a/src/libexpr/primops.cc
+++ b/src/libexpr/primops.cc
@@ -866,7 +866,7 @@ static void prim_baseNameOf(EvalState & state, const Pos & pos, Value * * args,
 static void prim_dirOf(EvalState & state, const Pos & pos, Value * * args, Value & v)
 {
     PathSet context;
-    Path dir = dirOf(state.coerceToPath(pos, *args[0], context));
+    Path dir = dirOf(state.coerceToString(pos, *args[0], context, false, false));
     if (args[0]->type == tPath) mkPath(v, dir.c_str()); else mkString(v, dir, context);
 }
 
diff --git a/src/libstore/build.cc b/src/libstore/build.cc
index cd37f7a3fc08..cf4218a261fa 100644
--- a/src/libstore/build.cc
+++ b/src/libstore/build.cc
@@ -11,6 +11,7 @@
 #include "compression.hh"
 #include "json.hh"
 #include "nar-info.hh"
+#include "parsed-derivations.hh"
 
 #include <algorithm>
 #include <iostream>
@@ -20,6 +21,7 @@
 #include <future>
 #include <chrono>
 #include <regex>
+#include <queue>
 
 #include <limits.h>
 #include <sys/time.h>
@@ -740,6 +742,8 @@ private:
     /* The derivation stored at drvPath. */
     std::unique_ptr<BasicDerivation> drv;
 
+    std::unique_ptr<ParsedDerivation> parsedDrv;
+
     /* The remainder is state held during the build. */
 
     /* Locks on the output paths. */
@@ -854,7 +858,7 @@ private:
        building multiple times. Since this contains the hash, it
        allows us to compare whether two rounds produced the same
        result. */
-    ValidPathInfos prevInfos;
+    std::map<Path, ValidPathInfo> prevInfos;
 
     const uid_t sandboxUid = 1000;
     const gid_t sandboxGid = 100;
@@ -935,6 +939,11 @@ private:
        as valid. */
     void registerOutputs();
 
+    /* Check that an output meets the requirements specified by the
+       'outputChecks' attribute (or the legacy
+       '{allowed,disallowed}{References,Requisites}' attributes). */
+    void checkOutputs(const std::map<std::string, ValidPathInfo> & outputs);
+
     /* Open a log file and a pipe to it. */
     Path openLogFile();
 
@@ -1139,6 +1148,8 @@ void DerivationGoal::haveDerivation()
         return;
     }
 
+    parsedDrv = std::make_unique<ParsedDerivation>(drvPath, *drv);
+
     /* We are first going to try to create the invalid output paths
        through substitutes.  If that doesn't work, we'll build
        them. */
@@ -1395,7 +1406,7 @@ void DerivationGoal::tryToBuild()
     /* Don't do a remote build if the derivation has the attribute
        `preferLocalBuild' set.  Also, check and repair modes are only
        supported for local builds. */
-    bool buildLocally = buildMode != bmNormal || drv->willBuildLocally();
+    bool buildLocally = buildMode != bmNormal || parsedDrv->willBuildLocally();
 
     auto started = [&]() {
         auto msg = fmt(
@@ -1641,19 +1652,13 @@ HookReply DerivationGoal::tryBuildHook()
 
     try {
 
-        /* Tell the hook about system features (beyond the system type)
-           required from the build machine.  (The hook could parse the
-           drv file itself, but this is easier.) */
-        Strings features = tokenizeString<Strings>(get(drv->env, "requiredSystemFeatures"));
-        for (auto & i : features) checkStoreName(i); /* !!! abuse */
-
         /* Send the request to the hook. */
         worker.hook->sink
             << "try"
             << (worker.getNrLocalBuilds() < settings.maxBuildJobs ? 1 : 0)
             << drv->platform
             << drvPath
-            << features;
+            << parsedDrv->getRequiredSystemFeatures();
         worker.hook->sink.flush();
 
         /* Read the first line of input, which should be a word indicating
@@ -1793,23 +1798,26 @@ static void preloadNSS() {
 void DerivationGoal::startBuilder()
 {
     /* Right platform? */
-    if (!drv->canBuildLocally()) {
-        throw Error(
-            format("a '%1%' is required to build '%3%', but I am a '%2%'")
-            % drv->platform % settings.thisSystem % drvPath);
-    }
+    if (!parsedDrv->canBuildLocally())
+        throw Error("a '%s' with features {%s} is required to build '%s', but I am a '%s' with features {%s}",
+            drv->platform,
+            concatStringsSep(", ", parsedDrv->getRequiredSystemFeatures()),
+            drvPath,
+            settings.thisSystem,
+            concatStringsSep(", ", settings.systemFeatures));
 
     if (drv->isBuiltin())
         preloadNSS();
 
 #if __APPLE__
-    additionalSandboxProfile = get(drv->env, "__sandboxProfile");
+    additionalSandboxProfile = parsedDrv->getStringAttr("__sandboxProfile").value_or("");
 #endif
 
     /* Are we doing a chroot build? */
     {
+        auto noChroot = parsedDrv->getBoolAttr("__noChroot");
         if (settings.sandboxMode == smEnabled) {
-            if (get(drv->env, "__noChroot") == "1")
+            if (noChroot)
                 throw Error(format("derivation '%1%' has '__noChroot' set, "
                     "but that's not allowed when 'sandbox' is 'true'") % drvPath);
 #if __APPLE__
@@ -1822,7 +1830,7 @@ void DerivationGoal::startBuilder()
         else if (settings.sandboxMode == smDisabled)
             useChroot = false;
         else if (settings.sandboxMode == smRelaxed)
-            useChroot = !fixedOutput && get(drv->env, "__noChroot") != "1";
+            useChroot = !fixedOutput && !noChroot;
     }
 
     if (worker.store.storeDir != worker.store.realStoreDir) {
@@ -1873,7 +1881,7 @@ void DerivationGoal::startBuilder()
     writeStructuredAttrs();
 
     /* Handle exportReferencesGraph(), if set. */
-    if (!drv->env.count("__json")) {
+    if (!parsedDrv->getStructuredAttrs()) {
         /* The `exportReferencesGraph' feature allows the references graph
            to be passed to a builder.  This attribute should be a list of
            pairs [name1 path1 name2 path2 ...].  The references graph of
@@ -1938,7 +1946,7 @@ void DerivationGoal::startBuilder()
         PathSet allowedPaths = settings.allowedImpureHostPrefixes;
 
         /* This works like the above, except on a per-derivation level */
-        Strings impurePaths = tokenizeString<Strings>(get(drv->env, "__impureHostDeps"));
+        auto impurePaths = parsedDrv->getStringsAttr("__impureHostDeps").value_or(Strings());
 
         for (auto & i : impurePaths) {
             bool found = false;
@@ -2306,7 +2314,7 @@ void DerivationGoal::initEnv()
        passAsFile is ignored in structured mode because it's not
        needed (attributes are not passed through the environment, so
        there is no size constraint). */
-    if (!drv->env.count("__json")) {
+    if (!parsedDrv->getStructuredAttrs()) {
 
         StringSet passAsFile = tokenizeString<StringSet>(get(drv->env, "passAsFile"));
         int fileNr = 0;
@@ -2353,8 +2361,8 @@ void DerivationGoal::initEnv()
        fixed-output derivations is by definition pure (since we
        already know the cryptographic hash of the output). */
     if (fixedOutput) {
-        Strings varNames = tokenizeString<Strings>(get(drv->env, "impureEnvVars"));
-        for (auto & i : varNames) env[i] = getEnv(i);
+        for (auto & i : parsedDrv->getStringsAttr("impureEnvVars").value_or(Strings()))
+            env[i] = getEnv(i);
     }
 
     /* Currently structured log messages piggyback on stderr, but we
@@ -2369,111 +2377,103 @@ static std::regex shVarName("[A-Za-z_][A-Za-z0-9_]*");
 
 void DerivationGoal::writeStructuredAttrs()
 {
-    auto jsonAttr = drv->env.find("__json");
-    if (jsonAttr == drv->env.end()) return;
-
-    try {
+    auto & structuredAttrs = parsedDrv->getStructuredAttrs();
+    if (!structuredAttrs) return;
 
-        auto jsonStr = rewriteStrings(jsonAttr->second, inputRewrites);
+    auto json = *structuredAttrs;
 
-        auto json = nlohmann::json::parse(jsonStr);
-
-        /* Add an "outputs" object containing the output paths. */
-        nlohmann::json outputs;
-        for (auto & i : drv->outputs)
-            outputs[i.first] = rewriteStrings(i.second.path, inputRewrites);
-        json["outputs"] = outputs;
-
-        /* Handle exportReferencesGraph. */
-        auto e = json.find("exportReferencesGraph");
-        if (e != json.end() && e->is_object()) {
-            for (auto i = e->begin(); i != e->end(); ++i) {
-                std::ostringstream str;
-                {
-                    JSONPlaceholder jsonRoot(str, true);
-                    PathSet storePaths;
-                    for (auto & p : *i)
-                        storePaths.insert(p.get<std::string>());
-                    worker.store.pathInfoToJSON(jsonRoot,
-                        exportReferences(storePaths), false, true);
-                }
-                json[i.key()] = nlohmann::json::parse(str.str()); // urgh
+    /* Add an "outputs" object containing the output paths. */
+    nlohmann::json outputs;
+    for (auto & i : drv->outputs)
+        outputs[i.first] = rewriteStrings(i.second.path, inputRewrites);
+    json["outputs"] = outputs;
+
+    /* Handle exportReferencesGraph. */
+    auto e = json.find("exportReferencesGraph");
+    if (e != json.end() && e->is_object()) {
+        for (auto i = e->begin(); i != e->end(); ++i) {
+            std::ostringstream str;
+            {
+                JSONPlaceholder jsonRoot(str, true);
+                PathSet storePaths;
+                for (auto & p : *i)
+                    storePaths.insert(p.get<std::string>());
+                worker.store.pathInfoToJSON(jsonRoot,
+                    exportReferences(storePaths), false, true);
             }
+            json[i.key()] = nlohmann::json::parse(str.str()); // urgh
         }
+    }
 
-        writeFile(tmpDir + "/.attrs.json", json.dump());
-
-        /* As a convenience to bash scripts, write a shell file that
-           maps all attributes that are representable in bash -
-           namely, strings, integers, nulls, Booleans, and arrays and
-           objects consisting entirely of those values. (So nested
-           arrays or objects are not supported.) */
+    writeFile(tmpDir + "/.attrs.json", rewriteStrings(json.dump(), inputRewrites));
 
-        auto handleSimpleType = [](const nlohmann::json & value) -> std::experimental::optional<std::string> {
-            if (value.is_string())
-                return shellEscape(value);
+    /* As a convenience to bash scripts, write a shell file that
+       maps all attributes that are representable in bash -
+       namely, strings, integers, nulls, Booleans, and arrays and
+       objects consisting entirely of those values. (So nested
+       arrays or objects are not supported.) */
 
-            if (value.is_number()) {
-                auto f = value.get<float>();
-                if (std::ceil(f) == f)
-                    return std::to_string(value.get<int>());
-            }
+    auto handleSimpleType = [](const nlohmann::json & value) -> std::experimental::optional<std::string> {
+        if (value.is_string())
+            return shellEscape(value);
 
-            if (value.is_null())
-                return std::string("''");
+        if (value.is_number()) {
+            auto f = value.get<float>();
+            if (std::ceil(f) == f)
+                return std::to_string(value.get<int>());
+        }
 
-            if (value.is_boolean())
-                return value.get<bool>() ? std::string("1") : std::string("");
+        if (value.is_null())
+            return std::string("''");
 
-            return {};
-        };
+        if (value.is_boolean())
+            return value.get<bool>() ? std::string("1") : std::string("");
 
-        std::string jsonSh;
+        return {};
+    };
 
-        for (auto i = json.begin(); i != json.end(); ++i) {
+    std::string jsonSh;
 
-            if (!std::regex_match(i.key(), shVarName)) continue;
+    for (auto i = json.begin(); i != json.end(); ++i) {
 
-            auto & value = i.value();
+        if (!std::regex_match(i.key(), shVarName)) continue;
 
-            auto s = handleSimpleType(value);
-            if (s)
-                jsonSh += fmt("declare %s=%s\n", i.key(), *s);
+        auto & value = i.value();
 
-            else if (value.is_array()) {
-                std::string s2;
-                bool good = true;
+        auto s = handleSimpleType(value);
+        if (s)
+            jsonSh += fmt("declare %s=%s\n", i.key(), *s);
 
-                for (auto i = value.begin(); i != value.end(); ++i) {
-                    auto s3 = handleSimpleType(i.value());
-                    if (!s3) { good = false; break; }
-                    s2 += *s3; s2 += ' ';
-                }
+        else if (value.is_array()) {
+            std::string s2;
+            bool good = true;
 
-                if (good)
-                    jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2);
+            for (auto i = value.begin(); i != value.end(); ++i) {
+                auto s3 = handleSimpleType(i.value());
+                if (!s3) { good = false; break; }
+                s2 += *s3; s2 += ' ';
             }
 
-            else if (value.is_object()) {
-                std::string s2;
-                bool good = true;
+            if (good)
+                jsonSh += fmt("declare -a %s=(%s)\n", i.key(), s2);
+        }
 
-                for (auto i = value.begin(); i != value.end(); ++i) {
-                    auto s3 = handleSimpleType(i.value());
-                    if (!s3) { good = false; break; }
-                    s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3);
-                }
+        else if (value.is_object()) {
+            std::string s2;
+            bool good = true;
 
-                if (good)
-                    jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2);
+            for (auto i = value.begin(); i != value.end(); ++i) {
+                auto s3 = handleSimpleType(i.value());
+                if (!s3) { good = false; break; }
+                s2 += fmt("[%s]=%s ", shellEscape(i.key()), *s3);
             }
-        }
-
-        writeFile(tmpDir + "/.attrs.sh", jsonSh);
 
-    } catch (std::exception & e) {
-        throw Error("cannot process __json attribute of '%s': %s", drvPath, e.what());
+            if (good)
+                jsonSh += fmt("declare -A %s=(%s)\n", i.key(), s2);
+        }
     }
+
+    writeFile(tmpDir + "/.attrs.sh", rewriteStrings(jsonSh, inputRewrites));
 }
 
 
@@ -2628,7 +2628,7 @@ void DerivationGoal::runChild()
                 createDirs(chrootRootDir + "/dev/shm");
                 createDirs(chrootRootDir + "/dev/pts");
                 ss.push_back("/dev/full");
-                if (pathExists("/dev/kvm"))
+                if (settings.systemFeatures.get().count("kvm") && pathExists("/dev/kvm"))
                     ss.push_back("/dev/kvm");
                 ss.push_back("/dev/null");
                 ss.push_back("/dev/random");
@@ -2917,7 +2917,7 @@ void DerivationGoal::runChild()
 
             writeFile(sandboxFile, sandboxProfile);
 
-            bool allowLocalNetworking = get(drv->env, "__darwinAllowLocalNetworking") == "1";
+            bool allowLocalNetworking = parsedDrv->getBoolAttr("__darwinAllowLocalNetworking");
 
             /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms
                to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. */
@@ -2989,10 +2989,9 @@ void DerivationGoal::runChild()
 /* Parse a list of reference specifiers.  Each element must either be
    a store path, or the symbolic name of the output of the derivation
    (such as `out'). */
-PathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv, string attr)
+PathSet parseReferenceSpecifiers(Store & store, const BasicDerivation & drv, const Strings & paths)
 {
     PathSet result;
-    Paths paths = tokenizeString<Paths>(attr);
     for (auto & i : paths) {
         if (store.isStorePath(i))
             result.insert(i);
@@ -3017,7 +3016,7 @@ void DerivationGoal::registerOutputs()
         if (allValid) return;
     }
 
-    ValidPathInfos infos;
+    std::map<std::string, ValidPathInfo> infos;
 
     /* Set of inodes seen during calls to canonicalisePathMetaData()
        for this build's outputs.  This needs to be shared between
@@ -3121,7 +3120,7 @@ void DerivationGoal::registerOutputs()
                the derivation to its content-addressed location. */
             Hash h2 = recursive ? hashPath(h.type, actualPath).first : hashFile(h.type, actualPath);
 
-            Path dest = worker.store.makeFixedOutputPath(recursive, h2, drv->env["name"]);
+            Path dest = worker.store.makeFixedOutputPath(recursive, h2, storePathToName(path));
 
             if (h != h2) {
 
@@ -3202,48 +3201,6 @@ void DerivationGoal::registerOutputs()
                 debug(format("referenced input: '%1%'") % i);
         }
 
-        /* Enforce `allowedReferences' and friends. */
-        auto checkRefs = [&](const string & attrName, bool allowed, bool recursive) {
-            if (drv->env.find(attrName) == drv->env.end()) return;
-
-            PathSet spec = parseReferenceSpecifiers(worker.store, *drv, get(drv->env, attrName));
-
-            PathSet used;
-            if (recursive) {
-                /* Our requisites are the union of the closures of our references. */
-                for (auto & i : references)
-                    /* Don't call computeFSClosure on ourselves. */
-                    if (path != i)
-                        worker.store.computeFSClosure(i, used);
-            } else
-                used = references;
-
-            PathSet badPaths;
-
-            for (auto & i : used)
-                if (allowed) {
-                    if (spec.find(i) == spec.end())
-                        badPaths.insert(i);
-                } else {
-                    if (spec.find(i) != spec.end())
-                        badPaths.insert(i);
-                }
-
-            if (!badPaths.empty()) {
-                string badPathsStr;
-                for (auto & i : badPaths) {
-                    badPathsStr += "\n\t";
-                    badPathsStr += i;
-                }
-                throw BuildError(format("output '%1%' is not allowed to refer to the following paths:%2%") % actualPath % badPathsStr);
-            }
-        };
-
-        checkRefs("allowedReferences", true, false);
-        checkRefs("allowedRequisites", true, true);
-        checkRefs("disallowedReferences", false, false);
-        checkRefs("disallowedRequisites", false, true);
-
         if (curRound == nrRounds) {
             worker.store.optimisePath(actualPath); // FIXME: combine with scanForReferences()
             worker.markContentsGood(path);
@@ -3259,11 +3216,14 @@ void DerivationGoal::registerOutputs()
 
         if (!info.references.empty()) info.ca.clear();
 
-        infos.push_back(info);
+        infos[i.first] = info;
     }
 
     if (buildMode == bmCheck) return;
 
+    /* Apply output checks. */
+    checkOutputs(infos);
+
     /* Compare the result with the previous round, and report which
        path is different, if any.*/
     if (curRound > 1 && prevInfos != infos) {
@@ -3271,16 +3231,16 @@ void DerivationGoal::registerOutputs()
         for (auto i = prevInfos.begin(), j = infos.begin(); i != prevInfos.end(); ++i, ++j)
             if (!(*i == *j)) {
                 result.isNonDeterministic = true;
-                Path prev = i->path + checkSuffix;
+                Path prev = i->second.path + checkSuffix;
                 bool prevExists = keepPreviousRound && pathExists(prev);
                 auto msg = prevExists
-                    ? fmt("output '%1%' of '%2%' differs from '%3%' from previous round", i->path, drvPath, prev)
-                    : fmt("output '%1%' of '%2%' differs from previous round", i->path, drvPath);
+                    ? fmt("output '%1%' of '%2%' differs from '%3%' from previous round", i->second.path, drvPath, prev)
+                    : fmt("output '%1%' of '%2%' differs from previous round", i->second.path, drvPath);
 
                 auto diffHook = settings.diffHook;
                 if (prevExists && diffHook != "" && runDiffHook) {
                     try {
-                        auto diff = runProgram(diffHook, true, {prev, i->path});
+                        auto diff = runProgram(diffHook, true, {prev, i->second.path});
                         if (diff != "")
                             printError(chomp(diff));
                     } catch (Error & error) {
@@ -3325,7 +3285,11 @@ void DerivationGoal::registerOutputs()
     /* Register each output path as valid, and register the sets of
        paths referenced by each of them.  If there are cycles in the
        outputs, this will fail. */
-    worker.store.registerValidPaths(infos);
+    {
+        ValidPathInfos infos2;
+        for (auto & i : infos) infos2.push_back(i.second);
+        worker.store.registerValidPaths(infos2);
+    }
 
     /* In case of a fixed-output derivation hash mismatch, throw an
        exception now that we have registered the output as valid. */
@@ -3334,6 +3298,153 @@ void DerivationGoal::registerOutputs()
 }
 
 
+void DerivationGoal::checkOutputs(const std::map<Path, ValidPathInfo> & outputs)
+{
+    std::map<Path, const ValidPathInfo &> outputsByPath;
+    for (auto & output : outputs)
+        outputsByPath.emplace(output.second.path, output.second);
+
+    for (auto & output : outputs) {
+        auto & outputName = output.first;
+        auto & info = output.second;
+
+        struct Checks
+        {
+            std::experimental::optional<uint64_t> maxSize, maxClosureSize;
+            std::experimental::optional<Strings> allowedReferences, allowedRequisites, disallowedReferences, disallowedRequisites;
+        };
+
+        /* Compute the closure and closure size of some output. This
+           is slightly tricky because some of its references (namely
+           other outputs) may not be valid yet. */
+        auto getClosure = [&](const Path & path)
+        {
+            uint64_t closureSize = 0;
+            PathSet pathsDone;
+            std::queue<Path> pathsLeft;
+            pathsLeft.push(path);
+
+            while (!pathsLeft.empty()) {
+                auto path = pathsLeft.front();
+                pathsLeft.pop();
+                if (!pathsDone.insert(path).second) continue;
+
+                auto i = outputsByPath.find(path);
+                if (i != outputsByPath.end()) {
+                    closureSize += i->second.narSize;
+                    for (auto & ref : i->second.references)
+                        pathsLeft.push(ref);
+                } else {
+                    auto info = worker.store.queryPathInfo(path);
+                    closureSize += info->narSize;
+                    for (auto & ref : info->references)
+                        pathsLeft.push(ref);
+                }
+            }
+
+            return std::make_pair(pathsDone, closureSize);
+        };
+
+        auto checkRefs = [&](const std::experimental::optional<Strings> & value, bool allowed, bool recursive)
+        {
+            if (!value) return;
+
+            PathSet spec = parseReferenceSpecifiers(worker.store, *drv, *value);
+
+            PathSet used = recursive ? getClosure(info.path).first : info.references;
+
+            PathSet badPaths;
+
+            for (auto & i : used)
+                if (allowed) {
+                    if (spec.find(i) == spec.end())
+                        badPaths.insert(i);
+                } else {
+                    if (spec.find(i) != spec.end())
+                        badPaths.insert(i);
+                }
+
+            if (!badPaths.empty()) {
+                string badPathsStr;
+                for (auto & i : badPaths) {
+                    badPathsStr += "\n  ";
+                    badPathsStr += i;
+                }
+                throw BuildError("output '%s' is not allowed to refer to the following paths:%s", info.path, badPathsStr);
+            }
+        };
+
+        auto applyChecks = [&](const Checks & checks)
+        {
+            if (checks.maxSize && info.narSize > *checks.maxSize)
+                throw BuildError("path '%s' is too large at %d bytes; limit is %d bytes",
+                    info.path, info.narSize, *checks.maxSize);
+
+            if (checks.maxClosureSize) {
+                uint64_t closureSize = getClosure(info.path).second;
+                if (closureSize > *checks.maxClosureSize)
+                    throw BuildError("closure of path '%s' is too large at %d bytes; limit is %d bytes",
+                        info.path, closureSize, *checks.maxClosureSize);
+            }
+
+            checkRefs(checks.allowedReferences, true, false);
+            checkRefs(checks.allowedRequisites, true, true);
+            checkRefs(checks.disallowedReferences, false, false);
+            checkRefs(checks.disallowedRequisites, false, true);
+        };
+
+        if (auto structuredAttrs = parsedDrv->getStructuredAttrs()) {
+            auto outputChecks = structuredAttrs->find("outputChecks");
+            if (outputChecks != structuredAttrs->end()) {
+                auto output = outputChecks->find(outputName);
+
+                if (output != outputChecks->end()) {
+                    Checks checks;
+
+                    auto maxSize = output->find("maxSize");
+                    if (maxSize != output->end())
+                        checks.maxSize = maxSize->get<uint64_t>();
+
+                    auto maxClosureSize = output->find("maxClosureSize");
+                    if (maxClosureSize != output->end())
+                        checks.maxClosureSize = maxClosureSize->get<uint64_t>();
+
+                    auto get = [&](const std::string & name) -> std::experimental::optional<Strings> {
+                        auto i = output->find(name);
+                        if (i != output->end()) {
+                            Strings res;
+                            for (auto j = i->begin(); j != i->end(); ++j) {
+                                if (!j->is_string())
+                                    throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath);
+                                res.push_back(j->get<std::string>());
+                            }
+                            return res;
+                        }
+                        return {};
+                    };
+
+                    checks.allowedReferences = get("allowedReferences");
+                    checks.allowedRequisites = get("allowedRequisites");
+                    checks.disallowedReferences = get("disallowedReferences");
+                    checks.disallowedRequisites = get("disallowedRequisites");
+
+                    applyChecks(checks);
+                }
+            }
+        } else {
+            // legacy non-structured-attributes case
+            Checks checks;
+            checks.allowedReferences = parsedDrv->getStringsAttr("allowedReferences");
+            checks.allowedRequisites = parsedDrv->getStringsAttr("allowedRequisites");
+            checks.disallowedReferences = parsedDrv->getStringsAttr("disallowedReferences");
+            checks.disallowedRequisites = parsedDrv->getStringsAttr("disallowedRequisites");
+            applyChecks(checks);
+        }
+    }
+}
+
+
 Path DerivationGoal::openLogFile()
 {
     logSize = 0;
@@ -3682,6 +3793,19 @@ void SubstitutionGoal::tryNext()
     } catch (InvalidPath &) {
         tryNext();
         return;
+    } catch (SubstituterDisabled &) {
+        if (settings.tryFallback) {
+            tryNext();
+            return;
+        }
+        throw;
+    } catch (Error & e) {
+        if (settings.tryFallback) {
+            printError(e.what());
+            tryNext();
+            return;
+        }
+        throw;
     }
 
     /* Update the total expected download size. */
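Note: checkOutputs(), added above, enforces maxSize, maxClosureSize and the allowed/disallowed reference lists per output. Its closure walk cannot rely on queryPathInfo() alone because sibling outputs are not registered as valid yet, so it traverses the reference graph itself. A minimal standalone sketch of that walk, with a plain map standing in for outputsByPath plus the store:

    #include <cstdint>
    #include <iostream>
    #include <map>
    #include <queue>
    #include <set>
    #include <string>
    #include <utility>

    struct PathInfo { uint64_t narSize; std::set<std::string> references; };

    /* Breadth-first walk over the reference graph, returning the closure and
       its total NAR size, in the spirit of getClosure() in checkOutputs(). */
    static std::pair<std::set<std::string>, uint64_t>
    getClosure(const std::map<std::string, PathInfo> & infos, const std::string & start)
    {
        uint64_t closureSize = 0;
        std::set<std::string> done;
        std::queue<std::string> left;
        left.push(start);

        while (!left.empty()) {
            auto path = left.front();
            left.pop();
            if (!done.insert(path).second) continue; /* already visited */
            auto & info = infos.at(path);
            closureSize += info.narSize;
            for (auto & ref : info.references)
                left.push(ref);
        }

        return {done, closureSize};
    }

    int main()
    {
        std::map<std::string, PathInfo> infos{
            {"out",  {1000, {"dep1"}}},
            {"dep1", {2500, {"dep2"}}},
            {"dep2", {400,  {}}},
        };
        auto closure = getClosure(infos, "out");
        std::cout << closure.first.size() << " paths, "
                  << closure.second << " bytes\n"; /* 3 paths, 3900 bytes */
    }
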
diff --git a/src/libstore/builtins/fetchurl.cc b/src/libstore/builtins/fetchurl.cc
index b4dcb35f951a..92aec63a0379 100644
--- a/src/libstore/builtins/fetchurl.cc
+++ b/src/libstore/builtins/fetchurl.cc
@@ -24,6 +24,7 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
 
     Path storePath = getAttr("out");
     auto mainUrl = getAttr("url");
+    bool unpack = get(drv.env, "unpack", "") == "1";
 
     /* Note: have to use a fresh downloader here because we're in
        a forked process. */
@@ -40,12 +41,12 @@ void builtinFetchurl(const BasicDerivation & drv, const std::string & netrcData)
             request.decompress = false;
 
             auto decompressor = makeDecompressionSink(
-                hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink);
+                unpack && hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink);
             downloader->download(std::move(request), *decompressor);
             decompressor->finish();
         });
 
-        if (get(drv.env, "unpack", "") == "1")
+        if (unpack)
             restorePath(storePath, *source);
         else
             writeFile(storePath, *source);
diff --git a/src/libstore/derivations.cc b/src/libstore/derivations.cc
index 1e187ec5e954..3961126fff9c 100644
--- a/src/libstore/derivations.cc
+++ b/src/libstore/derivations.cc
@@ -36,12 +36,6 @@ Path BasicDerivation::findOutput(const string & id) const
 }
 
 
-bool BasicDerivation::willBuildLocally() const
-{
-    return get(env, "preferLocalBuild") == "1" && canBuildLocally();
-}
-
-
 bool BasicDerivation::substitutesAllowed() const
 {
     return get(env, "allowSubstitutes", "1") == "1";
@@ -54,14 +48,6 @@ bool BasicDerivation::isBuiltin() const
 }
 
 
-bool BasicDerivation::canBuildLocally() const
-{
-    return platform == settings.thisSystem
-        || settings.extraPlatforms.get().count(platform) > 0
-        || isBuiltin();
-}
-
-
 Path writeDerivation(ref<Store> store,
     const Derivation & drv, const string & name, RepairFlag repair)
 {
diff --git a/src/libstore/derivations.hh b/src/libstore/derivations.hh
index 7b97730d3bf2..9753e796db5f 100644
--- a/src/libstore/derivations.hh
+++ b/src/libstore/derivations.hh
@@ -56,14 +56,10 @@ struct BasicDerivation
        the given derivation. */
     Path findOutput(const string & id) const;
 
-    bool willBuildLocally() const;
-
     bool substitutesAllowed() const;
 
     bool isBuiltin() const;
 
-    bool canBuildLocally() const;
-
     /* Return true iff this is a fixed-output derivation. */
     bool isFixedOutput() const;
 
diff --git a/src/libstore/download.cc b/src/libstore/download.cc
index 973fca0b130f..f44f1836b31e 100644
--- a/src/libstore/download.cc
+++ b/src/libstore/download.cc
@@ -345,7 +345,7 @@ struct CurlDownloader : public Downloader
                 done = true;
 
                 try {
-                    act.progress(result.data->size(), result.data->size());
+                    act.progress(result.bodySize, result.bodySize);
                     callback(std::move(result));
                 } catch (...) {
                     done = true;
@@ -710,11 +710,12 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
 
         /* If the buffer is full, then go to sleep until the calling
            thread wakes us up (i.e. when it has removed data from the
-           buffer). Note: this does stall the download thread. */
-        while (state->data.size() > 1024 * 1024) {
-            if (state->quit) return;
+           buffer). We don't wait forever to prevent stalling the
+           download thread. (Hopefully sleeping will throttle the
+           sender.) */
+        if (state->data.size() > 1024 * 1024) {
             debug("download buffer is full; going to sleep");
-            state.wait(state->request);
+            state.wait_for(state->request, std::chrono::seconds(10));
         }
 
         /* Append data to the buffer and wake up the calling
@@ -736,30 +737,36 @@ void Downloader::download(DownloadRequest && request, Sink & sink)
             state->request.notify_one();
         }});
 
-    auto state(_state->lock());
-
     while (true) {
         checkInterrupt();
 
-        /* If no data is available, then wait for the download thread
-           to wake us up. */
-        if (state->data.empty()) {
+        std::string chunk;
+
+        /* Grab data if available, otherwise wait for the download
+           thread to wake us up. */
+        {
+            auto state(_state->lock());
+
+            while (state->data.empty()) {
 
-            if (state->quit) {
-                if (state->exc) std::rethrow_exception(state->exc);
-                break;
+                if (state->quit) {
+                    if (state->exc) std::rethrow_exception(state->exc);
+                    return;
+                }
+
+                state.wait(state->avail);
             }
 
-            state.wait(state->avail);
-        }
+            chunk = std::move(state->data);
 
-        /* If data is available, then flush it to the sink and wake up
-           the download thread if it's blocked on a full buffer. */
-        if (!state->data.empty()) {
-            sink((unsigned char *) state->data.data(), state->data.size());
-            state->data.clear();
             state->request.notify_one();
         }
+
+        /* Flush the data to the sink and wake up the download thread
+           if it's blocked on a full buffer. We don't hold the state
+           lock while doing this to prevent blocking the download
+           thread if sink() takes a long time. */
+        sink((unsigned char *) chunk.data(), chunk.size());
     }
 }
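Note: the rewritten consumer loop above moves the buffered data out while holding the lock and only then calls sink(), so a slow sink no longer stalls the download thread, and the producer backs off with a bounded wait_for() instead of blocking indefinitely. A hedged sketch of the same hand-off pattern using standard primitives (the names are illustrative, not the Downloader internals):

    #include <chrono>
    #include <condition_variable>
    #include <iostream>
    #include <mutex>
    #include <string>
    #include <thread>

    struct Buffer
    {
        std::mutex m;
        std::condition_variable avail;   /* data became available */
        std::condition_variable drained; /* consumer made room */
        std::string data;
        bool quit = false;
    };

    int main()
    {
        Buffer buf;

        std::thread producer([&] {
            for (int i = 0; i < 5; ++i) {
                std::unique_lock<std::mutex> l(buf.m);
                /* Back off (with a timeout) while the buffer is full, mirroring
                   the 1 MiB limit and wait_for(10 s) above. */
                buf.drained.wait_for(l, std::chrono::seconds(10),
                    [&] { return buf.data.size() <= 1024 * 1024; });
                buf.data += "chunk " + std::to_string(i) + "\n";
                buf.avail.notify_one();
            }
            std::unique_lock<std::mutex> l(buf.m);
            buf.quit = true;
            buf.avail.notify_one();
        });

        while (true) {
            std::string chunk;
            {
                std::unique_lock<std::mutex> l(buf.m);
                buf.avail.wait(l, [&] { return !buf.data.empty() || buf.quit; });
                if (buf.data.empty() && buf.quit) break;
                chunk = std::move(buf.data); /* take the data... */
                buf.data.clear();
                buf.drained.notify_one();
            }                                /* ...then drop the lock */
            std::cout << chunk;              /* the "sink" runs unlocked */
        }

        producer.join();
    }
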
 
diff --git a/src/libstore/globals.cc b/src/libstore/globals.cc
index d95db56726cb..a9c07b23a6f3 100644
--- a/src/libstore/globals.cc
+++ b/src/libstore/globals.cc
@@ -86,6 +86,21 @@ unsigned int Settings::getDefaultCores()
     return std::max(1U, std::thread::hardware_concurrency());
 }
 
+StringSet Settings::getDefaultSystemFeatures()
+{
+    /* For backwards compatibility, accept some "features" that are
+       used in Nixpkgs to route builds to certain machines but don't
+       actually require anything special on the machines. */
+    StringSet features{"nixos-test", "benchmark", "big-parallel"};
+
+    #if __linux__
+    if (access("/dev/kvm", R_OK | W_OK) == 0)
+        features.insert("kvm");
+    #endif
+
+    return features;
+}
+
 const string nixVersion = PACKAGE_VERSION;
 
 template<> void BaseSetting<SandboxMode>::set(const std::string & str)
diff --git a/src/libstore/globals.hh b/src/libstore/globals.hh
index f589078dbb98..6b3e204536f1 100644
--- a/src/libstore/globals.hh
+++ b/src/libstore/globals.hh
@@ -32,6 +32,8 @@ class Settings : public Config {
 
     unsigned int getDefaultCores();
 
+    StringSet getDefaultSystemFeatures();
+
 public:
 
     Settings();
@@ -80,9 +82,9 @@ public:
     /* Whether to show build log output in real time. */
     bool verboseBuild = true;
 
-    /* If verboseBuild is false, the number of lines of the tail of
-       the log to show if a build fails. */
-    size_t logLines = 10;
+    Setting<size_t> logLines{this, 10, "log-lines",
+        "If verbose-build is false, the number of lines of the tail of "
+        "the log to show if a build fails."};
 
     MaxBuildJobsSetting maxBuildJobs{this, 1, "max-jobs",
         "Maximum number of parallel build jobs. \"auto\" means use number of cores.",
@@ -261,6 +263,10 @@ public:
         "These may be supported natively (e.g. armv7 on some aarch64 CPUs "
         "or using hacks like qemu-user."};
 
+    Setting<StringSet> systemFeatures{this, getDefaultSystemFeatures(),
+        "system-features",
+        "Optional features that this system implements (like \"kvm\")."};
+
     Setting<Strings> substituters{this,
         nixStore == "/nix/store" ? Strings{"https://cache.nixos.org/"} : Strings(),
         "substituters",
diff --git a/src/libstore/http-binary-cache-store.cc b/src/libstore/http-binary-cache-store.cc
index ab524d523cf2..8da0e2f9d82a 100644
--- a/src/libstore/http-binary-cache-store.cc
+++ b/src/libstore/http-binary-cache-store.cc
@@ -13,6 +13,14 @@ private:
 
     Path cacheUri;
 
+    struct State
+    {
+        bool enabled = true;
+        std::chrono::steady_clock::time_point disabledUntil;
+    };
+
+    Sync<State> _state;
+
 public:
 
     HttpBinaryCacheStore(
@@ -46,8 +54,33 @@ public:
 
 protected:
 
+    void maybeDisable()
+    {
+        auto state(_state.lock());
+        if (state->enabled && settings.tryFallback) {
+            int t = 60;
+            printError("disabling binary cache '%s' for %s seconds", getUri(), t);
+            state->enabled = false;
+            state->disabledUntil = std::chrono::steady_clock::now() + std::chrono::seconds(t);
+        }
+    }
+
+    void checkEnabled()
+    {
+        auto state(_state.lock());
+        if (state->enabled) return;
+        if (std::chrono::steady_clock::now() > state->disabledUntil) {
+            state->enabled = true;
+            debug("re-enabling binary cache '%s'", getUri());
+            return;
+        }
+        throw SubstituterDisabled("substituter '%s' is disabled", getUri());
+    }
+
     bool fileExists(const std::string & path) override
     {
+        checkEnabled();
+
         try {
             DownloadRequest request(cacheUri + "/" + path);
             request.head = true;
@@ -59,6 +92,7 @@ protected:
                bucket is unlistable, so treat 403 as 404. */
             if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
                 return false;
+            maybeDisable();
             throw;
         }
     }
@@ -86,12 +120,14 @@ protected:
 
     void getFile(const std::string & path, Sink & sink) override
     {
+        checkEnabled();
         auto request(makeRequest(path));
         try {
             getDownloader()->download(std::move(request), sink);
         } catch (DownloadError & e) {
             if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
                 throw NoSuchBinaryCacheFile("file '%s' does not exist in binary cache '%s'", path, getUri());
+            maybeDisable();
             throw;
         }
     }
@@ -99,15 +135,18 @@ protected:
     void getFile(const std::string & path,
         Callback<std::shared_ptr<std::string>> callback) override
     {
+        checkEnabled();
+
         auto request(makeRequest(path));
 
         getDownloader()->enqueueDownload(request,
-            {[callback](std::future<DownloadResult> result) {
+            {[callback, this](std::future<DownloadResult> result) {
                 try {
                     callback(result.get().data);
                 } catch (DownloadError & e) {
                     if (e.error == Downloader::NotFound || e.error == Downloader::Forbidden)
                         return callback(std::shared_ptr<std::string>());
+                    maybeDisable();
                     callback.rethrow();
                 } catch (...) {
                     callback.rethrow();
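Note: maybeDisable() and checkEnabled() above give the HTTP binary cache a simple cooldown: after a download failure (when fallback is enabled) the substituter is skipped for a fixed period and then transparently retried. The same idea reduced to a standalone class, as a hedged sketch; the 60-second period mirrors the hard-coded value above:

    #include <chrono>
    #include <iostream>
    #include <mutex>

    class CooldownGate
    {
        std::mutex m;
        bool enabled = true;
        std::chrono::steady_clock::time_point disabledUntil;

    public:
        /* Called on failure: disable for the given period (only once). */
        void markFailed(std::chrono::seconds cooldown)
        {
            std::lock_guard<std::mutex> l(m);
            if (!enabled) return;
            enabled = false;
            disabledUntil = std::chrono::steady_clock::now() + cooldown;
        }

        /* Returns true if requests may proceed, false while cooling down. */
        bool check()
        {
            std::lock_guard<std::mutex> l(m);
            if (enabled) return true;
            if (std::chrono::steady_clock::now() > disabledUntil) {
                enabled = true; /* transparently re-enable */
                return true;
            }
            return false;
        }
    };

    int main()
    {
        CooldownGate gate;
        std::cout << gate.check() << "\n"; /* 1: enabled */
        gate.markFailed(std::chrono::seconds(60));
        std::cout << gate.check() << "\n"; /* 0: disabled for ~60 s */
    }
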
diff --git a/src/libstore/local-store.cc b/src/libstore/local-store.cc
index c8117c0c6508..216f3417c4a8 100644
--- a/src/libstore/local-store.cc
+++ b/src/libstore/local-store.cc
@@ -880,6 +880,12 @@ void LocalStore::querySubstitutablePathInfos(const PathSet & paths,
                     narInfo ? narInfo->fileSize : 0,
                     info->narSize};
             } catch (InvalidPath) {
+            } catch (SubstituterDisabled) {
+            } catch (Error & e) {
+                if (settings.tryFallback)
+                    printError(e.what());
+                else
+                    throw;
             }
         }
     }
diff --git a/src/libstore/parsed-derivations.cc b/src/libstore/parsed-derivations.cc
new file mode 100644
index 000000000000..dc3286482736
--- /dev/null
+++ b/src/libstore/parsed-derivations.cc
@@ -0,0 +1,111 @@
+#include "parsed-derivations.hh"
+
+namespace nix {
+
+ParsedDerivation::ParsedDerivation(const Path & drvPath, BasicDerivation & drv)
+    : drvPath(drvPath), drv(drv)
+{
+    /* Parse the __json attribute, if any. */
+    auto jsonAttr = drv.env.find("__json");
+    if (jsonAttr != drv.env.end()) {
+        try {
+            structuredAttrs = nlohmann::json::parse(jsonAttr->second);
+        } catch (std::exception & e) {
+            throw Error("cannot process __json attribute of '%s': %s", drvPath, e.what());
+        }
+    }
+}
+
+std::experimental::optional<std::string> ParsedDerivation::getStringAttr(const std::string & name) const
+{
+    if (structuredAttrs) {
+        auto i = structuredAttrs->find(name);
+        if (i == structuredAttrs->end())
+            return {};
+        else {
+            if (!i->is_string())
+                throw Error("attribute '%s' of derivation '%s' must be a string", name, drvPath);
+            return i->get<std::string>();
+        }
+    } else {
+        auto i = drv.env.find(name);
+        if (i == drv.env.end())
+            return {};
+        else
+            return i->second;
+    }
+}
+
+bool ParsedDerivation::getBoolAttr(const std::string & name, bool def) const
+{
+    if (structuredAttrs) {
+        auto i = structuredAttrs->find(name);
+        if (i == structuredAttrs->end())
+            return def;
+        else {
+            if (!i->is_boolean())
+                throw Error("attribute '%s' of derivation '%s' must be a Boolean", name, drvPath);
+            return i->get<bool>();
+        }
+    } else {
+        auto i = drv.env.find(name);
+        if (i == drv.env.end())
+            return def;
+        else
+            return i->second == "1";
+    }
+}
+
+std::experimental::optional<Strings> ParsedDerivation::getStringsAttr(const std::string & name) const
+{
+    if (structuredAttrs) {
+        auto i = structuredAttrs->find(name);
+        if (i == structuredAttrs->end())
+            return {};
+        else {
+            if (!i->is_array())
+                throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath);
+            Strings res;
+            for (auto j = i->begin(); j != i->end(); ++j) {
+                if (!j->is_string())
+                    throw Error("attribute '%s' of derivation '%s' must be a list of strings", name, drvPath);
+                res.push_back(j->get<std::string>());
+            }
+            return res;
+        }
+    } else {
+        auto i = drv.env.find(name);
+        if (i == drv.env.end())
+            return {};
+        else
+            return tokenizeString<Strings>(i->second);
+    }
+}
+
+StringSet ParsedDerivation::getRequiredSystemFeatures() const
+{
+    StringSet res;
+    for (auto & i : getStringsAttr("requiredSystemFeatures").value_or(Strings()))
+        res.insert(i);
+    return res;
+}
+
+bool ParsedDerivation::canBuildLocally() const
+{
+    if (drv.platform != settings.thisSystem.get()
+        && !settings.extraPlatforms.get().count(drv.platform)
+        && !drv.isBuiltin())
+        return false;
+
+    for (auto & feature : getRequiredSystemFeatures())
+        if (!settings.systemFeatures.get().count(feature)) return false;
+
+    return true;
+}
+
+bool ParsedDerivation::willBuildLocally() const
+{
+    return getBoolAttr("preferLocalBuild") && canBuildLocally();
+}
+
+}
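Note: ParsedDerivation gives callers a single interface over both attribute representations: the __json structured-attributes document and the classic environment strings. A hedged usage sketch, assuming the Nix headers from this tree are on the include path; inspect() is an illustrative caller, not part of the patch:

    #include "parsed-derivations.hh"

    using namespace nix;

    /* Illustrative caller, similar in spirit to what DerivationGoal
       does in build.cc above. */
    void inspect(const Path & drvPath, BasicDerivation & drv)
    {
        ParsedDerivation parsed(drvPath, drv);

        /* These behave the same whether the derivation carries a __json
           structured-attributes document or plain environment variables. */
        bool noChroot = parsed.getBoolAttr("__noChroot");
        auto impureVars = parsed.getStringsAttr("impureEnvVars").value_or(Strings());
        auto features = parsed.getRequiredSystemFeatures();

        if (!parsed.canBuildLocally())
            throw Error("derivation '%s' cannot be built on this machine", drvPath);

        (void) noChroot; (void) impureVars; (void) features;
    }
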
diff --git a/src/libstore/parsed-derivations.hh b/src/libstore/parsed-derivations.hh
new file mode 100644
index 000000000000..0a82c146172b
--- /dev/null
+++ b/src/libstore/parsed-derivations.hh
@@ -0,0 +1,35 @@
+#include "derivations.hh"
+
+#include <nlohmann/json.hpp>
+
+namespace nix {
+
+class ParsedDerivation
+{
+    Path drvPath;
+    BasicDerivation & drv;
+    std::experimental::optional<nlohmann::json> structuredAttrs;
+
+public:
+
+    ParsedDerivation(const Path & drvPath, BasicDerivation & drv);
+
+    const std::experimental::optional<nlohmann::json> & getStructuredAttrs() const
+    {
+        return structuredAttrs;
+    }
+
+    std::experimental::optional<std::string> getStringAttr(const std::string & name) const;
+
+    bool getBoolAttr(const std::string & name, bool def = false) const;
+
+    std::experimental::optional<Strings> getStringsAttr(const std::string & name) const;
+
+    StringSet getRequiredSystemFeatures() const;
+
+    bool canBuildLocally() const;
+
+    bool willBuildLocally() const;
+};
+
+}
diff --git a/src/libstore/remote-store.cc b/src/libstore/remote-store.cc
index eff5d252419f..def140cfbe18 100644
--- a/src/libstore/remote-store.cc
+++ b/src/libstore/remote-store.cc
@@ -161,7 +161,8 @@ void RemoteStore::initConnection(Connection & conn)
         if (GET_PROTOCOL_MINOR(conn.daemonVersion) >= 11)
             conn.to << false;
 
-        conn.processStderr();
+        auto ex = conn.processStderr();
+        if (ex) std::rethrow_exception(ex);
     }
     catch (Error & e) {
         throw Error("cannot open connection to remote store '%s': %s", getUri(), e.what());
@@ -195,22 +196,68 @@ void RemoteStore::setOptions(Connection & conn)
             conn.to << i.first << i.second.value;
     }
 
-    conn.processStderr();
+    auto ex = conn.processStderr();
+    if (ex) std::rethrow_exception(ex);
+}
+
+
+/* A wrapper around Pool<RemoteStore::Connection>::Handle that marks
+   the connection as bad (causing it to be closed) if a non-daemon
+   exception is thrown before the handle is closed. Such an exception
+   causes a deviation from the expected protocol and therefore a
+   desynchronization between the client and daemon. */
+struct ConnectionHandle
+{
+    Pool<RemoteStore::Connection>::Handle handle;
+    bool daemonException = false;
+
+    ConnectionHandle(Pool<RemoteStore::Connection>::Handle && handle)
+        : handle(std::move(handle))
+    { }
+
+    ConnectionHandle(ConnectionHandle && h)
+        : handle(std::move(h.handle))
+    { }
+
+    ~ConnectionHandle()
+    {
+        if (!daemonException && std::uncaught_exception()) {
+            handle.markBad();
+            debug("closing daemon connection because of an exception");
+        }
+    }
+
+    RemoteStore::Connection * operator -> () { return &*handle; }
+
+    void processStderr(Sink * sink = 0, Source * source = 0)
+    {
+        auto ex = handle->processStderr(sink, source);
+        if (ex) {
+            daemonException = true;
+            std::rethrow_exception(ex);
+        }
+    }
+};
+
+
+ConnectionHandle RemoteStore::getConnection()
+{
+    return ConnectionHandle(connections->get());
 }
 
 
 bool RemoteStore::isValidPathUncached(const Path & path)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopIsValidPath << path;
-    conn->processStderr();
+    conn.processStderr();
     return readInt(conn->from);
 }
 
 
 PathSet RemoteStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubstitute)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
         PathSet res;
         for (auto & i : paths)
@@ -218,7 +265,7 @@ PathSet RemoteStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybe
         return res;
     } else {
         conn->to << wopQueryValidPaths << paths;
-        conn->processStderr();
+        conn.processStderr();
         return readStorePaths<PathSet>(*this, conn->from);
     }
 }
@@ -226,27 +273,27 @@ PathSet RemoteStore::queryValidPaths(const PathSet & paths, SubstituteFlag maybe
 
 PathSet RemoteStore::queryAllValidPaths()
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopQueryAllValidPaths;
-    conn->processStderr();
+    conn.processStderr();
     return readStorePaths<PathSet>(*this, conn->from);
 }
 
 
 PathSet RemoteStore::querySubstitutablePaths(const PathSet & paths)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
         PathSet res;
         for (auto & i : paths) {
             conn->to << wopHasSubstitutes << i;
-            conn->processStderr();
+            conn.processStderr();
             if (readInt(conn->from)) res.insert(i);
         }
         return res;
     } else {
         conn->to << wopQuerySubstitutablePaths << paths;
-        conn->processStderr();
+        conn.processStderr();
         return readStorePaths<PathSet>(*this, conn->from);
     }
 }
@@ -257,14 +304,14 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths,
 {
     if (paths.empty()) return;
 
-    auto conn(connections->get());
+    auto conn(getConnection());
 
     if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 12) {
 
         for (auto & i : paths) {
             SubstitutablePathInfo info;
             conn->to << wopQuerySubstitutablePathInfo << i;
-            conn->processStderr();
+            conn.processStderr();
             unsigned int reply = readInt(conn->from);
             if (reply == 0) continue;
             info.deriver = readString(conn->from);
@@ -278,7 +325,7 @@ void RemoteStore::querySubstitutablePathInfos(const PathSet & paths,
     } else {
 
         conn->to << wopQuerySubstitutablePathInfos << paths;
-        conn->processStderr();
+        conn.processStderr();
         size_t count = readNum<size_t>(conn->from);
         for (size_t n = 0; n < count; n++) {
             Path path = readStorePath(*this, conn->from);
@@ -300,10 +347,10 @@ void RemoteStore::queryPathInfoUncached(const Path & path,
     try {
         std::shared_ptr<ValidPathInfo> info;
         {
-            auto conn(connections->get());
+            auto conn(getConnection());
             conn->to << wopQueryPathInfo << path;
             try {
-                conn->processStderr();
+                conn.processStderr();
             } catch (Error & e) {
                 // Ugly backwards compatibility hack.
                 if (e.msg().find("is not valid") != std::string::npos)
@@ -335,9 +382,9 @@ void RemoteStore::queryPathInfoUncached(const Path & path,
 void RemoteStore::queryReferrers(const Path & path,
     PathSet & referrers)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopQueryReferrers << path;
-    conn->processStderr();
+    conn.processStderr();
     PathSet referrers2 = readStorePaths<PathSet>(*this, conn->from);
     referrers.insert(referrers2.begin(), referrers2.end());
 }
@@ -345,36 +392,36 @@ void RemoteStore::queryReferrers(const Path & path,
 
 PathSet RemoteStore::queryValidDerivers(const Path & path)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopQueryValidDerivers << path;
-    conn->processStderr();
+    conn.processStderr();
     return readStorePaths<PathSet>(*this, conn->from);
 }
 
 
 PathSet RemoteStore::queryDerivationOutputs(const Path & path)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopQueryDerivationOutputs << path;
-    conn->processStderr();
+    conn.processStderr();
     return readStorePaths<PathSet>(*this, conn->from);
 }
 
 
 PathSet RemoteStore::queryDerivationOutputNames(const Path & path)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopQueryDerivationOutputNames << path;
-    conn->processStderr();
+    conn.processStderr();
     return readStrings<PathSet>(conn->from);
 }
 
 
 Path RemoteStore::queryPathFromHashPart(const string & hashPart)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopQueryPathFromHashPart << hashPart;
-    conn->processStderr();
+    conn.processStderr();
     Path path = readString(conn->from);
     if (!path.empty()) assertStorePath(path);
     return path;
@@ -384,7 +431,7 @@ Path RemoteStore::queryPathFromHashPart(const string & hashPart)
 void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
     RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr<FSAccessor> accessor)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
 
     if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 18) {
         conn->to << wopImportPaths;
@@ -403,7 +450,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
                 ;
         });
 
-        conn->processStderr(0, source2.get());
+        conn.processStderr(0, source2.get());
 
         auto importedPaths = readStorePaths<PathSet>(*this, conn->from);
         assert(importedPaths.size() <= 1);
@@ -417,7 +464,7 @@ void RemoteStore::addToStore(const ValidPathInfo & info, Source & source,
                  << repair << !checkSigs;
         bool tunnel = GET_PROTOCOL_MINOR(conn->daemonVersion) >= 21;
         if (!tunnel) copyNAR(source, conn->to);
-        conn->processStderr(0, tunnel ? &source : nullptr);
+        conn.processStderr(0, tunnel ? &source : nullptr);
     }
 }
 
@@ -427,7 +474,7 @@ Path RemoteStore::addToStore(const string & name, const Path & _srcPath,
 {
     if (repair) throw Error("repairing is not supported when building through the Nix daemon");
 
-    auto conn(connections->get());
+    auto conn(getConnection());
 
     Path srcPath(absPath(_srcPath));
 
@@ -445,13 +492,13 @@ Path RemoteStore::addToStore(const string & name, const Path & _srcPath,
             dumpPath(srcPath, conn->to, filter);
         }
         conn->to.warn = false;
-        conn->processStderr();
+        conn.processStderr();
     } catch (SysError & e) {
         /* Daemon closed while we were sending the path. Probably OOM
            or I/O error. */
         if (e.errNo == EPIPE)
             try {
-                conn->processStderr();
+                conn.processStderr();
             } catch (EndOfFile & e) { }
         throw;
     }
@@ -465,17 +512,17 @@ Path RemoteStore::addTextToStore(const string & name, const string & s,
 {
     if (repair) throw Error("repairing is not supported when building through the Nix daemon");
 
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopAddTextToStore << name << s << references;
 
-    conn->processStderr();
+    conn.processStderr();
     return readStorePath(*this, conn->from);
 }
 
 
 void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopBuildPaths;
     if (GET_PROTOCOL_MINOR(conn->daemonVersion) >= 13) {
         conn->to << drvPaths;
@@ -494,7 +541,7 @@ void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode)
             drvPaths2.insert(string(i, 0, i.find('!')));
         conn->to << drvPaths2;
     }
-    conn->processStderr();
+    conn.processStderr();
     readInt(conn->from);
 }
 
@@ -502,9 +549,9 @@ void RemoteStore::buildPaths(const PathSet & drvPaths, BuildMode buildMode)
 BuildResult RemoteStore::buildDerivation(const Path & drvPath, const BasicDerivation & drv,
     BuildMode buildMode)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopBuildDerivation << drvPath << drv << buildMode;
-    conn->processStderr();
+    conn.processStderr();
     BuildResult res;
     unsigned int status;
     conn->from >> status >> res.errorMsg;
@@ -515,45 +562,45 @@ BuildResult RemoteStore::buildDerivation(const Path & drvPath, const BasicDeriva
 
 void RemoteStore::ensurePath(const Path & path)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopEnsurePath << path;
-    conn->processStderr();
+    conn.processStderr();
     readInt(conn->from);
 }
 
 
 void RemoteStore::addTempRoot(const Path & path)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopAddTempRoot << path;
-    conn->processStderr();
+    conn.processStderr();
     readInt(conn->from);
 }
 
 
 void RemoteStore::addIndirectRoot(const Path & path)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopAddIndirectRoot << path;
-    conn->processStderr();
+    conn.processStderr();
     readInt(conn->from);
 }
 
 
 void RemoteStore::syncWithGC()
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopSyncWithGC;
-    conn->processStderr();
+    conn.processStderr();
     readInt(conn->from);
 }
 
 
 Roots RemoteStore::findRoots()
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopFindRoots;
-    conn->processStderr();
+    conn.processStderr();
     size_t count = readNum<size_t>(conn->from);
     Roots result;
     while (count--) {
@@ -567,7 +614,7 @@ Roots RemoteStore::findRoots()
 
 void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
 
     conn->to
         << wopCollectGarbage << options.action << options.pathsToDelete << options.ignoreLiveness
@@ -575,7 +622,7 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
         /* removed options */
         << 0 << 0 << 0;
 
-    conn->processStderr();
+    conn.processStderr();
 
     results.paths = readStrings<PathSet>(conn->from);
     results.bytesFreed = readLongLong(conn->from);
@@ -590,27 +637,27 @@ void RemoteStore::collectGarbage(const GCOptions & options, GCResults & results)
 
 void RemoteStore::optimiseStore()
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopOptimiseStore;
-    conn->processStderr();
+    conn.processStderr();
     readInt(conn->from);
 }
 
 
 bool RemoteStore::verifyStore(bool checkContents, RepairFlag repair)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopVerifyStore << checkContents << repair;
-    conn->processStderr();
+    conn.processStderr();
     return readInt(conn->from);
 }
 
 
 void RemoteStore::addSignatures(const Path & storePath, const StringSet & sigs)
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
     conn->to << wopAddSignatures << storePath << sigs;
-    conn->processStderr();
+    conn.processStderr();
     readInt(conn->from);
 }
 
@@ -620,13 +667,13 @@ void RemoteStore::queryMissing(const PathSet & targets,
     unsigned long long & downloadSize, unsigned long long & narSize)
 {
     {
-        auto conn(connections->get());
+        auto conn(getConnection());
         if (GET_PROTOCOL_MINOR(conn->daemonVersion) < 19)
             // Don't hold the connection handle in the fallback case
             // to prevent a deadlock.
             goto fallback;
         conn->to << wopQueryMissing << targets;
-        conn->processStderr();
+        conn.processStderr();
         willBuild = readStorePaths<PathSet>(*this, conn->from);
         willSubstitute = readStorePaths<PathSet>(*this, conn->from);
         unknown = readStorePaths<PathSet>(*this, conn->from);
@@ -642,7 +689,7 @@ void RemoteStore::queryMissing(const PathSet & targets,
 
 void RemoteStore::connect()
 {
-    auto conn(connections->get());
+    auto conn(getConnection());
 }
 
 
@@ -686,7 +733,7 @@ static Logger::Fields readFields(Source & from)
 }
 
 
-void RemoteStore::Connection::processStderr(Sink * sink, Source * source)
+std::exception_ptr RemoteStore::Connection::processStderr(Sink * sink, Source * source)
 {
     to.flush();
 
@@ -711,7 +758,7 @@ void RemoteStore::Connection::processStderr(Sink * sink, Source * source)
         else if (msg == STDERR_ERROR) {
             string error = readString(from);
             unsigned int status = readInt(from);
-            throw Error(status, error);
+            return std::make_exception_ptr(Error(status, error));
         }
 
         else if (msg == STDERR_NEXT)
@@ -745,6 +792,8 @@ void RemoteStore::Connection::processStderr(Sink * sink, Source * source)
         else
             throw Error("got unknown message type %x from Nix daemon", msg);
     }
+
+    return nullptr;
 }
 
 static std::string uriScheme = "unix://";
diff --git a/src/libstore/remote-store.hh b/src/libstore/remote-store.hh
index 16daee8b6731..4f554b5980e8 100644
--- a/src/libstore/remote-store.hh
+++ b/src/libstore/remote-store.hh
@@ -14,6 +14,7 @@ class Pid;
 struct FdSink;
 struct FdSource;
 template<typename T> class Pool;
+struct ConnectionHandle;
 
 
 /* FIXME: RemoteStore is a misnomer - should be something like
@@ -105,6 +106,7 @@ protected:
 
     struct Connection
     {
+        AutoCloseFD fd;
         FdSink to;
         FdSource from;
         unsigned int daemonVersion;
@@ -112,7 +114,7 @@ protected:
 
         virtual ~Connection();
 
-        void processStderr(Sink * sink = 0, Source * source = 0);
+        std::exception_ptr processStderr(Sink * sink = 0, Source * source = 0);
     };
 
     ref<Connection> openConnectionWrapper();
@@ -125,6 +127,10 @@ protected:
 
     virtual void setOptions(Connection & conn);
 
+    ConnectionHandle getConnection();
+
+    friend struct ConnectionHandle;
+
 private:
 
     std::atomic_bool failed{false};
@@ -142,11 +148,6 @@ public:
 
 private:
 
-    struct Connection : RemoteStore::Connection
-    {
-        AutoCloseFD fd;
-    };
-
     ref<RemoteStore::Connection> openConnection() override;
     std::experimental::optional<std::string> path;
 };
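
Aside (not part of the patch): Connection::processStderr() now returns a std::exception_ptr instead of throwing, so the ConnectionHandle wrapper can tell daemon-reported errors (connection still usable) apart from anything else (protocol state unknown, connection discarded). A minimal sketch of the calling pattern every RemoteStore method is converted to; the function name is mine, the body mirrors optimiseStore above.

    // Sketch only: runs inside remote-store.cc, next to ConnectionHandle.
    static void optimiseViaDaemon(ConnectionHandle conn)
    {
        conn->to << wopOptimiseStore;   // any worker op follows the same shape
        conn.processStderr();           // daemon error -> rethrown here;
                                        // any other exception reaching
                                        // ~ConnectionHandle -> markBad(), so
                                        // the Pool closes the connection
        readInt(conn->from);            // protocol ack
    }
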
diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc
index 7711388f05a9..ba11ce6bb6de 100644
--- a/src/libstore/s3-binary-cache-store.cc
+++ b/src/libstore/s3-binary-cache-store.cc
@@ -19,8 +19,6 @@
 #include <aws/core/utils/logging/LogMacros.h>
 #include <aws/core/utils/threading/Executor.h>
 #include <aws/s3/S3Client.h>
-#include <aws/s3/model/CreateBucketRequest.h>
-#include <aws/s3/model/GetBucketLocationRequest.h>
 #include <aws/s3/model/GetObjectRequest.h>
 #include <aws/s3/model/HeadObjectRequest.h>
 #include <aws/s3/model/ListObjectsRequest.h>
@@ -202,32 +200,6 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore
     {
         if (!diskCache->cacheExists(getUri(), wantMassQuery_, priority)) {
 
-            /* Create the bucket if it doesn't already exists. */
-            // FIXME: HeadBucket would be more appropriate, but doesn't return
-            // an easily parsed 404 message.
-            auto res = s3Helper.client->GetBucketLocation(
-                Aws::S3::Model::GetBucketLocationRequest().WithBucket(bucketName));
-
-            if (!res.IsSuccess()) {
-                if (res.GetError().GetErrorType() != Aws::S3::S3Errors::NO_SUCH_BUCKET)
-                    throw Error(format("AWS error checking bucket '%s': %s") % bucketName % res.GetError().GetMessage());
-
-                printInfo("creating S3 bucket '%s'...", bucketName);
-
-                // Stupid S3 bucket locations.
-                auto bucketConfig = Aws::S3::Model::CreateBucketConfiguration();
-                if (s3Helper.config->region != "us-east-1")
-                    bucketConfig.SetLocationConstraint(
-                        Aws::S3::Model::BucketLocationConstraintMapper::GetBucketLocationConstraintForName(
-                            s3Helper.config->region));
-
-                checkAws(format("AWS error creating bucket '%s'") % bucketName,
-                    s3Helper.client->CreateBucket(
-                        Aws::S3::Model::CreateBucketRequest()
-                        .WithBucket(bucketName)
-                        .WithCreateBucketConfiguration(bucketConfig)));
-            }
-
             BinaryCacheStore::init();
 
             diskCache->createCache(getUri(), storeDir, wantMassQuery_, priority);
diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc
index 1f42097fccfb..92e2685f7f66 100644
--- a/src/libstore/store-api.cc
+++ b/src/libstore/store-api.cc
@@ -320,6 +320,8 @@ ref<const ValidPathInfo> Store::queryPathInfo(const Path & storePath)
 void Store::queryPathInfo(const Path & storePath,
     Callback<ref<ValidPathInfo>> callback)
 {
+    assertStorePath(storePath);
+
     auto hashPart = storePathToHash(storePath);
 
     try {
@@ -610,7 +612,7 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore,
         });
         srcStore->narFromPath({storePath}, wrapperSink);
     }, [&]() {
-	throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", storePath, srcStore->getUri());
+        throw EndOfFile("NAR for '%s' fetched from '%s' is incomplete", storePath, srcStore->getUri());
     });
 
     dstStore->addToStore(*info, *source, repair, checkSigs);
diff --git a/src/libstore/store-api.hh b/src/libstore/store-api.hh
index c2f964e11f76..106b2be5e6b2 100644
--- a/src/libstore/store-api.hh
+++ b/src/libstore/store-api.hh
@@ -23,6 +23,7 @@ MakeError(BuildError, Error) /* denotes a permanent build failure */
 MakeError(InvalidPath, Error)
 MakeError(Unsupported, Error)
 MakeError(SubstituteGone, Error)
+MakeError(SubstituterDisabled, Error)
 
 
 struct BasicDerivation;
diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc
index 1be8934a2eba..bb68e82886d0 100644
--- a/src/libutil/archive.cc
+++ b/src/libutil/archive.cc
@@ -283,7 +283,7 @@ void parseDump(ParseSink & sink, Source & source)
 {
     string version;
     try {
-        version = readString(source);
+        version = readString(source, narVersionMagic1.size());
     } catch (SerialisationError & e) {
         /* This generally means the integer at the start couldn't be
            decoded.  Ignore and throw the exception below. */
diff --git a/src/libutil/compression.cc b/src/libutil/compression.cc
index 204c63cd26fc..0dd84e32034a 100644
--- a/src/libutil/compression.cc
+++ b/src/libutil/compression.cc
@@ -250,7 +250,7 @@ struct XzCompressionSink : CompressionSink
             ret = lzma_stream_encoder_mt(&strm, &mt_options);
             done = true;
 #else
-            printMsg(lvlError, "warning: parallel compression requested but not supported for metho  d '%1%', falling back to single-threaded compression", method);
+            printMsg(lvlError, "warning: parallel XZ compression requested but not supported, falling back to single-threaded compression");
 #endif
         }
 
diff --git a/src/libutil/pool.hh b/src/libutil/pool.hh
index 0b142b0597c7..d49067bb95dc 100644
--- a/src/libutil/pool.hh
+++ b/src/libutil/pool.hh
@@ -97,6 +97,7 @@ public:
     private:
         Pool & pool;
         std::shared_ptr<R> r;
+        bool bad = false;
 
         friend Pool;
 
@@ -112,7 +113,8 @@ public:
             if (!r) return;
             {
                 auto state_(pool.state.lock());
-                state_->idle.push_back(ref<R>(r));
+                if (!bad)
+                    state_->idle.push_back(ref<R>(r));
                 assert(state_->inUse);
                 state_->inUse--;
             }
@@ -121,6 +123,8 @@ public:
 
         R * operator -> () { return &*r; }
         R & operator * () { return *r; }
+
+        void markBad() { bad = true; }
     };
 
     Handle get()
diff --git a/src/libutil/serialise.cc b/src/libutil/serialise.cc
index 17448f70efb6..0e75eeec2bfe 100644
--- a/src/libutil/serialise.cc
+++ b/src/libutil/serialise.cc
@@ -169,17 +169,13 @@ std::unique_ptr<Source> sinkToSource(
     {
         typedef boost::coroutines2::coroutine<std::string> coro_t;
 
+        std::function<void(Sink &)> fun;
         std::function<void()> eof;
-        coro_t::pull_type coro;
+        std::experimental::optional<coro_t::pull_type> coro;
+        bool started = false;
 
         SinkToSource(std::function<void(Sink &)> fun, std::function<void()> eof)
-            : eof(eof)
-            , coro([&](coro_t::push_type & yield) {
-                LambdaSink sink([&](const unsigned char * data, size_t len) {
-                    if (len) yield(std::string((const char *) data, len));
-                });
-                fun(sink);
-            })
+            : fun(fun), eof(eof)
         {
         }
 
@@ -188,11 +184,19 @@ std::unique_ptr<Source> sinkToSource(
 
         size_t read(unsigned char * data, size_t len) override
         {
-            if (!coro) { eof(); abort(); }
+            if (!coro)
+                coro = coro_t::pull_type([&](coro_t::push_type & yield) {
+                    LambdaSink sink([&](const unsigned char * data, size_t len) {
+                            if (len) yield(std::string((const char *) data, len));
+                        });
+                    fun(sink);
+                });
+
+            if (!*coro) { eof(); abort(); }
 
             if (pos == cur.size()) {
-                if (!cur.empty()) coro();
-                cur = coro.get();
+                if (!cur.empty()) (*coro)();
+                cur = coro->get();
                 pos = 0;
             }
 
@@ -268,16 +272,17 @@ void readPadding(size_t len, Source & source)
 size_t readString(unsigned char * buf, size_t max, Source & source)
 {
     auto len = readNum<size_t>(source);
-    if (len > max) throw Error("string is too long");
+    if (len > max) throw SerialisationError("string is too long");
     source(buf, len);
     readPadding(len, source);
     return len;
 }
 
 
-string readString(Source & source)
+string readString(Source & source, size_t max)
 {
     auto len = readNum<size_t>(source);
+    if (len > max) throw SerialisationError("string is too long");
     std::string res(len, 0);
     source((unsigned char*) res.data(), len);
     readPadding(len, source);
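
Aside (not part of the patch): the sinkToSource() change above makes the coroutine start lazily on the first read() instead of in the constructor, so merely constructing the Source does no work. A usage sketch under stated assumptions: `store` is an open store and `path` a valid store path, both names are mine; the eof callback mirrors the copyStorePath change earlier in this diff.

    // Nothing runs until the first read(); the producer lambda executes
    // inside the coroutine only once data is actually pulled.
    auto source = sinkToSource(
        [&](Sink & sink) {
            store->narFromPath({path}, sink);            // producer
        },
        [&]() {
            throw EndOfFile("NAR for '%s' ended early", path);
        });

    std::vector<unsigned char> buf(65536);
    size_t n = source->read(buf.data(), buf.size());     // coroutine starts here
    // n holds the size of the first chunk, at most buf.size() bytes
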
diff --git a/src/libutil/serialise.hh b/src/libutil/serialise.hh
index 4b6ad5da5b9c..969e4dff383d 100644
--- a/src/libutil/serialise.hh
+++ b/src/libutil/serialise.hh
@@ -284,7 +284,7 @@ inline uint64_t readLongLong(Source & source)
 
 void readPadding(size_t len, Source & source);
 size_t readString(unsigned char * buf, size_t max, Source & source);
-string readString(Source & source);
+string readString(Source & source, size_t max = std::numeric_limits<size_t>::max());
 template<class T> T readStrings(Source & source);
 
 Source & operator >> (Source & in, string & s);
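
Aside (not part of the patch): readString() now takes an upper bound and fails with SerialisationError when the length prefix exceeds it, so a corrupt or hostile stream can no longer make the reader allocate an arbitrarily large buffer (the NAR parser uses this via readString(source, narVersionMagic1.size()) above). A small sketch; StringSource and the input data are assumptions on my part, not shown in this diff.

    // Reject any serialised string longer than 1 KiB instead of allocating it.
    StringSource wire(serialisedData);        // assumed to hold wire-format data
    try {
        auto s = readString(wire, 1024);      // bounded read
    } catch (SerialisationError & e) {
        // the length prefix claimed more than 1024 bytes
    }
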
diff --git a/src/libutil/util.cc b/src/libutil/util.cc
index 6bc64ae75a42..03f0be705c1d 100644
--- a/src/libutil/util.cc
+++ b/src/libutil/util.cc
@@ -167,7 +167,7 @@ Path dirOf(const Path & path)
 {
     Path::size_type pos = path.rfind('/');
     if (pos == string::npos)
-        throw Error(format("invalid file name '%1%'") % path);
+        return ".";
     return pos == 0 ? "/" : Path(path, 0, pos);
 }
 
@@ -468,7 +468,7 @@ static Lazy<Path> getHome2([]() {
         std::vector<char> buf(16384);
         struct passwd pwbuf;
         struct passwd * pw;
-        if (getpwuid_r(getuid(), &pwbuf, buf.data(), buf.size(), &pw) != 0
+        if (getpwuid_r(geteuid(), &pwbuf, buf.data(), buf.size(), &pw) != 0
             || !pw || !pw->pw_dir || !pw->pw_dir[0])
             throw Error("cannot determine user's home directory");
         homeDir = pw->pw_dir;
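
Aside (not part of the patch): dirOf() now returns "." for a name that contains no slash instead of throwing, which is what lets the nix-shell change above put dirOf(shell) on PATH even when the chosen shell is a bare command name. A tiny self-contained check of the new behaviour:

    #include <cassert>
    #include "util.hh"

    int main()
    {
        using namespace nix;
        assert(dirOf("/usr/bin/bash") == "/usr/bin");
        assert(dirOf("/bash") == "/");
        assert(dirOf("bash") == ".");   // previously: Error "invalid file name 'bash'"
        return 0;
    }
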
diff --git a/src/nix-build/local.mk b/src/nix-build/local.mk
deleted file mode 100644
index a2d1c91dfd9d..000000000000
--- a/src/nix-build/local.mk
+++ /dev/null
@@ -1,9 +0,0 @@
-programs += nix-build
-
-nix-build_DIR := $(d)
-
-nix-build_SOURCES := $(d)/nix-build.cc
-
-nix-build_LIBS = libmain libexpr libstore libutil libformat
-
-$(eval $(call install-symlink, nix-build, $(bindir)/nix-shell))
diff --git a/src/nix-build/nix-build.cc b/src/nix-build/nix-build.cc
index 21a0756a2007..618895d387d4 100755
--- a/src/nix-build/nix-build.cc
+++ b/src/nix-build/nix-build.cc
@@ -16,6 +16,7 @@
 #include "get-drvs.hh"
 #include "common-eval-args.hh"
 #include "attr-path.hh"
+#include "legacy.hh"
 
 using namespace nix;
 using namespace std::string_literals;
@@ -66,11 +67,8 @@ std::vector<string> shellwords(const string & s)
     return res;
 }
 
-void mainWrapped(int argc, char * * argv)
+static void _main(int argc, char * * argv)
 {
-    initNix();
-    initGC();
-
     auto dryRun = false;
     auto runEnv = std::regex_search(argv[0], std::regex("nix-shell$"));
     auto pure = false;
@@ -305,6 +303,8 @@ void mainWrapped(int argc, char * * argv)
         }
     }
 
+    state->printStats();
+
     auto buildPaths = [&](const PathSet & paths) {
         /* Note: we do this even when !printMissing to efficiently
            fetch binary cache data. */
@@ -415,16 +415,20 @@ void mainWrapped(int argc, char * * argv)
                 "dontAddDisableDepTrack=1; "
                 "[ -e $stdenv/setup ] && source $stdenv/setup; "
                 "%3%"
+                "PATH=\"%4%:$PATH\"; "
+                "SHELL=%5%; "
                 "set +e; "
                 R"s([ -n "$PS1" ] && PS1='\n\[\033[1;32m\][nix-shell:\w]\$\[\033[0m\] '; )s"
                 "if [ \"$(type -t runHook)\" = function ]; then runHook shellHook; fi; "
                 "unset NIX_ENFORCE_PURITY; "
                 "shopt -u nullglob; "
-                "unset TZ; %4%"
-                "%5%",
+                "unset TZ; %6%"
+                "%7%",
                 (Path) tmpDir,
                 (pure ? "" : "p=$PATH; "),
                 (pure ? "" : "PATH=$PATH:$p; unset p; "),
+                dirOf(shell),
+                shell,
                 (getenv("TZ") ? (string("export TZ='") + getenv("TZ") + "'; ") : ""),
                 envCommand));
 
@@ -498,9 +502,5 @@ void mainWrapped(int argc, char * * argv)
     }
 }
 
-int main(int argc, char * * argv)
-{
-    return handleExceptions(argv[0], [&]() {
-        return mainWrapped(argc, argv);
-    });
-}
+static RegisterLegacyCommand s1("nix-build", _main);
+static RegisterLegacyCommand s2("nix-shell", _main);
diff --git a/src/nix-channel/local.mk b/src/nix-channel/local.mk
deleted file mode 100644
index c14e8c359ca0..000000000000
--- a/src/nix-channel/local.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-programs += nix-channel
-
-nix-channel_DIR := $(d)
-
-nix-channel_LIBS = libmain libformat libstore libutil
-
-nix-channel_SOURCES := $(d)/nix-channel.cc
diff --git a/src/nix-channel/nix-channel.cc b/src/nix-channel/nix-channel.cc
index 55ebda438965..8b66cc7e314e 100755
--- a/src/nix-channel/nix-channel.cc
+++ b/src/nix-channel/nix-channel.cc
@@ -1,9 +1,11 @@
 #include "shared.hh"
 #include "globals.hh"
 #include "download.hh"
+#include "store-api.hh"
+#include "legacy.hh"
+
 #include <fcntl.h>
 #include <regex>
-#include "store-api.hh"
 #include <pwd.h>
 
 using namespace nix;
@@ -157,11 +159,9 @@ static void update(const StringSet & channelNames)
     replaceSymlink(profile, channelLink);
 }
 
-int main(int argc, char ** argv)
+static int _main(int argc, char ** argv)
 {
-    return handleExceptions(argv[0], [&]() {
-        initNix();
-
+    {
         // Figure out the name of the `.nix-channels' file to use
         auto home = getHome();
         channelsList = home + "/.nix-channels";
@@ -169,7 +169,7 @@ int main(int argc, char ** argv)
 
         // Figure out the name of the channels profile.
         ;
-        auto pw = getpwuid(getuid());
+        auto pw = getpwuid(geteuid());
         std::string name = pw ? pw->pw_name : getEnv("USER", "");
         if (name.empty())
             throw Error("cannot figure out user name");
@@ -255,5 +255,9 @@ int main(int argc, char ** argv)
                 runProgram(settings.nixBinDir + "/nix-env", false, envArgs);
                 break;
         }
-    });
+
+        return 0;
+    }
 }
+
+static RegisterLegacyCommand s1("nix-channel", _main);
diff --git a/src/nix-collect-garbage/local.mk b/src/nix-collect-garbage/local.mk
deleted file mode 100644
index 02d14cf62199..000000000000
--- a/src/nix-collect-garbage/local.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-programs += nix-collect-garbage
-
-nix-collect-garbage_DIR := $(d)
-
-nix-collect-garbage_SOURCES := $(d)/nix-collect-garbage.cc
-
-nix-collect-garbage_LIBS = libmain libstore libutil libformat
diff --git a/src/nix-collect-garbage/nix-collect-garbage.cc b/src/nix-collect-garbage/nix-collect-garbage.cc
index 37fe22f48134..d4060ac937fc 100644
--- a/src/nix-collect-garbage/nix-collect-garbage.cc
+++ b/src/nix-collect-garbage/nix-collect-garbage.cc
@@ -2,6 +2,7 @@
 #include "profiles.hh"
 #include "shared.hh"
 #include "globals.hh"
+#include "legacy.hh"
 
 #include <iostream>
 #include <cerrno>
@@ -48,12 +49,10 @@ void removeOldGenerations(std::string dir)
     }
 }
 
-int main(int argc, char * * argv)
+static int _main(int argc, char * * argv)
 {
-    bool removeOld = false;
-
-    return handleExceptions(argv[0], [&]() {
-        initNix();
+    {
+        bool removeOld = false;
 
         GCOptions options;
 
@@ -90,5 +89,9 @@ int main(int argc, char * * argv)
             PrintFreed freed(true, results);
             store->collectGarbage(options, results);
         }
-    });
+
+        return 0;
+    }
 }
+
+static RegisterLegacyCommand s1("nix-collect-garbage", _main);
diff --git a/src/nix-copy-closure/local.mk b/src/nix-copy-closure/local.mk
deleted file mode 100644
index 5018ab975b44..000000000000
--- a/src/nix-copy-closure/local.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-programs += nix-copy-closure
-
-nix-copy-closure_DIR := $(d)
-
-nix-copy-closure_LIBS = libmain libformat libstore libutil
-
-nix-copy-closure_SOURCES := $(d)/nix-copy-closure.cc
diff --git a/src/nix-copy-closure/nix-copy-closure.cc b/src/nix-copy-closure/nix-copy-closure.cc
index dfb1b8fc5dc4..fdcde8b076b5 100755
--- a/src/nix-copy-closure/nix-copy-closure.cc
+++ b/src/nix-copy-closure/nix-copy-closure.cc
@@ -1,13 +1,12 @@
 #include "shared.hh"
 #include "store-api.hh"
+#include "legacy.hh"
 
 using namespace nix;
 
-int main(int argc, char ** argv)
+static int _main(int argc, char ** argv)
 {
-    return handleExceptions(argv[0], [&]() {
-        initNix();
-
+    {
         auto gzip = false;
         auto toMode = true;
         auto includeOutputs = false;
@@ -61,5 +60,9 @@ int main(int argc, char ** argv)
         from->computeFSClosure(storePaths2, closure, false, includeOutputs);
 
         copyPaths(from, to, closure, NoRepair, NoCheckSigs, useSubstitutes);
-    });
+
+        return 0;
+    }
 }
+
+static RegisterLegacyCommand s1("nix-copy-closure", _main);
diff --git a/src/nix-daemon/local.mk b/src/nix-daemon/local.mk
deleted file mode 100644
index 5a4474465b3c..000000000000
--- a/src/nix-daemon/local.mk
+++ /dev/null
@@ -1,13 +0,0 @@
-programs += nix-daemon
-
-nix-daemon_DIR := $(d)
-
-nix-daemon_SOURCES := $(d)/nix-daemon.cc
-
-nix-daemon_LIBS = libmain libstore libutil libformat
-
-nix-daemon_LDFLAGS = -pthread
-
-ifeq ($(OS), SunOS)
-        nix-daemon_LDFLAGS += -lsocket
-endif
diff --git a/src/nix-daemon/nix-daemon.cc b/src/nix-daemon/nix-daemon.cc
index 644fa6681de3..8368c3266142 100644
--- a/src/nix-daemon/nix-daemon.cc
+++ b/src/nix-daemon/nix-daemon.cc
@@ -9,6 +9,7 @@
 #include "monitor-fd.hh"
 #include "derivations.hh"
 #include "finally.hh"
+#include "legacy.hh"
 
 #include <algorithm>
 
@@ -557,7 +558,8 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
                     ;
                 else if (trusted
                     || name == settings.buildTimeout.name
-                    || name == "connect-timeout")
+                    || name == "connect-timeout"
+                    || (name == "builders" && value == ""))
                     settings.set(name, value);
                 else if (setSubstituters(settings.substituters))
                     ;
@@ -708,7 +710,7 @@ static void performOp(TunnelLogger * logger, ref<Store> store,
         logger->startWork();
 
         // FIXME: race if addToStore doesn't read source?
-        store.cast<Store>()->addToStore(info, *source, (RepairFlag) repair,
+        store->addToStore(info, *source, (RepairFlag) repair,
             dontCheckSigs ? NoCheckSigs : CheckSigs, nullptr);
 
         logger->stopWork();
@@ -1057,11 +1059,9 @@ static void daemonLoop(char * * argv)
 }
 
 
-int main(int argc, char * * argv)
+static int _main(int argc, char * * argv)
 {
-    return handleExceptions(argv[0], [&]() {
-        initNix();
-
+    {
         auto stdio = false;
 
         parseCmdLine(argc, argv, [&](Strings::iterator & arg, const Strings::iterator & end) {
@@ -1121,7 +1121,7 @@ int main(int argc, char * * argv)
                         if (res == -1)
                             throw SysError("splicing data from stdin to daemon socket");
                         else if (res == 0)
-                            return;
+                            return 0;
                     }
                 }
             } else {
@@ -1130,5 +1130,9 @@ int main(int argc, char * * argv)
         } else {
             daemonLoop(argv);
         }
-    });
+
+        return 0;
+    }
 }
+
+static RegisterLegacyCommand s1("nix-daemon", _main);
diff --git a/src/nix-env/local.mk b/src/nix-env/local.mk
deleted file mode 100644
index e80719cd76f7..000000000000
--- a/src/nix-env/local.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-programs += nix-env
-
-nix-env_DIR := $(d)
-
-nix-env_SOURCES := $(wildcard $(d)/*.cc)
-
-nix-env_LIBS = libexpr libmain libstore libutil libformat
diff --git a/src/nix-env/nix-env.cc b/src/nix-env/nix-env.cc
index a43b103f6ec6..56ed75daee44 100644
--- a/src/nix-env/nix-env.cc
+++ b/src/nix-env/nix-env.cc
@@ -13,6 +13,7 @@
 #include "json.hh"
 #include "value-to-json.hh"
 #include "xml-writer.hh"
+#include "legacy.hh"
 
 #include <cerrno>
 #include <ctime>
@@ -150,10 +151,8 @@ static void loadSourceExpr(EvalState & state, const Path & path, Value & v)
     if (stat(path.c_str(), &st) == -1)
         throw SysError(format("getting information about '%1%'") % path);
 
-    if (isNixExpr(path, st)) {
+    if (isNixExpr(path, st))
         state.evalFile(path, v);
-        return;
-    }
 
     /* The path is a directory.  Put the Nix expressions in the
        directory in a set, with the file name of each expression as
@@ -161,13 +160,15 @@ static void loadSourceExpr(EvalState & state, const Path & path, Value & v)
        set flat, not nested, to make it easier for a user to have a
        ~/.nix-defexpr directory that includes some system-wide
        directory). */
-    if (S_ISDIR(st.st_mode)) {
+    else if (S_ISDIR(st.st_mode)) {
         state.mkAttrs(v, 1024);
         state.mkList(*state.allocAttr(v, state.symbols.create("_combineChannels")), 0);
         StringSet attrs;
         getAllExprs(state, path, attrs, v);
         v.attrs->sort();
     }
+
+    else throw Error("path '%s' is not a directory or a Nix expression", path);
 }
 
 
@@ -1311,12 +1312,9 @@ static void opVersion(Globals & globals, Strings opFlags, Strings opArgs)
 }
 
 
-int main(int argc, char * * argv)
+static int _main(int argc, char * * argv)
 {
-    return handleExceptions(argv[0], [&]() {
-        initNix();
-        initGC();
-
+    {
         Strings opFlags, opArgs;
         Operation op = 0;
         RepairFlag repair = NoRepair;
@@ -1428,5 +1426,9 @@ int main(int argc, char * * argv)
         op(globals, opFlags, opArgs);
 
         globals.state->printStats();
-    });
+
+        return 0;
+    }
 }
+
+static RegisterLegacyCommand s1("nix-env", _main);
diff --git a/src/nix-instantiate/local.mk b/src/nix-instantiate/local.mk
deleted file mode 100644
index 7d1bc5ec9dfb..000000000000
--- a/src/nix-instantiate/local.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-programs += nix-instantiate
-
-nix-instantiate_DIR := $(d)
-
-nix-instantiate_SOURCES := $(d)/nix-instantiate.cc
-
-nix-instantiate_LIBS = libexpr libmain libstore libutil libformat
diff --git a/src/nix-instantiate/nix-instantiate.cc b/src/nix-instantiate/nix-instantiate.cc
index eb6d34dd8219..a736caa8f056 100644
--- a/src/nix-instantiate/nix-instantiate.cc
+++ b/src/nix-instantiate/nix-instantiate.cc
@@ -9,6 +9,7 @@
 #include "util.hh"
 #include "store-api.hh"
 #include "common-eval-args.hh"
+#include "legacy.hh"
 
 #include <map>
 #include <iostream>
@@ -83,12 +84,9 @@ void processExpr(EvalState & state, const Strings & attrPaths,
 }
 
 
-int main(int argc, char * * argv)
+static int _main(int argc, char * * argv)
 {
-    return handleExceptions(argv[0], [&]() {
-        initNix();
-        initGC();
-
+    {
         Strings files;
         bool readStdin = false;
         bool fromArgs = false;
@@ -171,7 +169,7 @@ int main(int argc, char * * argv)
                 if (p == "") throw Error(format("unable to find '%1%'") % i);
                 std::cout << p << std::endl;
             }
-            return;
+            return 0;
         }
 
         if (readStdin) {
@@ -190,5 +188,9 @@ int main(int argc, char * * argv)
         }
 
         state->printStats();
-    });
+
+        return 0;
+    }
 }
+
+static RegisterLegacyCommand s1("nix-instantiate", _main);
diff --git a/src/nix-prefetch-url/local.mk b/src/nix-prefetch-url/local.mk
deleted file mode 100644
index 3e7735406af0..000000000000
--- a/src/nix-prefetch-url/local.mk
+++ /dev/null
@@ -1,7 +0,0 @@
-programs += nix-prefetch-url
-
-nix-prefetch-url_DIR := $(d)
-
-nix-prefetch-url_SOURCES := $(d)/nix-prefetch-url.cc
-
-nix-prefetch-url_LIBS = libmain libexpr libstore libutil libformat
diff --git a/src/nix-prefetch-url/nix-prefetch-url.cc b/src/nix-prefetch-url/nix-prefetch-url.cc
index a3b025723cf1..ddb724913214 100644
--- a/src/nix-prefetch-url/nix-prefetch-url.cc
+++ b/src/nix-prefetch-url/nix-prefetch-url.cc
@@ -6,6 +6,7 @@
 #include "eval-inline.hh"
 #include "common-eval-args.hh"
 #include "attr-path.hh"
+#include "legacy.hh"
 
 #include <iostream>
 
@@ -44,12 +45,9 @@ string resolveMirrorUri(EvalState & state, string uri)
 }
 
 
-int main(int argc, char * * argv)
+static int _main(int argc, char * * argv)
 {
-    return handleExceptions(argv[0], [&]() {
-        initNix();
-        initGC();
-
+    {
         HashType ht = htSHA256;
         std::vector<string> args;
         bool printPath = getEnv("PRINT_PATH") != "";
@@ -221,5 +219,9 @@ int main(int argc, char * * argv)
         std::cout << printHash16or32(hash) << std::endl;
         if (printPath)
             std::cout << storePath << std::endl;
-    });
+
+        return 0;
+    }
 }
+
+static RegisterLegacyCommand s1("nix-prefetch-url", _main);
diff --git a/src/nix-store/graphml.cc b/src/nix-store/graphml.cc
new file mode 100644
index 000000000000..670fbe227a4c
--- /dev/null
+++ b/src/nix-store/graphml.cc
@@ -0,0 +1,90 @@
+#include "graphml.hh"
+#include "util.hh"
+#include "store-api.hh"
+#include "derivations.hh"
+
+#include <iostream>
+
+
+using std::cout;
+
+namespace nix {
+
+
+static inline const string & xmlQuote(const string & s)
+{
+    // Luckily, store paths shouldn't contain any character that needs to be
+    // quoted.
+    return s;
+}
+
+
+static string symbolicName(const string & path)
+{
+    string p = baseNameOf(path);
+    return string(p, p.find('-') + 1);
+}
+
+
+static string makeEdge(const string & src, const string & dst)
+{
+    return fmt("  <edge source=\"%1%\" target=\"%2%\"/>\n",
+        xmlQuote(src), xmlQuote(dst));
+}
+
+
+static string makeNode(const ValidPathInfo & info)
+{
+    return fmt(
+        "  <node id=\"%1%\">\n"
+        "    <data key=\"narSize\">%2%</data>\n"
+        "    <data key=\"name\">%3%</data>\n"
+        "    <data key=\"type\">%4%</data>\n"
+        "  </node>\n",
+        info.path,
+        info.narSize,
+        symbolicName(info.path),
+        (isDerivation(info.path) ? "derivation" : "output-path"));
+}
+
+
+void printGraphML(ref<Store> store, const PathSet & roots)
+{
+    PathSet workList(roots);
+    PathSet doneSet;
+    std::pair<PathSet::iterator,bool> ret;
+
+    cout << "<?xml version='1.0' encoding='utf-8'?>\n"
+         << "<graphml xmlns='http://graphml.graphdrawing.org/xmlns'\n"
+         << "    xmlns:xsi='http://www.w3.org/2001/XMLSchema-instance'\n"
+         << "    xsi:schemaLocation='http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd'>\n"
+         << "<key id='narSize' for='node' attr.name='narSize' attr.type='int'/>"
+         << "<key id='name' for='node' attr.name='name' attr.type='string'/>"
+         << "<key id='type' for='node' attr.name='type' attr.type='string'/>"
+         << "<graph id='G' edgedefault='directed'>\n";
+
+    while (!workList.empty()) {
+        Path path = *(workList.begin());
+        workList.erase(path);
+
+        ret = doneSet.insert(path);
+        if (ret.second == false) continue;
+
+        ValidPathInfo info = *(store->queryPathInfo(path));
+        cout << makeNode(info);
+
+        for (auto & p : store->queryPathInfo(path)->references) {
+            if (p != path) {
+                workList.insert(p);
+                cout << makeEdge(path, p);
+            }
+        }
+
+    }
+
+    cout << "</graph>\n";
+    cout << "</graphml>\n";
+}
+
+
+}
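
Aside (not part of the patch): the new emitter walks the closure of the given roots and attaches narSize, name and type data to each node. A hedged usage sketch; the ./result symlink is an assumption, and the call shape mirrors the qGraphML case in nix-store.cc further down.

    // Dump the closure of a build result as GraphML on stdout, as
    // `nix-store -q --graphml ./result` now does.
    auto store = openStore();
    PathSet roots;
    roots.insert(store->followLinksToStorePath("./result"));   // assumed symlink
    printGraphML(store, roots);
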
diff --git a/src/nix-store/xmlgraph.hh b/src/nix-store/graphml.hh
index a6e7d4e2805a..b78df1e49a67 100644
--- a/src/nix-store/xmlgraph.hh
+++ b/src/nix-store/graphml.hh
@@ -6,6 +6,6 @@ namespace nix {
 
 class Store;
 
-void printXmlGraph(ref<Store> store, const PathSet & roots);
+void printGraphML(ref<Store> store, const PathSet & roots);
 
 }
diff --git a/src/nix-store/local.mk b/src/nix-store/local.mk
deleted file mode 100644
index ade0b233adf3..000000000000
--- a/src/nix-store/local.mk
+++ /dev/null
@@ -1,9 +0,0 @@
-programs += nix-store
-
-nix-store_DIR := $(d)
-
-nix-store_SOURCES := $(wildcard $(d)/*.cc)
-
-nix-store_LIBS = libmain libstore libutil libformat
-
-nix-store_LDFLAGS = -lbz2 -pthread $(SODIUM_LIBS)
diff --git a/src/nix-store/nix-store.cc b/src/nix-store/nix-store.cc
index fe68f681ae28..a9ad14762e62 100644
--- a/src/nix-store/nix-store.cc
+++ b/src/nix-store/nix-store.cc
@@ -8,7 +8,8 @@
 #include "shared.hh"
 #include "util.hh"
 #include "worker-protocol.hh"
-#include "xmlgraph.hh"
+#include "graphml.hh"
+#include "legacy.hh"
 
 #include <iostream>
 #include <algorithm>
@@ -273,7 +274,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
     enum QueryType
         { qDefault, qOutputs, qRequisites, qReferences, qReferrers
         , qReferrersClosure, qDeriver, qBinding, qHash, qSize
-        , qTree, qGraph, qXml, qResolve, qRoots };
+        , qTree, qGraph, qGraphML, qResolve, qRoots };
     QueryType query = qDefault;
     bool useOutput = false;
     bool includeOutputs = false;
@@ -299,7 +300,7 @@ static void opQuery(Strings opFlags, Strings opArgs)
         else if (i == "--size") query = qSize;
         else if (i == "--tree") query = qTree;
         else if (i == "--graph") query = qGraph;
-        else if (i == "--xml") query = qXml;
+        else if (i == "--graphml") query = qGraphML;
         else if (i == "--resolve") query = qResolve;
         else if (i == "--roots") query = qRoots;
         else if (i == "--use-output" || i == "-u") useOutput = true;
@@ -403,13 +404,13 @@ static void opQuery(Strings opFlags, Strings opArgs)
             break;
         }
 
-        case qXml: {
+        case qGraphML: {
             PathSet roots;
             for (auto & i : opArgs) {
                 PathSet paths = maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise);
                 roots.insert(paths.begin(), paths.end());
             }
-            printXmlGraph(ref<Store>(store), roots);
+            printGraphML(ref<Store>(store), roots);
             break;
         }
 
@@ -993,11 +994,9 @@ static void opVersion(Strings opFlags, Strings opArgs)
 /* Scan the arguments; find the operation, set global flags, put all
    other flags in a list, and put all other arguments in another
    list. */
-int main(int argc, char * * argv)
+static int _main(int argc, char * * argv)
 {
-    return handleExceptions(argv[0], [&]() {
-        initNix();
-
+    {
         Strings opFlags, opArgs;
         Operation op = 0;
 
@@ -1084,5 +1083,9 @@ int main(int argc, char * * argv)
             store = openStore();
 
         op(opFlags, opArgs);
-    });
+
+        return 0;
+    }
 }
+
+static RegisterLegacyCommand s1("nix-store", _main);
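
Aside (not part of the patch): legacy.hh itself does not appear in this diff, so the registration mechanism can only be inferred from the call sites (static RegisterLegacyCommand s1("nix-store", _main)) and from main.cc's dispatch on argv[0] further down. The sketch below is an assumed, plausible shape of that header, not the real one.

    // Assumed sketch of legacy.hh: a static registry from program name to
    // entry point. Only the constructor signature is evidenced by this diff.
    #include <functional>
    #include <map>
    #include <string>

    namespace nix {

    typedef std::function<int(int, char * *)> MainFunction;

    struct RegisterLegacyCommand
    {
        typedef std::map<std::string, MainFunction> Commands;
        static Commands * commands;   // defined in a corresponding .cc file

        RegisterLegacyCommand(const std::string & name, MainFunction fun)
        {
            if (!commands) commands = new Commands;
            (*commands)[name] = fun;
        }
    };

    }
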
diff --git a/src/nix-store/xmlgraph.cc b/src/nix-store/xmlgraph.cc
deleted file mode 100644
index 0f7be7f7a02d..000000000000
--- a/src/nix-store/xmlgraph.cc
+++ /dev/null
@@ -1,66 +0,0 @@
-#include "xmlgraph.hh"
-#include "util.hh"
-#include "store-api.hh"
-
-#include <iostream>
-
-
-using std::cout;
-
-namespace nix {
-
-
-static inline const string & xmlQuote(const string & s)
-{
-    // Luckily, store paths shouldn't contain any character that needs to be
-    // quoted.
-    return s;
-}
-
-
-static string makeEdge(const string & src, const string & dst)
-{
-    format f = format("  <edge src=\"%1%\" dst=\"%2%\"/>\n")
-      % xmlQuote(src) % xmlQuote(dst);
-    return f.str();
-}
-
-
-static string makeNode(const string & id)
-{
-    format f = format("  <node name=\"%1%\"/>\n") % xmlQuote(id);
-    return f.str();
-}
-
-
-void printXmlGraph(ref<Store> store, const PathSet & roots)
-{
-    PathSet workList(roots);
-    PathSet doneSet;
-
-    cout << "<?xml version='1.0' encoding='utf-8'?>\n"
-         << "<nix>\n";
-
-    while (!workList.empty()) {
-        Path path = *(workList.begin());
-        workList.erase(path);
-
-        if (doneSet.find(path) != doneSet.end()) continue;
-        doneSet.insert(path);
-
-        cout << makeNode(path);
-
-        for (auto & p : store->queryPathInfo(path)->references) {
-            if (p != path) {
-                workList.insert(p);
-                cout << makeEdge(p, path);
-            }
-        }
-
-    }
-
-    cout << "</nix>\n";
-}
-
-
-}
diff --git a/src/nix/copy.cc b/src/nix/copy.cc
index 91711c8b46da..96bd453d87b4 100644
--- a/src/nix/copy.cc
+++ b/src/nix/copy.cc
@@ -69,12 +69,12 @@ struct CmdCopy : StorePathsCommand
             },
 #ifdef ENABLE_S3
             Example{
-                "To populate the current folder build output to a S3 binary cache:",
-                "nix copy --to s3://my-bucket?region=eu-west-1"
+                "To copy Hello to an S3 binary cache:",
+                "nix copy --to s3://my-bucket?region=eu-west-1 nixpkgs.hello"
             },
             Example{
-                "To populate the current folder build output to an S3-compatible binary cache:",
-                "nix copy --to s3://my-bucket?region=eu-west-1&endpoint=example.com"
+                "To copy Hello to an S3-compatible binary cache:",
+                "nix copy --to s3://my-bucket?region=eu-west-1&endpoint=example.com nixpkgs.hello"
             },
 #endif
         };
diff --git a/src/nix/local.mk b/src/nix/local.mk
index f76da194467c..bdcca33d2a6f 100644
--- a/src/nix/local.mk
+++ b/src/nix/local.mk
@@ -2,10 +2,25 @@ programs += nix
 
 nix_DIR := $(d)
 
-nix_SOURCES := $(wildcard $(d)/*.cc) $(wildcard src/linenoise/*.cpp)
+nix_SOURCES := \
+  $(wildcard $(d)/*.cc) \
+  $(wildcard src/linenoise/*.cpp) \
+  $(wildcard src/build-remote/*.cc) \
+  $(wildcard src/nix-build/*.cc) \
+  $(wildcard src/nix-channel/*.cc) \
+  $(wildcard src/nix-collect-garbage/*.cc) \
+  $(wildcard src/nix-copy-closure/*.cc) \
+  $(wildcard src/nix-daemon/*.cc) \
+  $(wildcard src/nix-env/*.cc) \
+  $(wildcard src/nix-instantiate/*.cc) \
+  $(wildcard src/nix-prefetch-url/*.cc) \
+  $(wildcard src/nix-store/*.cc) \
 
 nix_LIBS = libexpr libmain libstore libutil libformat
 
-nix_LDFLAGS = -pthread
+nix_LDFLAGS = -pthread $(SODIUM_LIBS)
 
-$(eval $(call install-symlink, nix, $(bindir)/nix-hash))
+$(foreach name, \
+  nix-build nix-channel nix-collect-garbage nix-copy-closure nix-daemon nix-env nix-hash nix-instantiate nix-prefetch-url nix-shell nix-store, \
+  $(eval $(call install-symlink, nix, $(bindir)/$(name))))
+$(eval $(call install-symlink, $(bindir)/nix, $(libexecdir)/nix/build-remote))
diff --git a/src/nix/main.cc b/src/nix/main.cc
index 69791e223c22..64c1dc35787c 100644
--- a/src/nix/main.cc
+++ b/src/nix/main.cc
@@ -67,9 +67,6 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs
 
 void mainWrapped(int argc, char * * argv)
 {
-    verbosity = lvlError;
-    settings.verboseBuild = false;
-
     /* The chroot helper needs to be run before any threads have been
        started. */
     if (argc > 0 && argv[0] == chrootHelperName) {
@@ -88,6 +85,9 @@ void mainWrapped(int argc, char * * argv)
         if (legacy) return legacy(argc, argv);
     }
 
+    verbosity = lvlError;
+    settings.verboseBuild = false;
+
     NixArgs args;
 
     args.parseCmdline(argvToStrings(argc, argv));
diff --git a/src/nix/repl.cc b/src/nix/repl.cc
index b71e6f905f23..1bbe256b2d8b 100644
--- a/src/nix/repl.cc
+++ b/src/nix/repl.cc
@@ -31,6 +31,7 @@ struct NixRepl
 {
     string curDir;
     EvalState state;
+    Bindings * autoArgs;
 
     Strings loadedFiles;
 
@@ -446,8 +447,7 @@ void NixRepl::loadFile(const Path & path)
     loadedFiles.push_back(path);
     Value v, v2;
     state.evalFile(lookupFileArg(state, path), v);
-    Bindings & bindings(*state.allocBindings(0));
-    state.autoCallFunction(bindings, v, v2);
+    state.autoCallFunction(*autoArgs, v, v2);
     addAttrsToScope(v2);
 }
 
@@ -699,6 +699,7 @@ struct CmdRepl : StoreCommand, MixEvalArgs
     void run(ref<Store> store) override
     {
         auto repl = std::make_unique<NixRepl>(searchPath, openStore());
+        repl->autoArgs = getAutoArgs(repl->state);
         repl->mainLoop(files);
     }
 };
diff --git a/src/nix/search.cc b/src/nix/search.cc
index 4cb1efa7955b..e086de2260a6 100644
--- a/src/nix/search.cc
+++ b/src/nix/search.cc
@@ -173,10 +173,12 @@ struct CmdSearch : SourceExprCommand, MixJSON
                             jsonElem.attr("description", description);
 
                         } else {
+                            auto name = hilite(parsed.name, nameMatch, "\e[0;2m")
+                                + std::string(parsed.fullName, parsed.name.length());
                             results[attrPath] = fmt(
                                 "* %s (%s)\n  %s\n",
                                 wrap("\e[0;1m", hilite(attrPath, attrPathMatch, "\e[0;1m")),
-                                wrap("\e[0;2m", hilite(parsed.fullName, nameMatch, "\e[0;2m")),
+                                wrap("\e[0;2m", hilite(name, nameMatch, "\e[0;2m")),
                                 hilite(description, descriptionMatch, ANSI_NORMAL));
                         }
                     }
diff --git a/src/nix/verify.cc b/src/nix/verify.cc
index 6540208a8a2c..7ef571561a0e 100644
--- a/src/nix/verify.cc
+++ b/src/nix/verify.cc
@@ -120,7 +120,7 @@ struct CmdVerify : StorePathsCommand
                             for (auto sig : sigs) {
                                 if (sigsSeen.count(sig)) continue;
                                 sigsSeen.insert(sig);
-                                if (info->checkSignature(publicKeys, sig))
+                                if (validSigs < ValidPathInfo::maxSigs && info->checkSignature(publicKeys, sig))
                                     validSigs++;
                             }
                         };