Diffstat (limited to 'src'): 29 files changed, 521 insertions, 187 deletions
diff --git a/src/libexpr/common-eval-args.hh b/src/libexpr/common-eval-args.hh index 09fa406b2cdc..be7fda783783 100644 --- a/src/libexpr/common-eval-args.hh +++ b/src/libexpr/common-eval-args.hh @@ -6,7 +6,7 @@ namespace nix { class Store; class EvalState; -struct Bindings; +class Bindings; struct MixEvalArgs : virtual Args { diff --git a/src/libexpr/parser.y b/src/libexpr/parser.y index eee31522830f..ef11dd609217 100644 --- a/src/libexpr/parser.y +++ b/src/libexpr/parser.y @@ -523,7 +523,6 @@ formal #include "eval.hh" #include "download.hh" #include "store-api.hh" -#include "primops/fetchgit.hh" namespace nix { @@ -665,11 +664,7 @@ std::pair<bool, std::string> EvalState::resolveSearchPathElem(const SearchPathEl if (isUri(elem.second)) { try { - if (hasPrefix(elem.second, "git://") || hasSuffix(elem.second, ".git")) - // FIXME: support specifying revision/branch - res = { true, exportGit(store, elem.second, "master").storePath }; - else - res = { true, getDownloader()->downloadCached(store, elem.second, true) }; + res = { true, getDownloader()->downloadCached(store, elem.second, true) }; } catch (DownloadError & e) { printError(format("warning: Nix search path entry '%1%' cannot be downloaded, ignoring") % elem.second); res = { false, "" }; diff --git a/src/libexpr/primops.cc b/src/libexpr/primops.cc index cd0dfbc03e94..e3b5dfb420b4 100644 --- a/src/libexpr/primops.cc +++ b/src/libexpr/primops.cc @@ -1009,22 +1009,21 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu } -struct FilterFromExpr : PathFilter +static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v) { - EvalState & state; - Value & filter; - Pos pos; + PathSet context; + Path path = state.coerceToPath(pos, *args[1], context); + if (!context.empty()) + throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos); - FilterFromExpr(EvalState & state, Value & filter, const Pos & pos) - : state(state), filter(filter), pos(pos) - { - } + state.forceValue(*args[0]); + if (args[0]->type != tLambda) + throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos); - bool operator () (const Path & path) - { - struct stat st; - if (lstat(path.c_str(), &st)) - throw SysError(format("getting attributes of path '%1%'") % path); + path = state.checkSourcePath(path); + + PathFilter filter = [&](const Path & path) { + auto st = lstat(path); /* Call the filter function. The first argument is the path, the second is a string indicating the type of the file. 
*/ @@ -1032,7 +1031,7 @@ struct FilterFromExpr : PathFilter mkString(arg1, path); Value fun2; - state.callFunction(filter, arg1, fun2, noPos); + state.callFunction(*args[0], arg1, fun2, noPos); Value arg2; mkString(arg2, @@ -1045,24 +1044,7 @@ struct FilterFromExpr : PathFilter state.callFunction(fun2, arg2, res, noPos); return state.forceBool(res, pos); - } -}; - - -static void prim_filterSource(EvalState & state, const Pos & pos, Value * * args, Value & v) -{ - PathSet context; - Path path = state.coerceToPath(pos, *args[1], context); - if (!context.empty()) - throw EvalError(format("string '%1%' cannot refer to other paths, at %2%") % path % pos); - - state.forceValue(*args[0]); - if (args[0]->type != tLambda) - throw TypeError(format("first argument in call to 'filterSource' is not a function but %1%, at %2%") % showType(*args[0]) % pos); - - FilterFromExpr filter(state, *args[0], pos); - - path = state.checkSourcePath(path); + }; Path dstPath = settings.readOnlyMode ? state.store->computeStorePathForPath(path, true, htSHA256, filter).first diff --git a/src/libexpr/primops/fetchgit.cc b/src/libexpr/primops/fetchGit.cc index 4af5301247bc..bca68ed72091 100644 --- a/src/libexpr/primops/fetchgit.cc +++ b/src/libexpr/primops/fetchGit.cc @@ -1,4 +1,3 @@ -#include "fetchgit.hh" #include "primops.hh" #include "eval-inline.hh" #include "download.hh" @@ -15,10 +14,63 @@ using namespace std::string_literals; namespace nix { +struct GitInfo +{ + Path storePath; + std::string rev; + std::string shortRev; + uint64_t revCount = 0; +}; + GitInfo exportGit(ref<Store> store, const std::string & uri, - const std::string & ref, const std::string & rev, + std::experimental::optional<std::string> ref, const std::string & rev, const std::string & name) { + if (!ref && rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.git")) { + + bool clean = true; + + try { + runProgram("git", true, { "-C", uri, "diff-index", "--quiet", "HEAD", "--" }); + } catch (ExecError e) { + if (!WIFEXITED(e.status) || WEXITSTATUS(e.status) != 1) throw; + clean = false; + } + + if (!clean) { + + /* This is an unclean working tree. So copy all tracked + files. 
*/ + + GitInfo gitInfo; + gitInfo.rev = "0000000000000000000000000000000000000000"; + gitInfo.shortRev = std::string(gitInfo.rev, 0, 7); + + auto files = tokenizeString<std::set<std::string>>( + runProgram("git", true, { "-C", uri, "ls-files", "-z" }), "\0"s); + + PathFilter filter = [&](const Path & p) -> bool { + assert(hasPrefix(p, uri)); + std::string file(p, uri.size() + 1); + + auto st = lstat(p); + + if (S_ISDIR(st.st_mode)) { + auto i = files.lower_bound(file); + return i != files.end() && hasPrefix(*i, file); + } + + return files.count(file); + }; + + gitInfo.storePath = store->addToStore("source", uri, true, htSHA256, filter); + + return gitInfo; + } + } + + if (!ref) ref = "master"s; + if (rev != "") { std::regex revRegex("^[0-9a-fA-F]{40}$"); if (!std::regex_match(rev, revRegex)) @@ -32,7 +84,7 @@ GitInfo exportGit(ref<Store> store, const std::string & uri, runProgram("git", true, { "init", "--bare", cacheDir }); } - std::string localRef = hashString(htSHA256, fmt("%s-%s", uri, ref)).to_string(Base32, false); + std::string localRef = hashString(htSHA256, fmt("%s-%s", uri, *ref)).to_string(Base32, false); Path localRefFile = cacheDir + "/refs/heads/" + localRef; @@ -41,21 +93,27 @@ GitInfo exportGit(ref<Store> store, const std::string & uri, time_t now = time(0); struct stat st; if (stat(localRefFile.c_str(), &st) != 0 || - st.st_mtime < now - settings.tarballTtl) + st.st_mtime <= now - settings.tarballTtl) { - Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", uri)); - - // FIXME: git stderr messes up our progress indicator, so - // we're using --quiet for now. Should process its stderr. - runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, ref + ":" + localRef }); - - struct timeval times[2]; - times[0].tv_sec = now; - times[0].tv_usec = 0; - times[1].tv_sec = now; - times[1].tv_usec = 0; - - utimes(localRefFile.c_str(), times); + if (rev == "" || + chomp(runProgram( + RunOptions("git", { "-C", cacheDir, "cat-file", "-t", rev }) + .killStderr(true)).second) != "commit") + { + Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Git repository '%s'", uri)); + + // FIXME: git stderr messes up our progress indicator, so + // we're using --quiet for now. Should process its stderr. + runProgram("git", true, { "-C", cacheDir, "fetch", "--quiet", "--force", "--", uri, *ref + ":" + localRef }); + + struct timeval times[2]; + times[0].tv_sec = now; + times[0].tv_usec = 0; + times[1].tv_sec = now; + times[1].tv_usec = 0; + + utimes(localRefFile.c_str(), times); + } } // FIXME: check whether rev is an ancestor of ref. 
@@ -67,10 +125,9 @@ GitInfo exportGit(ref<Store> store, const std::string & uri, std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + gitInfo.rev).to_string(Base32, false); Path storeLink = cacheDir + "/" + storeLinkName + ".link"; - PathLocks storeLinkLock({storeLink}, fmt("waiting for lock on '%1%'...", storeLink)); + PathLocks storeLinkLock({storeLink}, fmt("waiting for lock on '%1%'...", storeLink)); // FIXME: broken try { - // FIXME: doesn't handle empty lines auto json = nlohmann::json::parse(readFile(storeLink)); assert(json["name"] == name && json["rev"] == gitInfo.rev); @@ -114,7 +171,7 @@ GitInfo exportGit(ref<Store> store, const std::string & uri, static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Value & v) { std::string url; - std::string ref = "master"; + std::experimental::optional<std::string> ref; std::string rev; std::string name = "source"; PathSet context; @@ -145,7 +202,7 @@ static void prim_fetchGit(EvalState & state, const Pos & pos, Value * * args, Va } else url = state.coerceToString(pos, *args[0], context, false, false); - if (hasPrefix(url, "/")) url = "file://" + url; + if (!isUri(url)) url = absPath(url); // FIXME: git externals probably can be used to bypass the URI // whitelist. Ah well. diff --git a/src/libexpr/primops/fetchMercurial.cc b/src/libexpr/primops/fetchMercurial.cc new file mode 100644 index 000000000000..7def7103bf3d --- /dev/null +++ b/src/libexpr/primops/fetchMercurial.cc @@ -0,0 +1,202 @@ +#include "primops.hh" +#include "eval-inline.hh" +#include "download.hh" +#include "store-api.hh" +#include "pathlocks.hh" + +#include <sys/time.h> + +#include <regex> + +#include <nlohmann/json.hpp> + +using namespace std::string_literals; + +namespace nix { + +struct HgInfo +{ + Path storePath; + std::string branch; + std::string rev; + uint64_t revCount = 0; +}; + +std::regex commitHashRegex("^[0-9a-fA-F]{40}$"); + +HgInfo exportMercurial(ref<Store> store, const std::string & uri, + std::string rev, const std::string & name) +{ + if (rev == "" && hasPrefix(uri, "/") && pathExists(uri + "/.hg")) { + + bool clean = runProgram("hg", true, { "status", "-R", uri, "--modified", "--added", "--removed" }) == ""; + + if (!clean) { + + /* This is an unclean working tree. So copy all tracked + files. */ + + printTalkative("copying unclean Mercurial working tree '%s'", uri); + + HgInfo hgInfo; + hgInfo.rev = "0000000000000000000000000000000000000000"; + hgInfo.branch = chomp(runProgram("hg", true, { "branch", "-R", uri })); + + auto files = tokenizeString<std::set<std::string>>( + runProgram("hg", true, { "status", "-R", uri, "--clean", "--modified", "--added", "--no-status", "--print0" }), "\0"s); + + PathFilter filter = [&](const Path & p) -> bool { + assert(hasPrefix(p, uri)); + std::string file(p, uri.size() + 1); + + auto st = lstat(p); + + if (S_ISDIR(st.st_mode)) { + auto i = files.lower_bound(file); + return i != files.end() && hasPrefix(*i, file); + } + + return files.count(file); + }; + + hgInfo.storePath = store->addToStore("source", uri, true, htSHA256, filter); + + return hgInfo; + } + } + + if (rev == "") rev = "default"; + + Path cacheDir = fmt("%s/nix/hg/%s", getCacheDir(), hashString(htSHA256, uri).to_string(Base32, false)); + + Path stampFile = fmt("%s/.hg/%s.stamp", cacheDir, hashString(htSHA512, rev).to_string(Base32, false)); + + /* If we haven't pulled this repo less than ‘tarball-ttl’ seconds, + do so now. 
*/ + time_t now = time(0); + struct stat st; + if (stat(stampFile.c_str(), &st) != 0 || + st.st_mtime <= now - settings.tarballTtl) + { + /* Except that if this is a commit hash that we already have, + we don't have to pull again. */ + if (!(std::regex_match(rev, commitHashRegex) + && pathExists(cacheDir) + && runProgram( + RunOptions("hg", { "log", "-R", cacheDir, "-r", rev, "--template", "1" }) + .killStderr(true)).second == "1")) + { + Activity act(*logger, lvlTalkative, actUnknown, fmt("fetching Mercurial repository '%s'", uri)); + + if (pathExists(cacheDir)) { + runProgram("hg", true, { "pull", "-R", cacheDir, "--", uri }); + } else { + createDirs(dirOf(cacheDir)); + runProgram("hg", true, { "clone", "--noupdate", "--", uri, cacheDir }); + } + } + + writeFile(stampFile, ""); + } + + auto tokens = tokenizeString<std::vector<std::string>>( + runProgram("hg", true, { "log", "-R", cacheDir, "-r", rev, "--template", "{node} {rev} {branch}" })); + assert(tokens.size() == 3); + + HgInfo hgInfo; + hgInfo.rev = tokens[0]; + hgInfo.revCount = std::stoull(tokens[1]); + hgInfo.branch = tokens[2]; + + std::string storeLinkName = hashString(htSHA512, name + std::string("\0"s) + hgInfo.rev).to_string(Base32, false); + Path storeLink = fmt("%s/.hg/%s.link", cacheDir, storeLinkName); + + try { + auto json = nlohmann::json::parse(readFile(storeLink)); + + assert(json["name"] == name && json["rev"] == hgInfo.rev); + + hgInfo.storePath = json["storePath"]; + + if (store->isValidPath(hgInfo.storePath)) { + printTalkative("using cached Mercurial store path '%s'", hgInfo.storePath); + return hgInfo; + } + + } catch (SysError & e) { + if (e.errNo != ENOENT) throw; + } + + Path tmpDir = createTempDir(); + AutoDelete delTmpDir(tmpDir, true); + + runProgram("hg", true, { "archive", "-R", cacheDir, "-r", rev, tmpDir }); + + deletePath(tmpDir + "/.hg_archival.txt"); + + hgInfo.storePath = store->addToStore(name, tmpDir); + + nlohmann::json json; + json["storePath"] = hgInfo.storePath; + json["uri"] = uri; + json["name"] = name; + json["branch"] = hgInfo.branch; + json["rev"] = hgInfo.rev; + json["revCount"] = hgInfo.revCount; + + writeFile(storeLink, json.dump()); + + return hgInfo; +} + +static void prim_fetchMercurial(EvalState & state, const Pos & pos, Value * * args, Value & v) +{ + std::string url; + std::string rev; + std::string name = "source"; + PathSet context; + + state.forceValue(*args[0]); + + if (args[0]->type == tAttrs) { + + state.forceAttrs(*args[0], pos); + + for (auto & attr : *args[0]->attrs) { + string n(attr.name); + if (n == "url") + url = state.coerceToString(*attr.pos, *attr.value, context, false, false); + else if (n == "rev") + rev = state.forceStringNoCtx(*attr.value, *attr.pos); + else if (n == "name") + name = state.forceStringNoCtx(*attr.value, *attr.pos); + else + throw EvalError("unsupported argument '%s' to 'fetchMercurial', at %s", attr.name, *attr.pos); + } + + if (url.empty()) + throw EvalError(format("'url' argument required, at %1%") % pos); + + } else + url = state.coerceToString(pos, *args[0], context, false, false); + + if (!isUri(url)) url = absPath(url); + + // FIXME: git externals probably can be used to bypass the URI + // whitelist. Ah well. 
+ state.checkURI(url); + + auto hgInfo = exportMercurial(state.store, url, rev, name); + + state.mkAttrs(v, 8); + mkString(*state.allocAttr(v, state.sOutPath), hgInfo.storePath, PathSet({hgInfo.storePath})); + mkString(*state.allocAttr(v, state.symbols.create("branch")), hgInfo.branch); + mkString(*state.allocAttr(v, state.symbols.create("rev")), hgInfo.rev); + mkString(*state.allocAttr(v, state.symbols.create("shortRev")), std::string(hgInfo.rev, 0, 12)); + mkInt(*state.allocAttr(v, state.symbols.create("revCount")), hgInfo.revCount); + v.attrs->sort(); +} + +static RegisterPrimOp r("fetchMercurial", 1, prim_fetchMercurial); + +} diff --git a/src/libexpr/primops/fetchgit.hh b/src/libexpr/primops/fetchgit.hh deleted file mode 100644 index 056b6fcbe78d..000000000000 --- a/src/libexpr/primops/fetchgit.hh +++ /dev/null @@ -1,23 +0,0 @@ -#pragma once - -#include <string> - -#include "util.hh" - -namespace nix { - -class Store; - -struct GitInfo -{ - Path storePath; - std::string rev; - std::string shortRev; - uint64_t revCount = 0; -}; - -GitInfo exportGit(ref<Store> store, const std::string & uri, - const std::string & ref, const std::string & rev = "", - const std::string & name = ""); - -} diff --git a/src/libmain/common-args.cc b/src/libmain/common-args.cc index ea27aaa35e03..d3aac6aba1ff 100644 --- a/src/libmain/common-args.cc +++ b/src/libmain/common-args.cc @@ -34,6 +34,10 @@ MixCommonArgs::MixCommonArgs(const string & programName) warn(e.what()); } }); + + std::string cat = "config"; + settings.convertToArgs(*this, cat); + hiddenCategories.insert(cat); } } diff --git a/src/libmain/shared.cc b/src/libmain/shared.cc index 0f599f388585..85d3c077ba5e 100644 --- a/src/libmain/shared.cc +++ b/src/libmain/shared.cc @@ -369,5 +369,6 @@ PrintFreed::~PrintFreed() % showBytes(results.bytesFreed); } +Exit::~Exit() { } } diff --git a/src/libmain/shared.hh b/src/libmain/shared.hh index 9219dbed8325..1dcc4f0ac942 100644 --- a/src/libmain/shared.hh +++ b/src/libmain/shared.hh @@ -17,6 +17,7 @@ public: int status; Exit() : status(0) { } Exit(int status) : status(status) { } + virtual ~Exit(); }; int handleExceptions(const string & programName, std::function<void()> fun); diff --git a/src/libstore/binary-cache-store.cc b/src/libstore/binary-cache-store.cc index 67607ab3d43a..68af85bf16d9 100644 --- a/src/libstore/binary-cache-store.cc +++ b/src/libstore/binary-cache-store.cc @@ -73,6 +73,23 @@ Path BinaryCacheStore::narInfoFileFor(const Path & storePath) return storePathToHash(storePath) + ".narinfo"; } +void BinaryCacheStore::writeNarInfo(ref<NarInfo> narInfo) +{ + auto narInfoFile = narInfoFileFor(narInfo->path); + + upsertFile(narInfoFile, narInfo->to_string(), "text/x-nix-narinfo"); + + auto hashPart = storePathToHash(narInfo->path); + + { + auto state_(state.lock()); + state_->pathInfoCache.upsert(hashPart, std::shared_ptr<NarInfo>(narInfo)); + } + + if (diskCache) + diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo)); +} + void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::string> & nar, RepairFlag repair, CheckSigsFlag checkSigs, std::shared_ptr<FSAccessor> accessor) { @@ -89,8 +106,6 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::str % info.path % ref); } - auto narInfoFile = narInfoFileFor(info.path); - assert(nar->compare(0, narMagic.size(), narMagic) == 0); auto narInfo = make_ref<NarInfo>(info); @@ -119,42 +134,9 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::str 
accessor_->addToCache(info.path, *nar); } - std::function<void(const Path &, JSONPlaceholder &)> recurse; - - recurse = [&](const Path & path, JSONPlaceholder & res) { - auto st = narAccessor->stat(path); - - auto obj = res.object(); - - switch (st.type) { - case FSAccessor::Type::tRegular: - obj.attr("type", "regular"); - obj.attr("size", st.fileSize); - if (st.isExecutable) - obj.attr("executable", true); - break; - case FSAccessor::Type::tDirectory: - obj.attr("type", "directory"); - { - auto res2 = obj.object("entries"); - for (auto & name : narAccessor->readDirectory(path)) { - auto res3 = res2.placeholder(name); - recurse(path + "/" + name, res3); - } - } - break; - case FSAccessor::Type::tSymlink: - obj.attr("type", "symlink"); - obj.attr("target", narAccessor->readLink(path)); - break; - default: - abort(); - } - }; - { auto res = jsonRoot.placeholder("root"); - recurse("", res); + listNar(res, narAccessor, "", true); } } @@ -201,17 +183,7 @@ void BinaryCacheStore::addToStore(const ValidPathInfo & info, const ref<std::str /* Atomically write the NAR info file.*/ if (secretKey) narInfo->sign(*secretKey); - upsertFile(narInfoFile, narInfo->to_string(), "text/x-nix-narinfo"); - - auto hashPart = storePathToHash(narInfo->path); - - { - auto state_(state.lock()); - state_->pathInfoCache.upsert(hashPart, std::shared_ptr<NarInfo>(narInfo)); - } - - if (diskCache) - diskCache->upsertNarInfo(getUri(), hashPart, std::shared_ptr<NarInfo>(narInfo)); + writeNarInfo(narInfo); stats.narInfoWrite++; } @@ -326,6 +298,22 @@ ref<FSAccessor> BinaryCacheStore::getFSAccessor() return make_ref<RemoteFSAccessor>(ref<Store>(shared_from_this()), localNarCache); } +void BinaryCacheStore::addSignatures(const Path & storePath, const StringSet & sigs) +{ + /* Note: this is inherently racy since there is no locking on + binary caches. In particular, with S3 this unreliable, even + when addSignatures() is called sequentially on a path, because + S3 might return an outdated cached version. 
*/ + + auto narInfo = make_ref<NarInfo>((NarInfo &) *queryPathInfo(storePath)); + + narInfo->sigs.insert(sigs.begin(), sigs.end()); + + auto narInfoFile = narInfoFileFor(narInfo->path); + + writeNarInfo(narInfo); +} + std::shared_ptr<std::string> BinaryCacheStore::getBuildLog(const Path & path) { Path drvPath; diff --git a/src/libstore/binary-cache-store.hh b/src/libstore/binary-cache-store.hh index d3b0e0bd9332..8492ff600eba 100644 --- a/src/libstore/binary-cache-store.hh +++ b/src/libstore/binary-cache-store.hh @@ -59,6 +59,8 @@ private: std::string narInfoFileFor(const Path & storePath); + void writeNarInfo(ref<NarInfo> narInfo); + public: bool isValidPathUncached(const Path & path) override; @@ -119,8 +121,7 @@ public: ref<FSAccessor> getFSAccessor() override; - void addSignatures(const Path & storePath, const StringSet & sigs) override - { unsupported(); } + void addSignatures(const Path & storePath, const StringSet & sigs) override; std::shared_ptr<std::string> getBuildLog(const Path & path) override; diff --git a/src/libstore/build.cc b/src/libstore/build.cc index 9f8edc826a04..061682377257 100644 --- a/src/libstore/build.cc +++ b/src/libstore/build.cc @@ -2833,10 +2833,10 @@ void DerivationGoal::runChild() sandboxProfile += "(deny default (with no-log))\n"; } - sandboxProfile += "(import \"sandbox-defaults.sb\")"; + sandboxProfile += "(import \"sandbox-defaults.sb\")\n"; if (fixedOutput) - sandboxProfile += "(import \"sandbox-network.sb\")"; + sandboxProfile += "(import \"sandbox-network.sb\")\n"; /* Our rwx outputs */ sandboxProfile += "(allow file-read* file-write* process-exec\n"; @@ -2879,7 +2879,7 @@ void DerivationGoal::runChild() sandboxProfile += additionalSandboxProfile; } else - sandboxProfile += "(import \"sandbox-minimal.sb\")"; + sandboxProfile += "(import \"sandbox-minimal.sb\")\n"; debug("Generated sandbox profile:"); debug(sandboxProfile); @@ -2888,6 +2888,8 @@ void DerivationGoal::runChild() writeFile(sandboxFile, sandboxProfile); + bool allowLocalNetworking = get(drv->env, "__darwinAllowLocalNetworking") == "1"; + /* The tmpDir in scope points at the temporary build directory for our derivation. Some packages try different mechanisms to find temporary directories, so we want to open up a broader place for them to dump their files, if needed. 
*/ Path globalTmpDir = canonPath(getEnv("TMPDIR", "/tmp"), true); @@ -2903,6 +2905,10 @@ void DerivationGoal::runChild() args.push_back("_GLOBAL_TMP_DIR=" + globalTmpDir); args.push_back("-D"); args.push_back("IMPORT_DIR=" + settings.nixDataDir + "/nix/sandbox/"); + if (allowLocalNetworking) { + args.push_back("-D"); + args.push_back(string("_ALLOW_LOCAL_NETWORKING=1")); + } args.push_back(drv->builder); } #endif diff --git a/src/libstore/download.cc b/src/libstore/download.cc index 579a5e8c1b59..da31029b4f87 100644 --- a/src/libstore/download.cc +++ b/src/libstore/download.cc @@ -533,7 +533,7 @@ struct CurlDownloader : public Downloader // FIXME: do this on a worker thread sync2async<DownloadResult>(success, failure, [&]() -> DownloadResult { #ifdef ENABLE_S3 - S3Helper s3Helper(Aws::Region::US_EAST_1); // FIXME: make configurable + S3Helper s3Helper("", Aws::Region::US_EAST_1); // FIXME: make configurable auto slash = request.uri.find('/', 5); if (slash == std::string::npos) throw nix::Error("bad S3 URI '%s'", request.uri); @@ -707,7 +707,7 @@ bool isUri(const string & s) size_t pos = s.find("://"); if (pos == string::npos) return false; string scheme(s, 0, pos); - return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3"; + return scheme == "http" || scheme == "https" || scheme == "file" || scheme == "channel" || scheme == "git" || scheme == "s3" || scheme == "ssh"; } diff --git a/src/libstore/nar-accessor.cc b/src/libstore/nar-accessor.cc index 2afdeb021a93..839a7991c89f 100644 --- a/src/libstore/nar-accessor.cc +++ b/src/libstore/nar-accessor.cc @@ -1,5 +1,6 @@ #include "nar-accessor.hh" #include "archive.hh" +#include "json.hh" #include <map> #include <stack> @@ -181,4 +182,40 @@ ref<FSAccessor> makeNarAccessor(ref<const std::string> nar) return make_ref<NarAccessor>(nar); } +void listNar(JSONPlaceholder & res, ref<FSAccessor> accessor, + const Path & path, bool recurse) +{ + auto st = accessor->stat(path); + + auto obj = res.object(); + + switch (st.type) { + case FSAccessor::Type::tRegular: + obj.attr("type", "regular"); + obj.attr("size", st.fileSize); + if (st.isExecutable) + obj.attr("executable", true); + break; + case FSAccessor::Type::tDirectory: + obj.attr("type", "directory"); + { + auto res2 = obj.object("entries"); + for (auto & name : accessor->readDirectory(path)) { + if (recurse) { + auto res3 = res2.placeholder(name); + listNar(res3, accessor, path + "/" + name, true); + } else + res2.object(name); + } + } + break; + case FSAccessor::Type::tSymlink: + obj.attr("type", "symlink"); + obj.attr("target", accessor->readLink(path)); + break; + default: + throw Error("path '%s' does not exist in NAR", path); + } +} + } diff --git a/src/libstore/nar-accessor.hh b/src/libstore/nar-accessor.hh index 83c570be4c7b..ed8fe15cad23 100644 --- a/src/libstore/nar-accessor.hh +++ b/src/libstore/nar-accessor.hh @@ -8,4 +8,11 @@ namespace nix { file. */ ref<FSAccessor> makeNarAccessor(ref<const std::string> nar); +class JSONPlaceholder; + +/* Write a JSON representation of the contents of a NAR (except file + contents). 
*/ +void listNar(JSONPlaceholder & res, ref<FSAccessor> accessor, + const Path & path, bool recurse); + } diff --git a/src/libstore/s3-binary-cache-store.cc b/src/libstore/s3-binary-cache-store.cc index 5fc7371a5198..0079da1becfb 100644 --- a/src/libstore/s3-binary-cache-store.cc +++ b/src/libstore/s3-binary-cache-store.cc @@ -10,6 +10,8 @@ #include "istringstream_nocopy.hh" #include <aws/core/Aws.h> +#include <aws/core/auth/AWSCredentialsProvider.h> +#include <aws/core/auth/AWSCredentialsProviderChain.h> #include <aws/core/client/ClientConfiguration.h> #include <aws/core/client/DefaultRetryStrategy.h> #include <aws/core/utils/logging/FormattedLogSystem.h> @@ -77,9 +79,15 @@ static void initAWS() }); } -S3Helper::S3Helper(const string & region) +S3Helper::S3Helper(const std::string & profile, const std::string & region) : config(makeConfig(region)) - , client(make_ref<Aws::S3::S3Client>(*config, true, false)) + , client(make_ref<Aws::S3::S3Client>( + profile == "" + ? std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>( + std::make_shared<Aws::Auth::DefaultAWSCredentialsProviderChain>()) + : std::dynamic_pointer_cast<Aws::Auth::AWSCredentialsProvider>( + std::make_shared<Aws::Auth::ProfileConfigFileAWSCredentialsProvider>(profile.c_str())), + *config, true, false)) { } @@ -148,6 +156,7 @@ S3Helper::DownloadResult S3Helper::getObject( struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore { + const Setting<std::string> profile{this, "", "profile", "The name of the AWS configuration profile to use."}; const Setting<std::string> region{this, Aws::Region::US_EAST_1, "region", {"aws-region"}}; const Setting<std::string> narinfoCompression{this, "", "narinfo-compression", "compression method for .narinfo files"}; const Setting<std::string> lsCompression{this, "", "ls-compression", "compression method for .ls files"}; @@ -163,7 +172,7 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore const Params & params, const std::string & bucketName) : S3BinaryCacheStore(params) , bucketName(bucketName) - , s3Helper(region) + , s3Helper(profile, region) { diskCache = getNarInfoDiskCache(); } @@ -241,8 +250,8 @@ struct S3BinaryCacheStoreImpl : public S3BinaryCacheStore auto & error = res.GetError(); if (error.GetErrorType() == Aws::S3::S3Errors::RESOURCE_NOT_FOUND || error.GetErrorType() == Aws::S3::S3Errors::NO_SUCH_KEY - || (error.GetErrorType() == Aws::S3::S3Errors::UNKNOWN // FIXME - && error.GetMessage().find("404") != std::string::npos)) + // If bucket listing is disabled, 404s turn into 403s + || error.GetErrorType() == Aws::S3::S3Errors::ACCESS_DENIED) return false; throw Error(format("AWS error fetching '%s': %s") % path % error.GetMessage()); } diff --git a/src/libstore/s3.hh b/src/libstore/s3.hh index 08a7fbf96e98..4f996400343c 100644 --- a/src/libstore/s3.hh +++ b/src/libstore/s3.hh @@ -14,7 +14,7 @@ struct S3Helper ref<Aws::Client::ClientConfiguration> config; ref<Aws::S3::S3Client> client; - S3Helper(const std::string & region); + S3Helper(const std::string & profile, const std::string & region); ref<Aws::Client::ClientConfiguration> makeConfig(const std::string & region); diff --git a/src/libstore/sandbox-defaults.sb b/src/libstore/sandbox-defaults.sb index d63c8f813c9e..0299d1ee45d2 100644 --- a/src/libstore/sandbox-defaults.sb +++ b/src/libstore/sandbox-defaults.sb @@ -21,8 +21,15 @@ ; Allow sending signals within the sandbox. (allow signal (target same-sandbox)) +; Allow getpwuid. 
+(allow mach-lookup (global-name "com.apple.system.opendirectoryd.libinfo")) + ; Access to /tmp. -(allow file* process-exec (literal "/tmp") (subpath TMPDIR)) +; The network-outbound/network-inbound ones are for unix domain sockets, which +; we allow access to in TMPDIR (but if we allow them more broadly, you could in +; theory escape the sandbox) +(allow file* process-exec network-outbound network-inbound + (literal "/tmp") (subpath TMPDIR)) ; Some packages like to read the system version. (allow file-read* (literal "/System/Library/CoreServices/SystemVersion.plist")) @@ -30,6 +37,29 @@ ; Without this line clang cannot write to /dev/null, breaking some configure tests. (allow file-read-metadata (literal "/dev")) +; Many packages like to do local networking in their test suites, but let's only +; allow it if the package explicitly asks for it. +(if (param "_ALLOW_LOCAL_NETWORKING") + (begin + (allow network* (local ip) (local tcp) (local udp)) + + ; Allow access to /etc/resolv.conf (which is a symlink to + ; /private/var/run/resolv.conf). + ; TODO: deduplicate with sandbox-network.sb + (allow file-read-metadata + (literal "/var") + (literal "/etc") + (literal "/etc/resolv.conf") + (literal "/private/etc/resolv.conf")) + + (allow file-read* + (literal "/private/var/run/resolv.conf")) + + ; Allow DNS lookups. This is even needed for localhost, which lots of tests rely on + (allow file-read-metadata (literal "/etc/hosts")) + (allow file-read* (literal "/private/etc/hosts")) + (allow network-outbound (remote unix-socket (path-literal "/private/var/run/mDNSResponder"))))) + ; Standard devices. (allow file* (literal "/dev/null") @@ -54,5 +84,4 @@ (allow file-read-metadata (literal "/etc") (literal "/var") - (literal "/private/var/tmp") - ) + (literal "/private/var/tmp")) diff --git a/src/libstore/store-api.cc b/src/libstore/store-api.cc index c57e42fec00d..8146513a4b6e 100644 --- a/src/libstore/store-api.cc +++ b/src/libstore/store-api.cc @@ -389,8 +389,10 @@ PathSet Store::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubsti Sync<State> state_(State{paths.size(), PathSet()}); std::condition_variable wakeup; + ThreadPool pool; - for (auto & path : paths) + auto doQuery = [&](const Path & path ) { + checkInterrupt(); queryPathInfo(path, [path, &state_, &wakeup](ref<ValidPathInfo> info) { auto state(state_.lock()); @@ -411,6 +413,12 @@ PathSet Store::queryValidPaths(const PathSet & paths, SubstituteFlag maybeSubsti if (!--state->left) wakeup.notify_one(); }); + }; + + for (auto & path : paths) + pool.enqueue(std::bind(doQuery, path)); + + pool.process(); while (true) { auto state(state_.lock()); @@ -569,9 +577,9 @@ void copyStorePath(ref<Store> srcStore, ref<Store> dstStore, auto dstUri = dstStore->getUri(); Activity act(*logger, lvlInfo, actCopyPath, - srcUri == "local" + srcUri == "local" || srcUri == "daemon" ? fmt("copying path '%s' to '%s'", storePath, dstUri) - : dstUri == "local" + : dstUri == "local" || dstUri == "daemon" ? 
fmt("copying path '%s' from '%s'", storePath, srcUri) : fmt("copying path '%s' from '%s' to '%s'", storePath, srcUri, dstUri), {storePath, srcUri, dstUri}); diff --git a/src/libutil/archive.cc b/src/libutil/archive.cc index ea1deb924e67..f71229d8fdd6 100644 --- a/src/libutil/archive.cc +++ b/src/libutil/archive.cc @@ -29,7 +29,7 @@ const std::string narVersionMagic1 = "nix-archive-1"; static string caseHackSuffix = "~nix~case~hack~"; -PathFilter defaultPathFilter; +PathFilter defaultPathFilter = [](const Path &) { return true; }; static void dumpContents(const Path & path, size_t size, diff --git a/src/libutil/archive.hh b/src/libutil/archive.hh index 607ebf8b28f9..8a15e849c7b8 100644 --- a/src/libutil/archive.hh +++ b/src/libutil/archive.hh @@ -44,13 +44,6 @@ namespace nix { `+' denotes string concatenation. */ -struct PathFilter -{ - virtual ~PathFilter() { } - virtual bool operator () (const Path & path) { return true; } -}; - -extern PathFilter defaultPathFilter; void dumpPath(const Path & path, Sink & sink, PathFilter & filter = defaultPathFilter); diff --git a/src/libutil/hash.hh b/src/libutil/hash.hh index d83049b02368..fd7a61df8e46 100644 --- a/src/libutil/hash.hh +++ b/src/libutil/hash.hh @@ -93,8 +93,6 @@ Hash hashFile(HashType ht, const Path & path); /* Compute the hash of the given path. The hash is defined as (essentially) hashString(ht, dumpPath(path)). */ -struct PathFilter; -extern PathFilter defaultPathFilter; typedef std::pair<Hash, unsigned long long> HashResult; HashResult hashPath(HashType ht, const Path & path, PathFilter & filter = defaultPathFilter); diff --git a/src/libutil/util.cc b/src/libutil/util.cc index 16c2c97aac9b..96c0cd78383d 100644 --- a/src/libutil/util.cc +++ b/src/libutil/util.cc @@ -895,31 +895,45 @@ std::vector<char *> stringsToCharPtrs(const Strings & ss) string runProgram(Path program, bool searchPath, const Strings & args, const std::experimental::optional<std::string> & input) { + RunOptions opts(program, args); + opts.searchPath = searchPath; + opts.input = input; + + auto res = runProgram(opts); + + if (!statusOk(res.first)) + throw ExecError(res.first, fmt("program '%1%' %2%", program, statusToString(res.first))); + + return res.second; +} + +std::pair<int, std::string> runProgram(const RunOptions & options) +{ checkInterrupt(); /* Create a pipe. */ Pipe out, in; out.create(); - if (input) in.create(); + if (options.input) in.create(); /* Fork. 
*/ Pid pid = startProcess([&]() { if (dup2(out.writeSide.get(), STDOUT_FILENO) == -1) throw SysError("dupping stdout"); - if (input && dup2(in.readSide.get(), STDIN_FILENO) == -1) + if (options.input && dup2(in.readSide.get(), STDIN_FILENO) == -1) throw SysError("dupping stdin"); - Strings args_(args); - args_.push_front(program); + Strings args_(options.args); + args_.push_front(options.program); restoreSignals(); - if (searchPath) - execvp(program.c_str(), stringsToCharPtrs(args_).data()); + if (options.searchPath) + execvp(options.program.c_str(), stringsToCharPtrs(args_).data()); else - execv(program.c_str(), stringsToCharPtrs(args_).data()); + execv(options.program.c_str(), stringsToCharPtrs(args_).data()); - throw SysError(format("executing '%1%'") % program); + throw SysError("executing '%1%'", options.program); }); out.writeSide = -1; @@ -934,11 +948,11 @@ string runProgram(Path program, bool searchPath, const Strings & args, }); - if (input) { + if (options.input) { in.readSide = -1; writerThread = std::thread([&]() { try { - writeFull(in.writeSide.get(), *input); + writeFull(in.writeSide.get(), *options.input); promise.set_value(); } catch (...) { promise.set_exception(std::current_exception()); @@ -951,14 +965,11 @@ string runProgram(Path program, bool searchPath, const Strings & args, /* Wait for the child to finish. */ int status = pid.wait(); - if (!statusOk(status)) - throw ExecError(status, format("program '%1%' %2%") - % program % statusToString(status)); /* Wait for the writer thread to finish. */ - if (input) promise.get_future().get(); + if (options.input) promise.get_future().get(); - return result; + return {status, result}; } diff --git a/src/libutil/util.hh b/src/libutil/util.hh index fccf5d854800..a3494e09b09b 100644 --- a/src/libutil/util.hh +++ b/src/libutil/util.hh @@ -245,6 +245,23 @@ string runProgram(Path program, bool searchPath = false, const Strings & args = Strings(), const std::experimental::optional<std::string> & input = {}); +struct RunOptions +{ + Path program; + bool searchPath = true; + Strings args; + std::experimental::optional<std::string> input; + bool _killStderr = false; + + RunOptions(const Path & program, const Strings & args) + : program(program), args(args) { }; + + RunOptions & killStderr(bool v) { _killStderr = true; return *this; } +}; + +std::pair<int, std::string> runProgram(const RunOptions & options); + + class ExecError : public Error { public: @@ -481,4 +498,10 @@ struct MaintainCount std::pair<unsigned short, unsigned short> getWindowSize(); +/* Used in various places. 
*/ +typedef std::function<bool(const Path & path)> PathFilter; + +extern PathFilter defaultPathFilter; + + } diff --git a/src/nix/ls.cc b/src/nix/ls.cc index 5a5fa8f62d92..69620595d8ca 100644 --- a/src/nix/ls.cc +++ b/src/nix/ls.cc @@ -2,10 +2,12 @@ #include "store-api.hh" #include "fs-accessor.hh" #include "nar-accessor.hh" +#include "common-args.hh" +#include "json.hh" using namespace nix; -struct MixLs : virtual Args +struct MixLs : virtual Args, MixJSON { std::string path; @@ -20,7 +22,7 @@ struct MixLs : virtual Args mkFlag('d', "directory", "show directories rather than their contents", &showDirectory); } - void list(ref<FSAccessor> accessor) + void listText(ref<FSAccessor> accessor) { std::function<void(const FSAccessor::Stat &, const Path &, const std::string &, bool)> doPath; @@ -61,10 +63,6 @@ struct MixLs : virtual Args showFile(curPath, relPath); }; - if (path == "/") { - path = ""; - } - auto st = accessor->stat(path); if (st.type == FSAccessor::Type::tMissing) throw Error(format("path '%1%' does not exist") % path); @@ -72,6 +70,17 @@ struct MixLs : virtual Args st.type == FSAccessor::Type::tDirectory ? "." : baseNameOf(path), showDirectory); } + + void list(ref<FSAccessor> accessor) + { + if (path == "/") path = ""; + + if (json) { + JSONPlaceholder jsonRoot(std::cout); + listNar(jsonRoot, accessor, path, recursive); + } else + listText(accessor); + } }; struct CmdLsStore : StoreCommand, MixLs diff --git a/src/nix/main.cc b/src/nix/main.cc index 060402cd08d5..06bb8a1c3043 100644 --- a/src/nix/main.cc +++ b/src/nix/main.cc @@ -43,10 +43,6 @@ struct NixArgs : virtual MultiCommand, virtual MixCommonArgs .longName("version") .description("show version information") .handler([&]() { printVersion(programName); }); - - std::string cat = "config"; - settings.convertToArgs(*this, cat); - hiddenCategories.insert(cat); } void printFlags(std::ostream & out) override diff --git a/src/nix/path-info.cc b/src/nix/path-info.cc index ca02a4c929be..47caa401d3c9 100644 --- a/src/nix/path-info.cc +++ b/src/nix/path-info.cc @@ -65,7 +65,7 @@ struct CmdPathInfo : StorePathsCommand, MixJSON pathLen = std::max(pathLen, storePath.size()); if (json) { - JSONPlaceholder jsonRoot(std::cout, true); + JSONPlaceholder jsonRoot(std::cout); store->pathInfoToJSON(jsonRoot, // FIXME: preserve order? PathSet(storePaths.begin(), storePaths.end()), diff --git a/src/nix/search.cc b/src/nix/search.cc index f458367dcb55..a9dc2d6b924b 100644 --- a/src/nix/search.cc +++ b/src/nix/search.cc @@ -84,7 +84,7 @@ struct CmdSearch : SourceExprCommand, MixJSON bool first = true; - auto jsonOut = json ? std::make_unique<JSONObject>(std::cout, true) : nullptr; + auto jsonOut = json ? std::make_unique<JSONObject>(std::cout) : nullptr; auto sToplevel = state->symbols.create("_toplevel"); auto sRecurse = state->symbols.create("recurseForDerivations"); diff --git a/src/nix/show-config.cc b/src/nix/show-config.cc index c628c2898d73..c64b12c8dd62 100644 --- a/src/nix/show-config.cc +++ b/src/nix/show-config.cc @@ -26,7 +26,7 @@ struct CmdShowConfig : Command, MixJSON { if (json) { // FIXME: use appropriate JSON types (bool, ints, etc). - JSONObject jsonObj(std::cout, true); + JSONObject jsonObj(std::cout); settings.toJSON(jsonObj); } else { for (auto & s : settings.getSettings()) |
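The changes above replace the old virtual PathFilter class with a plain std::function typedef in src/libutil/util.hh, and split runProgram into a string-returning wrapper plus a pair-returning overload driven by RunOptions. Below is a minimal sketch of how a caller might combine the two new interfaces; the helper names, the .git check, and the error handling are illustrative assumptions, not part of this commit.

#include "util.hh"
#include "hash.hh"
#include "store-api.hh"

using namespace nix;

/* Sketch only: exercises the RunOptions-based runProgram() and the
   std::function-based PathFilter introduced by this commit. */

static bool isWorkingTreeClean(const Path & repoPath)
{
    /* killStderr(true) suppresses the child's stderr; the pair-returning
       overload reports the exit status instead of throwing ExecError. */
    auto res = runProgram(
        RunOptions("git", { "-C", repoPath, "diff-index", "--quiet", "HEAD", "--" })
        .killStderr(true));
    return res.first == 0;
}

static Path addWorkingTreeToStore(ref<Store> store, const Path & repoPath)
{
    /* PathFilter is now a plain std::function, so a lambda capturing local
       state can be passed straight to addToStore(). */
    PathFilter filter = [&](const Path & p) {
        return baseNameOf(p) != ".git";   /* prune the .git directory */
    };
    return store->addToStore("source", repoPath, true, htSHA256, filter);
}

Because PathFilter is now just std::function<bool(const Path &)>, call sites such as prim_filterSource and exportGit can build their filters from lambdas instead of defining one-off subclasses of the old virtual PathFilter.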