Diffstat (limited to 'nix')
-rw-r--r--  nix/OWNERS | 1
-rw-r--r--  nix/binify/default.nix | 2
-rw-r--r--  nix/bufCheck/default.nix | 31
-rw-r--r--  nix/buildGo/README.md | 27
-rw-r--r--  nix/buildGo/default.nix | 154
-rw-r--r--  nix/buildGo/example/default.nix | 13
-rw-r--r--  nix/buildGo/example/thing.proto | 10
-rw-r--r--  nix/buildGo/external/default.nix | 57
-rw-r--r--  nix/buildGo/external/main.go | 36
-rw-r--r--  nix/buildGo/proto.nix | 84
-rw-r--r--  nix/buildLisp/README.md | 157
-rw-r--r--  nix/buildLisp/default.nix | 852
-rw-r--r--  nix/buildLisp/example/default.nix | 19
-rw-r--r--  nix/buildLisp/tests/argv0.nix | 58
-rw-r--r--  nix/buildManPages/OWNERS | 1
-rw-r--r--  nix/buildManPages/default.nix | 103
-rw-r--r--  nix/buildTypedGo/default.nix | 34
-rw-r--r--  nix/buildTypedGo/example/default.nix | 8
-rw-r--r--  nix/buildTypedGo/example/main.go | 215
-rw-r--r--  nix/buildkite/default.nix | 491
-rwxr-xr-x  nix/buildkite/fetch-parent-targets.sh | 55
-rw-r--r--  nix/dependency-analyzer/default.nix | 263
-rw-r--r--  nix/dependency-analyzer/examples/ci-targets.nix | 12
-rw-r--r--  nix/dependency-analyzer/examples/lisp.nix | 5
-rw-r--r--  nix/dependency-analyzer/tests/default.nix | 36
-rw-r--r--  nix/drvSeqL/default.nix | 47
-rw-r--r--  nix/emptyDerivation/OWNERS | 4
-rw-r--r--  nix/emptyDerivation/default.nix | 6
-rw-r--r--  nix/emptyDerivation/emptyDerivation.nix | 18
-rw-r--r--  nix/emptyDerivation/tests.nix | 18
-rw-r--r--  nix/escapeExecline/OWNERS | 3
-rw-r--r--  nix/escapeExecline/default.nix | 19
-rw-r--r--  nix/fetchGoModule/OWNERS | 2
-rw-r--r--  nix/fetchGoModule/default.nix | 32
-rw-r--r--  nix/getBins/OWNERS | 3
-rw-r--r--  nix/getBins/default.nix | 21
-rw-r--r--  nix/getBins/tests.nix | 20
-rw-r--r--  nix/lazy-deps/default.nix | 91
-rw-r--r--  nix/mergePatch/default.nix | 192
-rw-r--r--  nix/netstring/attrsToKeyValList.nix | 33
-rw-r--r--  nix/netstring/fromString.nix | 10
-rw-r--r--  nix/nint/OWNERS | 1
-rw-r--r--  nix/nint/README.md | 93
-rw-r--r--  nix/nint/default.nix | 16
-rw-r--r--  nix/nint/nint.rs | 149
-rw-r--r--  nix/nix-1p/README.md | 648
-rw-r--r--  nix/nix-1p/default.nix | 16
-rw-r--r--  nix/readTree/README.md | 28
-rw-r--r--  nix/readTree/default.nix | 330
-rw-r--r--  nix/readTree/tests/.skip-subtree | 1
-rw-r--r--  nix/readTree/tests/default.nix | 139
-rw-r--r--  nix/readTree/tests/test-example/third_party/default.nix | 5
-rw-r--r--  nix/readTree/tests/test-example/third_party/rustpkgs/aho-corasick.nix | 1
-rw-r--r--  nix/readTree/tests/test-example/third_party/rustpkgs/serde.nix | 1
-rw-r--r--  nix/readTree/tests/test-example/tools/cheddar/default.nix | 1
-rw-r--r--  nix/readTree/tests/test-example/tools/roquefort.nix | 1
-rw-r--r--  nix/readTree/tests/test-marker/directory-marked/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-marker/directory-marked/nested/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-marker/file-children/one.nix | 3
-rw-r--r--  nix/readTree/tests/test-marker/file-children/two.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/default.nix | 7
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/subdir/a.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/default.nix | 5
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/no-merge/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/no-merge/subdir/a.nix | 1
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/sibling.nix | 1
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/subdir/a.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/no-skip-subtree/a/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/no-skip-subtree/b/c.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/no-skip-subtree/default.nix | 5
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-subtree/.skip-subtree | 1
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-subtree/a/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-subtree/b/c.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-subtree/default.nix | 5
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-tree/a/default.nix | 1
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-tree/b/.skip-tree | 1
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-tree/b/default.nix | 1
-rw-r--r--  nix/readTree/tests/test-wrong-no-dots/no-dots-in-function.nix | 3
-rw-r--r--  nix/readTree/tests/test-wrong-not-a-function/not-a-function.nix | 1
-rw-r--r--  nix/renderMarkdown/default.nix | 21
-rw-r--r--  nix/runExecline/OWNERS | 3
-rw-r--r--  nix/runExecline/default.nix | 16
-rw-r--r--  nix/runExecline/runExecline.nix | 65
-rw-r--r--  nix/runExecline/tests.nix | 134
-rw-r--r--  nix/runTestsuite/default.nix | 152
-rw-r--r--  nix/sparseTree/OWNERS | 1
-rw-r--r--  nix/sparseTree/default.nix | 84
-rw-r--r--  nix/stateMonad/default.nix | 76
-rw-r--r--  nix/stateMonad/tests/default.nix | 110
-rw-r--r--  nix/tag/default.nix | 166
-rw-r--r--  nix/tag/tests.nix | 99
-rw-r--r--  nix/tailscale/default.nix | 5
-rw-r--r--  nix/utils/OWNERS | 1
-rw-r--r--  nix/utils/default.nix | 160
-rw-r--r--  nix/utils/tests/.skip-subtree | 1
-rw-r--r--  nix/utils/tests/default.nix | 99
-rw-r--r--  nix/utils/tests/directory/file | 0
l---------  nix/utils/tests/missing | 1
l---------  nix/utils/tests/symlink-directory | 1
l---------  nix/utils/tests/symlink-file | 1
l---------  nix/utils/tests/symlink-symlink-directory | 1
l---------  nix/utils/tests/symlink-symlink-file | 1
-rw-r--r--  nix/writeElispBin/default.nix | 20
-rw-r--r--  nix/writeExecline/OWNERS | 3
-rw-r--r--  nix/writeExecline/default.nix | 17
-rw-r--r--  nix/writeScript/default.nix | 20
-rw-r--r--  nix/writeTree/OWNERS | 1
-rw-r--r--  nix/writeTree/default.nix | 43
-rw-r--r--  nix/writeTree/tests/default.nix | 93
-rw-r--r--  nix/writers/default.nix | 113
-rw-r--r--  nix/writers/tests/rust.nix | 76
-rw-r--r--  nix/yants/README.md | 6
-rw-r--r--  nix/yants/default.nix | 323
-rw-r--r--  nix/yants/tests/default.nix | 155
114 files changed, 5674 insertions, 932 deletions
diff --git a/nix/OWNERS b/nix/OWNERS
new file mode 100644
index 0000000000..a640227914
--- /dev/null
+++ b/nix/OWNERS
@@ -0,0 +1 @@
+Profpatsch
diff --git a/nix/binify/default.nix b/nix/binify/default.nix
index d40930fd33..a9900caf43 100644
--- a/nix/binify/default.nix
+++ b/nix/binify/default.nix
@@ -10,7 +10,7 @@
 # with `binify { exe = …; name = "hello" }`.
 { exe, name }:
 
-pkgs.runCommandLocal "${name}-bin" {} ''
+pkgs.runCommandLocal "${name}-bin" { } ''
   mkdir -p $out/bin
   ln -sT ${lib.escapeShellArg exe} $out/bin/${lib.escapeShellArg name}
 ''
diff --git a/nix/bufCheck/default.nix b/nix/bufCheck/default.nix
index 6d17cb460e..ec98cfc376 100644
--- a/nix/bufCheck/default.nix
+++ b/nix/bufCheck/default.nix
@@ -1,9 +1,26 @@
-# Check protobuf syntax and breaking.
+# Check protobuf breaking. Lints already happen in individual targets.
 #
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
-pkgs.writeShellScriptBin "ci-buf-check" ''
-  ${depot.third_party.bufbuild}/bin/buf check lint --input "${depot.depotPath}"
-  # Report-only
-  ${depot.third_party.bufbuild}/bin/buf check breaking --input "${depot.depotPath}" --against-input "${depot.depotPath}/.git#branch=canon" || true
-''
+let
+  inherit (depot.nix) bufCheck; # self reference
+
+  script = pkgs.writeShellScriptBin "ci-buf-check" ''
+    export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.buf ]}"
+    # Report-only
+    (cd $(git rev-parse --show-toplevel) && (buf breaking . --against "./.git#ref=HEAD~1" || true))
+  '';
+in
+
+script.overrideAttrs (old: {
+  meta = lib.recursiveUpdate old.meta {
+    # Protobuf check step executed in the buildkite pipeline which
+    # validates that changes to .proto files between revisions
+    # don't cause backwards-incompatible or otherwise flawed changes.
+    ci.extraSteps.protoCheck = {
+      alwaysRun = true;
+      label = ":water_buffalo: protoCheck";
+      command = pkgs.writeShellScript "ci-buf-check-step" "exec ${depot.nix.bufCheck}/bin/ci-buf-check";
+    };
+  };
+})
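A minimal sketch (hypothetical step name and script) of how another derivation could register a similar extra CI step through `meta.ci.extraSteps`, mirroring the `protoCheck` step above; `pkgs`, `lib` and `someDrv` are assumed to be in scope:

```nix
# Sketch only: attach a hypothetical Buildkite step to someDrv, using the
# same attribute shape (alwaysRun, label, command) as the protoCheck step.
someDrv.overrideAttrs (old: {
  meta = lib.recursiveUpdate (old.meta or { }) {
    ci.extraSteps.myCheck = {
      alwaysRun = true;
      label = ":mag: myCheck";
      command = pkgs.writeShellScript "my-check-step" "exec ${someDrv}/bin/my-check";
    };
  };
})
```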
diff --git a/nix/buildGo/README.md b/nix/buildGo/README.md
index 37e0c06933..e9667c039a 100644
--- a/nix/buildGo/README.md
+++ b/nix/buildGo/README.md
@@ -2,8 +2,7 @@ buildGo.nix
 ===========
 
 This is an alternative [Nix][] build system for [Go][]. It supports building Go
-libraries and programs, and even automatically generating Protobuf & gRPC
-libraries.
+libraries and programs.
 
 *Note:* This will probably end up being folded into [Nixery][].
 
@@ -33,7 +32,6 @@ Given a program layout like this:
 ├── lib          <-- some library component
 │   ├── bar.go
 │   └── foo.go
-├── api.proto    <-- gRPC API definition
 ├── main.go      <-- program implementation
 └── default.nix  <-- build instructions
 ```
@@ -44,11 +42,6 @@ The contents of `default.nix` could look like this:
 { buildGo }:
 
 let
-  api = buildGo.grpc {
-    name  = "someapi";
-    proto = ./api.proto;
-  };
-
   lib = buildGo.package {
     name = "somelib";
     srcs = [
@@ -58,7 +51,7 @@ let
   };
 in buildGo.program {
   name = "my-program";
-  deps = [ api lib ];
+  deps = [ lib ];
 
   srcs = [
     ./main.go
@@ -105,22 +98,6 @@ in buildGo.program {
   | `src`     | `path`         | Path to the source **directory**              | yes       |
   | `deps`    | `list<drv>`    | List of dependencies (i.e. other Go packages) | no        |
 
-  For some examples of how `buildGo.external` is used, check out
-  [`proto.nix`](./proto.nix).
-
-* `buildGo.proto`: Build a Go library out of the specified Protobuf definition.
-
-  | parameter   | type        | use                                              | required? |
-  |-------------|-------------|--------------------------------------------------|-----------|
-  | `name`      | `string`    | Name for the resulting library                   | yes       |
-  | `proto`     | `path`      | Path to the Protobuf definition file             | yes       |
-  | `path`      | `string`    | Import path for the resulting Go library         | no        |
-  | `extraDeps` | `list<drv>` | Additional Go dependencies to add to the library | no        |
-
-* `buildGo.grpc`: Build a Go library out of the specified gRPC definition.
-
-  The parameters are identical to `buildGo.proto`.
-
 ## Current status
 
 This project is work-in-progress. Crucially it is lacking the following features:
diff --git a/nix/buildGo/default.nix b/nix/buildGo/default.nix
index 9999d1423e..c93642a127 100644
--- a/nix/buildGo/default.nix
+++ b/nix/buildGo/default.nix
@@ -4,8 +4,9 @@
 # buildGo provides Nix functions to build Go packages in the style of Bazel's
 # rules_go.
 
-{ pkgs ? import <nixpkgs> {}
-, ... }:
+{ pkgs ? import <nixpkgs> { }
+, ...
+}:
 
 let
   inherit (builtins)
@@ -21,7 +22,8 @@ let
     replaceStrings
     toString;
 
-  inherit (pkgs) lib go runCommand fetchFromGitHub protobuf symlinkJoin;
+  inherit (pkgs) lib runCommand fetchFromGitHub protobuf symlinkJoin go;
+  goStdlib = buildStdlib go;
 
   # Helpers for low-level Go compiler invocations
   spaceOut = lib.concatStringsSep " ";
@@ -40,8 +42,6 @@ let
 
   xFlags = x_defs: spaceOut (map (k: "-X ${k}=${x_defs."${k}"}") (attrNames x_defs));
 
-  pathToName = p: replaceStrings ["/"] ["_"] (toString p);
-
   # Add an `overrideGo` attribute to a function result that works
   # similar to `overrideAttrs`, but is used specifically for the
   # arguments passed to Go builders.
@@ -49,52 +49,92 @@ let
     overrideGo = new: makeOverridable f (orig // (new orig));
   };
 
+  buildStdlib = go: runCommand "go-stdlib-${go.version}"
+    {
+      nativeBuildInputs = [ go ];
+    } ''
+    HOME=$NIX_BUILD_TOP/home
+    mkdir $HOME
+
+    goroot="$(go env GOROOT)"
+    cp -R "$goroot/src" "$goroot/pkg" .
+
+    chmod -R +w .
+    GODEBUG=installgoroot=all GOROOT=$NIX_BUILD_TOP go install -v --trimpath std
+
+    mkdir $out
+    cp -r pkg/*_*/* $out
+
+    find $out -name '*.a' | while read -r ARCHIVE_FULL; do
+      ARCHIVE="''${ARCHIVE_FULL#"$out/"}"
+      PACKAGE="''${ARCHIVE%.a}"
+      echo "packagefile $PACKAGE=$ARCHIVE_FULL"
+    done > $out/importcfg
+  '';
+
+  importcfgCmd = { name, deps, out ? "importcfg" }: ''
+    echo "# nix buildGo ${name}" > "${out}"
+    cat "${goStdlib}/importcfg" >> "${out}"
+    ${lib.concatStringsSep "\n" (map (dep: ''
+      find "${dep}" -name '*.a' | while read -r pkgp; do
+        relpath="''${pkgp#"${dep}/"}"
+        pkgname="''${relpath%.a}"
+        echo "packagefile $pkgname=$pkgp"
+      done >> "${out}"
+    '') deps)}
+  '';
+
   # High-level build functions
 
   # Build a Go program out of the specified files and dependencies.
-  program = { name, srcs, deps ? [], x_defs ? {} }:
-  let uniqueDeps = allDeps (map (d: d.gopkg) deps);
-  in runCommand name {} ''
-    ${go}/bin/go tool compile -o ${name}.a -trimpath=$PWD -trimpath=${go} ${includeSources uniqueDeps} ${spaceOut srcs}
-    mkdir -p $out/bin
-    export GOROOT_FINAL=go
-    ${go}/bin/go tool link -o $out/bin/${name} -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a
-  '';
+  program = { name, srcs, deps ? [ ], x_defs ? { } }:
+    let uniqueDeps = allDeps (map (d: d.gopkg) deps);
+    in runCommand name { } ''
+      ${importcfgCmd { inherit name; deps = uniqueDeps; }}
+      ${go}/bin/go tool compile -o ${name}.a -importcfg=importcfg -trimpath=$PWD -trimpath=${go} -p main ${includeSources uniqueDeps} ${spaceOut srcs}
+      mkdir -p $out/bin
+      export GOROOT_FINAL=go
+      ${go}/bin/go tool link -o $out/bin/${name} -importcfg=importcfg -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a
+    '';
 
   # Build a Go library assembled out of the specified files.
   #
   # This outputs both the sources and compiled binary, as both are
   # needed when downstream packages depend on it.
-  package = { name, srcs, deps ? [], path ? name, sfiles ? [] }:
-  let
-    uniqueDeps = allDeps (map (d: d.gopkg) deps);
-
-    # The build steps below need to be executed conditionally for Go
-    # assembly if the analyser detected any *.s files.
-    #
-    # This is required for several popular packages (e.g. x/sys).
-    ifAsm = do: lib.optionalString (sfiles != []) do;
-    asmBuild = ifAsm ''
-      ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles}
-      ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles}
-    '';
-    asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h";
-    asmPack = ifAsm ''
-      ${go}/bin/go tool pack r $out/${path}.a ./asm.o
-    '';
-
-    gopkg = (runCommand "golib-${name}" {} ''
-      mkdir -p $out/${path}
-      ${srcList path (map (s: "${s}") srcs)}
-      ${asmBuild}
-      ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs}
-      ${asmPack}
-    '') // {
-      inherit gopkg;
-      goDeps = uniqueDeps;
-      goImportPath = path;
-    };
-  in gopkg;
+  package = { name, srcs, deps ? [ ], path ? name, sfiles ? [ ] }:
+    let
+      uniqueDeps = allDeps (map (d: d.gopkg) deps);
+
+      # The build steps below need to be executed conditionally for Go
+      # assembly if the analyser detected any *.s files.
+      #
+      # This is required for several popular packages (e.g. x/sys).
+      ifAsm = do: lib.optionalString (sfiles != [ ]) do;
+      asmBuild = ifAsm ''
+        ${go}/bin/go tool asm -p ${path} -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles}
+        ${go}/bin/go tool asm -p ${path} -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles}
+      '';
+      asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h";
+      asmPack = ifAsm ''
+        ${go}/bin/go tool pack r $out/${path}.a ./asm.o
+      '';
+
+      gopkg = (runCommand "golib-${name}" { } ''
+        mkdir -p $out/${path}
+        ${srcList path (map (s: "${s}") srcs)}
+        ${asmBuild}
+        ${importcfgCmd { inherit name; deps = uniqueDeps; }}
+        ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -importcfg=importcfg -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs}
+        ${asmPack}
+      '').overrideAttrs (_: {
+        passthru = {
+          inherit gopkg;
+          goDeps = uniqueDeps;
+          goImportPath = path;
+        };
+      });
+    in
+    gopkg;
 
   # Build a tree of Go libraries out of an external Go source
   # directory that follows the standard Go layout and was not built
@@ -104,32 +144,14 @@ let
   # named "gopkg", and an attribute named "gobin" for binaries.
   external = import ./external { inherit pkgs program package; };
 
-  # Import support libraries needed for protobuf & gRPC support
-  protoLibs = import ./proto.nix {
-    inherit external;
-  };
-
-  # Build a Go library out of the specified protobuf definition.
-  proto = { name, proto, path ? name, extraDeps ? [] }: (makeOverridable package) {
-    inherit name path;
-    deps = [ protoLibs.goProto.proto.gopkg ] ++ extraDeps;
-    srcs = lib.singleton (runCommand "goproto-${name}.pb.go" {} ''
-      cp ${proto} ${baseNameOf proto}
-      ${protobuf}/bin/protoc --plugin=${protoLibs.goProto.protoc-gen-go.gopkg}/bin/protoc-gen-go \
-        --go_out=plugins=grpc,import_path=${baseNameOf path}:. ${baseNameOf proto}
-      mv *.pb.go $out
-    '');
-  };
-
-  # Build a Go library out of the specified gRPC definition.
-  grpc = args: proto (args // { extraDeps = [ protoLibs.goGrpc.gopkg ]; });
-
-in {
+in
+{
   # Only the high-level builder functions are exposed, but made
   # overrideable.
   program = makeOverridable program;
   package = makeOverridable package;
-  proto = makeOverridable proto;
-  grpc = makeOverridable grpc;
   external = makeOverridable external;
+
+  # re-expose the Go version used
+  inherit go;
 }
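A short sketch of the `overrideGo` mechanism provided by `makeOverridable` above; `someProgram` and the x_def key are hypothetical:

```nix
# Sketch only: re-invoke the buildGo.program builder with its original
# arguments merged with the overrides returned here.
someProgram.overrideGo (old: {
  x_defs = (old.x_defs or { }) // {
    "main.Version" = "v1.2.3";
  };
})
```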
diff --git a/nix/buildGo/example/default.nix b/nix/buildGo/example/default.nix
index 99c0a7d79b..6756bf39e2 100644
--- a/nix/buildGo/example/default.nix
+++ b/nix/buildGo/example/default.nix
@@ -8,7 +8,7 @@
 # users a quick introduction to how to use buildGo.
 
 let
-  buildGo = import ../default.nix {};
+  buildGo = import ../default.nix { };
 
   # Example use of buildGo.package, which creates an importable Go
   # package from the specified source files.
@@ -19,17 +19,11 @@ let
     ];
   };
 
-  # Example use of buildGo.proto, which generates a Go library from a
-  # Protobuf definition file.
-  exampleProto = buildGo.proto {
-    name = "exampleproto";
-    proto = ./thing.proto;
-  };
-
   # Example use of buildGo.program, which builds an executable using
   # the specified name and dependencies (which in turn must have been
   # created via buildGo.package etc.)
-in buildGo.program {
+in
+buildGo.program {
   name = "example";
 
   srcs = [
@@ -38,7 +32,6 @@ in buildGo.program {
 
   deps = [
     examplePackage
-    exampleProto
   ];
 
   x_defs = {
diff --git a/nix/buildGo/example/thing.proto b/nix/buildGo/example/thing.proto
deleted file mode 100644
index 0f6d6575e0..0000000000
--- a/nix/buildGo/example/thing.proto
+++ /dev/null
@@ -1,10 +0,0 @@
-// Copyright 2019 Google LLC.
-// SPDX-License-Identifier: Apache-2.0
-
-syntax = "proto3";
-package example;
-
-message Thing {
-  string id = 1;
-  string kind_of_thing = 2;
-}
diff --git a/nix/buildGo/external/default.nix b/nix/buildGo/external/default.nix
index 6f31fbe5ca..42592c67e4 100644
--- a/nix/buildGo/external/default.nix
+++ b/nix/buildGo/external/default.nix
@@ -13,19 +13,20 @@ let
     readFile
     replaceStrings
     tail
+    unsafeDiscardStringContext
     throw;
 
   inherit (pkgs) lib runCommand go jq ripgrep;
 
-  pathToName = p: replaceStrings ["/"] ["_"] (toString p);
+  pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p);
 
   # Collect all non-vendored dependencies from the Go standard library
   # into a file that can be used to filter them out when processing
   # dependencies.
-  stdlibPackages = runCommand "stdlib-pkgs.json" {} ''
+  stdlibPackages = runCommand "stdlib-pkgs.json" { } ''
     export HOME=$PWD
     export GOPATH=/dev/null
-    ${go}/bin/go list all | \
+    ${go}/bin/go list std | \
       ${ripgrep}/bin/rg -v 'vendor' | \
       ${jq}/bin/jq -R '.' | \
       ${jq}/bin/jq -c -s 'map({key: ., value: true}) | from_entries' \
@@ -45,20 +46,28 @@ let
   };
 
   mkset = path: value:
-    if path == [] then { gopkg = value; }
+    if path == [ ] then { gopkg = value; }
     else { "${head path}" = mkset (tail path) value; };
 
   last = l: elemAt l ((length l) - 1);
 
   toPackage = self: src: path: depMap: entry:
     let
-      localDeps = map (d: lib.attrByPath (d ++ [ "gopkg" ]) (
-        throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'"
-      ) self) entry.localDeps;
-
-      foreignDeps = map (d: lib.attrByPath [ d ] (
-        throw "missing foreign dependency '${d}' in '${path}'"
-      ) depMap) entry.foreignDeps;
+      localDeps = map
+        (d: lib.attrByPath (d ++ [ "gopkg" ])
+          (
+            throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'"
+          )
+          self)
+        entry.localDeps;
+
+      foreignDeps = map
+        (d: lib.attrByPath [ d.path ]
+          (
+            throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'"
+          )
+          depMap)
+        entry.foreignDeps;
 
       args = {
         srcs = map (f: src + ("/" + f)) entry.files;
@@ -74,22 +83,30 @@ let
       binArgs = args // {
         name = (last ((lib.splitString "/" path) ++ entry.locator));
       };
-    in if entry.isCommand then (program binArgs) else (package libArgs);
+    in
+    if entry.isCommand then (program binArgs) else (package libArgs);
 
-in { src, path, deps ? [] }: let
+in
+{ src, path, deps ? [ ] }:
+let
   # Build a map of dependencies (from their import paths to their
   # derivation) so that they can be conditionally imported only in
   # sub-packages that require them.
-  depMap = listToAttrs (map (d: {
-    name = d.goImportPath;
-    value = d;
-  }) (map (d: d.gopkg) deps));
+  depMap = listToAttrs (map
+    (d: {
+      name = d.goImportPath;
+      value = d;
+    })
+    (map (d: d.gopkg) deps));
 
   name = pathToName path;
-  analysisOutput = runCommand "${name}-structure.json" {} ''
+  analysisOutput = runCommand "${name}-structure.json" { } ''
     ${analyser}/bin/analyser -path ${path} -source ${src} > $out
   '';
-  analysis = fromJSON (readFile analysisOutput);
-in lib.fix(self: foldl' lib.recursiveUpdate {} (
+  # readFile adds the references of the read in file to the string context for
+  # Nix >= 2.6 which would break the attribute set construction in fromJSON
+  analysis = fromJSON (unsafeDiscardStringContext (readFile analysisOutput));
+in
+lib.fix (self: foldl' lib.recursiveUpdate { } (
   map (entry: mkset entry.locator (toPackage self src path depMap entry)) analysis
 ))
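A minimal usage sketch for the `{ src, path, deps }` interface of `buildGo.external` shown above; the import path and source directory are hypothetical:

```nix
{ buildGo }:

# Sketch only: expose a vendored third-party Go library. Sub-packages of the
# analysed source tree become available as attributes on the returned set.
buildGo.external {
  path = "github.com/example/somelib";
  src = ./third_party/somelib;
  deps = [ ];
}
```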
diff --git a/nix/buildGo/external/main.go b/nix/buildGo/external/main.go
index 50c6c85895..4402a8eb86 100644
--- a/nix/buildGo/external/main.go
+++ b/nix/buildGo/external/main.go
@@ -10,7 +10,6 @@ import (
 	"flag"
 	"fmt"
 	"go/build"
-	"io/ioutil"
 	"log"
 	"os"
 	"path"
@@ -29,13 +28,19 @@ var stdlibList string
 // Return information includes the local (relative from project root)
 // and external (none-stdlib) dependencies of this package.
 type pkg struct {
-	Name        string     `json:"name"`
-	Locator     []string   `json:"locator"`
-	Files       []string   `json:"files"`
-	SFiles      []string   `json:"sfiles"`
-	LocalDeps   [][]string `json:"localDeps"`
-	ForeignDeps []string   `json:"foreignDeps"`
-	IsCommand   bool       `json:"isCommand"`
+	Name        string       `json:"name"`
+	Locator     []string     `json:"locator"`
+	Files       []string     `json:"files"`
+	SFiles      []string     `json:"sfiles"`
+	LocalDeps   [][]string   `json:"localDeps"`
+	ForeignDeps []foreignDep `json:"foreignDeps"`
+	IsCommand   bool         `json:"isCommand"`
+}
+
+type foreignDep struct {
+	Path string `json:"path"`
+	// filename, column and line number of the import, if known
+	Position string `json:"position"`
 }
 
 // findGoDirs returns a filepath.WalkFunc that identifies all
@@ -68,8 +73,8 @@ func findGoDirs(at string) ([]string, error) {
 	}
 
 	goDirs := []string{}
-	for k, _ := range dirSet {
-		goDirs = append(goDirs, k)
+	for goDir := range dirSet {
+		goDirs = append(goDirs, goDir)
 	}
 
 	return goDirs, nil
@@ -88,7 +93,7 @@ func analysePackage(root, source, importpath string, stdlib map[string]bool) (pk
 	}
 
 	local := [][]string{}
-	foreign := []string{}
+	foreign := []foreignDep{}
 
 	for _, i := range p.Imports {
 		if stdlib[i] {
@@ -100,7 +105,12 @@ func analysePackage(root, source, importpath string, stdlib map[string]bool) (pk
 		} else if strings.HasPrefix(i, importpath+"/") {
 			local = append(local, strings.Split(strings.TrimPrefix(i, importpath+"/"), "/"))
 		} else {
-			foreign = append(foreign, i)
+			// The import positions is a map keyed on the import name.
+			// The value is a list, presumably because an import can appear
+			// multiple times in a package. Let’s just take the first one,
+			// should be enough for a good error message.
+			firstPos := p.ImportPos[i][0].String()
+			foreign = append(foreign, foreignDep{Path: i, Position: firstPos})
 		}
 	}
 
@@ -137,7 +147,7 @@ func analysePackage(root, source, importpath string, stdlib map[string]bool) (pk
 }
 
 func loadStdlibPkgs(from string) (pkgs map[string]bool, err error) {
-	f, err := ioutil.ReadFile(from)
+	f, err := os.ReadFile(from)
 	if err != nil {
 		return
 	}
diff --git a/nix/buildGo/proto.nix b/nix/buildGo/proto.nix
deleted file mode 100644
index 57e9a5ec98..0000000000
--- a/nix/buildGo/proto.nix
+++ /dev/null
@@ -1,84 +0,0 @@
-# Copyright 2019 Google LLC.
-# SPDX-License-Identifier: Apache-2.0
-#
-# This file provides derivations for the dependencies of a gRPC
-# service in Go.
-
-{ external }:
-
-let
-  inherit (builtins) fetchGit map;
-in rec {
-  goProto = external {
-    path = "github.com/golang/protobuf";
-    src = fetchGit {
-      url = "https://github.com/golang/protobuf";
-      rev = "ed6926b37a637426117ccab59282c3839528a700";
-    };
-  };
-
-  xnet = external {
-    path = "golang.org/x/net";
-
-    src = fetchGit {
-      url = "https://go.googlesource.com/net";
-      rev = "ffdde105785063a81acd95bdf89ea53f6e0aac2d";
-    };
-
-    deps = [
-      xtext.secure.bidirule
-      xtext.unicode.bidi
-      xtext.unicode.norm
-    ];
-  };
-
-  xsys = external {
-    path = "golang.org/x/sys";
-    src = fetchGit {
-      url = "https://go.googlesource.com/sys";
-      rev = "bd437916bb0eb726b873ee8e9b2dcf212d32e2fd";
-    };
-  };
-
-  xtext = external {
-    path = "golang.org/x/text";
-    src = fetchGit {
-      url = "https://go.googlesource.com/text";
-      rev = "cbf43d21aaebfdfeb81d91a5f444d13a3046e686";
-    };
-  };
-
-  genproto = external {
-    path = "google.golang.org/genproto";
-    src = fetchGit {
-      url = "https://github.com/google/go-genproto";
-      rev = "83cc0476cb11ea0da33dacd4c6354ab192de6fe6";
-    };
-
-    deps = with goProto; [
-      proto
-      ptypes.any
-    ];
-  };
-
-  goGrpc = external {
-    path = "google.golang.org/grpc";
-    deps = ([
-      xnet.trace
-      xnet.http2
-      xsys.unix
-      xnet.http2.hpack
-      genproto.googleapis.rpc.status
-    ] ++ (with goProto; [
-      proto
-      ptypes
-      ptypes.duration
-      ptypes.timestamp
-    ]));
-
-    src = fetchGit {
-      url = "https://github.com/grpc/grpc-go";
-      rev = "d8e3da36ac481ef00e510ca119f6b68177713689";
-    };
-  };
-}
diff --git a/nix/buildLisp/README.md b/nix/buildLisp/README.md
index 452f495deb..0d1e469834 100644
--- a/nix/buildLisp/README.md
+++ b/nix/buildLisp/README.md
@@ -6,18 +6,24 @@ This is a build system for Common Lisp, written in Nix.
 It aims to offer an alternative to ASDF for users who live in a
 Nix-based ecosystem. This offers several advantages over ASDF:
 
-* Simpler (logic-less) package definitions
+* Simpler (almost logic-less) package definitions
 * Easy linking of native dependencies (from Nix)
 * Composability with Nix tooling for other languages
 * Effective, per-system caching strategies
 * Easy overriding of dependencies and whatnot
+* Convenient support for multiple Common Lisp implementations
 * ... and more!
 
 The project is still in its early stages and some important
 restrictions should be highlighted:
 
-* Only SBCL is supported (though the plan is to add support for at
-  least ABCL and Clozure CL, and maybe make it extensible)
+* Extending `buildLisp` with support for a custom implementation
+  currently requires some knowledge of internals and may not be
+  considered stable yet.
+* Parallel compilation is not possible: Since buildLisp doesn't encode
+  dependencies between components (i.e. source files) like ASDF,
+  it must compile source files in sequence to avoid errors due to
+  undefined symbols.
 
 ## Usage
 
@@ -32,6 +38,7 @@ restrictions should be highlighted:
   | `deps`    | `list<drv>`  | List of dependencies          | no        |
   | `native`  | `list<drv>`  | List of native dependencies   | no        |
   | `test`    | see "Tests"  | Specification for test suite  | no        |
+  | `implementation` | see "Implementations" | Common Lisp implementation to use | no |
 
   The output of invoking this is a directory containing a FASL file
   that is the concatenated result of all compiled sources.
@@ -46,6 +53,7 @@ restrictions should be highlighted:
   | `native`  | `list<drv>`  | List of native dependencies   | no        |
   | `main`    | `string`     | Entrypoint function           | no        |
   | `test`    | see "Tests"  | Specification for test suite  | no        |
+  | `implementation` | see "Implementations" | Common Lisp implementation to use | no |
 
   The `main` parameter should be the name of a function and defaults
   to `${name}:main` (i.e. the *exported* `main` function of the
@@ -64,13 +72,6 @@ restrictions should be highlighted:
   built-in library and returns a "package" that simply requires this
   library.
 
-* `buildLisp.sbclWith`: Creates an SBCL pre-loaded with various dependencies.
-
-  This function takes a single argument which is a list of Lisp
-  libraries programs or programs. It creates an SBCL that is
-  pre-loaded with all of that Lisp code and can be used as the host
-  for e.g. Sly or SLIME.
-
 ## Tests
 
 Both `buildLisp.library` and `buildLisp.program` take an optional argument
@@ -115,3 +116,139 @@ in buildLisp.program {
     };
 }
 ```
+
+## Development REPLs
+
+`buildLisp` builds loadable variants of both `program` and `library` derivations
+(usually FASL files). Therefore it can provide a convenient way to obtain an
+instance of any implementation preloaded with `buildLisp`-derivations. This
+is especially useful to use as a host for Sly or SLIME.
+
+* `buildLisp.sbcl.lispWith`, `buildLisp.ccl.lispWith`, ...:
+  Creates a wrapper script preloading a Lisp implementation with various dependencies.
+
+  This function takes a single argument which is a list of Lisp
+  libraries or programs. The desired Lisp implementation
+  will load all given derivations and all their dependencies on
+  startup.
+
+  The shortcut `buildLisp.sbclWith` for `buildLisp.sbcl.lispWith` is also provided.
+
+* `repl` passthru attribute: `derivation.repl` is provided as a shortcut
+  for `buildLisp.${implementationName}.lispWith [ derivation ]`.
+  `derivation.ccl.repl`, `derivation.sbcl.repl` etc. work as well, of course
+  (see also "Implementations" section).
+
+## Implementations
+
+Both `buildLisp.library` and `buildLisp.program` allow specifying a different
+Common Lisp implementation than the default one (which is SBCL). When an
+implementation is passed, `buildLisp` makes sure all dependencies are built
+with that implementation as well since build artifacts from different
+implementations will be incompatible with each other.
+
+The argument taken by `implementation` is a special attribute set which
+describes how to do certain tasks for a given implementation, like building
+or loading a library. In case you want to use a custom implementation
+description, the precise structure needed is documented in `buildLisp`'s
+source code for now. `buildLisp` also exposes the following already
+working implementation sets:
+
+* `buildLisp.sbcl`: [SBCL][sbcl], our default implementation
+
+* `buildLisp.ccl`: [CCL][ccl], similar to SBCL, but with very good macOS support
+
+* `buildLisp.ecl`: [ECL][ecl] setup to produce statically linked binaries and
+  libraries. Note that its runtime library is LGPL, so [extra conditions][lgpl-static]
+  must be fulfilled when distributing binaries produced this way.
+
+* Support for ABCL is planned.
+
+For each of these “known” implementations, `buildLisp` will create a `passthru`
+attribute named like the implementation which points to a variant of the derivation
+built with said implementation. Say we have a derivation, `myDrv`, built using SBCL:
+While `myDrv` and `myDrv.sbcl` are built using SBCL, `myDrv.ecl`, `myDrv.ccl` etc.
+build the derivation and all its dependencies using ECL and CCL respectively.
+
+This is useful to test portability of your derivation, but is also used internally
+to speed up the “normalization” of the dependency graph. Thus it is important to
+make sure that your custom implementation's name doesn't clash with one of the
+“known” ones.
+
+## Handling Implementation Specifics
+
+When targeting multiple Common Lisp implementations, it is often necessary to
+handle differing interfaces for OS interaction or to make use of special
+implementation features. For this reason, `buildLisp` allows specifying
+dependencies and source files for specific implementations only. This can
+be utilized by having an attribute set in the list for the `deps` or `srcs`
+argument: `buildLisp` will pick the value of the attribute named like the
+used implementation or `default` and ignore the set completely if both
+are missing.
+
+```nix
+{ buildLisp, lispPkgs }:
+
+buildLisp.library {
+  name = "mylib";
+
+  srcs = [
+    # These are included always of course
+    ./package.lisp
+    ./portable-lib.lisp
+
+    # Choose right impl-* file
+    {
+      sbcl = ./impl-sbcl.lisp;
+      ccl = ./impl-ccl.lisp;
+      ecl = ./impl-ecl.lisp;
+    }
+
+    # We can also use this to inject extra files
+    { ecl = ./extra-ecl-optimizations.lisp; }
+  ];
+
+  deps = [
+    # Use SBCL's special bundled package, flexi-streams otherwise
+    {
+      sbcl = buildLisp.bundled "sb-rotate-byte";
+      default = lispPkgs.flexi-streams;
+    }
+  ];
+}
+```
+
+Additionally a `brokenOn` parameter is accepted which takes a list of
+implementation names on which the derivation is not expected to work.
+This only influences `meta.ci.targets` which is read by depot's CI to
+check which variants (see "Implementations") of the derivation to
+build, so it may not be useful outside of depot.
+
+## Influencing the Lisp Runtime
+
+Lisp implementations which create an executable by dumping an image
+usually parse a few implementation-specific command line options on
+executable startup that influence runtime settings related to things
+like GC. `buildLisp` generates a wrapper which makes sure that this
+never interferes with the argument parsing implemented in the actual
+application, but sometimes it is useful to run an executable with
+special settings. To allow this, the content of `NIX_BUILDLISP_LISP_ARGS`
+is passed to the lisp implementation.
+
+For example, you can make the underlying SBCL print its version for
+any executable built with `buildLisp` (and SBCL) like this:
+
+```console
+$ env NIX_BUILDLISP_LISP_ARGS="--version" ./result/bin/🕰️
+SBCL 2.1.2.nixos
+```
+
+In practice you'd probably want to specify options like
+`--dynamic-space-size` or `--tls-limit` (try passing `--help` for a
+full list). Naturally, these options are completely different for
+different implementations.
+
+[sbcl]: http://www.sbcl.org/
+[ccl]: https://ccl.clozure.com/
+[ecl]: https://common-lisp.net/project/ecl/
+[lgpl-static]: https://www.gnu.org/licenses/gpl-faq.en.html#LGPLStaticVsDynamic
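A compact sketch tying the sections above together; the library name, source file and `brokenOn` entry are hypothetical:

```nix
let
  buildLisp = import ./default.nix { };

  # Sketch only: a library that is assumed not to build on ECL.
  mylib = buildLisp.library {
    name = "mylib";
    srcs = [ ./mylib.lisp ];
    brokenOn = [ "ecl" ];
  };
in
{
  # The same library built with CCL instead (see "Implementations").
  mylib-ccl = mylib.ccl;

  # SBCL preloaded with mylib, usable as a host for Sly or SLIME
  # (see "Development REPLs"); equivalent to mylib.repl here.
  repl = buildLisp.sbcl.lispWith [ mylib ];
}
```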
diff --git a/nix/buildLisp/default.nix b/nix/buildLisp/default.nix
index 98d6260870..0d68a2818b 100644
--- a/nix/buildLisp/default.nix
+++ b/nix/buildLisp/default.nix
@@ -4,67 +4,95 @@
 # buildLisp is designed to enforce conventions and do away with the
 # free-for-all of existing Lisp build systems.
 
-{ pkgs ? import <nixpkgs> {}, ... }:
+{ pkgs ? import <nixpkgs> { }, ... }:
 
 let
   inherit (builtins) map elemAt match filter;
-  inherit (pkgs) lib runCommandNoCC makeWrapper writeText writeShellScriptBin sbcl;
+  inherit (pkgs) lib runCommand makeWrapper writeText writeShellScriptBin sbcl ecl-static ccl;
+  inherit (pkgs.stdenv) targetPlatform;
 
   #
   # Internal helper definitions
   #
 
-  # 'genLoadLisp' generates Lisp code that instructs SBCL to load all
-  # the provided Lisp libraries.
-  genLoadLisp = deps: lib.concatStringsSep "\n"
-    (map (lib: "(load \"${lib}/${lib.lispName}.fasl\")") (allDeps deps));
-
-  # 'genCompileLisp' generates a Lisp file that instructs SBCL to
-  # compile the provided list of Lisp source files to $out.
-  genCompileLisp = srcs: deps: writeText "compile.lisp" ''
-    ;; This file compiles the specified sources into the Nix build
-    ;; directory, creating one FASL file for each source.
-    (require 'sb-posix)
-
-    ${genLoadLisp deps}
-
-    (defun nix-compile-lisp (file srcfile)
-      (let ((outfile (make-pathname :type "fasl"
-                                    :directory (or (sb-posix:getenv "NIX_BUILD_TOP")
-                                                   (error "not running in a Nix build"))
-                                    :name (substitute #\- #\/ srcfile))))
-        (multiple-value-bind (_outfile _warnings-p failure-p)
-            (compile-file srcfile :output-file outfile)
-          (if failure-p (sb-posix:exit 1)
-              (progn
-                ;; For the case of multiple files belonging to the same
-                ;; library being compiled, load them in order:
-                (load outfile)
-
-                ;; Write them to the FASL list in the same order:
-                (format file "cat ~a~%" (namestring outfile)))))))
-
-    (let ((*compile-verbose* t)
-          ;; FASL files are compiled into the working directory of the
-          ;; build and *then* moved to the correct out location.
-          (pwd (sb-posix:getcwd)))
-
-      (with-open-file (file "cat_fasls"
-                            :direction :output
-                            :if-does-not-exist :create)
-
-        ;; These forms were inserted by the Nix build:
-        ${
-          lib.concatStringsSep "\n" (map (src: "(nix-compile-lisp file \"${src}\")") srcs)
-        }
-        ))
+  defaultImplementation = impls.sbcl;
+
+  # Many Common Lisp implementations (like ECL and CCL) will occasionally drop
+  # you into an interactive debugger even when executing something as a script.
+  # In nix builds we don't want such a situation: Any error should make the
+  # script exit non-zero. Luckily the ANSI standard specifies *debugger-hook*
+  # which is invoked before the debugger letting us just do that.
+  disableDebugger = writeText "disable-debugger.lisp" ''
+    (setf *debugger-hook*
+          (lambda (error hook)
+            (declare (ignore hook))
+            (format *error-output* "~%Unhandled error: ~a~%" error)
+            #+ccl (quit 1)
+            #+ecl (ext:quit 1)))
   '';
 
-  # 'genTestLisp' generates a Lisp file that loads all sources and deps and
-  # executes expression
-  genTestLisp = name: srcs: deps: expression: writeText "${name}.lisp" ''
+  # Process a list of arbitrary values which also contains “implementation
+  # filter sets” which describe conditional inclusion of elements depending
+  # on the CL implementation used. Elements are processed in the following
+  # manner:
+  #
+  # * Paths, strings, derivations are left as is
+  # * A non-derivation attribute set is processed like this:
+  #   1. If it has an attribute equal to impl.name, replace with its value.
+  #   2. Alternatively use the value of the "default" attribute.
+  #   3. In all other cases delete the element from the list.
+  #
+  # This can be used to express dependencies or source files which are specific
+  # to certain implementations:
+  #
+  #  srcs = [
+  #    # mixable with unconditional entries
+  #    ./package.lisp
+  #
+  #    # implementation specific source files
+  #    {
+  #      ccl = ./impl-ccl.lisp;
+  #      sbcl = ./impl-sbcl.lisp;
+  #      ecl = ./impl-ecl.lisp;
+  #    }
+  #  ];
+  #
+  #  deps = [
+  #    # this dependency is ignored if impl.name != "sbcl"
+  #    { sbcl = buildLisp.bundled "sb-posix"; }
+  #
+  #    # only special casing for a single implementation
+  #    {
+  #      sbcl = buildLisp.bundled "uiop";
+  #      default = buildLisp.bundled "asdf";
+  #    }
+  #  ];
+  implFilter = impl: xs:
+    let
+      isFilterSet = x: builtins.isAttrs x && !(lib.isDerivation x);
+    in
+    builtins.map
+      (
+        x: if isFilterSet x then x.${impl.name} or x.default else x
+      )
+      (builtins.filter
+        (
+          x: !(isFilterSet x) || x ? ${impl.name} || x ? default
+        )
+        xs);
+
+  # Generates lisp code which instructs the given lisp implementation to load
+  # all the given dependencies.
+  genLoadLispGeneric = impl: deps:
+    lib.concatStringsSep "\n"
+      (map (lib: "(load \"${lib}/${lib.lispName}.${impl.faslExt}\")")
+        (allDeps impl deps));
+
+  # 'genTestLispGeneric' generates a Lisp file that loads all sources and deps
+  # and executes expression for a given implementation description.
+  genTestLispGeneric = impl: { name, srcs, deps, expression }: writeText "${name}.lisp" ''
     ;; Dependencies
-    ${genLoadLisp deps}
+    ${impl.genLoadLisp deps}
 
     ;; Sources
     ${lib.concatStringsSep "\n" (map (src: "(load \"${src}\")") srcs)}
@@ -78,10 +106,23 @@ let
   dependsOn = a: b: builtins.elem a b.lispDeps;
 
   # 'allDeps' flattens the list of dependencies (and their
-  # dependencies) into one ordered list of unique deps.
-  allDeps = deps: (lib.toposort dependsOn (lib.unique (
-    lib.flatten (deps ++ (map (d: d.lispDeps) deps))
-  ))).result;
+  # dependencies) into one ordered list of unique deps which
+  # all use the given implementation.
+  allDeps = impl: deps:
+    let
+      # The override _should_ propagate itself recursively, as every derivation
+      # would only expose its actually used dependencies. Use implementation
+      # attribute created by withExtras if present, override in all other cases
+      # (mainly bundled).
+      deps' = builtins.map
+        (dep: dep."${impl.name}" or (dep.overrideLisp (_: {
+          implementation = impl;
+        })))
+        deps;
+    in
+    (lib.toposort dependsOn (lib.unique (
+      lib.flatten (deps' ++ (map (d: d.lispDeps) deps'))
+    ))).result;
 
   # 'allNative' extracts all native dependencies of a dependency list
   # to ensure that library load paths are set correctly during all
@@ -90,26 +131,6 @@ let
     lib.flatten (native ++ (map (d: d.lispNativeDeps) deps))
   );
 
-  # 'genDumpLisp' generates a Lisp file that instructs SBCL to dump
-  # the currently loaded image as an executable to $out/bin/$name.
-  #
-  # TODO(tazjin): Compression is currently unsupported because the
-  # SBCL in nixpkgs is, by default, not compiled with zlib support.
-  genDumpLisp = name: main: deps: writeText "dump.lisp" ''
-    (require 'sb-posix)
-
-    ${genLoadLisp deps}
-
-    (let* ((bindir (concatenate 'string (sb-posix:getenv "out") "/bin"))
-           (outpath (make-pathname :name "${name}"
-                                   :directory bindir)))
-      (save-lisp-and-die outpath
-                         :executable t
-                         :toplevel (function ${main})
-                         :purify t))
-    ;;
-  '';
-
   # Add an `overrideLisp` attribute to a function result that works
   # similar to `overrideAttrs`, but is used specifically for the
   # arguments passed to Lisp builders.
@@ -117,140 +138,641 @@ let
     overrideLisp = new: makeOverridable f (orig // (new orig));
   };
 
+  # This is a wrapper around 'makeOverridable' which performs its
+  # function, but also adds the following additional attributes to the
+  # resulting derivation: a repl attribute which builds a `lispWith`
+  # derivation for the current implementation, and one attribute per
+  # known implementation. So `drv.sbcl` would build the derivation
+  # with SBCL regardless of what was specified in the initial arguments.
+  withExtras = f: args:
+    let
+      drv = (makeOverridable f) args;
+    in
+    lib.fix (self:
+      drv.overrideLisp
+        (old:
+          let
+            implementation = old.implementation or defaultImplementation;
+            brokenOn = old.brokenOn or [ ];
+            targets = lib.subtractLists (brokenOn ++ [ implementation.name ])
+              (builtins.attrNames impls);
+          in
+          {
+            passthru = (old.passthru or { }) // {
+              repl = implementation.lispWith [ self ];
+
+              # meta is done via passthru to minimize rebuilds caused by overriding
+              meta = (old.passthru.meta or { }) // {
+                ci = (old.passthru.meta.ci or { }) // {
+                  inherit targets;
+                };
+              };
+            } // builtins.listToAttrs (builtins.map
+              (impl: {
+                inherit (impl) name;
+                value = self.overrideLisp (_: {
+                  implementation = impl;
+                });
+              })
+              (builtins.attrValues impls));
+          }) // {
+        overrideLisp = new: withExtras f (args // new args);
+      });
+
   # 'testSuite' builds a Common Lisp test suite that loads all of srcs and deps,
   # and then executes expression to check its result
-  testSuite = { name, expression, srcs, deps ? [], native ? [] }:
+  testSuite = { name, expression, srcs, deps ? [ ], native ? [ ], implementation }:
     let
-      lispNativeDeps = allNative native deps;
-      lispDeps = allDeps deps;
-    in runCommandNoCC name {
-      LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
-      LANG = "C.UTF-8";
-    } ''
+      lispDeps = allDeps implementation (implFilter implementation deps);
+      lispNativeDeps = allNative native lispDeps;
+      filteredSrcs = implFilter implementation srcs;
+    in
+    runCommand name
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+      } ''
       echo "Running test suite ${name}"
 
-      ${sbcl}/bin/sbcl --script ${genTestLisp name srcs deps expression} \
-        | tee $out
+      ${implementation.runScript} ${
+        implementation.genTestLisp {
+          inherit name expression;
+          srcs = filteredSrcs;
+          deps = lispDeps;
+        }
+      } | tee $out
 
       echo "Test suite ${name} succeeded"
     '';
 
+  # 'impls' is an attribute set of attribute sets which describe how to do common
+  # tasks when building for different Common Lisp implementations. Each
+  # implementation set has the following members:
+  #
+  # Required members:
+  #
+  # - runScript :: string
+  #   Describes how to invoke the implementation from the shell, so it runs a
+  #   lisp file as a script and exits.
+  # - faslExt :: string
+  #   File extension of the implementations loadable (FASL) files.
+  #   Implementations are free to generate native object files, but with the way
+  #   buildLisp works it is required that we can also 'load' libraries, so
+  #   (additionally) building a FASL or equivalent is required.
+  # - genLoadLisp :: [ dependency ] -> string
+  #   Returns lisp code to 'load' the given dependencies. 'genLoadLispGeneric'
+  #   should work for most dependencies.
+  # - genCompileLisp :: { name, srcs, deps } -> file
+  #   Builds a lisp file which instructs the implementation to build a library
+  #   from the given source files when executed. After running at least
+  #   the file "$out/${name}.${impls.${implementation}.faslExt}" should have
+  #   been created.
+  # - genDumpLisp :: { name, main, deps } -> file
+  #   Builds a lisp file which instructs the implementation to build an
+  #   executable which runs 'main' (and exits) where 'main' is available from
+  #   'deps'. The executable should be created as "$out/bin/${name}", usually
+  #   by dumping the lisp image with the toplevel function replaced.
+  # - wrapProgram :: boolean
+  #   Whether to wrap the resulting binary / image with a wrapper script setting
+  #   `LD_LIBRARY_PATH`.
+  # - genTestLisp :: { name, srcs, deps, expression } -> file
+  #   Builds a lisp file which loads the given 'deps' and 'srcs' files and
+  #   then evaluates 'expression'. Depending on whether 'expression' returns
+  #   true or false, the script must exit with a zero or non-zero exit code.
+  #   'genTestLispGeneric' will work for most implementations.
+  # - lispWith :: [ dependency ] -> drv
+  #   Builds a script (or dumped image) which when executed loads (or has
+  #   loaded) all given dependencies. When built this should create an executable
+  #   at "$out/bin/${implementation}".
+  #
+  # Optional members:
+  #
+  # - bundled :: string -> library
+  #   Allows giving an implementation specific builder for a bundled library.
+  #   This function is used as a replacement for the internal defaultBundled
+  #   function and only needs to support one implementation. The returned derivation
+  #   must behave like one built by 'library' (in particular have the same files
+  #   available in "$out" and the same 'passthru' attributes), but may be built
+  #   completely differently.
+  impls = lib.mapAttrs (name: v: { inherit name; } // v) {
+    sbcl = {
+      runScript = "${sbcl}/bin/sbcl --script";
+      faslExt = "fasl";
+
+      # 'genLoadLisp' generates Lisp code that instructs SBCL to load all
+      # the provided Lisp libraries.
+      genLoadLisp = genLoadLispGeneric impls.sbcl;
+
+      # 'genCompileLisp' generates a Lisp file that instructs SBCL to
+      # compile the provided list of Lisp source files to "$out/${name}.fasl".
+      genCompileLisp = { name, srcs, deps }: writeText "sbcl-compile.lisp" ''
+        ;; This file compiles the specified sources into the Nix build
+        ;; directory, creating one FASL file for each source.
+        (require 'sb-posix)
+
+        ${impls.sbcl.genLoadLisp deps}
+
+        (defun nix-compile-lisp (srcfile)
+          (let ((outfile (make-pathname :type "fasl"
+                                        :directory (or (sb-posix:getenv "NIX_BUILD_TOP")
+                                                       (error "not running in a Nix build"))
+                                        :name (substitute #\- #\/ srcfile))))
+            (multiple-value-bind (out-truename _warnings-p failure-p)
+                (compile-file srcfile :output-file outfile)
+              (if failure-p (sb-posix:exit 1)
+                  (progn
+                    ;; For the case of multiple files belonging to the same
+                    ;; library being compiled, load them in order:
+                    (load out-truename)
+
+                    ;; Return pathname as a string for cat-ting it later
+                    (namestring out-truename))))))
+
+        (let ((*compile-verbose* t)
+              (catted-fasl (make-pathname :type "fasl"
+                                          :directory (or (sb-posix:getenv "out")
+                                                         (error "not running in a Nix build"))
+                                          :name "${name}")))
+
+          (with-open-file (file catted-fasl
+                                :direction :output
+                                :if-does-not-exist :create)
+
+            ;; SBCL's FASL files can just be bundled together using cat
+            (sb-ext:run-program "cat"
+             (mapcar #'nix-compile-lisp
+              ;; These forms were inserted by the Nix build:
+              '(${
+                lib.concatMapStringsSep "\n" (src: "\"${src}\"") srcs
+              }))
+             :output file :search t)))
+      '';
+
+      # 'genDumpLisp' generates a Lisp file that instructs SBCL to dump
+      # the currently loaded image as an executable to $out/bin/$name.
+      #
+      # TODO(tazjin): Compression is currently unsupported because the
+      # SBCL in nixpkgs is, by default, not compiled with zlib support.
+      genDumpLisp = { name, main, deps }: writeText "sbcl-dump.lisp" ''
+        (require 'sb-posix)
+
+        ${impls.sbcl.genLoadLisp deps}
+
+        (let* ((bindir (concatenate 'string (sb-posix:getenv "out") "/bin"))
+               (outpath (make-pathname :name "${name}"
+                                       :directory bindir)))
+
+          ;; Tell UIOP that argv[0] will refer to running image, not the lisp impl
+          (when (find-package :uiop)
+            (eval `(setq ,(find-symbol "*IMAGE-DUMPED-P*" :uiop) :executable)))
+
+          (save-lisp-and-die outpath
+                             :executable t
+                             :toplevel
+                             (lambda ()
+                               ;; Filter out everything prior to the `--` we
+                               ;; insert in the wrapper to prevent SBCL from
+                               ;; parsing arguments at startup
+                               (setf sb-ext:*posix-argv*
+                                     (delete "--" sb-ext:*posix-argv*
+                                             :test #'string= :count 1))
+                               (${main}))
+                             :purify t))
+      '';
+
+      wrapProgram = true;
+
+      genTestLisp = genTestLispGeneric impls.sbcl;
+
+      lispWith = deps:
+        let lispDeps = filter (d: !d.lispBinary) (allDeps impls.sbcl deps);
+        in writeShellScriptBin "sbcl" ''
+          export LD_LIBRARY_PATH="${lib.makeLibraryPath (allNative [] lispDeps)}"
+          export LANG="C.UTF-8"
+          exec ${sbcl}/bin/sbcl ${
+            lib.optionalString (deps != [])
+              "--load ${writeText "load.lisp" (impls.sbcl.genLoadLisp lispDeps)}"
+          } $@
+        '';
+    };
+    ecl = {
+      runScript = "${ecl-static}/bin/ecl --load ${disableDebugger} --shell";
+      faslExt = "fasc";
+      genLoadLisp = genLoadLispGeneric impls.ecl;
+      genCompileLisp = { name, srcs, deps }: writeText "ecl-compile.lisp" ''
+        ;; This seems to be required to make the 'c' package available
+        ;; early, otherwise ECL tends to fail with a read failure…
+        (ext:install-c-compiler)
+
+        ;; Load dependencies
+        ${impls.ecl.genLoadLisp deps}
+
+        (defun getenv-or-fail (var)
+          (or (ext:getenv var)
+              (error (format nil "Missing expected environment variable ~A" var))))
+
+        (defun nix-compile-file (srcfile &key native)
+          "Compile the given srcfile into a compilation unit in :out-dir using
+          a unique name based on srcfile as the filename which is returned after
+          compilation. If :native is true, create a native object file,
+          otherwise a byte-compile fasc file is built and immediately loaded."
+
+          (let* ((unique-name (substitute #\_ #\/ srcfile))
+                 (out-file (make-pathname :type (if native "o" "fasc")
+                                          :directory (getenv-or-fail "NIX_BUILD_TOP")
+                                          :name unique-name)))
+            (multiple-value-bind (out-truename _warnings-p failure-p)
+                (compile-file srcfile :system-p native
+                                      :load (not native)
+                                      :output-file out-file
+                                      :verbose t :print t)
+              (if failure-p (ext:quit 1) out-truename))))
+
+        (let* ((out-dir (getenv-or-fail "out"))
+               (nix-build-dir (getenv-or-fail "NIX_BUILD_TOP"))
+               (srcs
+                ;; These forms are inserted by the Nix build
+                '(${lib.concatMapStringsSep "\n" (src: "\"${src}\"") srcs})))
+
+          ;; First, we'll byte compile loadable FASL files and load them
+          ;; immediately. Since we are using a statically linked ECL, there's
+          ;; no way to load native objects, so we rely on byte compilation
+          ;; for all our loading — which is crucial in compilation of course.
+          (ext:install-bytecodes-compiler)
+
+          ;; ECL's bytecode FASLs can just be concatenated to create a bundle
+          ;; at least since a recent bugfix which we apply as a patch.
+          ;; See also: https://gitlab.com/embeddable-common-lisp/ecl/-/issues/649
+          (let ((bundle-out (make-pathname :type "fasc" :name "${name}"
+                                           :directory out-dir)))
+
+            (with-open-file (fasc-stream bundle-out :direction :output)
+              (ext:run-program "cat"
+                               (mapcar (lambda (f)
+                                         (namestring
+                                          (nix-compile-file f :native nil)))
+                                       srcs)
+                               :output fasc-stream)))
+
+          (ext:install-c-compiler)
+
+          ;; Build a (natively compiled) static archive (.a) file. We want to
+          ;; use this for (statically) linking an executable later. The bytecode
+          ;; dance is only required because we can't load such archives.
+          (c:build-static-library
+           (make-pathname :type "a" :name "${name}" :directory out-dir)
+           :lisp-files (mapcar (lambda (x)
+                                 (nix-compile-file x :native t))
+                               srcs)))
+      '';
+      genDumpLisp = { name, main, deps }: writeText "ecl-dump.lisp" ''
+        (defun getenv-or-fail (var)
+          (or (ext:getenv var)
+              (error (format nil "Missing expected environment variable ~A" var))))
+
+        ${impls.ecl.genLoadLisp deps}
+
+        ;; makes a 'c' package available that can link executables
+        (ext:install-c-compiler)
+
+        (c:build-program
+         (merge-pathnames (make-pathname :directory '(:relative "bin")
+                                         :name "${name}")
+                          (truename (getenv-or-fail "out")))
+         :epilogue-code `(progn
+                          ;; UIOP doesn't understand ECL, so we need to make it
+                          ;; aware that we are a proper executable, causing it
+                          ;; to handle argument parsing and such properly. Since
+                          ;; this needs to work even when we're not using UIOP,
+                          ;; we need to do some compile-time acrobatics.
+                          ,(when (find-package :uiop)
+                            `(setf ,(find-symbol "*IMAGE-DUMPED-P*" :uiop) :executable))
+                          ;; Run the actual application…
+                          (${main})
+                          ;; … and exit.
+                          (ext:quit))
+         ;; ECL can't remember these from its own build…
+         :ld-flags '("-static")
+         :lisp-files
+         ;; The following forms are inserted by the Nix build
+         '(${
+             lib.concatMapStrings (dep: ''
+               "${dep}/${dep.lispName}.a"
+             '') (allDeps impls.ecl deps)
+           }))
+      '';
+
+      wrapProgram = false;
+
+      genTestLisp = genTestLispGeneric impls.ecl;
+
+      lispWith = deps:
+        let lispDeps = filter (d: !d.lispBinary) (allDeps impls.ecl deps);
+        in writeShellScriptBin "ecl" ''
+          exec ${ecl-static}/bin/ecl ${
+            lib.optionalString (deps != [])
+              "--load ${writeText "load.lisp" (impls.ecl.genLoadLisp lispDeps)}"
+          } "$@"
+        '';
+
+      bundled = name: runCommand "${name}-cllib"
+        {
+          passthru = {
+            lispName = name;
+            lispNativeDeps = [ ];
+            lispDeps = [ ];
+            lispBinary = false;
+            repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ];
+          };
+        } ''
+        mkdir -p "$out"
+        ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/${name}.${impls.ecl.faslExt}" -t "$out"
+        ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/lib${name}.a" "$out/${name}.a"
+      '';
+    };
+    ccl = {
+      # Relatively bespoke wrapper script necessary to make CCL just™ execute
+      # a lisp file as a script.
+      runScript = pkgs.writers.writeBash "ccl" ''
+        # don't print intro message etc.
+        args=("--quiet")
+
+        # makes CCL crash on error instead of entering the debugger
+        args+=("--load" "${disableDebugger}")
+
+        # load files from command line in order
+        for f in "$@"; do
+          args+=("--load" "$f")
+        done
+
+        # Exit if everything was processed successfully
+        args+=("--eval" "(quit)")
+
+        exec ${ccl}/bin/ccl "''${args[@]}"
+      '';
+
+      # See https://ccl.clozure.com/docs/ccl.html#building-definitions
+      faslExt =
+        if targetPlatform.isPower && targetPlatform.is32bit then "pfsl"
+        else if targetPlatform.isPower && targetPlatform.is64bit then "p64fsl"
+        else if targetPlatform.isx86_64 && targetPlatform.isLinux then "lx64fsl"
+        else if targetPlatform.isx86_32 && targetPlatform.isLinux then "lx32fsl"
+        else if targetPlatform.isAarch32 && targetPlatform.isLinux then "lafsl"
+        else if targetPlatform.isx86_32 && targetPlatform.isDarwin then "dx32fsl"
+        else if targetPlatform.isx86_64 && targetPlatform.isDarwin then "dx64fsl"
+        else if targetPlatform.isx86_32 && targetPlatform.isFreeBSD then "fx32fsl"
+        else if targetPlatform.isx86_64 && targetPlatform.isFreeBSD then "fx64fsl"
+        else if targetPlatform.isx86_32 && targetPlatform.isWindows then "wx32fsl"
+        else if targetPlatform.isx86_64 && targetPlatform.isWindows then "wx64fsl"
+        else builtins.throw ("Don't know what FASLs are called for this platform: "
+          + pkgs.stdenv.targetPlatform.system);
+
+      genLoadLisp = genLoadLispGeneric impls.ccl;
+
+      genCompileLisp = { name, srcs, deps }: writeText "ccl-compile.lisp" ''
+        ${impls.ccl.genLoadLisp deps}
+
+        (defun getenv-or-fail (var)
+          (or (getenv var)
+              (error (format nil "Missing expected environment variable ~A" var))))
+
+        (defun nix-compile-file (srcfile)
+          "Trivial wrapper around COMPILE-FILE which causes CCL to exit if
+          compilation fails and LOADs the compiled file on success."
+          (let ((output (make-pathname :name (substitute #\_ #\/ srcfile)
+                                       :type "${impls.ccl.faslExt}"
+                                       :directory (getenv-or-fail "NIX_BUILD_TOP"))))
+            (multiple-value-bind (out-truename _warnings-p failure-p)
+                (compile-file srcfile :output-file output :print t :verbose t)
+                (declare (ignore _warnings-p))
+              (if failure-p (quit 1)
+                  (progn (load out-truename) out-truename)))))
+
+        (fasl-concatenate (make-pathname :name "${name}" :type "${impls.ccl.faslExt}"
+                                         :directory (getenv-or-fail "out"))
+                          (mapcar #'nix-compile-file
+                                  ;; These forms are inserted by the Nix build
+                                  '(${
+                                      lib.concatMapStrings (src: ''
+                                        "${src}"
+                                      '') srcs
+                                   })))
+      '';
+
+      genDumpLisp = { name, main, deps }: writeText "ccl-dump.lisp" ''
+        ${impls.ccl.genLoadLisp deps}
+
+        (let* ((out (or (getenv "out") (error "Not running in a Nix build")))
+               (bindir (concatenate 'string out "/bin/"))
+               (executable (make-pathname :directory bindir :name "${name}")))
+
+          ;; Tell UIOP that argv[0] will refer to running image, not the lisp impl
+          (when (find-package :uiop)
+            (eval `(setf ,(find-symbol "*IMAGE-DUMPED-P*" :uiop) :executable)))
+
+          (save-application executable
+                            :purify t
+                            :error-handler :quit
+                            :toplevel-function
+                            (lambda ()
+                              ;; Filter out everything prior to the `--` we
+                              ;; insert in the wrapper to prevent CCL from
+                              ;; parsing arguments at startup
+                              (setf ccl:*command-line-argument-list*
+                                    (delete "--" ccl:*command-line-argument-list*
+                                                 :test #'string= :count 1))
+                              (${main}))
+                            :mode #o755
+                            ;; TODO(sterni): use :native t on macOS
+                            :prepend-kernel t))
+      '';
+
+      wrapProgram = true;
+
+      genTestLisp = genTestLispGeneric impls.ccl;
+
+      lispWith = deps:
+        let lispDeps = filter (d: !d.lispBinary) (allDeps impls.ccl deps);
+        in writeShellScriptBin "ccl" ''
+          export LD_LIBRARY_PATH="${lib.makeLibraryPath (allNative [] lispDeps)}"
+          exec ${ccl}/bin/ccl ${
+            lib.optionalString (deps != [])
+              "--load ${writeText "load.lisp" (impls.ccl.genLoadLisp lispDeps)}"
+          } "$@"
+        '';
+    };
+  };
+
   #
   # Public API functions
   #
 
-  # 'library' builds a list of Common Lisp files into a single FASL
-  # which can then be loaded into SBCL.
+  # 'library' builds a list of Common Lisp files into an implementation
+  # specific library format, usually a single FASL file, which can then be
+  # loaded and built into an executable via 'program'.
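+  #
+  # A hedged usage sketch (file and library names are illustrative only):
+  #
+  #   buildLisp.library {
+  #     name = "my-lib";
+  #     srcs = [ ./my-lib.lisp ];
+  #     deps = [ (buildLisp.bundled "asdf") ];
+  #   }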
   library =
     { name
+    , implementation ? defaultImplementation
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
     , srcs
-    , deps ? []
-    , native ? []
+    , deps ? [ ]
+    , native ? [ ]
     , tests ? null
+    , passthru ? { }
     }:
     let
-      lispNativeDeps = (allNative native deps);
-      lispDeps = allDeps deps;
-      testDrv = if ! isNull tests
-        then testSuite {
-          name = tests.name or "${name}-test";
-          srcs = srcs ++ (tests.srcs or []);
-          deps = deps ++ (tests.deps or []);
-          expression = tests.expression;
-        }
+      filteredDeps = implFilter implementation deps;
+      filteredSrcs = implFilter implementation srcs;
+      lispNativeDeps = (allNative native filteredDeps);
+      lispDeps = allDeps implementation filteredDeps;
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs = filteredSrcs ++ (tests.srcs or [ ]);
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
         else null;
-    in runCommandNoCC "${name}-cllib" {
-      LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
-      LANG = "C.UTF-8";
-    } ''
+    in
+    lib.fix (self: runCommand "${name}-cllib"
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          inherit lispNativeDeps lispDeps;
+          lispName = name;
+          lispBinary = false;
+          tests = testDrv;
+        };
+      } ''
       ${if ! isNull testDrv
         then "echo 'Test ${testDrv} succeeded'"
         else "echo 'No tests run'"}
-      ${sbcl}/bin/sbcl --script ${genCompileLisp srcs lispDeps}
 
-      echo "Compilation finished, assembling FASL files"
-
-      # FASL files can be combined by simply concatenating them
-      # together, but it needs to be in the compilation order.
       mkdir $out
 
-      chmod +x cat_fasls
-      ./cat_fasls > $out/${name}.fasl
-    '' // {
-      inherit lispNativeDeps lispDeps;
-      lispName = name;
-      lispBinary = false;
-      tests = testDrv;
-    };
+      ${implementation.runScript} ${
+        implementation.genCompileLisp {
+          srcs = filteredSrcs;
+          inherit name;
+          deps = lispDeps;
+        }
+      }
+    '');
 
-  # 'program' creates an executable containing a dumped image of the
+  # 'program' creates an executable, usually containing a dumped image of the
   # specified sources and dependencies.
   program =
     { name
+    , implementation ? defaultImplementation
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
     , main ? "${name}:main"
     , srcs
-    , deps ? []
-    , native ? []
+    , deps ? [ ]
+    , native ? [ ]
     , tests ? null
+    , passthru ? { }
     }:
     let
-      lispDeps = allDeps deps;
+      filteredSrcs = implFilter implementation srcs;
+      filteredDeps = implFilter implementation deps;
+      lispDeps = allDeps implementation filteredDeps;
       libPath = lib.makeLibraryPath (allNative native lispDeps);
-      selfLib = library {
-        inherit name srcs native;
+      # overriding is used internally to propagate the implementation to use
+      selfLib = (makeOverridable library) {
+        inherit name native brokenOn;
         deps = lispDeps;
+        srcs = filteredSrcs;
       };
-      testDrv = if ! isNull tests
-        then testSuite {
-          name = tests.name or "${name}-test";
-          srcs =
-            (
-              srcs ++ (tests.srcs or []));
-          deps = deps ++ (tests.deps or []);
-          expression = tests.expression;
-        }
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs =
+                (
+                  # testSuite does run implFilter as well
+                  filteredSrcs ++ (tests.srcs or [ ])
+                );
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
         else null;
-    in runCommandNoCC "${name}" {
-      nativeBuildInputs = [ makeWrapper ];
-      LD_LIBRARY_PATH = libPath;
-      LANG = "C.UTF-8";
-    } ''
-      ${if ! isNull testDrv
-        then "echo 'Test ${testDrv} succeeded'"
-        else ""}
-      mkdir -p $out/bin
-
-      ${sbcl}/bin/sbcl --script ${
-        genDumpLisp name main ([ selfLib ] ++ lispDeps)
+    in
+    lib.fix (self: runCommand "${name}"
+      {
+        nativeBuildInputs = [ makeWrapper ];
+        LD_LIBRARY_PATH = libPath;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          lispName = name;
+          lispDeps = [ selfLib ];
+          lispNativeDeps = native;
+          lispBinary = true;
+          tests = testDrv;
+        };
       }
+      (''
+        ${if ! isNull testDrv
+          then "echo 'Test ${testDrv} succeeded'"
+          else ""}
+        mkdir -p $out/bin
+
+        ${implementation.runScript} ${
+          implementation.genDumpLisp {
+            inherit name main;
+            deps = ([ selfLib ] ++ lispDeps);
+          }
+        }
+      '' + lib.optionalString implementation.wrapProgram ''
+        wrapProgram $out/bin/${name} \
+          --prefix LD_LIBRARY_PATH : "${libPath}" \
+          --add-flags "\$NIX_BUILDLISP_LISP_ARGS --"
+      ''));
+
+  # 'bundled' creates a "library" which makes a built-in package available,
+  # such as any of SBCL's sb-* packages or ASDF. By default this is done
+  # by calling 'require', but implementations are free to provide their
+  # own specific bundled function.
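+  #
+  # A hedged example (available package names depend on the implementation):
+  #
+  #   buildLisp.bundled "sb-posix"   # one of SBCL's sb-* packages
+  #   buildLisp.bundled "asdf"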
+  bundled = name:
+    let
+      # TODO(sterni): allow overriding args to underlying 'library' (e. g. srcs)
+      defaultBundled = implementation: name: library {
+        inherit name implementation;
+        srcs = lib.singleton (builtins.toFile "${name}.lisp" "(require '${name})");
+      };
+
+      bundled' =
+        { implementation ? defaultImplementation
+        , name
+        }:
+        implementation.bundled or (defaultBundled implementation) name;
 
-      wrapProgram $out/bin/${name} --prefix LD_LIBRARY_PATH : "${libPath}"
-    '' // {
-      lispName = name;
-      lispDeps = [ selfLib ] ++ (tests.deps or []);
-      lispNativeDeps = native;
-      lispBinary = true;
-      tests = testDrv;
+    in
+    (makeOverridable bundled') {
+      inherit name;
     };
 
-  # 'bundled' creates a "library" that calls 'require' on a built-in
-  # package, such as any of SBCL's sb-* packages.
-  bundled = name: (makeOverridable library) {
-    inherit name;
-    srcs = lib.singleton (builtins.toFile "${name}.lisp" "(require '${name})");
-  };
+in
+{
+  library = withExtras library;
+  program = withExtras program;
+  inherit bundled;
 
   # 'sbclWith' creates an image with the specified libraries /
-  # programs loaded.
-  sbclWith = deps:
-  let lispDeps = filter (d: !d.lispBinary) (allDeps deps);
-  in writeShellScriptBin "sbcl" ''
-    export LD_LIBRARY_PATH=${lib.makeLibraryPath (allNative [] lispDeps)};
-    exec ${sbcl}/bin/sbcl ${lib.optionalString (deps != []) "--load ${writeText "load.lisp" (genLoadLisp lispDeps)}"} $@
-  '';
-in {
-  library = makeOverridable library;
-  program = makeOverridable program;
-  sbclWith = makeOverridable sbclWith;
-  bundled = makeOverridable bundled;
+  # programs loaded in SBCL.
+  sbclWith = impls.sbcl.lispWith;
+
+  inherit (impls)
+    sbcl
+    ecl
+    ccl
+    ;
 }
diff --git a/nix/buildLisp/example/default.nix b/nix/buildLisp/example/default.nix
index 6a518e4964..6add2676f1 100644
--- a/nix/buildLisp/example/default.nix
+++ b/nix/buildLisp/example/default.nix
@@ -14,15 +14,16 @@ let
     ];
   };
 
-# Example Lisp program.
-#
-# This builds & writes an executable for a program using the library
-# above to disk.
-#
-# By default, buildLisp.program expects the entry point to be
-# `$name:main`. This can be overridden by configuring the `main`
-# attribute.
-in buildLisp.program {
+  # Example Lisp program.
+  #
+  # This builds & writes an executable for a program using the library
+  # above to disk.
+  #
+  # By default, buildLisp.program expects the entry point to be
+  # `$name:main`. This can be overridden by configuring the `main`
+  # attribute.
+in
+buildLisp.program {
   name = "example";
   deps = [ libExample ];
 
diff --git a/nix/buildLisp/tests/argv0.nix b/nix/buildLisp/tests/argv0.nix
new file mode 100644
index 0000000000..ca5f2b9741
--- /dev/null
+++ b/nix/buildLisp/tests/argv0.nix
@@ -0,0 +1,58 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  # Trivial test program that outputs argv[0] and exits
+  prog =
+    depot.nix.buildLisp.program {
+      name = "argv0-test";
+
+      srcs = [
+        (pkgs.writeText "argv0-test.lisp" ''
+          (defpackage :argv0-test (:use :common-lisp :uiop) (:export :main))
+          (in-package :argv0-test)
+
+          (defun main ()
+            (format t "~A~%" (uiop:argv0)))
+        '')
+      ];
+
+      deps = [
+        {
+          sbcl = depot.nix.buildLisp.bundled "uiop";
+          default = depot.nix.buildLisp.bundled "asdf";
+        }
+      ];
+    };
+
+  # Verify the argv[0] output of the given buildLisp program
+  checkImplementation = prog:
+    pkgs.runCommand "check-argv0" { } ''
+      set -eux
+
+      checkInvocation() {
+        invocation="$1"
+        test "$invocation" = "$("$invocation")"
+      }
+
+      checkInvocation "${prog}/bin/argv0-test"
+
+      cd ${prog}
+      checkInvocation "./bin/argv0-test"
+
+      cd bin
+      checkInvocation ./argv0-test
+
+      set +x
+
+      touch "$out"
+    '';
+
+  inherit (prog.meta.ci) targets;
+in
+
+(checkImplementation prog).overrideAttrs (_: {
+  # Wire up a subtarget for each (active) non-default implementation
+  passthru = lib.genAttrs targets (name: checkImplementation prog.${name});
+
+  meta.ci = { inherit targets; };
+})
diff --git a/nix/buildManPages/OWNERS b/nix/buildManPages/OWNERS
new file mode 100644
index 0000000000..2e95807063
--- /dev/null
+++ b/nix/buildManPages/OWNERS
@@ -0,0 +1 @@
+sterni
diff --git a/nix/buildManPages/default.nix b/nix/buildManPages/default.nix
new file mode 100644
index 0000000000..746ed25182
--- /dev/null
+++ b/nix/buildManPages/default.nix
@@ -0,0 +1,103 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  inherit (pkgs)
+    gzip
+    mandoc
+    coreutils
+    ;
+
+  inherit (depot.nix)
+    runExecline
+    getBins
+    ;
+
+  bins = getBins mandoc [ "mandoc" ]
+    // getBins gzip [ "gzip" ]
+    // getBins coreutils [ "mkdir" "ln" "cp" ]
+  ;
+
+  defaultGzip = true;
+
+  basename = gzip: { name, section, ... }:
+    "${name}.${toString section}${lib.optionalString gzip ".gz"}";
+
+  manDir = { section, ... }:
+    "\${out}/share/man/man${toString section}";
+
+  target = gzip: args:
+    "${manDir args}/${basename gzip args}";
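+
+  # For illustration (hypothetical page): with gzip enabled and a page
+  # { name = "tvl"; section = 7; }, basename yields "tvl.7.gz" and
+  # target yields "${out}/share/man/man7/tvl.7.gz".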
+
+  buildManPage =
+    { requireLint ? false
+    , gzip ? defaultGzip
+    , ...
+    }:
+    { content
+    , ...
+    }@page:
+    let
+      source = builtins.toFile (basename false page) content;
+    in
+    runExecline (basename gzip page) { } ([
+      (if requireLint then "if" else "foreground")
+      [
+        bins.mandoc
+        "-mdoc"
+        "-T"
+        "lint"
+        source
+      ]
+      "importas"
+      "out"
+      "out"
+    ] ++ (if gzip then [
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
+      bins.gzip
+      "-c"
+      source
+    ] else [
+      bins.cp
+      "--reflink=auto"
+      source
+      "$out"
+    ]));
+
+  buildManPages =
+    name:
+    { derivationArgs ? { }
+    , gzip ? defaultGzip
+    , ...
+    }@args:
+    pages:
+    runExecline "${name}-man-pages"
+      {
+        inherit derivationArgs;
+      }
+      ([
+        "importas"
+        "out"
+        "out"
+      ] ++ lib.concatMap
+        ({ name, section, content }@page: [
+          "if"
+          [ bins.mkdir "-p" (manDir page) ]
+          "if"
+          [
+            bins.ln
+            "-s"
+            (buildManPage args page)
+            (target gzip page)
+          ]
+        ])
+        pages);
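+
+  # A hedged usage sketch (names and sources are illustrative):
+  #
+  #   buildManPages "mytool" { } [
+  #     { name = "mytool"; section = 1; content = builtins.readFile ./mytool.1; }
+  #   ]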
+
+in
+{
+  __functor = _: buildManPages;
+
+  single = buildManPage;
+}
diff --git a/nix/buildTypedGo/default.nix b/nix/buildTypedGo/default.nix
deleted file mode 100644
index f135b1ebb4..0000000000
--- a/nix/buildTypedGo/default.nix
+++ /dev/null
@@ -1,34 +0,0 @@
-# SPDX-License-Identifier: Apache-2.0
-#
-# A crude wrapper around //nix/buildGo that supports the Go 2 alpha.
-#
-# The way the alpha is implemented is via a transpiler from typed to
-# untyped Go.
-{ depot, pkgs, ... }:
-
-let
-  inherit (builtins)
-    baseNameOf
-    stringLength
-    substring;
-
-  inherit (depot.nix.buildGo) gpackage program;
-
-  go2goext = file: substring 0 ((stringLength file) - 1) file;
-  go2go = file: pkgs.runCommandNoCC "${go2goext (baseNameOf file)}" {} ''
-    cp ${file} .
-    ${pkgs.go}/bin/go tool go2go translate *.go2
-    mv *.go $out
-  '';
-
-in rec {
-  program = { name, srcs, deps ? [], x_defs ? {} }: depot.nix.buildGo.program {
-    inherit name deps x_defs;
-    srcs = map go2go srcs;
-  };
-
-  package = { name, srcs, deps ? [], path ? name, sfiles ? [] }: depot.nix.buildGo.package {
-    inherit name deps path sfiles;
-    srcs = map go2go srcs;
-  };
-}
diff --git a/nix/buildTypedGo/example/default.nix b/nix/buildTypedGo/example/default.nix
deleted file mode 100644
index 5b6d4171f9..0000000000
--- a/nix/buildTypedGo/example/default.nix
+++ /dev/null
@@ -1,8 +0,0 @@
-{ depot, ... }:
-
-depot.nix.buildTypedGo.program {
-  name = "example";
-  srcs = [
-    ./main.go2
-  ];
-}
diff --git a/nix/buildTypedGo/example/main.go2 b/nix/buildTypedGo/example/main.go2
deleted file mode 100644
index 8986f57b94..0000000000
--- a/nix/buildTypedGo/example/main.go2
+++ /dev/null
@@ -1,15 +0,0 @@
-package main
-
-import (
-	"fmt"
-)
-
-func Print(type T)(s []T) {
-	for _, v := range s {
-		fmt.Print(v)
-	}
-}
-
-func main() {
-	Print([]string{"Hello, ", "TVL\n"})
-}
diff --git a/nix/buildkite/default.nix b/nix/buildkite/default.nix
new file mode 100644
index 0000000000..9abba9408a
--- /dev/null
+++ b/nix/buildkite/default.nix
@@ -0,0 +1,491 @@
+# Logic for generating Buildkite pipelines from Nix build targets read
+# by //nix/readTree.
+#
+# It outputs a "YAML" (actually JSON) file which is evaluated and
+# submitted to Buildkite at the start of each build.
+#
+# The structure of the file that is being created is documented here:
+#   https://buildkite.com/docs/pipelines/defining-steps
+{ depot, pkgs, ... }:
+
+let
+  inherit (builtins)
+    attrValues
+    concatLists
+    concatStringsSep
+    elem
+    foldl'
+    hasAttr
+    hashString
+    isNull
+    isString
+    length
+    listToAttrs
+    mapAttrs
+    toJSON
+    unsafeDiscardStringContext;
+
+  inherit (pkgs) lib runCommand writeText;
+  inherit (depot.nix.readTree) mkLabel;
+
+  inherit (depot.nix) dependency-analyzer;
+in
+rec {
+  # Create a unique key for the buildkite pipeline based on the given derivation
+  # or drvPath. A consequence of using such keys is that every derivation may
+  # only be exposed as a single, unique step in the pipeline.
+  keyForDrv = drvOrPath:
+    let
+      drvPath =
+        if lib.isDerivation drvOrPath then drvOrPath.drvPath
+        else if lib.isString drvOrPath then drvOrPath
+        else builtins.throw "keyForDrv: expected string or derivation";
+
+      # Only use the drv hash to prevent escaping problems. Buildkite also has a
+      # limit of 100 characters on keys.
+    in
+    "drv-" + (builtins.substring 0 32
+      (builtins.baseNameOf (unsafeDiscardStringContext drvPath))
+    );
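+
+  # Illustration with a made-up store path: for
+  #   /nix/store/0c1ffsgzaxpyfy0w86plh94r6hjcqnhq-hello.drv
+  # the resulting key is "drv-0c1ffsgzaxpyfy0w86plh94r6hjcqnhq".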
+
+  # Given an arbitrary attribute path generate a Nix expression which obtains
+  # this from the root of depot (assumed to be ./.). Attributes may be any
+  # Nix strings suitable as attribute names, not just Nix literal-safe strings.
+  mkBuildExpr = attrPath:
+    let
+      descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
+    in
+    foldl' descend "import ./. {}" attrPath;
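+
+  # For example (hypothetical attribute path):
+  #   mkBuildExpr [ "foo" "bar" ]
+  #   => "builtins.getAttr \"bar\" (builtins.getAttr \"foo\" (import ./. {}))"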
+
+  # Determine whether to skip a target if it has not diverged from the
+  # HEAD branch.
+  shouldSkip = { parentTargetMap ? { }, label, drvPath }:
+    if (hasAttr label parentTargetMap) && parentTargetMap."${label}".drvPath == drvPath
+    then "Target has not changed."
+    else false;
+
+  # Create build command for an attribute path pointing to a derivation.
+  mkBuildCommand = { attrPath, drvPath, outLink ? "result" }: concatStringsSep " " [
+    # If the nix build fails, the Nix command's exit status should be used.
+    "set -o pipefail;"
+
+    # First try to realise the drvPath of the target so we don't evaluate twice.
+    # Nix has no concept of depending on a derivation file without depending on
+    # at least one of its `outPath`s, so we need to discard the string context
+    # if we don't want to build everything during pipeline construction.
+    #
+    # To make this more uniform with how nix-build(1) works, we call realpath(1)
+    # on nix-store(1)'s output since it has the habit of printing the path of the
+    # out link, not the store path.
+    "(nix-store --realise '${drvPath}' --add-root '${outLink}' --indirect | xargs -r realpath)"
+
+    # Since we don't gcroot the derivation files, they may be deleted by the
+    # garbage collector. In that case we can reevaluate and build the attribute
+    # using nix-build.
+    "|| (test ! -f '${drvPath}' && nix-build -E '${mkBuildExpr attrPath}' --show-trace --out-link '${outLink}')"
+  ];
+
+  # Attribute path of a target relative to the depot root. Needs to take into
+  # account whether the target is a physical target (which corresponds to a path
+  # in the filesystem) or the subtarget of a physical target.
+  targetAttrPath = target:
+    target.__readTree
+    ++ lib.optionals (target ? __subtarget) [ target.__subtarget ];
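+
+  # For instance, a physical target at //foo/bar with subtarget "baz"
+  # (illustrative names) results in [ "foo" "bar" "baz" ].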
+
+  # Given a derivation (identified by drvPath) that is part of the list of
+  # targets passed to mkPipeline, determine all derivations that it depends on
+  # and are also part of the pipeline. Finally, return the keys of the steps
+  # that build them. This is used to populate `depends_on` in `mkStep`.
+  #
+  # See //nix/dependency-analyzer for documentation on the structure of `targetDepMap`.
+  getTargetPipelineDeps = targetDepMap: drvPath:
+    # Sanity check: We should only call this function on targets explicitly
+    # passed to mkPipeline. Thus it should have been passed as a “known” drv to
+    # dependency-analyzer.
+    assert targetDepMap.${drvPath}.known;
+    builtins.map keyForDrv targetDepMap.${drvPath}.knownDeps;
+
+  # Create a pipeline step from a single target.
+  mkStep = { headBranch, parentTargetMap, targetDepMap, target, cancelOnBuildFailing }:
+    let
+      label = mkLabel target;
+      drvPath = unsafeDiscardStringContext target.drvPath;
+    in
+    {
+      label = ":nix: " + label;
+      key = keyForDrv target;
+      skip = shouldSkip { inherit label drvPath parentTargetMap; };
+      command = mkBuildCommand {
+        attrPath = targetAttrPath target;
+        inherit drvPath;
+      };
+      env.READTREE_TARGET = label;
+      cancel_on_build_failing = cancelOnBuildFailing;
+
+      # Add a dependency on the initial static pipeline step which
+      # always runs. This allows build steps uploaded in batches to
+      # start running before all batches have been uploaded.
+      depends_on = [ ":init:" ]
+      ++ getTargetPipelineDeps targetDepMap drvPath
+      ++ lib.optionals (target ? meta.ci.buildkiteExtraDeps) target.meta.ci.buildkiteExtraDeps;
+    } // lib.optionalAttrs (target ? meta.timeout) {
+      timeout_in_minutes = target.meta.timeout / 60;
+      # Additional arguments to set on the step.
+      # Keep in mind that these *overwrite* existing step args rather than extending them. Use with caution.
+    } // lib.optionalAttrs (target ? meta.ci.buildkiteExtraStepArgs) target.meta.ci.buildkiteExtraStepArgs;
+
+  # Helper function to inelegantly divide a list into chunks of at
+  # most n elements.
+  #
+  # This works by assigning each element a chunk ID based on its
+  # index, and then grouping all elements by their chunk ID.
+  chunksOf = n: list:
+    let
+      chunkId = idx: toString (idx / n + 1);
+      assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
+      unchunk = mapAttrs (_: elements: map (e: e.value) elements);
+    in
+    unchunk (lib.groupBy (e: e.chunk) assigned);
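+
+  # Rough illustration (integer division means the first chunk may hold
+  # fewer than n elements):
+  #   chunksOf 2 [ "a" "b" "c" "d" ] => { "1" = [ "a" ]; "2" = [ "b" "c" ]; "3" = [ "d" ]; }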
+
+  # Define a build pipeline chunk as a JSON file, using the pipeline
+  # format documented on
+  # https://buildkite.com/docs/pipelines/defining-steps.
+  makePipelineChunk = name: chunkId: chunk: rec {
+    filename = "${name}-chunk-${chunkId}.json";
+    path = writeText filename (toJSON {
+      steps = chunk;
+    });
+  };
+
+  # Split the pipeline into chunks of at most 192 steps at once, which
+  # are uploaded sequentially. This is because of a limitation in the
+  # Buildkite backend which struggles to process more than a specific
+  # number of chunks at once.
+  pipelineChunks = name: steps:
+    attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps));
+
+  # Create a pipeline structure for the given targets.
+  mkPipeline =
+    {
+      # HEAD branch of the repository on which release steps, GC
+      # anchoring and other "mainline only" steps should run.
+      headBranch
+    , # List of derivations as read by readTree (in most cases just the
+      # output of readTree.gather) that should be built in Buildkite.
+      #
+      # These are scheduled as the first build steps and run as fast as
+      # possible, in order, without any concurrency restrictions.
+      drvTargets
+    , # Derivation map of a parent commit. Only targets which no longer
+      # correspond to the content of this map will be built. Passing an
+      # empty map will always build all targets.
+      parentTargetMap ? { }
+    , # A list of plain Buildkite step structures to run alongside the
+      # build for all drvTargets, but before proceeding with any
+      # post-build actions such as status reporting.
+      #
+      # Can be used for things like code formatting checks.
+      additionalSteps ? [ ]
+    , # A list of plain Buildkite step structures to run after all
+      # previous steps succeeded.
+      #
+      # Can be used for status reporting steps and the like.
+      postBuildSteps ? [ ]
+      # The list of phases known by the current Buildkite
+      # pipeline. Dynamic pipeline chunks for each phase are uploaded
+      # to Buildkite on execution of the static part of the
+      # pipeline. Phase selection is hard-coded in the static
+      # pipeline.
+      #
+      # Pipeline generation will fail when an extra step with an
+      # unregistered phase is added.
+      #
+      # Common scenarios for different phases:
+      #   - "build" - main phase for building all Nix targets
+      #   - "release" - pushing artifacts to external repositories
+      #   - "deploy" - updating external deployment configurations
+    , phases ? [ "build" "release" ]
+      # Build phases that are active for this invocation (i.e. their
+      # steps should be generated).
+      #
+      # This can be used to disable outputting parts of a pipeline if,
+      # for example, build and release phases are created in separate
+      # eval contexts.
+      #
+      # TODO(tazjin): Fail/warn if unknown phase is requested.
+    , activePhases ? phases
+      # Setting this attribute to true cancels dynamic pipeline steps
+      # as soon as the build is marked as failing.
+      #
+      # To enable this feature one should enable "Fail Fast" setting
+      # at Buildkite pipeline or on organization level.
+    , cancelOnBuildFailing ? false
+    }:
+    let
+      # List of phases to include.
+      enabledPhases = lib.intersectLists activePhases phases;
+
+      # Is the 'build' phase included? This phase is treated specially
+      # because it always contains the plain Nix builds, and some
+      # logic/optimisation depends on knowing whether it is executing.
+      buildEnabled = elem "build" enabledPhases;
+
+      # Dependency relations between the `drvTargets`. See also //nix/dependency-analyzer.
+      targetDepMap = dependency-analyzer (dependency-analyzer.drvsToPaths drvTargets);
+
+      # Convert a target into all of its steps, separated by build
+      # phase (as phases end up in different chunks).
+      targetToSteps = target:
+        let
+          mkStepArgs = {
+            inherit headBranch parentTargetMap targetDepMap target cancelOnBuildFailing;
+          };
+          step = mkStep mkStepArgs;
+
+          # Same step, but with an override function applied. This is
+          # used in mkExtraStep if the extra step needs to modify the
+          # parent derivation somehow.
+          #
+          # Note that this will never affect the label.
+          overridable = f: mkStep (mkStepArgs // { target = (f target); });
+
+          # Split extra steps by phase.
+          splitExtraSteps = lib.groupBy ({ phase, ... }: phase)
+            (attrValues (mapAttrs (normaliseExtraStep phases overridable)
+              (target.meta.ci.extraSteps or { })));
+
+          extraSteps = mapAttrs
+            (_: steps:
+              map (mkExtraStep (targetAttrPath target) buildEnabled) steps)
+            splitExtraSteps;
+        in
+        if !buildEnabled then extraSteps
+        else extraSteps // {
+          build = [ step ] ++ (extraSteps.build or [ ]);
+        };
+
+      # Combine all target steps into step lists per phase.
+      #
+      # TODO(tazjin): Refactor when configurable phases show up.
+      globalSteps = {
+        build = additionalSteps;
+        release = postBuildSteps;
+      };
+
+      phasesWithSteps = lib.zipAttrsWithNames enabledPhases (_: concatLists)
+        ((map targetToSteps drvTargets) ++ [ globalSteps ]);
+
+      # Generate pipeline chunks for each phase.
+      chunks = foldl'
+        (acc: phase:
+          let phaseSteps = phasesWithSteps.${phase} or [ ]; in
+          if phaseSteps == [ ]
+          then acc
+          else acc ++ (pipelineChunks phase phaseSteps))
+        [ ]
+        enabledPhases;
+
+    in
+    runCommand "buildkite-pipeline" { } ''
+      mkdir $out
+      echo "Generated ${toString (length chunks)} pipeline chunks"
+      ${
+        lib.concatMapStringsSep "\n"
+          (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
+      }
+    '';
+
+  # Create a drvmap structure for the given targets, containing the
+  # mapping of all target paths to their derivations. The mapping can
+  # be persisted for future use.
+  mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map
+    (target: {
+      name = mkLabel target;
+      value = {
+        drvPath = unsafeDiscardStringContext target.drvPath;
+
+        # Include the attrPath in the output to reconstruct the drv
+        # without parsing the human-readable label.
+        attrPath = targetAttrPath target;
+      };
+    })
+    drvTargets)));
+
+  # Implementation of extra step logic.
+  #
+  # Each target extra step is an attribute specified in
+  # `meta.ci.extraSteps`. Its attribute name will be used as the step
+  # name on Buildkite.
+  #
+  #   command (required): A command that will be run in the depot
+  #     checkout when this step is executed. Should be a derivation
+  #     resulting in a single executable file, e.g. through
+  #     pkgs.writeShellScript.
+  #
+  #   label (optional): Human-readable label for this step to display
+  #     in the Buildkite UI instead of the attribute name.
+  #
+  #   prompt (optional): Setting this blocks the step until confirmed
+  #     by a human. Should be a string which is displayed for
+  #     confirmation. These steps always run after the main build is
+  #     done and have no influence on CI status.
+  #
+  #   needsOutput (optional): If set to true, the parent derivation
+  #     will be built in the working directory before running the
+  #     command. Output will be available as 'result'.
+  #     TODO: Figure out multiple-output derivations.
+  #
+  #   parentOverride (optional): A function (drv -> drv) to override
+  #     the parent's target definition when preparing its output. Only
+  #     used in extra steps that use needsOutput.
+  #
+  #   branches (optional): Git references (branches, tags ... ) on
+  #     which this step should be allowed to run. List of strings.
+  #
+  #   alwaysRun (optional): If set to true, this step will always run,
+  #     even if its parent has not been rebuilt.
+  #
+  # Note that gated steps are independent of each other.
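+  #
+  # A hedged sketch of such a configuration (target and script names are
+  # made up):
+  #
+  #   meta.ci.extraSteps.lint = {
+  #     label = "lint sources";
+  #     command = pkgs.writeShellScript "lint" "exec ./lint.sh";
+  #     alwaysRun = true;
+  #   };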
+
+  # Create a gated step in a step group, independent from any other
+  # steps.
+  mkGatedStep = { step, label, parent, prompt }: {
+    inherit (step) depends_on;
+    group = label;
+    skip = parent.skip or false;
+
+    steps = [
+      {
+        inherit prompt;
+        branches = step.branches or [ ];
+        block = ":radio_button: Run ${label}? (from ${parent.env.READTREE_TARGET})";
+      }
+
+      # The explicit depends_on of the wrapped step must be removed,
+      # otherwise its dependency relationship with the gate step will
+      # break.
+      (builtins.removeAttrs step [ "depends_on" ])
+    ];
+  };
+
+  # Validate and normalise extra step configuration before actually
+  # generating build steps, in order to use user-provided metadata
+  # during the pipeline generation.
+  normaliseExtraStep = phases: overridableParent: key:
+    { command
+    , label ? key
+    , needsOutput ? false
+    , parentOverride ? (x: x)
+    , branches ? null
+    , alwaysRun ? false
+    , prompt ? false
+    , softFail ? false
+    , phase ? "build"
+    , skip ? false
+    , agents ? null
+    }:
+    let
+      parent = overridableParent parentOverride;
+      parentLabel = parent.env.READTREE_TARGET;
+
+      validPhase = lib.throwIfNot (elem phase phases) ''
+        In step '${label}' (from ${parentLabel}):
+
+        Phase '${phase}' is not valid.
+
+        Known phases: ${concatStringsSep ", " phases}
+      ''
+        phase;
+    in
+    {
+      inherit
+        alwaysRun
+        branches
+        command
+        key
+        label
+        needsOutput
+        parent
+        parentLabel
+        softFail
+        skip
+        agents;
+
+      phase = validPhase;
+
+      prompt = lib.throwIf (prompt != false && phase == "build") ''
+        In step '${label}' (from ${parentLabel}):
+
+        The 'prompt' feature can not be used by steps in the "build"
+        phase, because CI builds should not be gated on manual human
+        approvals.
+      ''
+        prompt;
+    };
+
+  # Create the Buildkite configuration for an extra step, optionally
+  # wrapping it in a gate group.
+  mkExtraStep = parentAttrPath: buildEnabled: cfg:
+    let
+      # ATTN: needs to match an entry in .gitignore so that the tree won't get dirty
+      commandScriptLink = "nix-buildkite-extra-step-command-script";
+
+      step = {
+        key = "extra-step-" + hashString "sha1" "${cfg.label}-${cfg.parentLabel}";
+        label = ":gear: ${cfg.label} (from ${cfg.parentLabel})";
+        skip =
+          let
+            # When parent doesn't have skip attribute set, default to false
+            parentSkip = cfg.parent.skip or false;
+            # Extra step skip parameter can be string explaining the
+            # skip reason.
+            extraStepSkip = if builtins.isString cfg.skip then true else cfg.skip;
+            # Don't run if extra step is explicitly set to skip. If
+            # parameter is not set or equal to false, follow parent behavior.
+            skip' = if extraStepSkip then cfg.skip else parentSkip;
+          in
+          if cfg.alwaysRun then false else skip';
+
+        depends_on = lib.optional
+          (buildEnabled && !cfg.alwaysRun && !cfg.needsOutput)
+          cfg.parent.key;
+
+        command = pkgs.writeShellScript "${cfg.key}-script" ''
+          set -ueo pipefail
+          ${lib.optionalString cfg.needsOutput
+            "echo '~~~ Preparing build output of ${cfg.parentLabel}'"
+          }
+          ${lib.optionalString cfg.needsOutput cfg.parent.command}
+          echo '--- Building extra step script'
+          command_script="$(${
+            # Using command substitution in this way assumes the script drv only has one output
+            assert builtins.length cfg.command.outputs == 1;
+            mkBuildCommand {
+              # script is exposed at <parent>.meta.ci.extraSteps.<key>.command
+              attrPath =
+                parentAttrPath
+                ++ [ "meta" "ci" "extraSteps" cfg.key "command" ];
+              drvPath = unsafeDiscardStringContext cfg.command.drvPath;
+              # make sure it doesn't conflict with result (from needsOutput)
+              outLink = commandScriptLink;
+            }
+          })"
+          echo '+++ Running extra step script'
+          exec "$command_script"
+        '';
+
+        soft_fail = cfg.softFail;
+      } // (lib.optionalAttrs (cfg.agents != null) { inherit (cfg) agents; })
+      // (lib.optionalAttrs (cfg.branches != null) {
+        branches = lib.concatStringsSep " " cfg.branches;
+      });
+    in
+    if (isString cfg.prompt)
+    then
+      mkGatedStep
+        {
+          inherit step;
+          inherit (cfg) label parent prompt;
+        }
+    else step;
+}
diff --git a/nix/buildkite/fetch-parent-targets.sh b/nix/buildkite/fetch-parent-targets.sh
new file mode 100755
index 0000000000..08c2d1f3ab
--- /dev/null
+++ b/nix/buildkite/fetch-parent-targets.sh
@@ -0,0 +1,55 @@
+#!/usr/bin/env bash
+set -ueo pipefail
+
+# Each Buildkite build stores the derivation target map as a pipeline
+# artifact. To reduce the amount of work done by CI, each CI build is
+# diffed against the latest such derivation map found for the
+# repository.
+#
+# Note that this does not take into account when the currently
+# processing CL was forked off from the canonical branch, meaning that
+# things like nixpkgs updates in between will cause mass rebuilds in
+# any case.
+#
+# If no map is found, the failure mode is not critical: We simply
+# build all targets.
+
+readonly REPO_ROOT=$(git rev-parse --show-toplevel)
+
+: ${DRVMAP_PATH:=pipeline/drvmap.json}
+: ${BUILDKITE_TOKEN_PATH:=~/buildkite-token}
+
+# Runs a fairly complex Buildkite GraphQL query that attempts to fetch all
+# pipeline-gen steps from the default branch, as long as one appears within the
+# last 50 builds or so. The query restricts build states to running or passed
+# builds, which means that it *should* be unlikely that nothing is found.
+#
+# There is no way to filter this more loosely (e.g. by saying "any recent build
+# matching these conditions").
+#
+# The returned data structure is complex, and disassembled by a JQ script that
+# first filters out all builds with no matching jobs (e.g. builds that are still
+# in progress), and then filters those down to builds with artifacts, and then
+# to drvmap artifacts specifically.
+#
+# If a recent drvmap was found, this returns its download URL. Otherwise, it
+# returns the string "null".
+function latest_drvmap_url {
+    set -u
+    curl 'https://graphql.buildkite.com/v1' \
+         --silent \
+         -H "Authorization: Bearer $(cat ${BUILDKITE_TOKEN_PATH})" \
+         -H "Content-Type: application/json" \
+         -d "{\"query\": \"{ pipeline(slug: \\\"$BUILDKITE_ORGANIZATION_SLUG/$BUILDKITE_PIPELINE_SLUG\\\") { builds(first: 50, branch: [\\\"%default\\\"], state: [RUNNING, PASSED]) { edges { node { jobs(passed: true, first: 1, type: [COMMAND], step: {key: [\\\"pipeline-gen\\\"]}) { edges { node { ... on JobTypeCommand { url artifacts { edges { node { downloadURL path }}}}}}}}}}}}\"}" | tee out.json | \
+        jq -r '[.data.pipeline.builds.edges[] | select((.node.jobs.edges | length) > 0) | .node.jobs.edges[] | .node.artifacts[][] | select(.node.path == "pipeline/drvmap.json")][0].node.downloadURL'
+}
+
+readonly DOWNLOAD_URL=$(latest_drvmap_url)
+
+if [[ ${DOWNLOAD_URL} != "null" ]]; then
+    mkdir -p tmp
+    curl -o tmp/parent-target-map.json "${DOWNLOAD_URL}" && echo "downloaded parent derivation map" \
+            || echo "failed to download derivation map!"
+else
+    echo "no derivation map found!"
+fi
diff --git a/nix/dependency-analyzer/default.nix b/nix/dependency-analyzer/default.nix
new file mode 100644
index 0000000000..2ec8d7b5b9
--- /dev/null
+++ b/nix/dependency-analyzer/default.nix
@@ -0,0 +1,263 @@
+{ lib, depot, pkgs, ... }:
+
+let
+  inherit (builtins) unsafeDiscardStringContext appendContext;
+
+  #
+  # Utilities
+  #
+
+  # Determine all paths a derivation depends on, i.e. input derivations and
+  # files imported into the Nix store.
+  #
+  # Implementation for Nix < 2.6 is quite hacky at the moment.
+  #
+  # Type: str -> [str]
+  #
+  # TODO(sterni): clean this up and expose it
+  directDrvDeps =
+    let
+      getDeps =
+        if lib.versionAtLeast builtins.nixVersion "2.6"
+        then
+        # Since https://github.com/NixOS/nix/pull/1643, Nix apparently »preserves
+        # string context« through a readFile invocation. This has the side effect
+        # that it becomes possible to query the actual references a store path has.
+        # Not 100% sure this is intended, but _very_ convenient for us here.
+          drvPath:
+          builtins.attrNames (builtins.getContext (builtins.readFile drvPath))
+        else
+        # For Nix < 2.6 we have to rely on a hack, namely grepping for quoted
+        # store path references in the file. In the future this should be
+        # replaced by a proper derivation parser.
+          drvPath: builtins.concatLists (
+            builtins.filter builtins.isList (
+              builtins.split
+                "\"(${lib.escapeRegex builtins.storeDir}/[[:alnum:]+._?=-]+.drv)\""
+                (builtins.readFile drvPath)
+            )
+          );
+    in
+    drvPath:
+    # if the passed path is not a derivation we can't necessarily get its
+    # dependencies, since it may not be representable as a Nix string due to
+    # NUL bytes, e.g. compressed patch files imported into the Nix store.
+    if builtins.match "^.+\\.drv$" drvPath == null
+    then [ ]
+    else getDeps drvPath;
+
+  # Maps a list of derivations to the list of their corresponding `drvPath`s.
+  #
+  # Type: [drv] -> [str]
+  drvsToPaths = drvs:
+    builtins.map (drv: builtins.unsafeDiscardOutputDependency drv.drvPath) drvs;
+
+  #
+  # Calculate map of direct derivation dependencies
+  #
+
+  # Create the dependency map entry for a given `drvPath` which mainly includes
+  # a list of other `drvPath`s it depends on. Additionally we store whether the
+  # derivation is `known`, i.e. part of the initial list of derivations we start
+  # generating the map from
+  #
+  # Type: bool -> string -> set
+  drvEntry = known: drvPath:
+    let
+      # key may not refer to a store path, …
+      key = unsafeDiscardStringContext drvPath;
+      # but we must read from the .drv file.
+      path = builtins.unsafeDiscardOutputDependency drvPath;
+    in
+    {
+      inherit key;
+      # trick so we can call listToAttrs directly on the result of genericClosure
+      name = key;
+      value = {
+        deps = directDrvDeps path;
+        inherit known;
+      };
+    };
+
+  # Create an attribute set that maps every derivation in the combined
+  # dependency closure of the list of input derivation paths to every of their
+  # direct dependencies. Additionally every entry will have set their `known`
+  # attribute to `true` if it is in the list of input derivation paths.
+  #
+  # Type: [str] -> set
+  plainDrvDepMap = drvPaths:
+    builtins.listToAttrs (
+      builtins.genericClosure {
+        startSet = builtins.map (drvEntry true) drvPaths;
+        operator = { value, ... }: builtins.map (drvEntry false) value.deps;
+      }
+    );
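+
+  # The result has roughly this shape (store paths abbreviated):
+  #
+  #   {
+  #     "/nix/store/…-a.drv" = { known = true;  deps = [ "/nix/store/…-b.drv" ]; };
+  #     "/nix/store/…-b.drv" = { known = false; deps = [ /* … */ ]; };
+  #   }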
+
+  #
+  # Calculate closest known dependencies in the dependency map
+  #
+
+  inherit (depot.nix.stateMonad)
+    after
+    bind
+    for_
+    get
+    getAttr
+    run
+    setAttr
+    pure
+    ;
+
+  # This is an action in stateMonad which expects the (initial) state to have
+  # been produced by `plainDrvDepMap`. Given a `drvPath`, it calculates a
+  # `knownDeps` list which holds the `drvPath`s of the closest derivation marked
+  # as `known` along every edge. This list is inserted into the dependency map
+  # for `drvPath` and every other derivation in its dependency closure (unless
+  # the information was already present). This means that the known dependency
+  # information for a derivation never has to be recalculated, as long as they
+  # are part of the same stateful computation.
+  #
+  # The upshot is that after calling `insertKnownDeps drvPath`,
+  # `fmap (builtins.getAttr "knownDeps") (getAttr drvPath)` will always succeed.
+  #
+  # Type: str -> stateMonad drvDepMap null
+  insertKnownDeps = drvPathWithContext:
+    let
+      # We no longer need to read from the store, so context is irrelevant, but
+      # we need to check for attr names which requires the absence of context.
+      drvPath = unsafeDiscardStringContext drvPathWithContext;
+    in
+    bind get (initDepMap:
+      # Get the dependency map's state before we've done anything to obtain the
+      # entry we'll be manipulating later as well as its dependencies.
+      let
+        entryPoint = initDepMap.${drvPath};
+
+        # We don't need to recurse if our direct dependencies either have their
+        # knownDeps list already populated or are known dependencies themselves.
+        depsPrecalculated =
+          builtins.partition
+            (dep:
+              initDepMap.${dep}.known
+              || initDepMap.${dep} ? knownDeps
+            )
+            entryPoint.deps;
+
+        # If a direct dependency is known, it goes right to our known dependency
+        # list. If it is unknown, we can copy its knownDeps list into our own.
+        initiallyKnownDeps =
+          builtins.concatLists (
+            builtins.map
+              (dep:
+                if initDepMap.${dep}.known
+                then [ dep ]
+                else initDepMap.${dep}.knownDeps
+              )
+              depsPrecalculated.right
+          );
+      in
+
+      # If the information was already calculated before, we can exit right away
+      if entryPoint ? knownDeps
+      then pure null
+      else
+        after
+          # For all unknown direct dependencies which don't have a `knownDeps`
+          # list, we call ourselves recursively to populate it. Since this is
+          # done sequentially in the state monad, we avoid recalculating the
+          # list for the same derivation multiple times.
+          (for_
+            depsPrecalculated.wrong
+            insertKnownDeps)
+          # After this we can obtain the updated dependency map which will have
+          # a `knownDeps` list for all our direct dependencies and update the
+          # entry for the input `drvPath`.
+          (bind
+            get
+            (populatedDepMap:
+              (setAttr drvPath (entryPoint // {
+                knownDeps =
+                  lib.unique (
+                    initiallyKnownDeps
+                      ++ builtins.concatLists (
+                      builtins.map
+                        (dep: populatedDepMap.${dep}.knownDeps)
+                        depsPrecalculated.wrong
+                    )
+                  );
+              }))))
+    );
+
+  # This function puts it all together and is exposed via `__functor`.
+  #
+  # For a list of `drvPath`s, calculate an attribute set which maps every
+  # `drvPath` to a set of the following form:
+  #
+  #     {
+  #       known = true /* if it is in the list of input derivation paths */;
+  #       deps = [
+  #         /* list of derivation paths it depends on directly */
+  #       ];
+  #       knownDeps = [
+  #         /* list of the closest derivation paths marked as known this
+  #            derivation depends on.
+  #         */
+  #       ];
+  #     }
+  knownDrvDepMap = knownDrvPaths:
+    run
+      (plainDrvDepMap knownDrvPaths)
+      (after
+        (for_
+          knownDrvPaths
+          insertKnownDeps)
+        get);
+
+  #
+  # Other things based on knownDrvDepMap
+  #
+
+  # Create an SVG visualizing `knownDrvDepMap`. Nodes are identified by derivation
+  # name, so multiple entries can be collapsed if they have the same name.
+  #
+  # Type: [drv] -> drv
+  knownDependencyGraph = name: drvs:
+    let
+      justName = drvPath:
+        builtins.substring
+          (builtins.stringLength builtins.storeDir + 1 + 32 + 1)
+          (builtins.stringLength drvPath)
+          (unsafeDiscardStringContext drvPath);
+
+      gv = pkgs.writeText "${name}-dependency-analysis.gv" ''
+        digraph depot {
+        ${
+          (lib.concatStringsSep "\n"
+          (lib.mapAttrsToList (name: value:
+            if !value.known then ""
+            else lib.concatMapStringsSep "\n"
+              (knownDep: "  \"${justName name}\" -> \"${justName knownDep}\"")
+              value.knownDeps
+          )
+          (depot.nix.dependency-analyzer (
+            drvsToPaths drvs
+          ))))
+        }
+        }
+      '';
+    in
+
+    pkgs.runCommand "${name}-dependency-analysis.svg"
+      {
+        nativeBuildInputs = [
+          pkgs.buildPackages.graphviz
+        ];
+      }
+      "dot -Tsvg < ${gv} > $out";
+in
+
+{
+  __functor = _: knownDrvDepMap;
+
+  inherit knownDependencyGraph plainDrvDepMap drvsToPaths;
+}
diff --git a/nix/dependency-analyzer/examples/ci-targets.nix b/nix/dependency-analyzer/examples/ci-targets.nix
new file mode 100644
index 0000000000..597abd4109
--- /dev/null
+++ b/nix/dependency-analyzer/examples/ci-targets.nix
@@ -0,0 +1,12 @@
+{ depot, lib, ... }:
+
+(
+  depot.nix.dependency-analyzer.knownDependencyGraph
+    "depot"
+    depot.ci.targets
+).overrideAttrs (old: {
+  # Causes an infinite recursion via ci.targets otherwise
+  meta = lib.recursiveUpdate (old.meta or { }) {
+    ci.skip = true;
+  };
+})
diff --git a/nix/dependency-analyzer/examples/lisp.nix b/nix/dependency-analyzer/examples/lisp.nix
new file mode 100644
index 0000000000..775eb9ab57
--- /dev/null
+++ b/nix/dependency-analyzer/examples/lisp.nix
@@ -0,0 +1,5 @@
+{ depot, lib, ... }:
+
+depot.nix.dependency-analyzer.knownDependencyGraph "3p-lisp" (
+  builtins.filter lib.isDerivation (builtins.attrValues depot.third_party.lisp)
+)
diff --git a/nix/dependency-analyzer/tests/default.nix b/nix/dependency-analyzer/tests/default.nix
new file mode 100644
index 0000000000..79ac127e92
--- /dev/null
+++ b/nix/dependency-analyzer/tests/default.nix
@@ -0,0 +1,36 @@
+{ depot, lib, ... }:
+
+let
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    assertEq
+    it
+    ;
+
+  inherit (depot.nix.dependency-analyzer)
+    plainDrvDepMap
+    drvsToPaths
+    ;
+
+  knownDrvs = drvsToPaths (
+    builtins.filter lib.isDerivation (builtins.attrValues depot.third_party.lisp)
+  );
+  exampleMap = plainDrvDepMap knownDrvs;
+
+  # These are needed to index into the attribute set, since attribute names
+  # cannot carry string context.
+  knownDrvsNoContext = builtins.map builtins.unsafeDiscardStringContext knownDrvs;
+in
+
+runTestsuite "dependency-analyzer" [
+  (it "checks plainDrvDepMap properties" [
+    (assertEq "all known drvs are marked known"
+      (builtins.all (drv: exampleMap.${drv}.known) knownDrvsNoContext)
+      true)
+    (assertEq "no unknown drv is marked known"
+      (builtins.all (entry: !entry.known) (
+        builtins.attrValues (builtins.removeAttrs exampleMap knownDrvsNoContext)
+      ))
+      true)
+  ])
+]
diff --git a/nix/drvSeqL/default.nix b/nix/drvSeqL/default.nix
new file mode 100644
index 0000000000..6437e1a043
--- /dev/null
+++ b/nix/drvSeqL/default.nix
@@ -0,0 +1,47 @@
+{ depot, lib, pkgs, ... }:
+
+let
+
+  inherit (depot.nix.yants)
+    defun
+    list
+    drv
+    ;
+
+  /* Realize drvDeps, then return drvOut if that succeeds.
+   * This can be used to make drvOut depend on the
+   * build success of all drvDeps without making each drvDep
+   * a dependency of drvOut.
+   * => drvOut is not rebuilt if drvDep changes
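+   *
+   * A hypothetical usage sketch (the depot attributes below are placeholders):
+   *
+   *   drvSeqL [ depot.someProject.tests ] depot.someProject.binary
+   *   # => the binary's unchanged output, but only once the tests have built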
+   */
+  drvSeqL = defun [ (list drv) drv drv ]
+    (drvDeps: drvOut:
+      let
+        drvOutOutputs = drvOut.outputs or [ "out" ];
+      in
+      pkgs.runCommandLocal drvOut.name
+        {
+          # we inherit all attributes in order to replicate
+          # the original derivation as much as possible
+          outputs = drvOutOutputs;
+          passthru = drvOut.drvAttrs;
+          # depend on drvDeps (by putting it in builder context)
+          inherit drvDeps;
+        }
+        # the outputs of the original derivation are replicated
+        # by creating a symlink to the old output path
+        (lib.concatMapStrings
+          (output: ''
+            target=${lib.escapeShellArg drvOut.${output}}
+            # if the target is already a symlink, follow it until it’s not;
+            # this is done to prevent too many dereferences
+            target=$(readlink -e "$target")
+            # link to the output
+            ln -s "$target" "${"$"}${output}"
+          '')
+          drvOutOutputs));
+
+in
+{
+  __functor = _: drvSeqL;
+}
diff --git a/nix/emptyDerivation/OWNERS b/nix/emptyDerivation/OWNERS
index a742d0d22b..a640227914 100644
--- a/nix/emptyDerivation/OWNERS
+++ b/nix/emptyDerivation/OWNERS
@@ -1,3 +1 @@
-inherited: true
-owners:
-  - Profpatsch
+Profpatsch
diff --git a/nix/emptyDerivation/default.nix b/nix/emptyDerivation/default.nix
index 4165d4fd9a..f808aa228d 100644
--- a/nix/emptyDerivation/default.nix
+++ b/nix/emptyDerivation/default.nix
@@ -1,10 +1,11 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, localSystem, ... }:
 
 let
   emptyDerivation = import ./emptyDerivation.nix {
     inherit pkgs;
     inherit (pkgs) stdenv;
     inherit (depot.nix) getBins;
+    system = localSystem;
   };
 
   tests = import ./tests.nix {
@@ -14,7 +15,8 @@ let
     inherit (depot.nix.runTestsuite) runTestsuite it assertEq;
   };
 
-in {
+in
+{
   __functor = _: emptyDerivation;
   inherit tests;
 }
diff --git a/nix/emptyDerivation/emptyDerivation.nix b/nix/emptyDerivation/emptyDerivation.nix
index 5e84abe2d5..d7de7ccfbc 100644
--- a/nix/emptyDerivation/emptyDerivation.nix
+++ b/nix/emptyDerivation/emptyDerivation.nix
@@ -1,4 +1,4 @@
-{ stdenv, pkgs, getBins }:
+{ stdenv, system, pkgs, getBins }:
 
 # The empty derivation. All it does is touch $out.
 # Basically the unit value for derivations.
@@ -11,22 +11,24 @@
 
 let
   bins = getBins pkgs.s6-portable-utils [ "s6-touch" ]
-      // getBins pkgs.execline [ "importas" "exec" ];
+    // getBins pkgs.execline [ "importas" "exec" ];
 
   emptiness = {
     name = "empty-derivation";
-
-    # TODO(Profpatsch): can we get system from tvl?
-    inherit (stdenv) system;
+    inherit system;
 
     builder = bins.exec;
     args = [
-      bins.importas "out" "out"
-      bins.s6-touch "$out"
+      bins.importas
+      "out"
+      "out"
+      bins.s6-touch
+      "$out"
     ];
   };
 
-in (derivation emptiness) // {
+in
+(derivation emptiness) // {
   # This allows us to call the empty derivation
   # like a function and override fields/add new fields.
   __functor = _: overrides:
diff --git a/nix/emptyDerivation/tests.nix b/nix/emptyDerivation/tests.nix
index 053603b027..a738428824 100644
--- a/nix/emptyDerivation/tests.nix
+++ b/nix/emptyDerivation/tests.nix
@@ -10,10 +10,17 @@ let
   ];
 
   fooOut = emptyDerivation {
-    builder = writeExecline "foo-builder" {} [
-      "importas" "out" "out"
-      "redirfd" "-w" "1" "$out"
-      bins.s6-echo "-n" "foo"
+    builder = writeExecline "foo-builder" { } [
+      "importas"
+      "out"
+      "out"
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
+      bins.s6-echo
+      "-n"
+      "foo"
     ];
   };
 
@@ -26,7 +33,8 @@ let
       "bar")
   ];
 
-in runTestsuite "emptyDerivation" [
+in
+runTestsuite "emptyDerivation" [
   empty
   overrideBuilder
 ]
diff --git a/nix/escapeExecline/OWNERS b/nix/escapeExecline/OWNERS
deleted file mode 100644
index a742d0d22b..0000000000
--- a/nix/escapeExecline/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-inherited: true
-owners:
-  - Profpatsch
diff --git a/nix/escapeExecline/default.nix b/nix/escapeExecline/default.nix
index deef5c2c4e..d2c39dd398 100644
--- a/nix/escapeExecline/default.nix
+++ b/nix/escapeExecline/default.nix
@@ -16,14 +16,17 @@ let
   #   escapeExecline [ "if" [ "somecommand" ] "true" ]
   #   == ''"if" { "somecommand" } "true"''
   escapeExecline = execlineList: lib.concatStringsSep " "
-    (let
-      go = arg:
-        if      builtins.isString arg then [(escapeExeclineArg arg)]
-        else if builtins.isPath arg then [(escapeExeclineArg "${arg}")]
-        else if lib.isDerivation arg then [(escapeExeclineArg arg)]
-        else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
-        else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}";
-     in builtins.concatMap go execlineList);
+    (
+      let
+        go = arg:
+          if builtins.isString arg then [ (escapeExeclineArg arg) ]
+          else if builtins.isPath arg then [ (escapeExeclineArg "${arg}") ]
+          else if lib.isDerivation arg then [ (escapeExeclineArg arg) ]
+          else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
+          else abort "escapeExecline can only handle nested lists of strings, was ${lib.generators.toPretty {} arg}";
+      in
+      builtins.concatMap go execlineList
+    );
 
 in
 escapeExecline
diff --git a/nix/fetchGoModule/OWNERS b/nix/fetchGoModule/OWNERS
deleted file mode 100644
index 47b6a04183..0000000000
--- a/nix/fetchGoModule/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-owners:
-  - edef
diff --git a/nix/fetchGoModule/default.nix b/nix/fetchGoModule/default.nix
deleted file mode 100644
index 84794bbc50..0000000000
--- a/nix/fetchGoModule/default.nix
+++ /dev/null
@@ -1,32 +0,0 @@
-{ lib, pkgs, ... }:
-
-let
-
-  inherit (lib)
-    lowerChars
-    replaceStrings
-    upperChars
-  ;
-
-  caseFold = replaceStrings upperChars (map (c: "!" + c) lowerChars);
-
-in
-
-{ path, version, sha256 }:
-
-pkgs.fetchurl {
-  name = "source";
-  url = "https://proxy.golang.org/${caseFold path}/@v/v${version}.zip";
-  inherit sha256;
-
-  recursiveHash = true;
-  downloadToTemp = true;
-
-  postFetch = ''
-    unpackDir="$TMPDIR/unpack"
-    mkdir "$unpackDir"
-
-    ${pkgs.unzip}/bin/unzip "$downloadedFile" -d "$unpackDir"
-    mv "$unpackDir/${path}@v${version}" "$out"
-  '';
-}
diff --git a/nix/getBins/OWNERS b/nix/getBins/OWNERS
deleted file mode 100644
index a742d0d22b..0000000000
--- a/nix/getBins/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-inherited: true
-owners:
-  - Profpatsch
diff --git a/nix/getBins/default.nix b/nix/getBins/default.nix
index 5ba7584ed8..e354b176c8 100644
--- a/nix/getBins/default.nix
+++ b/nix/getBins/default.nix
@@ -26,14 +26,16 @@
 
 let
   getBins = drv: xs:
-    let f = x:
-      # TODO(Profpatsch): typecheck
-      let x' = if builtins.isString x then { use = x; as = x; } else x;
-      in {
-        name = x'.as;
-        value = "${lib.getBin drv}/bin/${x'.use}";
-      };
-    in builtins.listToAttrs (builtins.map f xs);
+    let
+      f = x:
+        # TODO(Profpatsch): typecheck
+        let x' = if builtins.isString x then { use = x; as = x; } else x;
+        in {
+          name = x'.as;
+          value = "${lib.getBin drv}/bin/${x'.use}";
+        };
+    in
+    builtins.listToAttrs (builtins.map f xs);
 
 
   tests = import ./tests.nix {
@@ -42,7 +44,8 @@ let
     inherit (depot.nix.runTestsuite) assertEq it runTestsuite;
   };
 
-in {
+in
+{
   __functor = _: getBins;
   inherit tests;
 }
diff --git a/nix/getBins/tests.nix b/nix/getBins/tests.nix
index ff81deb5f1..e0f5ab4263 100644
--- a/nix/getBins/tests.nix
+++ b/nix/getBins/tests.nix
@@ -5,11 +5,11 @@ let
   drv2 = writeScriptBin "goodbye" "tschau";
 
   bins = getBins drv [
-            "hello"
-            { use = "hello"; as = "also-hello"; }
-          ]
-      // getBins drv2 [ "goodbye" ]
-      ;
+    "hello"
+    { use = "hello"; as = "also-hello"; }
+  ]
+  // getBins drv2 [ "goodbye" ]
+  ;
 
   simple = it "path is equal to the executable name" [
     (assertEq "path"
@@ -33,8 +33,8 @@ let
   ];
 
 in
-  runTestsuite "getBins" [
-    simple
-    useAs
-    secondDrv
-  ]
+runTestsuite "getBins" [
+  simple
+  useAs
+  secondDrv
+]
diff --git a/nix/lazy-deps/default.nix b/nix/lazy-deps/default.nix
new file mode 100644
index 0000000000..fbdb30b38e
--- /dev/null
+++ b/nix/lazy-deps/default.nix
@@ -0,0 +1,91 @@
+# Helper function to synthesize a directory of "lazy-built" binaries
+# that can be added to $PATH inside of a repository.
+#
+# Using this, a Nix shell environment in some repository can contain
+# several slow-to-build commands without blowing up evaluation and
+# build time whenever the shell is loaded.
+#
+# Note that the generated script is deliberately impure to speed up
+# evaluation, and expects both `git` and `nix-build` to exist in the
+# user's $PATH. If required, this can be done in the shell
+# configuration invoking this function.
+{ pkgs, lib, ... }:
+
+let
+  inherit (builtins) attrNames attrValues mapAttrs;
+  inherit (lib) fix concatStringsSep;
+
+  # Create the case statement for a command invocation, optionally
+  # overriding the `TARGET_TOOL` variable.
+  invoke = name: { attr, cmd ? null }: ''
+    ${name})
+      attr="${attr}"
+      ${if cmd != null then "TARGET_TOOL=\"${cmd}\"\n;;" else ";;"}
+  '';
+
+  # Create a command to symlink each tool name to the dispatch script.
+  link = name: "ln -s $target $out/bin/${name}";
+
+  invocations = tools: concatStringsSep "\n" (attrValues (mapAttrs invoke tools));
+in
+fix (self:
+
+# Attribute set of tools that should be lazily-added to the $PATH.
+#
+# The name of each attribute is used as the command name (on $PATH).
+# It must contain the keys 'attr' (containing the Nix attribute path
+# to the tool's derivation from the top-level), and may optionally
+# contain the key 'cmd' to override the name of the binary inside the
+# derivation.
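+#
+# A hypothetical invocation (the attribute paths and tool names are
+# placeholders):
+#
+#   depot.nix.lazy-deps {
+#     terraform.attr = "third_party.terraform";
+#     deploy = { attr = "ops.deploy"; cmd = "deploy-tool"; };
+#   }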
+tools:
+
+pkgs.runCommandNoCC "lazy-dispatch"
+{
+  passthru.overrideDeps = newTools: self (tools // newTools);
+  passthru.tools = tools;
+
+  text = ''
+    #!${pkgs.runtimeShell}
+    set -ue
+
+    if ! type git>/dev/null || ! type nix-build>/dev/null; then
+      echo "The 'git' and 'nix-build' commands must be available." >&2
+      exit 127
+    fi
+
+    readonly REPO_ROOT=$(git rev-parse --show-toplevel)
+    TARGET_TOOL=$(basename "$0")
+
+    case "''${TARGET_TOOL}" in
+    ${invocations tools}
+    *)
+      echo "''${TARGET_TOOL} is currently not installed in this repository." >&2
+      exit 127
+      ;;
+    esac
+
+    result=$(nix-build --no-out-link --attr "''${attr}" "''${REPO_ROOT}")
+    PATH="''${result}/bin:$PATH"
+    exec "''${TARGET_TOOL}" "''${@}"
+  '';
+
+  # Access this to get a compatible nix-shell
+  passthru.devShell = pkgs.mkShellNoCC {
+    name = "${self.name}-shell";
+    packages = [ self ];
+  };
+}
+  ''
+    # Write the dispatch code
+    target=$out/bin/__dispatch
+    mkdir -p "$(dirname "$target")"
+    echo "$text" > $target
+    chmod +x $target
+
+    # Add symlinks from all the tools to the dispatch
+    ${concatStringsSep "\n" (map link (attrNames tools))}
+
+    # Check that it's working-ish
+    ${pkgs.stdenv.shellDryRun} $target
+  ''
+)
diff --git a/nix/mergePatch/default.nix b/nix/mergePatch/default.nix
new file mode 100644
index 0000000000..d56106925a
--- /dev/null
+++ b/nix/mergePatch/default.nix
@@ -0,0 +1,192 @@
+{ lib, depot, ... }:
+/*
+  JSON Merge-Patch for nix
+  Spec: https://tools.ietf.org/html/rfc7396
+
+  An algorithm for changing and removing fields in nested objects.
+
+  For example, given the following original document:
+
+  {
+    a = "b";
+    c = {
+      d = "e";
+      f = "g";
+    };
+  }
+
+  Changing the value of `a` and removing `f` can be achieved by merging the patch
+
+  {
+    a = "z";
+    c.f = null;
+  }
+
+  which results in
+
+  {
+    a = "z";
+    c = {
+      d = "e";
+    };
+  }
+
+  Pseudo-code:
+  define MergePatch(Target, Patch):
+      if Patch is an Object:
+        if Target is not an Object:
+          Target = {} # Ignore the contents and set it to an empty Object
+        for each Name/Value pair in Patch:
+          if Value is null:
+            if Name exists in Target:
+              remove the Name/Value pair from Target
+          else:
+            Target[Name] = MergePatch(Target[Name], Value)
+        return Target
+      else:
+        return Patch
+*/
+
+let
+  foldlAttrs = op: init: attrs:
+    lib.foldl' op init
+      (lib.mapAttrsToList lib.nameValuePair attrs);
+
+  mergePatch = target: patch:
+    if lib.isAttrs patch
+    then
+      let target' = if lib.isAttrs target then target else { };
+      in foldlAttrs
+        (acc: patchEl:
+          if patchEl.value == null
+          then removeAttrs acc [ patchEl.name ]
+          else acc // {
+            ${patchEl.name} =
+              mergePatch
+                (acc.${patchEl.name} or "unused")
+                patchEl.value;
+          })
+        target'
+        patch
+    else patch;
+
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    it
+    assertEq
+    ;
+
+  tests =
+    let
+      # example target from the RFC
+      testTarget = {
+        a = "b";
+        c = {
+          d = "e";
+          f = "g";
+        };
+      };
+      # example patch from the RFC
+      testPatch = {
+        a = "z";
+        c.f = null;
+      };
+      emptyPatch = it "the empty patch returns the original target" [
+        (assertEq "id"
+          (mergePatch testTarget { })
+          testTarget)
+      ];
+      nonAttrs = it "one side is a non-attrset value" [
+        (assertEq "target is a value means the value is replaced by the patch"
+          (mergePatch 42 testPatch)
+          (mergePatch { } testPatch))
+        (assertEq "patch is a value means it replaces target alltogether"
+          (mergePatch testTarget 42)
+          42)
+      ];
+      rfcExamples = it "the examples from the RFC" [
+        (assertEq "a subset is deleted and overwritten"
+          (mergePatch testTarget testPatch)
+          {
+            a = "z";
+            c = {
+              d = "e";
+            };
+          })
+        (assertEq "a more complicated example from the example section"
+          (mergePatch
+            {
+              title = "Goodbye!";
+              author = {
+                givenName = "John";
+                familyName = "Doe";
+              };
+              tags = [ "example" "sample" ];
+              content = "This will be unchanged";
+            }
+            {
+              title = "Hello!";
+              phoneNumber = "+01-123-456-7890";
+              author.familyName = null;
+              tags = [ "example" ];
+            })
+          {
+            title = "Hello!";
+            phoneNumber = "+01-123-456-7890";
+            author = {
+              givenName = "John";
+            };
+            tags = [ "example" ];
+            content = "This will be unchanged";
+          })
+      ];
+
+      rfcTests =
+        let
+          r = index: target: patch: res:
+            (assertEq "test number ${toString index}"
+              (mergePatch target patch)
+              res);
+        in
+        it "the test suite from the RFC" [
+          (r 1 { "a" = "b"; } { "a" = "c"; } { "a" = "c"; })
+          (r 2 { "a" = "b"; } { "b" = "c"; } { "a" = "b"; "b" = "c"; })
+          (r 3 { "a" = "b"; } { "a" = null; } { })
+          (r 4 { "a" = "b"; "b" = "c"; }
+            { "a" = null; }
+            { "b" = "c"; })
+          (r 5 { "a" = [ "b" ]; } { "a" = "c"; } { "a" = "c"; })
+          (r 6 { "a" = "c"; } { "a" = [ "b" ]; } { "a" = [ "b" ]; })
+          (r 7 { "a" = { "b" = "c"; }; }
+            { "a" = { "b" = "d"; "c" = null; }; }
+            { "a" = { "b" = "d"; }; })
+          (r 8 { "a" = [{ "b" = "c"; }]; }
+            { "a" = [ 1 ]; }
+            { "a" = [ 1 ]; })
+          (r 9 [ "a" "b" ] [ "c" "d" ] [ "c" "d" ])
+          (r 10 { "a" = "b"; } [ "c" ] [ "c" ])
+          (r 11 { "a" = "foo"; } null null)
+          (r 12 { "a" = "foo"; } "bar" "bar")
+          (r 13 { "e" = null; } { "a" = 1; } { "e" = null; "a" = 1; })
+          (r 14 [ 1 2 ]
+            { "a" = "b"; "c" = null; }
+            { "a" = "b"; })
+          (r 15 { }
+            { "a" = { "bb" = { "ccc" = null; }; }; }
+            { "a" = { "bb" = { }; }; })
+        ];
+
+    in
+    runTestsuite "mergePatch" [
+      emptyPatch
+      nonAttrs
+      rfcExamples
+      rfcTests
+    ];
+
+in
+{
+  __functor = _: mergePatch;
+
+  inherit tests;
+}
diff --git a/nix/netstring/attrsToKeyValList.nix b/nix/netstring/attrsToKeyValList.nix
new file mode 100644
index 0000000000..c854b56955
--- /dev/null
+++ b/nix/netstring/attrsToKeyValList.nix
@@ -0,0 +1,33 @@
+{ depot, lib, ... }:
+
+# Convert an attrset of strings to a list of key/value netstring pairs.
+# A minimally viable JSON replacement if all you need is to iterate over key/value pairs.
+# You can use e.g. `forstdin -Ed '' item` in execline to split the items
+# and then get the key and value via `multidefine -d '' $item { key value }`
+#
+# Example:
+#   { foo = "bar"; x = "abc"; }
+#   => "12:3:foo,3:bar,,10:1:x,3:abc,,"
+#
+# Example with runExecline:
+#   nix.runExecline "test" {
+#     stdin = nix.netstring.attrsToKeyValList {
+#       foo = "bar";
+#       x = "abc";
+#     };
+#   } [
+#     "forstdin" "-Ed" "" "item"
+#     "multidefine" "-d" "" "$item" [ "key" "value" ]
+#     "${pkgs.coreutils}/bin/echo" "\${key} -> \${value}"
+#   ]
+
+#   will print:
+#     foo -> bar
+#     x -> abc
+attrs:
+lib.concatStrings
+  (lib.mapAttrsToList
+    (k: v: depot.nix.netstring.fromString
+      (depot.nix.netstring.fromString k
+        + depot.nix.netstring.fromString v))
+    attrs)
diff --git a/nix/netstring/fromString.nix b/nix/netstring/fromString.nix
new file mode 100644
index 0000000000..dcd688a8b0
--- /dev/null
+++ b/nix/netstring/fromString.nix
@@ -0,0 +1,10 @@
+{ ... }:
+# convert any nix string into a netstring
+# (prefixed by its length) according to https://en.wikipedia.org/wiki/Netstring
+#
+# Examples:
+#   netstring.fromString "foo"
+#   => "3:foo,"
+#   netstring.fromString ""
+#   => "0:,"
+s: "${toString (builtins.stringLength s)}:${s},"
diff --git a/nix/nint/OWNERS b/nix/nint/OWNERS
new file mode 100644
index 0000000000..2e95807063
--- /dev/null
+++ b/nix/nint/OWNERS
@@ -0,0 +1 @@
+sterni
diff --git a/nix/nint/README.md b/nix/nint/README.md
new file mode 100644
index 0000000000..369a827619
--- /dev/null
+++ b/nix/nint/README.md
@@ -0,0 +1,93 @@
+# nint — Nix INTerpreter
+
+`nint` is a shebang-compatible interpreter for nix. It is currently
+implemented as a fairly trivial wrapper around `nix-instantiate --eval`.
+It allows running nix expressions as command line tools if they conform
+to the following calling convention:
+
+* Every nix script needs to evaluate to a function which takes an
+  attribute set as its single argument. Ideally a set pattern with
+  an ellipsis should be used. By default `nint` passes the following
+  arguments:
+
+  * `currentDir`: the current working directory as a nix path
+  * `argv`: a list of arguments to the invocation including the
+    program name at `builtins.head argv`.
+  * Extra arguments can be manually passed as described below.
+
+* The return value must either be
+
+  * A string which is rendered to `stdout`.
+
+  * An attribute set with the following optional attributes:
+
+    * `stdout`: A string that's rendered to `stdout`
+    * `stderr`: A string that's rendered to `stderr`
+    * `exit`: A number which is used as an exit code.
+      If missing, nint always exits with 0 (or equivalent).
+
+## Usage
+
+```
+nint [ --arg ARG VALUE … ] script.nix [ ARGS … ]
+```
+
+Instead of `--arg`, `--argstr` can also be used. They both work
+like the flags of the same name for `nix-instantiate` and may
+be specified any number of times as long as they are passed
+*before* the nix expression to run.
+
+Below is a shebang which also passes `depot` as an argument
+(note the usage of `env -S` to get around the shebang limitation
+to two arguments).
+
+```nix
+#!/usr/bin/env -S nint --arg depot /path/to/depot
+```
+
+## Limitations
+
+* No side effects except for writing to `stdout`.
+
+* Output is not streaming, i.e. even if the output is incrementally
+  calculated, nothing will be printed until the full output is available.
+  With plain nix strings we can't do better anyway.
+
+* Limited error handling for the script, no way to set the exit code etc.
+
+Some of these limitations may be possible to address in the future by using
+an alternative nix interpreter and a more elaborate calling convention.
+
+## Example
+
+Below is a (very simple) implementation of a `ls(1)`-like program in nix:
+
+```nix
+#!/usr/bin/env nint
+{ currentDir, argv, ... }:
+
+let
+  lib = import <nixpkgs/lib>;
+
+  dirs =
+    let
+      args = builtins.tail argv;
+    in
+      if args == []
+      then [ currentDir ]
+      else args;
+
+  makeAbsolute = p:
+    if builtins.isPath p
+    then p
+    else if builtins.match "^/.*" p != null
+    then p
+    else "${toString currentDir}/${p}";
+in
+
+  lib.concatStringsSep "\n"
+    (lib.flatten
+      (builtins.map
+        (d: (builtins.attrNames (builtins.readDir (makeAbsolute d))))
+        dirs)) + "\n"
+```
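+
+For completeness, here is a minimal sketch of the attribute set return form
+described above (the messages and exit code are arbitrary):
+
+```nix
+#!/usr/bin/env nint
+{ argv, ... }:
+
+{
+  stdout = "ran as ${builtins.head argv}\n";
+  stderr = "something went mildly wrong\n";
+  exit = 1;
+}
+```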
diff --git a/nix/nint/default.nix b/nix/nint/default.nix
new file mode 100644
index 0000000000..0087fc0416
--- /dev/null
+++ b/nix/nint/default.nix
@@ -0,0 +1,16 @@
+{ depot, pkgs, ... }:
+
+let
+  inherit (depot.nix.writers)
+    rustSimpleBin
+    ;
+in
+
+rustSimpleBin
+{
+  name = "nint";
+  dependencies = [
+    depot.third_party.rust-crates.serde_json
+  ];
+}
+  (builtins.readFile ./nint.rs)
diff --git a/nix/nint/nint.rs b/nix/nint/nint.rs
new file mode 100644
index 0000000000..abb0153c3a
--- /dev/null
+++ b/nix/nint/nint.rs
@@ -0,0 +1,149 @@
+extern crate serde_json;
+
+use serde_json::Value;
+use std::convert::TryFrom;
+use std::ffi::OsString;
+use std::io::{stderr, stdout, Error, ErrorKind, Write};
+use std::os::unix::ffi::{OsStrExt, OsStringExt};
+use std::process::Command;
+
+fn render_nix_string(s: &OsString) -> OsString {
+    let mut rendered = Vec::new();
+
+    rendered.extend(b"\"");
+
+    for b in s.as_os_str().as_bytes() {
+        match char::from(*b) {
+            '\"' => rendered.extend(b"\\\""),
+            '\\' => rendered.extend(b"\\\\"),
+            '$' => rendered.extend(b"\\$"),
+            _ => rendered.push(*b),
+        }
+    }
+
+    rendered.extend(b"\"");
+
+    OsString::from_vec(rendered)
+}
+
+fn render_nix_list(arr: &[OsString]) -> OsString {
+    let mut rendered = Vec::new();
+
+    rendered.extend(b"[ ");
+
+    for el in arr {
+        rendered.extend(render_nix_string(el).as_os_str().as_bytes());
+        rendered.extend(b" ");
+    }
+
+    rendered.extend(b"]");
+
+    OsString::from_vec(rendered)
+}
+
+/// Slightly overkill helper macro which takes a `Map<String, Value>` obtained
+/// from `Value::Object` and an output name (`stderr` or `stdout`) as an
+/// identifier. If a value exists for the given output in the object it gets
+/// written to the appropriate output.
+macro_rules! handle_set_output {
+    ($map_name:ident, $output_name:ident) => {
+        match $map_name.get(stringify!($output_name)) {
+            Some(Value::String(s)) => $output_name().write_all(s.as_bytes()),
+            Some(_) => Err(Error::new(
+                ErrorKind::Other,
+                format!("Attribute {} must be a string!", stringify!($output_name)),
+            )),
+            None => Ok(()),
+        }
+    };
+}
+
+fn main() -> std::io::Result<()> {
+    let mut nix_args = Vec::new();
+
+    let mut args = std::env::args_os().into_iter();
+    let mut in_args = true;
+
+    let mut argv: Vec<OsString> = Vec::new();
+
+    // skip argv[0]
+    args.next();
+
+    loop {
+        let arg = match args.next() {
+            Some(a) => a,
+            None => break,
+        };
+
+        if !arg.to_str().map(|s| s.starts_with("-")).unwrap_or(false) {
+            in_args = false;
+        }
+
+        if in_args {
+            match (arg.to_str()) {
+                Some("--arg") | Some("--argstr") => {
+                    nix_args.push(arg);
+                    nix_args.push(args.next().unwrap());
+                    nix_args.push(args.next().unwrap());
+                    Ok(())
+                }
+                _ => Err(Error::new(ErrorKind::Other, "unknown argument")),
+            }?
+        } else {
+            argv.push(arg);
+        }
+    }
+
+    if argv.len() < 1 {
+        Err(Error::new(ErrorKind::Other, "missing argv"))
+    } else {
+        let cd = std::env::current_dir()?.into_os_string();
+
+        nix_args.push(OsString::from("--arg"));
+        nix_args.push(OsString::from("currentDir"));
+        nix_args.push(cd);
+
+        nix_args.push(OsString::from("--arg"));
+        nix_args.push(OsString::from("argv"));
+        nix_args.push(render_nix_list(&argv[..]));
+
+        nix_args.push(OsString::from("--eval"));
+        nix_args.push(OsString::from("--strict"));
+        nix_args.push(OsString::from("--json"));
+
+        nix_args.push(argv[0].clone());
+
+        let run = Command::new("nix-instantiate").args(nix_args).output()?;
+
+        match serde_json::from_slice(&run.stdout[..]) {
+            Ok(Value::String(s)) => stdout().write_all(s.as_bytes()),
+            Ok(Value::Object(m)) => {
+                handle_set_output!(m, stdout)?;
+                handle_set_output!(m, stderr)?;
+
+                match m.get("exit") {
+                    Some(Value::Number(n)) => {
+                        let code = n.as_i64().and_then(|v| i32::try_from(v).ok());
+
+                        match code {
+                            Some(i) => std::process::exit(i),
+                            None => {
+                                Err(Error::new(ErrorKind::Other, "Attribute exit is not an i32"))
+                            }
+                        }
+                    }
+                    Some(_) => Err(Error::new(ErrorKind::Other, "exit must be a number")),
+                    None => Ok(()),
+                }
+            }
+            Ok(_) => Err(Error::new(
+                ErrorKind::Other,
+                "output must be a string or an object",
+            )),
+            _ => {
+                stderr().write_all(&run.stderr[..]);
+                Err(Error::new(ErrorKind::Other, "internal nix error"))
+            }
+        }
+    }
+}
diff --git a/nix/nix-1p/README.md b/nix/nix-1p/README.md
new file mode 100644
index 0000000000..309eddb51e
--- /dev/null
+++ b/nix/nix-1p/README.md
@@ -0,0 +1,648 @@
+> [!TIP]
+> Are you interested in hacking on Nix projects for a week, together
+> with other Nix users? Do you have time at the end of August? Great,
+> come join us at [Volga Sprint](https://volgasprint.org/)!
+
+Nix - A One Pager
+=================
+
+[Nix](https://nixos.org/nix/), the package manager, is built on and with Nix,
+the language. This page serves as a fast intro to most of the (small) language.
+
+Unless otherwise specified, the word "Nix" refers only to the language below.
+
+Please file an issue if something in here confuses you or you think something
+important is missing.
+
+If you have Nix installed, you can try the examples below by running `nix repl`
+and entering code snippets there.
+
+<!-- markdown-toc start - Don't edit this section. Run M-x markdown-toc-refresh-toc -->
+**Table of Contents**
+
+- [Overview](#overview)
+- [Language constructs](#language-constructs)
+    - [Primitives / literals](#primitives--literals)
+    - [Operators](#operators)
+        - [`//` (merge) operator](#-merge-operator)
+    - [Variable bindings](#variable-bindings)
+    - [Functions](#functions)
+        - [Multiple arguments (currying)](#multiple-arguments-currying)
+        - [Multiple arguments (attribute sets)](#multiple-arguments-attribute-sets)
+    - [`if ... then ... else ...`](#if--then--else-)
+    - [`inherit` keyword](#inherit-keyword)
+    - [`with` statements](#with-statements)
+    - [`import` / `NIX_PATH` / `<entry>`](#import--nix_path--entry)
+    - [`or` expressions](#or-expressions)
+- [Standard libraries](#standard-libraries)
+    - [`builtins`](#builtins)
+    - [`pkgs.lib`](#pkgslib)
+    - [`pkgs` itself](#pkgs-itself)
+- [Derivations](#derivations)
+- [Nix Idioms](#nix-idioms)
+    - [File lambdas](#file-lambdas)
+    - [`callPackage`](#callpackage)
+    - [Overrides / Overlays](#overrides--overlays)
+
+<!-- markdown-toc end -->
+
+
+# Overview
+
+Nix is:
+
+*   **purely functional**. It has no concept of sequential steps being executed;
+    any dependency between operations is established by depending on *data* from
+    previous operations.
+
+    Any valid piece of Nix code is an *expression* that returns a value.
+
+    Evaluating a Nix expression *yields a single data structure*, it does not
+    execute a sequence of operations.
+
+    Every Nix file evaluates to a *single expression*.
+*   **lazy**. It will only evaluate expressions when their result is actually
+    requested.
+
+    For example, the builtin function `throw` causes evaluation to stop.
+    Entering the following expression works fine however, because we never
+    actually ask for the part of the structure that causes the `throw`.
+
+    ```nix
+    let attrs = { a = 15; b = builtins.throw "Oh no!"; };
+    in "The value of 'a' is ${toString attrs.a}"
+    ```
+*   **purpose-built**. Nix only exists to be the language for Nix, the package
+    manager. While people have occasionally used it for other use-cases, it is
+    explicitly not a general-purpose language.
+
+# Language constructs
+
+This section describes the language constructs in Nix. It is a small language
+and most of these should be self-explanatory.
+
+## Primitives / literals
+
+Nix has a handful of data types which can be represented literally in source
+code, similar to many other languages.
+
+```nix
+# numbers
+42
+1.72394
+
+# strings & paths
+"hello"
+./some-file.json
+
+# strings support interpolation
+"Hello ${name}"
+
+# multi-line strings (common prefix whitespace is dropped)
+''
+first line
+second line
+''
+
+# lists (note: no commas!)
+[ 1 2 3 ]
+
+# attribute sets (field access with dot syntax)
+{ a = 15; b = "something else"; }
+
+# recursive attribute sets (fields can reference each other)
+rec { a = 15; b = a * 2; }
+```
+
+## Operators
+
+Nix has several operators, most of which are unsurprising (the less common
+`?` and `->` are illustrated right after the table):
+
+| Syntax                    | Description                                                                 |
+|---------------------------|-----------------------------------------------------------------------------|
+| `+`, `-`, `*`, `/`        | Numerical operations                                                        |
+| `+`                       | String concatenation                                                        |
+| `++`                      | List concatenation                                                          |
+| `==`                      | Equality                                                                    |
+| `>`, `>=`, `<`, `<=`      | Ordering comparators                                                        |
+| `&&`                      | Logical `AND`                                                               |
+| <code>&vert;&vert;</code> | Logical `OR`                                                                |
+| `e1 -> e2`                | Logical implication (i.e. <code>!e1 &vert;&vert; e2</code>)                 |
+| `!`                       | Boolean negation                                                            |
+| `set.attr`                | Access attribute `attr` in attribute set `set`                              |
+| `set ? attribute`         | Test whether attribute set contains an attribute                            |
+| `left // right`           | Merge `left` & `right` attribute sets, with the right set taking precedence |
+
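+As a quick illustration of the `?` and `->` entries (the attribute names are
+arbitrary):
+
+```nix
+{ a = 1; } ? a    # yields true
+{ a = 1; } ? b    # yields false
+false -> true     # yields true (an implication with a false premise holds)
+```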
+
+### `//` (merge) operator
+
+The `//`-operator is used pervasively in Nix code. You should familiarise
+yourself with it, as it is likely also the least familiar one.
+
+It merges the left and right attribute sets given to it:
+
+```nix
+{ a = 1; } // { b = 2; }
+
+# yields { a = 1; b = 2; }
+```
+
+Values from the right side take precedence:
+
+```nix
+{ a = "left"; } // { a = "right"; }
+
+# yields { a = "right"; }
+```
+
+The merge operator does *not* recursively merge attribute sets:
+
+```nix
+{ a = { b = 1; }; } // { a = { c = 2; }; }
+
+# yields { a = { c = 2; }; }
+```
+
+Helper functions for recursive merging exist in the [`lib` library](#pkgslib).
+
+## Variable bindings
+
+Bindings in Nix are introduced locally via `let` expressions, which make some
+variables available within a given scope.
+
+For example:
+
+```nix
+let
+  a = 15;
+  b = 2;
+in a * b
+
+# yields 30
+```
+
+Variables are immutable. This means that after defining what `a` or `b` are, you
+cannot *modify* their value in the scope in which they are available.
+
+You can nest `let`-expressions to shadow variables.
+
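+For example (a minimal sketch, the names are arbitrary):
+
+```nix
+let a = 1;
+in let a = 2; # the inner 'a' shadows the outer one within this scope
+in a
+
+# yields 2
+```
+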
+Variables are *not* available outside of the scope of the `let` expression.
+There are no global variables.
+
+## Functions
+
+All functions in Nix are anonymous lambdas. This means that they are treated
+just like data. Giving them names is accomplished by assigning them to
+variables, or setting them as values in an attribute set (more on that below).
+
+```
+# simple function
+# declaration is simply the argument followed by a colon
+name: "Hello ${name}"
+```
+
+### Multiple arguments (currying)
+
+Technically any Nix function can only accept **one argument**. Sometimes
+however, a function needs multiple arguments. This is achieved in Nix via
+[currying][], which means to create a function with one argument, that returns a
+function with another argument, that returns ... and so on.
+
+For example:
+
+```nix
+name: age: "${name} is ${toString age} years old"
+```
+
+An additional benefit of this approach is that you can pass one parameter to a
+curried function, and receive back a function that you can re-use (similar to
+partial application):
+
+```nix
+let
+  multiply = a: b: a * b;
+  doubleIt = multiply 2; # at this point we have passed in the value for 'a' and
+                         # receive back another function that still expects 'b'
+in
+  doubleIt 15
+
+# yields 30
+```
+
+### Multiple arguments (attribute sets)
+
+Another way of specifying multiple arguments to a function in Nix is to make it
+accept an attribute set, which enables multiple other features:
+
+```nix
+{ name, age }: "${name} is ${toString age} years old"
+```
+
+Using this method, we gain the ability to specify default arguments (so that
+callers can omit them):
+
+```nix
+{ name, age ? 42 }: "${name} is ${toString age} years old"
+
+```
+
+Or in practice:
+
+```nix
+let greeter =  { name, age ? 42 }: "${name} is ${toString age} years old";
+in greeter { name = "Slartibartfast"; }
+
+# yields "Slartibartfast is 42 years old"
+# (note: Slartibartfast is actually /significantly/ older)
+```
+
+Additionally we can introduce an ellipsis using `...`, meaning that we can
+accept an attribute set as our input that contains more variables than are
+needed for the function.
+
+```nix
+let greeter = { name, age, ... }: "${name} is ${toString age} years old";
+    person = {
+      name = "Slartibartfast";
+      age = 42;
+      # the 'email' attribute is not expected by the 'greeter' function ...
+      email = "slartibartfast@magrath.ea";
+    };
+in greeter person # ... but the call works due to the ellipsis.
+```
+
+Nix also supports binding the whole set of passed in attributes to a
+parameter using the `@` syntax:
+
+```nix
+let func = { name, age, ... }@args: builtins.attrNames args;
+in func {
+    name = "Slartibartfast";
+    age = 42;
+    email = "slartibartfast@magrath.ea";
+}
+
+# yields: [ "age" "email" "name" ]
+```
+
+**Warning:** Combining the `@` syntax with default arguments can lead
+to surprising behaviour, as the passed attributes are bound verbatim.
+This means that defaulted arguments are not included in the bound
+attribute set:
+
+```nix
+({ a ? 1, b }@args: args.a) { b = 1; }
+# throws: error: attribute 'a' missing
+
+({ a ? 1, b }@args: args.a) { b = 1; a = 2; }
+# => 2
+```
+
+## `if ... then ... else ...`
+
+Nix has simple conditional support. Note that `if` is an **expression** in Nix,
+which means that both branches must be specified.
+
+```nix
+if someCondition
+then "it was true"
+else "it was false"
+```
+
+## `inherit` keyword
+
+The `inherit` keyword is used in attribute sets or `let` bindings to "inherit"
+variables from the parent scope.
+
+In short, a statement like `inherit foo;` expands to `foo = foo;`.
+
+Consider this example:
+
+```nix
+let
+  name = "Slartibartfast";
+  # ... other variables
+in {
+  name = name; # set the attribute set key 'name' to the value of the 'name' var
+  # ... other attributes
+}
+```
+
+The `name = name;` line can be replaced with `inherit name;`:
+
+```nix
+let
+  name = "Slartibartfast";
+  # ... other variables
+in {
+  inherit name;
+  # ... other attributes
+}
+```
+
+This is often convenient, especially because `inherit` supports multiple variables
+at the same time as well as "inheritance" from other attribute sets:
+
+```nix
+{
+  inherit name age; # equivalent to `name = name; age = age;`
+  inherit (otherAttrs) email; # equivalent to `email = otherAttrs.email`;
+}
+```
+
+## `with` statements
+
+The `with` statement "imports" all attributes from an attribute set into
+variables of the same name:
+
+```nix
+let attrs = { a = 15; b = 2; };
+in with attrs; a + b # 'a' and 'b' become variables in the scope following 'with'
+```
+
+The scope of a `with`-"block" is the expression immediately following the
+semicolon, i.e.:
+
+```nix
+let attrs = { /* some attributes */ };
+in with attrs; (/* this is the scope of the `with` */)
+```
+
+## `import` / `NIX_PATH` / `<entry>`
+
+Nix files can import each other by using the builtin `import` function and a
+literal path:
+
+```nix
+# assuming there is a file lib.nix with some useful functions
+let myLib = import ./lib.nix;
+in myLib.usefulFunction 42
+```
+
+The `import` function will read and evaluate the file, and return its Nix value.
+
+Nix files often begin with a function header to pass parameters into the rest of
+the file, so you will often see imports of the form `import ./some-file { ... }`.
+
+Nix has a concept of a `NIX_PATH` (similar to the standard `PATH` environment
+variable) which contains named aliases for file paths containing Nix
+expressions.
+
+In a standard Nix installation, several [channels][] will be present (for
+example `nixpkgs` or `nixos-unstable`) on the `NIX_PATH`.
+
+`NIX_PATH` entries can be accessed using the `<entry>` syntax, which simply
+evaluates to their file path:
+
+```nix
+<nixpkgs>
+# might yield something like `/home/tazjin/.nix-defexpr/channels/nixpkgs`
+```
+
+This is commonly used to import from channels:
+
+```nix
+let pkgs = import <nixpkgs> {};
+in pkgs.something
+```
+
+## `or` expressions
+
+Nix has a keyword called `or` which can be used to access a value from an
+attribute set while providing a fallback to a default value.
+
+The syntax is simple:
+
+```nix
+# Access an existing attribute
+let set = { a = 42; };
+in set.a or 23
+```
+
+Since the attribute `a` exists, this will return `42`.
+
+
+```nix
+# ... or fall back to a default if there is no such key
+let set = { };
+in set.a or 23
+```
+
+Since the attribute `a` does not exist, this will fall back to returning the
+default value `23`.
+
+Note that `or` expressions also work for nested attribute set access.
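+For instance (the attribute names are arbitrary):
+
+```nix
+let set = { a.b = 42; };
+in [ (set.a.b or 23) (set.a.c or 23) (set.x.y or 23) ]
+
+# yields [ 42 23 23 ], the fallback applies even if an intermediate
+# attribute (like 'x') is missing entirely
+```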
+
+# Standard libraries
+
+Yes, libraries, plural.
+
+Nix has three major things that could be considered its standard library and
+while there's a lot of debate to be had about this point, you still need to know
+all three.
+
+## `builtins`
+
+Nix comes with several functions that are baked into the language. These work
+regardless of which other Nix code you may or may not have imported.
+
+Most of these functions are implemented in the Nix interpreter itself, which
+means that they are rather fast when compared to some of the equivalents that
+are implemented in Nix code.
+
+The Nix manual has [a section listing all `builtins`][builtins] and their usage.
+
+Examples of builtins that you will commonly encounter include, but are not
+limited to:
+
+* `derivation` (see [Derivations](#derivations))
+* `toJSON` / `fromJSON`
+* `toString`
+* `toPath` / `fromPath`
+
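+For instance, `toJSON` renders a Nix value as a JSON string:
+
+```nix
+builtins.toJSON { name = "Slartibartfast"; age = 42; }
+
+# yields "{\"age\":42,\"name\":\"Slartibartfast\"}"
+```
+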
+The builtins also include several functions that have the (spooky) ability to
+break Nix's evaluation purity. No functions written in Nix itself can do this.
+
+Examples of those include:
+
+* `fetchGit` which can fetch a git-repository using the environment's default
+  git/ssh configuration
+* `fetchTarball` which can fetch & extract archives without having to specify
+  hashes
+
+Read through the manual linked above to get the full overview.
+
+## `pkgs.lib`
+
+The Nix package set, commonly referred to by Nixers simply as [nixpkgs][],
+contains a child attribute set called `lib` which provides a large number of
+useful functions.
+
+The canonical definition of these functions is [their source code][lib-src]. I
+wrote a tool ([nixdoc][]) in 2018 which generates manual entries for these
+functions; however, not all of the files are included as of July 2019.
+
+See the [Nixpkgs manual entry on `lib`][lib-manual] for the documentation.
+
+These functions include various utilities for dealing with the data types in Nix
+(lists, attribute sets, strings etc.) and it is useful to at least skim through
+them to familiarise yourself with what is available.
+
+```nix
+{ pkgs ? import <nixpkgs> {} }:
+
+with pkgs.lib; # bring the contents of pkgs.lib into scope
+
+strings.toUpper "hello"
+
+# yields "HELLO"
+```
+
+## `pkgs` itself
+
+The Nix package set itself does not just contain packages, but also many useful
+functions which you might run into while creating new Nix packages.
+
+One particular subset of these that stands out are the [trivial builders][],
+which provide utilities for writing text files or shell scripts, running shell
+commands and capturing their output and so on.
+
+```nix
+{ pkgs ? import <nixpkgs> {} }:
+
+pkgs.writeText "hello.txt" "Hello dear reader!"
+
+# yields a derivation which creates a text file with the above content
+```
+
+# Derivations
+
+When a Nix expression is evaluated it may yield one or more *derivations*.
+Derivations describe a single build action that, when run, places one or more
+outputs (whether they be files or folders) in the Nix store.
+
+The builtin function `derivation` is responsible for creating derivations at a
+lower level. Usually when Nix users create derivations they will use the
+higher-level functions such as [stdenv.mkDerivation][smkd].
+
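+A minimal sketch of such a higher-level build (the name, script and install
+commands are illustrative assumptions, not from this repository):
+
+```nix
+pkgs.stdenv.mkDerivation {
+  name = "my-tool";
+  src = ./.; # use the local directory as the source
+
+  # without a Makefile, only this phase does any real work
+  installPhase = ''
+    mkdir -p $out/bin
+    cp my-tool.sh $out/bin/my-tool
+  '';
+}
+```
+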
+Please see the manual [on derivations][drv-manual] for more information, as the
+general build logic is out of scope for this document.
+
+# Nix Idioms
+
+There are several idioms in Nix which are not technically part of the language
+specification, but will commonly be encountered in the wild.
+
+This section is an (incomplete) list of them.
+
+## File lambdas
+
+It is customary to start every file with a function header that receives the
+file's dependencies, instead of importing them directly in the file.
+
+Sticking to this pattern lets users of your code easily change out, for example,
+the specific version of `nixpkgs` that is used.
+
+A common file header pattern would look like this:
+
+```nix
+{ pkgs ? import <nixpkgs> {} }:
+
+# ... 'pkgs' is then used in the code
+```
+
+In some sense, you might consider the function header of a file to be its "API".
+
+## `callPackage`
+
+Building on the previous pattern, there is a custom in nixpkgs of specifying the
+dependencies of your file explicitly instead of accepting the entire package
+set.
+
+For example, a file containing build instructions for a tool that needs the
+standard build environment and `libsvg` might start like this:
+
+```nix
+# my-funky-program.nix
+{ stdenv, libsvg }:
+
+stdenv.mkDerivation { ... }
+```
+
+Any time a file follows this header pattern it is probably meant to be imported
+using a special function called `callPackage` which is part of the top-level
+package set (as well as certain subsets, such as `haskellPackages`).
+
+```nix
+{ pkgs ? import <nixpkgs> {} }:
+
+let my-funky-program = pkgs.callPackage ./my-funky-program.nix {};
+in # ... something happens with my-funky-program
+```
+
+The `callPackage` function looks at the expected arguments (via
+`builtins.functionArgs`) and passes the appropriate keys from the set in which
+it is defined as the values for each corresponding argument.
+
+## Overrides / Overlays
+
+One of the most powerful features of Nix is that the representation of all build
+instructions as data means that they can easily be *overridden* to get a
+different result.
+
+For example, assuming there is a package `someProgram` which is built without
+our favourite configuration flag (`--mimic-threaten-tag`) we might override it
+like this:
+
+```nix
+someProgram.overrideAttrs(old: {
+    configureFlags = old.configureFlags or [] ++ ["--mimic-threaten-tag"];
+})
+```
+
+This pattern has a variety of applications of varying complexity. The top-level
+package set itself can have an `overlays` argument passed to it which may add
+new packages to the imported set.
+
+Note the use of the `or` keyword to default to an empty list if the
+original derivation does not define `configureFlags` at all. This is
+required in case a package does not set any flags by itself.
+
+Since this can change in a package over time, it is useful to guard
+against it using `or`.
+
+For a slightly more advanced example, assume that we want to import `<nixpkgs>`
+but have the modification above be reflected in the imported package set:
+
+```nix
+let
+  overlay = (final: prev: {
+    someProgram = prev.someProgram.overrideAttrs(old: {
+      configureFlags = old.configureFlags or [] ++ ["--mimic-threaten-tag"];
+    });
+  });
+in import <nixpkgs> { overlays = [ overlay ]; }
+```
+
+The overlay function receives two arguments, `final` and `prev`. `final` is
+the [fixed point][fp] of the overlay's evaluation, i.e. the package set
+*including* the new packages and `prev` is the "original" package set.
+
+See the Nix manual sections [on overrides][] and [on overlays][] for more
+details (note: the convention has moved away from using `self` in favor of
+`final`, and `prev` instead of `super`, but the documentation has not been
+updated to reflect this).
+
+[currying]: https://en.wikipedia.org/wiki/Currying
+[builtins]: https://nixos.org/manual/nix/stable/language/builtins
+[nixpkgs]: https://github.com/NixOS/nixpkgs
+[lib-src]: https://github.com/NixOS/nixpkgs/tree/master/lib
+[nixdoc]: https://github.com/tazjin/nixdoc
+[lib-manual]: https://nixos.org/manual/nixpkgs/stable/#sec-functions-library
+[channels]: https://nixos.org/manual/nix/stable/command-ref/files/channels
+[trivial builders]: https://github.com/NixOS/nixpkgs/blob/master/pkgs/build-support/trivial-builders/default.nix
+[smkd]: https://nixos.org/manual/nixpkgs/stable/#chap-stdenv
+[drv-manual]: https://nixos.org/manual/nix/stable/language/derivations
+[fp]: https://github.com/NixOS/nixpkgs/blob/master/lib/fixed-points.nix
+[on overrides]: https://nixos.org/manual/nixpkgs/stable/#chap-overrides
+[on overlays]: https://nixos.org/manual/nixpkgs/stable/#chap-overlays
diff --git a/nix/nix-1p/default.nix b/nix/nix-1p/default.nix
new file mode 100644
index 0000000000..6cc71b9548
--- /dev/null
+++ b/nix/nix-1p/default.nix
@@ -0,0 +1,16 @@
+# The canonical source location of nix-1p is //nix/nix-1p in the TVL
+# depot: https://code.tvl.fyi/about/nix/nix-1p
+#
+# This file configures TVL CI to mirror the subtree to GitHub.
+{ depot ? { }, pkgs ? import <nixpkgs> { }, ... }:
+
+(pkgs.runCommandLocal "nix-1p" { } ''
+  mkdir $out
+  cp ${./README.md} $out/README.md
+'').overrideAttrs (_: {
+  meta.ci.extraSteps.github = depot.tools.releases.filteredGitPush {
+    filter = ":/nix/nix-1p";
+    remote = "git@github.com:tazjin/nix-1p.git";
+    ref = "refs/heads/master";
+  };
+})
diff --git a/nix/readTree/README.md b/nix/readTree/README.md
index c93cf2bfdd..5d430d1cfc 100644
--- a/nix/readTree/README.md
+++ b/nix/readTree/README.md
@@ -52,14 +52,20 @@ true;` attribute merged into it.
 `readTree` will follow any subdirectories of a tree and import all Nix files,
 with some exceptions:
 
+* If a folder contains a `default.nix` file, no *sibling* Nix files will be
+  imported - however children are traversed as normal.
+* If a folder contains a `default.nix` it is loaded and, if it
+  evaluates to a set, *merged* with the children. If it evaluates to
+  anything other than a set, the children are *not traversed*.
+* A folder can opt out from readTree completely by containing a
+  `.skip-tree` file. The content of the file is not read. These
+  folders will be missing completely from the readTree structure.
 * A folder can declare that its children are off-limit by containing a
   `.skip-subtree` file. Since the content of the file is not checked, it can be
   useful to leave a note for a human in the file.
-* If a folder contains a `default.nix` file, no *sibling* Nix files will be
-  imported - however children are traversed as normal.
-* If a folder contains a `default.nix` it is loaded and, if it evaluates to a
-  set, *merged* with the children. If it evaluates to anything else the children
-  are *not traversed*.
+* The `default.nix` of the top-level folder on which readTree is
+  called is **not** read to avoid infinite recursion (as, presumably,
+  this file is where readTree itself is called).
 
 Traversal is lazy, `readTree` will only build up the tree as requested. This
 currently has the downside that directories with no importable files end up in
@@ -67,8 +73,16 @@ the tree as empty nodes (`{}`).
 
 ## Import structure
 
-`readTree` is called with two parameters: The arguments to pass to all imports,
-and the initial path at which to start the traversal.
+`readTree` is called with an argument set containing a few parameters (a
+hypothetical invocation is sketched after this list):
+
+* `path`: Initial path at which to start the traversal.
+* `args`: Arguments to pass to all imports.
+* `filter`: (optional) A function to filter the argument set on each
+  import based on the location in the tree. This can be used to, for
+  example, implement a "visibility" system inside of a tree.
+* `scopedArgs`: (optional) An argument set that is passed to all
+  imported files via `builtins.scopedImport`. This will forcefully
+  override the given values in the import scope, use with care!
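+
+A hypothetical invocation tying these together (assuming `readTree` and `pkgs`
+are in scope; the filter shown is simply the identity):
+
+```nix
+readTree {
+  path = ./.;
+  args = { inherit pkgs; };
+
+  # pass the argument set through unchanged for every import
+  filter = parts: args: args;
+}
+```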
 
 The package headers in this repository follow the form `{ pkgs, ... }:` where
 `pkgs` is a fixed-point of the entire package tree (see the `default.nix` at the
diff --git a/nix/readTree/default.nix b/nix/readTree/default.nix
index 9793310eed..4a745ce33c 100644
--- a/nix/readTree/default.nix
+++ b/nix/readTree/default.nix
@@ -1,74 +1,326 @@
+# Copyright (c) 2019 Vincent Ambo
+# Copyright (c) 2020-2021 The TVL Authors
+# SPDX-License-Identifier: MIT
+#
+# Provides a function to automatically read a filesystem structure
+# into a Nix attribute set.
+#
+# Called with an attribute set taking the following arguments:
+#
+#   path: Path to a directory from which to start reading the tree.
+#
+#   args: Argument set to pass to each imported file.
+#
+#   filter: Function to filter `args` based on the tree location. This should
+#           be a function of the form `location -> args -> args`, where the
+#           location is a list of strings representing the path components of
+#           the current readTree target. Optional.
 { ... }:
 
-args: initPath:
-
 let
   inherit (builtins)
     attrNames
-    baseNameOf
+    concatMap
+    concatStringsSep
+    elem
+    elemAt
     filter
     hasAttr
     head
     isAttrs
-    length
     listToAttrs
     map
     match
     readDir
     substring;
 
-  argsWithPath = parts:
+  argsWithPath = args: parts:
     let meta.locatedAt = parts;
     in meta // (if isAttrs args then args else args meta);
 
   readDirVisible = path:
     let
       children = readDir path;
-      isVisible = f: f == ".skip-subtree" || (substring 0 1 f) != ".";
+      # skip hidden files, except for those that contain special instructions to readTree
+      isVisible = f: f == ".skip-subtree" || f == ".skip-tree" || (substring 0 1 f) != ".";
       names = filter isVisible (attrNames children);
-    in listToAttrs (map (name: {
-      inherit name;
-      value = children.${name};
-    }) names);
-
-  # The marker is added to every set that was imported directly by
-  # readTree.
-  importWithMark = path: parts:
-    let imported = import path (argsWithPath parts);
-    in if (isAttrs imported)
-      then imported // { __readTree = true; }
-      else imported;
+    in
+    listToAttrs (map
+      (name: {
+        inherit name;
+        value = children.${name};
+      })
+      names);
+
+  # Create a mark containing the location of this attribute and
+  # a list of all child attribute names added by readTree.
+  marker = parts: children: {
+    __readTree = parts;
+    __readTreeChildren = builtins.attrNames children;
+  };
+
+  # Create a label from a target's tree location.
+  mkLabel = target:
+    let label = concatStringsSep "/" target.__readTree;
+    in if target ? __subtarget
+    then "${label}:${target.__subtarget}"
+    else label;
+
+  # Merge two attribute sets, but place attributes in `passthru` via
+  # `overrideAttrs` for derivation targets that support it.
+  merge = a: b:
+    if a ? overrideAttrs
+    then
+      a.overrideAttrs
+        (prev: {
+          passthru = (prev.passthru or { }) // b;
+        })
+    else a // b;
+
+  # Import a file and enforce our calling convention
+  importFile = args: scopedArgs: path: parts: filter:
+    let
+      importedFile =
+        if scopedArgs != { } && builtins ? scopedImport # For tvix
+        then builtins.scopedImport scopedArgs path
+        else import path;
+      pathType = builtins.typeOf importedFile;
+    in
+    if pathType != "lambda"
+    then throw "readTree: trying to import ${toString path}, but it’s a ${pathType}, you need to make it a function like { depot, pkgs, ... }"
+    else importedFile (filter parts (argsWithPath args parts));
 
   nixFileName = file:
-    let res = match "(.*)\.nix" file;
+    let res = match "(.*)\\.nix" file;
     in if res == null then null else head res;
 
-  readTree = path: parts:
+  # Internal implementation of readTree, which handles things like the
+  # skipping of trees and subtrees.
+  #
+  # This method returns an attribute set with one of two shapes:
+  #
+  # { ok = ...; }    # a tree was read successfully
+  # { skip = true; } # a tree was skipped
+  #
+  # The higher-level `readTree` method assembles the final attribute
+  # set out of these results at the top-level, and the internal
+  # `children` implementation unwraps and processes nested trees.
+  readTreeImpl = { args, initPath, rootDir, parts, argsFilter, scopedArgs }:
     let
-      dir = readDirVisible path;
-      self = importWithMark path parts;
-      joinChild = c: path + ("/" + c);
+      dir = readDirVisible initPath;
+
+      # Determine whether any part of this tree should be skipped.
+      #
+      # Adding a `.skip-subtree` file will still allow the import of
+      # the current node's "default.nix" file, but stop recursion
+      # there.
+      #
+      # Adding a `.skip-tree` file will completely ignore the folder
+      # in which this file is located.
+      skipTree = hasAttr ".skip-tree" dir;
+      skipSubtree = skipTree || hasAttr ".skip-subtree" dir;
+
+      joinChild = c: initPath + ("/" + c);
+
+      self =
+        if rootDir
+        then { __readTree = [ ]; }
+        else importFile args scopedArgs initPath parts argsFilter;
 
-      # Import subdirectories of the current one, unless the special
-      # `.skip-subtree` file exists which makes readTree ignore the
-      # children.
+      # Import subdirectories of the current one, unless any skip
+      # instructions exist.
       #
       # This file can optionally contain information on why the tree
       # should be ignored, but its content is not inspected by
       # readTree
       filterDir = f: dir."${f}" == "directory";
-      children = if hasAttr ".skip-subtree" dir then [] else map (c: {
-        name = c;
-        value = readTree (joinChild c) (parts ++ [ c ]);
-      }) (filter filterDir (attrNames dir));
+      filteredChildren = map
+        (c: {
+          name = c;
+          value = readTreeImpl {
+            inherit argsFilter scopedArgs;
+            args = args;
+            initPath = (joinChild c);
+            rootDir = false;
+            parts = (parts ++ [ c ]);
+          };
+        })
+        (filter filterDir (attrNames dir));
+
+      # Remove skipped children from the final set, and unwrap the
+      # result set.
+      children =
+        if skipSubtree then [ ]
+        else map ({ name, value }: { inherit name; value = value.ok; }) (filter (child: child.value ? ok) filteredChildren);
 
       # Import Nix files
-      nixFiles = filter (f: f != null) (map nixFileName (attrNames dir));
-      nixChildren = map (c: let p = joinChild (c + ".nix"); in {
-        name = c;
-        value = importWithMark p (parts ++ [ c ]);
-      }) nixFiles;
-    in if dir ? "default.nix"
-      then (if isAttrs self then self // (listToAttrs children) else self)
-      else listToAttrs (nixChildren ++ children);
-in readTree initPath [ (baseNameOf initPath) ]
+      nixFiles =
+        if skipSubtree then [ ]
+        else filter (f: f != null) (map nixFileName (attrNames dir));
+      nixChildren = map
+        (c:
+          let
+            p = joinChild (c + ".nix");
+            childParts = parts ++ [ c ];
+            imported = importFile args scopedArgs p childParts argsFilter;
+          in
+          {
+            name = c;
+            value =
+              if isAttrs imported
+              then merge imported (marker childParts { })
+              else imported;
+          })
+        nixFiles;
+
+      nodeValue = if dir ? "default.nix" then self else { };
+
+      allChildren = listToAttrs (
+        if dir ? "default.nix"
+        then children
+        else nixChildren ++ children
+      );
+
+    in
+    if skipTree
+    then { skip = true; }
+    else {
+      ok =
+        if isAttrs nodeValue
+        then merge nodeValue (allChildren // (marker parts allChildren))
+        else nodeValue;
+    };
+
+  # Top-level implementation of readTree itself.
+  readTree = args:
+    let
+      tree = readTreeImpl args;
+    in
+    if tree ? skip
+    then throw "Top-level folder has a .skip-tree marker and could not be read by readTree!"
+    else tree.ok;
+
+  # Helper function to fetch subtargets from a target. This is a
+  # temporary helper to warn on the use of the `meta.targets`
+  # attribute, which is deprecated in favour of `meta.ci.targets`.
+  subtargets = node:
+    let targets = (node.meta.targets or [ ]) ++ (node.meta.ci.targets or [ ]);
+    in if node ? meta.targets then
+      builtins.trace ''
+        Warning: The meta.targets attribute is deprecated.
+
+        Please move the subtargets of //${mkLabel node} to the
+        meta.ci.targets attribute.
+
+      ''
+        targets else targets;
+
+  # Function which can be used to find all readTree targets within an
+  # attribute set.
+  #
+  # This function will gather physical targets, that is targets which
+  # correspond directly to a location in the repository, as well as
+  # subtargets (specified in the meta.ci.targets attribute of a node).
+  #
+  # This can be used to discover targets for inclusion in CI
+  # pipelines.
+  #
+  # Called with the arguments:
+  #
+  #   eligible: Function to determine whether the given derivation
+  #             should be included in the build.
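+  #
+  # For illustration (hypothetical tree and predicate):
+  #   gather (node: node ? outPath) someReadTreeRoot
+  #   => [ ... ] (all eligible physical targets plus their subtargets)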
+  gather = eligible: node:
+    if node ? __readTree then
+    # Include the node itself if it is eligible.
+      (if eligible node then [ node ] else [ ])
+      # Include eligible children of the node
+      ++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren)
+      # Include specified sub-targets of the node
+      ++ filter eligible (map
+        (k: (node."${k}" or { }) // {
+          # Keep the same tree location, but explicitly mark this
+          # node as a subtarget.
+          __readTree = node.__readTree;
+          __readTreeChildren = [ ];
+          __subtarget = k;
+        })
+        (subtargets node))
+    else [ ];
+
+  # Determine whether a given value is a derivation.
+  # Copied from nixpkgs/lib for cases where lib is not available yet.
+  isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
+in
+{
+  inherit gather mkLabel;
+
+  __functor = _:
+    { path
+    , args
+    , filter ? (_parts: x: x)
+    , scopedArgs ? { }
+    }:
+    readTree {
+      inherit args scopedArgs;
+      argsFilter = filter;
+      initPath = path;
+      rootDir = true;
+      parts = [ ];
+    };
+
+  # In addition to readTree itself, some functionality is exposed that
+  # is useful for users of readTree.
+
+  # Create a readTree filter disallowing access to the specified
+  # top-level folder in the repository, except for specific exceptions
+  # specified by their (full) paths.
+  #
+  # Called with the arguments:
+  #
+  #   folder: Name of the restricted top-level folder (e.g. 'experimental')
+  #
+  #   exceptions: List of readTree parts (e.g. [ [ "services" "some-app" ] ]),
+  #               which should be able to access the restricted folder.
+  #
+  #   reason: Textual explanation for the restriction (included in errors)
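+  #
+  # Example (hypothetical folder and exception):
+  #   restrictFolder {
+  #     folder = "corp";
+  #     exceptions = [ [ "ops" "deploy" ] ];
+  #     reason = "Corp code must not be used elsewhere.";
+  #   }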
+  restrictFolder = { folder, exceptions ? [ ], reason }: parts: args:
+    if (elemAt parts 0) == folder || elem parts exceptions
+    then args
+    else args // {
+      depot = args.depot // {
+        "${folder}" = throw ''
+          Access to targets under //${folder} is not permitted from
+          other repository paths. Specific exceptions are configured
+          at the top-level.
+
+          ${reason}
+          At location: ${builtins.concatStringsSep "." parts}
+        '';
+      };
+    };
+
+  # This definition of fix is identical to <nixpkgs>.lib.fix, but is
+  # provided here for cases where readTree is used before nixpkgs can
+  # be imported.
+  #
+  # It is often required to create the args attribute set.
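+  #
+  # For illustration:
+  #   fix (self: { a = 1; b = self.a + 1; })
+  #   => { a = 1; b = 2; }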
+  fix = f: let x = f x; in x;
+
+  # Takes an attribute set and adds a meta.ci.targets attribute to it
+  # which contains all direct children of the attribute set which are
+  # derivations.
+  #
+  # Type: attrs -> attrs
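+  #
+  # Example (with a hypothetical derivation `someDrv`):
+  #   drvTargets { foo = someDrv; bar = "not a drv"; }
+  #   => { foo = someDrv; bar = "not a drv";
+  #        meta = { ci = { targets = [ "foo" ]; }; }; }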
+  drvTargets = attrs:
+    attrs // {
+      # preserve .meta from original attrs
+      meta = (attrs.meta or { }) // {
+        # preserve .meta.ci (except .targets) from original attrs
+        ci = (attrs.meta.ci or { }) // {
+          targets = builtins.filter
+            (x: isDerivation attrs."${x}")
+            (builtins.attrNames attrs);
+        };
+      };
+    };
+}
diff --git a/nix/readTree/tests/.skip-subtree b/nix/readTree/tests/.skip-subtree
new file mode 100644
index 0000000000..46952a4ca6
--- /dev/null
+++ b/nix/readTree/tests/.skip-subtree
@@ -0,0 +1 @@
+These tests call their own readTree, so the toplevel one shouldn’t bother
diff --git a/nix/readTree/tests/default.nix b/nix/readTree/tests/default.nix
new file mode 100644
index 0000000000..6f9eb02eff
--- /dev/null
+++ b/nix/readTree/tests/default.nix
@@ -0,0 +1,139 @@
+{ depot, lib, ... }:
+
+let
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    it
+    assertEq
+    assertThrows
+    ;
+
+  tree-ex = depot.nix.readTree {
+    path = ./test-example;
+    args = { };
+  };
+
+  example = it "corresponds to the README example" [
+    (assertEq "third_party attrset"
+      (lib.isAttrs tree-ex.third_party
+        && (! lib.isDerivation tree-ex.third_party))
+      true)
+    (assertEq "third_party attrset other attribute"
+      tree-ex.third_party.favouriteColour
+      "orange")
+    (assertEq "rustpkgs attrset aho-corasick"
+      tree-ex.third_party.rustpkgs.aho-corasick
+      "aho-corasick")
+    (assertEq "rustpkgs attrset serde"
+      tree-ex.third_party.rustpkgs.serde
+      "serde")
+    (assertEq "tools cheddear"
+      "cheddar"
+      tree-ex.tools.cheddar)
+    (assertEq "tools roquefort"
+      tree-ex.tools.roquefort
+      "roquefort")
+  ];
+
+  tree-tl = depot.nix.readTree {
+    path = ./test-tree-traversal;
+    args = { };
+  };
+
+  traversal-logic = it "corresponds to the traversal logic in the README" [
+    (assertEq "skip-tree/a is read"
+      tree-tl.skip-tree.a
+      "a is read normally")
+    (assertEq "skip-tree does not contain b"
+      (builtins.attrNames tree-tl.skip-tree)
+      [ "__readTree" "__readTreeChildren" "a" ])
+    (assertEq "skip-tree children list does not contain b"
+      tree-tl.skip-tree.__readTreeChildren
+      [ "a" ])
+
+    (assertEq "skip subtree default.nix is read"
+      tree-tl.skip-subtree.but
+      "the default.nix is still read")
+    (assertEq "skip subtree a/default.nix is skipped"
+      (tree-tl.skip-subtree ? a)
+      false)
+    (assertEq "skip subtree b/c.nix is skipped"
+      (tree-tl.skip-subtree ? b)
+      false)
+    (assertEq "skip subtree a/default.nix would be read without .skip-subtree"
+      (tree-tl.no-skip-subtree.a)
+      "am I subtree yet?")
+    (assertEq "skip subtree b/c.nix would be read without .skip-subtree"
+      (tree-tl.no-skip-subtree.b.c)
+      "cool")
+
+    (assertEq "default.nix attrset is merged with siblings"
+      tree-tl.default-nix.no
+      "siblings should be read")
+    (assertEq "default.nix means sibling isn’t read"
+      (tree-tl.default-nix ? sibling)
+      false)
+    (assertEq "default.nix means subdirs are still read and merged into default.nix"
+      (tree-tl.default-nix.subdir.a)
+      "but I’m picked up")
+
+    (assertEq "default.nix can be not an attrset"
+      tree-tl.default-nix.no-merge
+      "I’m not merged with any children")
+    (assertEq "default.nix is not an attrset -> children are not merged"
+      (tree-tl.default-nix.no-merge ? subdir)
+      false)
+
+    (assertEq "default.nix can contain a derivation"
+      (lib.isDerivation tree-tl.default-nix.can-be-drv)
+      true)
+    (assertEq "Even if default.nix is a derivation, children are traversed and merged"
+      tree-tl.default-nix.can-be-drv.subdir.a
+      "Picked up through the drv")
+    (assertEq "default.nix drv is not changed by readTree"
+      tree-tl.default-nix.can-be-drv
+      (import ./test-tree-traversal/default-nix/can-be-drv/default.nix { }))
+  ];
+
+  # these each call readTree themselves because the throws have to happen inside assertThrows
+  wrong = it "cannot read these files and will complain" [
+    (assertThrows "this file is not a function"
+      (depot.nix.readTree {
+        path = ./test-wrong-not-a-function;
+        args = { };
+      }).not-a-function)
+    # can’t test for that, assertThrows can’t catch this error
+    # (assertThrows "this file is a function but doesn’t have dots"
+    #   (depot.nix.readTree {} ./test-wrong-no-dots).no-dots-in-function)
+  ];
+
+  read-markers = depot.nix.readTree {
+    path = ./test-marker;
+    args = { };
+  };
+
+  assertMarkerByPath = path:
+    assertEq "${lib.concatStringsSep "." path} is marked correctly"
+      (lib.getAttrFromPath path read-markers).__readTree
+      path;
+
+  markers = it "marks nodes correctly" [
+    (assertMarkerByPath [ "directory-marked" ])
+    (assertMarkerByPath [ "directory-marked" "nested" ])
+    (assertMarkerByPath [ "file-children" "one" ])
+    (assertMarkerByPath [ "file-children" "two" ])
+    (assertEq "nix file children are marked correctly"
+      read-markers.file-children.__readTreeChildren [ "one" "two" ])
+    (assertEq "directory children are marked correctly"
+      read-markers.directory-marked.__readTreeChildren [ "nested" ])
+    (assertEq "absence of children is marked"
+      read-markers.directory-marked.nested.__readTreeChildren [ ])
+  ];
+
+in
+runTestsuite "readTree" [
+  example
+  traversal-logic
+  wrong
+  markers
+]
diff --git a/nix/readTree/tests/test-example/third_party/default.nix b/nix/readTree/tests/test-example/third_party/default.nix
new file mode 100644
index 0000000000..27d8724218
--- /dev/null
+++ b/nix/readTree/tests/test-example/third_party/default.nix
@@ -0,0 +1,5 @@
+{ ... }:
+
+{
+  favouriteColour = "orange";
+}
diff --git a/nix/readTree/tests/test-example/third_party/rustpkgs/aho-corasick.nix b/nix/readTree/tests/test-example/third_party/rustpkgs/aho-corasick.nix
new file mode 100644
index 0000000000..f964fa583c
--- /dev/null
+++ b/nix/readTree/tests/test-example/third_party/rustpkgs/aho-corasick.nix
@@ -0,0 +1 @@
+{ ... }: "aho-corasick"
diff --git a/nix/readTree/tests/test-example/third_party/rustpkgs/serde.nix b/nix/readTree/tests/test-example/third_party/rustpkgs/serde.nix
new file mode 100644
index 0000000000..54b3c0503f
--- /dev/null
+++ b/nix/readTree/tests/test-example/third_party/rustpkgs/serde.nix
@@ -0,0 +1 @@
+{ ... }: "serde"
diff --git a/nix/readTree/tests/test-example/tools/cheddar/default.nix b/nix/readTree/tests/test-example/tools/cheddar/default.nix
new file mode 100644
index 0000000000..a1919442d8
--- /dev/null
+++ b/nix/readTree/tests/test-example/tools/cheddar/default.nix
@@ -0,0 +1 @@
+{ ... }: "cheddar"
diff --git a/nix/readTree/tests/test-example/tools/roquefort.nix b/nix/readTree/tests/test-example/tools/roquefort.nix
new file mode 100644
index 0000000000..03c7492bb6
--- /dev/null
+++ b/nix/readTree/tests/test-example/tools/roquefort.nix
@@ -0,0 +1 @@
+{ ... }: "roquefort"
diff --git a/nix/readTree/tests/test-marker/directory-marked/default.nix b/nix/readTree/tests/test-marker/directory-marked/default.nix
new file mode 100644
index 0000000000..5bd3e36b53
--- /dev/null
+++ b/nix/readTree/tests/test-marker/directory-marked/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+{ }
diff --git a/nix/readTree/tests/test-marker/directory-marked/nested/default.nix b/nix/readTree/tests/test-marker/directory-marked/nested/default.nix
new file mode 100644
index 0000000000..5bd3e36b53
--- /dev/null
+++ b/nix/readTree/tests/test-marker/directory-marked/nested/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+{ }
diff --git a/nix/readTree/tests/test-marker/file-children/one.nix b/nix/readTree/tests/test-marker/file-children/one.nix
new file mode 100644
index 0000000000..5bd3e36b53
--- /dev/null
+++ b/nix/readTree/tests/test-marker/file-children/one.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+{ }
diff --git a/nix/readTree/tests/test-marker/file-children/two.nix b/nix/readTree/tests/test-marker/file-children/two.nix
new file mode 100644
index 0000000000..5bd3e36b53
--- /dev/null
+++ b/nix/readTree/tests/test-marker/file-children/two.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+{ }
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/default.nix b/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/default.nix
new file mode 100644
index 0000000000..95d13d3c27
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/default.nix
@@ -0,0 +1,7 @@
+{ ... }:
+derivation {
+  name = "im-a-drv";
+  system = builtins.currentSystem;
+  builder = "/bin/sh";
+  args = [ "-c" ''echo "" > $out'' ];
+}
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/subdir/a.nix b/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/subdir/a.nix
new file mode 100644
index 0000000000..2ee2d648f0
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/subdir/a.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"Picked up through the drv"
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/default.nix b/nix/readTree/tests/test-tree-traversal/default-nix/default.nix
new file mode 100644
index 0000000000..6003b53983
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/default.nix
@@ -0,0 +1,5 @@
+{ ... }:
+
+{
+  no = "siblings should be read";
+}
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/default.nix b/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/default.nix
new file mode 100644
index 0000000000..c469533fbe
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"I’m not merged with any children"
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/subdir/a.nix b/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/subdir/a.nix
new file mode 100644
index 0000000000..2008c2d241
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/subdir/a.nix
@@ -0,0 +1 @@
+"not accessible since parent default.nix is not an attrset"
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/sibling.nix b/nix/readTree/tests/test-tree-traversal/default-nix/sibling.nix
new file mode 100644
index 0000000000..8c57f2c161
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/sibling.nix
@@ -0,0 +1 @@
+"I’m left alone"
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/subdir/a.nix b/nix/readTree/tests/test-tree-traversal/default-nix/subdir/a.nix
new file mode 100644
index 0000000000..cf0ac2c8f3
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/subdir/a.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"but I’m picked up"
diff --git a/nix/readTree/tests/test-tree-traversal/no-skip-subtree/a/default.nix b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/a/default.nix
new file mode 100644
index 0000000000..a586ce534c
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/a/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"am I subtree yet?"
diff --git a/nix/readTree/tests/test-tree-traversal/no-skip-subtree/b/c.nix b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/b/c.nix
new file mode 100644
index 0000000000..06216c417b
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/b/c.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"cool"
diff --git a/nix/readTree/tests/test-tree-traversal/no-skip-subtree/default.nix b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/default.nix
new file mode 100644
index 0000000000..3d9f241cdd
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/default.nix
@@ -0,0 +1,5 @@
+{ ... }:
+
+{
+  but = "the default.nix is still read";
+}
diff --git a/nix/readTree/tests/test-tree-traversal/skip-subtree/.skip-subtree b/nix/readTree/tests/test-tree-traversal/skip-subtree/.skip-subtree
new file mode 100644
index 0000000000..87271ba5e1
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-subtree/.skip-subtree
@@ -0,0 +1 @@
+this file makes subdirs be skipped, I hope
diff --git a/nix/readTree/tests/test-tree-traversal/skip-subtree/a/default.nix b/nix/readTree/tests/test-tree-traversal/skip-subtree/a/default.nix
new file mode 100644
index 0000000000..a586ce534c
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-subtree/a/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"am I subtree yet?"
diff --git a/nix/readTree/tests/test-tree-traversal/skip-subtree/b/c.nix b/nix/readTree/tests/test-tree-traversal/skip-subtree/b/c.nix
new file mode 100644
index 0000000000..06216c417b
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-subtree/b/c.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"cool"
diff --git a/nix/readTree/tests/test-tree-traversal/skip-subtree/default.nix b/nix/readTree/tests/test-tree-traversal/skip-subtree/default.nix
new file mode 100644
index 0000000000..3d9f241cdd
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-subtree/default.nix
@@ -0,0 +1,5 @@
+{ ... }:
+
+{
+  but = "the default.nix is still read";
+}
diff --git a/nix/readTree/tests/test-tree-traversal/skip-tree/a/default.nix b/nix/readTree/tests/test-tree-traversal/skip-tree/a/default.nix
new file mode 100644
index 0000000000..186488be3c
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-tree/a/default.nix
@@ -0,0 +1 @@
+_: "a is read normally"
diff --git a/nix/readTree/tests/test-tree-traversal/skip-tree/b/.skip-tree b/nix/readTree/tests/test-tree-traversal/skip-tree/b/.skip-tree
new file mode 100644
index 0000000000..34936b45d1
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-tree/b/.skip-tree
@@ -0,0 +1 @@
+b subfolder should be skipped completely
diff --git a/nix/readTree/tests/test-tree-traversal/skip-tree/b/default.nix b/nix/readTree/tests/test-tree-traversal/skip-tree/b/default.nix
new file mode 100644
index 0000000000..7903f8e95a
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-tree/b/default.nix
@@ -0,0 +1 @@
+throw "b is skipped completely"
diff --git a/nix/readTree/tests/test-wrong-no-dots/no-dots-in-function.nix b/nix/readTree/tests/test-wrong-no-dots/no-dots-in-function.nix
new file mode 100644
index 0000000000..4681253af8
--- /dev/null
+++ b/nix/readTree/tests/test-wrong-no-dots/no-dots-in-function.nix
@@ -0,0 +1,3 @@
+{}:
+
+"This is a function, but readTree wants to pass a bunch of arguments, and not having dots means we depend on exactly which arguments."
diff --git a/nix/readTree/tests/test-wrong-not-a-function/not-a-function.nix b/nix/readTree/tests/test-wrong-not-a-function/not-a-function.nix
new file mode 100644
index 0000000000..f46ee2a355
--- /dev/null
+++ b/nix/readTree/tests/test-wrong-not-a-function/not-a-function.nix
@@ -0,0 +1 @@
+"This file needs to be a function, otherwise readTree doesn’t like it!"
diff --git a/nix/renderMarkdown/default.nix b/nix/renderMarkdown/default.nix
new file mode 100644
index 0000000000..8759ada0fe
--- /dev/null
+++ b/nix/renderMarkdown/default.nix
@@ -0,0 +1,21 @@
+# Render a Markdown file to HTML.
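+#
+# Accepts either a plain path or an args struct, e.g. (hypothetical
+# paths):
+#   renderMarkdown ./notes.md
+#   renderMarkdown { path = ./notes.md; tagfilter = false; }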
+{ depot, pkgs, ... }:
+
+with depot.nix.yants;
+
+let
+  args = struct "args" {
+    path = path;
+    tagfilter = option bool;
+  };
+in
+defun [ (either path args) drv ]
+  (arg: pkgs.runCommand "${arg.path or arg}.rendered.html" { }
+    (
+      let
+        tagfilter = if (arg.tagfilter or true) then "" else "--no-tagfilter";
+      in
+      ''
+        cat ${arg.path or arg} | ${depot.tools.cheddar}/bin/cheddar --about-filter ${tagfilter} ${arg.path or arg} > $out
+      ''
+    ))
diff --git a/nix/runExecline/OWNERS b/nix/runExecline/OWNERS
deleted file mode 100644
index a742d0d22b..0000000000
--- a/nix/runExecline/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-inherited: true
-owners:
-  - Profpatsch
diff --git a/nix/runExecline/default.nix b/nix/runExecline/default.nix
index 22d968a1c6..76fffdce7b 100644
--- a/nix/runExecline/default.nix
+++ b/nix/runExecline/default.nix
@@ -6,14 +6,26 @@ let
     inherit pkgs lib;
   };
 
+  runExeclineLocal = name: args: execline:
+    runExecline name
+      (args // {
+        derivationArgs = args.derivationArgs or { } // {
+          preferLocalBuild = true;
+          allowSubstitutes = false;
+        };
+      })
+      execline;
+
   tests = import ./tests.nix {
-    inherit runExecline;
+    inherit runExecline runExeclineLocal;
     inherit (depot.nix) getBins writeScript;
     inherit (pkgs) stdenv coreutils;
     inherit pkgs;
   };
 
-in {
+in
+{
   __functor = _: runExecline;
+  local = runExeclineLocal;
   inherit tests;
 }
diff --git a/nix/runExecline/runExecline.nix b/nix/runExecline/runExecline.nix
index 498e26e576..23b9a63303 100644
--- a/nix/runExecline/runExecline.nix
+++ b/nix/runExecline/runExecline.nix
@@ -35,31 +35,32 @@
 
 let
   bins = getBins pkgs.execline [
-           "execlineb"
-           { use = "if"; as = "execlineIf"; }
-           "redirfd"
-           "importas"
-           "exec"
-         ]
-      // getBins pkgs.s6-portable-utils [
-           "s6-cat"
-           "s6-grep"
-           "s6-touch"
-           "s6-test"
-           "s6-chmod"
-         ];
+    "execlineb"
+    { use = "if"; as = "execlineIf"; }
+    "redirfd"
+    "importas"
+    "exec"
+  ]
+  // getBins pkgs.s6-portable-utils [
+    "s6-cat"
+    "s6-grep"
+    "s6-touch"
+    "s6-test"
+    "s6-chmod"
+  ];
 
 in
 
+# TODO: move name into the attrset
 name:
 {
-# a string to pass as stdin to the execline script
-stdin ? ""
-# a program wrapping the acutal execline invocation;
-# should be in Bernstein-chaining style
+  # a string to pass as stdin to the execline script
+  stdin ? ""
+  # a program wrapping the actual execline invocation;
+  # should be in Bernstein-chaining style
 , builderWrapper ? bins.exec
-# additional arguments to pass to the derivation
-, derivationArgs ? {}
+  # additional arguments to pass to the derivation
+, derivationArgs ? { }
 }:
 # the execline script as a nested list of string,
 # representing the blocks;
@@ -89,33 +90,33 @@ derivation (derivationArgs // {
   passAsFile = [
     "_runExeclineScript"
     "_runExeclineStdin"
-  ] ++ derivationArgs.passAsFile or [];
+  ] ++ derivationArgs.passAsFile or [ ];
 
   # the default, exec acts as identity executable
   builder = builderWrapper;
 
   args = [
-    bins.importas            # import script file as $script
-    "-ui"                    # drop the envvar afterwards
-    "script"                 # substitution name
+    bins.importas # import script file as $script
+    "-ui" # drop the envvar afterwards
+    "script" # substitution name
     "_runExeclineScriptPath" # passed script file
 
-    bins.importas            # do the same for $stdin
+    bins.importas # do the same for $stdin
     "-ui"
     "stdin"
     "_runExeclineStdinPath"
 
-    bins.redirfd             # now we
-    "-r"                     # read the file
-    "0"                      # into the stdin of execlineb
-    "$stdin"                 # that was given via stdin
+    bins.redirfd # now we
+    "-r" # read the file
+    "0" # into the stdin of execlineb
+    "$stdin" # that was given via stdin
 
-    bins.execlineb           # the actual invocation
+    bins.execlineb # the actual invocation
     # TODO(Profpatsch): depending on the use-case, -S0 might not be enough
     # in all use-cases, then a wrapper for execlineb arguments
     # should be added (-P, -S, -s).
-    "-S0"                    # set $@ inside the execline script
-    "-W"                     # die on syntax error
-    "$script"                # substituted by importas
+    "-S0" # set $@ inside the execline script
+    "-W" # die on syntax error
+    "$script" # substituted by importas
   ];
 })
diff --git a/nix/runExecline/tests.nix b/nix/runExecline/tests.nix
index a8f91f3cf3..f82b544224 100644
--- a/nix/runExecline/tests.nix
+++ b/nix/runExecline/tests.nix
@@ -1,23 +1,29 @@
-{ stdenv, pkgs, runExecline, getBins, writeScript
-# https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html
-, coreutils }:
+{ stdenv
+, pkgs
+, runExecline
+, runExeclineLocal
+, getBins
+, writeScript
+  # https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html
+, coreutils
+}:
 
 let
 
   bins = getBins coreutils [ "mv" ]
-      // getBins pkgs.execline [
-           "execlineb"
-           { use = "if"; as = "execlineIf"; }
-           "redirfd"
-           "importas"
-         ]
-      // getBins pkgs.s6-portable-utils [
-           "s6-chmod"
-           "s6-grep"
-           "s6-touch"
-           "s6-cat"
-           "s6-test"
-         ];
+    // getBins pkgs.execline [
+    "execlineb"
+    { use = "if"; as = "execlineIf"; }
+    "redirfd"
+    "importas"
+  ]
+    // getBins pkgs.s6-portable-utils [
+    "s6-chmod"
+    "s6-grep"
+    "s6-touch"
+    "s6-cat"
+    "s6-test"
+  ];
 
   # execline block of depth 1
   block = args: builtins.map (arg: " ${arg}") args ++ [ "" ];
@@ -31,60 +37,80 @@ let
     builder = bins.execlineIf;
     args =
       (block [
-        bins.redirfd "-r" "0" file   # read file to stdin
-        bins.s6-grep "-F" "-q" line   # and grep for the line
+        bins.redirfd
+        "-r"
+        "0"
+        file # read file to stdin
+        bins.s6-grep
+        "-F"
+        "-q"
+        line # and grep for the line
       ])
       ++ [
         # if the block succeeded, touch $out
-        bins.importas "-ui" "out" "out"
-        bins.s6-touch "$out"
+        bins.importas
+        "-ui"
+        "out"
+        "out"
+        bins.s6-touch
+        "$out"
       ];
     preferLocalBuild = true;
     allowSubstitutes = false;
   };
 
   # basic test that touches out
-  basic = runExecline "run-execline-test-basic" {
-    derivationArgs = {
-      preferLocalBuild = true;
-      allowSubstitutes = false;
-    };
-  } [
-      "importas" "-ui" "out" "out"
-      "${bins.s6-touch}" "$out"
+  basic = runExeclineLocal "run-execline-test-basic"
+    { } [
+    "importas"
+    "-ui"
+    "out"
+    "out"
+    "${bins.s6-touch}"
+    "$out"
   ];
 
   # whether the stdin argument works as intended
-  stdin = fileHasLine "foo" (runExecline "run-execline-test-stdin" {
-    stdin = "foo\nbar\nfoo";
-    derivationArgs = {
-      preferLocalBuild = true;
-      allowSubstitutes = false;
-    };
-  } [
-      "importas" "-ui" "out" "out"
-      # this pipes stdout of s6-cat to $out
-      # and s6-cat redirects from stdin to stdout
-      "redirfd" "-w" "1" "$out" bins.s6-cat
+  stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin"
+    {
+      stdin = "foo\nbar\nfoo";
+    } [
+    "importas"
+    "-ui"
+    "out"
+    "out"
+    # this pipes stdout of s6-cat to $out
+    # and s6-cat redirects from stdin to stdout
+    "redirfd"
+    "-w"
+    "1"
+    "$out"
+    bins.s6-cat
   ]);
 
-  wrapWithVar = runExecline "run-execline-test-wrap-with-var" {
-    builderWrapper = writeScript "var-wrapper" ''
-      #!${bins.execlineb} -S0
-      export myvar myvalue $@
-    '';
-    derivationArgs = {
-      preferLocalBuild = true;
-      allowSubstitutes = false;
-    };
-  } [
-    "importas" "-ui" "v" "myvar"
-    "if" [ bins.s6-test "myvalue" "=" "$v" ]
-      "importas" "out" "out"
-      bins.s6-touch "$out"
+
+  wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var"
+    {
+      builderWrapper = writeScript "var-wrapper" ''
+        #!${bins.execlineb} -S0
+        export myvar myvalue $@
+      '';
+    } [
+    "importas"
+    "-ui"
+    "v"
+    "myvar"
+    "if"
+    [ bins.s6-test "myvalue" "=" "$v" ]
+    "importas"
+    "out"
+    "out"
+    bins.s6-touch
+    "$out"
   ];
 
-in [
+in
+[
   basic
   stdin
   wrapWithVar
diff --git a/nix/runTestsuite/default.nix b/nix/runTestsuite/default.nix
index 0105eb6fc9..8b02ed86d8 100644
--- a/nix/runTestsuite/default.nix
+++ b/nix/runTestsuite/default.nix
@@ -26,7 +26,23 @@
 # will fail the second it group because true is not false.
 
 let
-  inherit (depot.nix.yants) sum struct string any unit defun list;
+  inherit (depot.nix.yants)
+    sum
+    struct
+    string
+    any
+    defun
+    list
+    drv
+    bool
+    ;
+
+  bins = depot.nix.getBins pkgs.coreutils [ "printf" ]
+    // depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ];
+
+  # Returns true if the given expression throws when `deepSeq`-ed
+  throws = expr:
+    !(builtins.tryEval (builtins.deepSeq expr { })).success;
 
   # rewrite the builtins.partition result
   # to use `ok` and `err` instead of `right` and `wrong`.
@@ -37,10 +53,23 @@ let
       err = res.wrong;
     };
 
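+  # Structured description of why an assert failed; used in the
+  # `nope` case of AssertResult below.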
+  AssertErrorContext =
+    sum "AssertErrorContext" {
+      not-equal = struct "not-equal" {
+        left = any;
+        right = any;
+      };
+      should-throw = struct "should-throw" {
+        expr = any;
+      };
+      unexpected-throw = struct "unexpected-throw" { };
+    };
+
   # The result of an assert,
   # either it’s true (yep) or false (nope).
-  # If it’s nope, we return the left and right
-  # side of the assert, together with the description.
+  # If it's nope, we return an additional context
+  # attribute which gives details on the failure
+  # depending on the type of assert performed.
   AssertResult =
     sum "AssertResult" {
       yep = struct "yep" {
@@ -48,8 +77,7 @@ let
       };
       nope = struct "nope" {
         test = string;
-        left = any;
-        right = any;
+        context = AssertErrorContext;
       };
     };
 
@@ -61,16 +89,43 @@ let
       asserts = list AssertResult;
     };
 
+  # If the given boolean is true return a positive AssertResult.
+  # If the given boolean is false return a negative AssertResult
+  # with the provided AssertErrorContext describing the failure.
+  #
+  # This function is intended as a generic assert used to implement
+  # more specific assert types; it is not exposed to the user.
+  assertBoolContext = defun [ AssertErrorContext string bool AssertResult ]
+    (context: desc: res:
+      if res
+      then { yep = { test = desc; }; }
+      else {
+        nope = {
+          test = desc;
+          inherit context;
+        };
+      });
+
   # assert that left and right values are equal
   assertEq = defun [ string any any AssertResult ]
     (desc: left: right:
-      if left == right
-      then { yep = { test = desc; }; }
-      else { nope = {
-        test = desc;
-        inherit left right;
-      };
-    });
+      let
+        context = { not-equal = { inherit left right; }; };
+      in
+      assertBoolContext context desc (left == right));
+
+  # assert that the expression throws when `deepSeq`-ed
+  assertThrows = defun [ string any AssertResult ]
+    (desc: expr:
+      let
+        context = { should-throw = { inherit expr; }; };
+      in
+      assertBoolContext context desc (throws expr));
+
+  # assert that the expression does not throw when `deepSeq`-ed
+  assertDoesNotThrow = defun [ string any AssertResult ]
+    (desc: expr:
+      assertBoolContext { unexpected-throw = { }; } desc (!(throws expr)));
 
   # Annotate a bunch of asserts with a descriptive name
   it = desc: asserts: {
@@ -83,38 +138,61 @@ let
   # and print the result of the test suite.
   #
   # Takes a test suite name as first argument.
-  runTestsuite = defun [ string (list ItResult) unit ]
+  runTestsuite = defun [ string (list ItResult) drv ]
     (name: itResults:
       let
-        goodAss = ass: {
-          good = AssertResult.match ass {
-            yep = _: true;
-            nope = _: false;
-          };
-          x = ass;
-        };
-        goodIt = it: {
-          inherit (it) it-desc;
-          asserts = partitionTests (ass:
-            AssertResult.match ass {
-              yep = _: true;
-              nope = _: false;
-            }) it.asserts;
+        goodAss = ass: AssertResult.match ass {
+          yep = _: true;
+          nope = _: false;
         };
-        goodIts = partitionTests (it: (goodIt it).asserts.err == []);
-        res = goodIts itResults;
+        res = partitionTests
+          (it:
+            (partitionTests goodAss it.asserts).err == [ ]
+          )
+          itResults;
+        prettyRes = lib.generators.toPretty { } res;
       in
-        if res.err == []
-        then {}
-        # TODO(Profpatsch): pretty printing of results
-        # and probably also somewhat easier to read output
-        else throw
-          ( "testsuite ${name} failed!\n"
-          + lib.generators.toPretty {} res));
+      if res.err == [ ]
+      then
+        depot.nix.runExecline.local "testsuite-${name}-successful" { } [
+          "importas"
+          "out"
+          "out"
+          # force derivation to rebuild if test case list changes
+          "ifelse"
+          [ bins.s6-false ]
+          [
+            bins.printf
+            ""
+            (builtins.hashString "sha512" prettyRes)
+          ]
+          "if"
+          [ bins.printf "%s\n" "testsuite ${name} successful!" ]
+          bins.s6-touch
+          "$out"
+        ]
+      else
+        depot.nix.runExecline.local "testsuite-${name}-failed"
+          {
+            stdin = prettyRes + "\n";
+          } [
+          "importas"
+          "out"
+          "out"
+          "if"
+          [ bins.printf "%s\n" "testsuite ${name} failed!" ]
+          "if"
+          [ bins.s6-cat ]
+          "exit"
+          "1"
+        ]);
 
-in {
+in
+{
   inherit
     assertEq
+    assertThrows
+    assertDoesNotThrow
     it
     runTestsuite
     ;
diff --git a/nix/sparseTree/OWNERS b/nix/sparseTree/OWNERS
new file mode 100644
index 0000000000..2e95807063
--- /dev/null
+++ b/nix/sparseTree/OWNERS
@@ -0,0 +1 @@
+sterni
diff --git a/nix/sparseTree/default.nix b/nix/sparseTree/default.nix
new file mode 100644
index 0000000000..35fa459e1c
--- /dev/null
+++ b/nix/sparseTree/default.nix
@@ -0,0 +1,84 @@
+# Build a “sparse” version of a given directory, only including contained files
+# and directories if they are listed in a supplied list:
+#
+# # A very minimal depot
+# sparseTree {
+#   root = ./depot;
+#   paths = [
+#     ./default.nix
+#     ./depot/nix/readTree/default.nix
+#     ./third_party/nixpkgs
+#     ./third_party/overlays
+#   ];
+# }
+{ pkgs, lib, ... }:
+
+{
+  # root path to use as a reference point
+  root
+, # list of paths below `root` that should be
+  # included in the resulting directory
+  #
+  # If a path, it needs to refer to the actual file / directory to be
+  # included. If a string, it is interpreted as a path relative to `root`.
+  paths
+, # (optional) name to use for the derivation
+  #
+  # This should always be set when using roots that do not have
+  # controlled names, such as when passing the top-level of a git
+  # repository (e.g. `depot.path.origSrc`).
+  name ? builtins.baseNameOf root
+}:
+
+let
+  rootLength = builtins.stringLength (toString root);
+
+  # Count slashes in a path.
+  #
+  # Type: path -> int
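+  #
+  # For illustration: depth "/a/b/c" => 3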
+  depth = path: lib.pipe path [
+    toString
+    (builtins.split "/")
+    (builtins.filter builtins.isList)
+    builtins.length
+  ];
+
+  # (Parent) directories will be created from deepest to shallowest
+  # which should mean no conflicts are caused unless both a child
+  # and its parent directory are in the list of paths.
+  # TODO(sterni): improve error messages in such cases
+  fromDeepest = lib.sort (a: b: depth a < depth b) paths;
+
+  # Create a set which contains the source path to copy / symlink and
+  # its destination, i.e. the path below the destination root including
+  # a leading slash. Additionally some sanity checking is done.
+  makeSymlink = path:
+    let
+      withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p;
+      fullPath =
+        if builtins.isPath path then path
+        else if builtins.isString path then (root + withLeading path)
+        else builtins.throw "Unsupported path type ${builtins.typeOf path}";
+      strPath = toString fullPath;
+      contextPath = "${fullPath}";
+      belowRoot = builtins.substring rootLength (-1) strPath;
+      prefix = builtins.substring 0 rootLength strPath;
+    in
+    assert toString root == prefix; {
+      src = contextPath;
+      dst = belowRoot;
+    };
+
+  symlinks = builtins.map makeSymlink fromDeepest;
+in
+
+# TODO(sterni): teach readTree to also read symlinked directories,
+  # so we can use ln -sT instead of cp -aT.
+pkgs.runCommand "sparse-${name}" { } (
+  lib.concatMapStrings
+    ({ src, dst }: ''
+      mkdir -p "$(dirname "$out${dst}")"
+      cp -aT --reflink=auto "${src}" "$out${dst}"
+    '')
+    symlinks
+)
diff --git a/nix/stateMonad/default.nix b/nix/stateMonad/default.nix
new file mode 100644
index 0000000000..209412e099
--- /dev/null
+++ b/nix/stateMonad/default.nix
@@ -0,0 +1,76 @@
+# Simple state monad represented as
+#
+#     stateMonad s a = s -> { state : s; value : a }
+#
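+# A minimal usage sketch (see ./tests for more):
+#
+#   run 20 (after (modify (x: x + 1)) get)                    => 21
+#   run { } (after (setAttr "answer" 42) (getAttr "answer"))  => 42
+#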
+{ ... }:
+
+rec {
+  #
+  # Monad
+  #
+
+  # Type: stateMonad s a -> (a -> stateMonad s b) -> stateMonad s b
+  bind = action: f: state:
+    let
+      afterAction = action state;
+    in
+    (f afterAction.value) afterAction.state;
+
+  # Type: stateMonad s a -> stateMonad s b -> stateMonad s b
+  after = action1: action2: state: action2 (action1 state).state;
+
+  # Type: stateMonad s (stateMonad s a) -> stateMonad s a
+  join = action: bind action (action': action');
+
+  # Type: [a] -> (a -> stateMonad s b) -> stateMonad s null
+  for_ = xs: f:
+    builtins.foldl'
+      (laterAction: x:
+        after (f x) laterAction
+      )
+      (pure null)
+      xs;
+
+  #
+  # Applicative
+  #
+
+  # Type: a -> stateMonad s a
+  pure = value: state: { inherit state value; };
+
+  # TODO(sterni): <*>, lift2, …
+
+  #
+  # Functor
+  #
+
+  # Type: (a -> b) -> stateMonad s a -> stateMonad s b
+  fmap = f: action: bind action (result: pure (f result));
+
+  #
+  # State Monad
+  #
+
+  # Type: (s -> s) -> stateMonad s null
+  modify = f: state: { value = null; state = f state; };
+
+  # Type: stateMonad s s
+  get = state: { value = state; inherit state; };
+
+  # Type: s -> stateMonad s null
+  set = new: modify (_: new);
+
+  # Type: str -> stateMonad set set.${str}
+  getAttr = attr: fmap (state: state.${attr}) get;
+
+  # Type: str -> (any -> any) -> stateMonad s null
+  modifyAttr = attr: f: modify (state: state // {
+    ${attr} = f state.${attr};
+  });
+
+  # Type: str -> any -> stateMonad s null
+  setAttr = attr: value: modifyAttr attr (_: value);
+
+  # Type: s -> stateMonad s a -> a
+  run = state: action: (action state).value;
+}
diff --git a/nix/stateMonad/tests/default.nix b/nix/stateMonad/tests/default.nix
new file mode 100644
index 0000000000..c3cb5c99b5
--- /dev/null
+++ b/nix/stateMonad/tests/default.nix
@@ -0,0 +1,110 @@
+{ depot, ... }:
+
+let
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    it
+    assertEq
+    ;
+
+  inherit (depot.nix.stateMonad)
+    pure
+    run
+    join
+    fmap
+    bind
+    get
+    set
+    modify
+    after
+    for_
+    getAttr
+    setAttr
+    modifyAttr
+    ;
+
+  runStateIndependent = run (throw "This should never be evaluated!");
+in
+
+runTestsuite "stateMonad" [
+  (it "behaves correctly independent of state" [
+    (assertEq "pure" (runStateIndependent (pure 21)) 21)
+    (assertEq "join pure" (runStateIndependent (join (pure (pure 42)))) 42)
+    (assertEq "fmap pure" (runStateIndependent (fmap (builtins.mul 2) (pure 21))) 42)
+    (assertEq "bind pure" (runStateIndependent (bind (pure 12) (x: pure x))) 12)
+  ])
+  (it "behaves correctly with an integer state" [
+    (assertEq "get" (run 42 get) 42)
+    (assertEq "after set get" (run 21 (after (set 42) get)) 42)
+    (assertEq "after modify get" (run 21 (after (modify (builtins.mul 2)) get)) 42)
+    (assertEq "fmap get" (run 40 (fmap (builtins.add 2) get)) 42)
+    (assertEq "stateful sum list"
+      (run 0 (after
+        (for_
+          [
+            15
+            12
+            10
+            5
+          ]
+          (x: modify (builtins.add x)))
+        get))
+      42)
+  ])
+  (it "behaves correctly with an attr set state" [
+    (assertEq "getAttr" (run { foo = 42; } (getAttr "foo")) 42)
+    (assertEq "after setAttr getAttr"
+      (run { foo = 21; } (after (setAttr "foo" 42) (getAttr "foo")))
+      42)
+    (assertEq "after modifyAttr getAttr"
+      (run { foo = 10.5; }
+        (after
+          (modifyAttr "foo" (builtins.mul 4))
+          (getAttr "foo")))
+      42)
+    (assertEq "fmap getAttr"
+      (run { foo = 21; } (fmap (builtins.mul 2) (getAttr "foo")))
+      42)
+    (assertEq "after setAttr to insert getAttr"
+      (run { } (after (setAttr "foo" 42) (getAttr "foo")))
+      42)
+    (assertEq "insert permutations"
+      (run
+        {
+          a = 2;
+          b = 3;
+          c = 5;
+        }
+        (after
+          (bind get
+            (state:
+              let
+                names = builtins.attrNames state;
+              in
+              for_ names (name1:
+                for_ names (name2:
+                  # this is of course a bit silly, but making it more cumbersome
+                  # makes sure the test exercises more of the code.
+                  (bind (getAttr name1)
+                    (value1:
+                      (bind (getAttr name2)
+                        (value2:
+                          setAttr "${name1}_${name2}" (value1 * value2)))))))))
+          get))
+      {
+        a = 2;
+        b = 3;
+        c = 5;
+        a_a = 4;
+        a_b = 6;
+        a_c = 10;
+        b_a = 6;
+        b_b = 9;
+        b_c = 15;
+        c_c = 25;
+        c_a = 10;
+        c_b = 15;
+      }
+    )
+  ])
+]
diff --git a/nix/tag/default.nix b/nix/tag/default.nix
new file mode 100644
index 0000000000..2955656323
--- /dev/null
+++ b/nix/tag/default.nix
@@ -0,0 +1,166 @@
+{ depot, lib, ... }:
+let
+  # Takes a tag, checks whether it is an attrset with one element,
+  # if so sets `isTag` to `true` and sets the name and value.
+  # If not, sets `isTag` to `false` and sets `errmsg`.
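+  #
+  # For illustration:
+  #   verifyTag { foo = "bar"; }
+  #   => { isTag = true; name = "foo"; val = "bar"; errmsg = null; }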
+  verifyTag = tag:
+    let
+      cases = builtins.attrNames tag;
+      len = builtins.length cases;
+    in
+    if builtins.length cases == 1
+    then
+      let name = builtins.head cases; in {
+        isTag = true;
+        name = name;
+        val = tag.${name};
+        errmsg = null;
+      }
+    else {
+      isTag = false;
+      errmsg =
+        ("match: an instance of a sum is an attrset "
+          + "with exactly one element, yours had ${toString len}"
+          + ", namely: ${lib.generators.toPretty {} cases}");
+      name = null;
+      val = null;
+    };
+
+  # Returns the tag name of a given tag attribute set.
+  # Throws if the tag is invalid.
+  #
+  # Type: tag -> string
+  tagName = tag: (assertIsTag tag).name;
+
+  # Returns the tagged value of a given tag attribute set.
+  # Throws if the tag is invalid.
+  #
+  # Type: tag -> any
+  tagValue = tag: (assertIsTag tag).val;
+
+  # like `verifyTag`, but throws the error message if it is not a tag.
+  assertIsTag = tag:
+    let res = verifyTag tag; in
+    assert res.isTag || throw res.errmsg;
+    { inherit (res) name val; };
+
+
+  # Discriminator for values.
+  # Goes through a list of tagged predicates `{ <tag> = <pred>; }`
+  # and wraps the value in the tag of the first predicate that
+  # applies, i.e. `{ <tag> = v; }`.
+  # The result can then later be matched on with `match`.
+  #
+  # `defTag` is the tag that is assigned if there is no match.
+  #
+  # Examples:
+  #   discrDef "smol" [
+  #     { biggerFive = i: i > 5; }
+  #     { negative = i: i < 0; }
+  #   ] (-100)
+  #   => { negative = -100; }
+  #   discrDef "smol" [
+  #     { biggerFive = i: i > 5; }
+  #     { negative = i: i < 0; }
+  #   ] 1
+  #   => { smol = 1; }
+  discrDef = defTag: fs: v:
+    let
+      res = lib.findFirst
+        (t: t.val v)
+        null
+        (map assertIsTag fs);
+    in
+    if res == null
+    then { ${defTag} = v; }
+    else { ${res.name} = v; };
+
+  # Like `discrDef`, but fail if there is no match.
+  discr = fs: v:
+    let res = discrDef null fs v; in
+    assert lib.assertMsg (res != { })
+      "tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}";
+    res;
+
+  # The canonical pattern matching primitive.
+  # A sum value is an attribute set with one element,
+  # whose key is the name of the variant and
+  # whose value is the content of the variant.
+  # `matcher` is an attribute set which enumerates
+  # all possible variants as keys and provides a function
+  # which handles each variant’s content.
+  # You should make an effort to return values of the same
+  # type in your matcher, or new sums.
+  #
+  # Example:
+  #   let
+  #      success = { res = 42; };
+  #      failure = { err = "no answer"; };
+  #      matcher = {
+  #        res = i: i + 1;
+  #        err = _: 0;
+  #      };
+  #    in
+  #       match success matcher == 43
+  #    && match failure matcher == 0;
+  #
+  match = sum: matcher:
+    let cases = builtins.attrNames sum;
+    in assert
+    let len = builtins.length cases; in
+    lib.assertMsg (len == 1)
+      ("match: an instance of a sum is an attrset "
+        + "with exactly one element, yours had ${toString len}"
+        + ", namely: ${lib.generators.toPretty {} cases}");
+    let case = builtins.head cases;
+    in assert
+    lib.assertMsg (matcher ? ${case})
+      ("match: \"${case}\" is not a valid case of this sum, "
+        + "the matcher accepts: ${lib.generators.toPretty {}
+            (builtins.attrNames matcher)}");
+    matcher.${case} sum.${case};
+
+  # A `match` with the arguments flipped.
+  # “Lam” stands for “lambda”, because it can be used like the
+  # `\case` expression (LambdaCase) in Haskell, to create a curried
+  # “matcher” function ready to take a value.
+  #
+  # Example:
+  #   lib.pipe { foo = 42; } [
+  #     (matchLam {
+  #       foo = i: if i < 23 then { small = i; } else { big = i; };
+  #       bar = _: { small = 5; };
+  #     })
+  #     (matchLam {
+  #       small = i: "yay it was small";
+  #       big = i: "whoo it was big!";
+  #     })
+  #   ]
+  #   => "whoo it was big!";
+  matchLam = matcher: sum: match sum matcher;
+
+  tests = import ./tests.nix {
+    inherit
+      depot
+      lib
+      verifyTag
+      discr
+      discrDef
+      match
+      matchLam
+      ;
+  };
+
+in
+{
+  inherit
+    verifyTag
+    tagName
+    tagValue
+    discr
+    discrDef
+    match
+    matchLam
+    tests
+    ;
+}
diff --git a/nix/tag/tests.nix b/nix/tag/tests.nix
new file mode 100644
index 0000000000..e0085b4837
--- /dev/null
+++ b/nix/tag/tests.nix
@@ -0,0 +1,99 @@
+{ depot, lib, verifyTag, discr, discrDef, match, matchLam }:
+
+let
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    assertEq
+    assertThrows
+    it
+    ;
+
+  isTag-test = it "checks whether something is a tag" [
+    (assertEq "is Tag"
+      (verifyTag { foo = "bar"; })
+      {
+        isTag = true;
+        name = "foo";
+        val = "bar";
+        errmsg = null;
+      })
+    (assertEq "is not Tag"
+      (removeAttrs (verifyTag { foo = "bar"; baz = 42; }) [ "errmsg" ])
+      {
+        isTag = false;
+        name = null;
+        val = null;
+      })
+  ];
+
+  discr-test = it "can discr things" [
+    (assertEq "id"
+      (discr [
+        { a = lib.const true; }
+      ] "x")
+      { a = "x"; })
+    (assertEq "bools here, ints there"
+      (discr [
+        { bool = lib.isBool; }
+        { int = lib.isInt; }
+      ] 25)
+      { int = 25; })
+    (assertEq "bools here, ints there 2"
+      (discr [
+        { bool = lib.isBool; }
+        { int = lib.isInt; }
+      ]
+        true)
+      { bool = true; })
+    (assertEq "fallback to default"
+      (discrDef "def" [
+        { bool = lib.isBool; }
+        { int = lib.isInt; }
+      ] "foo")
+      { def = "foo"; })
+    (assertThrows "throws failing to match"
+      (discr [
+        { fish = x: x == 42; }
+      ] 21))
+  ];
+
+  match-test = it "can match things" [
+    (assertEq "match example"
+      (
+        let
+          success = { res = 42; };
+          failure = { err = "no answer"; };
+          matcher = {
+            res = i: i + 1;
+            err = _: 0;
+          };
+        in
+        {
+          one = match success matcher;
+          two = match failure matcher;
+        }
+      )
+      {
+        one = 43;
+        two = 0;
+      })
+    (assertEq "matchLam & pipe"
+      (lib.pipe { foo = 42; } [
+        (matchLam {
+          foo = i: if i < 23 then { small = i; } else { big = i; };
+          bar = _: { small = 5; };
+        })
+        (matchLam {
+          small = i: "yay it was small";
+          big = i: "whoo it was big!";
+        })
+      ])
+      "whoo it was big!")
+  ];
+
+in
+runTestsuite "tag" [
+  isTag-test
+  discr-test
+  match-test
+]
diff --git a/nix/tailscale/default.nix b/nix/tailscale/default.nix
index a21af7d115..363f717db6 100644
--- a/nix/tailscale/default.nix
+++ b/nix/tailscale/default.nix
@@ -2,7 +2,7 @@
 #
 # https://tailscale.com/kb/1018/install-acls
 
-{ depot, ... }:
+{ depot, pkgs, ... }:
 
 with depot.nix.yants;
 
@@ -27,4 +27,5 @@ let
     # Actual ACL entries
     ACLs = list acl;
   };
-in config: toFile "tailscale-acl.json" (toJSON (aclConfig config))
+in
+config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config))
diff --git a/nix/utils/OWNERS b/nix/utils/OWNERS
new file mode 100644
index 0000000000..2e95807063
--- /dev/null
+++ b/nix/utils/OWNERS
@@ -0,0 +1 @@
+sterni
diff --git a/nix/utils/default.nix b/nix/utils/default.nix
new file mode 100644
index 0000000000..0c6c88fafd
--- /dev/null
+++ b/nix/utils/default.nix
@@ -0,0 +1,160 @@
+{ depot, lib, ... }:
+
+let
+  /* Get the basename of a store path without
+     the leading hash.
+
+     Type: (path | drv | string) -> string
+
+     Example:
+       storePathName ./foo.c
+       => "foo.c"
+
+       storePathName (writeText "foo.c" "int main() { return 0; }")
+       => "foo.c"
+
+       storePathName "${hello}/bin/hello"
+       => "hello"
+  */
+  storePathName = p:
+    if lib.isDerivation p
+    then p.name
+    else if builtins.isPath p
+    then builtins.baseNameOf p
+    else if builtins.isString p || (builtins.isAttrs p && (p ? outPath || p ? __toString))
+    then
+      let
+        strPath = toString p;
+        # strip leading storeDir and trailing slashes
+        noStoreDir = lib.removeSuffix "/"
+          (lib.removePrefix "${builtins.storeDir}/" strPath);
+        # a basename of a child of a store path isn't really
+        # referring to a store path, so removing the string
+        # context is safe (e. g. "hello" for "${hello}/bin/hello").
+        basename = builtins.unsafeDiscardStringContext
+          (builtins.baseNameOf strPath);
+      in
+      # If p is a direct child of storeDir, we need to remove
+        # the leading hash as well to make sure that:
+        # `storePathName drv == storePathName (toString drv)`.
+      if noStoreDir == basename
+      then builtins.substring 33 (-1) basename
+      else basename
+    else builtins.throw "Don't know how to get (base)name of "
+      + lib.generators.toPretty { } p;
+
+  /* Query the type of a path, exposing the same information as would be
+     returned by `builtins.readDir`, but for a single, specific target path.
+
+     The information is returned as a tagged value, i. e. an attribute set with
+     exactly one attribute where the type of the path is encoded in the name
+     of the single attribute. The allowed tags and values are as follows:
+
+     * `regular`: is a regular file, always `true` if returned
+     * `directory`: is a directory, always `true` if returned
+     * `missing`: path does not exist, always `true` if returned
+     * `symlink`: path is a symlink, always `true` if returned
+
+     Type: path(-like) -> tag
+
+     `tag` refers to the attribute set format of `//nix/tag`.
+
+     Example:
+       pathType ./foo.c
+       => { regular = true; }
+
+       pathType /home/lukas
+       => { directory = true; }
+
+       pathType ./result
+       => { symlink = true; }
+
+       pathType ./link-to-file
+       => { symlink = true; }
+
+       pathType /does/not/exist
+       => { missing = true; }
+
+       # Check if a path exists
+       !(pathType /file ? missing)
+
+       # Check if a path is a directory or a symlink to a directory
+       # A handy shorthand for this is provided as `realPathIsDirectory`.
+       pathType /path ? directory || (pathType /path).symlink or null == "directory"
+
+       # Match on the result using //nix/tag
+       nix.tag.match (nix.utils.pathType ./result) {
+         symlink = _: "symlink";
+         directory  = _: "directory";
+         regular = _: "regular";
+         missing = _: "path does not exist";
+       }
+       => "symlink"
+
+       # Query path type
+       nix.tag.tagName (pathType /path)
+  */
+  pathType = path:
+    let
+      # baseNameOf is very annoyed if we proceed with string context.
+      # We call toString first to prevent unsafeDiscardStringContext
+      # from importing a path value into the store, which would mess
+      # with its base- and dirname.
+      path' = builtins.unsafeDiscardStringContext (toString path);
+      # To read the containing directory we absolutely need
+      # to keep the string context, otherwise a derivation
+      # would not be realized before our check (at eval time)
+      containingDir = builtins.readDir (builtins.dirOf path);
+      # Construct tag to use for the value
+      thisPathType = containingDir.${builtins.baseNameOf path'} or "missing";
+      # Trick to check if the symlink target exists and is a directory:
+      # if we append a "/." to the string version of the path, Nix won't
+      # canonicalize it (which would strip any "/." in the path), so if
+      # path' + "/." exists, we know that the symlink points to an existing
+      # directory. If not, either the target doesn't exist or is a regular file.
+      # TODO(sterni): is there a way to check reliably if the symlink target exists?
+      isSymlinkDir = builtins.pathExists (path' + "/.");
+    in
+    {
+      ${thisPathType} = true;
+    };
+
+  pathType' = path:
+    let
+      p = pathType path;
+    in
+    if p ? missing
+    then builtins.throw "${lib.generators.toPretty {} path} does not exist"
+    else p;
+
+  /* Check whether the given path is a directory.
+     Throws if the path in question doesn't exist.
+
+     Type: path(-like) -> bool
+  */
+  isDirectory = path: pathType' path ? directory;
+
+  /* Check whether the given path is a regular file.
+     Throws if the path in question doesn't exist.
+
+     Type: path(-like) -> bool
+  */
+  isRegularFile = path: pathType' path ? regular;
+
+  /* Check whether the given path is a symbolic link.
+     Throws if the path in question doesn't exist.
+
+     Type: path(-like) -> bool
+  */
+  isSymlink = path: pathType' path ? symlink;
+
+in
+{
+  inherit
+    storePathName
+    pathType
+    isDirectory
+    isRegularFile
+    isSymlink
+    ;
+}
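
For orientation, a minimal usage sketch of this module (assuming it is exposed as `depot.nix.utils` via readTree, as the tests below do; `./src` is a hypothetical path):

  let utils = depot.nix.utils; in
  {
    # => "src"
    srcName = utils.storePathName ./src;
    # => { directory = true; } (or { regular = true; }, { symlink = true; }, { missing = true; })
    srcKind = utils.pathType ./src;
    # => true; throws if ./src does not exist
    srcIsDir = utils.isDirectory ./src;
  }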
diff --git a/nix/utils/tests/.skip-subtree b/nix/utils/tests/.skip-subtree
new file mode 100644
index 0000000000..1efb1b9476
--- /dev/null
+++ b/nix/utils/tests/.skip-subtree
@@ -0,0 +1 @@
+subdirectories are just test cases
diff --git a/nix/utils/tests/default.nix b/nix/utils/tests/default.nix
new file mode 100644
index 0000000000..344a1771d7
--- /dev/null
+++ b/nix/utils/tests/default.nix
@@ -0,0 +1,99 @@
+{ depot, lib, ... }:
+
+let
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    it
+    assertEq
+    assertThrows
+    assertDoesNotThrow
+    ;
+
+  inherit (depot.nix.utils)
+    isDirectory
+    isRegularFile
+    isSymlink
+    pathType
+    storePathName
+    ;
+
+  assertUtilsPred = msg: act: exp: [
+    (assertDoesNotThrow "${msg} does not throw" act)
+    (assertEq msg (builtins.tryEval act).value exp)
+  ];
+
+  pathPredicates = it "judges paths correctly" (lib.flatten [
+    # isDirectory
+    (assertUtilsPred "directory isDirectory"
+      (isDirectory ./directory)
+      true)
+    (assertUtilsPred "symlink not isDirectory"
+      (isDirectory ./symlink-directory)
+      false)
+    (assertUtilsPred "file not isDirectory"
+      (isDirectory ./directory/file)
+      false)
+    # isRegularFile
+    (assertUtilsPred "file isRegularFile"
+      (isRegularFile ./directory/file)
+      true)
+    (assertUtilsPred "symlink not isRegularFile"
+      (isRegularFile ./symlink-file)
+      false)
+    (assertUtilsPred "directory not isRegularFile"
+      (isRegularFile ./directory)
+      false)
+    # isSymlink
+    (assertUtilsPred "symlink to file isSymlink"
+      (isSymlink ./symlink-file)
+      true)
+    (assertUtilsPred "symlink to directory isSymlink"
+      (isSymlink ./symlink-directory)
+      true)
+    (assertUtilsPred "symlink to symlink isSymlink"
+      (isSymlink ./symlink-symlink-file)
+      true)
+    (assertUtilsPred "symlink to missing file isSymlink"
+      (isSymlink ./missing)
+      true)
+    (assertUtilsPred "directory not isSymlink"
+      (isSymlink ./directory)
+      false)
+    (assertUtilsPred "file not isSymlink"
+      (isSymlink ./directory/file)
+      false)
+    # missing files throw
+    (assertThrows "isDirectory throws on missing file"
+      (isDirectory ./does-not-exist))
+    (assertThrows "isRegularFile throws on missing file"
+      (isRegularFile ./does-not-exist))
+    (assertThrows "isSymlink throws on missing file"
+      (isSymlink ./does-not-exist))
+  ]);
+
+  cheddarStorePath =
+    builtins.unsafeDiscardStringContext depot.tools.cheddar.outPath;
+
+  cleanedSource = lib.cleanSource ./.;
+
+  storePathNameTests = it "correctly gets the basename of a store path" [
+    (assertEq "base name of a derivation"
+      (storePathName depot.tools.cheddar)
+      depot.tools.cheddar.name)
+    (assertEq "base name of a store path string"
+      (storePathName cheddarStorePath)
+      depot.tools.cheddar.name)
+    (assertEq "base name of a path within a store path"
+      (storePathName "${cheddarStorePath}/bin/cheddar") "cheddar")
+    (assertEq "base name of a path"
+      (storePathName ../default.nix) "default.nix")
+    (assertEq "base name of a cleanSourced path"
+      (storePathName cleanedSource)
+      cleanedSource.name)
+  ];
+in
+
+runTestsuite "nix.utils" [
+  pathPredicates
+  storePathNameTests
+]
diff --git a/nix/utils/tests/directory/file b/nix/utils/tests/directory/file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/nix/utils/tests/directory/file
diff --git a/nix/utils/tests/missing b/nix/utils/tests/missing
new file mode 120000
index 0000000000..cfa0a46515
--- /dev/null
+++ b/nix/utils/tests/missing
@@ -0,0 +1 @@
+does-not-exist
\ No newline at end of file
diff --git a/nix/utils/tests/symlink-directory b/nix/utils/tests/symlink-directory
new file mode 120000
index 0000000000..6d0450cc24
--- /dev/null
+++ b/nix/utils/tests/symlink-directory
@@ -0,0 +1 @@
+directory
\ No newline at end of file
diff --git a/nix/utils/tests/symlink-file b/nix/utils/tests/symlink-file
new file mode 120000
index 0000000000..cd5d01792a
--- /dev/null
+++ b/nix/utils/tests/symlink-file
@@ -0,0 +1 @@
+directory/file
\ No newline at end of file
diff --git a/nix/utils/tests/symlink-symlink-directory b/nix/utils/tests/symlink-symlink-directory
new file mode 120000
index 0000000000..5d6ba5247e
--- /dev/null
+++ b/nix/utils/tests/symlink-symlink-directory
@@ -0,0 +1 @@
+symlink-directory
\ No newline at end of file
diff --git a/nix/utils/tests/symlink-symlink-file b/nix/utils/tests/symlink-symlink-file
new file mode 120000
index 0000000000..f8d9ce3659
--- /dev/null
+++ b/nix/utils/tests/symlink-symlink-file
@@ -0,0 +1 @@
+symlink-file
\ No newline at end of file
diff --git a/nix/writeElispBin/default.nix b/nix/writeElispBin/default.nix
new file mode 100644
index 0000000000..ee3dc7a3ed
--- /dev/null
+++ b/nix/writeElispBin/default.nix
@@ -0,0 +1,20 @@
+{ depot, pkgs, ... }:
+
+{ name, src, deps ? (_: [ ]), emacs ? pkgs.emacs28-nox }:
+
+let
+  inherit (pkgs) emacsPackagesFor;
+  inherit (builtins) isString toFile;
+
+  finalEmacs = (emacsPackagesFor emacs).emacsWithPackages deps;
+
+  srcFile =
+    if isString src
+    then toFile "${name}.el" src
+    else src;
+
+in
+depot.nix.writeScriptBin name ''
+  #!/bin/sh
+  ${finalEmacs}/bin/emacs --batch --no-site-file --script ${srcFile} "$@"
+''
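
A hedged usage sketch of the function above (the script body and the `dash` dependency are made up for illustration):

  depot.nix.writeElispBin {
    name = "greet";
    deps = epkgs: [ epkgs.dash ];
    src = ''
      (require 'dash)
      (message "hello %s" (-first-item command-line-args-left))
    '';
  }

The result is an executable named `greet` that runs the elisp with the chosen Emacs and its requested packages available.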
diff --git a/nix/writeExecline/OWNERS b/nix/writeExecline/OWNERS
deleted file mode 100644
index a742d0d22b..0000000000
--- a/nix/writeExecline/OWNERS
+++ /dev/null
@@ -1,3 +0,0 @@
-inherited: true
-owners:
-  - Profpatsch
diff --git a/nix/writeExecline/default.nix b/nix/writeExecline/default.nix
index 8626aa4608..5169b01386 100644
--- a/nix/writeExecline/default.nix
+++ b/nix/writeExecline/default.nix
@@ -14,9 +14,10 @@ name:
   # "env": don’t substitute, set # and 0…n environment vaariables, where n=$#
   # "none": don’t substitute or set any positional arguments
   # "env-no-push": like "env", but bypass the push-phase. Not recommended.
-  argMode ? "var",
-  # Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S"
-  readNArgs ? 0,
+  argMode ? "var"
+, # Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S")
+  readNArgs ? 0
+,
 }:
 # Nested list of lists of commands.
 # Inner lists are translated to execline blocks.
@@ -24,7 +25,7 @@ argList:
 
 let
   env =
-    if      argMode == "var" then "s${toString readNArgs}"
+    if argMode == "var" then "s${toString readNArgs}"
     else if argMode == "var-full" then "S${toString readNArgs}"
     else if argMode == "env" then ""
     else if argMode == "none" then "P"
@@ -32,7 +33,7 @@ let
     else abort ''"${toString argMode}" is not a valid argMode, use one of "var", "var-full", "env", "none", "env-no-push".'';
 
 in
-  depot.nix.writeScript name ''
-    #!${pkgs.execline}/bin/execlineb -W${env}
-    ${depot.nix.escapeExecline argList}
-  ''
+depot.nix.writeScript name ''
+  #!${pkgs.execline}/bin/execlineb -W${env}
+  ${depot.nix.escapeExecline argList}
+''
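
For reference, a hedged sketch of a call site (assuming `depot` and `pkgs` are in scope; the script itself is made up):

  depot.nix.writeExecline "greet" { readNArgs = 1; } [
    "if" [ "${pkgs.coreutils}/bin/echo" "hello" "$1" ]
    "${pkgs.coreutils}/bin/echo" "done"
  ]

With `readNArgs = 1` the shebang becomes `execlineb -Ws1`, so `$1` is substituted with the first argument passed to the resulting script; the inner list is rendered as an execline block.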
diff --git a/nix/writeScript/default.nix b/nix/writeScript/default.nix
index e8e6e0fa10..1f53b4e4ff 100644
--- a/nix/writeScript/default.nix
+++ b/nix/writeScript/default.nix
@@ -5,25 +5,31 @@
 
 let
   bins = depot.nix.getBins pkgs.s6-portable-utils [
-           "s6-cat"
-           "s6-chmod"
-         ];
+    "s6-cat"
+    "s6-chmod"
+  ];
 
 in
 name:
 # string of the executable script that is put in $out
 script:
 
-depot.nix.runExecline name {
+depot.nix.runExecline name
+{
   stdin = script;
   derivationArgs = {
     preferLocalBuild = true;
     allowSubstitutes = false;
   };
 } [
-  "importas" "out" "out"
+  "importas"
+  "out"
+  "out"
   # this pipes stdout of s6-cat to $out
   # and s6-cat redirects from stdin to stdout
-  "if" [ "redirfd" "-w" "1" "$out" bins.s6-cat ]
-  bins.s6-chmod "0755" "$out"
+  "if"
+  [ "redirfd" "-w" "1" "$out" bins.s6-cat ]
+  bins.s6-chmod
+  "0755"
+  "$out"
 ]
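
A minimal call sketch (hypothetical name and contents):

  depot.nix.writeScript "hi" ''
    #!/bin/sh
    echo hi
  ''

The script string is piped verbatim into `$out`, which is then made executable (mode 0755).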
diff --git a/nix/writeTree/OWNERS b/nix/writeTree/OWNERS
new file mode 100644
index 0000000000..b381c4e660
--- /dev/null
+++ b/nix/writeTree/OWNERS
@@ -0,0 +1 @@
+aspen
diff --git a/nix/writeTree/default.nix b/nix/writeTree/default.nix
new file mode 100644
index 0000000000..0c7c2a130f
--- /dev/null
+++ b/nix/writeTree/default.nix
@@ -0,0 +1,43 @@
+{ depot, lib, pkgs, ... }:
+let
+  inherit (lib) fix pipe mapAttrsToList isAttrs concatLines isString isDerivation isPath;
+
+  # TODO(sterni): move to //nix/utils with clearer naming and alternative similar to lib.types.path
+  isPathLike = value:
+    isPath value
+    || isDerivation value
+    || (isString value && builtins.hasContext value);
+
+  esc = s: lib.escapeShellArg /* ensure paths import into store */ "${s}";
+
+  writeTreeAtPath = path: tree:
+    ''
+      mkdir -p "$out/"${esc path}
+    ''
+    + pipe tree [
+      (mapAttrsToList (k: v:
+        if isPathLike v then
+          "cp -R --reflink=auto ${v} \"$out/\"${esc path}/${esc k}"
+        else if lib.isAttrs v then
+          writeTreeAtPath (path + "/" + k) v
+        else
+          throw "invalid type (expected path, derivation, string with context, or attrs)"))
+      concatLines
+    ];
+
+  /* Create a directory tree specified by a Nix attribute set structure.
+
+     Each value in `tree` should be either a path-like value (a path, a
+     derivation or a string with context) or another tree attribute set.
+     Path-like values are copied to the position given by their attribute path.
+
+     Type: string -> attrSet -> derivation
+  */
+  writeTree = name: tree:
+    pkgs.runCommandLocal name { } (writeTreeAtPath "" tree);
+in
+
+# __functor trick so readTree can add the tests attribute
+{
+  __functor = _: writeTree;
+}
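
A small sketch of the call shape (file names are illustrative; the tests below exercise it more thoroughly):

  depot.nix.writeTree "example-tree" {
    "README.md" = ./README.md;
    docs = {
      "notes.txt" = builtins.toFile "notes.txt" "hello";
    };
  }

This yields a derivation whose output contains `README.md` and `docs/notes.txt`.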
diff --git a/nix/writeTree/tests/default.nix b/nix/writeTree/tests/default.nix
new file mode 100644
index 0000000000..c5858ee96e
--- /dev/null
+++ b/nix/writeTree/tests/default.nix
@@ -0,0 +1,93 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  inherit (pkgs) runCommand writeText writeTextFile;
+  inherit (depot.nix) writeTree;
+
+  checkTree = name: tree: expected:
+    runCommand "writeTree-test-${name}"
+      {
+        nativeBuildInputs = [ pkgs.buildPackages.lr ];
+        passAsFile = [ "expected" ];
+        inherit expected;
+      } ''
+      actualPath="$NIX_BUILD_TOP/actual"
+      cd ${lib.escapeShellArg (writeTree name tree)}
+      lr . > "$actualPath"
+      diff -u "$expectedPath" "$actualPath" | tee "$out"
+    '';
+in
+
+depot.nix.readTree.drvTargets {
+  empty = checkTree "empty" { }
+    ''
+      .
+    '';
+
+  simple-paths = checkTree "simple"
+    {
+      writeTree = {
+        meta = {
+          "owners.txt" = ../OWNERS;
+        };
+        "code.nix" = ../default.nix;
+        all-tests = ./.;
+        nested.dirs.eval-time = builtins.toFile "owothia" ''
+          hold me owo
+        '';
+      };
+    }
+    ''
+      .
+      ./writeTree
+      ./writeTree/all-tests
+      ./writeTree/all-tests/default.nix
+      ./writeTree/code.nix
+      ./writeTree/meta
+      ./writeTree/meta/owners.txt
+      ./writeTree/nested
+      ./writeTree/nested/dirs
+      ./writeTree/nested/dirs/eval-time
+    '';
+
+  empty-dirs = checkTree "empty-dirs"
+    {
+      this.dir.is.empty = { };
+      so.is.this.one = { };
+    }
+    ''
+      .
+      ./so
+      ./so/is
+      ./so/is/this
+      ./so/is/this/one
+      ./this
+      ./this/dir
+      ./this/dir/is
+      ./this/dir/is/empty
+    '';
+
+  drvs = checkTree "drvs"
+    {
+      file-drv = writeText "road.txt" ''
+        Any road followed precisely to its end leads precisely nowhere.
+      '';
+      dir-drv = writeTextFile {
+        name = "dir-of-text";
+        destination = "/text/in/more/dirs.txt";
+        text = ''
+          Climb the mountain just a little bit to test that it’s a mountain.
+          From the top of the mountain, you cannot see the mountain.
+        '';
+      };
+    }
+    ''
+      .
+      ./dir-drv
+      ./dir-drv/text
+      ./dir-drv/text/in
+      ./dir-drv/text/in/more
+      ./dir-drv/text/in/more/dirs.txt
+      ./file-drv
+    '';
+}
diff --git a/nix/writers/default.nix b/nix/writers/default.nix
new file mode 100644
index 0000000000..55355913a9
--- /dev/null
+++ b/nix/writers/default.nix
@@ -0,0 +1,113 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-ls" "s6-touch" ];
+
+  linkTo = name: path: depot.nix.runExecline.local name { } [
+    "importas"
+    "out"
+    "out"
+    bins.s6-ln
+    "-s"
+    path
+    "$out"
+  ];
+
+  # Build a rust executable, $out is the executable.
+  rustSimple = args@{ name, ... }: src:
+    linkTo name "${rustSimpleBin args src}/bin/${name}";
+
+  # Like `rustSimple`, but put the binary in `$out/bin/`.
+  rustSimpleBin =
+    { name
+    , dependencies ? [ ]
+    , doCheck ? true
+    ,
+    }: src:
+    (if doCheck then testRustSimple else pkgs.lib.id)
+      (pkgs.buildRustCrate ({
+        pname = name;
+        version = "1.0.0";
+        crateName = name;
+        crateBin = [ name ];
+        dependencies = dependencies;
+        src = pkgs.runCommandLocal "write-main.rs"
+          {
+            src = src;
+            passAsFile = [ "src" ];
+          } ''
+          mkdir -p $out/src/bin
+          cp "$srcPath" $out/src/bin/${name}.rs
+          find $out
+        '';
+      }));
+
+  # Build a rust library, that can be used as dependency to `rustSimple`.
+  # Wrapper around `pkgs.buildRustCrate`, takes all its arguments.
+  rustSimpleLib =
+    { name
+    , dependencies ? [ ]
+    , doCheck ? true
+    ,
+    }: src:
+    (if doCheck then testRustSimple else pkgs.lib.id)
+      (pkgs.buildRustCrate ({
+        pname = name;
+        version = "1.0.0";
+        crateName = name;
+        dependencies = dependencies;
+        src = pkgs.runCommandLocal "write-lib.rs"
+          {
+            src = src;
+            passAsFile = [ "src" ];
+          } ''
+          mkdir -p $out/src
+          cp "$srcPath" $out/src/lib.rs
+          find $out
+        '';
+      }));
+
+  /* Takes a `buildRustCrate` derivation as an input,
+     builds it with `{ buildTests = true; }` and runs
+     all tests found in its `tests` dir. If they are
+     all successful, `$out` will point to the crate
+     built with `{ buildTests = false; }`, otherwise
+     it will fail to build.
+
+     See also `nix.drvSeqL` which is used to implement
+     this behavior.
+  */
+  testRustSimple = rustDrv:
+    let
+      crate = buildTests: rustDrv.override { inherit buildTests; };
+      tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" { } [
+        "importas"
+        "out"
+        "out"
+        "if"
+        [
+          "pipeline"
+          [ bins.s6-ls "${crate true}/tests" ]
+          "forstdin"
+          "-o0"
+          "test"
+          "importas"
+          "test"
+          "test"
+          "${crate true}/tests/$test"
+        ]
+        bins.s6-touch
+        "$out"
+      ];
+    in
+    depot.nix.drvSeqL [ tests ] (crate false);
+
+in
+{
+  inherit
+    rustSimple
+    rustSimpleBin
+    rustSimpleLib
+    ;
+}
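
A hedged sketch of the call shape (program contents made up; the tests below cover the library variants as well):

  depot.nix.writers.rustSimpleBin { name = "hello-rust"; } ''
    fn main() {
      println!("hello from rust");
    }
  ''

`rustSimpleBin` places the binary at `$out/bin/hello-rust`, while `rustSimple` links the executable directly to `$out`.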
diff --git a/nix/writers/tests/rust.nix b/nix/writers/tests/rust.nix
new file mode 100644
index 0000000000..232a2dc608
--- /dev/null
+++ b/nix/writers/tests/rust.nix
@@ -0,0 +1,76 @@
+{ depot, pkgs, ... }:
+
+let
+  inherit (depot.nix.writers)
+    rustSimple
+    rustSimpleLib
+    rustSimpleBin
+    ;
+
+  inherit (pkgs)
+    coreutils
+    ;
+
+  run = drv: depot.nix.runExecline.local "run-${drv.name}" { } [
+    "if"
+    [ drv ]
+    "importas"
+    "out"
+    "out"
+    "${coreutils}/bin/touch"
+    "$out"
+  ];
+
+  rustTransitiveLib = rustSimpleLib
+    {
+      name = "transitive";
+    } ''
+    pub fn transitive(s: &str) -> String {
+      let mut new = s.to_string();
+      new.push_str(" 1 2 3");
+      new
+    }
+
+    #[cfg(test)]
+    mod tests {
+      use super::*;
+
+      #[test]
+      fn test_transitive() {
+        assert_eq!(transitive("foo").as_str(), "foo 1 2 3")
+      }
+    }
+  '';
+
+  rustTestLib = rustSimpleLib
+    {
+      name = "test_lib";
+      dependencies = [ rustTransitiveLib ];
+    } ''
+    extern crate transitive;
+    use transitive::{transitive};
+    pub fn test() -> String {
+      transitive("test")
+    }
+  '';
+
+  rustWithLib = run (rustSimple
+    {
+      name = "rust-with-lib";
+      dependencies = [ rustTestLib ];
+    } ''
+    extern crate test_lib;
+
+    fn main() {
+      assert_eq!(test_lib::test(), String::from("test 1 2 3"));
+    }
+  '');
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    rustTransitiveLib
+    rustWithLib
+    ;
+}
diff --git a/nix/yants/README.md b/nix/yants/README.md
index d17ea61b52..98e6642e2d 100644
--- a/nix/yants/README.md
+++ b/nix/yants/README.md
@@ -44,9 +44,9 @@ Currently lacking:
 Yants can be imported from its `default.nix`. A single attribute (`lib`) can be
 passed, which will otherwise be imported from `<nixpkgs>`.
 
-TIP: You do not need to clone my whole repository to use Yants! It is split out
-into the `nix/yants` branch which you can clone with, for example, `git clone -b
-nix/yants https://git.tazj.in yants`.
+TIP: You do not need to clone the entire TVL repository to use Yants!
+You can clone just this project through josh: `git clone
+https://code.tvl.fyi/depot.git:/nix/yants.git`
 
 Examples for the most common import methods would be:
 
diff --git a/nix/yants/default.nix b/nix/yants/default.nix
index 6da99fa3c8..cb9fc08287 100644
--- a/nix/yants/default.nix
+++ b/nix/yants/default.nix
@@ -6,10 +6,10 @@
 #
 # All types (should) compose as expected.
 
-{ lib ?  (import <nixpkgs> {}).lib, ... }:
+{ lib ? (import <nixpkgs> { }).lib, ... }:
 
 with builtins; let
-  prettyPrint = lib.generators.toPretty {};
+  prettyPrint = lib.generators.toPretty { };
 
   # typedef' :: struct {
   #   name = string;
@@ -34,41 +34,44 @@ with builtins; let
   #
   # This function is the low-level primitive used to create types. For
   # many cases the higher-level 'typedef' function is more appropriate.
-  typedef' = { name, checkType
-             , checkToBool ? (result: result.ok)
-             , toError ? (_: result: result.err)
-             , def ? null
-             , match ? null }: {
-    inherit name checkToBool toError;
-
-    # check :: a -> bool
-    #
-    # This function is used to determine whether a given type is
-    # conformant.
-    check = value: checkToBool (checkType value);
-
-    # checkType :: a -> struct { ok = bool; err = option string; }
-    #
-    # This function checks whether the passed value is type conformant
-    # and returns an optional type error string otherwise.
-    inherit checkType;
-
-    # __functor :: a -> a
-    #
-    # This function checks whether the passed value is type conformant
-    # and throws an error if it is not.
-    #
-    # The name of this function is a special attribute in Nix that
-    # makes it possible to execute a type attribute set like a normal
-    # function.
-    __functor = self: value:
-    let result = self.checkType value;
-    in if checkToBool result then value
-       else throw (toError value result);
-  };
+  typedef' =
+    { name
+    , checkType
+    , checkToBool ? (result: result.ok)
+    , toError ? (_: result: result.err)
+    , def ? null
+    , match ? null
+    }: {
+      inherit name checkToBool toError;
+
+      # check :: a -> bool
+      #
+      # This function is used to determine whether a given type is
+      # conformant.
+      check = value: checkToBool (checkType value);
+
+      # checkType :: a -> struct { ok = bool; err = option string; }
+      #
+      # This function checks whether the passed value is type conformant
+      # and returns an optional type error string otherwise.
+      inherit checkType;
+
+      # __functor :: a -> a
+      #
+      # This function checks whether the passed value is type conformant
+      # and throws an error if it is not.
+      #
+      # The name of this function is a special attribute in Nix that
+      # makes it possible to execute a type attribute set like a normal
+      # function.
+      __functor = self: value:
+        let result = self.checkType value;
+        in if checkToBool result then value
+        else throw (toError value result);
+    };
 
   typeError = type: val:
-  "expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'";
+    "expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'";
 
   # typedef :: string -> (a -> bool) -> type
   #
@@ -76,32 +79,43 @@ with builtins; let
   # error message constructor.
   typedef = name: check: typedef' {
     inherit name;
-    checkType = check;
-    checkToBool = r: r;
-    toError = value: _result: typeError name value;
+    checkType = v:
+      let res = check v;
+      in {
+        ok = res;
+      } // (lib.optionalAttrs (!res) {
+        err = typeError name v;
+      });
   };
 
-  checkEach = name: t: l: foldl' (acc: e:
-    let res = t.checkType e;
+  checkEach = name: t: l: foldl'
+    (acc: e:
+      let
+        res = t.checkType e;
         isT = t.checkToBool res;
-    in {
-      ok = acc.ok && isT;
-      err = if isT
-        then acc.err
-        else acc.err + "${prettyPrint e}: ${t.toError e res}\n";
-    }) { ok = true; err = "expected type ${name}, but found:\n"; } l;
-in lib.fix (self: {
+      in
+      {
+        ok = acc.ok && isT;
+        err =
+          if isT
+          then acc.err
+          else acc.err + "${prettyPrint e}: ${t.toError e res}\n";
+      })
+    { ok = true; err = "expected type ${name}, but found:\n"; }
+    l;
+in
+lib.fix (self: {
   # Primitive types
-  any      = typedef "any" (_: true);
-  unit     = typedef "unit" (v: v == {});
-  int      = typedef "int" isInt;
-  bool     = typedef "bool" isBool;
-  float    = typedef "float" isFloat;
-  string   = typedef "string" isString;
-  path     = typedef "path" (x: typeOf x == "path");
-  drv      = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation");
+  any = typedef "any" (_: true);
+  unit = typedef "unit" (v: v == { });
+  int = typedef "int" isInt;
+  bool = typedef "bool" isBool;
+  float = typedef "float" isFloat;
+  string = typedef "string" isString;
+  path = typedef "path" (x: typeOf x == "path");
+  drv = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation");
   function = typedef "function" (x: isFunction x || (isAttrs x && x ? "__functor"
-                                                 && isFunction x.__functor));
+    && isFunction x.__functor));
 
   # Type for types themselves. Useful when defining polymorphic types.
   type = typedef "type" (x:
@@ -120,7 +134,7 @@ in lib.fix (self: {
       in {
         ok = isNull v || (self.type t).checkToBool res;
         err = "expected type ${name}, but value does not conform to '${t.name}': "
-         + t.toError v res;
+          + t.toError v res;
       };
   };
 
@@ -132,7 +146,8 @@ in lib.fix (self: {
   list = t: typedef' rec {
     name = "list<${t.name}>";
 
-    checkType = v: if isList v
+    checkType = v:
+      if isList v
       then checkEach name (self.type t) v
       else {
         ok = false;
@@ -143,7 +158,8 @@ in lib.fix (self: {
   attrs = t: typedef' rec {
     name = "attrs<${t.name}>";
 
-    checkType = v: if isAttrs v
+    checkType = v:
+      if isAttrs v
       then checkEach name (self.type t) (attrValues v)
       else {
         ok = false;
@@ -168,20 +184,23 @@ in lib.fix (self: {
       # checkField checks an individual field of the struct against
       # its definition and creates a typecheck result. These results
       # are aggregated during the actual checking.
-      checkField = def: name: value: let result = def.checkType value; in rec {
-        ok = def.checkToBool result;
-        err = if !ok && isNull value
-          then "missing required ${def.name} field '${name}'\n"
-          else "field '${name}': ${def.toError value result}\n";
-      };
+      checkField = def: name: value:
+        let result = def.checkType value; in rec {
+          ok = def.checkToBool result;
+          err =
+            if !ok && isNull value
+            then "missing required ${def.name} field '${name}'\n"
+            else "field '${name}': ${def.toError value result}\n";
+        };
 
       # checkExtraneous determines whether a (closed) struct contains
       # any fields that are not part of the definition.
       checkExtraneous = def: has: acc:
         if (length has) == 0 then acc
         else if (hasAttr (head has) def)
-          then checkExtraneous def (tail has) acc
-          else checkExtraneous def (tail has) {
+        then checkExtraneous def (tail has) acc
+        else
+          checkExtraneous def (tail has) {
             ok = false;
             err = acc.err + "unexpected struct field '${head has}'\n";
           };
@@ -193,85 +212,102 @@ in lib.fix (self: {
           init = { ok = true; err = ""; };
           extraneous = checkExtraneous def (attrNames value) init;
 
-          checkedFields = map (n:
-            let v = if hasAttr n value then value."${n}" else null;
-            in checkField def."${n}" n v) (attrNames def);
+          checkedFields = map
+            (n:
+              let v = if hasAttr n value then value."${n}" else null;
+              in checkField def."${n}" n v)
+            (attrNames def);
 
-          combined = foldl' (acc: res: {
-            ok = acc.ok && res.ok;
-            err = if !res.ok then acc.err + res.err else acc.err;
-          }) init checkedFields;
-        in {
+          combined = foldl'
+            (acc: res: {
+              ok = acc.ok && res.ok;
+              err = if !res.ok then acc.err + res.err else acc.err;
+            })
+            init
+            checkedFields;
+        in
+        {
           ok = combined.ok && extraneous.ok;
           err = combined.err + extraneous.err;
         };
 
       struct' = name: def: typedef' {
         inherit name def;
-        checkType = value: if isAttrs value
+        checkType = value:
+          if isAttrs value
           then (checkStruct (self.attrs self.type def) value)
           else { ok = false; err = typeError name value; };
 
-          toError = _: result: "expected '${name}'-struct, but found:\n" + result.err;
+        toError = _: result: "expected '${name}'-struct, but found:\n" + result.err;
       };
-    in arg: if isString arg then (struct' arg) else (struct' "anon" arg);
+    in
+    arg: if isString arg then (struct' arg) else (struct' "anon" arg);
 
   # Enums & pattern matching
   enum =
-  let
-    plain = name: def: typedef' {
-      inherit name def;
+    let
+      plain = name: def: typedef' {
+        inherit name def;
 
-      checkType = (x: isString x && elem x def);
-      checkToBool = x: x;
-      toError = value: _: "'${prettyPrint value} is not a member of enum ${name}";
-    };
-    enum' = name: def: lib.fix (e: (plain name def) // {
-      match = x: actions: deepSeq (map e (attrNames actions)) (
-      let
-        actionKeys = attrNames actions;
-        missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] def;
-      in if (length missing) > 0
-        then throw "Missing match action for members: ${prettyPrint missing}"
-        else actions."${e x}");
-    });
-  in arg: if isString arg then (enum' arg) else (enum' "anon" arg);
+        checkType = (x: isString x && elem x def);
+        checkToBool = x: x;
+        toError = value: _: "'${prettyPrint value}' is not a member of enum ${name}";
+      };
+      enum' = name: def: lib.fix (e: (plain name def) // {
+        match = x: actions: deepSeq (map e (attrNames actions)) (
+          let
+            actionKeys = attrNames actions;
+            missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] def;
+          in
+          if (length missing) > 0
+          then throw "Missing match action for members: ${prettyPrint missing}"
+          else actions."${e x}"
+        );
+      });
+    in
+    arg: if isString arg then (enum' arg) else (enum' "anon" arg);
 
   # Sum types
   #
   # The representation of a sum type is an attribute set with only one
   # value, where the key of the value denotes the variant of the type.
   sum =
-  let
-    plain = name: def: typedef' {
-      inherit name def;
-      checkType = (x:
-        let variant = elemAt (attrNames x) 0;
-        in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def
-          then let t = def."${variant}";
-                   v = x."${variant}";
-                   res = t.checkType v;
-               in if t.checkToBool res
-                  then { ok = true; }
-                  else {
-                    ok = false;
-                    err = "while checking '${name}' variant '${variant}': "
-                          + t.toError v res;
-                  }
+    let
+      plain = name: def: typedef' {
+        inherit name def;
+        checkType = (x:
+          let variant = elemAt (attrNames x) 0;
+          in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def
+          then
+            let
+              t = def."${variant}";
+              v = x."${variant}";
+              res = t.checkType v;
+            in
+            if t.checkToBool res
+            then { ok = true; }
+            else {
+              ok = false;
+              err = "while checking '${name}' variant '${variant}': "
+                + t.toError v res;
+            }
           else { ok = false; err = typeError name x; }
-      );
-    };
-    sum' = name: def: lib.fix (s: (plain name def) // {
-    match = x: actions:
-    let variant = deepSeq (s x) (elemAt (attrNames x) 0);
-        actionKeys = attrNames actions;
-        defKeys = attrNames def;
-        missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [] defKeys;
-    in if (length missing) > 0
-      then throw "Missing match action for variants: ${prettyPrint missing}"
-      else actions."${variant}" x."${variant}";
-    });
-    in arg: if isString arg then (sum' arg) else (sum' "anon" arg);
+        );
+      };
+      sum' = name: def: lib.fix (s: (plain name def) // {
+        match = x: actions:
+          let
+            variant = deepSeq (s x) (elemAt (attrNames x) 0);
+            actionKeys = attrNames actions;
+            defKeys = attrNames def;
+            missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] defKeys;
+          in
+          if (length missing) > 0
+          then throw "Missing match action for variants: ${prettyPrint missing}"
+          else actions."${variant}" x."${variant}";
+      });
+    in
+    arg: if isString arg then (sum' arg) else (sum' "anon" arg);
 
   # Typed function definitions
   #
@@ -285,15 +321,48 @@ in lib.fix (self: {
       mkFunc = sig: f: {
         inherit sig;
         __toString = self: foldl' (s: t: "${s} -> ${t.name}")
-                                  "λ :: ${(head self.sig).name}" (tail self.sig);
+          "λ :: ${(head self.sig).name}"
+          (tail self.sig);
         __functor = _: f;
       };
 
-      defun' = sig: func: if length sig > 2
+      defun' = sig: func:
+        if length sig > 2
         then mkFunc sig (x: defun' (tail sig) (func ((head sig) x)))
         else mkFunc sig (x: ((head (tail sig)) (func ((head sig) x))));
 
-    in sig: func: if length sig < 2
+    in
+    sig: func:
+      if length sig < 2
       then (throw "Signature must at least have two types (a -> b)")
       else defun' sig func;
+
+  # Restricting types
+  #
+  # `restrict` wraps a type `t`, and uses a predicate `pred` to further
+  # restrict the values, giving the restriction a descriptive `name`.
+  #
+  # The value is checked against the wrapped type (e.g. int) first and
+  # only then against the predicate, so the predicate can already rely
+  # on the value being of the wrapped type.
+  restrict = name: pred: t:
+    let restriction = "${t.name}[${name}]"; in typedef' {
+      name = restriction;
+      checkType = v:
+        let res = t.checkType v;
+        in
+        if !(t.checkToBool res)
+        then res
+        else
+          let
+            iok = pred v;
+          in
+          if isBool iok then {
+            ok = iok;
+            err = "${prettyPrint v} does not conform to restriction '${restriction}'";
+          } else
+          # use throw here to avoid spamming the build log
+            throw "restriction '${restriction}' predicate returned unexpected value '${prettyPrint iok}' instead of boolean";
+    };
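+
+  # A hedged usage sketch (illustrative only, not taken from the tests):
+  #
+  #   positiveInt = restrict "positive" (i: i > 0) int;
+  #   positiveInt 42    => 42
+  #   positiveInt (-1)  => throws "-1 does not conform to restriction 'int[positive]'"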
+
 })
diff --git a/nix/yants/tests/default.nix b/nix/yants/tests/default.nix
index da539ca356..0c7ec24188 100644
--- a/nix/yants/tests/default.nix
+++ b/nix/yants/tests/default.nix
@@ -1,34 +1,50 @@
 { depot, pkgs, ... }:
 
-with builtins;
 with depot.nix.yants;
 
 # Note: Derivations are not included in the tests below as they cause
 # issues with deepSeq.
 
-deepSeq rec {
-  # Test that all primitive types match
-  primitives = [
-    (unit {})
-    (int 15)
-    (bool false)
-    (float 13.37)
-    (string "Hello!")
-    (function (x: x * 2))
-    (path /nix)
+let
+
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    it
+    assertEq
+    assertThrows
+    assertDoesNotThrow
+    ;
+
+  # this derivation won't throw if evaluated with deepSeq
+  # unlike most things even remotely related to nixpkgs
+  trivialDerivation = derivation {
+    name = "trivial-derivation";
+    inherit (pkgs.stdenv) system;
+    builder = "/bin/sh";
+    args = [ "-c" "echo hello > $out" ];
+  };
+
+  testPrimitives = it "checks that all primitive types match" [
+    (assertDoesNotThrow "unit type" (unit { }))
+    (assertDoesNotThrow "int type" (int 15))
+    (assertDoesNotThrow "bool type" (bool false))
+    (assertDoesNotThrow "float type" (float 13.37))
+    (assertDoesNotThrow "string type" (string "Hello!"))
+    (assertDoesNotThrow "function type" (function (x: x * 2)))
+    (assertDoesNotThrow "path type" (path /nix))
+    (assertDoesNotThrow "derivation type" (drv trivialDerivation))
   ];
 
-  # Test that polymorphic types work as intended
-  poly = [
-    (option int null)
-    (list string [ "foo" "bar" ])
-    (either int float 42)
+  testPoly = it "checks that polymorphic types work as intended" [
+    (assertDoesNotThrow "option type" (option int null))
+    (assertDoesNotThrow "list type" (list string [ "foo" "bar" ]))
+    (assertDoesNotThrow "either type" (either int float 42))
   ];
 
   # Test that structures work as planned.
   person = struct "person" {
     name = string;
-    age  = int;
+    age = int;
 
     contact = option (struct {
       email = string;
@@ -36,20 +52,33 @@ deepSeq rec {
     });
   };
 
-  testPerson = person {
-    name = "Brynhjulf";
-    age  = 42;
-    contact.email = "brynhjulf@yants.nix";
-  };
+  testStruct = it "checks that structures work as intended" [
+    (assertDoesNotThrow "person struct" (person {
+      name = "Brynhjulf";
+      age = 42;
+      contact.email = "brynhjulf@yants.nix";
+    }))
+  ];
 
   # Test enum definitions & matching
   colour = enum "colour" [ "red" "blue" "green" ];
-  testMatch = colour.match "red" {
+  colourMatcher = {
     red = "It is in fact red!";
-    blue = throw "It should not be blue!";
-    green = throw "It should not be green!";
+    blue = "It should not be blue!";
+    green = "It should not be green!";
   };
 
+  testEnum = it "checks enum definitions and matching" [
+    (assertEq "enum is matched correctly"
+      "It is in fact red!"
+      (colour.match "red" colourMatcher))
+    (assertThrows "out of bounds enum fails"
+      (colour.match "alpha" (colourMatcher // {
+        alpha = "This should never happen";
+      }))
+    )
+  ];
+
   # Test sum type definitions
   creature = sum "creature" {
     human = struct {
@@ -59,37 +88,71 @@ deepSeq rec {
 
     pet = enum "pet" [ "dog" "lizard" "cat" ];
   };
-
-  testSum = creature {
+  some-human = creature {
     human = {
       name = "Brynhjulf";
       age = 42;
     };
   };
 
-  testSumMatch = creature.match testSum {
-    human = v: "It's a human named ${v.name}";
-    pet = v: throw "It's not supposed to be a pet!";
-  };
+  testSum = it "checks sum types definitions and matching" [
+    (assertDoesNotThrow "creature sum type" some-human)
+    (assertEq "sum type is matched correctly"
+      "It's a human named Brynhjulf"
+      (creature.match some-human {
+        human = v: "It's a human named ${v.name}";
+        pet = v: "It's not supposed to be a pet!";
+      })
+    )
+  ];
 
   # Test curried function definitions
   func = defun [ string int string ]
-  (name: age: "${name} is ${toString age} years old");
+    (name: age: "${name} is ${toString age} years old");
 
-  testFunc = func "Brynhjulf" 42;
+  testFunctions = it "checks function definitions" [
+    (assertDoesNotThrow "function application" (func "Brynhjulf" 42))
+  ];
 
   # Test that all types are types.
-  testTypes = map type [
-    any bool drv float int string path
-
-    (attrs int)
-    (eitherN [ int string bool ])
-    (either int string)
-    (enum [ "foo" "bar" ])
-    (list string)
-    (option int)
-    (option (list string))
-    (struct { a = int; b = option string; })
-    (sum { a = int; b = option string; })
+  assertIsType = name: t:
+    assertDoesNotThrow "${name} is a type" (type t);
+  testTypes = it "checks that all types are types" [
+    (assertIsType "any" any)
+    (assertIsType "bool" bool)
+    (assertIsType "drv" drv)
+    (assertIsType "float" float)
+    (assertIsType "int" int)
+    (assertIsType "string" string)
+    (assertIsType "path" path)
+
+    (assertIsType "attrs int" (attrs int))
+    (assertIsType "eitherN [ ... ]" (eitherN [ int string bool ]))
+    (assertIsType "either int string" (either int string))
+    (assertIsType "enum [ ... ]" (enum [ "foo" "bar" ]))
+    (assertIsType "list string" (list string))
+    (assertIsType "option int" (option int))
+    (assertIsType "option (list string)" (option (list string)))
+    (assertIsType "struct { ... }" (struct { a = int; b = option string; }))
+    (assertIsType "sum { ... }" (sum { a = int; b = option string; }))
   ];
-} (pkgs.writeText "yants-tests" "All tests passed!")
+
+  testRestrict = it "checks restrict types" [
+    (assertDoesNotThrow "< 42" ((restrict "< 42" (i: i < 42) int) 25))
+    (assertDoesNotThrow "list length < 3"
+      ((restrict "not too long" (l: builtins.length l < 3) (list int)) [ 1 2 ]))
+    (assertDoesNotThrow "list eq 5"
+      (list (restrict "eq 5" (v: v == 5) any) [ 5 5 5 ]))
+  ];
+
+in
+runTestsuite "yants" [
+  testPrimitives
+  testPoly
+  testStruct
+  testEnum
+  testSum
+  testFunctions
+  testTypes
+  testRestrict
+]