Diffstat (limited to 'nix')
-rw-r--r--  nix/OWNERS | 3
-rw-r--r--  nix/binify/default.nix | 16
-rw-r--r--  nix/bufCheck/default.nix | 9
-rw-r--r--  nix/buildGo/.skip-subtree | 2
-rw-r--r--  nix/buildGo/README.md | 140
-rw-r--r--  nix/buildGo/default.nix | 140
-rw-r--r--  nix/buildGo/example/default.nix | 48
-rw-r--r--  nix/buildGo/example/lib.go | 9
-rw-r--r--  nix/buildGo/example/main.go | 25
-rw-r--r--  nix/buildGo/example/thing.proto | 10
-rw-r--r--  nix/buildGo/external/default.nix | 109
-rw-r--r--  nix/buildGo/external/main.go | 201
-rw-r--r--  nix/buildGo/proto.nix | 87
-rw-r--r--  nix/buildLisp/README.md | 254
-rw-r--r--  nix/buildLisp/default.nix | 779
-rw-r--r--  nix/buildLisp/example/default.nix | 33
-rw-r--r--  nix/buildLisp/example/lib.lisp | 6
-rw-r--r--  nix/buildLisp/example/main.lisp | 7
-rw-r--r--  nix/buildLisp/tests/argv0.nix | 36
-rw-r--r--  nix/buildManPages/OWNERS | 3
-rw-r--r--  nix/buildManPages/default.nix | 103
-rw-r--r--  nix/buildkite/default.nix | 335
-rwxr-xr-x  nix/buildkite/fetch-parent-targets.sh | 43
-rw-r--r--  nix/drvSeqL/default.nix | 47
-rw-r--r--  nix/emptyDerivation/OWNERS | 3
-rw-r--r--  nix/emptyDerivation/default.nix | 21
-rw-r--r--  nix/emptyDerivation/emptyDerivation.nix | 38
-rw-r--r--  nix/emptyDerivation/tests.nix | 40
-rw-r--r--  nix/escapeExecline/default.nix | 32
-rw-r--r--  nix/getBins/default.nix | 51
-rw-r--r--  nix/getBins/tests.nix | 40
-rw-r--r--  nix/mergePatch/default.nix | 192
-rw-r--r--  nix/netstring/attrsToKeyValList.nix | 33
-rw-r--r--  nix/netstring/fromString.nix | 10
-rw-r--r--  nix/nint/OWNERS | 3
-rw-r--r--  nix/nint/README.md | 93
-rw-r--r--  nix/nint/default.nix | 16
-rw-r--r--  nix/nint/nint.rs | 149
-rw-r--r--  nix/readTree/README.md | 92
-rw-r--r--  nix/readTree/default.nix | 284
-rw-r--r--  nix/readTree/tests/.skip-subtree | 1
-rw-r--r--  nix/readTree/tests/default.nix | 129
-rw-r--r--  nix/readTree/tests/test-example/third_party/default.nix | 5
-rw-r--r--  nix/readTree/tests/test-example/third_party/rustpkgs/aho-corasick.nix | 1
-rw-r--r--  nix/readTree/tests/test-example/third_party/rustpkgs/serde.nix | 1
-rw-r--r--  nix/readTree/tests/test-example/tools/cheddar/default.nix | 1
-rw-r--r--  nix/readTree/tests/test-example/tools/roquefort.nix | 1
-rw-r--r--  nix/readTree/tests/test-marker/directory-marked/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-marker/directory-marked/nested/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-marker/file-children/one.nix | 3
-rw-r--r--  nix/readTree/tests/test-marker/file-children/two.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/default.nix | 7
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/subdir/a.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/default.nix | 5
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/no-merge/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/no-merge/subdir/a.nix | 1
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/sibling.nix | 1
-rw-r--r--  nix/readTree/tests/test-tree-traversal/default-nix/subdir/a.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/no-skip-subtree/a/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/no-skip-subtree/b/c.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/no-skip-subtree/default.nix | 5
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-subtree/.skip-subtree | 1
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-subtree/a/default.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-subtree/b/c.nix | 3
-rw-r--r--  nix/readTree/tests/test-tree-traversal/skip-subtree/default.nix | 5
-rw-r--r--  nix/readTree/tests/test-wrong-no-dots/no-dots-in-function.nix | 3
-rw-r--r--  nix/readTree/tests/test-wrong-not-a-function/not-a-function.nix | 1
-rw-r--r--  nix/renderMarkdown/default.nix | 8
-rw-r--r--  nix/runExecline/default.nix | 31
-rw-r--r--  nix/runExecline/runExecline.nix | 122
-rw-r--r--  nix/runExecline/tests.nix | 117
-rw-r--r--  nix/runTestsuite/default.nix | 199
-rw-r--r--  nix/sparseTree/OWNERS | 3
-rw-r--r--  nix/sparseTree/default.nix | 74
-rw-r--r--  nix/tag/default.nix | 166
-rw-r--r--  nix/tag/tests.nix | 94
-rw-r--r--  nix/tailscale/default.nix | 31
-rw-r--r--  nix/utils/OWNERS | 3
-rw-r--r--  nix/utils/default.nix | 186
-rw-r--r--  nix/utils/tests/.skip-subtree | 1
-rw-r--r--  nix/utils/tests/default.nix | 126
-rw-r--r--  nix/utils/tests/directory/file | 0
l---------  nix/utils/tests/missing | 1
l---------  nix/utils/tests/symlink-directory | 1
l---------  nix/utils/tests/symlink-file | 1
l---------  nix/utils/tests/symlink-symlink-directory | 1
l---------  nix/utils/tests/symlink-symlink-file | 1
-rw-r--r--  nix/writeElispBin/default.nix | 20
-rw-r--r--  nix/writeExecline/default.nix | 39
-rw-r--r--  nix/writeScript/default.nix | 35
-rw-r--r--  nix/writeScriptBin/default.nix | 12
-rw-r--r--  nix/writers/default.nix | 113
-rw-r--r--  nix/writers/tests/rust.nix | 76
-rw-r--r--  nix/yants/README.md | 88
-rw-r--r--  nix/yants/default.nix | 368
-rw-r--r--  nix/yants/screenshots/enums.png | bin 0 -> 41305 bytes
-rw-r--r--  nix/yants/screenshots/functions.png | bin 0 -> 32907 bytes
-rw-r--r--  nix/yants/screenshots/nested-structs.png | bin 0 -> 70264 bytes
-rw-r--r--  nix/yants/screenshots/simple.png | bin 0 -> 43010 bytes
-rw-r--r--  nix/yants/screenshots/structs.png | bin 0 -> 69499 bytes
-rw-r--r--  nix/yants/tests/default.nix | 158
101 files changed, 5818 insertions, 0 deletions
diff --git a/nix/OWNERS b/nix/OWNERS
new file mode 100644
index 000000000000..a742d0d22bf6
--- /dev/null
+++ b/nix/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - Profpatsch
diff --git a/nix/binify/default.nix b/nix/binify/default.nix
new file mode 100644
index 000000000000..a9900caf43d5
--- /dev/null
+++ b/nix/binify/default.nix
@@ -0,0 +1,16 @@
+{ pkgs, lib, ... }:
+
+# Create a store path where the executable `exe`
+# is linked to $out/bin/${name}.
+# This is useful for e.g. including it as a “package”
+# in `buildInputs` of a shell.nix.
+#
+# For example, if I have the executable /nix/store/…-hello,
+# I can make it into /nix/store/…-binify-hello/bin/hello
+# with `binify { exe = …; name = "hello"; }`.
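+#
+# A minimal sketch of using it from a shell.nix (the `depot.nix.binify`
+# attribute path here is an assumption based on how readTree exposes
+# this tree):
+#
+#   { pkgs, depot, ... }:
+#   pkgs.mkShell {
+#     buildInputs = [
+#       (depot.nix.binify { exe = "${pkgs.hello}/bin/hello"; name = "hello"; })
+#     ];
+#   }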
+{ exe, name }:
+
+pkgs.runCommandLocal "${name}-bin" { } ''
+  mkdir -p $out/bin
+  ln -sT ${lib.escapeShellArg exe} $out/bin/${lib.escapeShellArg name}
+''
diff --git a/nix/bufCheck/default.nix b/nix/bufCheck/default.nix
new file mode 100644
index 000000000000..039303ba684b
--- /dev/null
+++ b/nix/bufCheck/default.nix
@@ -0,0 +1,9 @@
+# Check protobuf syntax and breaking.
+#
+{ depot, pkgs, ... }:
+
+pkgs.writeShellScriptBin "ci-buf-check" ''
+  ${depot.third_party.bufbuild}/bin/buf check lint --input .
+  # Report-only
+  ${depot.third_party.bufbuild}/bin/buf check breaking --input "." --against-input "./.git#branch=canon" || true
+''
diff --git a/nix/buildGo/.skip-subtree b/nix/buildGo/.skip-subtree
new file mode 100644
index 000000000000..8db1f814f653
--- /dev/null
+++ b/nix/buildGo/.skip-subtree
@@ -0,0 +1,2 @@
+Subdirectories of this folder should not be imported since they are
+internal to buildGo.nix and incompatible with readTree.
diff --git a/nix/buildGo/README.md b/nix/buildGo/README.md
new file mode 100644
index 000000000000..37e0c06933f9
--- /dev/null
+++ b/nix/buildGo/README.md
@@ -0,0 +1,140 @@
+buildGo.nix
+===========
+
+This is an alternative [Nix][] build system for [Go][]. It supports building Go
+libraries and programs, and even automatically generating Protobuf & gRPC
+libraries.
+
+*Note:* This will probably end up being folded into [Nixery][].
+
+## Background
+
+Most language-specific Nix tooling outsources the build to existing
+language-specific build tooling, which essentially means that Nix ends up being
+a wrapper around all sorts of external build systems.
+
+However, systems like [Bazel][] take an alternative approach in which the
+compiler is invoked directly and the composition of programs and libraries stays
+within a single homogeneous build system.
+
+Users don't need to learn per-language build systems, and especially for
+companies with large monorepo setups ([like Google][]) this has a huge
+productivity impact.
+
+This project is an attempt to prove that Nix can be used in a similar style to
+build software directly, rather than shelling out to other build systems.
+
+## Example
+
+Given a program layout like this:
+
+```
+.
+├── lib          <-- some library component
+│   ├── bar.go
+│   └── foo.go
+├── api.proto    <-- gRPC API definition
+├── main.go      <-- program implementation
+└── default.nix  <-- build instructions
+```
+
+The contents of `default.nix` could look like this:
+
+```nix
+{ buildGo }:
+
+let
+  api = buildGo.grpc {
+    name  = "someapi";
+    proto = ./api.proto;
+  };
+
+  lib = buildGo.package {
+    name = "somelib";
+    srcs = [
+      ./lib/bar.go
+      ./lib/foo.go
+    ];
+  };
+in buildGo.program {
+  name = "my-program";
+  deps = [ api lib ];
+
+  srcs = [
+    ./main.go
+  ];
+}
+```
+
+(If you don't know how to read Nix, check out [nix-1p][])
+
+## Usage
+
+`buildGo` exposes five different functions:
+
+* `buildGo.program`: Build a Go binary out of the specified source files.
+
+  | parameter | type                    | use                                            | required? |
+  |-----------|-------------------------|------------------------------------------------|-----------|
+  | `name`    | `string`                | Name of the program (and resulting executable) | yes       |
+  | `srcs`    | `list<path>`            | List of paths to source files                  | yes       |
+  | `deps`    | `list<drv>`             | List of dependencies (i.e. other Go libraries) | no        |
+  | `x_defs`  | `attrs<string, string>` | Attribute set of linker vars (i.e. `-X`-flags) | no        |
+
+* `buildGo.package`: Build a Go library out of the specified source files.
+
+  | parameter | type         | use                                            | required? |
+  |-----------|--------------|------------------------------------------------|-----------|
+  | `name`    | `string`     | Name of the library                            | yes       |
+  | `srcs`    | `list<path>` | List of paths to source files                  | yes       |
+  | `deps`    | `list<drv>`  | List of dependencies (i.e. other Go libraries) | no        |
+  | `path`    | `string`     | Go import path for the resulting library       | no        |
+
+* `buildGo.external`: Build an externally defined Go library or program.
+
+  This function performs analysis on the supplied source code (which
+  can use the standard Go tooling layout) and creates a tree of all
+  the packages contained within.
+
+  This exists for compatibility with external libraries that were not
+  defined using buildGo.
+
+  | parameter | type           | use                                           | required? |
+  |-----------|----------------|-----------------------------------------------|-----------|
+  | `path`    | `string`       | Go import path for the resulting package      | yes       |
+  | `src`     | `path`         | Path to the source **directory**              | yes       |
+  | `deps`    | `list<drv>`    | List of dependencies (i.e. other Go packages) | no        |
+
+  For some examples of how `buildGo.external` is used, check out
+  [`proto.nix`](./proto.nix); a short sketch also follows this list.
+
+* `buildGo.proto`: Build a Go library out of the specified Protobuf definition.
+
+  | parameter   | type        | use                                              | required? |
+  |-------------|-------------|--------------------------------------------------|-----------|
+  | `name`      | `string`    | Name for the resulting library                   | yes       |
+  | `proto`     | `path`      | Path to the Protobuf definition file             | yes       |
+  | `path`      | `string`    | Import path for the resulting Go library         | no        |
+  | `extraDeps` | `list<drv>` | Additional Go dependencies to add to the library | no        |
+
+* `buildGo.grpc`: Build a Go library out of the specified gRPC definition.
+
+  The parameters are identical to `buildGo.proto`.
+
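+A sketch of pulling in an external dependency with `buildGo.external`
+(mirroring the `golang.org/x/sys` entry in [`proto.nix`](./proto.nix); the
+pinned revision is simply the one used there):
+
+```nix
+{ buildGo }:
+
+buildGo.external {
+  path = "golang.org/x/sys";
+  src = builtins.fetchGit {
+    url = "https://go.googlesource.com/sys";
+    rev = "bd437916bb0eb726b873ee8e9b2dcf212d32e2fd";
+  };
+}
+```
+
+The returned attribute set mirrors the package tree of the source, so e.g.
+the `golang.org/x/sys/unix` package becomes available as `.unix` and can be
+passed to the `deps` of other buildGo targets (as `proto.nix` does).
+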
+## Current status
+
+This project is a work in progress. Crucially, it is lacking the following features:
+
+* feature flag parity with Bazel's Go rules
+* documentation building
+* test execution
+
+There are still some open questions around how to structure some of those
+features in Nix.
+
+[Nix]: https://nixos.org/nix/
+[Go]: https://golang.org/
+[Nixery]: https://github.com/google/nixery
+[Bazel]: https://bazel.build/
+[like Google]: https://ai.google/research/pubs/pub45424
+[nix-1p]: https://github.com/tazjin/nix-1p
diff --git a/nix/buildGo/default.nix b/nix/buildGo/default.nix
new file mode 100644
index 000000000000..92951b3cb213
--- /dev/null
+++ b/nix/buildGo/default.nix
@@ -0,0 +1,140 @@
+# Copyright 2019 Google LLC.
+# SPDX-License-Identifier: Apache-2.0
+#
+# buildGo provides Nix functions to build Go packages in the style of Bazel's
+# rules_go.
+
+{ pkgs ? import <nixpkgs> { }
+, ...
+}:
+
+let
+  inherit (builtins)
+    attrNames
+    baseNameOf
+    dirOf
+    elemAt
+    filter
+    listToAttrs
+    map
+    match
+    readDir
+    replaceStrings
+    toString;
+
+  inherit (pkgs) lib go runCommand fetchFromGitHub protobuf symlinkJoin;
+
+  # Helpers for low-level Go compiler invocations
+  spaceOut = lib.concatStringsSep " ";
+
+  includeDepSrc = dep: "-I ${dep}";
+  includeSources = deps: spaceOut (map includeDepSrc deps);
+
+  includeDepLib = dep: "-L ${dep}";
+  includeLibs = deps: spaceOut (map includeDepLib deps);
+
+  srcBasename = src: elemAt (match "([a-z0-9]{32}\-)?(.*\.go)" (baseNameOf src)) 1;
+  srcCopy = path: src: "cp ${src} $out/${path}/${srcBasename src}";
+  srcList = path: srcs: lib.concatStringsSep "\n" (map (srcCopy path) srcs);
+
+  allDeps = deps: lib.unique (lib.flatten (deps ++ (map (d: d.goDeps) deps)));
+
+  xFlags = x_defs: spaceOut (map (k: "-X ${k}=${x_defs."${k}"}") (attrNames x_defs));
+
+  pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p);
+
+  # Add an `overrideGo` attribute to a function result that works
+  # similar to `overrideAttrs`, but is used specifically for the
+  # arguments passed to Go builders.
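+  #
+  # Example (hypothetical arguments):
+  #
+  #   (program { name = "foo"; srcs = [ ./foo.go ]; }).overrideGo (old: {
+  #     x_defs = { "main.Version" = "dev"; };
+  #   })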
+  makeOverridable = f: orig: (f orig) // {
+    overrideGo = new: makeOverridable f (orig // (new orig));
+  };
+
+  # High-level build functions
+
+  # Build a Go program out of the specified files and dependencies.
+  program = { name, srcs, deps ? [ ], x_defs ? { } }:
+    let uniqueDeps = allDeps (map (d: d.gopkg) deps);
+    in runCommand name { } ''
+      ${go}/bin/go tool compile -o ${name}.a -trimpath=$PWD -trimpath=${go} ${includeSources uniqueDeps} ${spaceOut srcs}
+      mkdir -p $out/bin
+      export GOROOT_FINAL=go
+      ${go}/bin/go tool link -o $out/bin/${name} -buildid nix ${xFlags x_defs} ${includeLibs uniqueDeps} ${name}.a
+    '';
+
+  # Build a Go library assembled out of the specified files.
+  #
+  # This outputs both the sources and compiled binary, as both are
+  # needed when downstream packages depend on it.
+  package = { name, srcs, deps ? [ ], path ? name, sfiles ? [ ] }:
+    let
+      uniqueDeps = allDeps (map (d: d.gopkg) deps);
+
+      # The build steps below need to be executed conditionally for Go
+      # assembly if the analyser detected any *.s files.
+      #
+      # This is required for several popular packages (e.g. x/sys).
+      ifAsm = do: lib.optionalString (sfiles != [ ]) do;
+      asmBuild = ifAsm ''
+        ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -gensymabis -o ./symabis ${spaceOut sfiles}
+        ${go}/bin/go tool asm -trimpath $PWD -I $PWD -I ${go}/share/go/pkg/include -D GOOS_linux -D GOARCH_amd64 -o ./asm.o ${spaceOut sfiles}
+      '';
+      asmLink = ifAsm "-symabis ./symabis -asmhdr $out/go_asm.h";
+      asmPack = ifAsm ''
+        ${go}/bin/go tool pack r $out/${path}.a ./asm.o
+      '';
+
+      gopkg = (runCommand "golib-${name}" { } ''
+        mkdir -p $out/${path}
+        ${srcList path (map (s: "${s}") srcs)}
+        ${asmBuild}
+        ${go}/bin/go tool compile -pack ${asmLink} -o $out/${path}.a -trimpath=$PWD -trimpath=${go} -p ${path} ${includeSources uniqueDeps} ${spaceOut srcs}
+        ${asmPack}
+      '').overrideAttrs (_: {
+        passthru = {
+          inherit gopkg;
+          goDeps = uniqueDeps;
+          goImportPath = path;
+        };
+      });
+    in
+    gopkg;
+
+  # Build a tree of Go libraries out of an external Go source
+  # directory that follows the standard Go layout and was not built
+  # with buildGo.nix.
+  #
+  # The derivation for each actual package will reside in an attribute
+  # named "gopkg", and an attribute named "gobin" for binaries.
+  external = import ./external { inherit pkgs program package; };
+
+  # Import support libraries needed for protobuf & gRPC support
+  protoLibs = import ./proto.nix {
+    inherit external;
+  };
+
+  # Build a Go library out of the specified protobuf definition.
+  proto = { name, proto, path ? name, goPackage ? name, extraDeps ? [ ] }: (makeOverridable package) {
+    inherit name path;
+    deps = [ protoLibs.goProto.proto.gopkg ] ++ extraDeps;
+    srcs = lib.singleton (runCommand "goproto-${name}.pb.go" { } ''
+      cp ${proto} ${baseNameOf proto}
+      ${protobuf}/bin/protoc --plugin=${protoLibs.goProto.protoc-gen-go.gopkg}/bin/protoc-gen-go \
+        --go_out=plugins=grpc,import_path=${baseNameOf path}:. ${baseNameOf proto}
+      mv ./${goPackage}/*.pb.go $out
+    '');
+  };
+
+  # Build a Go library out of the specified gRPC definition.
+  grpc = args: proto (args // { extraDeps = [ protoLibs.goGrpc.gopkg ]; });
+
+in
+{
+  # Only the high-level builder functions are exposed, but made
+  # overrideable.
+  program = makeOverridable program;
+  package = makeOverridable package;
+  proto = makeOverridable proto;
+  grpc = makeOverridable grpc;
+  external = makeOverridable external;
+}
diff --git a/nix/buildGo/example/default.nix b/nix/buildGo/example/default.nix
new file mode 100644
index 000000000000..08da075e1818
--- /dev/null
+++ b/nix/buildGo/example/default.nix
@@ -0,0 +1,48 @@
+# Copyright 2019 Google LLC.
+# SPDX-License-Identifier: Apache-2.0
+
+# This file provides examples for how to use the various builder
+# functions provided by `buildGo`.
+#
+# The features used in the example are not exhaustive, but should give
+# users a quick introduction to how to use buildGo.
+
+let
+  buildGo = import ../default.nix { };
+
+  # Example use of buildGo.package, which creates an importable Go
+  # package from the specified source files.
+  examplePackage = buildGo.package {
+    name = "example";
+    srcs = [
+      ./lib.go
+    ];
+  };
+
+  # Example use of buildGo.proto, which generates a Go library from a
+  # Protobuf definition file.
+  exampleProto = buildGo.proto {
+    name = "exampleproto";
+    proto = ./thing.proto;
+  };
+
+  # Example use of buildGo.program, which builds an executable using
+  # the specified name and dependencies (which in turn must have been
+  # created via buildGo.package etc.)
+in
+buildGo.program {
+  name = "example";
+
+  srcs = [
+    ./main.go
+  ];
+
+  deps = [
+    examplePackage
+    exampleProto
+  ];
+
+  x_defs = {
+    "main.Flag" = "successfully";
+  };
+}
diff --git a/nix/buildGo/example/lib.go b/nix/buildGo/example/lib.go
new file mode 100644
index 000000000000..8a61370e994c
--- /dev/null
+++ b/nix/buildGo/example/lib.go
@@ -0,0 +1,9 @@
+// Copyright 2019 Google LLC.
+// SPDX-License-Identifier: Apache-2.0
+
+package example
+
+// UUID returns a totally random, carefully chosen UUID
+func UUID() string {
+	return "3640932f-ad40-4bc9-b45d-f504a0f5910a"
+}
diff --git a/nix/buildGo/example/main.go b/nix/buildGo/example/main.go
new file mode 100644
index 000000000000..bbcedbff8726
--- /dev/null
+++ b/nix/buildGo/example/main.go
@@ -0,0 +1,25 @@
+// Copyright 2019 Google LLC.
+// SPDX-License-Identifier: Apache-2.0
+//
+// Package main provides a tiny example program for the Bazel-style
+// Nix build system for Go.
+
+package main
+
+import (
+	"example"
+	"exampleproto"
+	"fmt"
+)
+
+var Flag string = "unsuccessfully"
+
+func main() {
+	thing := exampleproto.Thing{
+		Id:          example.UUID(),
+		KindOfThing: "test thing",
+	}
+
+	fmt.Printf("The thing is a %s with ID %q\n", thing.KindOfThing, thing.Id)
+	fmt.Printf("The flag has been %s set\n", Flag)
+}
diff --git a/nix/buildGo/example/thing.proto b/nix/buildGo/example/thing.proto
new file mode 100644
index 000000000000..0f6d6575e008
--- /dev/null
+++ b/nix/buildGo/example/thing.proto
@@ -0,0 +1,10 @@
+// Copyright 2019 Google LLC.
+// SPDX-License-Identifier: Apache-2.0
+
+syntax = "proto3";
+package example;
+
+message Thing {
+  string id = 1;
+  string kind_of_thing = 2;
+}
diff --git a/nix/buildGo/external/default.nix b/nix/buildGo/external/default.nix
new file mode 100644
index 000000000000..f713783a58be
--- /dev/null
+++ b/nix/buildGo/external/default.nix
@@ -0,0 +1,109 @@
+# Copyright 2019 Google LLC.
+# SPDX-License-Identifier: Apache-2.0
+{ pkgs, program, package }:
+
+let
+  inherit (builtins)
+    elemAt
+    foldl'
+    fromJSON
+    head
+    length
+    listToAttrs
+    readFile
+    replaceStrings
+    tail
+    throw;
+
+  inherit (pkgs) lib runCommand go jq ripgrep;
+
+  pathToName = p: replaceStrings [ "/" ] [ "_" ] (toString p);
+
+  # Collect all non-vendored dependencies from the Go standard library
+  # into a file that can be used to filter them out when processing
+  # dependencies.
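+  # The result is a JSON object mapping each import path to true,
+  # e.g. {"fmt": true, "net/http": true, …}.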
+  stdlibPackages = runCommand "stdlib-pkgs.json" { } ''
+    export HOME=$PWD
+    export GOPATH=/dev/null
+    ${go}/bin/go list std | \
+      ${ripgrep}/bin/rg -v 'vendor' | \
+      ${jq}/bin/jq -R '.' | \
+      ${jq}/bin/jq -c -s 'map({key: ., value: true}) | from_entries' \
+      > $out
+  '';
+
+  analyser = program {
+    name = "analyser";
+
+    srcs = [
+      ./main.go
+    ];
+
+    x_defs = {
+      "main.stdlibList" = "${stdlibPackages}";
+    };
+  };
+
+  mkset = path: value:
+    if path == [ ] then { gopkg = value; }
+    else { "${head path}" = mkset (tail path) value; };
+
+  last = l: elemAt l ((length l) - 1);
+
+  toPackage = self: src: path: depMap: entry:
+    let
+      localDeps = map
+        (d: lib.attrByPath (d ++ [ "gopkg" ])
+          (
+            throw "missing local dependency '${lib.concatStringsSep "." d}' in '${path}'"
+          )
+          self)
+        entry.localDeps;
+
+      foreignDeps = map
+        (d: lib.attrByPath [ d.path ]
+          (
+            throw "missing foreign dependency '${d.path}' in '${path}, imported at ${d.position}'"
+          )
+          depMap)
+        entry.foreignDeps;
+
+      args = {
+        srcs = map (f: src + ("/" + f)) entry.files;
+        deps = localDeps ++ foreignDeps;
+      };
+
+      libArgs = args // {
+        name = pathToName entry.name;
+        path = lib.concatStringsSep "/" ([ path ] ++ entry.locator);
+        sfiles = map (f: src + ("/" + f)) entry.sfiles;
+      };
+
+      binArgs = args // {
+        name = (last ((lib.splitString "/" path) ++ entry.locator));
+      };
+    in
+    if entry.isCommand then (program binArgs) else (package libArgs);
+
+in
+{ src, path, deps ? [ ] }:
+let
+  # Build a map of dependencies (from their import paths to their
+  # derivation) so that they can be conditionally imported only in
+  # sub-packages that require them.
+  depMap = listToAttrs (map
+    (d: {
+      name = d.goImportPath;
+      value = d;
+    })
+    (map (d: d.gopkg) deps));
+
+  name = pathToName path;
+  analysisOutput = runCommand "${name}-structure.json" { } ''
+    ${analyser}/bin/analyser -path ${path} -source ${src} > $out
+  '';
+  analysis = fromJSON (readFile analysisOutput);
+in
+lib.fix (self: foldl' lib.recursiveUpdate { } (
+  map (entry: mkset entry.locator (toPackage self src path depMap entry)) analysis
+))
diff --git a/nix/buildGo/external/main.go b/nix/buildGo/external/main.go
new file mode 100644
index 000000000000..a77c43b371e9
--- /dev/null
+++ b/nix/buildGo/external/main.go
@@ -0,0 +1,201 @@
+// Copyright 2019 Google LLC.
+// SPDX-License-Identifier: Apache-2.0
+
+// This tool analyses external (i.e. not built with `buildGo.nix`) Go
+// packages to determine a build plan that Nix can import.
+package main
+
+import (
+	"encoding/json"
+	"flag"
+	"fmt"
+	"go/build"
+	"io/ioutil"
+	"log"
+	"os"
+	"path"
+	"path/filepath"
+	"strings"
+)
+
+// Path to a JSON file describing all standard library import paths.
+// This file is generated and set here by Nix during the build
+// process.
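+// (buildGo injects it via the Go linker's -X flag; see the x_defs in
+// external/default.nix.)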
+var stdlibList string
+
+// pkg describes a single Go package within the specified source
+// directory.
+//
+// Return information includes the local (relative from project root)
+// and external (non-stdlib) dependencies of this package.
+type pkg struct {
+	Name        string       `json:"name"`
+	Locator     []string     `json:"locator"`
+	Files       []string     `json:"files"`
+	SFiles      []string     `json:"sfiles"`
+	LocalDeps   [][]string   `json:"localDeps"`
+	ForeignDeps []foreignDep `json:"foreignDeps"`
+	IsCommand   bool         `json:"isCommand"`
+}
+
+type foreignDep struct {
+	Path string `json:"path"`
+	// filename, column and line number of the import, if known
+	Position string `json:"position"`
+}
+
+// findGoDirs walks the given directory tree and returns all
+// directories that contain Go source code.
+func findGoDirs(at string) ([]string, error) {
+	dirSet := make(map[string]bool)
+
+	err := filepath.Walk(at, func(path string, info os.FileInfo, err error) error {
+		if err != nil {
+			return err
+		}
+
+		name := info.Name()
+		// Skip folders that are guaranteed to not be relevant
+		if info.IsDir() && (name == "testdata" || name == ".git") {
+			return filepath.SkipDir
+		}
+
+		// If the current file is a Go file, then its directory is recorded
+		// (i.e. marked as a Go directory).
+		if !info.IsDir() && strings.HasSuffix(name, ".go") && !strings.HasSuffix(name, "_test.go") {
+			dirSet[filepath.Dir(path)] = true
+		}
+
+		return nil
+	})
+
+	if err != nil {
+		return nil, err
+	}
+
+	goDirs := []string{}
+	for k := range dirSet {
+		goDirs = append(goDirs, k)
+	}
+
+	return goDirs, nil
+}
+
+// analysePackage loads and analyses the imports of a single Go
+// package, returning the data that is required by the Nix code to
+// generate a derivation for this package.
+func analysePackage(root, source, importpath string, stdlib map[string]bool) (pkg, error) {
+	ctx := build.Default
+	ctx.CgoEnabled = false
+
+	p, err := ctx.ImportDir(source, build.IgnoreVendor)
+	if err != nil {
+		return pkg{}, err
+	}
+
+	local := [][]string{}
+	foreign := []foreignDep{}
+
+	for _, i := range p.Imports {
+		if stdlib[i] {
+			continue
+		}
+
+		if i == importpath {
+			local = append(local, []string{})
+		} else if strings.HasPrefix(i, importpath+"/") {
+			local = append(local, strings.Split(strings.TrimPrefix(i, importpath+"/"), "/"))
+		} else {
+			// The import positions are stored in a map keyed on the import name.
+			// The value is a list, presumably because an import can appear
+			// multiple times in a package. Let's just take the first one;
+			// it should be enough for a good error message.
+			firstPos := p.ImportPos[i][0].String()
+			foreign = append(foreign, foreignDep{Path: i, Position: firstPos})
+		}
+	}
+
+	prefix := strings.TrimPrefix(source, root+"/")
+
+	locator := []string{}
+	if len(prefix) != len(source) {
+		locator = strings.Split(prefix, "/")
+	} else {
+		// Otherwise, the locator is empty since it's the root package and
+		// no prefix should be added to files.
+		prefix = ""
+	}
+
+	files := []string{}
+	for _, f := range p.GoFiles {
+		files = append(files, path.Join(prefix, f))
+	}
+
+	sfiles := []string{}
+	for _, f := range p.SFiles {
+		sfiles = append(sfiles, path.Join(prefix, f))
+	}
+
+	return pkg{
+		Name:        path.Join(importpath, prefix),
+		Locator:     locator,
+		Files:       files,
+		SFiles:      sfiles,
+		LocalDeps:   local,
+		ForeignDeps: foreign,
+		IsCommand:   p.IsCommand(),
+	}, nil
+}
+
+func loadStdlibPkgs(from string) (pkgs map[string]bool, err error) {
+	f, err := ioutil.ReadFile(from)
+	if err != nil {
+		return
+	}
+
+	err = json.Unmarshal(f, &pkgs)
+	return
+}
+
+func main() {
+	source := flag.String("source", "", "path to directory with sources to process")
+	path := flag.String("path", "", "import path for the package")
+
+	flag.Parse()
+
+	if *source == "" {
+		log.Fatalf("-source flag must be specified")
+	}
+
+	stdlibPkgs, err := loadStdlibPkgs(stdlibList)
+	if err != nil {
+		log.Fatalf("failed to load standard library index from %q: %s\n", stdlibList, err)
+	}
+
+	goDirs, err := findGoDirs(*source)
+	if err != nil {
+		log.Fatalf("failed to walk source directory '%s': %s", *source, err)
+	}
+
+	all := []pkg{}
+	for _, d := range goDirs {
+		analysed, err := analysePackage(*source, d, *path, stdlibPkgs)
+
+		// If the Go source analysis returned "no buildable Go files",
+		// that directory should be skipped.
+		//
+		// This might be due to `+build` flags on the platform and other
+		// reasons (such as test files).
+		if _, ok := err.(*build.NoGoError); ok {
+			continue
+		}
+
+		if err != nil {
+			log.Fatalf("failed to analyse package at %q: %s", d, err)
+		}
+		all = append(all, analysed)
+	}
+
+	j, _ := json.Marshal(all)
+	fmt.Println(string(j))
+}
diff --git a/nix/buildGo/proto.nix b/nix/buildGo/proto.nix
new file mode 100644
index 000000000000..6c37f758ced7
--- /dev/null
+++ b/nix/buildGo/proto.nix
@@ -0,0 +1,87 @@
+# Copyright 2019 Google LLC.
+# SPDX-License-Identifier: Apache-2.0
+#
+# This file provides derivations for the dependencies of a gRPC
+# service in Go.
+
+{ external }:
+
+let
+  inherit (builtins) fetchGit map;
+in
+rec {
+  goProto = external {
+    path = "github.com/golang/protobuf";
+    src = fetchGit {
+      url = "https://github.com/golang/protobuf";
+      rev = "ed6926b37a637426117ccab59282c3839528a700";
+    };
+  };
+
+  xnet = external {
+    path = "golang.org/x/net";
+
+    src = fetchGit {
+      url = "https://go.googlesource.com/net";
+      rev = "ffdde105785063a81acd95bdf89ea53f6e0aac2d";
+    };
+
+    deps = [
+      xtext.secure.bidirule
+      xtext.unicode.bidi
+      xtext.unicode.norm
+    ];
+  };
+
+  xsys = external {
+    path = "golang.org/x/sys";
+    src = fetchGit {
+      url = "https://go.googlesource.com/sys";
+      rev = "bd437916bb0eb726b873ee8e9b2dcf212d32e2fd";
+    };
+  };
+
+  xtext = external {
+    path = "golang.org/x/text";
+    src = fetchGit {
+      url = "https://go.googlesource.com/text";
+      rev = "cbf43d21aaebfdfeb81d91a5f444d13a3046e686";
+    };
+  };
+
+  genproto = external {
+    path = "google.golang.org/genproto";
+    src = fetchGit {
+      url = "https://github.com/google/go-genproto";
+      # necessary because https://github.com/NixOS/nix/issues/1923
+      ref = "main";
+      rev = "83cc0476cb11ea0da33dacd4c6354ab192de6fe6";
+    };
+
+    deps = with goProto; [
+      proto
+      ptypes.any
+    ];
+  };
+
+  goGrpc = external {
+    path = "google.golang.org/grpc";
+    deps = ([
+      xnet.trace
+      xnet.http2
+      xsys.unix
+      xnet.http2.hpack
+      genproto.googleapis.rpc.status
+    ] ++ (with goProto; [
+      proto
+      ptypes
+      ptypes.duration
+      ptypes.timestamp
+    ]));
+
+    src = fetchGit {
+      url = "https://github.com/grpc/grpc-go";
+      rev = "d8e3da36ac481ef00e510ca119f6b68177713689";
+    };
+  };
+}
diff --git a/nix/buildLisp/README.md b/nix/buildLisp/README.md
new file mode 100644
index 000000000000..0d1e46983422
--- /dev/null
+++ b/nix/buildLisp/README.md
@@ -0,0 +1,254 @@
+buildLisp.nix
+=============
+
+This is a build system for Common Lisp, written in Nix.
+
+It aims to offer an alternative to ASDF for users who live in a
+Nix-based ecosystem. This offers several advantages over ASDF:
+
+* Simpler (almost logic-less) package definitions
+* Easy linking of native dependencies (from Nix)
+* Composability with Nix tooling for other languages
+* Effective, per-system caching strategies
+* Easy overriding of dependencies and whatnot
+* Convenient support for multiple Common Lisp implementations
+* ... and more!
+
+The project is still in its early stages and some important
+restrictions should be highlighted:
+
+* Extending `buildLisp` with support for a custom implementation
+  currently requires some knowledge of internals and may not be
+  considered stable yet.
+* Parallel compilation is not possible: Since buildLisp doesn't encode
+  dependencies between components (i.e. source files) like ASDF,
+  it must compile source files in sequence to avoid errors due to
+  undefined symbols.
+
+## Usage
+
+`buildLisp` exposes the following functions:
+
+* `buildLisp.library`: Builds a collection of Lisp files into a library.
+
+  | parameter | type         | use                           | required? |
+  |-----------|--------------|-------------------------------|-----------|
+  | `name`    | `string`     | Name of the library           | yes       |
+  | `srcs`    | `list<path>` | List of paths to source files | yes       |
+  | `deps`    | `list<drv>`  | List of dependencies          | no        |
+  | `native`  | `list<drv>`  | List of native dependencies   | no        |
+  | `tests`   | see "Tests"  | Specification for test suite  | no        |
+  | `implementation` | see "Implementations" | Common Lisp implementation to use | no |
+
+  The output of invoking this is a directory containing a FASL file
+  that is the concatenated result of all compiled sources.
+
+* `buildLisp.program`: Builds an executable program out of Lisp files.
+
+  | parameter | type         | use                           | required? |
+  |-----------|--------------|-------------------------------|-----------|
+  | `name`    | `string`     | Name of the program           | yes       |
+  | `srcs`    | `list<path>` | List of paths to source files | yes       |
+  | `deps`    | `list<drv>`  | List of dependencies          | no        |
+  | `native`  | `list<drv>`  | List of native dependencies   | no        |
+  | `main`    | `string`     | Entrypoint function           | no        |
+  | `tests`   | see "Tests"  | Specification for test suite  | no        |
+  | `implementation` | see "Implementations" | Common Lisp implementation to use | no |
+
+  The `main` parameter should be the name of a function and defaults
+  to `${name}:main` (i.e. the *exported* `main` function of the
+  package named after the program).
+
+  The output of invoking this is a directory containing a
+  `bin/${name}`.
+
+* `buildLisp.bundled`: Creates a virtual dependency on a built-in library.
+
+  Certain libraries ship with Lisp implementations, for example
+  UIOP/ASDF are commonly included but many implementations also ship
+  internals (such as SBCLs various `sb-*` libraries).
+
+  This function takes a single string argument that is the name of a
+  built-in library and returns a "package" that simply requires this
+  library.
+
+## Tests
+
+Both `buildLisp.library` and `buildLisp.program` take an optional argument
+`tests`, which has the following supported fields:
+
+  | parameter    | type         | use                           | required? |
+  |--------------|--------------|-------------------------------|-----------|
+  | `name`       | `string`     | Name of the test suite        | no        |
+  | `expression` | `string`     | Lisp expression to run tests  | yes       |
+  | `srcs`       | `list<path>` | List of paths to source files | no        |
+  | `native`     | `list<drv>`  | List of native dependencies   | no        |
+
+The `expression` parameter should be a Lisp expression and will be evaluated
+after loading all sources and dependencies (including library/program
+dependencies). It must return a non-`NIL` value if the test suite has passed.
+
+## Example
+
+Using buildLisp could look like this:
+
+```nix
+{ buildLisp, lispPkgs }:
+
+let libExample = buildLisp.library {
+    name = "lib-example";
+    srcs = [ ./lib.lisp ];
+
+    deps = with lispPkgs; [
+      (buildLisp.bundled "sb-posix")
+      iterate
+      cl-ppcre
+    ];
+};
+in buildLisp.program {
+    name = "example";
+    deps = [ libExample ];
+    srcs = [ ./main.lisp ];
+    tests = {
+      deps = [ lispPkgs.fiveam ];
+      srcs = [ ./tests.lisp ];
+      expression = "(fiveam:run!)";
+    };
+}
+```
+
+## Development REPLs
+
+`buildLisp` builds loadable variants of both `program` and `library` derivations
+(usually FASL files). Therefore it can provide a convenient way to obtain an
+instance of any implementation preloaded with `buildLisp`-derivations. This
+is especially useful to use as a host for Sly or SLIME.
+
+* `buildLisp.sbcl.lispWith`, `buildLisp.ccl.lispWith`, ...:
+  Creates a wrapper script preloading a Lisp implementation with various dependencies.
+
+  This function takes a single argument: a list of Lisp libraries or
+  programs. The desired Lisp implementation will load all given
+  derivations and all their dependencies on startup (see the sketch
+  after this list).
+
+  The shortcut `buildLisp.sbclWith` for `buildLisp.sbcl.lispWith` is also provided.
+
+* `repl` passthru attribute: `derivation.repl` is provided as a shortcut
+  for `buildLisp.${implementationName}.lispWith [ derivation ]`.
+  `derivation.ccl.repl`, `derivation.sbcl.repl` etc. work as well, of course
+  (see also "Implementations" section).
+
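+A minimal sketch of such a development shell (assuming the `libExample`
+derivation from the example above and `pkgs.mkShell` being available):
+
+```nix
+pkgs.mkShell {
+  buildInputs = [
+    (buildLisp.sbcl.lispWith [ libExample ])
+  ];
+}
+```
+
+Running `sbcl` inside this shell starts an SBCL which has `libExample` and
+all of its dependencies already loaded.
+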
+## Implementations
+
+Both `buildLisp.library` and `buildLisp.program` allow specifying a different
+Common Lisp implementation than the default one (which is SBCL). When an
+implementation is passed, `buildLisp` makes sure all dependencies are built
+with that implementation as well since build artifacts from different
+implementations will be incompatible with each other.
+
+The argument taken by `implementation` is a special attribute set which
+describes how to do certain tasks for a given implementation, like building
+or loading a library. In case you want to use a custom implementation
+description, the precise structure needed is documented in `buildLisp`'s
+source code for now. `buildLisp` also exposes the following already
+working implementation sets:
+
+* `buildLisp.sbcl`: [SBCL][sbcl], our default implementation
+
+* `buildLisp.ccl`: [CCL][ccl], similar to SBCL, but with very good macOS support
+
+* `buildLisp.ecl`: [ECL][ecl] setup to produce statically linked binaries and
+  libraries. Note that its runtime library is LGPL, so [extra conditions][lgpl-static]
+  must be fulfilled when distributing binaries produced this way.
+
+* Support for ABCL is planned.
+
+For each of these “known” implementations, `buildLisp` will create a `passthru`
+attribute named after the implementation which points to a variant of the derivation
+built with said implementation. Say we have a derivation, `myDrv`, built using SBCL:
+While `myDrv` and `myDrv.sbcl` are built using SBCL, `myDrv.ecl`, `myDrv.ccl` etc.
+build the derivation and all its dependencies using ECL and CCL respectively.
+
+This is useful to test portability of your derivation, but is also used internally
+to speed up the “normalization” of the dependency graph. Thus it is important to
+make sure that your custom implementation's name doesn't clash with one of the
+“known” ones.
+
+## Handling Implementation Specifics
+
+When targeting multiple Common Lisp implementations, it is often necessary to
+handle differing interfaces for OS interaction or to make use of special
+implementation features. For this reason, `buildLisp` allows specifying
+dependencies and source files for specific implementations only. This is done
+by placing an attribute set in the `deps` or `srcs` list: `buildLisp` will pick
+the value of the attribute named after the implementation in use, fall back to
+the value of the `default` attribute, and ignore the set completely if both
+are missing.
+
+```nix
+{ buildLisp, lispPkgs }:
+
+buildLisp.library {
+  name = "mylib";
+
+  srcs = [
+    # These are included always of course
+    ./package.lisp
+    ./portable-lib.lisp
+
+    # Choose right impl-* file
+    {
+      sbcl = ./impl-sbcl.lisp;
+      ccl = ./impl-ccl.lisp;
+      ecl = ./impl-ecl.lisp;
+    }
+
+    # We can also use this to inject extra files
+    { ecl = ./extra-ecl-optimizations.lisp; }
+  ];
+
+  deps = [
+    # Use SBCL's special bundled package, flexi-streams otherwise
+    {
+      sbcl = buildLisp.bundled "sb-rotate-byte";
+      default = lispPkgs.flexi-streams;
+    }
+  ];
+}
+```
+
+Additionally, a `brokenOn` parameter is accepted which takes a list of
+implementation names on which the derivation is not expected to work.
+This only influences `meta.ci.targets`, which is read by depot's CI to
+determine which variants (see "Implementations") of the derivation to
+build, so it may not be useful outside of depot.
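+
+For example (a sketch, reusing the `mylib` library from above):
+
+```nix
+buildLisp.library {
+  name = "mylib";
+  srcs = [ ./package.lisp ./portable-lib.lisp ];
+
+  # Don't attempt to build or test this derivation with CCL in CI.
+  brokenOn = [ "ccl" ];
+}
+```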
+
+## Influencing the Lisp Runtime
+
+Lisp implementations which create an executable by dumping an image
+usually parse a few implementation-specific command line options on
+executable startup that influence runtime settings related to things
+like GC. `buildLisp` generates a wrapper which makes sure that this
+never interferes with the argument parsing implemented in the actual
+application, but sometimes it is useful to run an executable with
+special settings. To allow this, the content of `NIX_BUILDLISP_LISP_ARGS`
+is passed to the lisp implementation.
+
+For example, you can make the underlying SBCL print its version for
+any executable built with `buildLisp` (and SBCL) like this:
+
+```console
+$ env NIX_BUILDLISP_LISP_ARGS="--version" ./result/bin/🕰️
+SBCL 2.1.2.nixos
+```
+
+In practice you'd probably want to specify options like
+`--dynamic-space-size` or `--tls-limit` (try passing `--help` for a
+full list). Naturally, these options are completely different for
+different implementations.
+
+[sbcl]: http://www.sbcl.org/
+[ccl]: https://ccl.clozure.com/
+[ecl]: https://common-lisp.net/project/ecl/
+[lgpl-static]: https://www.gnu.org/licenses/gpl-faq.en.html#LGPLStaticVsDynamic
diff --git a/nix/buildLisp/default.nix b/nix/buildLisp/default.nix
new file mode 100644
index 000000000000..a8168334a77b
--- /dev/null
+++ b/nix/buildLisp/default.nix
@@ -0,0 +1,779 @@
+# buildLisp provides Nix functions to build Common Lisp packages,
+# targeting SBCL.
+#
+# buildLisp is designed to enforce conventions and do away with the
+# free-for-all of existing Lisp build systems.
+
+{ pkgs ? import <nixpkgs> { }, ... }:
+
+let
+  inherit (builtins) map elemAt match filter;
+  inherit (pkgs) lib runCommandNoCC makeWrapper writeText writeShellScriptBin sbcl ecl-static ccl;
+  inherit (pkgs.stdenv) targetPlatform;
+
+  #
+  # Internal helper definitions
+  #
+
+  defaultImplementation = impls.sbcl;
+
+  # Many Common Lisp implementations (like ECL and CCL) will occasionally drop
+  # you into an interactive debugger even when executing something as a script.
+  # In nix builds we don't want such a situation: Any error should make the
+  # script exit non-zero. Luckily the ANSI standard specifies *debugger-hook*
+  # which is invoked before the debugger letting us just do that.
+  disableDebugger = writeText "disable-debugger.lisp" ''
+    (setf *debugger-hook*
+          (lambda (error hook)
+            (declare (ignore hook))
+            (format *error-output* "~%Unhandled error: ~a~%" error)
+            #+ccl (quit 1)
+            #+ecl (ext:quit 1)))
+  '';
+
+  # Process a list of arbitrary values which also contains “implementation
+  # filter sets” which describe conditional inclusion of elements depending
+  # on the CL implementation used. Elements are processed in the following
+  # manner:
+  #
+  # * Paths, strings, derivations are left as is
+  # * A non-derivation attribute set is processed like this:
+  #   1. If it has an attribute equal to impl.name, replace with its value.
+  #   2. Alternatively use the value of the "default" attribute.
+  #   3. In all other cases delete the element from the list.
+  #
+  # This can be used to express dependencies or source files which are specific
+  # to certain implementations:
+  #
+  #  srcs = [
+  #    # mixable with unconditional entries
+  #    ./package.lisp
+  #
+  #    # implementation specific source files
+  #    {
+  #      ccl = ./impl-ccl.lisp;
+  #      sbcl = ./impl-sbcl.lisp;
+  #      ecl = ./impl-ecl.lisp;
+  #    }
+  #  ];
+  #
+  #  deps = [
+  #    # this dependency is ignored if impl.name != "sbcl"
+  #    { sbcl = buildLisp.bundled "sb-posix"; }
+  #
+  #    # only special casing for a single implementation
+  #    {
+  #      sbcl = buildLisp.bundled "uiop";
+  #      default = buildLisp.bundled "asdf";
+  #    }
+  #  ];
+  implFilter = impl: xs:
+    let
+      isFilterSet = x: builtins.isAttrs x && !(lib.isDerivation x);
+    in
+    builtins.map
+      (
+        x: if isFilterSet x then x.${impl.name} or x.default else x
+      )
+      (builtins.filter
+        (
+          x: !(isFilterSet x) || x ? ${impl.name} || x ? default
+        )
+        xs);
+
+  # Generates lisp code which instructs the given lisp implementation to load
+  # all the given dependencies.
+  genLoadLispGeneric = impl: deps:
+    lib.concatStringsSep "\n"
+      (map (lib: "(load \"${lib}/${lib.lispName}.${impl.faslExt}\")")
+        (allDeps impl deps));
+
+  # 'genTestLispGeneric' generates a Lisp file that loads all sources and deps
+  # and executes expression for a given implementation description.
+  genTestLispGeneric = impl: { name, srcs, deps, expression }: writeText "${name}.lisp" ''
+    ;; Dependencies
+    ${impl.genLoadLisp deps}
+
+    ;; Sources
+    ${lib.concatStringsSep "\n" (map (src: "(load \"${src}\")") srcs)}
+
+    ;; Test expression
+    (unless ${expression}
+      (exit :code 1))
+  '';
+
+  # 'dependsOn' determines whether Lisp library 'b' depends on 'a'.
+  dependsOn = a: b: builtins.elem a b.lispDeps;
+
+  # 'allDeps' flattens the list of dependencies (and their
+  # dependencies) into one ordered list of unique deps which
+  # all use the given implementation.
+  allDeps = impl: deps:
+    let
+      # The override _should_ propagate itself recursively, as every derivation
+      # would only expose its actually used dependencies. Use implementation
+      # attribute created by withExtras if present, override in all other cases
+      # (mainly bundled).
+      deps' = builtins.map
+        (dep: dep."${impl.name}" or (dep.overrideLisp (_: {
+          implementation = impl;
+        })))
+        deps;
+    in
+    (lib.toposort dependsOn (lib.unique (
+      lib.flatten (deps' ++ (map (d: d.lispDeps) deps'))
+    ))).result;
+
+  # 'allNative' extracts all native dependencies of a dependency list
+  # to ensure that library load paths are set correctly during all
+  # compilations and program assembly.
+  allNative = native: deps: lib.unique (
+    lib.flatten (native ++ (map (d: d.lispNativeDeps) deps))
+  );
+
+  # Add an `overrideLisp` attribute to a function result that works
+  # similar to `overrideAttrs`, but is used specifically for the
+  # arguments passed to Lisp builders.
+  makeOverridable = f: orig: (f orig) // {
+    overrideLisp = new: makeOverridable f (orig // (new orig));
+  };
+
+  # This is a wrapper around 'makeOverridable' which performs its
+  # function, but also adds the following attributes to the
+  # resulting derivation: a repl attribute which builds a `lispWith`
+  # derivation for the current implementation, and additional attributes for
+  # all implementations. So `drv.sbcl` would build the derivation
+  # with SBCL regardless of what was specified in the initial arguments.
+  withExtras = f: args:
+    let
+      drv = (makeOverridable f) args;
+    in
+    lib.fix (self:
+      drv.overrideLisp
+        (old:
+          let
+            implementation = old.implementation or defaultImplementation;
+            brokenOn = old.brokenOn or [ ];
+            targets = lib.subtractLists (brokenOn ++ [ implementation.name ])
+              (builtins.attrNames impls);
+          in
+          {
+            passthru = (old.passthru or { }) // {
+              repl = implementation.lispWith [ self ];
+
+              # meta is done via passthru to minimize rebuilds caused by overriding
+              meta = (old.passthru.meta or { }) // {
+                ci = (old.passthru.meta.ci or { }) // {
+                  inherit targets;
+                };
+              };
+            } // builtins.listToAttrs (builtins.map
+              (impl: {
+                inherit (impl) name;
+                value = self.overrideLisp (_: {
+                  implementation = impl;
+                });
+              })
+              (builtins.attrValues impls));
+          }) // {
+        overrideLisp = new: withExtras f (args // new args);
+      });
+
+  # 'testSuite' builds a Common Lisp test suite that loads all of srcs and deps,
+  # and then executes expression to check its result
+  testSuite = { name, expression, srcs, deps ? [ ], native ? [ ], implementation }:
+    let
+      lispDeps = allDeps implementation (implFilter implementation deps);
+      lispNativeDeps = allNative native lispDeps;
+      filteredSrcs = implFilter implementation srcs;
+    in
+    runCommandNoCC name
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+      } ''
+      echo "Running test suite ${name}"
+
+      ${implementation.runScript} ${
+        implementation.genTestLisp {
+          inherit name expression;
+          srcs = filteredSrcs;
+          deps = lispDeps;
+        }
+      } | tee $out
+
+      echo "Test suite ${name} succeeded"
+    '';
+
+  # 'impls' is an attribute set of attribute sets which describe how to do common
+  # tasks when building for different Common Lisp implementations. Each
+  # implementation set has the following members:
+  #
+  # Required members:
+  #
+  # - runScript :: string
+  #   Describes how to invoke the implementation from the shell, so it runs a
+  #   lisp file as a script and exits.
+  # - faslExt :: string
+  #   File extension of the implementations loadable (FASL) files.
+  #   Implementations are free to generate native object files, but with the way
+  #   buildLisp works it is required that we can also 'load' libraries, so
+  #   (additionally) building a FASL or equivalent is required.
+  # - genLoadLisp :: [ dependency ] -> string
+  #   Returns lisp code to 'load' the given dependencies. 'genLoadLispGeneric'
+  #   should work for most dependencies.
+  # - genCompileLisp :: { name, srcs, deps } -> file
+  #   Builds a lisp file which instructs the implementation to build a library
+  #   from the given source files when executed. After running at least
+  #   the file "$out/${name}.${impls.${implementation}.faslExt}" should have
+  #   been created.
+  # - genDumpLisp :: { name, main, deps } -> file
+  #   Builds a lisp file which instructs the implementation to build an
+  #   executable which runs 'main' (and exits) where 'main' is available from
+  #   'deps'. The executable should be created as "$out/bin/${name}", usually
+  #   by dumping the lisp image with the toplevel function replaced.
+  # - wrapProgram :: boolean
+  #   Whether to wrap the resulting binary / image with a wrapper script setting
+  #   `LD_LIBRARY_PATH`.
+  # - genTestLisp :: { name, srcs, deps, expression } -> file
+  #   Builds a lisp file which loads the given 'deps' and 'srcs' files and
+  #   then evaluates 'expression'. Depending on whether 'expression' returns
+  #   true or false, the script must exit with a zero or non-zero exit code.
+  #   'genTestLispGeneric' will work for most implementations.
+  # - lispWith :: [ dependency ] -> drv
+  #   Builds a script (or dumped image) which when executed loads (or has
+  #   loaded) all given dependencies. When built this should create an executable
+  #   at "$out/bin/${implementation}".
+  #
+  # Optional members:
+  #
+  # - bundled :: string -> library
+  #   Allows giving an implementation specific builder for a bundled library.
+  #   This function is used as a replacement for the internal defaultBundled
+  #   function and only needs to support one implementation. The returned derivation
+  #   must behave like one built by 'library' (in particular have the same files
+  #   available in "$out" and the same 'passthru' attributes), but may be built
+  #   completely differently.
+  impls = lib.mapAttrs (name: v: { inherit name; } // v) {
+    sbcl = {
+      runScript = "${sbcl}/bin/sbcl --script";
+      faslExt = "fasl";
+
+      # 'genLoadLisp' generates Lisp code that instructs SBCL to load all
+      # the provided Lisp libraries.
+      genLoadLisp = genLoadLispGeneric impls.sbcl;
+
+      # 'genCompileLisp' generates a Lisp file that instructs SBCL to
+      # compile the provided list of Lisp source files to "$out/${name}.fasl".
+      genCompileLisp = { name, srcs, deps }: writeText "sbcl-compile.lisp" ''
+        ;; This file compiles the specified sources into the Nix build
+        ;; directory, creating one FASL file for each source.
+        (require 'sb-posix)
+
+        ${impls.sbcl.genLoadLisp deps}
+
+        (defun nix-compile-lisp (srcfile)
+          (let ((outfile (make-pathname :type "fasl"
+                                        :directory (or (sb-posix:getenv "NIX_BUILD_TOP")
+                                                       (error "not running in a Nix build"))
+                                        :name (substitute #\- #\/ srcfile))))
+            (multiple-value-bind (out-truename _warnings-p failure-p)
+                (compile-file srcfile :output-file outfile)
+              (if failure-p (sb-posix:exit 1)
+                  (progn
+                    ;; For the case of multiple files belonging to the same
+                    ;; library being compiled, load them in order:
+                    (load out-truename)
+
+                    ;; Return pathname as a string for cat-ting it later
+                    (namestring out-truename))))))
+
+        (let ((*compile-verbose* t)
+              (catted-fasl (make-pathname :type "fasl"
+                                          :directory (or (sb-posix:getenv "out")
+                                                         (error "not running in a Nix build"))
+                                          :name "${name}")))
+
+          (with-open-file (file catted-fasl
+                                :direction :output
+                                :if-does-not-exist :create)
+
+            ;; SBCL's FASL files can just be bundled together using cat
+            (sb-ext:run-program "cat"
+             (mapcar #'nix-compile-lisp
+              ;; These forms were inserted by the Nix build:
+              '(${
+                lib.concatMapStringsSep "\n" (src: "\"${src}\"") srcs
+              }))
+             :output file :search t)))
+      '';
+
+      # 'genDumpLisp' generates a Lisp file that instructs SBCL to dump
+      # the currently loaded image as an executable to $out/bin/$name.
+      #
+      # TODO(tazjin): Compression is currently unsupported because the
+      # SBCL in nixpkgs is, by default, not compiled with zlib support.
+      genDumpLisp = { name, main, deps }: writeText "sbcl-dump.lisp" ''
+        (require 'sb-posix)
+
+        ${impls.sbcl.genLoadLisp deps}
+
+        (let* ((bindir (concatenate 'string (sb-posix:getenv "out") "/bin"))
+               (outpath (make-pathname :name "${name}"
+                                       :directory bindir)))
+
+          ;; Tell UIOP that argv[0] will refer to running image, not the lisp impl
+          (when (find-package :uiop)
+            (eval `(setq ,(find-symbol "*IMAGE-DUMPED-P*" :uiop) :executable)))
+
+          (save-lisp-and-die outpath
+                             :executable t
+                             :toplevel
+                             (lambda ()
+                               ;; Filter out everything prior to the `--` we
+                               ;; insert in the wrapper to prevent SBCL from
+                               ;; parsing arguments at startup
+                               (setf sb-ext:*posix-argv*
+                                     (delete "--" sb-ext:*posix-argv*
+                                             :test #'string= :count 1))
+                               (${main}))
+                             :purify t))
+      '';
+
+      wrapProgram = true;
+
+      genTestLisp = genTestLispGeneric impls.sbcl;
+
+      lispWith = deps:
+        let lispDeps = filter (d: !d.lispBinary) (allDeps impls.sbcl deps);
+        in writeShellScriptBin "sbcl" ''
+          export LD_LIBRARY_PATH="${lib.makeLibraryPath (allNative [] lispDeps)}"
+          export LANG="C.UTF-8"
+          exec ${sbcl}/bin/sbcl ${
+            lib.optionalString (deps != [])
+              "--load ${writeText "load.lisp" (impls.sbcl.genLoadLisp lispDeps)}"
+          } "$@"
+        '';
+    };
+    ecl = {
+      runScript = "${ecl-static}/bin/ecl --load ${disableDebugger} --shell";
+      faslExt = "fasc";
+      genLoadLisp = genLoadLispGeneric impls.ecl;
+      genCompileLisp = { name, srcs, deps }: writeText "ecl-compile.lisp" ''
+        ;; This seems to be required to make the 'c' package available
+        ;; early, otherwise ECL tends to fail with a read failure…
+        (ext:install-c-compiler)
+
+        ;; Load dependencies
+        ${impls.ecl.genLoadLisp deps}
+
+        (defun getenv-or-fail (var)
+          (or (ext:getenv var)
+              (error (format nil "Missing expected environment variable ~A" var))))
+
+        (defun nix-compile-file (srcfile &key native)
+          "Compile the given srcfile into a compilation unit in :out-dir using
+          a unique name based on srcfile as the filename which is returned after
+          compilation. If :native is true, create an native object file,
+          otherwise a byte-compile fasc file is built and immediately loaded."
+
+          (let* ((unique-name (substitute #\_ #\/ srcfile))
+                 (out-file (make-pathname :type (if native "o" "fasc")
+                                          :directory (getenv-or-fail "NIX_BUILD_TOP")
+                                          :name unique-name)))
+            (multiple-value-bind (out-truename _warnings-p failure-p)
+                (compile-file srcfile :system-p native
+                                      :load (not native)
+                                      :output-file out-file
+                                      :verbose t :print t)
+              (if failure-p (ext:quit 1) out-truename))))
+
+        (let* ((out-dir (getenv-or-fail "out"))
+               (nix-build-dir (getenv-or-fail "NIX_BUILD_TOP"))
+               (srcs
+                ;; These forms are inserted by the Nix build
+                '(${lib.concatMapStringsSep "\n" (src: "\"${src}\"") srcs})))
+
+          ;; First, we'll byte compile loadable FASL files and load them
+          ;; immediately. Since we are using a statically linked ECL, there's
+          ;; no way to load native objects, so we rely on byte compilation
+          ;; for all our loading, which the compilation step of course requires.
+          (ext:install-bytecodes-compiler)
+
+          ;; ECL's bytecode FASLs can just be concatenated to create a bundle
+          ;; at least since a recent bugfix which we apply as a patch.
+          ;; See also: https://gitlab.com/embeddable-common-lisp/ecl/-/issues/649
+          (let ((bundle-out (make-pathname :type "fasc" :name "${name}"
+                                           :directory out-dir)))
+
+            (with-open-file (fasc-stream bundle-out :direction :output)
+              (ext:run-program "cat"
+                               (mapcar (lambda (f)
+                                         (namestring
+                                          (nix-compile-file f :native nil)))
+                                       srcs)
+                               :output fasc-stream)))
+
+          (ext:install-c-compiler)
+
+          ;; Build a (natively compiled) static archive (.a) file. We want to
+          ;; use this for (statically) linking an executable later. The bytecode
+          ;; dance is only required because we can't load such archives.
+          (c:build-static-library
+           (make-pathname :type "a" :name "${name}" :directory out-dir)
+           :lisp-files (mapcar (lambda (x)
+                                 (nix-compile-file x :native t))
+                               srcs)))
+      '';
+      genDumpLisp = { name, main, deps }: writeText "ecl-dump.lisp" ''
+        (defun getenv-or-fail (var)
+          (or (ext:getenv var)
+              (error (format nil "Missing expected environment variable ~A" var))))
+
+        ${impls.ecl.genLoadLisp deps}
+
+        ;; makes a 'c' package available that can link executables
+        (ext:install-c-compiler)
+
+        (c:build-program
+         (merge-pathnames (make-pathname :directory '(:relative "bin")
+                                         :name "${name}")
+                          (truename (getenv-or-fail "out")))
+         :epilogue-code `(progn
+                          ;; UIOP doesn't understand ECL, so we need to make it
+                          ;; aware that we are a proper executable, causing it
+                          ;; to handle argument parsing and such properly. Since
+                          ;; this needs to work even when we're not using UIOP,
+                          ;; we need to do some compile-time acrobatics.
+                          ,(when (find-package :uiop)
+                            `(setf ,(find-symbol "*IMAGE-DUMPED-P*" :uiop) :executable))
+                          ;; Run the actual application…
+                          (${main})
+                          ;; … and exit.
+                          (ext:quit))
+         ;; ECL can't remember these from its own build…
+         :ld-flags '("-static")
+         :lisp-files
+         ;; The following forms are inserted by the Nix build
+         '(${
+             lib.concatMapStrings (dep: ''
+               "${dep}/${dep.lispName}.a"
+             '') (allDeps impls.ecl deps)
+           }))
+      '';
+
+      wrapProgram = false;
+
+      genTestLisp = genTestLispGeneric impls.ecl;
+
+      lispWith = deps:
+        let lispDeps = filter (d: !d.lispBinary) (allDeps impls.ecl deps);
+        in writeShellScriptBin "ecl" ''
+          exec ${ecl-static}/bin/ecl ${
+            lib.optionalString (deps != [])
+              "--load ${writeText "load.lisp" (impls.ecl.genLoadLisp lispDeps)}"
+          } "$@"
+        '';
+
+      bundled = name: runCommandNoCC "${name}-cllib"
+        {
+          passthru = {
+            lispName = name;
+            lispNativeDeps = [ ];
+            lispDeps = [ ];
+            lispBinary = false;
+            repl = impls.ecl.lispWith [ (impls.ecl.bundled name) ];
+          };
+        } ''
+        mkdir -p "$out"
+        ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/${name}.${impls.ecl.faslExt}" -t "$out"
+        ln -s "${ecl-static}/lib/ecl-${ecl-static.version}/lib${name}.a" "$out/${name}.a"
+      '';
+    };
+    ccl = {
+      # Relatively bespoke wrapper script necessary to make CCL just™ execute
+      # a lisp file as a script.
+      runScript = pkgs.writers.writeBash "ccl" ''
+        # don't print intro message etc.
+        args=("--quiet")
+
+        # makes CCL crash on error instead of entering the debugger
+        args+=("--load" "${disableDebugger}")
+
+        # load files from command line in order
+        for f in "$@"; do
+          args+=("--load" "$f")
+        done
+
+        # Exit if everything was processed successfully
+        args+=("--eval" "(quit)")
+
+        exec ${ccl}/bin/ccl "''${args[@]}"
+      '';
+
+      # See https://ccl.clozure.com/docs/ccl.html#building-definitions
+      faslExt =
+        /**/
+        if targetPlatform.isPowerPC && targetPlatform.is32bit then "pfsl"
+        else if targetPlatform.isPowerPC && targetPlatform.is64bit then "p64fsl"
+        else if targetPlatform.isx86_64 && targetPlatform.isLinux then "lx64fsl"
+        else if targetPlatform.isx86_32 && targetPlatform.isLinux then "lx32fsl"
+        else if targetPlatform.isAarch32 && targetPlatform.isLinux then "lafsl"
+        else if targetPlatform.isx86_32 && targetPlatform.isDarwin then "dx32fsl"
+        else if targetPlatform.isx86_64 && targetPlatform.isDarwin then "dx64fsl"
+        else if targetPlatform.isx86_32 && targetPlatform.isFreeBSD then "fx32fsl"
+        else if targetPlatform.isx86_64 && targetPlatform.isFreeBSD then "fx64fsl"
+        else if targetPlatform.isx86_32 && targetPlatform.isWindows then "wx32fsl"
+        else if targetPlatform.isx86_64 && targetPlatform.isWindows then "wx64fsl"
+        else builtins.throw "Don't know what FASLs are called for this platform: "
+          + pkgs.stdenv.targetPlatform.system;
+
+      genLoadLisp = genLoadLispGeneric impls.ccl;
+
+      genCompileLisp = { name, srcs, deps }: writeText "ccl-compile.lisp" ''
+        ${impls.ccl.genLoadLisp deps}
+
+        (defun getenv-or-fail (var)
+          (or (getenv var)
+              (error (format nil "Missing expected environment variable ~A" var))))
+
+        (defun nix-compile-file (srcfile)
+          "Trivial wrapper around COMPILE-FILE which causes CCL to exit if
+          compilation fails and LOADs the compiled file on success."
+          (let ((output (make-pathname :name (substitute #\_ #\/ srcfile)
+                                       :type "${impls.ccl.faslExt}"
+                                       :directory (getenv-or-fail "NIX_BUILD_TOP"))))
+            (multiple-value-bind (out-truename _warnings-p failure-p)
+                (compile-file srcfile :output-file output :print t :verbose t)
+                (declare (ignore _warnings-p))
+              (if failure-p (quit 1)
+                  (progn (load out-truename) out-truename)))))
+
+        (fasl-concatenate (make-pathname :name "${name}" :type "${impls.ccl.faslExt}"
+                                         :directory (getenv-or-fail "out"))
+                          (mapcar #'nix-compile-file
+                                  ;; These forms were inserted by the Nix build
+                                  '(${
+                                      lib.concatMapStrings (src: ''
+                                        "${src}"
+                                      '') srcs
+                                   })))
+      '';
+
+      genDumpLisp = { name, main, deps }: writeText "ccl-dump.lisp" ''
+        ${impls.ccl.genLoadLisp deps}
+
+        (let* ((out (or (getenv "out") (error "Not running in a Nix build")))
+               (bindir (concatenate 'string out "/bin/"))
+               (executable (make-pathname :directory bindir :name "${name}")))
+
+          ;; Tell UIOP that argv[0] will refer to the running image, not the lisp impl
+          (when (find-package :uiop)
+            (eval `(setf ,(find-symbol "*IMAGE-DUMPED-P*" :uiop) :executable)))
+
+          (save-application executable
+                            :purify t
+                            :error-handler :quit
+                            :toplevel-function
+                            (lambda ()
+                              ;; Filter out everything prior to the `--` we
+                              ;; insert in the wrapper to prevent CCL from
+                              ;; parsing arguments at startup
+                              (setf ccl:*command-line-argument-list*
+                                    (delete "--" ccl:*command-line-argument-list*
+                                                 :test #'string= :count 1))
+                              (${main}))
+                            :mode #o755
+                            ;; TODO(sterni): use :native t on macOS
+                            :prepend-kernel t))
+      '';
+
+      wrapProgram = true;
+
+      genTestLisp = genTestLispGeneric impls.ccl;
+
+      lispWith = deps:
+        let lispDeps = filter (d: !d.lispBinary) (allDeps impls.ccl deps);
+        in writeShellScriptBin "ccl" ''
+          export LD_LIBRARY_PATH="${lib.makeLibraryPath (allNative [] lispDeps)}"
+          exec ${ccl}/bin/ccl ${
+            lib.optionalString (deps != [])
+              "--load ${writeText "load.lisp" (impls.ccl.genLoadLisp lispDeps)}"
+          } "$@"
+        '';
+    };
+  };
+
+  #
+  # Public API functions
+  #
+
+  # 'library' builds a list of Common Lisp files into an implementation
+  # specific library format, usually a single FASL file, which can then be
+  # loaded and built into an executable via 'program'.
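+  #
+  # Usage sketch (the name and the ./foo.lisp source file are hypothetical):
+  #
+  #   buildLisp.library {
+  #     name = "foo";
+  #     srcs = [ ./foo.lisp ];
+  #   }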
+  library =
+    { name
+    , implementation ? defaultImplementation
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
+    , srcs
+    , deps ? [ ]
+    , native ? [ ]
+    , tests ? null
+    , passthru ? { }
+    }:
+    let
+      filteredDeps = implFilter implementation deps;
+      filteredSrcs = implFilter implementation srcs;
+      lispNativeDeps = (allNative native filteredDeps);
+      lispDeps = allDeps implementation filteredDeps;
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs = filteredSrcs ++ (tests.srcs or [ ]);
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
+        else null;
+    in
+    lib.fix (self: runCommandNoCC "${name}-cllib"
+      {
+        LD_LIBRARY_PATH = lib.makeLibraryPath lispNativeDeps;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          inherit lispNativeDeps lispDeps;
+          lispName = name;
+          lispBinary = false;
+          tests = testDrv;
+        };
+      } ''
+      ${if ! isNull testDrv
+        then "echo 'Test ${testDrv} succeeded'"
+        else "echo 'No tests run'"}
+
+      mkdir $out
+
+      ${implementation.runScript} ${
+        implementation.genCompileLisp {
+          srcs = filteredSrcs;
+          inherit name;
+          deps = lispDeps;
+        }
+      }
+    '');
+
+  # 'program' creates an executable, usually containing a dumped image of the
+  # specified sources and dependencies.
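+  #
+  # Usage sketch (names are hypothetical; 'main' defaults to "<name>:main"):
+  #
+  #   buildLisp.program {
+  #     name = "foo";
+  #     srcs = [ ./main.lisp ];
+  #     deps = [ fooLib ];
+  #   }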
+  program =
+    { name
+    , implementation ? defaultImplementation
+    , brokenOn ? [ ] # TODO(sterni): make this a warning
+    , main ? "${name}:main"
+    , srcs
+    , deps ? [ ]
+    , native ? [ ]
+    , tests ? null
+    , passthru ? { }
+    }:
+    let
+      filteredSrcs = implFilter implementation srcs;
+      filteredDeps = implFilter implementation deps;
+      lispDeps = allDeps implementation filteredDeps;
+      libPath = lib.makeLibraryPath (allNative native lispDeps);
+      # overriding is used internally to propagate the implementation to use
+      selfLib = (makeOverridable library) {
+        inherit name native brokenOn;
+        deps = lispDeps;
+        srcs = filteredSrcs;
+      };
+      testDrv =
+        if ! isNull tests
+        then
+          testSuite
+            {
+              name = tests.name or "${name}-test";
+              srcs =
+                (
+                  # testSuite does run implFilter as well
+                  filteredSrcs ++ (tests.srcs or [ ])
+                );
+              deps = filteredDeps ++ (tests.deps or [ ]);
+              expression = tests.expression;
+              inherit implementation;
+            }
+        else null;
+    in
+    lib.fix (self: runCommandNoCC "${name}"
+      {
+        nativeBuildInputs = [ makeWrapper ];
+        LD_LIBRARY_PATH = libPath;
+        LANG = "C.UTF-8";
+        passthru = passthru // {
+          lispName = name;
+          lispDeps = [ selfLib ];
+          lispNativeDeps = native;
+          lispBinary = true;
+          tests = testDrv;
+        };
+      }
+      (''
+        ${if ! isNull testDrv
+          then "echo 'Test ${testDrv} succeeded'"
+          else ""}
+        mkdir -p $out/bin
+
+        ${implementation.runScript} ${
+          implementation.genDumpLisp {
+            inherit name main;
+            deps = ([ selfLib ] ++ lispDeps);
+          }
+        }
+      '' + lib.optionalString implementation.wrapProgram ''
+        wrapProgram $out/bin/${name} \
+          --prefix LD_LIBRARY_PATH : "${libPath}" \
+          --add-flags "\$NIX_BUILDLISP_LISP_ARGS --"
+      ''));
+
+  # 'bundled' creates a "library" which makes a built-in package available,
+  # such as any of SBCL's sb-* packages or ASDF. By default this is done
+  # by calling 'require', but implementations are free to provide their
+  # own specific bundled function.
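+  #
+  # For example, on SBCL (which defines no implementation-specific 'bundled'
+  # attribute) `bundled "sb-posix"` yields a library whose single source
+  # file just evaluates `(require 'sb-posix)`.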
+  bundled = name:
+    let
+      # TODO(sterni): allow overriding args to underlying 'library' (e. g. srcs)
+      defaultBundled = implementation: name: library {
+        inherit name implementation;
+        srcs = lib.singleton (builtins.toFile "${name}.lisp" "(require '${name})");
+      };
+
+      bundled' =
+        { implementation ? defaultImplementation
+        , name
+        }:
+        implementation.bundled or (defaultBundled implementation) name;
+
+    in
+    (makeOverridable bundled') {
+      inherit name;
+    };
+
+in
+{
+  library = withExtras library;
+  program = withExtras program;
+  inherit bundled;
+
+  # 'sbclWith' creates an image with the specified libraries /
+  # programs loaded in SBCL.
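+  #
+  # Usage sketch (someLibrary is a hypothetical buildLisp library):
+  #   sbclWith [ someLibrary ]
+  #   # => a derivation providing bin/sbcl with someLibrary loaded at startup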
+  sbclWith = impls.sbcl.lispWith;
+
+  inherit (impls)
+    sbcl
+    ecl
+    ccl
+    ;
+}
diff --git a/nix/buildLisp/example/default.nix b/nix/buildLisp/example/default.nix
new file mode 100644
index 000000000000..6add2676f10c
--- /dev/null
+++ b/nix/buildLisp/example/default.nix
@@ -0,0 +1,33 @@
+{ depot, ... }:
+
+let
+  inherit (depot.nix) buildLisp;
+
+  # Example Lisp library.
+  #
+  # Currently the `name` attribute is only used for the derivation
+  # itself; it has no practical implications.
+  libExample = buildLisp.library {
+    name = "lib-example";
+    srcs = [
+      ./lib.lisp
+    ];
+  };
+
+  # Example Lisp program.
+  #
+  # This builds & writes an executable for a program using the library
+  # above to disk.
+  #
+  # By default, buildLisp.program expects the entry point to be
+  # `$name:main`. This can be overridden by configuring the `main`
+  # attribute.
+in
+buildLisp.program {
+  name = "example";
+  deps = [ libExample ];
+
+  srcs = [
+    ./main.lisp
+  ];
+}
diff --git a/nix/buildLisp/example/lib.lisp b/nix/buildLisp/example/lib.lisp
new file mode 100644
index 000000000000..e557de4ae5fd
--- /dev/null
+++ b/nix/buildLisp/example/lib.lisp
@@ -0,0 +1,6 @@
+(defpackage lib-example
+  (:use :cl)
+  (:export :who))
+(in-package :lib-example)
+
+(defun who () "edef")
diff --git a/nix/buildLisp/example/main.lisp b/nix/buildLisp/example/main.lisp
new file mode 100644
index 000000000000..a29390cf4dba
--- /dev/null
+++ b/nix/buildLisp/example/main.lisp
@@ -0,0 +1,7 @@
+(defpackage example
+  (:use :cl :lib-example)
+  (:export :main))
+(in-package :example)
+
+(defun main ()
+  (format t "i <3 ~A~%" (who)))
diff --git a/nix/buildLisp/tests/argv0.nix b/nix/buildLisp/tests/argv0.nix
new file mode 100644
index 000000000000..bc29337d06cd
--- /dev/null
+++ b/nix/buildLisp/tests/argv0.nix
@@ -0,0 +1,36 @@
+{ depot, pkgs, ... }:
+
+depot.nix.buildLisp.program {
+  name = "argv0-test";
+
+  srcs = [
+    (pkgs.writeText "argv0-test.lisp" ''
+      (defpackage :argv0-test (:use :common-lisp :uiop) (:export :main))
+      (in-package :argv0-test)
+
+      (defun main ()
+        (format t "~A~%" (uiop:argv0)))
+    '')
+  ];
+
+  deps = [
+    {
+      sbcl = depot.nix.buildLisp.bundled "uiop";
+      default = depot.nix.buildLisp.bundled "asdf";
+    }
+  ];
+
+  passthru.meta.ci = {
+    extraSteps.verify = {
+      label = "verify argv[0] output";
+      needsOutput = true;
+      command = pkgs.writeShellScript "check-argv0" ''
+        set -eux
+
+        for invocation in "$(pwd)/result/bin/argv0-test" "./result/bin/argv0-test"; do
+          test "$invocation" = "$("$invocation")"
+        done
+      '';
+    };
+  };
+}
diff --git a/nix/buildManPages/OWNERS b/nix/buildManPages/OWNERS
new file mode 100644
index 000000000000..f16dd105d761
--- /dev/null
+++ b/nix/buildManPages/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - sterni
diff --git a/nix/buildManPages/default.nix b/nix/buildManPages/default.nix
new file mode 100644
index 000000000000..746ed25182b4
--- /dev/null
+++ b/nix/buildManPages/default.nix
@@ -0,0 +1,103 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  inherit (pkgs)
+    gzip
+    mandoc
+    coreutils
+    ;
+
+  inherit (depot.nix)
+    runExecline
+    getBins
+    ;
+
+  bins = getBins mandoc [ "mandoc" ]
+    // getBins gzip [ "gzip" ]
+    // getBins coreutils [ "mkdir" "ln" "cp" ]
+  ;
+
+  defaultGzip = true;
+
+  basename = gzip: { name, section, ... }:
+    "${name}.${toString section}${lib.optionalString gzip ".gz"}";
+
+  manDir = { section, ... }:
+    "\${out}/share/man/man${toString section}";
+
+  target = gzip: args:
+    "${manDir args}/${basename gzip args}";
+
+  buildManPage =
+    { requireLint ? false
+    , gzip ? defaultGzip
+    , ...
+    }:
+    { content
+    , ...
+    }@page:
+    let
+      source = builtins.toFile (basename false page) content;
+    in
+    runExecline (basename gzip page) { } ([
+      (if requireLint then "if" else "foreground")
+      [
+        bins.mandoc
+        "-mdoc"
+        "-T"
+        "lint"
+        source
+      ]
+      "importas"
+      "out"
+      "out"
+    ] ++ (if gzip then [
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
+      bins.gzip
+      "-c"
+      source
+    ] else [
+      bins.cp
+      "--reflink=auto"
+      source
+      "$out"
+    ]));
+
+  buildManPages =
+    name:
+    { derivationArgs ? { }
+    , gzip ? defaultGzip
+    , ...
+    }@args:
+    pages:
+    runExecline "${name}-man-pages"
+      {
+        inherit derivationArgs;
+      }
+      ([
+        "importas"
+        "out"
+        "out"
+      ] ++ lib.concatMap
+        ({ name, section, content }@page: [
+          "if"
+          [ bins.mkdir "-p" (manDir page) ]
+          "if"
+          [
+            bins.ln
+            "-s"
+            (buildManPage args page)
+            (target gzip page)
+          ]
+        ])
+        pages);
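+
+  # Usage sketch (the page name and its mdoc source file are hypothetical):
+  #
+  #   buildManPages "mytool" { } [
+  #     { name = "mytool"; section = 1; content = builtins.readFile ./mytool.1; }
+  #   ]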
+
+in
+{
+  __functor = _: buildManPages;
+
+  single = buildManPage;
+}
diff --git a/nix/buildkite/default.nix b/nix/buildkite/default.nix
new file mode 100644
index 000000000000..6a0e9d246dab
--- /dev/null
+++ b/nix/buildkite/default.nix
@@ -0,0 +1,335 @@
+# Logic for generating Buildkite pipelines from Nix build targets read
+# by //nix/readTree.
+#
+# It outputs a "YAML" (actually JSON) file which is evaluated and
+# submitted to Buildkite at the start of each build.
+#
+# The structure of the file that is being created is documented here:
+#   https://buildkite.com/docs/pipelines/defining-steps
+{ depot, pkgs, ... }:
+
+let
+  inherit (builtins)
+    attrValues
+    concatMap
+    concatStringsSep
+    filter
+    foldl'
+    getEnv
+    hasAttr
+    hashString
+    isNull
+    isString
+    length
+    listToAttrs
+    mapAttrs
+    partition
+    pathExists
+    toJSON
+    unsafeDiscardStringContext;
+
+  inherit (pkgs) lib runCommandNoCC writeText;
+  inherit (depot.nix.readTree) mkLabel;
+in
+rec {
+  # Creates a Nix expression that yields the target at the specified
+  # location in the repository.
+  #
+  # This makes a distinction between normal targets (which physically
+  # exist in the repository) and subtargets (which are "virtual"
+  # targets exposed by a physical one) to make it clear in the build
+  # output which is which.
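+  #
+  # For example, a target with __readTree = [ "nix" "buildkite" ] and no
+  # subtarget yields the expression:
+  #   builtins.getAttr "buildkite" (builtins.getAttr "nix" (import ./. {}))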
+  mkBuildExpr = target:
+    let
+      descend = expr: attr: "builtins.getAttr \"${attr}\" (${expr})";
+      targetExpr = foldl' descend "import ./. {}" target.__readTree;
+      subtargetExpr = descend targetExpr target.__subtarget;
+    in
+    if target ? __subtarget then subtargetExpr else targetExpr;
+
+  # Determine whether to skip a target if it has not diverged from the
+  # HEAD branch.
+  shouldSkip = parentTargetMap: label: drvPath:
+    if (hasAttr label parentTargetMap) && parentTargetMap."${label}".drvPath == drvPath
+    then "Target has not changed."
+    else false;
+
+  # Create build command for a derivation target.
+  mkBuildCommand = target: drvPath: concatStringsSep " " [
+    # First try to realise the drvPath of the target so we don't evaluate twice.
+    # Nix has no concept of depending on a derivation file without depending on
+    # at least one of its `outPath`s, so we need to discard the string context
+    # if we don't want to build everything during pipeline construction.
+    "(nix-store --realise '${drvPath}' --add-root result --indirect && readlink result)"
+
+    # Since we don't gcroot the derivation files, they may be deleted by the
+    # garbage collector. In that case we can reevaluate and build the attribute
+    # using nix-build.
+    "|| (test ! -f '${drvPath}' && nix-build -E '${mkBuildExpr target}' --show-trace)"
+  ];
+
+  # Create a pipeline step from a single target.
+  mkStep = headBranch: parentTargetMap: target:
+    let
+      label = mkLabel target;
+      drvPath = unsafeDiscardStringContext target.drvPath;
+      shouldSkip' = shouldSkip parentTargetMap;
+    in
+    {
+      label = ":nix: " + label;
+      key = hashString "sha1" label;
+      skip = shouldSkip' label drvPath;
+      command = mkBuildCommand target drvPath;
+      env.READTREE_TARGET = label;
+
+      # Add a dependency on the initial static pipeline step which
+      # always runs. This allows build steps uploaded in batches to
+      # start running before all batches have been uploaded.
+      depends_on = ":init:";
+    };
+
+  # Helper function to inelegantly divide a list into chunks of at
+  # most n elements.
+  #
+  # This works by assigning each element a chunk ID based on its
+  # index, and then grouping all elements by their chunk ID.
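+  #
+  # For example (sketch of the resulting shape):
+  #   chunksOf 2 [ "a" "b" "c" ] => { "1" = [ "a" ]; "2" = [ "b" "c" ]; }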
+  chunksOf = n: list:
+    let
+      chunkId = idx: toString (idx / n + 1);
+      assigned = lib.imap1 (idx: value: { inherit value; chunk = chunkId idx; }) list;
+      unchunk = mapAttrs (_: elements: map (e: e.value) elements);
+    in
+    unchunk (lib.groupBy (e: e.chunk) assigned);
+
+  # Define a build pipeline chunk as a JSON file, using the pipeline
+  # format documented on
+  # https://buildkite.com/docs/pipelines/defining-steps.
+  makePipelineChunk = name: chunkId: chunk: rec {
+    filename = "${name}-chunk-${chunkId}.json";
+    path = writeText filename (toJSON {
+      steps = chunk;
+    });
+  };
+
+  # Split the pipeline into chunks of at most 192 steps each, which
+  # are uploaded sequentially. This is because of a limitation in the
+  # Buildkite backend which struggles to process more than a specific
+  # number of chunks at once.
+  pipelineChunks = name: steps:
+    attrValues (mapAttrs (makePipelineChunk name) (chunksOf 192 steps));
+
+  # Create a pipeline structure for the given targets.
+  mkPipeline =
+    {
+      # HEAD branch of the repository on which release steps, GC
+      # anchoring and other "mainline only" steps should run.
+      headBranch
+    , # List of derivations as read by readTree (in most cases just the
+      # output of readTree.gather) that should be built in Buildkite.
+      #
+      # These are scheduled as the first build steps and run as fast as
+      # possible, in order, without any concurrency restrictions.
+      drvTargets
+    , # Derivation map of a parent commit. Only targets which no longer
+      # correspond to the content of this map will be built. Passing an
+      # empty map will always build all targets.
+      parentTargetMap ? { }
+    , # A list of plain Buildkite step structures to run alongside the
+      # build for all drvTargets, but before proceeding with any
+      # post-build actions such as status reporting.
+      #
+      # Can be used for things like code formatting checks.
+      additionalSteps ? [ ]
+    , # A list of plain Buildkite step structures to run after all
+      # previous steps succeeded.
+      #
+      # Can be used for status reporting steps and the like.
+      postBuildSteps ? [ ]
+    }:
+    let
+      # Convert a target into all of its build and post-build steps,
+      # treated separately as they need to be in different chunks.
+      targetToSteps = target:
+        let
+          step = mkStep headBranch parentTargetMap target;
+
+          # Same step, but with an override function applied. This is
+          # used in mkExtraStep if the extra step needs to modify the
+          # parent derivation somehow.
+          #
+          # Note that this will never affect the label.
+          overridable = f: mkStep headBranch parentTargetMap (f target);
+
+          # Split build/post-build steps
+          splitExtraSteps = partition ({ postStep, ... }: postStep)
+            (attrValues (mapAttrs
+              (name: value: {
+                inherit name value;
+                postStep = (value ? prompt) || (value.postBuild or false);
+              })
+              (target.meta.ci.extraSteps or { })));
+
+          mkExtraStep' = { name, value, ... }: mkExtraStep overridable name value;
+          extraBuildSteps = map mkExtraStep' splitExtraSteps.wrong; # 'wrong' -> not a post step
+          extraPostSteps = map mkExtraStep' splitExtraSteps.right; # 'right' -> post step (prompt or postBuild)
+        in
+        {
+          buildSteps = [ step ] ++ extraBuildSteps;
+          postSteps = extraPostSteps;
+        };
+
+      # Combine all target steps into separate build and post-build step lists.
+      steps = foldl'
+        (acc: t: {
+          buildSteps = acc.buildSteps ++ t.buildSteps;
+          postSteps = acc.postSteps ++ t.postSteps;
+        })
+        { buildSteps = [ ]; postSteps = [ ]; }
+        (map targetToSteps drvTargets);
+
+      buildSteps =
+        # Add build steps for each derivation target and their extra
+        # steps.
+        steps.buildSteps
+
+        # Add additional steps (if set).
+        ++ additionalSteps;
+
+      postSteps =
+        # Add post-build steps for each derivation target.
+        steps.postSteps
+
+        # Add any globally defined post-build steps.
+        ++ postBuildSteps;
+
+      buildChunks = pipelineChunks "build" buildSteps;
+      postBuildChunks = pipelineChunks "post" postSteps;
+      chunks = buildChunks ++ postBuildChunks;
+    in
+    runCommandNoCC "buildkite-pipeline" { } ''
+      mkdir $out
+      echo "Generated ${toString (length chunks)} pipeline chunks"
+      ${
+        lib.concatMapStringsSep "\n"
+          (chunk: "cp ${chunk.path} $out/${chunk.filename}") chunks
+      }
+    '';
+
+  # Create a drvmap structure for the given targets, containing the
+  # mapping of all target paths to their derivations. The mapping can
+  # be persisted for future use.
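+  #
+  # Rough shape of the resulting JSON (label and store path are placeholders):
+  #   { "<label>": { "drvPath": "<drv path>", "attrPath": [ "nix", "buildkite" ] } }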
+  mkDrvmap = drvTargets: writeText "drvmap.json" (toJSON (listToAttrs (map
+    (target: {
+      name = mkLabel target;
+      value = {
+        drvPath = unsafeDiscardStringContext target.drvPath;
+
+        # Include the attrPath in the output to reconstruct the drv
+        # without parsing the human-readable label.
+        attrPath = target.__readTree ++ lib.optionals (target ? __subtarget) [
+          target.__subtarget
+        ];
+      };
+    })
+    drvTargets)));
+
+  # Implementation of extra step logic.
+  #
+  # Each target extra step is an attribute specified in
+  # `meta.ci.extraSteps`. Its attribute name will be used as the step
+  # name on Buildkite.
+  #
+  #   command (required): A command that will be run in the depot
+  #     checkout when this step is executed. Should be a derivation
+  #     resulting in a single executable file, e.g. through
+  #     pkgs.writeShellScript.
+  #
+  #   label (optional): Human-readable label for this step to display
+  #     in the Buildkite UI instead of the attribute name.
+  #
+  #   prompt (optional): Setting this blocks the step until confirmed
+  #     by a human. Should be a string which is displayed for
+  #     confirmation. These steps always run after the main build is
+  #     done and have no influence on CI status.
+  #
+  #   postBuild (optional): If set to true, this step will run after
+  #     all primary build steps (that is, after status has been reported
+  #     back to CI).
+  #
+  #   needsOutput (optional): If set to true, the parent derivation
+  #     will be built in the working directory before running the
+  #     command. Output will be available as 'result'.
+  #     TODO: Figure out multiple-output derivations.
+  #
+  #   parentOverride (optional): A function (drv -> drv) to override
+  #     the parent's target definition when preparing its output. Only
+  #     used in extra steps that use needsOutput.
+  #
+  #   branches (optional): Git references (branches, tags ... ) on
+  #     which this step should be allowed to run. List of strings.
+  #
+  #   alwaysRun (optional): If set to true, this step will always run,
+  #     even if its parent has not been rebuilt.
+  #
+  # Note that gated steps are independent of each other.
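+  #
+  # Example sketch of a target defining an extra step (the verify script is
+  # hypothetical):
+  #
+  #   meta.ci.extraSteps.verify = {
+  #     label = "verify build output";
+  #     needsOutput = true;
+  #     command = pkgs.writeShellScript "verify" "test -e result";
+  #   };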
+
+  # Create a gated step in a step group, independent from any other
+  # steps.
+  mkGatedStep = { step, label, parent, prompt }: {
+    inherit (step) depends_on;
+    group = label;
+    skip = parent.skip or false;
+
+    steps = [
+      {
+        inherit (step) branches;
+        inherit prompt;
+        block = ":radio_button: Run ${label}? (from ${parent.env.READTREE_TARGET})";
+      }
+
+      # The explicit depends_on of the wrapped step must be removed,
+      # otherwise its dependency relationship with the gate step will
+      # break.
+      (builtins.removeAttrs step [ "depends_on" ])
+    ];
+  };
+
+  # Create the Buildkite configuration for an extra step, optionally
+  # wrapping it in a gate group.
+  mkExtraStep = overridableParent: key:
+    { command
+    , label ? key
+    , prompt ? false
+    , needsOutput ? false
+    , parentOverride ? (x: x)
+    , branches ? null
+    , alwaysRun ? false
+    , postBuild ? false
+    }@cfg:
+    let
+      parent = overridableParent parentOverride;
+      parentLabel = parent.env.READTREE_TARGET;
+
+      step = {
+        label = ":gear: ${label} (from ${parentLabel})";
+        skip = if alwaysRun then false else parent.skip or false;
+        depends_on = lib.optional (!alwaysRun && !needsOutput) parent.key;
+        branches = if branches != null then lib.concatStringsSep " " branches else null;
+
+        command = pkgs.writeShellScript "${key}-script" ''
+          set -ueo pipefail
+          ${lib.optionalString needsOutput "echo '~~~ Preparing build output of ${parentLabel}'"}
+          ${lib.optionalString needsOutput parent.command}
+          echo '+++ Running extra step command'
+          exec ${command}
+        '';
+      };
+    in
+    if (isString prompt)
+    then
+      mkGatedStep
+        {
+          inherit step label parent prompt;
+        }
+    else step;
+}
diff --git a/nix/buildkite/fetch-parent-targets.sh b/nix/buildkite/fetch-parent-targets.sh
new file mode 100755
index 000000000000..ca65646abdc5
--- /dev/null
+++ b/nix/buildkite/fetch-parent-targets.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+set -ueo pipefail
+
+# Each Buildkite build stores the derivation target map as a pipeline
+# artifact. This script determines the most appropriate commit (the
+# fork point of the current chain from HEAD) and fetches the artifact.
+#
+# New builds can be based on HEAD before the pipeline for the last
+# commit has finished, in which case it is possible that the fork
+# point has no derivation map. To account for this, the fork point and
+# the two commits preceding it are all queried to find a map.
+#
+# If no map is found, the failure mode is not critical: We simply
+# build all targets.
+
+: ${DRVMAP_PATH:=pipeline/drvmap.json}
+
+git fetch -v origin "${BUILDKITE_PIPELINE_DEFAULT_BRANCH}"
+
+FIRST=$(git merge-base FETCH_HEAD "${BUILDKITE_COMMIT}")
+SECOND=$(git rev-parse "$FIRST~1")
+THIRD=$(git rev-parse "$FIRST~2")
+
+function most_relevant_builds {
+    set -u
+    curl 'https://graphql.buildkite.com/v1' \
+         --silent \
+         -H "Authorization: Bearer $(cat /run/agenix/buildkite-graphql-token)" \
+         -d "{\"query\": \"query { pipeline(slug: \\\"$BUILDKITE_ORGANIZATION_SLUG/$BUILDKITE_PIPELINE_SLUG\\\") { builds(commit: [\\\"$FIRST\\\",\\\"$SECOND\\\",\\\"$THIRD\\\"]) { edges { node { uuid }}}}}\"}" | \
+         jq -r '.data.pipeline.builds.edges[] | .node.uuid'
+}
+
+mkdir -p tmp
+for build in $(most_relevant_builds); do
+    echo "Checking artifacts for build $build"
+    buildkite-agent artifact download --build "${build}" "${DRVMAP_PATH}" 'tmp/' || true
+
+    if [[ -f "tmp/${DRVMAP_PATH}" ]]; then
+        echo "Fetched target map from build ${build}"
+        mv "tmp/${DRVMAP_PATH}" tmp/parent-target-map.json
+        break
+    fi
+done
diff --git a/nix/drvSeqL/default.nix b/nix/drvSeqL/default.nix
new file mode 100644
index 000000000000..6437e1a043a5
--- /dev/null
+++ b/nix/drvSeqL/default.nix
@@ -0,0 +1,47 @@
+{ depot, lib, pkgs, ... }:
+
+let
+
+  inherit (depot.nix.yants)
+    defun
+    list
+    drv
+    ;
+
+  /* Realize drvDeps, then return drvOut if that succeeds.
+   * This can be used to make drvOut depend on the
+   * build success of all drvDeps without making each drvDep
+   * a dependency of drvOut.
+   * => drvOut is not rebuilt if drvDep changes
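+   *
+   * Usage sketch (derivation names are hypothetical):
+   *   drvSeqL [ testSuiteDrv ] outputDrv
+   *   # building the result also realises testSuiteDrv, while the
+   *   # outputs of outputDrv itself are reused via symlinks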
+   */
+  drvSeqL = defun [ (list drv) drv drv ]
+    (drvDeps: drvOut:
+      let
+        drvOutOutputs = drvOut.outputs or [ "out" ];
+      in
+      pkgs.runCommandLocal drvOut.name
+        {
+          # we inherit all attributes in order to replicate
+          # the original derivation as much as possible
+          outputs = drvOutOutputs;
+          passthru = drvOut.drvAttrs;
+          # depend on drvDeps (by putting it in builder context)
+          inherit drvDeps;
+        }
+        # the outputs of the original derivation are replicated
+        # by creating a symlink to the old output path
+        (lib.concatMapStrings
+          (output: ''
+            target=${lib.escapeShellArg drvOut.${output}}
+            # if the target is already a symlink, follow it until it’s not;
+            # this is done to prevent too many dereferences
+            target=$(readlink -e "$target")
+            # link to the output
+            ln -s "$target" "${"$"}${output}"
+          '')
+          drvOutOutputs));
+
+in
+{
+  __functor = _: drvSeqL;
+}
diff --git a/nix/emptyDerivation/OWNERS b/nix/emptyDerivation/OWNERS
new file mode 100644
index 000000000000..a742d0d22bf6
--- /dev/null
+++ b/nix/emptyDerivation/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - Profpatsch
diff --git a/nix/emptyDerivation/default.nix b/nix/emptyDerivation/default.nix
new file mode 100644
index 000000000000..8433984012c7
--- /dev/null
+++ b/nix/emptyDerivation/default.nix
@@ -0,0 +1,21 @@
+{ depot, pkgs, ... }:
+
+let
+  emptyDerivation = import ./emptyDerivation.nix {
+    inherit pkgs;
+    inherit (pkgs) stdenv;
+    inherit (depot.nix) getBins;
+  };
+
+  tests = import ./tests.nix {
+    inherit emptyDerivation;
+    inherit pkgs;
+    inherit (depot.nix) writeExecline getBins;
+    inherit (depot.nix.runTestsuite) runTestsuite it assertEq;
+  };
+
+in
+{
+  __functor = _: emptyDerivation;
+  inherit tests;
+}
diff --git a/nix/emptyDerivation/emptyDerivation.nix b/nix/emptyDerivation/emptyDerivation.nix
new file mode 100644
index 000000000000..772df9635214
--- /dev/null
+++ b/nix/emptyDerivation/emptyDerivation.nix
@@ -0,0 +1,38 @@
+{ stdenv, pkgs, getBins }:
+
+# The empty derivation. All it does is touch $out.
+# Basically the unit value for derivations.
+#
+# Besides simple test situations that just need some derivation,
+# it can also be called as a function with an attrset (via __functor).
+# The set you pass is `//`-merged with the attrset before calling
+# derivation, so you can use this to add more fields.
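+#
+# Example (sketch, mirroring the override test in tests.nix):
+#   emptyDerivation { foo = "bar"; }
+#   # => a derivation that additionally has a `foo` attribute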
+
+let
+  bins = getBins pkgs.s6-portable-utils [ "s6-touch" ]
+    // getBins pkgs.execline [ "importas" "exec" ];
+
+  emptiness = {
+    name = "empty-derivation";
+
+    # TODO(Profpatsch): can we get system from tvl?
+    inherit (stdenv) system;
+
+    builder = bins.exec;
+    args = [
+      bins.importas
+      "out"
+      "out"
+      bins.s6-touch
+      "$out"
+    ];
+  };
+
+in
+(derivation emptiness) // {
+  # This allows us to call the empty derivation
+  # like a function and override fields/add new fields.
+  __functor = _: overrides:
+    derivation (emptiness // overrides);
+}
diff --git a/nix/emptyDerivation/tests.nix b/nix/emptyDerivation/tests.nix
new file mode 100644
index 000000000000..a73842882499
--- /dev/null
+++ b/nix/emptyDerivation/tests.nix
@@ -0,0 +1,40 @@
+{ emptyDerivation, getBins, pkgs, writeExecline, runTestsuite, it, assertEq }:
+
+let
+  bins = getBins pkgs.s6-portable-utils [ "s6-echo" ];
+
+  empty = it "is just an empty path" [
+    (assertEq "path empty"
+      (builtins.readFile emptyDerivation)
+      "")
+  ];
+
+  fooOut = emptyDerivation {
+    builder = writeExecline "foo-builder" { } [
+      "importas"
+      "out"
+      "out"
+      "redirfd"
+      "-w"
+      "1"
+      "$out"
+      bins.s6-echo
+      "-n"
+      "foo"
+    ];
+  };
+
+  overrideBuilder = it "can override the builder" [
+    (assertEq "output is foo"
+      (builtins.readFile fooOut)
+      "foo")
+    (assertEq "can add new drv variables"
+      (emptyDerivation { foo = "bar"; }).foo
+      "bar")
+  ];
+
+in
+runTestsuite "emptyDerivation" [
+  empty
+  overrideBuilder
+]
diff --git a/nix/escapeExecline/default.nix b/nix/escapeExecline/default.nix
new file mode 100644
index 000000000000..d2c39dd39894
--- /dev/null
+++ b/nix/escapeExecline/default.nix
@@ -0,0 +1,32 @@
+{ lib, ... }:
+let
+  # replaces " and \ to \" and \\ respectively and quote with "
+  # e.g.
+  #   a"b\c -> "a\"b\\c"
+  #   a\"bc -> "a\\\"bc"
+  escapeExeclineArg = arg:
+    ''"${builtins.replaceStrings [ ''"'' ''\'' ] [ ''\"'' ''\\'' ] (toString arg)}"'';
+
+  # Escapes an execline (list of execline strings) to be passed to execlineb
+  # Give it a nested list of strings. Nested lists are interpolated as execline
+  # blocks ({}).
+  # Everything is quoted correctly.
+  #
+  # Example:
+  #   escapeExecline [ "if" [ "somecommand" ] "true" ]
+  #   == ''"if" { "somecommand" } "true"''
+  escapeExecline = execlineList: lib.concatStringsSep " "
+    (
+      let
+        go = arg:
+          if builtins.isString arg then [ (escapeExeclineArg arg) ]
+          else if builtins.isPath arg then [ (escapeExeclineArg "${arg}") ]
+          else if lib.isDerivation arg then [ (escapeExeclineArg arg) ]
+          else if builtins.isList arg then [ "{" ] ++ builtins.concatMap go arg ++ [ "}" ]
+          else abort "escapeExecline can only hande nested lists of strings, was ${lib.generators.toPretty {} arg}";
+      in
+      builtins.concatMap go execlineList
+    );
+
+in
+escapeExecline
diff --git a/nix/getBins/default.nix b/nix/getBins/default.nix
new file mode 100644
index 000000000000..e354b176c885
--- /dev/null
+++ b/nix/getBins/default.nix
@@ -0,0 +1,51 @@
+{ lib, pkgs, depot, ... }:
+
+# Takes a derivation and a list of binary names
+# and returns an attribute set of `name -> path`.
+# The list can also contain renames in the form of
+# `{ use, as }`, which goes `as -> usePath`.
+#
+# It is usually used to construct an attrset `bins`
+# containing all the binaries required in a file,
+# similar to a simple import system.
+#
+# Example:
+#
+#   bins = getBins pkgs.hello [ "hello" ]
+#       // getBins pkgs.coreutils [ "printf" "ln" "echo" ]
+#       // getBins pkgs.execline
+#            [ { use = "if"; as = "execlineIf" } ]
+#       // getBins pkgs.s6-portable-utils
+#            [ { use = "s6-test"; as = "test" }
+#              { use = "s6-cat"; as = "cat" }
+#            ];
+#
+#   provides
+#     bins.{hello,printf,ln,echo,execlineIf,test,cat}
+#
+
+let
+  getBins = drv: xs:
+    let
+      f = x:
+        # TODO(Profpatsch): typecheck
+        let x' = if builtins.isString x then { use = x; as = x; } else x;
+        in {
+          name = x'.as;
+          value = "${lib.getBin drv}/bin/${x'.use}";
+        };
+    in
+    builtins.listToAttrs (builtins.map f xs);
+
+
+  tests = import ./tests.nix {
+    inherit getBins;
+    inherit (depot.nix) writeScriptBin;
+    inherit (depot.nix.runTestsuite) assertEq it runTestsuite;
+  };
+
+in
+{
+  __functor = _: getBins;
+  inherit tests;
+}
diff --git a/nix/getBins/tests.nix b/nix/getBins/tests.nix
new file mode 100644
index 000000000000..e0f5ab426364
--- /dev/null
+++ b/nix/getBins/tests.nix
@@ -0,0 +1,40 @@
+{ writeScriptBin, assertEq, it, runTestsuite, getBins }:
+
+let
+  drv = writeScriptBin "hello" "it’s me";
+  drv2 = writeScriptBin "goodbye" "tschau";
+
+  bins = getBins drv [
+    "hello"
+    { use = "hello"; as = "also-hello"; }
+  ]
+  // getBins drv2 [ "goodbye" ]
+  ;
+
+  simple = it "path is equal to the executable name" [
+    (assertEq "path"
+      bins.hello
+      "${drv}/bin/hello")
+    (assertEq "content"
+      (builtins.readFile bins.hello)
+      "it’s me")
+  ];
+
+  useAs = it "use/as can be used to rename attributes" [
+    (assertEq "path"
+      bins.also-hello
+      "${drv}/bin/hello")
+  ];
+
+  secondDrv = it "by merging attrsets you can build up bins" [
+    (assertEq "path"
+      bins.goodbye
+      "${drv2}/bin/goodbye")
+  ];
+
+in
+runTestsuite "getBins" [
+  simple
+  useAs
+  secondDrv
+]
diff --git a/nix/mergePatch/default.nix b/nix/mergePatch/default.nix
new file mode 100644
index 000000000000..d56106925a65
--- /dev/null
+++ b/nix/mergePatch/default.nix
@@ -0,0 +1,192 @@
+{ lib, depot, ... }:
+/*
+  JSON Merge-Patch for nix
+  Spec: https://tools.ietf.org/html/rfc7396
+
+  An algorithm for changing and removing fields in nested objects.
+
+  For example, given the following original document:
+
+  {
+    a = "b";
+    c = {
+      d = "e";
+      f = "g";
+    };
+  }
+
+  Changing the value of `a` and removing `f` can be achieved by merging the patch
+
+  {
+    a = "z";
+    c.f = null;
+  }
+
+  which results in
+
+  {
+    a = "z";
+    c = {
+      d = "e";
+    };
+  }
+
+  Pseudo-code:
+  define MergePatch(Target, Patch):
+      if Patch is an Object:
+        if Target is not an Object:
+          Target = {} # Ignore the contents and set it to an empty Object
+        for each Name/Value pair in Patch:
+          if Value is null:
+            if Name exists in Target:
+              remove the Name/Value pair from Target
+          else:
+            Target[Name] = MergePatch(Target[Name], Value)
+        return Target
+      else:
+        return Patch
+*/
+
+let
+  foldlAttrs = op: init: attrs:
+    lib.foldl' op init
+      (lib.mapAttrsToList lib.nameValuePair attrs);
+
+  mergePatch = target: patch:
+    if lib.isAttrs patch
+    then
+      let target' = if lib.isAttrs target then target else { };
+      in foldlAttrs
+        (acc: patchEl:
+          if patchEl.value == null
+          then removeAttrs acc [ patchEl.name ]
+          else acc // {
+            ${patchEl.name} =
+              mergePatch
+                (acc.${patchEl.name} or "unnused")
+                patchEl.value;
+          })
+        target'
+        patch
+    else patch;
+
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    it
+    assertEq
+    ;
+
+  tests =
+    let
+      # example target from the RFC
+      testTarget = {
+        a = "b";
+        c = {
+          d = "e";
+          f = "g";
+        };
+      };
+      # example patch from the RFC
+      testPatch = {
+        a = "z";
+        c.f = null;
+      };
+      emptyPatch = it "the empty patch returns the original target" [
+        (assertEq "id"
+          (mergePatch testTarget { })
+          testTarget)
+      ];
+      nonAttrs = it "one side is a non-attrset value" [
+        (assertEq "target is a value means the value is replaced by the patch"
+          (mergePatch 42 testPatch)
+          (mergePatch { } testPatch))
+        (assertEq "patch is a value means it replaces target alltogether"
+          (mergePatch testTarget 42)
+          42)
+      ];
+      rfcExamples = it "the examples from the RFC" [
+        (assertEq "a subset is deleted and overwritten"
+          (mergePatch testTarget testPatch)
+          {
+            a = "z";
+            c = {
+              d = "e";
+            };
+          })
+        (assertEq "a more complicated example from the example section"
+          (mergePatch
+            {
+              title = "Goodbye!";
+              author = {
+                givenName = "John";
+                familyName = "Doe";
+              };
+              tags = [ "example" "sample" ];
+              content = "This will be unchanged";
+            }
+            {
+              title = "Hello!";
+              phoneNumber = "+01-123-456-7890";
+              author.familyName = null;
+              tags = [ "example" ];
+            })
+          {
+            title = "Hello!";
+            phoneNumber = "+01-123-456-7890";
+            author = {
+              givenName = "John";
+            };
+            tags = [ "example" ];
+            content = "This will be unchanged";
+          })
+      ];
+
+      rfcTests =
+        let
+          r = index: target: patch: res:
+            (assertEq "test number ${toString index}"
+              (mergePatch target patch)
+              res);
+        in
+        it "the test suite from the RFC" [
+          (r 1 { "a" = "b"; } { "a" = "c"; } { "a" = "c"; })
+          (r 2 { "a" = "b"; } { "b" = "c"; } { "a" = "b"; "b" = "c"; })
+          (r 3 { "a" = "b"; } { "a" = null; } { })
+          (r 4 { "a" = "b"; "b" = "c"; }
+            { "a" = null; }
+            { "b" = "c"; })
+          (r 5 { "a" = [ "b" ]; } { "a" = "c"; } { "a" = "c"; })
+          (r 6 { "a" = "c"; } { "a" = [ "b" ]; } { "a" = [ "b" ]; })
+          (r 7 { "a" = { "b" = "c"; }; }
+            { "a" = { "b" = "d"; "c" = null; }; }
+            { "a" = { "b" = "d"; }; })
+          (r 8 { "a" = [{ "b" = "c"; }]; }
+            { "a" = [ 1 ]; }
+            { "a" = [ 1 ]; })
+          (r 9 [ "a" "b" ] [ "c" "d" ] [ "c" "d" ])
+          (r 10 { "a" = "b"; } [ "c" ] [ "c" ])
+          (r 11 { "a" = "foo"; } null null)
+          (r 12 { "a" = "foo"; } "bar" "bar")
+          (r 13 { "e" = null; } { "a" = 1; } { "e" = null; "a" = 1; })
+          (r 14 [ 1 2 ]
+            { "a" = "b"; "c" = null; }
+            { "a" = "b"; })
+          (r 15 { }
+            { "a" = { "bb" = { "ccc" = null; }; }; }
+            { "a" = { "bb" = { }; }; })
+        ];
+
+    in
+    runTestsuite "mergePatch" [
+      emptyPatch
+      nonAttrs
+      rfcExamples
+      rfcTests
+    ];
+
+in
+{
+  __functor = _: mergePatch;
+
+  inherit tests;
+}
diff --git a/nix/netstring/attrsToKeyValList.nix b/nix/netstring/attrsToKeyValList.nix
new file mode 100644
index 000000000000..c854b5695502
--- /dev/null
+++ b/nix/netstring/attrsToKeyValList.nix
@@ -0,0 +1,33 @@
+{ depot, lib, ... }:
+
+# Convert an attrset of strings to a list of key/value netstring pairs.
+# A good minimally viable json replacement if all you need is to iterate.
+# You can use e.g. `forstdin -Ed '' item` in execline to split the items
+# and then get the key and value via `multidefine -d '' $item { key value }`
+#
+# Example:
+#   { foo = "bar"; x = "abc"; }
+#   => "12:3:foo,3:bar,,10:1:x,3:abc,,"
+#
+# Example with runExecline:
+#   nix.runExecline "test" {
+#     stdin = nix.netstring.attrsToKeyValList {
+#       foo = "bar";
+#       x = "abc";
+#     };
+#   } [
+#     "forstdin" "-Ed" "" "item"
+#     "multidefine" "-d" "" "$item" [ "key" "value" ]
+#     "${pkgs.coreutils}/bin/echo" "\${key} -> \${value}"
+#   ]
+
+#   will print:
+#     foo -> bar
+#     x -> abc
+attrs:
+lib.concatStrings
+  (lib.mapAttrsToList
+    (k: v: depot.nix.netstring.fromString
+      (depot.nix.netstring.fromString k
+        + depot.nix.netstring.fromString v))
+    attrs)
diff --git a/nix/netstring/fromString.nix b/nix/netstring/fromString.nix
new file mode 100644
index 000000000000..dcd688a8b0b3
--- /dev/null
+++ b/nix/netstring/fromString.nix
@@ -0,0 +1,10 @@
+{ ... }:
+# convert any nix string into a netstring
+# (prefixed by its length) according to https://en.wikipedia.org/wiki/Netstring
+#
+# Examples:
+#   netstring.fromString "foo"
+#   => "3:foo,"
+#   netstring.fromString ""
+#   => "0:,"
+s: "${toString (builtins.stringLength s)}:${s},"
diff --git a/nix/nint/OWNERS b/nix/nint/OWNERS
new file mode 100644
index 000000000000..f16dd105d761
--- /dev/null
+++ b/nix/nint/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - sterni
diff --git a/nix/nint/README.md b/nix/nint/README.md
new file mode 100644
index 000000000000..369a8276199a
--- /dev/null
+++ b/nix/nint/README.md
@@ -0,0 +1,93 @@
+# nint — Nix INTerpreter
+
+`nint` is a shebang-compatible interpreter for nix. It is currently
+implemented as a fairly trivial wrapper around `nix-instantiate --eval`.
+It allows running nix expressions as command line tools, provided they
+conform to the following calling convention:
+
+* Every nix script needs to evaluate to a function which takes an
+  attribute set as its single argument. Ideally a set pattern with
+  an ellipsis should be used. By default `nint` passes the following
+  arguments:
+
+  * `currentDir`: the current working directory as a nix path
+  * `argv`: a list of arguments to the invocation, including the
+    program name at `builtins.head argv`.
+  * Extra arguments can be manually passed as described below.
+
+* The return value must either be
+
+  * A string which is rendered to `stdout`.
+
+  * An attribute set with the following optional attributes:
+
+    * `stdout`: A string that's rendered to `stdout`
+    * `stderr`: A string that's rendered to `stderr`
+    * `exit`: A number which is used as an exit code.
+      If missing, nint always exits with 0 (or equivalent).
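+
+For example, a script using the attribute set form could look like this
+(sketch):
+
+```nix
+{ currentDir, argv, ... }:
+{
+  stdout = "all good\n";
+  exit = 0;
+}
+```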
+
+## Usage
+
+```
+nint [ --arg ARG VALUE … ] script.nix [ ARGS … ]
+```
+
+Instead of `--arg`, `--argstr` can also be used. They both work
+like the flags of the same name for `nix-instantiate` and may
+be specified any number of times as long as they are passed
+*before* the nix expression to run.
+
+Below is a shebang which also passes `depot` as an argument
+(note the usage of `env -S` to get around the shebang limitation
+to two arguments).
+
+```nix
+#!/usr/bin/env -S nint --arg depot /path/to/depot
+```
+
+## Limitations
+
+* No side effects except for writing to `stdout`.
+
+* Output is not streaming, i.e. even if the output is incrementally
+  calculated, nothing will be printed until the full output is available.
+  With plain nix strings we can't do better anyway.
+
+* Limited error handling if evaluation of the script itself fails; in that
+  case there is no way to control the exit code etc.
+
+Some of these limitations may be possible to address in the future by using
+an alternative nix interpreter and a more elaborate calling convention.
+
+## Example
+
+Below is a (very simple) implementation of a `ls(1)`-like program in nix:
+
+```nix
+#!/usr/bin/env nint
+{ currentDir, argv, ... }:
+
+let
+  lib = import <nixpkgs/lib>;
+
+  dirs =
+    let
+      args = builtins.tail argv;
+    in
+      if args == []
+      then [ currentDir ]
+      else args;
+
+  makeAbsolute = p:
+    if builtins.isPath p
+    then p
+    else if builtins.match "^/.*" p != null
+    then p
+    else "${toString currentDir}/${p}";
+in
+
+  lib.concatStringsSep "\n"
+    (lib.flatten
+      (builtins.map
+        (d: (builtins.attrNames (builtins.readDir (makeAbsolute d))))
+        dirs)) + "\n"
+```
diff --git a/nix/nint/default.nix b/nix/nint/default.nix
new file mode 100644
index 000000000000..0087fc041603
--- /dev/null
+++ b/nix/nint/default.nix
@@ -0,0 +1,16 @@
+{ depot, pkgs, ... }:
+
+let
+  inherit (depot.nix.writers)
+    rustSimpleBin
+    ;
+in
+
+rustSimpleBin
+{
+  name = "nint";
+  dependencies = [
+    depot.third_party.rust-crates.serde_json
+  ];
+}
+  (builtins.readFile ./nint.rs)
diff --git a/nix/nint/nint.rs b/nix/nint/nint.rs
new file mode 100644
index 000000000000..abb0153c3ad2
--- /dev/null
+++ b/nix/nint/nint.rs
@@ -0,0 +1,149 @@
+extern crate serde_json;
+
+use serde_json::Value;
+use std::convert::TryFrom;
+use std::ffi::OsString;
+use std::io::{stderr, stdout, Error, ErrorKind, Write};
+use std::os::unix::ffi::{OsStrExt, OsStringExt};
+use std::process::Command;
+
+fn render_nix_string(s: &OsString) -> OsString {
+    let mut rendered = Vec::new();
+
+    rendered.extend(b"\"");
+
+    for b in s.as_os_str().as_bytes() {
+        match char::from(*b) {
+            '\"' => rendered.extend(b"\\\""),
+            '\\' => rendered.extend(b"\\\\"),
+            '$' => rendered.extend(b"\\$"),
+            _ => rendered.push(*b),
+        }
+    }
+
+    rendered.extend(b"\"");
+
+    OsString::from_vec(rendered)
+}
+
+fn render_nix_list(arr: &[OsString]) -> OsString {
+    let mut rendered = Vec::new();
+
+    rendered.extend(b"[ ");
+
+    for el in arr {
+        rendered.extend(render_nix_string(el).as_os_str().as_bytes());
+        rendered.extend(b" ");
+    }
+
+    rendered.extend(b"]");
+
+    OsString::from_vec(rendered)
+}
+
+/// Slightly overkill helper macro which takes a `Map<String, Value>` obtained
+/// from `Value::Object` and an output name (`stderr` or `stdout`) as an
+/// identifier. If a value exists for the given output in the object it gets
+/// written to the appropriate output.
+macro_rules! handle_set_output {
+    ($map_name:ident, $output_name:ident) => {
+        match $map_name.get(stringify!($output_name)) {
+            Some(Value::String(s)) => $output_name().write_all(s.as_bytes()),
+            Some(_) => Err(Error::new(
+                ErrorKind::Other,
+                format!("Attribute {} must be a string!", stringify!($output_name)),
+            )),
+            None => Ok(()),
+        }
+    };
+}
+
+fn main() -> std::io::Result<()> {
+    let mut nix_args = Vec::new();
+
+    let mut args = std::env::args_os();
+    let mut in_args = true;
+
+    let mut argv: Vec<OsString> = Vec::new();
+
+    // skip argv[0]
+    args.next();
+
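+    // Arguments before the first non-flag argument are nint's own flags and
+    // are forwarded to nix-instantiate; everything from the script path
+    // onwards is collected as the script's argv.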
+    loop {
+        let arg = match args.next() {
+            Some(a) => a,
+            None => break,
+        };
+
+        if !arg.to_str().map(|s| s.starts_with("-")).unwrap_or(false) {
+            in_args = false;
+        }
+
+        if in_args {
+            match arg.to_str() {
+                Some("--arg") | Some("--argstr") => {
+                    nix_args.push(arg);
+                    nix_args.push(args.next().unwrap());
+                    nix_args.push(args.next().unwrap());
+                    Ok(())
+                }
+                _ => Err(Error::new(ErrorKind::Other, "unknown argument")),
+            }?
+        } else {
+            argv.push(arg);
+        }
+    }
+
+    if argv.len() < 1 {
+        Err(Error::new(ErrorKind::Other, "missing argv"))
+    } else {
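+        // Assemble the nix-instantiate invocation implementing the calling
+        // convention: pass the working directory and argv to the script and
+        // evaluate it strictly, rendering the result as JSON.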
+        let cd = std::env::current_dir()?.into_os_string();
+
+        nix_args.push(OsString::from("--arg"));
+        nix_args.push(OsString::from("currentDir"));
+        nix_args.push(cd);
+
+        nix_args.push(OsString::from("--arg"));
+        nix_args.push(OsString::from("argv"));
+        nix_args.push(render_nix_list(&argv[..]));
+
+        nix_args.push(OsString::from("--eval"));
+        nix_args.push(OsString::from("--strict"));
+        nix_args.push(OsString::from("--json"));
+
+        nix_args.push(argv[0].clone());
+
+        let run = Command::new("nix-instantiate").args(nix_args).output()?;
+
+        match serde_json::from_slice(&run.stdout[..]) {
+            Ok(Value::String(s)) => stdout().write_all(s.as_bytes()),
+            Ok(Value::Object(m)) => {
+                handle_set_output!(m, stdout)?;
+                handle_set_output!(m, stderr)?;
+
+                match m.get("exit") {
+                    Some(Value::Number(n)) => {
+                        let code = n.as_i64().and_then(|v| i32::try_from(v).ok());
+
+                        match code {
+                            Some(i) => std::process::exit(i),
+                            None => {
+                                Err(Error::new(ErrorKind::Other, "Attribute exit is not an i32"))
+                            }
+                        }
+                    }
+                    Some(_) => Err(Error::new(ErrorKind::Other, "exit must be a number")),
+                    None => Ok(()),
+                }
+            }
+            Ok(_) => Err(Error::new(
+                ErrorKind::Other,
+                "output must be a string or an object",
+            )),
+            _ => {
+                stderr().write_all(&run.stderr[..])?;
+                Err(Error::new(ErrorKind::Other, "internal nix error"))
+            }
+        }
+    }
+}
diff --git a/nix/readTree/README.md b/nix/readTree/README.md
new file mode 100644
index 000000000000..f8bbe2255e5e
--- /dev/null
+++ b/nix/readTree/README.md
@@ -0,0 +1,92 @@
+readTree
+========
+
+This is a Nix program that builds up an attribute set tree for a large
+repository based on the filesystem layout.
+
+It is in fact the tool that lays out the attribute set of this repository.
+
+As an example, consider a root (`.`) of a repository and a layout such as:
+
+```
+.
+├── third_party
+│   ├── default.nix
+│   └── rustpkgs
+│       ├── aho-corasick.nix
+│       └── serde.nix
+└── tools
+    ├── cheddar
+    │   └── default.nix
+    └── roquefort.nix
+```
+
+When `readTree` is called on that tree, it will construct an attribute set with
+this shape:
+
+```nix
+{
+    tools = {
+        cheddar = ...;
+        roquefort = ...;
+    };
+
+    third_party = {
+        # the `default.nix` of this folder might have had arbitrary other
+        # attributes here, such as this:
+        favouriteColour = "orange";
+
+        rustpkgs = {
+            aho-corasick = ...;
+            serde = ...;
+        };
+    };
+}
+```
+
+Every imported Nix file that yields an attribute set will have a `__readTree`
+attribute merged into it, containing the node's location in the tree as a list
+of path components, as well as a `__readTreeChildren` attribute listing the
+child attribute names added by readTree.
+
+## Traversal logic
+
+`readTree` will follow any subdirectories of a tree and import all Nix files,
+with some exceptions:
+
+* A folder can declare that its children are off-limits by containing a
+  `.skip-subtree` file. Since the content of the file is not checked, it can be
+  useful to leave a note for a human in the file.
+* If a folder contains a `default.nix` file, no *sibling* Nix files will be
+  imported; children are, however, traversed as normal.
+* If a folder contains a `default.nix` it is loaded and, if it evaluates to a
+  set, *merged* with the children. If it evaluates to anything else the children
+  are *not traversed*.
+* The `default.nix` of the top-level folder on which readTree is
+  called is **not** read to avoid infinite recursion (as, presumably,
+  this file is where readTree itself is called).
+
+Traversal is lazy; `readTree` will only build up the tree as requested. This
+currently has the downside that directories with no importable files end up in
+the tree as empty nodes (`{}`).
+
+## Import structure
+
+`readTree` is called with an argument set containing a few parameters (a
+minimal example call is shown below the list):
+
+* `path`: Initial path at which to start the traversal.
+* `args`: Arguments to pass to all imports.
+* `filter`: (optional) A function to filter the argument set on each
+  import based on the location in the tree. This can be used to, for
+  example, implement a "visibility" system inside of a tree.
+* `scopedArgs`: (optional) An argument set that is passed to all
+  imported files via `builtins.scopedImport`. This will forcefully
+  override the given values in the import scope, use with care!
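+
+A minimal call, sketched here against a hypothetical repository layout, could
+look like this:
+
+```nix
+let readTree = import ./nix/readTree { };
+in readTree {
+  path = ./.;
+  args = { pkgs = import <nixpkgs> { }; };
+}
+```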
+
+The package headers in this repository follow the form `{ pkgs, ... }:` where
+`pkgs` is a fixed-point of the entire package tree (see the `default.nix` at the
+root of the depot).
+
+In theory `readTree` can pass arguments of different shapes, but I have found
+this to be a good solution for the most part.
+
+Note that `readTree` does not currently make functions overridable, though it is
+feasible that it could do that in the future.
diff --git a/nix/readTree/default.nix b/nix/readTree/default.nix
new file mode 100644
index 000000000000..ba3363d8d69c
--- /dev/null
+++ b/nix/readTree/default.nix
@@ -0,0 +1,284 @@
+# Copyright (c) 2019 Vincent Ambo
+# Copyright (c) 2020-2021 The TVL Authors
+# SPDX-License-Identifier: MIT
+#
+# Provides a function to automatically read a filesystem structure
+# into a Nix attribute set.
+#
+# Called with an attribute set taking the following arguments:
+#
+#   path: Path to a directory from which to start reading the tree.
+#
+#   args: Argument set to pass to each imported file.
+#
+#   filter: Function to filter `args` based on the tree location. This should
+#           be a function of the form `args -> location -> args`, where the
+#           location is a list of strings representing the path components of
+#           the current readTree target. Optional.
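+#
+#   scopedArgs: Argument set passed to all imported files via
+#               builtins.scopedImport, forcibly overriding values in the
+#               import scope. Optional.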
+{ ... }:
+
+let
+  inherit (builtins)
+    attrNames
+    concatMap
+    concatStringsSep
+    elem
+    elemAt
+    filter
+    hasAttr
+    head
+    isAttrs
+    listToAttrs
+    map
+    match
+    readDir
+    substring;
+
+  argsWithPath = args: parts:
+    let meta.locatedAt = parts;
+    in meta // (if isAttrs args then args else args meta);
+
+  readDirVisible = path:
+    let
+      children = readDir path;
+      isVisible = f: f == ".skip-subtree" || (substring 0 1 f) != ".";
+      names = filter isVisible (attrNames children);
+    in
+    listToAttrs (map
+      (name: {
+        inherit name;
+        value = children.${name};
+      })
+      names);
+
+  # Create a mark containing the location of this attribute and
+  # a list of all child attribute names added by readTree.
+  marker = parts: children: {
+    __readTree = parts;
+    __readTreeChildren = builtins.attrNames children;
+  };
+
+  # Create a label from a target's tree location.
+  mkLabel = target:
+    let label = concatStringsSep "/" target.__readTree;
+    in if target ? __subtarget
+    then "${label}:${target.__subtarget}"
+    else label;
+
+  # Merge two attribute sets, but place attributes in `passthru` via
+  # `overrideAttrs` for derivation targets that support it.
+  merge = a: b:
+    if a ? overrideAttrs
+    then
+      a.overrideAttrs
+        (prev: {
+          passthru = (prev.passthru or { }) // b;
+        })
+    else a // b;
+
+  # Import a file and enforce our calling convention
+  importFile = args: scopedArgs: path: parts: filter:
+    let
+      importedFile =
+        if scopedArgs != { }
+        then builtins.scopedImport scopedArgs path
+        else import path;
+      pathType = builtins.typeOf importedFile;
+    in
+    if pathType != "lambda"
+    then builtins.throw "readTree: trying to import ${toString path}, but it’s a ${pathType}, you need to make it a function like { depot, pkgs, ... }"
+    else importedFile (filter parts (argsWithPath args parts));
+
+  nixFileName = file:
+    let res = match "(.*)\\.nix" file;
+    in if res == null then null else head res;
+
+  readTree = { args, initPath, rootDir, parts, argsFilter, scopedArgs }:
+    let
+      dir = readDirVisible initPath;
+      joinChild = c: initPath + ("/" + c);
+
+      self =
+        if rootDir
+        then { __readTree = [ ]; }
+        else importFile args scopedArgs initPath parts argsFilter;
+
+      # Import subdirectories of the current one, unless the special
+      # `.skip-subtree` file exists which makes readTree ignore the
+      # children.
+      #
+      # This file can optionally contain information on why the tree
+      # should be ignored, but its content is not inspected by
+      # readTree
+      filterDir = f: dir."${f}" == "directory";
+      children = if hasAttr ".skip-subtree" dir then [ ] else
+      map
+        (c: {
+          name = c;
+          value = readTree {
+            inherit argsFilter scopedArgs;
+            args = args;
+            initPath = (joinChild c);
+            rootDir = false;
+            parts = (parts ++ [ c ]);
+          };
+        })
+        (filter filterDir (attrNames dir));
+
+      # Import Nix files
+      nixFiles =
+        if hasAttr ".skip-subtree" dir then [ ]
+        else filter (f: f != null) (map nixFileName (attrNames dir));
+      nixChildren = map
+        (c:
+          let
+            p = joinChild (c + ".nix");
+            childParts = parts ++ [ c ];
+            imported = importFile args scopedArgs p childParts argsFilter;
+          in
+          {
+            name = c;
+            value =
+              if isAttrs imported
+              then merge imported (marker childParts { })
+              else imported;
+          })
+        nixFiles;
+
+      nodeValue = if dir ? "default.nix" then self else { };
+
+      allChildren = listToAttrs (
+        if dir ? "default.nix"
+        then children
+        else nixChildren ++ children
+      );
+
+    in
+    if isAttrs nodeValue
+    then merge nodeValue (allChildren // (marker parts allChildren))
+    else nodeValue;
+
+  # Helper function to fetch subtargets from a target. This is a
+  # temporary helper to warn on the use of the `meta.targets`
+  # attribute, which is deprecated in favour of `meta.ci.targets`.
+  subtargets = node:
+    let targets = (node.meta.targets or [ ]) ++ (node.meta.ci.targets or [ ]);
+    in if node ? meta.targets then
+      builtins.trace ''
+        Warning: The meta.targets attribute is deprecated.
+
+        Please move the subtargets of //${mkLabel node} to the
+        meta.ci.targets attribute.
+        
+      ''
+        targets else targets;
+
+  # Function which can be used to find all readTree targets within an
+  # attribute set.
+  #
+  # This function will gather physical targets, that is targets which
+  # correspond directly to a location in the repository, as well as
+  # subtargets (specified in the meta.ci.targets attribute of a node).
+  #
+  # This can be used to discover targets for inclusion in CI
+  # pipelines.
+  #
+  # Called with the arguments:
+  #
+  #   eligible: Function to determine whether the given derivation
+  #             should be included in the build.
+  gather = eligible: node:
+    if node ? __readTree then
+    # Include the node itself if it is eligible.
+      (if eligible node then [ node ] else [ ])
+      # Include eligible children of the node
+      ++ concatMap (gather eligible) (map (attr: node."${attr}") node.__readTreeChildren)
+      # Include specified sub-targets of the node
+      ++ filter eligible (map
+        (k: (node."${k}" or { }) // {
+          # Keep the same tree location, but explicitly mark this
+          # node as a subtarget.
+          __readTree = node.__readTree;
+          __readTreeChildren = [ ];
+          __subtarget = k;
+        })
+        (subtargets node))
+    else [ ];
+
+  # Determine whether a given value is a derivation.
+  # Copied from nixpkgs/lib for cases where lib is not available yet.
+  isDerivation = x: isAttrs x && x ? type && x.type == "derivation";
+in
+{
+  inherit gather mkLabel;
+
+  __functor = _:
+    { path
+    , args
+    , filter ? (_parts: x: x)
+    , scopedArgs ? { }
+    }:
+    readTree {
+      inherit args scopedArgs;
+      argsFilter = filter;
+      initPath = path;
+      rootDir = true;
+      parts = [ ];
+    };
+
+  # In addition to readTree itself, some functionality is exposed that
+  # is useful for users of readTree.
+
+  # Create a readTree filter disallowing access to the specified
+  # top-level folder in the repository, except for specific exceptions
+  # specified by their (full) paths.
+  #
+  # Called with the arguments:
+  #
+  #   folder: Name of the restricted top-level folder (e.g. 'experimental')
+  #
+  #   exceptions: List of readTree parts (e.g. [ [ "services" "some-app" ] ]),
+  #               which should be able to access the restricted folder.
+  #
+  #   reason: Textual explanation for the restriction (included in errors)
+  restrictFolder = { folder, exceptions ? [ ], reason }: parts: args:
+    if (elemAt parts 0) == folder || elem parts exceptions
+    then args
+    else args // {
+      depot = args.depot // {
+        "${folder}" = throw ''
+          Access to targets under //${folder} is not permitted from
+          other repository paths. Specific exceptions are configured
+          at the top-level.
+
+          ${reason}
+          At location: ${builtins.concatStringsSep "." parts}
+        '';
+      };
+    };
+
+  # This definition of fix is identical to <nixpkgs>.lib.fix, but is
+  # provided here for cases where readTree is used before nixpkgs can
+  # be imported.
+  #
+  # It is often required to create the args attribute set.
+  fix = f: let x = f x; in x;
+
+  # Takes an attribute set and adds a meta.ci.targets attribute to it
+  # which contains all direct children of the attribute set which are
+  # derivations.
+  #
+  # Type: attrs -> attrs
+  drvTargets = attrs:
+    attrs // {
+      # preserve .meta from original attrs
+      meta = (attrs.meta or { }) // {
+        # preserve .meta.ci (except .targets) from original attrs
+        ci = (attrs.meta.ci or { }) // {
+          targets = builtins.filter
+            (x: isDerivation attrs."${x}")
+            (builtins.attrNames attrs);
+        };
+      };
+    };
+}
diff --git a/nix/readTree/tests/.skip-subtree b/nix/readTree/tests/.skip-subtree
new file mode 100644
index 000000000000..46952a4ca635
--- /dev/null
+++ b/nix/readTree/tests/.skip-subtree
@@ -0,0 +1 @@
+These tests call their own readTree, so the toplevel one shouldn’t bother
diff --git a/nix/readTree/tests/default.nix b/nix/readTree/tests/default.nix
new file mode 100644
index 000000000000..fcca141714a8
--- /dev/null
+++ b/nix/readTree/tests/default.nix
@@ -0,0 +1,129 @@
+{ depot, lib, ... }:
+
+let
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    it
+    assertEq
+    assertThrows
+    ;
+
+  tree-ex = depot.nix.readTree {
+    path = ./test-example;
+    args = { };
+  };
+
+  example = it "corresponds to the README example" [
+    (assertEq "third_party attrset"
+      (lib.isAttrs tree-ex.third_party
+        && (! lib.isDerivation tree-ex.third_party))
+      true)
+    (assertEq "third_party attrset other attribute"
+      tree-ex.third_party.favouriteColour
+      "orange")
+    (assertEq "rustpkgs attrset aho-corasick"
+      tree-ex.third_party.rustpkgs.aho-corasick
+      "aho-corasick")
+    (assertEq "rustpkgs attrset serde"
+      tree-ex.third_party.rustpkgs.serde
+      "serde")
+    (assertEq "tools cheddar"
+      "cheddar"
+      tree-ex.tools.cheddar)
+    (assertEq "tools roquefort"
+      tree-ex.tools.roquefort
+      "roquefort")
+  ];
+
+  tree-tl = depot.nix.readTree {
+    path = ./test-tree-traversal;
+    args = { };
+  };
+
+  traversal-logic = it "corresponds to the traversal logic in the README" [
+    (assertEq "skip subtree default.nix is read"
+      tree-tl.skip-subtree.but
+      "the default.nix is still read")
+    (assertEq "skip subtree a/default.nix is skipped"
+      (tree-tl.skip-subtree ? a)
+      false)
+    (assertEq "skip subtree b/c.nix is skipped"
+      (tree-tl.skip-subtree ? b)
+      false)
+    (assertEq "skip subtree a/default.nix would be read without .skip-subtree"
+      (tree-tl.no-skip-subtree.a)
+      "am I subtree yet?")
+    (assertEq "skip subtree b/c.nix would be read without .skip-subtree"
+      (tree-tl.no-skip-subtree.b.c)
+      "cool")
+
+    (assertEq "default.nix attrset is merged with siblings"
+      tree-tl.default-nix.no
+      "siblings should be read")
+    (assertEq "default.nix means sibling isn’t read"
+      (tree-tl.default-nix ? sibling)
+      false)
+    (assertEq "default.nix means subdirs are still read and merged into default.nix"
+      (tree-tl.default-nix.subdir.a)
+      "but I’m picked up")
+
+    (assertEq "default.nix can be not an attrset"
+      tree-tl.default-nix.no-merge
+      "I’m not merged with any children")
+    (assertEq "default.nix is not an attrset -> children are not merged"
+      (tree-tl.default-nix.no-merge ? subdir)
+      false)
+
+    (assertEq "default.nix can contain a derivation"
+      (lib.isDerivation tree-tl.default-nix.can-be-drv)
+      true)
+    (assertEq "Even if default.nix is a derivation, children are traversed and merged"
+      tree-tl.default-nix.can-be-drv.subdir.a
+      "Picked up through the drv")
+    (assertEq "default.nix drv is not changed by readTree"
+      tree-tl.default-nix.can-be-drv
+      (import ./test-tree-traversal/default-nix/can-be-drv/default.nix { }))
+  ];
+
+  # these each call readTree themselves because the throws have to happen inside assertThrows
+  wrong = it "cannot read these files and will complain" [
+    (assertThrows "this file is not a function"
+      (depot.nix.readTree {
+        path = ./test-wrong-not-a-function;
+        args = { };
+      }).not-a-function)
+    # can’t test for that, assertThrows can’t catch this error
+    # (assertThrows "this file is a function but doesn’t have dots"
+    #   (depot.nix.readTree {} ./test-wrong-no-dots).no-dots-in-function)
+  ];
+
+  read-markers = depot.nix.readTree {
+    path = ./test-marker;
+    args = { };
+  };
+
+  assertMarkerByPath = path:
+    assertEq "${lib.concatStringsSep "." path} is marked correctly"
+      (lib.getAttrFromPath path read-markers).__readTree
+      path;
+
+  markers = it "marks nodes correctly" [
+    (assertMarkerByPath [ "directory-marked" ])
+    (assertMarkerByPath [ "directory-marked" "nested" ])
+    (assertMarkerByPath [ "file-children" "one" ])
+    (assertMarkerByPath [ "file-children" "two" ])
+    (assertEq "nix file children are marked correctly"
+      read-markers.file-children.__readTreeChildren [ "one" "two" ])
+    (assertEq "directory children are marked correctly"
+      read-markers.directory-marked.__readTreeChildren [ "nested" ])
+    (assertEq "absence of children is marked"
+      read-markers.directory-marked.nested.__readTreeChildren [ ])
+  ];
+
+in
+runTestsuite "readTree" [
+  example
+  traversal-logic
+  wrong
+  markers
+]
diff --git a/nix/readTree/tests/test-example/third_party/default.nix b/nix/readTree/tests/test-example/third_party/default.nix
new file mode 100644
index 000000000000..27d87242189d
--- /dev/null
+++ b/nix/readTree/tests/test-example/third_party/default.nix
@@ -0,0 +1,5 @@
+{ ... }:
+
+{
+  favouriteColour = "orange";
+}
diff --git a/nix/readTree/tests/test-example/third_party/rustpkgs/aho-corasick.nix b/nix/readTree/tests/test-example/third_party/rustpkgs/aho-corasick.nix
new file mode 100644
index 000000000000..f964fa583c43
--- /dev/null
+++ b/nix/readTree/tests/test-example/third_party/rustpkgs/aho-corasick.nix
@@ -0,0 +1 @@
+{ ... }: "aho-corasick"
diff --git a/nix/readTree/tests/test-example/third_party/rustpkgs/serde.nix b/nix/readTree/tests/test-example/third_party/rustpkgs/serde.nix
new file mode 100644
index 000000000000..54b3c0503f12
--- /dev/null
+++ b/nix/readTree/tests/test-example/third_party/rustpkgs/serde.nix
@@ -0,0 +1 @@
+{ ... }: "serde"
diff --git a/nix/readTree/tests/test-example/tools/cheddar/default.nix b/nix/readTree/tests/test-example/tools/cheddar/default.nix
new file mode 100644
index 000000000000..a1919442d86d
--- /dev/null
+++ b/nix/readTree/tests/test-example/tools/cheddar/default.nix
@@ -0,0 +1 @@
+{ ... }: "cheddar"
diff --git a/nix/readTree/tests/test-example/tools/roquefort.nix b/nix/readTree/tests/test-example/tools/roquefort.nix
new file mode 100644
index 000000000000..03c7492bb6b1
--- /dev/null
+++ b/nix/readTree/tests/test-example/tools/roquefort.nix
@@ -0,0 +1 @@
+{ ... }: "roquefort"
diff --git a/nix/readTree/tests/test-marker/directory-marked/default.nix b/nix/readTree/tests/test-marker/directory-marked/default.nix
new file mode 100644
index 000000000000..5bd3e36b5397
--- /dev/null
+++ b/nix/readTree/tests/test-marker/directory-marked/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+{ }
diff --git a/nix/readTree/tests/test-marker/directory-marked/nested/default.nix b/nix/readTree/tests/test-marker/directory-marked/nested/default.nix
new file mode 100644
index 000000000000..5bd3e36b5397
--- /dev/null
+++ b/nix/readTree/tests/test-marker/directory-marked/nested/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+{ }
diff --git a/nix/readTree/tests/test-marker/file-children/one.nix b/nix/readTree/tests/test-marker/file-children/one.nix
new file mode 100644
index 000000000000..5bd3e36b5397
--- /dev/null
+++ b/nix/readTree/tests/test-marker/file-children/one.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+{ }
diff --git a/nix/readTree/tests/test-marker/file-children/two.nix b/nix/readTree/tests/test-marker/file-children/two.nix
new file mode 100644
index 000000000000..5bd3e36b5397
--- /dev/null
+++ b/nix/readTree/tests/test-marker/file-children/two.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+{ }
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/default.nix b/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/default.nix
new file mode 100644
index 000000000000..95d13d3c2750
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/default.nix
@@ -0,0 +1,7 @@
+{ ... }:
+derivation {
+  name = "im-a-drv";
+  system = builtins.currentSystem;
+  builder = "/bin/sh";
+  args = [ "-c" ''echo "" > $out'' ];
+}
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/subdir/a.nix b/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/subdir/a.nix
new file mode 100644
index 000000000000..2ee2d648f042
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/can-be-drv/subdir/a.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"Picked up through the drv"
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/default.nix b/nix/readTree/tests/test-tree-traversal/default-nix/default.nix
new file mode 100644
index 000000000000..6003b5398305
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/default.nix
@@ -0,0 +1,5 @@
+{ ... }:
+
+{
+  no = "siblings should be read";
+}
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/default.nix b/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/default.nix
new file mode 100644
index 000000000000..c469533fbe5b
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"I’m not merged with any children"
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/subdir/a.nix b/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/subdir/a.nix
new file mode 100644
index 000000000000..2008c2d2411f
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/no-merge/subdir/a.nix
@@ -0,0 +1 @@
+"not accessible since parent default.nix is not an attrset"
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/sibling.nix b/nix/readTree/tests/test-tree-traversal/default-nix/sibling.nix
new file mode 100644
index 000000000000..8c57f2c16137
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/sibling.nix
@@ -0,0 +1 @@
+"I’m left alone"
diff --git a/nix/readTree/tests/test-tree-traversal/default-nix/subdir/a.nix b/nix/readTree/tests/test-tree-traversal/default-nix/subdir/a.nix
new file mode 100644
index 000000000000..cf0ac2c8f3cf
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/default-nix/subdir/a.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"but I’m picked up"
diff --git a/nix/readTree/tests/test-tree-traversal/no-skip-subtree/a/default.nix b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/a/default.nix
new file mode 100644
index 000000000000..a586ce534ce2
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/a/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"am I subtree yet?"
diff --git a/nix/readTree/tests/test-tree-traversal/no-skip-subtree/b/c.nix b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/b/c.nix
new file mode 100644
index 000000000000..06216c417be9
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/b/c.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"cool"
diff --git a/nix/readTree/tests/test-tree-traversal/no-skip-subtree/default.nix b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/default.nix
new file mode 100644
index 000000000000..3d9f241cddb5
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/no-skip-subtree/default.nix
@@ -0,0 +1,5 @@
+{ ... }:
+
+{
+  but = "the default.nix is still read";
+}
diff --git a/nix/readTree/tests/test-tree-traversal/skip-subtree/.skip-subtree b/nix/readTree/tests/test-tree-traversal/skip-subtree/.skip-subtree
new file mode 100644
index 000000000000..87271ba5e101
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-subtree/.skip-subtree
@@ -0,0 +1 @@
+this file makes subdirs be skipped, I hope
diff --git a/nix/readTree/tests/test-tree-traversal/skip-subtree/a/default.nix b/nix/readTree/tests/test-tree-traversal/skip-subtree/a/default.nix
new file mode 100644
index 000000000000..a586ce534ce2
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-subtree/a/default.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"am I subtree yet?"
diff --git a/nix/readTree/tests/test-tree-traversal/skip-subtree/b/c.nix b/nix/readTree/tests/test-tree-traversal/skip-subtree/b/c.nix
new file mode 100644
index 000000000000..06216c417be9
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-subtree/b/c.nix
@@ -0,0 +1,3 @@
+{ ... }:
+
+"cool"
diff --git a/nix/readTree/tests/test-tree-traversal/skip-subtree/default.nix b/nix/readTree/tests/test-tree-traversal/skip-subtree/default.nix
new file mode 100644
index 000000000000..3d9f241cddb5
--- /dev/null
+++ b/nix/readTree/tests/test-tree-traversal/skip-subtree/default.nix
@@ -0,0 +1,5 @@
+{ ... }:
+
+{
+  but = "the default.nix is still read";
+}
diff --git a/nix/readTree/tests/test-wrong-no-dots/no-dots-in-function.nix b/nix/readTree/tests/test-wrong-no-dots/no-dots-in-function.nix
new file mode 100644
index 000000000000..4681253af831
--- /dev/null
+++ b/nix/readTree/tests/test-wrong-no-dots/no-dots-in-function.nix
@@ -0,0 +1,3 @@
+{}:
+
+"This is a function, but readTree wants to pass a bunch of arguments, and not having dots means we depend on exactly which arguments."
diff --git a/nix/readTree/tests/test-wrong-not-a-function/not-a-function.nix b/nix/readTree/tests/test-wrong-not-a-function/not-a-function.nix
new file mode 100644
index 000000000000..f46ee2a35565
--- /dev/null
+++ b/nix/readTree/tests/test-wrong-not-a-function/not-a-function.nix
@@ -0,0 +1 @@
+"This file needs to be a function, otherwise readTree doesn’t like it!"
diff --git a/nix/renderMarkdown/default.nix b/nix/renderMarkdown/default.nix
new file mode 100644
index 000000000000..8d6b31cfccca
--- /dev/null
+++ b/nix/renderMarkdown/default.nix
@@ -0,0 +1,8 @@
+# Render a Markdown file to HTML.
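+#
+# Hypothetical usage: depot.nix.renderMarkdown ./README.md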
+{ depot, pkgs, ... }:
+
+with depot.nix.yants;
+
+defun [ path drv ] (file: pkgs.runCommandNoCC "${file}.rendered.html" { } ''
+  cat ${file} | ${depot.tools.cheddar}/bin/cheddar --about-filter ${file} > $out
+'')
diff --git a/nix/runExecline/default.nix b/nix/runExecline/default.nix
new file mode 100644
index 000000000000..76fffdce7b0d
--- /dev/null
+++ b/nix/runExecline/default.nix
@@ -0,0 +1,31 @@
+{ depot, pkgs, lib, ... }:
+let
+  runExecline = import ./runExecline.nix {
+    inherit (pkgs) stdenv;
+    inherit (depot.nix) escapeExecline getBins;
+    inherit pkgs lib;
+  };
+
+  runExeclineLocal = name: args: execline:
+    runExecline name
+      (args // {
+        derivationArgs = args.derivationArgs or { } // {
+          preferLocalBuild = true;
+          allowSubstitutes = false;
+        };
+      })
+      execline;
+
+  tests = import ./tests.nix {
+    inherit runExecline runExeclineLocal;
+    inherit (depot.nix) getBins writeScript;
+    inherit (pkgs) stdenv coreutils;
+    inherit pkgs;
+  };
+
+in
+{
+  __functor = _: runExecline;
+  local = runExeclineLocal;
+  inherit tests;
+}
diff --git a/nix/runExecline/runExecline.nix b/nix/runExecline/runExecline.nix
new file mode 100644
index 000000000000..23b9a6330370
--- /dev/null
+++ b/nix/runExecline/runExecline.nix
@@ -0,0 +1,122 @@
+{ pkgs, stdenv, lib, getBins, escapeExecline }:
+
+# runExecline is a primitive building block
+# for writing non-kitchen sink builders.
+#
+# It’s conceptually similar to `runCommand`,
+# but instead of concatenating bash scripts left
+# and right, it actually *uses* the features of
+# `derivation`, passing things to `args`
+# and making it possible to overwrite the `builder`
+# in a sensible manner.
+#
+# Additionally, it provides a way to pass a nix string
+# to `stdin` of the build script.
+#
+# Similar to //nix/writeExecline, the passed script is
+# not a string, but a nested list of nix lists
+# representing execline blocks. Escaping is
+# done by the implementation; the user can just use
+# normal nix strings.
+#
+# Example:
+#
+#  runExecline "my-drv" { stdin = "hi!"; } [
+#    "importas" "out" "out"
+#    # this pipes stdout of s6-cat to $out
+#    # and s6-cat redirects from stdin to stdout
+#    "redirfd" "-w" "1" "$out" bins.s6-cat
+#  ]
+#
+# which creates a derivation with "hi!" in $out.
+#
+# See ./tests.nix for more examples.
+
+
+let
+  bins = getBins pkgs.execline [
+    "execlineb"
+    { use = "if"; as = "execlineIf"; }
+    "redirfd"
+    "importas"
+    "exec"
+  ]
+  // getBins pkgs.s6-portable-utils [
+    "s6-cat"
+    "s6-grep"
+    "s6-touch"
+    "s6-test"
+    "s6-chmod"
+  ];
+
+in
+
+# TODO: move name into the attrset
+name:
+{
+  # a string to pass as stdin to the execline script
+  stdin ? ""
+  # a program wrapping the actual execline invocation;
+  # should be in Bernstein-chaining style
+, builderWrapper ? bins.exec
+  # additional arguments to pass to the derivation
+, derivationArgs ? { }
+}:
+# the execline script as a nested list of string,
+# representing the blocks;
+# see docs of `escapeExecline`.
+execline:
+
+# those arguments can’t be overwritten
+assert !derivationArgs ? system;
+assert !derivationArgs ? name;
+assert !derivationArgs ? builder;
+assert !derivationArgs ? args;
+
+derivation (derivationArgs // {
+  # TODO(Profpatsch): what about cross?
+  inherit (stdenv) system;
+  inherit name;
+
+  # okay, `builtins.toFile` does not accept strings
+  # that reference drv outputs. This means we need
+  # to pass the script and stdin as envvar;
+  # this might clash with another passed envar,
+  # so we give it a long & unique name
+  _runExeclineScript = escapeExecline execline;
+  _runExeclineStdin = stdin;
+  passAsFile = [
+    "_runExeclineScript"
+    "_runExeclineStdin"
+  ] ++ derivationArgs.passAsFile or [ ];
+
+  # the default, exec acts as identity executable
+  builder = builderWrapper;
+
+  args = [
+    bins.importas # import script file as $script
+    "-ui" # drop the envvar afterwards
+    "script" # substitution name
+    "_runExeclineScriptPath" # passed script file
+
+    bins.importas # do the same for $stdin
+    "-ui"
+    "stdin"
+    "_runExeclineStdinPath"
+
+    bins.redirfd # now we
+    "-r" # read the file
+    "0" # into the stdin of execlineb
+    "$stdin" # that was given via stdin
+
+    bins.execlineb # the actual invocation
+    # TODO(Profpatsch): depending on the use-case, -S0 might not be enough
+    # in all use-cases, then a wrapper for execlineb arguments
+    # should be added (-P, -S, -s).
+    "-S0" # set $@ inside the execline script
+    "-W" # die on syntax error
+    "$script" # substituted by importas
+  ];
+})
diff --git a/nix/runExecline/tests.nix b/nix/runExecline/tests.nix
new file mode 100644
index 000000000000..f82b544224ee
--- /dev/null
+++ b/nix/runExecline/tests.nix
@@ -0,0 +1,117 @@
+{ stdenv
+, pkgs
+, runExecline
+, runExeclineLocal
+, getBins
+, writeScript
+  # https://www.mail-archive.com/skaware@list.skarnet.org/msg01256.html
+, coreutils
+}:
+
+let
+
+  bins = getBins coreutils [ "mv" ]
+    // getBins pkgs.execline [
+    "execlineb"
+    { use = "if"; as = "execlineIf"; }
+    "redirfd"
+    "importas"
+  ]
+    // getBins pkgs.s6-portable-utils [
+    "s6-chmod"
+    "s6-grep"
+    "s6-touch"
+    "s6-cat"
+    "s6-test"
+  ];
+
+  # execline block of depth 1
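+  # e.g. block [ "foo" "bar" ] == [ " foo" " bar" "" ]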
+  block = args: builtins.map (arg: " ${arg}") args ++ [ "" ];
+
+  # derivation that tests whether a given line exists
+  # in the given file. Does not use runExecline, because
+  # that should be tested after all.
+  fileHasLine = line: file: derivation {
+    name = "run-execline-test-file-${file.name}-has-line";
+    inherit (stdenv) system;
+    builder = bins.execlineIf;
+    args =
+      (block [
+        bins.redirfd
+        "-r"
+        "0"
+        file # read file to stdin
+        bins.s6-grep
+        "-F"
+        "-q"
+        line # and grep for the line
+      ])
+      ++ [
+        # if the block succeeded, touch $out
+        bins.importas
+        "-ui"
+        "out"
+        "out"
+        bins.s6-touch
+        "$out"
+      ];
+    preferLocalBuild = true;
+    allowSubstitutes = false;
+  };
+
+  # basic test that touches out
+  basic = runExeclineLocal "run-execline-test-basic"
+    { } [
+    "importas"
+    "-ui"
+    "out"
+    "out"
+    "${bins.s6-touch}"
+    "$out"
+  ];
+
+  # whether the stdin argument works as intended
+  stdin = fileHasLine "foo" (runExeclineLocal "run-execline-test-stdin"
+    {
+      stdin = "foo\nbar\nfoo";
+    } [
+    "importas"
+    "-ui"
+    "out"
+    "out"
+    # this pipes stdout of s6-cat to $out
+    # and s6-cat redirects from stdin to stdout
+    "redirfd"
+    "-w"
+    "1"
+    "$out"
+    bins.s6-cat
+  ]);
+
+
+  wrapWithVar = runExeclineLocal "run-execline-test-wrap-with-var"
+    {
+      builderWrapper = writeScript "var-wrapper" ''
+        #!${bins.execlineb} -S0
+        export myvar myvalue $@
+      '';
+    } [
+    "importas"
+    "-ui"
+    "v"
+    "myvar"
+    "if"
+    [ bins.s6-test "myvalue" "=" "$v" ]
+    "importas"
+    "out"
+    "out"
+    bins.s6-touch
+    "$out"
+  ];
+
+in
+[
+  basic
+  stdin
+  wrapWithVar
+]
diff --git a/nix/runTestsuite/default.nix b/nix/runTestsuite/default.nix
new file mode 100644
index 000000000000..8b02ed86d8ea
--- /dev/null
+++ b/nix/runTestsuite/default.nix
@@ -0,0 +1,199 @@
+{ lib, pkgs, depot, ... }:
+
+# Run a nix testsuite.
+#
+# The tests are simple assertions on the nix level,
+# and can use derivation outputs if import-from-derivation (IfD) is enabled.
+#
+# You build a testsuite by bundling assertions into
+# “it”s and then bundling the “it”s into a testsuite.
+#
+# Running the testsuite will abort evaluation if
+# any assertion fails.
+#
+# Example:
+#
+#   runTestsuite "myFancyTestsuite" [
+#     (it "does an assertion" [
+#       (assertEq "42 is equal to 42" "42" "42")
+#       (assertEq "also 23" 23 23)
+#     ])
+#     (it "frmbls the brlbr" [
+#       (assertEq "this will not hold" true false)
+#     ])
+#   ]
+#
+# will fail the second it group because true is not false.
+
+let
+  inherit (depot.nix.yants)
+    sum
+    struct
+    string
+    any
+    defun
+    list
+    drv
+    bool
+    ;
+
+  bins = depot.nix.getBins pkgs.coreutils [ "printf" ]
+    // depot.nix.getBins pkgs.s6-portable-utils [ "s6-touch" "s6-false" "s6-cat" ];
+
+  # Returns true if the given expression throws when `deepSeq`-ed
+  throws = expr:
+    !(builtins.tryEval (builtins.deepSeq expr { })).success;
+
+  # rewrite the builtins.partition result
+  # to use `ok` and `err` instead of `right` and `wrong`.
+  partitionTests = pred: xs:
+    let res = builtins.partition pred xs;
+    in {
+      ok = res.right;
+      err = res.wrong;
+    };
+
+  AssertErrorContext =
+    sum "AssertErrorContext" {
+      not-equal = struct "not-equal" {
+        left = any;
+        right = any;
+      };
+      should-throw = struct "should-throw" {
+        expr = any;
+      };
+      unexpected-throw = struct "unexpected-throw" { };
+    };
+
+  # The result of an assert,
+  # either it’s true (yep) or false (nope).
+  # If it's nope we return an additional context
+  # attribute which gives details on the failure
+  # depending on the type of assert performed.
+  AssertResult =
+    sum "AssertResult" {
+      yep = struct "yep" {
+        test = string;
+      };
+      nope = struct "nope" {
+        test = string;
+        context = AssertErrorContext;
+      };
+    };
+
+  # Result of an it. An it is a bunch of asserts
+  # bundled up with a good description of what is tested.
+  ItResult =
+    struct "ItResult" {
+      it-desc = string;
+      asserts = list AssertResult;
+    };
+
+  # If the given boolean is true return a positive AssertResult.
+  # If the given boolean is false return a negative AssertResult
+  # with the provided AssertErrorContext describing the failure.
+  #
+  # This function is intended as a generic assert to implement
+  # more assert types and is not exposed to the user.
+  assertBoolContext = defun [ AssertErrorContext string bool AssertResult ]
+    (context: desc: res:
+      if res
+      then { yep = { test = desc; }; }
+      else {
+        nope = {
+          test = desc;
+          inherit context;
+        };
+      });
+
+  # assert that left and right values are equal
+  assertEq = defun [ string any any AssertResult ]
+    (desc: left: right:
+      let
+        context = { not-equal = { inherit left right; }; };
+      in
+      assertBoolContext context desc (left == right));
+
+  # assert that the expression throws when `deepSeq`-ed
+  assertThrows = defun [ string any AssertResult ]
+    (desc: expr:
+      let
+        context = { should-throw = { inherit expr; }; };
+      in
+      assertBoolContext context desc (throws expr));
+
+  # assert that the expression does not throw when `deepSeq`-ed
+  assertDoesNotThrow = defun [ string any AssertResult ]
+    (desc: expr:
+      assertBoolContext { unexpected-throw = { }; } desc (!(throws expr)));
+
+  # Annotate a bunch of asserts with a descriptive name
+  it = desc: asserts: {
+    it-desc = desc;
+    inherit asserts;
+  };
+
+  # Run a bunch of its and check whether all asserts are yep.
+  # If not, abort evaluation with `throw`
+  # and print the result of the test suite.
+  #
+  # Takes a test suite name as first argument.
+  runTestsuite = defun [ string (list ItResult) drv ]
+    (name: itResults:
+      let
+        goodAss = ass: AssertResult.match ass {
+          yep = _: true;
+          nope = _: false;
+        };
+        res = partitionTests
+          (it:
+            (partitionTests goodAss it.asserts).err == [ ]
+          )
+          itResults;
+        prettyRes = lib.generators.toPretty { } res;
+      in
+      if res.err == [ ]
+      then
+        depot.nix.runExecline.local "testsuite-${name}-successful" { } [
+          "importas"
+          "out"
+          "out"
+          # force derivation to rebuild if test case list changes
+          "ifelse"
+          [ bins.s6-false ]
+          [
+            bins.printf
+            ""
+            (builtins.hashString "sha512" prettyRes)
+          ]
+          "if"
+          [ bins.printf "%s\n" "testsuite ${name} successful!" ]
+          bins.s6-touch
+          "$out"
+        ]
+      else
+        depot.nix.runExecline.local "testsuite-${name}-failed"
+          {
+            stdin = prettyRes + "\n";
+          } [
+          "importas"
+          "out"
+          "out"
+          "if"
+          [ bins.printf "%s\n" "testsuite ${name} failed!" ]
+          "if"
+          [ bins.s6-cat ]
+          "exit"
+          "1"
+        ]);
+
+in
+{
+  inherit
+    assertEq
+    assertThrows
+    assertDoesNotThrow
+    it
+    runTestsuite
+    ;
+}
diff --git a/nix/sparseTree/OWNERS b/nix/sparseTree/OWNERS
new file mode 100644
index 000000000000..fdf6d7204051
--- /dev/null
+++ b/nix/sparseTree/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - sterni
\ No newline at end of file
diff --git a/nix/sparseTree/default.nix b/nix/sparseTree/default.nix
new file mode 100644
index 000000000000..16fc9b6103f9
--- /dev/null
+++ b/nix/sparseTree/default.nix
@@ -0,0 +1,74 @@
+# Build a “sparse” version of a given directory, only including contained files
+# and directories if they are listed in a supplied list:
+#
+# # A very minimal depot
+# sparseTree ./depot [
+#   ./default.nix
+#   ./depot/nix/readTree/default.nix
+#   ./third_party/nixpkgs
+#   ./third_party/overlays
+# ]
+{ pkgs, lib, ... }:
+
+# root path to use as a reference point
+root:
+# list of paths below `root` that should be
+# included in the resulting directory
+#
+# If a path, it needs to refer to the actual file / directory to be included.
+# If a string, it is treated as a path relative to the root.
+paths:
+
+let
+  rootLength = builtins.stringLength (toString root);
+
+  # Count slashes in a path.
+  #
+  # Type: path -> int
+  depth = path: lib.pipe path [
+    toString
+    (builtins.split "/")
+    (builtins.filter builtins.isList)
+    builtins.length
+  ];
+
+  # (Parent) directories will be created from deepest to shallowest
+  # which should mean no conflicts are caused unless both a child
+  # and its parent directory are in the list of paths.
+  # TODO(sterni): improve error messages in such cases
+  fromDeepest = lib.sort (a: b: depth a < depth b) paths;
+
+  # Create a set which contains the source path to copy / symlink and
+  # its destination, i.e. the path below the destination root including
+  # a leading slash. Additionally some sanity checking is done.
+  makeSymlink = path:
+    let
+      withLeading = p: if builtins.substring 0 1 p == "/" then p else "/" + p;
+      fullPath =
+        /**/
+        if builtins.isPath path then path
+        else if builtins.isString path then (root + withLeading path)
+        else builtins.throw "Unsupported path type ${builtins.typeOf path}";
+      strPath = toString fullPath;
+      contextPath = "${fullPath}";
+      belowRoot = builtins.substring rootLength (-1) strPath;
+      prefix = builtins.substring 0 rootLength strPath;
+    in
+    assert toString root == prefix; {
+      src = contextPath;
+      dst = belowRoot;
+    };
+
+  symlinks = builtins.map makeSymlink fromDeepest;
+in
+
+# TODO(sterni): teach readTree to also read symlinked directories,
+  # so we ln -sT instead of cp -aT.
+pkgs.runCommandNoCC "sparse-${builtins.baseNameOf root}" { } (
+  lib.concatMapStrings
+    ({ src, dst }: ''
+      mkdir -p "$(dirname "$out${dst}")"
+      cp -aT --reflink=auto "${src}" "$out${dst}"
+    '')
+    symlinks
+)
diff --git a/nix/tag/default.nix b/nix/tag/default.nix
new file mode 100644
index 000000000000..0038404460b1
--- /dev/null
+++ b/nix/tag/default.nix
@@ -0,0 +1,166 @@
+{ depot, lib, ... }:
+let
+  # Takes a tag, checks whether it is an attrset with one element,
+  # if so sets `isTag` to `true` and sets the name and value.
+  # If not, sets `isTag` to `false` and sets `errmsg`.
+  verifyTag = tag:
+    let
+      cases = builtins.attrNames tag;
+      len = builtins.length cases;
+    in
+    if builtins.length cases == 1
+    then
+      let name = builtins.head cases; in {
+        isTag = true;
+        name = name;
+        val = tag.${name};
+        errmsg = null;
+      }
+    else {
+      isTag = false;
+      errmsg =
+        ("match: an instance of a sum is an attrset "
+          + "with exactly one element, yours had ${toString len}"
+          + ", namely: ${lib.generators.toPretty {} cases}");
+      name = null;
+      val = null;
+    };
+
+  # Returns the tag name of a given tag attribute set.
+  # Throws if the tag is invalid.
+  #
+  # Type: tag -> string
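+  #
+  # Example:
+  #   tagName { foo = 42; } => "foo"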
+  tagName = tag: (assertIsTag tag).name;
+
+  # Returns the tagged value of a given tag attribute set.
+  # Throws if the tag is invalid.
+  #
+  # Type: tag -> any
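+  #
+  # Example:
+  #   tagValue { foo = 42; } => 42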
+  tagValue = tag: (assertIsTag tag).val;
+
+  # like `verifyTag`, but throws the error message if it is not a tag.
+  assertIsTag = tag:
+    let res = verifyTag tag; in
+    assert res.isTag || throw res.errmsg;
+    { inherit (res) name val; };
+
+
+  # Discriminator for values.
+  # Goes through a list of tagged predicates `{ <tag> = <pred>; }`
+  # and returns the value inside the tag
+  # for which the first predicate applies, `{ <tag> = v; }`.
+  # They can then later be matched on with `match`.
+  #
+  # `defTag` is the tag that is assigned if there is no match.
+  #
+  # Examples:
+  #   discrDef "smol" [
+  #     { biggerFive = i: i > 5; }
+  #     { negative = i: i < 0; }
+  #   ] (-100)
+  #   => { negative = -100; }
+  #   discrDef "smol" [
+  #     { biggerFive = i: i > 5; }
+  #     { negative = i: i < 0; }
+  #   ] 1
+  #   => { smol = 1; }
+  discrDef = defTag: fs: v:
+    let
+      res = lib.findFirst
+        (t: t.val v)
+        null
+        (map assertIsTag fs);
+    in
+    if res == null
+    then { ${defTag} = v; }
+    else { ${res.name} = v; };
+
+  # Like `discrDef`, but fail if there is no match.
+  discr = fs: v:
+    let res = discrDef null fs v; in
+    # discrDef with a null default tag yields an empty attrset when no
+    # predicate matches (null attribute names are skipped), so check for that.
+    assert lib.assertMsg (res != { })
+      "tag.discr: No predicate found that matches ${lib.generators.toPretty {} v}";
+    res;
+
+  # The canonical pattern matching primitive.
+  # A sum value is an attribute set with one element,
+  # whose key is the name of the variant and
+  # whose value is the content of the variant.
+  # `matcher` is an attribute set which enumerates
+  # all possible variants as keys and provides a function
+  # which handles each variant’s content.
+  # You should make an effort to return values of the same
+  # type in your matcher, or new sums.
+  #
+  # Example:
+  #   let
+  #      success = { res = 42; };
+  #      failure = { err = "no answer"; };
+  #      matcher = {
+  #        res = i: i + 1;
+  #        err = _: 0;
+  #      };
+  #    in
+  #       match success matcher == 43
+  #    && match failure matcher == 0;
+  #
+  match = sum: matcher:
+    let cases = builtins.attrNames sum;
+    in assert
+    let len = builtins.length cases; in
+    lib.assertMsg (len == 1)
+      ("match: an instance of a sum is an attrset "
+        + "with exactly one element, yours had ${toString len}"
+        + ", namely: ${lib.generators.toPretty {} cases}");
+    let case = builtins.head cases;
+    in assert
+    lib.assertMsg (matcher ? ${case})
+      ("match: \"${case}\" is not a valid case of this sum, "
+        + "the matcher accepts: ${lib.generators.toPretty {}
+            (builtins.attrNames matcher)}");
+    matcher.${case} sum.${case};
+
+  # A `match` with the arguments flipped.
+  # “Lam” stands for “lambda”, because it can be used like the
+  # `\case` LambdaCase statement in Haskell, to create a curried
+  # “matcher” function ready to take a value.
+  #
+  # Example:
+  #   lib.pipe { foo = 42; } [
+  #     (matchLam {
+  #       foo = i: if i < 23 then { small = i; } else { big = i; };
+  #       bar = _: { small = 5; };
+  #     })
+  #     (matchLam {
+  #       small = i: "yay it was small";
+  #       big = i: "whoo it was big!";
+  #     })
+  #   ]
+  #   => "whoo it was big!";
+  matchLam = matcher: sum: match sum matcher;
+
+  tests = import ./tests.nix {
+    inherit
+      depot
+      lib
+      verifyTag
+      discr
+      discrDef
+      match
+      matchLam
+      ;
+  };
+
+in
+{
+  inherit
+    verifyTag
+    tagName
+    tagValue
+    discr
+    discrDef
+    match
+    matchLam
+    tests
+    ;
+}
diff --git a/nix/tag/tests.nix b/nix/tag/tests.nix
new file mode 100644
index 000000000000..bcc42c758a6c
--- /dev/null
+++ b/nix/tag/tests.nix
@@ -0,0 +1,94 @@
+{ depot, lib, verifyTag, discr, discrDef, match, matchLam }:
+
+let
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    assertEq
+    it
+    ;
+
+  isTag-test = it "checks whether something is a tag" [
+    (assertEq "is Tag"
+      (verifyTag { foo = "bar"; })
+      {
+        isTag = true;
+        name = "foo";
+        val = "bar";
+        errmsg = null;
+      })
+    (assertEq "is not Tag"
+      (removeAttrs (verifyTag { foo = "bar"; baz = 42; }) [ "errmsg" ])
+      {
+        isTag = false;
+        name = null;
+        val = null;
+      })
+  ];
+
+  discr-test = it "can discr things" [
+    (assertEq "id"
+      (discr [
+        { a = lib.const true; }
+      ] "x")
+      { a = "x"; })
+    (assertEq "bools here, ints there"
+      (discr [
+        { bool = lib.isBool; }
+        { int = lib.isInt; }
+      ] 25)
+      { int = 25; })
+    (assertEq "bools here, ints there 2"
+      (discr [
+        { bool = lib.isBool; }
+        { int = lib.isInt; }
+      ]
+        true)
+      { bool = true; })
+    (assertEq "fallback to default"
+      (discrDef "def" [
+        { bool = lib.isBool; }
+        { int = lib.isInt; }
+      ] "foo")
+      { def = "foo"; })
+  ];
+
+  match-test = it "can match things" [
+    (assertEq "match example"
+      (
+        let
+          success = { res = 42; };
+          failure = { err = "no answer"; };
+          matcher = {
+            res = i: i + 1;
+            err = _: 0;
+          };
+        in
+        {
+          one = match success matcher;
+          two = match failure matcher;
+        }
+      )
+      {
+        one = 43;
+        two = 0;
+      })
+    (assertEq "matchLam & pipe"
+      (lib.pipe { foo = 42; } [
+        (matchLam {
+          foo = i: if i < 23 then { small = i; } else { big = i; };
+          bar = _: { small = 5; };
+        })
+        (matchLam {
+          small = i: "yay it was small";
+          big = i: "whoo it was big!";
+        })
+      ])
+      "whoo it was big!")
+  ];
+
+in
+runTestsuite "tag" [
+  isTag-test
+  discr-test
+  match-test
+]
diff --git a/nix/tailscale/default.nix b/nix/tailscale/default.nix
new file mode 100644
index 000000000000..363f717db6de
--- /dev/null
+++ b/nix/tailscale/default.nix
@@ -0,0 +1,31 @@
+# This file defines a Nix helper function to create Tailscale ACL files.
+#
+# https://tailscale.com/kb/1018/install-acls
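+#
+# A hypothetical invocation (attribute names follow the structs below):
+#
+#   depot.nix.tailscale {
+#     Hosts = { buildhost = "100.64.0.1"; };
+#     ACLs = [
+#       { Action = "accept"; Users = [ "*" ]; Ports = [ "buildhost:22" ]; }
+#     ];
+#   }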
+
+{ depot, pkgs, ... }:
+
+with depot.nix.yants;
+
+let
+  inherit (builtins) toFile toJSON;
+
+  acl = struct "acl" {
+    Action = enum [ "accept" "reject" ];
+    Users = list string;
+    Ports = list string;
+  };
+
+  acls = list acl;
+
+  aclConfig = struct "aclConfig" {
+    # Static group mappings from group names to lists of users
+    Groups = option (attrs (list string));
+
+    # Hostname aliases to use in place of IPs
+    Hosts = option (attrs string);
+
+    # Actual ACL entries
+    ACLs = list acl;
+  };
+in
+config: pkgs.writeText "tailscale-acl.json" (toJSON (aclConfig config))
diff --git a/nix/utils/OWNERS b/nix/utils/OWNERS
new file mode 100644
index 000000000000..f16dd105d761
--- /dev/null
+++ b/nix/utils/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - sterni
diff --git a/nix/utils/default.nix b/nix/utils/default.nix
new file mode 100644
index 000000000000..cabea5bbeeb3
--- /dev/null
+++ b/nix/utils/default.nix
@@ -0,0 +1,186 @@
+{ depot, lib, ... }:
+
+let
+  /* Get the basename of a store path without
+     the leading hash.
+
+     Type: (path | drv | string) -> string
+
+     Example:
+       storePathName ./foo.c
+       => "foo.c"
+
+       storePathName (writeText "foo.c" "int main() { return 0; }")
+       => "foo.c"
+
+       storePathName "${hello}/bin/hello"
+       => "hello"
+  */
+  storePathName = p:
+    if lib.isDerivation p
+    then p.name
+    else if builtins.isPath p
+    then builtins.baseNameOf p
+    else if builtins.isString p || (builtins.isAttrs p && (p ? outPath || p ? __toString))
+    then
+      let
+        strPath = toString p;
+        # strip leading storeDir and trailing slashes
+        noStoreDir = lib.removeSuffix "/"
+          (lib.removePrefix "${builtins.storeDir}/" strPath);
+        # a basename of a child of a store path isn't really
+        # referring to a store path, so removing the string
+        # context is safe (e. g. "hello" for "${hello}/bin/hello").
+        basename = builtins.unsafeDiscardStringContext
+          (builtins.baseNameOf strPath);
+      in
+      # If p is a direct child of storeDir, we need to remove
+        # the leading hash as well to make sure that:
+        # `storePathName drv == storePathName (toString drv)`.
+      if noStoreDir == basename
+      then builtins.substring 33 (-1) basename
+      else basename
+    else builtins.throw ("Don't know how to get (base)name of "
+      + lib.generators.toPretty { } p);
+
+  /* Query the type of a path exposing the same information as would be by
+     `builtins.readDir`, but for a single, specific target path.
+
+     The information is returned as a tagged value, i. e. an attribute set with
+     exactly one attribute where the type of the path is encoded in the name
+     of the single attribute. The allowed tags and values are as follows:
+
+     * `regular`: is a regular file, always `true` if returned
+     * `directory`: is a directory, always `true` if returned
+     * `missing`: path does not exist, always `true` if returned
+     * `symlink`: path is a symlink, value is a string describing the type
+       of its realpath which may be either:
+
+       * `"directory"`: realpath of the symlink is a directory
+       * `"regular-or-missing"`: realpath of the symlink is either a regular
+         file or does not exist. Due to limitations of the Nix expression
+         language, we can't tell which.
+
+     Type: path(-like) -> tag
+
+     `tag` refers to the attribute set format of `//nix/tag`.
+
+     Example:
+       pathType ./foo.c
+       => { regular = true; }
+
+       pathType /home/lukas
+       => { directory = true; }
+
+       pathType ./result
+       => { symlink = "directory"; }
+
+       pathType ./link-to-file
+       => { symlink = "regular-or-missing"; }
+
+       pathType /does/not/exist
+       => { missing = true; }
+
+       # Check if a path exists
+       !(pathType /file ? missing)
+
+       # Check if a path is a directory or a symlink to a directory
+       # A handy shorthand for this is provided as `realPathIsDirectory`.
+       pathType /path ? directory || (pathType /path).symlink or null == "directory"
+
+       # Match on the result using //nix/tag
+       nix.tag.match (nix.utils.pathType ./result) {
+         symlink = v: "symlink to ${v}";
+         directory  = _: "directory";
+         regular = _: "regular";
+         missing = _: "path does not exist";
+       }
+       => "symlink to directory"
+
+       # Query path type
+       nix.tag.tagName (pathType /path)
+  */
+  pathType = path:
+    let
+      # baseNameOf is very annoyed if we proceed with string context,
+      # so we discard it. We need to call toString first to prevent
+      # unsafeDiscardStringContext from importing the path into the
+      # store, which would mess with base- and dirname.
+      path' = builtins.unsafeDiscardStringContext (toString path);
+      # To read the containing directory we absolutely need
+      # to keep the string context, otherwise a derivation
+      # would not be realized before our check (at eval time)
+      containingDir = builtins.readDir (builtins.dirOf path);
+      # Construct tag to use for the value
+      thisPathType = containingDir.${builtins.baseNameOf path'} or "missing";
+      # Trick to check if the symlink target exists and is a directory:
+      # if we append a "/." to the string version of the path, Nix won't
+      # canonicalize it (which would strip any "/." in the path), so if
+      # path' + "/." exists, we know that the symlink points to an existing
+      # directory. If not, either the target doesn't exist or is a regular file.
+      # TODO(sterni): is there a way to check reliably if the symlink target exists?
+      isSymlinkDir = builtins.pathExists (path' + "/.");
+    in
+    {
+      ${thisPathType} =
+        /**/
+        if thisPathType != "symlink" then true
+        else if isSymlinkDir then "directory"
+        else "regular-or-missing";
+    };
+
+  pathType' = path:
+    let
+      p = pathType path;
+    in
+    if p ? missing
+    then builtins.throw "${lib.generators.toPretty {} path} does not exist"
+    else p;
+
+  /* Check whether the given path is a directory.
+     Throws if the path in question doesn't exist.
+
+     Type: path(-like) -> bool
+  */
+  isDirectory = path: pathType' path ? directory;
+
+  /* Checks whether the given path is a directory or
+     a symlink to a directory. Throws if the path in
+     question doesn't exist.
+
+     Warning: Does not throw if the target file or
+     directory doesn't exist, but the symlink does.
+
+     Type: path(-like) -> bool
+  */
+  realPathIsDirectory = path:
+    let
+      pt = pathType' path;
+    in
+    pt ? directory || pt.symlink or null == "directory";
+
+  /* Check whether the given path is a regular file.
+     Throws if the path in question doesn't exist.
+
+     Type: path(-like) -> bool
+  */
+  isRegularFile = path: pathType' path ? regular;
+
+  /* Check whether the given path is a symbolic link.
+     Throws if the path in question doesn't exist.
+
+     Type: path(-like) -> bool
+  */
+  isSymlink = path: pathType' path ? symlink;
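+
+  /* Usage sketch (illustrative): assuming `./result` is a symlink to a
+     directory, as produced by `nix-build`:
+
+       isDirectory ./result          # => false (it is a symlink)
+       realPathIsDirectory ./result  # => true
+       isSymlink ./result            # => true
+  */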
+
+in
+{
+  inherit
+    storePathName
+    pathType
+    isDirectory
+    realPathIsDirectory
+    isRegularFile
+    isSymlink
+    ;
+}
diff --git a/nix/utils/tests/.skip-subtree b/nix/utils/tests/.skip-subtree
new file mode 100644
index 000000000000..1efb1b9476ba
--- /dev/null
+++ b/nix/utils/tests/.skip-subtree
@@ -0,0 +1 @@
+subdirectories are just test cases
diff --git a/nix/utils/tests/default.nix b/nix/utils/tests/default.nix
new file mode 100644
index 000000000000..52b7ca41d215
--- /dev/null
+++ b/nix/utils/tests/default.nix
@@ -0,0 +1,126 @@
+{ depot, lib, ... }:
+
+let
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    it
+    assertEq
+    assertThrows
+    assertDoesNotThrow
+    ;
+
+  inherit (depot.nix.utils)
+    isDirectory
+    realPathIsDirectory
+    isRegularFile
+    isSymlink
+    pathType
+    storePathName
+    ;
+
+  assertUtilsPred = msg: act: exp: [
+    (assertDoesNotThrow "${msg} does not throw" act)
+    (assertEq msg (builtins.tryEval act).value exp)
+  ];
+
+  pathPredicates = it "judges paths correctly" (lib.flatten [
+    # isDirectory
+    (assertUtilsPred "directory isDirectory"
+      (isDirectory ./directory)
+      true)
+    (assertUtilsPred "symlink not isDirectory"
+      (isDirectory ./symlink-directory)
+      false)
+    (assertUtilsPred "file not isDirectory"
+      (isDirectory ./directory/file)
+      false)
+    # realPathIsDirectory
+    (assertUtilsPred "directory realPathIsDirectory"
+      (realPathIsDirectory ./directory)
+      true)
+    (assertUtilsPred "symlink to directory realPathIsDirectory"
+      (realPathIsDirectory ./symlink-directory)
+      true)
+    (assertUtilsPred "realPathIsDirectory resolves chained symlinks"
+      (realPathIsDirectory ./symlink-symlink-directory)
+      true)
+    # isRegularFile
+    (assertUtilsPred "file isRegularFile"
+      (isRegularFile ./directory/file)
+      true)
+    (assertUtilsPred "symlink not isRegularFile"
+      (isRegularFile ./symlink-file)
+      false)
+    (assertUtilsPred "directory not isRegularFile"
+      (isRegularFile ./directory)
+      false)
+    # isSymlink
+    (assertUtilsPred "symlink to file isSymlink"
+      (isSymlink ./symlink-file)
+      true)
+    (assertUtilsPred "symlink to directory isSymlink"
+      (isSymlink ./symlink-directory)
+      true)
+    (assertUtilsPred "symlink to symlink isSymlink"
+      (isSymlink ./symlink-symlink-file)
+      true)
+    (assertUtilsPred "symlink to missing file isSymlink"
+      (isSymlink ./missing)
+      true)
+    (assertUtilsPred "directory not isSymlink"
+      (isSymlink ./directory)
+      false)
+    (assertUtilsPred "file not isSymlink"
+      (isSymlink ./directory/file)
+      false)
+    # missing files throw
+    (assertThrows "isDirectory throws on missing file"
+      (isDirectory ./does-not-exist))
+    (assertThrows "realPathIsDirectory throws on missing file"
+      (realPathIsDirectory ./does-not-exist))
+    (assertThrows "isRegularFile throws on missing file"
+      (isRegularFile ./does-not-exist))
+    (assertThrows "isSymlink throws on missing file"
+      (isSymlink ./does-not-exist))
+  ]);
+
+  symlinkPathTypeTests = it "correctly judges symlinks" [
+    (assertEq "symlinks to directories are detected correcty"
+      ((pathType ./symlink-directory).symlink or null) "directory")
+    (assertEq "symlinks to symlinks to directories are detected correctly"
+      ((pathType ./symlink-symlink-directory).symlink or null) "directory")
+    (assertEq "symlinks to files are detected-ish"
+      ((pathType ./symlink-file).symlink or null) "regular-or-missing")
+    (assertEq "symlinks to symlinks to files are detected-ish"
+      ((pathType ./symlink-symlink-file).symlink or null) "regular-or-missing")
+    (assertEq "symlinks to nowhere are not distinguished from files"
+      ((pathType ./missing).symlink or null) "regular-or-missing")
+  ];
+
+  cheddarStorePath =
+    builtins.unsafeDiscardStringContext depot.tools.cheddar.outPath;
+
+  cleanedSource = lib.cleanSource ./.;
+
+  storePathNameTests = it "correctly gets the basename of a store path" [
+    (assertEq "base name of a derivation"
+      (storePathName depot.tools.cheddar)
+      depot.tools.cheddar.name)
+    (assertEq "base name of a store path string"
+      (storePathName cheddarStorePath)
+      depot.tools.cheddar.name)
+    (assertEq "base name of a path within a store path"
+      (storePathName "${cheddarStorePath}/bin/cheddar") "cheddar")
+    (assertEq "base name of a path"
+      (storePathName ../default.nix) "default.nix")
+    (assertEq "base name of a cleanSourced path"
+      (storePathName cleanedSource)
+      cleanedSource.name)
+  ];
+in
+
+runTestsuite "nix.utils" [
+  pathPredicates
+  symlinkPathTypeTests
+  storePathNameTests
+]
diff --git a/nix/utils/tests/directory/file b/nix/utils/tests/directory/file
new file mode 100644
index 000000000000..e69de29bb2d1
--- /dev/null
+++ b/nix/utils/tests/directory/file
diff --git a/nix/utils/tests/missing b/nix/utils/tests/missing
new file mode 120000
index 000000000000..cfa0a46515b5
--- /dev/null
+++ b/nix/utils/tests/missing
@@ -0,0 +1 @@
+does-not-exist
\ No newline at end of file
diff --git a/nix/utils/tests/symlink-directory b/nix/utils/tests/symlink-directory
new file mode 120000
index 000000000000..6d0450cc2421
--- /dev/null
+++ b/nix/utils/tests/symlink-directory
@@ -0,0 +1 @@
+directory
\ No newline at end of file
diff --git a/nix/utils/tests/symlink-file b/nix/utils/tests/symlink-file
new file mode 120000
index 000000000000..cd5d01792aa4
--- /dev/null
+++ b/nix/utils/tests/symlink-file
@@ -0,0 +1 @@
+directory/file
\ No newline at end of file
diff --git a/nix/utils/tests/symlink-symlink-directory b/nix/utils/tests/symlink-symlink-directory
new file mode 120000
index 000000000000..5d6ba5247eff
--- /dev/null
+++ b/nix/utils/tests/symlink-symlink-directory
@@ -0,0 +1 @@
+symlink-directory
\ No newline at end of file
diff --git a/nix/utils/tests/symlink-symlink-file b/nix/utils/tests/symlink-symlink-file
new file mode 120000
index 000000000000..f8d9ce3659f4
--- /dev/null
+++ b/nix/utils/tests/symlink-symlink-file
@@ -0,0 +1 @@
+symlink-file
\ No newline at end of file
diff --git a/nix/writeElispBin/default.nix b/nix/writeElispBin/default.nix
new file mode 100644
index 000000000000..e23c55a0a4ba
--- /dev/null
+++ b/nix/writeElispBin/default.nix
@@ -0,0 +1,20 @@
+{ depot, pkgs, ... }:
+
+{ name, src, deps ? (_: [ ]), emacs ? pkgs.emacs27-nox }:
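+
+# Usage sketch (illustrative; `epkgs.dash` is just an example dependency):
+#
+#   depot.nix.writeElispBin {
+#     name = "greet";
+#     src = ''(message "hello")'';
+#     deps = epkgs: [ epkgs.dash ];
+#   }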
+
+let
+  inherit (pkgs) emacsPackages emacsPackagesFor;
+  inherit (builtins) isString toFile;
+
+  finalEmacs = (emacsPackagesFor emacs).emacsWithPackages deps;
+
+  srcFile =
+    if isString src
+    then toFile "${name}.el" src
+    else src;
+
+in
+depot.nix.writeScriptBin name ''
+  #!/bin/sh
+  ${finalEmacs}/bin/emacs --batch --no-site-file --script ${srcFile} "$@"
+''
diff --git a/nix/writeExecline/default.nix b/nix/writeExecline/default.nix
new file mode 100644
index 000000000000..5169b01386ea
--- /dev/null
+++ b/nix/writeExecline/default.nix
@@ -0,0 +1,39 @@
+{ pkgs, depot, ... }:
+
+# Write an execline script, represented as nested nix lists.
+# Everything is escaped correctly.
+# https://skarnet.org/software/execline/
+
+# TODO(Profpatsch) upstream into nixpkgs
+
+name:
+{
+  # "var": substitute readNArgs variables and start $@
+  # from the (readNArgs+1)th argument
+  # "var-full": substitute readNArgs variables and start $@ from $0
+  # "env": don’t substitute, set # and 0…n environment vaariables, where n=$#
+  # "none": don’t substitute or set any positional arguments
+  # "env-no-push": like "env", but bypass the push-phase. Not recommended.
+  argMode ? "var"
+, # Number of arguments to be substituted as variables (passed to "var"/"-s" or "var-full"/"-S")
+  readNArgs ? 0
+,
+}:
+# Nested list of lists of commands.
+# Inner lists are translated to execline blocks.
+argList:
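+
+# Usage sketch (illustrative): each inner list becomes one execline block.
+#
+#   depot.nix.writeExecline "hello" { } [
+#     "if" [ "echo" "hello" ]
+#     "echo" "world"
+#   ]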
+
+let
+  env =
+    if argMode == "var" then "s${toString readNArgs}"
+    else if argMode == "var-full" then "S${toString readNArgs}"
+    else if argMode == "env" then ""
+    else if argMode == "none" then "P"
+    else if argMode == "env-no-push" then "p"
+    else abort ''"${toString argMode}" is not a valid argMode, use one of "var", "var-full", "env", "none", "env-no-push".'';
+
+in
+depot.nix.writeScript name ''
+  #!${pkgs.execline}/bin/execlineb -W${env}
+  ${depot.nix.escapeExecline argList}
+''
diff --git a/nix/writeScript/default.nix b/nix/writeScript/default.nix
new file mode 100644
index 000000000000..1f53b4e4ff51
--- /dev/null
+++ b/nix/writeScript/default.nix
@@ -0,0 +1,35 @@
+{ pkgs, depot, ... }:
+
+# Write the given string to $out
+# and make it executable.
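+#
+# Usage sketch (illustrative):
+#
+#   depot.nix.writeScript "say-hi" ''
+#     #!/bin/sh
+#     echo hi
+#   ''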
+
+let
+  bins = depot.nix.getBins pkgs.s6-portable-utils [
+    "s6-cat"
+    "s6-chmod"
+  ];
+
+in
+name:
+# string of the executable script that is put in $out
+script:
+
+depot.nix.runExecline name
+{
+  stdin = script;
+  derivationArgs = {
+    preferLocalBuild = true;
+    allowSubstitutes = false;
+  };
+} [
+  "importas"
+  "out"
+  "out"
+  # this pipes stdout of s6-cat to $out
+  # and s6-cat redirects from stdin to stdout
+  "if"
+  [ "redirfd" "-w" "1" "$out" bins.s6-cat ]
+  bins.s6-chmod
+  "0755"
+  "$out"
+]
diff --git a/nix/writeScriptBin/default.nix b/nix/writeScriptBin/default.nix
new file mode 100644
index 000000000000..ed26cf197e1e
--- /dev/null
+++ b/nix/writeScriptBin/default.nix
@@ -0,0 +1,12 @@
+{ depot, ... }:
+
+# Like writeScript,
+# but put the script into `$out/bin/${name}`.
+
+name:
+script:
+
+depot.nix.binify {
+  exe = (depot.nix.writeScript name script);
+  inherit name;
+}
diff --git a/nix/writers/default.nix b/nix/writers/default.nix
new file mode 100644
index 000000000000..55355913a9f4
--- /dev/null
+++ b/nix/writers/default.nix
@@ -0,0 +1,113 @@
+{ depot, pkgs, lib, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-ls" "s6-touch" ];
+
+  linkTo = name: path: depot.nix.runExecline.local name { } [
+    "importas"
+    "out"
+    "out"
+    bins.s6-ln
+    "-s"
+    path
+    "$out"
+  ];
+
+  # Build a rust executable, $out is the executable.
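+  #
+  # Usage sketch (illustrative; the tests in ./tests/rust.nix use the
+  # same pattern):
+  #
+  #   rustSimple { name = "hello"; } ''
+  #     fn main() { println!("hello"); }
+  #   ''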
+  rustSimple = args@{ name, ... }: src:
+    linkTo name "${rustSimpleBin args src}/bin/${name}";
+
+  # Like `rustSimple`, but put the binary in `$out/bin/`.
+  rustSimpleBin =
+    { name
+    , dependencies ? [ ]
+    , doCheck ? true
+    ,
+    }: src:
+    (if doCheck then testRustSimple else pkgs.lib.id)
+      (pkgs.buildRustCrate ({
+        pname = name;
+        version = "1.0.0";
+        crateName = name;
+        crateBin = [ name ];
+        dependencies = dependencies;
+        src = pkgs.runCommandLocal "write-main.rs"
+          {
+            src = src;
+            passAsFile = [ "src" ];
+          } ''
+          mkdir -p $out/src/bin
+          cp "$srcPath" $out/src/bin/${name}.rs
+          find $out
+        '';
+      }));
+
+  # Build a rust library, that can be used as dependency to `rustSimple`.
+  # Wrapper around `pkgs.buildRustCrate`, takes all its arguments.
+  rustSimpleLib =
+    { name
+    , dependencies ? [ ]
+    , doCheck ? true
+    ,
+    }: src:
+    (if doCheck then testRustSimple else pkgs.lib.id)
+      (pkgs.buildRustCrate ({
+        pname = name;
+        version = "1.0.0";
+        crateName = name;
+        dependencies = dependencies;
+        src = pkgs.runCommandLocal "write-lib.rs"
+          {
+            src = src;
+            passAsFile = [ "src" ];
+          } ''
+          mkdir -p $out/src
+          cp "$srcPath" $out/src/lib.rs
+          find $out
+        '';
+      }));
+
+  /* Takes a `buildRustCrate` derivation as an input,
+    * builds it with `{ buildTests = true; }` and runs
+    * all tests found in its `tests` dir. If they are
+    * all successful, `$out` will point to the crate
+    * built with `{ buildTests = false; }`, otherwise
+    * it will fail to build.
+    *
+    * See also `nix.drvSeqL` which is used to implement
+    * this behavior.
+    */
+  testRustSimple = rustDrv:
+    let
+      crate = buildTests: rustDrv.override { inherit buildTests; };
+      tests = depot.nix.runExecline.local "${rustDrv.name}-tests-run" { } [
+        "importas"
+        "out"
+        "out"
+        "if"
+        [
+          "pipeline"
+          [ bins.s6-ls "${crate true}/tests" ]
+          "forstdin"
+          "-o0"
+          "test"
+          "importas"
+          "test"
+          "test"
+          "${crate true}/tests/$test"
+        ]
+        bins.s6-touch
+        "$out"
+      ];
+    in
+    depot.nix.drvSeqL [ tests ] (crate false);
+
+in
+{
+  inherit
+    rustSimple
+    rustSimpleBin
+    rustSimpleLib
+    ;
+}
diff --git a/nix/writers/tests/rust.nix b/nix/writers/tests/rust.nix
new file mode 100644
index 000000000000..232a2dc60808
--- /dev/null
+++ b/nix/writers/tests/rust.nix
@@ -0,0 +1,76 @@
+{ depot, pkgs, ... }:
+
+let
+  inherit (depot.nix.writers)
+    rustSimple
+    rustSimpleLib
+    rustSimpleBin
+    ;
+
+  inherit (pkgs)
+    coreutils
+    ;
+
+  run = drv: depot.nix.runExecline.local "run-${drv.name}" { } [
+    "if"
+    [ drv ]
+    "importas"
+    "out"
+    "out"
+    "${coreutils}/bin/touch"
+    "$out"
+  ];
+
+  rustTransitiveLib = rustSimpleLib
+    {
+      name = "transitive";
+    } ''
+    pub fn transitive(s: &str) -> String {
+      let mut new = s.to_string();
+      new.push_str(" 1 2 3");
+      new
+    }
+
+    #[cfg(test)]
+    mod tests {
+      use super::*;
+
+      #[test]
+      fn test_transitive() {
+        assert_eq!(transitive("foo").as_str(), "foo 1 2 3")
+      }
+    }
+  '';
+
+  rustTestLib = rustSimpleLib
+    {
+      name = "test_lib";
+      dependencies = [ rustTransitiveLib ];
+    } ''
+    extern crate transitive;
+    use transitive::{transitive};
+    pub fn test() -> String {
+      transitive("test")
+    }
+  '';
+
+  rustWithLib = run (rustSimple
+    {
+      name = "rust-with-lib";
+      dependencies = [ rustTestLib ];
+    } ''
+    extern crate test_lib;
+
+    fn main() {
+      assert_eq!(test_lib::test(), String::from("test 1 2 3"));
+    }
+  '');
+
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    rustTransitiveLib
+    rustWithLib
+    ;
+}
diff --git a/nix/yants/README.md b/nix/yants/README.md
new file mode 100644
index 000000000000..98e6642e2dd8
--- /dev/null
+++ b/nix/yants/README.md
@@ -0,0 +1,88 @@
+yants
+=====
+
+This is a tiny type-checker for data in Nix, written in Nix.
+
+# Features
+
+* Checking of primitive types (`int`, `string` etc.)
+* Checking polymorphic types (`option`, `list`, `either`)
+* Defining & checking struct/record types
+* Defining & matching enum types
+* Defining & matching sum types
+* Defining function signatures (including curried functions)
+* Types are composable! `option string`! `list (either int (option float))`!
+* Type errors also compose!
+
+Currently lacking:
+
+* Any kind of inference
+* Convenient syntax for attribute-set function signatures
+
+## Primitives & simple polymorphism
+
+![simple](/about/nix/yants/screenshots/simple.png)
+
+## Structs
+
+![structs](/about/nix/yants/screenshots/structs.png)
+
+## Nested structs!
+
+![nested structs](/about/nix/yants/screenshots/nested-structs.png)
+
+## Enums!
+
+![enums](/about/nix/yants/screenshots/enums.png)
+
+## Functions!
+
+![functions](/about/nix/yants/screenshots/functions.png)
+
+# Usage
+
+Yants can be imported from its `default.nix`. A single attribute (`lib`) can be
+passed, which will otherwise be imported from `<nixpkgs>`.
+
+TIP: You do not need to clone the entire TVL repository to use Yants!
+You can clone just this project through josh: `git clone
+https://code.tvl.fyi/depot.git:/nix/yants.git`
+
+Examples for the most common import methods would be:
+
+1. Import into scope with `with`:
+    ```nix
+    with (import ./default.nix {});
+    # ... Nix code that uses yants ...
+    ```
+
+2. Import as a named variable:
+    ```nix
+    let yants = import ./default.nix {};
+    in yants.string "foo" # or other uses ...
+    ```
+
+3. Overlay into `pkgs.lib`:
+    ```nix
+    # wherever you import your package set (e.g. from <nixpkgs>):
+    import <nixpkgs> {
+      overlays = [
+        (self: super: {
+          lib = super.lib // { yants = import ./default.nix { inherit (super) lib; }; };
+        })
+      ];
+    }
+
+    # yants now lives at lib.yants, besides the other library functions!
+    ```
+
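+A minimal usage sketch (with yants in scope via any of the methods above):
+
+```nix
+# Throws a descriptive type error unless the value conforms to the struct.
+(struct "person" { name = string; age = int; }) {
+  name = "Brynhjulf";
+  age = 42;
+}
+```
+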
+Please see my [Nix one-pager](https://github.com/tazjin/nix-1p) for more general
+information about the Nix language and what the above constructs mean.
+
+# Stability
+
+The current API of Yants is **not yet** considered stable, but it works fine and
+should continue to do so even when pinned to an older version.
+
+Yants' tests require Nix versions above 2.2; compatibility with older versions
+is not guaranteed.
diff --git a/nix/yants/default.nix b/nix/yants/default.nix
new file mode 100644
index 000000000000..cb9fc08287fb
--- /dev/null
+++ b/nix/yants/default.nix
@@ -0,0 +1,368 @@
+# Copyright 2019 Google LLC
+# SPDX-License-Identifier: Apache-2.0
+#
+# Provides a "type system" for Nix offering various primitive &
+# polymorphic types as well as the ability to define & check records.
+#
+# All types (should) compose as expected.
+
+{ lib ? (import <nixpkgs> { }).lib, ... }:
+
+with builtins; let
+  prettyPrint = lib.generators.toPretty { };
+
+  # typedef' :: struct {
+  #   name = string;
+  #   checkType = function; (a -> result)
+  #   checkToBool = option function; (result -> bool)
+  #   toError = option function; (a -> result -> string)
+  #   def = option any;
+  #   match = option function;
+  # } -> type
+  #
+  # This function creates an attribute set that acts as a type.
+  #
+  # It receives a type name, a function that is used to perform a
+  # check on an arbitrary value, a function that can translate the
+  # return of that check to a boolean that informs whether the value
+  # is type-conformant, and a function that can construct error
+  # messages from the check result.
+  #
+  # This function is the low-level primitive used to create types. For
+  # many cases the higher-level 'typedef' function is more appropriate.
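+  #
+  # An illustrative sketch (not part of this file): a "positive int"
+  # type built directly with typedef', relying on the default
+  # checkToBool and toError:
+  #
+  #   positive = typedef' {
+  #     name = "positive";
+  #     checkType = v: {
+  #       ok = isInt v && v > 0;
+  #       err = "expected a positive int, but got ${prettyPrint v}";
+  #     };
+  #   };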
+  typedef' =
+    { name
+    , checkType
+    , checkToBool ? (result: result.ok)
+    , toError ? (_: result: result.err)
+    , def ? null
+    , match ? null
+    }: {
+      inherit name checkToBool toError;
+
+      # check :: a -> bool
+      #
+      # This function is used to determine whether a given type is
+      # conformant.
+      check = value: checkToBool (checkType value);
+
+      # checkType :: a -> struct { ok = bool; err = option string; }
+      #
+      # This function checks whether the passed value is type conformant
+      # and returns an optional type error string otherwise.
+      inherit checkType;
+
+      # __functor :: a -> a
+      #
+      # This function checks whether the passed value is type conformant
+      # and throws an error if it is not.
+      #
+      # The name of this function is a special attribute in Nix that
+      # makes it possible to execute a type attribute set like a normal
+      # function.
+      __functor = self: value:
+        let result = self.checkType value;
+        in if checkToBool result then value
+        else throw (toError value result);
+    };
+
+  typeError = type: val:
+    "expected type '${type}', but value '${prettyPrint val}' is of type '${typeOf val}'";
+
+  # typedef :: string -> (a -> bool) -> type
+  #
+  # typedef is the simplified version of typedef' which uses a default
+  # error message constructor.
+  typedef = name: check: typedef' {
+    inherit name;
+    checkType = v:
+      let res = check v;
+      in {
+        ok = res;
+      } // (lib.optionalAttrs (!res) {
+        err = typeError name v;
+      });
+  };
+
+  checkEach = name: t: l: foldl'
+    (acc: e:
+      let
+        res = t.checkType e;
+        isT = t.checkToBool res;
+      in
+      {
+        ok = acc.ok && isT;
+        err =
+          if isT
+          then acc.err
+          else acc.err + "${prettyPrint e}: ${t.toError e res}\n";
+      })
+    { ok = true; err = "expected type ${name}, but found:\n"; }
+    l;
+in
+lib.fix (self: {
+  # Primitive types
+  any = typedef "any" (_: true);
+  unit = typedef "unit" (v: v == { });
+  int = typedef "int" isInt;
+  bool = typedef "bool" isBool;
+  float = typedef "float" isFloat;
+  string = typedef "string" isString;
+  path = typedef "path" (x: typeOf x == "path");
+  drv = typedef "derivation" (x: isAttrs x && x ? "type" && x.type == "derivation");
+  function = typedef "function" (x: isFunction x || (isAttrs x && x ? "__functor"
+    && isFunction x.__functor));
+
+  # Type for types themselves. Useful when defining polymorphic types.
+  type = typedef "type" (x:
+    isAttrs x
+    && hasAttr "name" x && self.string.check x.name
+    && hasAttr "checkType" x && self.function.check x.checkType
+    && hasAttr "checkToBool" x && self.function.check x.checkToBool
+    && hasAttr "toError" x && self.function.check x.toError
+  );
+
+  # Polymorphic types
+  option = t: typedef' rec {
+    name = "option<${t.name}>";
+    checkType = v:
+      let res = t.checkType v;
+      in {
+        ok = isNull v || (self.type t).checkToBool res;
+        err = "expected type ${name}, but value does not conform to '${t.name}': "
+          + t.toError v res;
+      };
+  };
+
+  eitherN = tn: typedef "either<${concatStringsSep ", " (map (x: x.name) tn)}>"
+    (x: any (t: (self.type t).check x) tn);
+
+  either = t1: t2: self.eitherN [ t1 t2 ];
+
+  list = t: typedef' rec {
+    name = "list<${t.name}>";
+
+    checkType = v:
+      if isList v
+      then checkEach name (self.type t) v
+      else {
+        ok = false;
+        err = typeError name v;
+      };
+  };
+
+  attrs = t: typedef' rec {
+    name = "attrs<${t.name}>";
+
+    checkType = v:
+      if isAttrs v
+      then checkEach name (self.type t) (attrValues v)
+      else {
+        ok = false;
+        err = typeError name v;
+      };
+  };
+
+  # Structs / record types
+  #
+  # Checks that all fields match their declared types, no optional
+  # fields are missing and no unexpected fields occur in the struct.
+  #
+  # Anonymous structs are supported (e.g. for nesting) by omitting the
+  # name.
+  #
+  # TODO: Support open records?
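+  #
+  # Example (illustrative, mirroring ./tests):
+  #
+  #   person = struct "person" { name = string; age = int; };
+  #   person { name = "Brynhjulf"; age = 42; } # returns the value unchanged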
+  struct =
+    # Struct checking is more involved than the simpler types above.
+    # To make the actual type definition more readable, several
+    # helpers are defined below.
+    let
+      # checkField checks an individual field of the struct against
+      # its definition and creates a typecheck result. These results
+      # are aggregated during the actual checking.
+      checkField = def: name: value:
+        let result = def.checkType value; in rec {
+          ok = def.checkToBool result;
+          err =
+            if !ok && isNull value
+            then "missing required ${def.name} field '${name}'\n"
+            else "field '${name}': ${def.toError value result}\n";
+        };
+
+      # checkExtraneous determines whether a (closed) struct contains
+      # any fields that are not part of the definition.
+      checkExtraneous = def: has: acc:
+        if (length has) == 0 then acc
+        else if (hasAttr (head has) def)
+        then checkExtraneous def (tail has) acc
+        else
+          checkExtraneous def (tail has) {
+            ok = false;
+            err = acc.err + "unexpected struct field '${head has}'\n";
+          };
+
+      # checkStruct combines all structure checks and creates one
+      # typecheck result from them
+      checkStruct = def: value:
+        let
+          init = { ok = true; err = ""; };
+          extraneous = checkExtraneous def (attrNames value) init;
+
+          checkedFields = map
+            (n:
+              let v = if hasAttr n value then value."${n}" else null;
+              in checkField def."${n}" n v)
+            (attrNames def);
+
+          combined = foldl'
+            (acc: res: {
+              ok = acc.ok && res.ok;
+              err = if !res.ok then acc.err + res.err else acc.err;
+            })
+            init
+            checkedFields;
+        in
+        {
+          ok = combined.ok && extraneous.ok;
+          err = combined.err + extraneous.err;
+        };
+
+      struct' = name: def: typedef' {
+        inherit name def;
+        checkType = value:
+          if isAttrs value
+          then (checkStruct (self.attrs self.type def) value)
+          else { ok = false; err = typeError name value; };
+
+        toError = _: result: "expected '${name}'-struct, but found:\n" + result.err;
+      };
+    in
+    arg: if isString arg then (struct' arg) else (struct' "anon" arg);
+
+  # Enums & pattern matching
+  enum =
+    let
+      plain = name: def: typedef' {
+        inherit name def;
+
+        checkType = (x: isString x && elem x def);
+        checkToBool = x: x;
+        toError = value: _: "'${prettyPrint value}' is not a member of enum ${name}";
+      };
+      enum' = name: def: lib.fix (e: (plain name def) // {
+        match = x: actions: deepSeq (map e (attrNames actions)) (
+          let
+            actionKeys = attrNames actions;
+            missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] def;
+          in
+          if (length missing) > 0
+          then throw "Missing match action for members: ${prettyPrint missing}"
+          else actions."${e x}"
+        );
+      });
+    in
+    arg: if isString arg then (enum' arg) else (enum' "anon" arg);
+
+  # Sum types
+  #
+  # The representation of a sum type is an attribute set with only one
+  # value, where the key of the value denotes the variant of the type.
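+  #
+  # Example (illustrative, mirroring ./tests):
+  #
+  #   creature = sum "creature" {
+  #     human = struct { name = string; };
+  #     pet = enum "pet" [ "dog" "cat" ];
+  #   };
+  #   creature.match { human.name = "Brynhjulf"; } {
+  #     human = v: "a human named ${v.name}";
+  #     pet = v: "some kind of pet";
+  #   }
+  #   # => "a human named Brynhjulf"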
+  sum =
+    let
+      plain = name: def: typedef' {
+        inherit name def;
+        checkType = (x:
+          let variant = elemAt (attrNames x) 0;
+          in if isAttrs x && length (attrNames x) == 1 && hasAttr variant def
+          then
+            let
+              t = def."${variant}";
+              v = x."${variant}";
+              res = t.checkType v;
+            in
+            if t.checkToBool res
+            then { ok = true; }
+            else {
+              ok = false;
+              err = "while checking '${name}' variant '${variant}': "
+                + t.toError v res;
+            }
+          else { ok = false; err = typeError name x; }
+        );
+      };
+      sum' = name: def: lib.fix (s: (plain name def) // {
+        match = x: actions:
+          let
+            variant = deepSeq (s x) (elemAt (attrNames x) 0);
+            actionKeys = attrNames actions;
+            defKeys = attrNames def;
+            missing = foldl' (m: k: if (elem k actionKeys) then m else m ++ [ k ]) [ ] defKeys;
+          in
+          if (length missing) > 0
+          then throw "Missing match action for variants: ${prettyPrint missing}"
+          else actions."${variant}" x."${variant}";
+      });
+    in
+    arg: if isString arg then (sum' arg) else (sum' "anon" arg);
+
+  # Typed function definitions
+  #
+  # These definitions wrap the supplied function in type-checking
+  # forms that are evaluated when the function is called.
+  #
+  # Note that typed functions themselves are not types and can not be
+  # used to check values for conformity.
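+  #
+  # Example (illustrative, mirroring ./tests): arguments and the return
+  # value are checked when the function is applied.
+  #
+  #   greet = defun [ string int string ]
+  #     (name: age: "${name} is ${toString age} years old");
+  #   greet "Brynhjulf" 42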
+  defun =
+    let
+      mkFunc = sig: f: {
+        inherit sig;
+        __toString = self: foldl' (s: t: "${s} -> ${t.name}")
+          "λ :: ${(head self.sig).name}"
+          (tail self.sig);
+        __functor = _: f;
+      };
+
+      defun' = sig: func:
+        if length sig > 2
+        then mkFunc sig (x: defun' (tail sig) (func ((head sig) x)))
+        else mkFunc sig (x: ((head (tail sig)) (func ((head sig) x))));
+
+    in
+    sig: func:
+      if length sig < 2
+      then (throw "Signature must at least have two types (a -> b)")
+      else defun' sig func;
+
+  # Restricting types
+  #
+  # `restrict` wraps a type `t`, and uses a predicate `pred` to further
+  # restrict the values, giving the restriction a descriptive `name`.
+  #
+  # First, the wrapped type definition is checked (e.g. int) and then the
+  # value is checked with the predicate, so the predicate can already
+  # depend on the value being of the wrapped type.
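+  #
+  # Example (illustrative, mirroring ./tests):
+  #
+  #   smallInt = restrict "< 42" (i: i < 42) int;
+  #   smallInt 25 # => 25
+  #   smallInt 43 # throws: "43 does not conform to restriction 'int[< 42]'"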
+  restrict = name: pred: t:
+    let restriction = "${t.name}[${name}]"; in typedef' {
+      name = restriction;
+      checkType = v:
+        let res = t.checkType v;
+        in
+        if !(t.checkToBool res)
+        then res
+        else
+          let
+            iok = pred v;
+          in
+          if isBool iok then {
+            ok = iok;
+            err = "${prettyPrint v} does not conform to restriction '${restriction}'";
+          } else
+          # use throw here to avoid spamming the build log
+            throw "restriction '${restriction}' predicate returned unexpected value '${prettyPrint iok}' instead of boolean";
+    };
+
+})
diff --git a/nix/yants/screenshots/enums.png b/nix/yants/screenshots/enums.png
new file mode 100644
index 000000000000..71673e7ab63c
--- /dev/null
+++ b/nix/yants/screenshots/enums.png
Binary files differ
diff --git a/nix/yants/screenshots/functions.png b/nix/yants/screenshots/functions.png
new file mode 100644
index 000000000000..30ed50f8327b
--- /dev/null
+++ b/nix/yants/screenshots/functions.png
Binary files differ
diff --git a/nix/yants/screenshots/nested-structs.png b/nix/yants/screenshots/nested-structs.png
new file mode 100644
index 000000000000..6b03ed65ceb7
--- /dev/null
+++ b/nix/yants/screenshots/nested-structs.png
Binary files differ
diff --git a/nix/yants/screenshots/simple.png b/nix/yants/screenshots/simple.png
new file mode 100644
index 000000000000..05a302cc6b9d
--- /dev/null
+++ b/nix/yants/screenshots/simple.png
Binary files differ
diff --git a/nix/yants/screenshots/structs.png b/nix/yants/screenshots/structs.png
new file mode 100644
index 000000000000..fcbcf6415fad
--- /dev/null
+++ b/nix/yants/screenshots/structs.png
Binary files differ
diff --git a/nix/yants/tests/default.nix b/nix/yants/tests/default.nix
new file mode 100644
index 000000000000..0c7ec2418802
--- /dev/null
+++ b/nix/yants/tests/default.nix
@@ -0,0 +1,158 @@
+{ depot, pkgs, ... }:
+
+with depot.nix.yants;
+
+# Note: Derivations are not included in the tests below as they cause
+# issues with deepSeq.
+
+let
+
+  inherit (depot.nix.runTestsuite)
+    runTestsuite
+    it
+    assertEq
+    assertThrows
+    assertDoesNotThrow
+    ;
+
+  # this derivation won't throw if evaluated with deepSeq,
+  # unlike most things even remotely related to nixpkgs
+  trivialDerivation = derivation {
+    name = "trivial-derivation";
+    inherit (pkgs.stdenv) system;
+    builder = "/bin/sh";
+    args = [ "-c" "echo hello > $out" ];
+  };
+
+  testPrimitives = it "checks that all primitive types match" [
+    (assertDoesNotThrow "unit type" (unit { }))
+    (assertDoesNotThrow "int type" (int 15))
+    (assertDoesNotThrow "bool type" (bool false))
+    (assertDoesNotThrow "float type" (float 13.37))
+    (assertDoesNotThrow "string type" (string "Hello!"))
+    (assertDoesNotThrow "function type" (function (x: x * 2)))
+    (assertDoesNotThrow "path type" (path /nix))
+    (assertDoesNotThrow "derivation type" (drv trivialDerivation))
+  ];
+
+  testPoly = it "checks that polymorphic types work as intended" [
+    (assertDoesNotThrow "option type" (option int null))
+    (assertDoesNotThrow "list type" (list string [ "foo" "bar" ]))
+    (assertDoesNotThrow "either type" (either int float 42))
+  ];
+
+  # Test that structures work as planned.
+  person = struct "person" {
+    name = string;
+    age = int;
+
+    contact = option (struct {
+      email = string;
+      phone = option string;
+    });
+  };
+
+  testStruct = it "checks that structures work as intended" [
+    (assertDoesNotThrow "person struct" (person {
+      name = "Brynhjulf";
+      age = 42;
+      contact.email = "brynhjulf@yants.nix";
+    }))
+  ];
+
+  # Test enum definitions & matching
+  colour = enum "colour" [ "red" "blue" "green" ];
+  colourMatcher = {
+    red = "It is in fact red!";
+    blue = "It should not be blue!";
+    green = "It should not be green!";
+  };
+
+  testEnum = it "checks enum definitions and matching" [
+    (assertEq "enum is matched correctly"
+      "It is in fact red!"
+      (colour.match "red" colourMatcher))
+    (assertThrows "out of bounds enum fails"
+      (colour.match "alpha" (colourMatcher // {
+        alpha = "This should never happen";
+      }))
+    )
+  ];
+
+  # Test sum type definitions
+  creature = sum "creature" {
+    human = struct {
+      name = string;
+      age = option int;
+    };
+
+    pet = enum "pet" [ "dog" "lizard" "cat" ];
+  };
+  some-human = creature {
+    human = {
+      name = "Brynhjulf";
+      age = 42;
+    };
+  };
+
+  testSum = it "checks sum types definitions and matching" [
+    (assertDoesNotThrow "creature sum type" some-human)
+    (assertEq "sum type is matched correctly"
+      "It's a human named Brynhjulf"
+      (creature.match some-human {
+        human = v: "It's a human named ${v.name}";
+        pet = v: "It's not supposed to be a pet!";
+      })
+    )
+  ];
+
+  # Test curried function definitions
+  func = defun [ string int string ]
+    (name: age: "${name} is ${toString age} years old");
+
+  testFunctions = it "checks function definitions" [
+    (assertDoesNotThrow "function application" (func "Brynhjulf" 42))
+  ];
+
+  # Test that all types are types.
+  assertIsType = name: t:
+    assertDoesNotThrow "${name} is a type" (type t);
+  testTypes = it "checks that all types are types" [
+    (assertIsType "any" any)
+    (assertIsType "bool" bool)
+    (assertIsType "drv" drv)
+    (assertIsType "float" float)
+    (assertIsType "int" int)
+    (assertIsType "string" string)
+    (assertIsType "path" path)
+
+    (assertIsType "attrs int" (attrs int))
+    (assertIsType "eitherN [ ... ]" (eitherN [ int string bool ]))
+    (assertIsType "either int string" (either int string))
+    (assertIsType "enum [ ... ]" (enum [ "foo" "bar" ]))
+    (assertIsType "list string" (list string))
+    (assertIsType "option int" (option int))
+    (assertIsType "option (list string)" (option (list string)))
+    (assertIsType "struct { ... }" (struct { a = int; b = option string; }))
+    (assertIsType "sum { ... }" (sum { a = int; b = option string; }))
+  ];
+
+  testRestrict = it "checks restrict types" [
+    (assertDoesNotThrow "< 42" ((restrict "< 42" (i: i < 42) int) 25))
+    (assertDoesNotThrow "list length < 3"
+      ((restrict "not too long" (l: builtins.length l < 3) (list int)) [ 1 2 ]))
+    (assertDoesNotThrow "list eq 5"
+      (list (restrict "eq 5" (v: v == 5) any) [ 5 5 5 ]))
+  ];
+
+in
+runTestsuite "yants" [
+  testPrimitives
+  testPoly
+  testStruct
+  testEnum
+  testSum
+  testFunctions
+  testTypes
+  testRestrict
+]