Diffstat (limited to 'tools')
-rw-r--r--  tools/cheddar/.gitignore | 1
-rw-r--r--  tools/cheddar/.skip-subtree | 0
-rw-r--r--  tools/cheddar/Cargo.lock | 1194
-rw-r--r--  tools/cheddar/Cargo.toml | 18
-rw-r--r--  tools/cheddar/README.md | 21
-rw-r--r--  tools/cheddar/build.rs | 55
-rw-r--r--  tools/cheddar/default.nix | 23
-rw-r--r--  tools/cheddar/src/bin/cheddar.rs | 134
-rw-r--r--  tools/cheddar/src/lib.rs | 339
-rw-r--r--  tools/cheddar/src/tests.rs | 105
-rw-r--r--  tools/crfo-approve.nix | 52
-rw-r--r--  tools/depot-scanner/OWNERS | 3
-rw-r--r--  tools/depot-scanner/default.nix | 18
-rw-r--r--  tools/depot-scanner/depot_scanner.proto | 46
-rw-r--r--  tools/depot-scanner/go.mod | 3
-rw-r--r--  tools/depot-scanner/main.go | 227
-rw-r--r--  tools/depotfmt.nix | 61
-rw-r--r--  tools/emacs-pkgs/buildEmacsPackage.nix | 38
-rw-r--r--  tools/emacs-pkgs/defzone/defzone.el | 60
-rw-r--r--  tools/emacs-pkgs/defzone/example.el | 45
-rw-r--r--  tools/emacs-pkgs/dottime/default.nix | 7
-rw-r--r--  tools/emacs-pkgs/dottime/dottime.el | 81
-rw-r--r--  tools/emacs-pkgs/nix-util/default.nix | 8
-rw-r--r--  tools/emacs-pkgs/nix-util/nix-util.el | 69
-rw-r--r--  tools/emacs-pkgs/notable/OWNERS | 2
-rw-r--r--  tools/emacs-pkgs/notable/default.nix | 17
-rw-r--r--  tools/emacs-pkgs/notable/notable.el | 251
-rw-r--r--  tools/emacs-pkgs/passively/OWNERS | 3
-rw-r--r--  tools/emacs-pkgs/passively/README.md | 76
-rw-r--r--  tools/emacs-pkgs/passively/default.nix | 8
-rw-r--r--  tools/emacs-pkgs/passively/passively.el | 121
-rw-r--r--  tools/emacs-pkgs/term-switcher/default.nix | 8
-rw-r--r--  tools/emacs-pkgs/term-switcher/term-switcher.el | 57
-rw-r--r--  tools/emacs-pkgs/tvl/OWNERS | 3
-rw-r--r--  tools/emacs-pkgs/tvl/default.nix | 8
-rw-r--r--  tools/emacs-pkgs/tvl/tvl.el | 222
-rw-r--r--  tools/eprintf.nix | 15
-rw-r--r--  tools/gerrit-cli.nix | 13
-rw-r--r--  tools/gerrit-update.nix | 34
-rw-r--r--  tools/hash-password.nix | 7
-rw-r--r--  tools/magrathea/default.nix | 23
-rw-r--r--  tools/magrathea/mg.scm | 316
-rw-r--r--  tools/nixery/.skip-subtree | 1
-rw-r--r--  tools/nixery/default.nix | 3
-rw-r--r--  tools/nsfv-setup/default.nix | 29
-rw-r--r--  tools/perf-flamegraph.nix | 12
-rw-r--r--  tools/rust-crates-advisory/OWNERS | 4
-rw-r--r--  tools/rust-crates-advisory/check-security-advisory.rs | 119
-rw-r--r--  tools/rust-crates-advisory/default.nix | 200
-rw-r--r--  tools/rust-crates-advisory/format-audit-result.jq | 75
-rw-r--r--  tools/tvlc/OWNERS | 3
-rw-r--r--  tools/tvlc/common.sh | 33
-rw-r--r--  tools/tvlc/default.nix | 51
-rwxr-xr-x  tools/tvlc/tvlc-new | 103
54 files changed, 4424 insertions(+), 1 deletion(-)
diff --git a/tools/cheddar/.gitignore b/tools/cheddar/.gitignore
new file mode 100644
index 000000000000..2f7896d1d136
--- /dev/null
+++ b/tools/cheddar/.gitignore
@@ -0,0 +1 @@
+target/
diff --git a/tools/cheddar/.skip-subtree b/tools/cheddar/.skip-subtree
new file mode 100644
index 000000000000..e69de29bb2d1
--- /dev/null
+++ b/tools/cheddar/.skip-subtree
diff --git a/tools/cheddar/Cargo.lock b/tools/cheddar/Cargo.lock
new file mode 100644
index 000000000000..1312d436a36c
--- /dev/null
+++ b/tools/cheddar/Cargo.lock
@@ -0,0 +1,1194 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "adler"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe"
+
+[[package]]
+name = "adler32"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aae1277d39aeec15cb388266ecc24b11c80469deae6067e17a1a7aa9e5c1f234"
+
+[[package]]
+name = "aho-corasick"
+version = "0.7.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "alloc-no-stdlib"
+version = "2.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "35ef4730490ad1c4eae5c4325b2a95f521d023e5c885853ff7aca0a6a1631db3"
+
+[[package]]
+name = "alloc-stdlib"
+version = "0.2.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "697ed7edc0f1711de49ce108c541623a0af97c6c60b2f6e2b65229847ac843c2"
+dependencies = [
+ "alloc-no-stdlib",
+]
+
+[[package]]
+name = "ansi_term"
+version = "0.12.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "ascii"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bbf56136a5198c7b01a49e3afcbef6cf84597273d298f54432926024107b0109"
+
+[[package]]
+name = "atty"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "hermit-abi",
+ "libc",
+ "winapi",
+]
+
+[[package]]
+name = "autocfg"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
+
+[[package]]
+name = "base64"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd"
+
+[[package]]
+name = "bincode"
+version = "1.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b1f45e9417d87227c7a56d22e471c6206462cba514c7590c09aff4cf6d1ddcad"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "block-buffer"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b"
+dependencies = [
+ "block-padding",
+ "byte-tools",
+ "byteorder",
+ "generic-array",
+]
+
+[[package]]
+name = "block-padding"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5"
+dependencies = [
+ "byte-tools",
+]
+
+[[package]]
+name = "brotli"
+version = "3.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+ "brotli-decompressor",
+]
+
+[[package]]
+name = "brotli-decompressor"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59ad2d4653bf5ca36ae797b1f4bb4dbddb60ce49ca4aed8a2ce4829f60425b80"
+dependencies = [
+ "alloc-no-stdlib",
+ "alloc-stdlib",
+]
+
+[[package]]
+name = "buf_redux"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b953a6887648bb07a535631f2bc00fbdb2a2216f135552cb3f534ed136b9c07f"
+dependencies = [
+ "memchr",
+ "safemem",
+]
+
+[[package]]
+name = "byte-tools"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7"
+
+[[package]]
+name = "byteorder"
+version = "1.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610"
+
+[[package]]
+name = "cc"
+version = "1.0.73"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
+
+[[package]]
+name = "cfg-if"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
+
+[[package]]
+name = "cheddar"
+version = "0.2.0"
+dependencies = [
+ "clap",
+ "comrak",
+ "lazy_static",
+ "regex",
+ "rouille",
+ "serde",
+ "serde_json",
+ "syntect",
+]
+
+[[package]]
+name = "chrono"
+version = "0.4.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
+dependencies = [
+ "libc",
+ "num-integer",
+ "num-traits",
+ "winapi",
+]
+
+[[package]]
+name = "chunked_transfer"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fff857943da45f546682664a79488be82e69e43c1a7a2307679ab9afb3a66d2e"
+
+[[package]]
+name = "clap"
+version = "2.34.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c"
+dependencies = [
+ "ansi_term",
+ "atty",
+ "bitflags",
+ "strsim",
+ "textwrap",
+ "unicode-width",
+ "vec_map",
+]
+
+[[package]]
+name = "comrak"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b423acba50d5016684beaf643f9991e622633a4c858be6885653071c2da2b0c6"
+dependencies = [
+ "clap",
+ "entities",
+ "lazy_static",
+ "pest",
+ "pest_derive",
+ "regex",
+ "shell-words",
+ "twoway 0.2.2",
+ "typed-arena",
+ "unicode_categories",
+ "xdg",
+]
+
+[[package]]
+name = "crc32fast"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "deflate"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5f95bf05dffba6e6cce8dfbb30def788154949ccd9aed761b472119c21e01c70"
+dependencies = [
+ "adler32",
+ "gzip-header",
+]
+
+[[package]]
+name = "digest"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5"
+dependencies = [
+ "generic-array",
+]
+
+[[package]]
+name = "dirs"
+version = "4.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ca3aa72a6f96ea37bbc5aa912f6788242832f75369bdfdadcb0e38423f100059"
+dependencies = [
+ "dirs-sys",
+]
+
+[[package]]
+name = "dirs-sys"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
+dependencies = [
+ "libc",
+ "redox_users",
+ "winapi",
+]
+
+[[package]]
+name = "entities"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b5320ae4c3782150d900b79807611a59a99fc9a1d61d686faafc24b93fc8d7ca"
+
+[[package]]
+name = "fake-simd"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
+
+[[package]]
+name = "fastrand"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf"
+dependencies = [
+ "instant",
+]
+
+[[package]]
+name = "filetime"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c0408e2626025178a6a7f7ffc05a25bc47103229f19c113755de7bf63816290c"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "redox_syscall",
+ "winapi",
+]
+
+[[package]]
+name = "flate2"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b39522e96686d38f4bc984b9198e3a0613264abaebaff2c5c918bfa6b6da09af"
+dependencies = [
+ "cfg-if",
+ "crc32fast",
+ "libc",
+ "miniz_oxide",
+]
+
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
+[[package]]
+name = "form_urlencoded"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5fc25a87fa4fd2094bffb06925852034d90a17f0d1e05197d4956d3555752191"
+dependencies = [
+ "matches",
+ "percent-encoding",
+]
+
+[[package]]
+name = "generic-array"
+version = "0.12.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd"
+dependencies = [
+ "typenum",
+]
+
+[[package]]
+name = "getrandom"
+version = "0.2.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
+dependencies = [
+ "cfg-if",
+ "libc",
+ "wasi",
+]
+
+[[package]]
+name = "gzip-header"
+version = "0.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0131feb3d3bb2a5a238d8a4d09f6353b7ebfdc52e77bccbf4ea6eaa751dde639"
+dependencies = [
+ "crc32fast",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.11.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
+
+[[package]]
+name = "hermit-abi"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "httparse"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6330e8a36bd8c859f3fa6d9382911fbb7147ec39807f63b923933a247240b9ba"
+
+[[package]]
+name = "idna"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8"
+dependencies = [
+ "matches",
+ "unicode-bidi",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "indexmap"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee"
+dependencies = [
+ "autocfg",
+ "hashbrown",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1aab8fc367588b89dcee83ab0fd66b72b50b72fa1904d7095045ace2b0c81c35"
+
+[[package]]
+name = "lazy_static"
+version = "1.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
+
+[[package]]
+name = "lazycell"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
+
+[[package]]
+name = "libc"
+version = "0.2.123"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cb691a747a7ab48abc15c5b42066eaafde10dc427e3b6ee2a1cf43db04c763bd"
+
+[[package]]
+name = "line-wrap"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9"
+dependencies = [
+ "safemem",
+]
+
+[[package]]
+name = "linked-hash-map"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
+
+[[package]]
+name = "log"
+version = "0.4.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6389c490849ff5bc16be905ae24bc913a9c8892e19b2341dbc175e14c341c2b8"
+dependencies = [
+ "cfg-if",
+]
+
+[[package]]
+name = "maplit"
+version = "1.0.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3e2e65a1a2e43cfcb47a895c4c8b10d1f4a61097f9f254f183aee60cad9c651d"
+
+[[package]]
+name = "matches"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f"
+
+[[package]]
+name = "memchr"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "308cc39be01b73d0d18f82a0e7b2a3df85245f84af96fdddc5d202d27e47b86a"
+
+[[package]]
+name = "mime"
+version = "0.3.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2a60c7ce501c71e03a9c9c0d35b861413ae925bd979cc7a4e30d060069aaac8d"
+
+[[package]]
+name = "mime_guess"
+version = "2.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "4192263c238a5f0d0c6bfd21f336a313a4ce1c450542449ca191bb657b4642ef"
+dependencies = [
+ "mime",
+ "unicase",
+]
+
+[[package]]
+name = "miniz_oxide"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082"
+dependencies = [
+ "adler",
+]
+
+[[package]]
+name = "multipart"
+version = "0.18.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "00dec633863867f29cb39df64a397cdf4a6354708ddd7759f70c7fb51c5f9182"
+dependencies = [
+ "buf_redux",
+ "httparse",
+ "log",
+ "mime",
+ "mime_guess",
+ "quick-error",
+ "rand",
+ "safemem",
+ "tempfile",
+ "twoway 0.1.8",
+]
+
+[[package]]
+name = "num-integer"
+version = "0.1.44"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2cc698a63b549a70bc047073d2949cce27cd1c7b0a4a862d08a8031bc2801db"
+dependencies = [
+ "autocfg",
+ "num-traits",
+]
+
+[[package]]
+name = "num-traits"
+version = "0.2.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a64b1ec5cda2586e284722486d802acf1f7dbdc623e2bfc57e65ca1cd099290"
+dependencies = [
+ "autocfg",
+]
+
+[[package]]
+name = "num_cpus"
+version = "1.13.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19e64526ebdee182341572e50e9ad03965aa510cd94427a4549448f285e957a1"
+dependencies = [
+ "hermit-abi",
+ "libc",
+]
+
+[[package]]
+name = "num_threads"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aba1801fb138d8e85e11d0fc70baf4fe1cdfffda7c6cd34a854905df588e5ed0"
+dependencies = [
+ "libc",
+]
+
+[[package]]
+name = "onig"
+version = "6.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67ddfe2c93bb389eea6e6d713306880c7f6dcc99a75b659ce145d962c861b225"
+dependencies = [
+ "bitflags",
+ "lazy_static",
+ "libc",
+ "onig_sys",
+]
+
+[[package]]
+name = "onig_sys"
+version = "69.7.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5dd3eee045c84695b53b20255bb7317063df090b68e18bfac0abb6c39cf7f33e"
+dependencies = [
+ "cc",
+ "pkg-config",
+]
+
+[[package]]
+name = "opaque-debug"
+version = "0.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2839e79665f131bdb5782e51f2c6c9599c133c6098982a54c794358bf432529c"
+
+[[package]]
+name = "percent-encoding"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e"
+
+[[package]]
+name = "pest"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
+dependencies = [
+ "ucd-trie",
+]
+
+[[package]]
+name = "pest_derive"
+version = "2.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0"
+dependencies = [
+ "pest",
+ "pest_generator",
+]
+
+[[package]]
+name = "pest_generator"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "99b8db626e31e5b81787b9783425769681b347011cc59471e33ea46d2ea0cf55"
+dependencies = [
+ "pest",
+ "pest_meta",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "pest_meta"
+version = "2.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "54be6e404f5317079812fc8f9f5279de376d8856929e21c184ecf6bbd692a11d"
+dependencies = [
+ "maplit",
+ "pest",
+ "sha-1",
+]
+
+[[package]]
+name = "pkg-config"
+version = "0.3.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
+
+[[package]]
+name = "plist"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd39bc6cdc9355ad1dc5eeedefee696bb35c34caf21768741e81826c0bbd7225"
+dependencies = [
+ "base64",
+ "indexmap",
+ "line-wrap",
+ "serde",
+ "time",
+ "xml-rs",
+]
+
+[[package]]
+name = "ppv-lite86"
+version = "0.2.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eb9f9e6e233e5c4a35559a617bf40a4ec447db2e84c20b55a6f83167b7e57872"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ec757218438d5fda206afc041538b2f6d889286160d649a86a24d37e1235afd1"
+dependencies = [
+ "unicode-xid",
+]
+
+[[package]]
+name = "quick-error"
+version = "1.2.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
+
+[[package]]
+name = "quote"
+version = "1.0.18"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "rand"
+version = "0.8.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
+dependencies = [
+ "libc",
+ "rand_chacha",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_chacha"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
+dependencies = [
+ "ppv-lite86",
+ "rand_core",
+]
+
+[[package]]
+name = "rand_core"
+version = "0.6.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
+dependencies = [
+ "getrandom",
+]
+
+[[package]]
+name = "redox_syscall"
+version = "0.2.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42"
+dependencies = [
+ "bitflags",
+]
+
+[[package]]
+name = "redox_users"
+version = "0.4.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
+dependencies = [
+ "getrandom",
+ "redox_syscall",
+ "thiserror",
+]
+
+[[package]]
+name = "regex"
+version = "1.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
+dependencies = [
+ "aho-corasick",
+ "memchr",
+ "regex-syntax",
+]
+
+[[package]]
+name = "regex-syntax"
+version = "0.6.25"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
+
+[[package]]
+name = "remove_dir_all"
+version = "0.5.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "rouille"
+version = "3.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "18b2380c42510ef4a28b5f228a174c801e0dec590103e215e60812e2e2f34d05"
+dependencies = [
+ "base64",
+ "brotli",
+ "chrono",
+ "deflate",
+ "filetime",
+ "multipart",
+ "num_cpus",
+ "percent-encoding",
+ "rand",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "sha1",
+ "threadpool",
+ "time",
+ "tiny_http",
+ "url",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "73b4b750c782965c211b42f022f59af1fbceabdd026623714f104152f1ec149f"
+
+[[package]]
+name = "safemem"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
+
+[[package]]
+name = "same-file"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
+dependencies = [
+ "winapi-util",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ce31e24b01e1e524df96f1c2fdd054405f8d7376249a5110886fb4b658484789"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.136"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08597e7152fcd306f41838ed3e37be9eaeed2b61c42e2117266a554fab4662f9"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.79"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8e8d9fa5c3b304765ce1fd9c4c8a3de2c8db365a5b91be52f186efc675681d95"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "sha-1"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f7d94d0bede923b3cea61f3f1ff57ff8cdfd77b400fb8f9998949e0cf04163df"
+dependencies = [
+ "block-buffer",
+ "digest",
+ "fake-simd",
+ "opaque-debug",
+]
+
+[[package]]
+name = "sha1"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c1da05c97445caa12d05e848c4a4fcbbea29e748ac28f7e80e9b010392063770"
+dependencies = [
+ "sha1_smol",
+]
+
+[[package]]
+name = "sha1_smol"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012"
+
+[[package]]
+name = "shell-words"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde"
+
+[[package]]
+name = "strsim"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
+
+[[package]]
+name = "syn"
+version = "1.0.91"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b683b2b825c8eef438b77c36a06dc262294da3d5a5813fac20da149241dcd44d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-xid",
+]
+
+[[package]]
+name = "syntect"
+version = "4.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8b20815bbe80ee0be06e6957450a841185fcf690fe0178f14d77a05ce2caa031"
+dependencies = [
+ "bincode",
+ "bitflags",
+ "flate2",
+ "fnv",
+ "lazy_static",
+ "lazycell",
+ "onig",
+ "plist",
+ "regex-syntax",
+ "serde",
+ "serde_derive",
+ "serde_json",
+ "walkdir",
+ "yaml-rust",
+]
+
+[[package]]
+name = "tempfile"
+version = "3.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5cdb1ef4eaeeaddc8fbd371e5017057064af0911902ef36b39801f67cc6d79e4"
+dependencies = [
+ "cfg-if",
+ "fastrand",
+ "libc",
+ "redox_syscall",
+ "remove_dir_all",
+ "winapi",
+]
+
+[[package]]
+name = "textwrap"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060"
+dependencies = [
+ "unicode-width",
+]
+
+[[package]]
+name = "thiserror"
+version = "1.0.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "854babe52e4df1653706b98fcfc05843010039b406875930a70e4d9644e5c417"
+dependencies = [
+ "thiserror-impl",
+]
+
+[[package]]
+name = "thiserror-impl"
+version = "1.0.30"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aa32fd3f627f367fe16f893e2597ae3c05020f8bba2666a4e6ea73d377e5714b"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "threadpool"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
+dependencies = [
+ "num_cpus",
+]
+
+[[package]]
+name = "time"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd"
+dependencies = [
+ "itoa",
+ "libc",
+ "num_threads",
+]
+
+[[package]]
+name = "tiny_http"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9ce51b50006056f590c9b7c3808c3bd70f0d1101666629713866c227d6e58d39"
+dependencies = [
+ "ascii",
+ "chrono",
+ "chunked_transfer",
+ "log",
+ "url",
+]
+
+[[package]]
+name = "tinyvec"
+version = "1.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2c1c1d5a42b6245520c249549ec267180beaffcc0615401ac8e31853d4b6d8d2"
+dependencies = [
+ "tinyvec_macros",
+]
+
+[[package]]
+name = "tinyvec_macros"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
+
+[[package]]
+name = "twoway"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59b11b2b5241ba34be09c3cc85a36e56e48f9888862e19cedf23336d35316ed1"
+dependencies = [
+ "memchr",
+]
+
+[[package]]
+name = "twoway"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c57ffb460d7c24cd6eda43694110189030a3d1dfe418416d9468fd1c1d290b47"
+dependencies = [
+ "memchr",
+ "unchecked-index",
+]
+
+[[package]]
+name = "typed-arena"
+version = "1.7.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a9b2228007eba4120145f785df0f6c92ea538f5a3635a612ecf4e334c8c1446d"
+
+[[package]]
+name = "typenum"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
+
+[[package]]
+name = "ucd-trie"
+version = "0.1.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
+
+[[package]]
+name = "unchecked-index"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "eeba86d422ce181a719445e51872fa30f1f7413b62becb52e95ec91aa262d85c"
+
+[[package]]
+name = "unicase"
+version = "2.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50f37be617794602aabbeee0be4f259dc1778fabe05e2d67ee8f79326d5cb4f6"
+dependencies = [
+ "version_check",
+]
+
+[[package]]
+name = "unicode-bidi"
+version = "0.3.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1a01404663e3db436ed2746d9fefef640d868edae3cceb81c3b8d5732fda678f"
+
+[[package]]
+name = "unicode-normalization"
+version = "0.1.19"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9"
+dependencies = [
+ "tinyvec",
+]
+
+[[package]]
+name = "unicode-width"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973"
+
+[[package]]
+name = "unicode-xid"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8ccb82d61f80a663efe1f787a51b16b5a51e3314d6ac365b08639f52387b33f3"
+
+[[package]]
+name = "unicode_categories"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
+
+[[package]]
+name = "url"
+version = "2.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c"
+dependencies = [
+ "form_urlencoded",
+ "idna",
+ "matches",
+ "percent-encoding",
+]
+
+[[package]]
+name = "vec_map"
+version = "0.8.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191"
+
+[[package]]
+name = "version_check"
+version = "0.9.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
+
+[[package]]
+name = "walkdir"
+version = "2.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
+dependencies = [
+ "same-file",
+ "winapi",
+ "winapi-util",
+]
+
+[[package]]
+name = "wasi"
+version = "0.10.2+wasi-snapshot-preview1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
+
+[[package]]
+name = "winapi"
+version = "0.3.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419"
+dependencies = [
+ "winapi-i686-pc-windows-gnu",
+ "winapi-x86_64-pc-windows-gnu",
+]
+
+[[package]]
+name = "winapi-i686-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6"
+
+[[package]]
+name = "winapi-util"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
+dependencies = [
+ "winapi",
+]
+
+[[package]]
+name = "winapi-x86_64-pc-windows-gnu"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
+
+[[package]]
+name = "xdg"
+version = "2.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c4583db5cbd4c4c0303df2d15af80f0539db703fa1c68802d4cbbd2dd0f88f6"
+dependencies = [
+ "dirs",
+]
+
+[[package]]
+name = "xml-rs"
+version = "0.8.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d2d7d3948613f75c98fd9328cfdcc45acc4d360655289d0a7d4ec931392200a3"
+
+[[package]]
+name = "yaml-rust"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85"
+dependencies = [
+ "linked-hash-map",
+]
diff --git a/tools/cheddar/Cargo.toml b/tools/cheddar/Cargo.toml
new file mode 100644
index 000000000000..ee4cbb4d580e
--- /dev/null
+++ b/tools/cheddar/Cargo.toml
@@ -0,0 +1,18 @@
+[package]
+name = "cheddar"
+version = "0.2.0"
+authors = ["Vincent Ambo <mail@tazj.in>"]
+edition = "2018"
+
+[dependencies]
+clap = "2.33"
+comrak = "0.10"
+lazy_static = "1.4"
+rouille = "3.5"
+syntect = "4.5.0"
+serde_json = "1.0"
+regex = "1.4"
+
+[dependencies.serde]
+version = "1.0"
+features = [ "derive" ]
diff --git a/tools/cheddar/README.md b/tools/cheddar/README.md
new file mode 100644
index 000000000000..706f3b62d552
--- /dev/null
+++ b/tools/cheddar/README.md
@@ -0,0 +1,21 @@
+cheddar
+=======
+
+Cheddar is a tiny Rust tool that uses [syntect][] to render source code to
+syntax-highlighted HTML.
+
+Its invocation is compatible with `cgit` filters, i.e. data is read from
+`stdin` and the filename is taken from `argv`:
+
+```shell
+cat README.md | cheddar README.md > README.html
+
+```
+
+In fact, if you are looking at this file on git.tazj.in chances are that it was
+rendered by cheddar.
+
+The name was chosen because I was eyeing a pack of cheddar-flavoured crisps
+while thinking about name selection.
+
+[syntect]: https://github.com/trishume/syntect
diff --git a/tools/cheddar/build.rs b/tools/cheddar/build.rs
new file mode 100644
index 000000000000..f70818d80177
--- /dev/null
+++ b/tools/cheddar/build.rs
@@ -0,0 +1,55 @@
+//! Build script that can be used outside of Nix builds to inject the
+//! BAT_SYNTAXES variable when building in development mode.
+//!
+//! Note that this script assumes that cheddar is in a checkout of the
+//! TVL depot.
+
+use std::process::Command;
+
+static BAT_SYNTAXES: &str = "BAT_SYNTAXES";
+static ERROR_MESSAGE: &str = r#"Failed to build syntax set.
+
+When building during development, cheddar expects to be in a checkout
+of the TVL depot. This is required to automatically build the syntax
+highlighting files that are needed at compile time.
+
+As cheddar cannot automatically detect the location of the syntax
+files, you must set the `BAT_SYNTAXES` environment variable to the
+right path.
+
+The expected syntax files are at //third_party/bat_syntaxes in the
+depot."#;
+
+fn main() {
+    // Do nothing if the variable is already set (e.g. via Nix)
+    if let Ok(_) = std::env::var(BAT_SYNTAXES) {
+        return;
+    }
+
+    // Otherwise ask Nix to build it and inject the result.
+    let output = Command::new("nix-build")
+        .arg("-A")
+        .arg("third_party.bat_syntaxes")
+        // ... assuming cheddar is at //tools/cheddar ...
+        .arg("../..")
+        .output()
+        .expect(ERROR_MESSAGE);
+
+    if !output.status.success() {
+        eprintln!(
+            "{}\nNix output: {}",
+            ERROR_MESSAGE,
+            String::from_utf8_lossy(&output.stderr)
+        );
+        return;
+    }
+
+    let out_path = String::from_utf8(output.stdout)
+        .expect("Nix returned invalid output after building syntax set");
+
+    // Return an instruction to Cargo that will set the environment
+    // variable during rustc calls.
+    //
+    // https://doc.rust-lang.org/cargo/reference/build-scripts.html#cargorustc-envvarvalue
+    println!("cargo:rustc-env={}={}", BAT_SYNTAXES, out_path.trim());
+}
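The `BAT_SYNTAXES` variable emitted by this build script is consumed at compile time on the library side. As a rough sketch of that consuming half (it mirrors the `from_binary(include_bytes!(env!("BAT_SYNTAXES")))` call in `src/lib.rs` further down in this diff), the embedded syntax set ends up being loaded like this:

```rust
// Minimal sketch of the consuming side; the actual code lives in src/lib.rs.
use syntect::dumps::from_binary;
use syntect::parsing::SyntaxSet;

fn load_embedded_syntaxes() -> SyntaxSet {
    // BAT_SYNTAXES must point at a serialised syntect syntax set at compile
    // time, either injected by Nix or by the nix-build fallback above.
    from_binary(include_bytes!(env!("BAT_SYNTAXES")))
}
```

Because the dump is embedded via `include_bytes!`, the resulting binary carries the syntax set with it and has no runtime dependency on the syntax files.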
diff --git a/tools/cheddar/default.nix b/tools/cheddar/default.nix
new file mode 100644
index 000000000000..17efae91ff89
--- /dev/null
+++ b/tools/cheddar/default.nix
@@ -0,0 +1,23 @@
+{ depot, pkgs, ... }:
+
+depot.third_party.naersk.buildPackage {
+  src = ./.;
+  doDoc = false;
+
+  override = x: {
+    # Use our custom bat syntax set, which is everything from upstream,
+    # plus additional languages we care about.
+    BAT_SYNTAXES = "${depot.third_party.bat_syntaxes}";
+  };
+
+  passthru = {
+    # Wrapper for cgit which can't be told to pass arguments to a filter
+    about-filter = pkgs.writeShellScriptBin "cheddar-about" ''
+      exec ${depot.tools.cheddar}/bin/cheddar --about-filter $@
+    '';
+  };
+
+  meta.ci.targets = [
+    "about-filter"
+  ];
+}
diff --git a/tools/cheddar/src/bin/cheddar.rs b/tools/cheddar/src/bin/cheddar.rs
new file mode 100644
index 000000000000..48c504d53590
--- /dev/null
+++ b/tools/cheddar/src/bin/cheddar.rs
@@ -0,0 +1,134 @@
+//! This file defines the binary for cheddar, which can be interacted
+//! with in two different ways:
+//!
+//! 1. As a CLI tool that acts as a cgit filter.
+//! 2. As a long-running HTTP server that handles rendering requests
+//!    (matching the SourceGraph protocol).
+use clap::{App, Arg};
+use rouille::{router, try_or_400, Response};
+use serde::Deserialize;
+use serde_json::json;
+use std::collections::HashMap;
+use std::io;
+
+use cheddar::{format_code, format_markdown, THEMES};
+
+// Server endpoint for rendering the syntax of source code. This
+// replaces the 'syntect_server' component of Sourcegraph.
+fn code_endpoint(request: &rouille::Request) -> rouille::Response {
+    #[derive(Deserialize)]
+    struct SourcegraphQuery {
+        filepath: String,
+        theme: String,
+        code: String,
+    }
+
+    let query: SourcegraphQuery = try_or_400!(rouille::input::json_input(request));
+    let mut buf: Vec<u8> = Vec::new();
+
+    // We don't use syntect with the sourcegraph themes bundled
+    // currently, so let's fall back to something that is kind of
+    // similar (tm).
+    let theme = &THEMES.themes[match query.theme.as_str() {
+        "Sourcegraph (light)" => "Solarized (light)",
+        _ => "Solarized (dark)",
+    }];
+
+    format_code(theme, &mut query.code.as_bytes(), &mut buf, &query.filepath);
+
+    Response::json(&json!({
+        "is_plaintext": false,
+        "data": String::from_utf8_lossy(&buf)
+    }))
+}
+
+// Server endpoint for rendering a Markdown file.
+fn markdown_endpoint(request: &rouille::Request) -> rouille::Response {
+    let mut texts: HashMap<String, String> = try_or_400!(rouille::input::json_input(request));
+
+    for text in texts.values_mut() {
+        let mut buf: Vec<u8> = Vec::new();
+        format_markdown(&mut text.as_bytes(), &mut buf);
+        *text = String::from_utf8_lossy(&buf).to_string();
+    }
+
+    Response::json(&texts)
+}
+
+fn highlighting_server(listen: &str) {
+    println!("Starting syntax highlighting server on '{}'", listen);
+
+    rouille::start_server(listen, move |request| {
+        router!(request,
+                // Markdown rendering route
+                (POST) (/markdown) => {
+                    markdown_endpoint(request)
+                },
+
+                // Code rendering route
+                (POST) (/) => {
+                    code_endpoint(request)
+                },
+
+                _ => {
+                    rouille::Response::empty_404()
+                },
+        )
+    });
+}
+
+fn main() {
+    // Parse the command-line flags passed to cheddar to determine
+    // whether it is running in about-filter mode (`--about-filter`)
+    // and what file extension has been supplied.
+    let matches = App::new("cheddar")
+        .about("TVL's syntax highlighter")
+        .arg(
+            Arg::with_name("about-filter")
+                .help("Run as a cgit about-filter (renders Markdown)")
+                .long("about-filter")
+                .takes_value(false),
+        )
+        .arg(
+            Arg::with_name("sourcegraph-server")
+                .help("Run as a Sourcegraph compatible web-server")
+                .long("sourcegraph-server")
+                .takes_value(false),
+        )
+        .arg(
+            Arg::with_name("listen")
+                .help("Address to listen on")
+                .long("listen")
+                .takes_value(true),
+        )
+        .arg(Arg::with_name("filename").help("File to render").index(1))
+        .get_matches();
+
+    if matches.is_present("sourcegraph-server") {
+        highlighting_server(
+            matches
+                .value_of("listen")
+                .expect("Listening address is required for server mode"),
+        );
+        return;
+    }
+
+    let filename = matches.value_of("filename").expect("filename is required");
+
+    let stdin = io::stdin();
+    let mut in_handle = stdin.lock();
+
+    let stdout = io::stdout();
+    let mut out_handle = stdout.lock();
+
+    if matches.is_present("about-filter") && filename.ends_with(".md") {
+        format_markdown(&mut in_handle, &mut out_handle);
+    } else {
+        format_code(
+            &THEMES.themes["InspiredGitHub"],
+            &mut in_handle,
+            &mut out_handle,
+            filename,
+        );
+    }
+}
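For reference, both server-mode endpoints take small JSON bodies. The sketch below shows what a client might send; the field names come from the `SourcegraphQuery` struct and the `HashMap<String, String>` input above, while the file name, theme and content values are made-up examples:

```rust
use serde_json::json;
use std::collections::HashMap;

fn main() {
    // Body for `POST /` (code highlighting). The theme strings map onto the
    // Solarized fallbacks chosen in `code_endpoint`.
    let code_request = json!({
        "filepath": "example.rs",
        "theme": "Sourcegraph (light)",
        "code": "fn main() {}\n",
    });

    // Body for `POST /markdown`: an arbitrary map of names to Markdown
    // strings; the response carries the same keys with rendered HTML values.
    let mut markdown_request: HashMap<String, String> = HashMap::new();
    markdown_request.insert(
        "readme".to_string(),
        "TODO look at b/123 and cl/456".to_string(),
    );

    println!("{}", code_request);
    println!("{}", serde_json::to_string(&markdown_request).unwrap());
}
```

Either body can then be POSTed to whatever address was passed via `--listen`.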
diff --git a/tools/cheddar/src/lib.rs b/tools/cheddar/src/lib.rs
new file mode 100644
index 000000000000..851bd743db2e
--- /dev/null
+++ b/tools/cheddar/src/lib.rs
@@ -0,0 +1,339 @@
+//! This file implements the rendering logic of cheddar with public
+//! functions for syntax-highlighting code and for turning Markdown
+//! into HTML with TVL extensions.
+use comrak::arena_tree::Node;
+use comrak::nodes::{Ast, AstNode, NodeCodeBlock, NodeHtmlBlock, NodeValue};
+use comrak::{format_html, parse_document, Arena, ComrakOptions};
+use lazy_static::lazy_static;
+use regex::Regex;
+use std::cell::RefCell;
+use std::collections::HashMap;
+use std::ffi::OsStr;
+use std::io::{BufRead, Write};
+use std::path::Path;
+use std::{env, io};
+use syntect::dumps::from_binary;
+use syntect::easy::HighlightLines;
+use syntect::highlighting::{Theme, ThemeSet};
+use syntect::parsing::{SyntaxReference, SyntaxSet};
+use syntect::util::LinesWithEndings;
+
+use syntect::html::{
+    append_highlighted_html_for_styled_line, start_highlighted_html_snippet, IncludeBackground,
+};
+
+#[cfg(test)]
+mod tests;
+
+lazy_static! {
+    // Load syntaxes lazily. Initialisation might not be required in
+    // the case of Markdown rendering (if there are no code blocks
+    // within the document).
+    //
+    // Note that the syntax set is included from the path pointed to
+    // by the BAT_SYNTAXES environment variable at compile time. This
+    // variable is populated by Nix and points to TVL's syntax set.
+    static ref SYNTAXES: SyntaxSet = from_binary(include_bytes!(env!("BAT_SYNTAXES")));
+    pub static ref THEMES: ThemeSet = ThemeSet::load_defaults();
+
+    // Configure Comrak's Markdown rendering with all the bells &
+    // whistles!
+    static ref MD_OPTS: ComrakOptions = {
+        let mut options = ComrakOptions::default();
+
+        // Enable non-standard Markdown features:
+        options.extension.strikethrough = true;
+        options.extension.tagfilter = true;
+        options.extension.table = true;
+        options.extension.autolink = true;
+        options.extension.tasklist = true;
+        options.extension.header_ids = Some(String::new()); // yyeeesss!
+        options.extension.footnotes = true;
+        options.extension.description_lists = true;
+        options.extension.front_matter_delimiter = Some("---".to_owned());
+
+        // Required for tagfilter
+        options.render.unsafe_ = true;
+
+        options
+    };
+
+    // Configures a map of specific filenames to languages, for cases
+    // where the detection by extension or other heuristics fails.
+    static ref FILENAME_OVERRIDES: HashMap<&'static str, &'static str> = {
+        let mut map = HashMap::new();
+        // rules.pl is the canonical name of the submit rule file in
+        // Gerrit, which is written in Prolog.
+        map.insert("rules.pl", "Prolog");
+        map
+    };
+
+    // Default shortlink set used in cheddar (i.e. TVL's shortlinks)
+    static ref TVL_LINKS: Vec<Shortlink> = vec![
+        // TVL shortlinks for bugs and changelists (e.g. b/123,
+        // cl/123). Coincidentally these have the same format, which
+        // makes the initial implementation easy.
+        Shortlink {
+            pattern: Regex::new(r#"\b(?P<type>b|cl)/(?P<dest>\d+)\b"#).unwrap(),
+            replacement: "[$type/$dest](https://$type.tvl.fyi/$dest)",
+        },
+        Shortlink {
+            pattern: Regex::new(r#"\br/(?P<dest>\d+)\b"#).unwrap(),
+            replacement: "[r/$dest](https://code.tvl.fyi/commit/?id=refs/r/$dest)",
+        }
+    ];
+}
+
+/// Structure that describes a single shortlink that should be
+/// automatically highlighted. Highlighting is performed as a string
+/// replacement over input Markdown.
+pub struct Shortlink {
+    /// Short link pattern to recognise. Make sure to anchor these
+    /// correctly.
+    pub pattern: Regex,
+
+    /// Replacement string, as per the documentation of
+    /// [`Regex::replace`].
+    pub replacement: &'static str,
+}
+
+// HTML fragment used when rendering inline blocks in Markdown documents.
+// Emulates the GitHub style (subtle background hue and padding).
+const BLOCK_PRE: &str = "<pre style=\"background-color:#f6f8fa;padding:16px;\">\n";
+
+fn should_continue(res: &io::Result<usize>) -> bool {
+    match *res {
+        Ok(n) => n > 0,
+        Err(_) => false,
+    }
+}
+
+// This function is taken from the Comrak documentation.
+fn iter_nodes<'a, F>(node: &'a AstNode<'a>, f: &F)
+where
+    F: Fn(&'a AstNode<'a>),
+{
+    f(node);
+    for c in node.children() {
+        iter_nodes(c, f);
+    }
+}
+
+// Many of the syntaxes in the syntax list have random capitalisations, which
+// means that name matching for the block info of a code block in HTML fails.
+//
+// Instead, try finding a syntax match by comparing case insensitively (for
+// ASCII characters, anyways).
+fn find_syntax_case_insensitive(info: &str) -> Option<&'static SyntaxReference> {
+    // TODO(tazjin): memoize this lookup
+    SYNTAXES
+        .syntaxes()
+        .iter()
+        .rev()
+        .find(|&s| info.eq_ignore_ascii_case(&s.name))
+}
+
+// Replaces code-block inside of a Markdown AST with HTML blocks rendered by
+// syntect. This enables static (i.e. no JavaScript) syntax highlighting, even
+// of complex languages.
+fn highlight_code_block(code_block: &NodeCodeBlock) -> NodeValue {
+    let theme = &THEMES.themes["InspiredGitHub"];
+    let info = String::from_utf8_lossy(&code_block.info);
+
+    let syntax = find_syntax_case_insensitive(&info)
+        .or_else(|| SYNTAXES.find_syntax_by_extension(&info))
+        .unwrap_or_else(|| SYNTAXES.find_syntax_plain_text());
+
+    let code = String::from_utf8_lossy(&code_block.literal);
+
+    let rendered = {
+        // Write the block preamble manually to get exactly the
+        // desired layout:
+        let mut hl = HighlightLines::new(syntax, theme);
+        let mut buf = BLOCK_PRE.to_string();
+
+        for line in LinesWithEndings::from(&code) {
+            let regions = hl.highlight(line, &SYNTAXES);
+            append_highlighted_html_for_styled_line(&regions[..], IncludeBackground::No, &mut buf);
+        }
+
+        buf.push_str("</pre>");
+        buf
+    };
+
+    let mut block = NodeHtmlBlock::default();
+    block.literal = rendered.into_bytes();
+
+    NodeValue::HtmlBlock(block)
+}
+
+// Supported callout elements (which each have their own distinct rendering):
+enum Callout {
+    Todo,
+    Warning,
+    Question,
+    Tip,
+}
+
+// Determine whether the first child of the supplied node contains a text that
+// should cause a callout section to be rendered.
+fn has_callout<'a>(node: &Node<'a, RefCell<Ast>>) -> Option<Callout> {
+    match node.first_child().map(|c| c.data.borrow()) {
+        Some(child) => match &child.value {
+            NodeValue::Text(text) => {
+                if text.starts_with(b"TODO") {
+                    return Some(Callout::Todo);
+                } else if text.starts_with(b"WARNING") {
+                    return Some(Callout::Warning);
+                } else if text.starts_with(b"QUESTION") {
+                    return Some(Callout::Question);
+                } else if text.starts_with(b"TIP") {
+                    return Some(Callout::Tip);
+                }
+
+                None
+            }
+            _ => None,
+        },
+        _ => None,
+    }
+}
+
+// Replace instances of known shortlinks in the input document with
+// Markdown syntax for a highlighted link.
+fn linkify_shortlinks(mut text: String, shortlinks: &[Shortlink]) -> String {
+    for link in shortlinks {
+        text = link
+            .pattern
+            .replace_all(&text, link.replacement)
+            .to_string();
+    }
+
+    return text;
+}
+
+fn format_callout_paragraph(callout: Callout) -> NodeValue {
+    let class = match callout {
+        Callout::Todo => "cheddar-todo",
+        Callout::Warning => "cheddar-warning",
+        Callout::Question => "cheddar-question",
+        Callout::Tip => "cheddar-tip",
+    };
+
+    let mut block = NodeHtmlBlock::default();
+    block.literal = format!("<p class=\"cheddar-callout {}\">", class).into_bytes();
+    NodeValue::HtmlBlock(block)
+}
+
+pub fn format_markdown_with_shortlinks<R: BufRead, W: Write>(
+    reader: &mut R,
+    writer: &mut W,
+    shortlinks: &[Shortlink],
+) {
+    let document = {
+        let mut buffer = String::new();
+        reader
+            .read_to_string(&mut buffer)
+            .expect("reading should work");
+        buffer
+    };
+
+    let arena = Arena::new();
+    let root = parse_document(&arena, &linkify_shortlinks(document, shortlinks), &MD_OPTS);
+
+    // This node must exist with a lifetime greater than that of the parsed AST
+    // in case callouts are encountered (otherwise insertion into the tree
+    // is not possible).
+    let mut p_close_value = NodeHtmlBlock::default();
+    p_close_value.literal = b"</p>".to_vec();
+
+    let p_close_node = Ast::new(NodeValue::HtmlBlock(p_close_value));
+    let p_close = Node::new(RefCell::new(p_close_node));
+
+    // Special features of Cheddar are implemented by traversing the
+    // arena and reacting on nodes that we might want to modify.
+    iter_nodes(root, &|node| {
+        let mut ast = node.data.borrow_mut();
+        let new = match &ast.value {
+            // Syntax highlighting is implemented by replacing the
+            // code block node with literal HTML.
+            NodeValue::CodeBlock(code) => Some(highlight_code_block(code)),
+
+            NodeValue::Paragraph => {
+                if let Some(callout) = has_callout(node) {
+                    node.insert_after(&p_close);
+                    Some(format_callout_paragraph(callout))
+                } else {
+                    None
+                }
+            }
+            _ => None,
+        };
+
+        if let Some(new_value) = new {
+            ast.value = new_value
+        }
+    });
+
+    format_html(root, &MD_OPTS, writer).expect("Markdown rendering failed");
+}
+
+pub fn format_markdown<R: BufRead, W: Write>(reader: &mut R, writer: &mut W) {
+    format_markdown_with_shortlinks(reader, writer, &TVL_LINKS)
+}
+
+fn find_syntax_for_file(filename: &str) -> &'static SyntaxReference {
+    (*FILENAME_OVERRIDES)
+        .get(filename)
+        .and_then(|name| SYNTAXES.find_syntax_by_name(name))
+        .or_else(|| {
+            Path::new(filename)
+                .extension()
+                .and_then(OsStr::to_str)
+                .and_then(|s| SYNTAXES.find_syntax_by_extension(s))
+        })
+        .unwrap_or_else(|| SYNTAXES.find_syntax_plain_text())
+}
+
+pub fn format_code<R: BufRead, W: Write>(
+    theme: &Theme,
+    reader: &mut R,
+    writer: &mut W,
+    filename: &str,
+) {
+    let mut linebuf = String::new();
+
+    // Get the first line, we might need it for syntax identification.
+    let mut read_result = reader.read_line(&mut linebuf);
+    let syntax = find_syntax_for_file(filename);
+
+    let mut hl = HighlightLines::new(syntax, theme);
+    let (mut outbuf, bg) = start_highlighted_html_snippet(theme);
+
+    // Rather than using the `lines` iterator, read each line manually
+    // and maintain buffer state.
+    //
+    // This is done because the syntax highlighter requires trailing
+    // newlines to be efficient, and those are stripped in the lines
+    // iterator.
+    while should_continue(&read_result) {
+        let regions = hl.highlight(&linebuf, &SYNTAXES);
+
+        append_highlighted_html_for_styled_line(
+            &regions[..],
+            IncludeBackground::IfDifferent(bg),
+            &mut outbuf,
+        );
+
+        // immediately output the current state to avoid keeping
+        // things in memory
+        write!(writer, "{}", outbuf).expect("write should not fail");
+
+        // merry go round again
+        linebuf.clear();
+        outbuf.clear();
+        read_result = reader.read_line(&mut linebuf);
+    }
+
+    writeln!(writer, "</pre>").expect("write should not fail");
+}
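Both public entry points operate on any `BufRead`/`Write` pair, so the library can be exercised outside of the CLI and server modes. A minimal usage sketch follows; the `doc/123` shortlink, its target URL, the input strings and the `main.rs` file name are invented for illustration, while `Shortlink`, `THEMES`, `format_code` and `format_markdown_with_shortlinks` are the items defined above:

```rust
use cheddar::{format_code, format_markdown_with_shortlinks, Shortlink, THEMES};
use regex::Regex;
use std::io::BufReader;

fn main() {
    // Render Markdown with one additional, hypothetical shortlink type on top
    // of the patterns used by TVL_LINKS.
    let shortlinks = vec![Shortlink {
        pattern: Regex::new(r#"\bdoc/(?P<dest>\d+)\b"#).unwrap(),
        replacement: "[doc/$dest](https://example.com/doc/$dest)",
    }];

    let mut markdown_in = BufReader::new("See doc/42 for details.".as_bytes());
    let mut html_out: Vec<u8> = Vec::new();
    format_markdown_with_shortlinks(&mut markdown_in, &mut html_out, &shortlinks);

    // Highlight a code snippet the same way the cgit filter mode does, with
    // the syntax picked from the (made-up) file name.
    let mut code_in = BufReader::new("fn main() {}\n".as_bytes());
    let mut code_out: Vec<u8> = Vec::new();
    format_code(
        &THEMES.themes["InspiredGitHub"],
        &mut code_in,
        &mut code_out,
        "main.rs",
    );

    println!("{}", String::from_utf8_lossy(&html_out));
    println!("{}", String::from_utf8_lossy(&code_out));
}
```

This is essentially what `src/tests.rs` below does for its Markdown expectations, just with a custom shortlink set added.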
diff --git a/tools/cheddar/src/tests.rs b/tools/cheddar/src/tests.rs
new file mode 100644
index 000000000000..c82bba676746
--- /dev/null
+++ b/tools/cheddar/src/tests.rs
@@ -0,0 +1,105 @@
+use super::*;
+use std::io::BufReader;
+
+// Markdown rendering expectation, ignoring leading and trailing
+// whitespace in the input and output.
+fn expect_markdown(input: &str, expected: &str) {
+    let mut input_buf = BufReader::new(input.trim().as_bytes());
+    let mut out_buf: Vec<u8> = vec![];
+    format_markdown(&mut input_buf, &mut out_buf);
+
+    let out_string = String::from_utf8(out_buf).expect("output should be UTF8");
+    assert_eq!(out_string.trim(), expected.trim());
+}
+
+#[test]
+fn renders_simple_markdown() {
+    expect_markdown("hello", "<p>hello</p>\n");
+}
+
+#[test]
+fn renders_callouts() {
+    expect_markdown(
+        "TODO some task.",
+        r#"<p class="cheddar-callout cheddar-todo">
+TODO some task.
+</p>
+"#,
+    );
+
+    expect_markdown(
+        "WARNING: be careful",
+        r#"<p class="cheddar-callout cheddar-warning">
+WARNING: be careful
+</p>
+"#,
+    );
+
+    expect_markdown(
+        "TIP: note the thing",
+        r#"<p class="cheddar-callout cheddar-tip">
+TIP: note the thing
+</p>
+"#,
+    );
+}
+
+#[test]
+fn renders_code_snippets() {
+    expect_markdown(
+        r#"
+Code:
+```nix
+toString 42
+```
+"#,
+        r#"
+<p>Code:</p>
+<pre style="background-color:#f6f8fa;padding:16px;">
+<span style="color:#62a35c;">toString </span><span style="color:#0086b3;">42
+</span></pre>
+"#,
+    );
+}
+
+#[test]
+fn highlights_bug_link() {
+    expect_markdown(
+        "Please look at b/123.",
+        "<p>Please look at <a href=\"https://b.tvl.fyi/123\">b/123</a>.</p>",
+    );
+}
+
+#[test]
+fn highlights_cl_link() {
+    expect_markdown(
+        "Please look at cl/420.",
+        "<p>Please look at <a href=\"https://cl.tvl.fyi/420\">cl/420</a>.</p>",
+    );
+}
+
+#[test]
+fn highlights_r_link() {
+    expect_markdown(
+        "Fixed in r/3268.",
+        "<p>Fixed in <a href=\"https://code.tvl.fyi/commit/?id=refs/r/3268\">r/3268</a>.</p>",
+    );
+}
+
+#[test]
+fn highlights_multiple_shortlinks() {
+    expect_markdown(
+        "Please look at cl/420, b/123.",
+        "<p>Please look at <a href=\"https://cl.tvl.fyi/420\">cl/420</a>, <a href=\"https://b.tvl.fyi/123\">b/123</a>.</p>",
+    );
+
+    expect_markdown(
+        "b/213/cl/213 are different things",
+        "<p><a href=\"https://b.tvl.fyi/213\">b/213</a>/<a href=\"https://cl.tvl.fyi/213\">cl/213</a> are different things</p>",
+    );
+}
+
+#[test]
+fn ignores_invalid_shortlinks() {
+    expect_markdown("b/abc is not a real bug", "<p>b/abc is not a real bug</p>");
+}
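The suite above covers callout, code-snippet and shortlink rendering; it does not touch the plain-text fallback in `find_syntax_for_file`. A hypothetical extra test, not part of this change, might look like the following, assuming syntect's fallback syntax keeps its default name of "Plain Text":

```rust
// Hypothetical addition, not part of the original suite: an unknown file
// extension should fall back to syntect's plain-text syntax.
#[test]
fn falls_back_to_plain_text_syntax() {
    let syntax = find_syntax_for_file("data.some-unknown-extension");
    assert_eq!(syntax.name, "Plain Text");
}
```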
diff --git a/tools/crfo-approve.nix b/tools/crfo-approve.nix
new file mode 100644
index 000000000000..d4cff9e1b238
--- /dev/null
+++ b/tools/crfo-approve.nix
@@ -0,0 +1,52 @@
+# Helper script to run a CRFO approval using depot-interventions.
+#
+# Use as 'crfo-approve $CL_ID $PATCHSET $REAL_USER $ON_BEHALF_OF'.
+#
+# Set credential in GERRIT_TOKEN envvar.
+{ pkgs, ... }:
+
+pkgs.writeShellScriptBin "crfo-approve" ''
+  set -ueo pipefail
+
+  if (($# != 4)) || [[ -z ''${GERRIT_TOKEN-} ]]; then
+    cat >&2 <<'EOF'
+  crfo-approve - Helper script to CRFO approve a TVL CL
+
+  Requires membership in depot-interventions to work.
+
+  Gerrit HTTP credential must be set in GERRIT_TOKEN envvar.
+
+  Usage:
+    crfo-approve $CL_ID $PATCHSET $REAL_USER $ON_BEHALF_OF
+  EOF
+    exit 1
+  fi
+
+  export PATH="${pkgs.lib.makeBinPath [ pkgs.httpie pkgs.jq ]}:''${PATH}"
+
+  readonly CL_ID="''${1}"
+  readonly PATCHSET="''${2}"
+  readonly REAL_USER="''${3}"
+  readonly TOKEN="''${GERRIT_TOKEN}"
+  readonly ON_BEHALF_OF="''${4}"
+  readonly URL="https://cl.tvl.fyi/a/changes/''${CL_ID}/revisions/''${PATCHSET}/review"
+
+  # First we need to find the account ID for the user
+  ACC_RESPONSE=$(http --check-status 'https://cl.tvl.fyi/accounts/' "q==name:''${ON_BEHALF_OF}" | tail -n +2)
+  ACC_LENGTH=$(echo "''${ACC_RESPONSE}" | jq 'length')
+
+  if [[ ''${ACC_LENGTH} -ne 1 ]]; then
+      echo "Did not find a unique account ID for ''${ON_BEHALF_OF}"
+      exit 1
+  fi
+
+  ACC_ID=$(jq -n --argjson response "''${ACC_RESPONSE}" '$response[0]._account_id')
+  echo "using account ID ''${ACC_ID} for ''${ON_BEHALF_OF}"
+
+  http --check-status -a "''${REAL_USER}:''${TOKEN}" POST "''${URL}" \
+    message="CRFO on behalf of ''${ON_BEHALF_OF}" \
+    'labels[Code-Review]=+2' \
+    on_behalf_of="''${ACC_ID}" \
+    "add_to_attention_set[0][user]=''${ACC_ID}" \
+    "add_to_attention_set[0][reason]=CRFO approval through depot-interventions"
+''
diff --git a/tools/depot-scanner/OWNERS b/tools/depot-scanner/OWNERS
new file mode 100644
index 000000000000..cefacea4d049
--- /dev/null
+++ b/tools/depot-scanner/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - riking
diff --git a/tools/depot-scanner/default.nix b/tools/depot-scanner/default.nix
new file mode 100644
index 000000000000..59b6e53f7097
--- /dev/null
+++ b/tools/depot-scanner/default.nix
@@ -0,0 +1,18 @@
+{ depot, pkgs, ... }:
+
+let
+  localProto = depot.nix.buildGo.grpc {
+    name = "code.tvl.fyi/tools/depot-scanner/proto";
+    proto = ./depot_scanner.proto;
+  };
+in
+depot.nix.buildGo.program
+  {
+    name = "depot-scanner";
+    srcs = [
+      ./main.go
+    ];
+    deps = [
+      localProto
+    ];
+  } // { inherit localProto; }
diff --git a/tools/depot-scanner/depot_scanner.proto b/tools/depot-scanner/depot_scanner.proto
new file mode 100644
index 000000000000..5249daebf495
--- /dev/null
+++ b/tools/depot-scanner/depot_scanner.proto
@@ -0,0 +1,46 @@
+// Copyright 2020 TVL
+// SPDX-License-Identifier: MIT
+
+syntax = "proto3";
+package tvl.tools.depot_scanner;
+option go_package = "code.tvl.fyi/tools/depot-scanner/proto";
+
+enum PathType {
+  UNKNOWN = 0;
+  DEPOT = 1;
+  STORE = 2;
+  CORE = 3;
+}
+
+message ScanRequest {
+  // Which revision of the depot
+  string revision = 1;
+  string attr = 2;
+  // Optionally, the attr to evaluate can be provided as a path to a folder or a
+  // .nix file.  This is used by the HTTP service.
+  string attrAsPath = 3;
+}
+
+message ScanResponse {
+  repeated string depotPath = 1;
+  repeated string nixStorePath = 2;
+  repeated string corePkgsPath = 4;
+  repeated string otherPath = 3;
+
+  bytes derivation = 5;
+}
+
+message ArchiveRequest {
+  repeated string depotPath = 1;
+}
+
+message ArchiveChunk {
+  bytes chunk = 1;
+}
+
+service DepotScanService {
+  rpc Scan(ScanRequest) returns (ScanResponse);
+
+  rpc MakeArchive(ArchiveRequest) returns (stream ArchiveChunk);
+}
+
diff --git a/tools/depot-scanner/go.mod b/tools/depot-scanner/go.mod
new file mode 100644
index 000000000000..bdd22fc1ef01
--- /dev/null
+++ b/tools/depot-scanner/go.mod
@@ -0,0 +1,3 @@
+module code.tvl.fyi/tools/depot-scanner
+
+go 1.14
diff --git a/tools/depot-scanner/main.go b/tools/depot-scanner/main.go
new file mode 100644
index 000000000000..9171587be2b9
--- /dev/null
+++ b/tools/depot-scanner/main.go
@@ -0,0 +1,227 @@
+package main
+
+import (
+	"bufio"
+	"flag"
+	"fmt"
+	"io"
+	"os"
+	"os/exec"
+	"strings"
+
+	pb "code.tvl.fyi/tools/depot-scanner/proto"
+)
+
+var nixInstantiatePath = flag.String("nix-bin", "/run/current-system/sw/bin/nix-instantiate", "path to nix-instantiate")
+var depotRoot = flag.String("depot", envOr("DEPOT_ROOT", "/depot/"), "path to tvl.fyi depot at current canon")
+var nixStoreRoot = flag.String("store-path", "/nix/store/", "prefix for all valid nix store paths")
+
+var modeFlag = flag.String("mode", modeArchive, "operation mode. valid values: tar, print")
+var onlyFlag = flag.String("only", "", "only enable the listed output types, comma separated. valid values: DEPOT, STORE, CORE, UNKNOWN")
+var relativeFlag = flag.Bool("relpath", false, "when printing paths, print them relative to the root of their path type")
+
+const (
+	modeArchive = "tar"
+	modePrint   = "print"
+)
+
+const (
+	// String that identifies a path as belonging to nix corepkgs.
+	corePkgsString = "/share/nix/corepkgs/"
+
+	depotTraceString = "trace: depot-scan: "
+)
+
+type fileScanType int
+
+const (
+	unknownPath fileScanType = iota
+	depotPath
+	nixStorePath
+	corePkgsPath
+)
+
+func launchNix(attr string) (*exec.Cmd, io.ReadCloser, io.ReadCloser, error) {
+	cmd := exec.Command(*nixInstantiatePath, "--trace-file-access", "-A", attr)
+	stdout, err := cmd.StdoutPipe()
+	if err != nil {
+		return nil, nil, nil, err
+	}
+	stderr, err := cmd.StderrPipe()
+	if err != nil {
+		stdout.Close()
+		return nil, nil, nil, err
+	}
+
+	err = cmd.Start()
+	if err != nil {
+		stdout.Close()
+		stderr.Close()
+		return nil, nil, nil, err
+	}
+
+	return cmd, stdout, stderr, nil
+}
+
+func categorizePath(path string) fileScanType {
+	if strings.HasPrefix(path, *nixStoreRoot) {
+		if strings.Contains(path, corePkgsString) {
+			return corePkgsPath
+		}
+		return nixStorePath
+	} else if strings.HasPrefix(path, *depotRoot) {
+		return depotPath
+	} else if strings.Contains(path, corePkgsString) {
+		return corePkgsPath
+	}
+	return unknownPath
+}
+
+func addPath(path string, out map[fileScanType]map[string]struct{}) {
+	cat := categorizePath(path)
+	if out[cat] == nil {
+		out[cat] = make(map[string]struct{})
+	}
+
+	out[cat][path] = struct{}{}
+}
+
+func consumeOutput(stdout, stderr io.ReadCloser) (map[fileScanType]map[string]struct{}, string, error) {
+	result := make(map[fileScanType]map[string]struct{})
+
+	scanner := bufio.NewScanner(stderr)
+	for scanner.Scan() {
+		line := scanner.Text()
+		if strings.HasPrefix(line, depotTraceString) {
+			addPath(strings.TrimPrefix(line, depotTraceString), result)
+		} else {
+			// print remaining stderr output of nix-instantiate
+			// to prevent silent swallowing of possible important
+			// error messages (e.g. about command line interface changes)
+			fmt.Fprintf(os.Stderr, "nix-inst> %s\n", line)
+		}
+	}
+	if scanner.Err() != nil {
+		return nil, "", scanner.Err()
+	}
+
+	// Get derivation path
+	derivPath := ""
+	scanner = bufio.NewScanner(stdout)
+	for scanner.Scan() {
+		line := scanner.Text()
+		if strings.HasPrefix(line, *nixStoreRoot) {
+			derivPath = line
+			// consume the rest of the output
+		}
+	}
+	if scanner.Err() != nil {
+		return nil, "", scanner.Err()
+	}
+
+	return result, derivPath, nil
+}
+
+func main() {
+	flag.Parse()
+
+	checkDepotRoot()
+
+	enabledPathTypes := make(map[pb.PathType]bool, 4)
+	if len(*onlyFlag) > 0 {
+		enabledOutputs := strings.Split(*onlyFlag, ",")
+		for _, v := range enabledOutputs {
+			i, ok := pb.PathType_value[strings.ToUpper(v)]
+			if !ok {
+				fmt.Fprintln(os.Stderr, "warning: unrecognized PathType name:", v)
+				continue
+			}
+			enabledPathTypes[pb.PathType(i)] = true
+		}
+	} else {
+		// Default
+		enabledPathTypes = map[pb.PathType]bool{
+			pb.PathType_UNKNOWN: true,
+			pb.PathType_DEPOT:   true,
+			pb.PathType_STORE:   true,
+			pb.PathType_CORE:    true,
+		}
+	}
+
+	cmd, stdout, stderr, err := launchNix(flag.Arg(0))
+	if err != nil {
+		panic(fmt.Errorf("could not launch nix: %w", err))
+	}
+	results, derivPath, err := consumeOutput(stdout, stderr)
+	if err != nil {
+		err2 := cmd.Wait()
+		if err2 != nil {
+			panic(fmt.Errorf("nix-instantiate failed: %w\nadditionally, while reading output: %v", err2, err))
+		}
+		panic(fmt.Errorf("problem reading nix output: %w", err))
+	}
+	err = cmd.Wait()
+	if err != nil {
+		panic(fmt.Errorf("nix-instantiate failed: %w", err))
+	}
+
+	_ = derivPath
+
+	if *modeFlag == "print" {
+		if enabledPathTypes[pb.PathType_STORE] {
+			for k := range results[nixStorePath] {
+				if *relativeFlag {
+					k = strings.TrimPrefix(k, *nixStoreRoot)
+					k = strings.TrimPrefix(k, "/")
+				}
+				fmt.Println(k)
+			}
+		}
+		if enabledPathTypes[pb.PathType_DEPOT] {
+			for k := range results[depotPath] {
+				if *relativeFlag {
+					k = strings.TrimPrefix(k, *depotRoot)
+					k = strings.TrimPrefix(k, "/")
+				}
+				fmt.Println(k)
+			}
+		}
+		if enabledPathTypes[pb.PathType_CORE] {
+			for k := range results[corePkgsPath] {
+				// TODO relativeFlag
+				fmt.Println(k)
+			}
+		}
+		if enabledPathTypes[pb.PathType_UNKNOWN] {
+			for k := range results[unknownPath] {
+				fmt.Println(k)
+			}
+		}
+	} else {
+		panic("unimplemented")
+	}
+}
+
+func envOr(envVar, def string) string {
+	v := os.Getenv(envVar)
+	if v == "" {
+		return def
+	}
+	return v
+}
+
+func checkDepotRoot() {
+	if *depotRoot == "" {
+		fmt.Fprintln(os.Stderr, "error: DEPOT_ROOT / -depot not set")
+		os.Exit(2)
+	}
+	_, err := os.Stat(*depotRoot)
+	if os.IsNotExist(err) {
+		fmt.Fprintf(os.Stderr, "error: %q does not exist\ndid you forget to set DEPOT_ROOT / -depot?\n", *depotRoot)
+		os.Exit(1)
+	} else if err != nil {
+		fmt.Fprintf(os.Stderr, "error: could not stat %q: %v\n", *depotRoot, err)
+		os.Exit(1)
+	}
+
+}
diff --git a/tools/depotfmt.nix b/tools/depotfmt.nix
new file mode 100644
index 000000000000..17c05d0fc405
--- /dev/null
+++ b/tools/depotfmt.nix
@@ -0,0 +1,61 @@
+# Builds treefmt for depot, with a hardcoded configuration that
+# includes the right paths to formatters.
+{ depot, pkgs, ... }:
+
+let
+  # terraform fmt can't handle multiple paths at once, but treefmt
+  # expects this
+  terraformat = pkgs.writeShellScript "terraformat" ''
+    echo "$@" | xargs -n1 ${pkgs.terraform}/bin/terraform fmt
+  '';
+
+  config = pkgs.writeText "depot-treefmt-config" ''
+    [formatter.go]
+    command = "${pkgs.go}/bin/gofmt"
+    options = [ "-w" ]
+    includes = ["*.go"]
+
+    [formatter.tf]
+    command = "${terraformat}"
+    includes = [ "*.tf" ]
+
+    [formatter.nix]
+    command = "${pkgs.nixpkgs-fmt}/bin/nixpkgs-fmt"
+    includes = [ "*.nix" ]
+    excludes = [
+      "third_party/nix/tests/*",
+      "third_party/nix/src/tests/*",
+      "tools/nixery/*"
+    ]
+
+    [formatter.rust]
+    command = "${pkgs.rustfmt}/bin/rustfmt"
+    includes = [ "*.rs" ]
+    excludes = [
+      "users/tazjin/*",
+    ]
+  '';
+
+  # helper tool for formatting the depot interactively
+  depotfmt = pkgs.writeShellScriptBin "depotfmt" ''
+    exec ${pkgs.treefmt}/bin/treefmt "''${@}" \
+      --config-file ${config} \
+      --tree-root "$(${pkgs.git}/bin/git rev-parse --show-toplevel)"
+  '';
+
+  # wrapper script for running formatting checks in CI
+  check = pkgs.writeShellScript "depotfmt-check" ''
+    ${pkgs.treefmt}/bin/treefmt \
+      --clear-cache \
+      --fail-on-change \
+      --config-file ${config} \
+      --tree-root .
+  '';
+in
+depotfmt.overrideAttrs (_: {
+  passthru.meta.ci.extraSteps.check = {
+    label = "depot formatting check";
+    command = check;
+    alwaysRun = true;
+  };
+})
diff --git a/tools/emacs-pkgs/buildEmacsPackage.nix b/tools/emacs-pkgs/buildEmacsPackage.nix
new file mode 100644
index 000000000000..990b53b763b0
--- /dev/null
+++ b/tools/emacs-pkgs/buildEmacsPackage.nix
@@ -0,0 +1,38 @@
+# Builder for depot-internal Emacs packages. Packages built using this
+# builder are added into the Emacs packages fixpoint under
+# `emacsPackages.tvlPackages`, which in turn makes it possible to use
+# them with special Emacs features like native compilation.
+#
+# Arguments passed to the builder are the same as
+# emacsPackages.trivialBuild, except:
+#
+# * packageRequires is not used
+#
+# * externalRequires takes a selection function for packages from
+#   emacsPackages
+#
+# * internalRequires takes other depot packages
+{ pkgs, ... }:
+
+buildArgs:
+
+pkgs.callPackage
+  ({ emacsPackages }:
+
+  let
+    # Select external dependencies from the emacsPackages set
+    externalDeps = (buildArgs.externalRequires or (_: [ ])) emacsPackages;
+
+    # Override emacsPackages for depot-internal packages
+    internalDeps = map (p: p.override { inherit emacsPackages; })
+      (buildArgs.internalRequires or [ ]);
+
+    trivialBuildArgs = builtins.removeAttrs buildArgs [
+      "externalRequires"
+      "internalRequires"
+    ] // {
+      packageRequires = externalDeps ++ internalDeps;
+    };
+  in
+  emacsPackages.trivialBuild trivialBuildArgs)
+{ }
diff --git a/tools/emacs-pkgs/defzone/defzone.el b/tools/emacs-pkgs/defzone/defzone.el
new file mode 100644
index 000000000000..ffd359e5ff83
--- /dev/null
+++ b/tools/emacs-pkgs/defzone/defzone.el
@@ -0,0 +1,60 @@
+;;; defzone.el --- Generate zone files from Elisp  -*- lexical-binding: t; -*-
+
+(require 'dash)
+(require 'dash-functional)
+(require 's)
+
+(defun record-to-record (zone record &optional subdomain)
+  "Evaluate a record definition and turn it into a zone file
+  record in ZONE, optionally prefixed with SUBDOMAIN."
+
+  (cl-labels ((plist->alist (plist)
+                            (when plist
+                              (cons
+                               (cons (car plist) (cadr plist))
+                               (plist->alist (cddr plist))))))
+    (let ((name (if subdomain (s-join "." (list subdomain zone)) zone)))
+      (pcase record
+        ;; SOA RDATA (RFC 1035; 3.3.13)
+        ((and `(SOA . (,ttl . ,keys))
+              (let (map (:mname mname) (:rname rname) (:serial serial)
+                        (:refresh refresh) (:retry retry) (:expire expire)
+                        (:minimum min))
+                (plist->alist keys)))
+         (if-let ((missing (-filter #'null (list mname rname serial
+                                                 refresh retry expire min))))
+             (error "Missing fields in SOA record: %s" missing)
+             (format "%s %s IN SOA %s %s %s %s %s %s %s"
+                     name ttl mname rname serial refresh retry expire min)))
+
+        (`(NS . (,ttl . ,targets))
+         (->> targets
+              (-map (lambda (target) (format "%s %s IN NS %s" name ttl target)))
+              (s-join "\n")))
+
+        (`(MX . (,ttl . ,pairs))
+         (->> pairs
+              (-map (-lambda ((preference . exchange))
+                      (format "%s %s IN MX %s %s" name ttl preference exchange)))
+              (s-join "\n")))
+
+        (`(TXT ,ttl ,text) (format "%s %s IN TXT %s" name ttl (prin1-to-string text)))
+
+        (`(A . (,ttl . ,ips))
+         (->> ips
+              (-map (lambda (ip) (format "%s %s IN A %s" name ttl ip)))
+              (s-join "\n")))
+
+        (`(CNAME ,ttl ,target) (format "%s %s IN CNAME %s" name ttl target))
+
+        ((and `(,sub . ,records)
+              (guard (stringp sub)))
+         (s-join "\n" (-map (lambda (r) (record-to-record zone r sub)) records)))
+
+        (_ (error "Invalid record definition: %s" record))))))
+
+(defmacro defzone (fqdn &rest records)
+  "Generate zone file for the zone at FQDN from a simple DSL."
+  (declare (indent defun))
+
+  `(s-join "\n" (-map (lambda (r) (record-to-record ,fqdn r)) (quote ,records))))
diff --git a/tools/emacs-pkgs/defzone/example.el b/tools/emacs-pkgs/defzone/example.el
new file mode 100644
index 000000000000..e9c86d25eec8
--- /dev/null
+++ b/tools/emacs-pkgs/defzone/example.el
@@ -0,0 +1,45 @@
+;;; example.el - usage example for defzone macro
+
+(defzone "tazj.in."
+  (SOA 21600
+       :mname "ns-cloud-a1.googledomains.com."
+       :rname "cloud-dns-hostmaster.google.com."
+       :serial 123
+       :refresh 21600
+       :retry 3600
+       :expire 1209600
+       :minimum 300)
+
+  (NS 21600
+      "ns-cloud-a1.googledomains.com."
+      "ns-cloud-a2.googledomains.com."
+      "ns-cloud-a3.googledomains.com."
+      "ns-cloud-a4.googledomains.com.")
+
+  (MX 300
+      (1  . "aspmx.l.google.com.")
+      (5  . "alt1.aspmx.l.google.com.")
+      (5  . "alt2.aspmx.l.google.com.")
+      (10 . "alt3.aspmx.l.google.com.")
+      (10 . "alt4.aspmx.l.google.com."))
+
+  (TXT 3600 "google-site-verification=d3_MI1OwD6q2OT42Vvh0I9w2u3Q5KFBu-PieNUE1Fig")
+
+  (A 300 "34.98.120.189")
+
+  ;; Nested record sets are indicated by a list that starts with a
+  ;; string (this is just joined, so you can nest multiple levels at
+  ;; once)
+  ("blog"
+   ;; Blog "storage engine" is in a separate DNS zone
+   (NS 21600
+       "ns-cloud-c1.googledomains.com."
+       "ns-cloud-c2.googledomains.com."
+       "ns-cloud-c3.googledomains.com."
+       "ns-cloud-c4.googledomains.com."))
+
+  ("git"
+   (A 300 "34.98.120.189")
+   (TXT 300 "<3 edef"))
+
+  ("files" (CNAME 300 "c.storage.googleapis.com.")))
diff --git a/tools/emacs-pkgs/dottime/default.nix b/tools/emacs-pkgs/dottime/default.nix
new file mode 100644
index 000000000000..b819e9c14d2c
--- /dev/null
+++ b/tools/emacs-pkgs/dottime/default.nix
@@ -0,0 +1,7 @@
+{ depot, ... }:
+
+depot.tools.emacs-pkgs.buildEmacsPackage {
+  pname = "dottime";
+  version = "1.0";
+  src = ./dottime.el;
+}
diff --git a/tools/emacs-pkgs/dottime/dottime.el b/tools/emacs-pkgs/dottime/dottime.el
new file mode 100644
index 000000000000..2446f6488f32
--- /dev/null
+++ b/tools/emacs-pkgs/dottime/dottime.el
@@ -0,0 +1,81 @@
+;;; dottime.el --- use dottime in the modeline
+;;
+;; Copyright (C) 2019 Google Inc.
+;;
+;; Author: Vincent Ambo <tazjin@google.com>
+;; Version: 1.0
+;; Package-Requires: (cl-lib)
+;;
+;;; Commentary:
+;;
+;; This package changes the display of time in the modeline to use
+;; dottime (see https://dotti.me/) instead of the standard time
+;; display.
+;;
+;; Modeline dottime display is enabled by calling
+;; `dottime-display-mode' and dottime can be used in Lisp code via
+;; `dottime-format'.
+
+(require 'cl-lib)
+(require 'time)
+
+(defun dottime--format-string (&optional offset prefix)
+  "Creates the dottime format string for `format-time-string'
+  based on the local timezone."
+
+  (let* ((offset-sec (or offset (car (current-time-zone))))
+         (offset-hours (/ offset-sec 60 60))
+         (base (concat prefix "%m-%dT%H·%M")))
+    (if (/= offset-hours 0)
+        (concat base (format "%0+3d" offset-hours))
+      base)))
+
+(defun dottime--display-time-update-advice (orig)
+  "Function used as advice to `display-time-update' with a
+  rebound definition of `format-time-string' that renders all
+  timestamps as dottime."
+
+  (cl-letf* ((format-orig (symbol-function 'format-time-string))
+             ((symbol-function 'format-time-string)
+              (lambda (&rest _)
+                (funcall format-orig (dottime--format-string) nil t))))
+    (funcall orig)))
+
+(defun dottime-format (&optional time offset prefix)
+  "Format the given TIME in dottime at OFFSET. If TIME is nil,
+  the current time will be used. PREFIX is prefixed to the format
+  string verbatim.
+
+  OFFSET can be an integer representing an offset in seconds, or
+  the argument can be elided in which case the system time zone
+  is used."
+
+  (format-time-string (dottime--format-string offset prefix) time t))
+
+(defun dottime-display-mode (arg)
+  "Enable time display as dottime. Disables dottime if called
+  with prefix 0 or nil."
+
+  (interactive "p")
+  (if (or (eq arg 0) (eq arg nil))
+      (advice-remove 'display-time-update #'dottime--display-time-update-advice)
+    (advice-add 'display-time-update :around #'dottime--display-time-update-advice))
+  (display-time-update)
+
+  ;; Amend the time display in telega.el to use dottime.
+  ;;
+  ;; This will never display offsets in the chat window, as those are
+  ;; always visible in the modeline anyways.
+  (when (featurep 'telega)
+    (defun telega-ins--dottime-advice (orig timestamp)
+      (let* ((dtime (decode-time timestamp t))
+             (current-ts (time-to-seconds (current-time)))
+             (ctime (decode-time current-ts))
+             (today00 (telega--time-at00 current-ts ctime)))
+        (if (> timestamp today00)
+            (telega-ins (format "%02d·%02d" (nth 2 dtime) (nth 1 dtime)))
+          (funcall orig timestamp))))
+
+    (advice-add 'telega-ins--date :around #'telega-ins--dottime-advice)))
+
+(provide 'dottime)
diff --git a/tools/emacs-pkgs/nix-util/default.nix b/tools/emacs-pkgs/nix-util/default.nix
new file mode 100644
index 000000000000..b167cb964214
--- /dev/null
+++ b/tools/emacs-pkgs/nix-util/default.nix
@@ -0,0 +1,8 @@
+{ depot, ... }:
+
+depot.tools.emacs-pkgs.buildEmacsPackage {
+  pname = "nix-util";
+  version = "1.0";
+  src = ./nix-util.el;
+  externalRequires = epkgs: [ epkgs.s ];
+}
diff --git a/tools/emacs-pkgs/nix-util/nix-util.el b/tools/emacs-pkgs/nix-util/nix-util.el
new file mode 100644
index 000000000000..4ddc81f563d3
--- /dev/null
+++ b/tools/emacs-pkgs/nix-util/nix-util.el
@@ -0,0 +1,69 @@
+;;; nix-util.el --- Utilities for dealing with Nix code. -*- lexical-binding: t; -*-
+;;
+;; Copyright (C) 2019 Google Inc.
+;; Copyright (C) 2022 The TVL Authors
+;;
+;; Author: Vincent Ambo <tazjin@google.com>
+;; Version: 1.0
+;; Package-Requires: (json map s)
+;;
+;;; Commentary:
+;;
+;; This package adds some functionality that I find useful when
+;; working in Nix buffers or programs installed from Nix.
+
+(require 'json)
+(require 'map)
+(require 's)
+
+(defun nix/prefetch-github (owner repo) ; TODO(tazjin): support different branches
+  "Fetch the master branch of a GitHub repository and insert the
+  call to `fetchFromGitHub' at point."
+
+  (interactive "sOwner: \nsRepository: ")
+
+  (let* (;; Keep these vars around for output insertion
+         (point (point))
+         (buffer (current-buffer))
+         (name (concat "github-fetcher/" owner "/" repo))
+         (outbuf (format "*%s*" name))
+         (errbuf (get-buffer-create "*github-fetcher/errors*"))
+         (cleanup (lambda ()
+                    (kill-buffer outbuf)
+                    (kill-buffer errbuf)
+                    (with-current-buffer buffer
+                      (read-only-mode -1))))
+         (prefetch-handler
+          (lambda (_process event)
+            (unwind-protect
+                (pcase event
+                  ("finished\n"
+                   (let* ((json-string (with-current-buffer outbuf
+                                         (buffer-string)))
+                          (result (json-read-from-string json-string)))
+                     (with-current-buffer buffer
+                       (goto-char point)
+                       (map-let (("rev" rev) ("sha256" sha256)) result
+                         (read-only-mode -1)
+                         (insert (format "fetchFromGitHub {
+  owner = \"%s\";
+  repo = \"%s\";
+  rev = \"%s\";
+  sha256 = \"%s\";
+};" owner repo rev sha256))
+                         (indent-region point (point))))))
+                  (_ (with-current-buffer errbuf
+                       (error "Failed to prefetch %s/%s: %s"
+                              owner repo (buffer-string)))))
+              (funcall cleanup)))))
+
+    ;; Fetching happens asynchronously, but we'd like to make sure the
+    ;; point stays in place while that happens.
+    (read-only-mode)
+    (make-process :name name
+                  :buffer outbuf
+                  :command `("nix-prefetch-github" ,owner ,repo)
+                  :stderr errbuf
+                  :sentinel prefetch-handler)))
+
+(provide 'nix-util)
diff --git a/tools/emacs-pkgs/notable/OWNERS b/tools/emacs-pkgs/notable/OWNERS
new file mode 100644
index 000000000000..f7da62ecf709
--- /dev/null
+++ b/tools/emacs-pkgs/notable/OWNERS
@@ -0,0 +1,2 @@
+owners:
+  - tazjin
diff --git a/tools/emacs-pkgs/notable/default.nix b/tools/emacs-pkgs/notable/default.nix
new file mode 100644
index 000000000000..f57b1c66ae3f
--- /dev/null
+++ b/tools/emacs-pkgs/notable/default.nix
@@ -0,0 +1,17 @@
+{ depot, ... }:
+
+depot.tools.emacs-pkgs.buildEmacsPackage rec {
+  pname = "notable";
+  version = "1.0";
+  src = ./notable.el;
+
+  externalRequires = epkgs: with epkgs; [
+    f
+    ht
+    s
+  ];
+
+  internalRequires = [
+    depot.tools.emacs-pkgs.dottime
+  ];
+}
diff --git a/tools/emacs-pkgs/notable/notable.el b/tools/emacs-pkgs/notable/notable.el
new file mode 100644
index 000000000000..4668dd333c99
--- /dev/null
+++ b/tools/emacs-pkgs/notable/notable.el
@@ -0,0 +1,251 @@
+;;; notable.el --- a simple note-taking app -*- lexical-binding: t; -*-
+;;
+;; Copyright (C) 2020 The TVL Contributors
+;;
+;; Author: Vincent Ambo <mail@tazj.in>
+;; Version: 1.0
+;; Package-Requires: (cl-lib dash f rx s subr-x)
+;;
+;;; Commentary:
+;;
+;; This package provides a simple note-taking application which can be
+;; invoked from anywhere in Emacs, with several interactive
+;; note-taking functions included.
+;;
+;; As is tradition for my software, the idea here is to reduce
+;; friction which I see even with tools like `org-capture', because
+;; `org-mode' does a ton of things I don't care about.
+;;
+;; Notable stores its notes in simple JSON files in the folder
+;; specified by `notable-note-dir'.
+
+(require 'cl-lib)
+(require 'dottime)
+(require 'f)
+(require 'ht)
+(require 'rx)
+(require 's)
+(require 'subr-x)
+
+;; User-facing customisation options
+
+(defgroup notable nil
+  "Simple note-taking application."
+  :group 'applications)
+
+;; TODO(tazjin): Use whatever the XDG state dir thing is for these by
+;; default.
+(defcustom notable-note-dir (expand-file-name "~/.notable/")
+  "File path to the directory containing notable's notes."
+  :type 'string
+  :group 'notable)
+
+;; Package internal definitions
+
+(cl-defstruct (notable--note (:constructor notable--make-note))
+  "Structure containing the fields of a single notable note."
+  time    ;; UNIX timestamp at which the note was taken
+  content ;; Textual content of the note
+  )
+
+(defvar notable--note-lock (make-mutex "notable-notes")
+  "Exclusive lock for note operations with shared state.")
+
+(defvar notable--note-regexp
+  (rx "note-"
+      (group (one-or-more (any num)))
+      ".json")
+  "Regular expression to match note file names.")
+
+(defvar notable--next-note
+  (let ((next 0))
+    (dolist (file (f-entries notable-note-dir))
+      (when-let* ((match (string-match notable--note-regexp file))
+                  (id (string-to-number
+                       (match-string 1 file)))
+                  (larger (> id next)))
+        (setq next id)))
+    (+ 1 next))
+  "Next ID to use for notes. Initial value is determined based on
+  the existing notes files.")
+
+(defun notable--serialize-note (note)
+  "Serialise NOTE into JSON format."
+  (check-type note notable--note)
+  (json-serialize (ht ("time" (notable--note-time note))
+                      ("content" (notable--note-content note)))))
+
+(defun notable--deserialize-note (json)
+  "Deserialise JSON into a notable note."
+  (check-type json string)
+  (let ((parsed (json-parse-string json)))
+    (unless (and (ht-contains? parsed "time")
+                 (ht-contains? parsed "content"))
+      (error "Missing required keys in note structure!"))
+    (notable--make-note :time (ht-get parsed "time")
+                        :content (ht-get parsed "content"))))
+
+(defun notable--next-id ()
+  "Return the next note ID and increment the counter."
+  (with-mutex notable--note-lock
+    (let ((id notable--next-note))
+      (setq notable--next-note (+ 1 id))
+      id)))
+
+(defun notable--note-path (id)
+  (check-type id integer)
+  (f-join notable-note-dir (format "note-%d.json" id)))
+
+(defun notable--archive-path (id)
+  (check-type id integer)
+  (f-join notable-note-dir (format "archive-%d.json" id)))
+
+(defun notable--add-note (content)
+  "Add a note with CONTENT to the note store."
+  (let* ((id (notable--next-id))
+         (note (notable--make-note :time (time-convert nil 'integer)
+                                   :content content))
+         (path (notable--note-path id)))
+    (when (f-exists? path) (error "Note file '%s' already exists!" path))
+    (f-write-text (notable--serialize-note note) 'utf-8 path)
+    (message "Saved note %d" id)))
+
+(defun notable--archive-note (id)
+  "Archive the note with ID."
+  (check-type id integer)
+
+  (unless (f-exists? (notable--note-path id))
+    (error "There is no note with ID %d." id))
+
+  (when (f-exists? (notable--archive-path id))
+    (error "Oh no, a note with ID %d has already been archived!" id))
+
+  (f-move (notable--note-path id) (notable--archive-path id))
+  (message "Archived note with ID %d." id))
+
+(defun notable--list-note-ids ()
+  "List all note IDs (not contents) from `notable-note-dir'"
+  (cl-loop for file in (f-entries notable-note-dir)
+           with res = nil
+           if (string-match notable--note-regexp file)
+           do (push (string-to-number (match-string 1 file)) res)
+           finally return res))
+
+(defun notable--get-note (id)
+  (let ((path (notable--note-path id)))
+    (unless (f-exists? path)
+      (error "No note with ID %s in note storage!" id))
+    (notable--deserialize-note (f-read-text path 'utf-8))))
+
+;; Note view buffer implementation
+
+(defvar-local notable--buffer-note nil "The note ID displayed by this buffer.")
+
+(define-derived-mode notable-note-mode fundamental-mode "notable-note"
+  "Major mode displaying a single Notable note."
+  (set (make-local-variable 'scroll-preserve-screen-position) t)
+  (setq truncate-lines t)
+  (setq buffer-read-only t)
+  (setq buffer-undo-list t))
+
+(setq notable-note-mode-map
+      (let ((map (make-sparse-keymap)))
+        (define-key map "q" 'kill-current-buffer)
+        map))
+
+(defun notable--show-note (id)
+  "Display a single note in a separate buffer."
+  (check-type id integer)
+
+  (let ((note (notable--get-note id))
+        (buffer (get-buffer-create (format "*notable: %d*" id)))
+        (inhibit-read-only t))
+    (with-current-buffer buffer
+      (notable-note-mode)
+      (erase-buffer)
+      (setq notable--buffer-note id)
+      (setq header-line-format
+            (format "Note from %s"
+                    (dottime-format
+                     (seconds-to-time (notable--note-time note))))))
+    (switch-to-buffer buffer)
+    (goto-char (point-min))
+    (insert (notable--note-content note))))
+
+(defun notable--show-note-at-point ()
+  (interactive)
+  (notable--show-note (get-text-property (point) 'notable-note-id)))
+
+(defun notable--archive-note-at-point ()
+  (interactive)
+  (notable--archive-note (get-text-property (point) 'notable-note-id)))
+
+;; Note list buffer implementation
+
+(define-derived-mode notable-list-mode fundamental-mode "notable"
+  "Major mode displaying the Notable note list."
+  ;; TODO(tazjin): `imenu' functions?
+
+  (set (make-local-variable 'scroll-preserve-screen-position) t)
+  (setq truncate-lines t)
+  (setq buffer-read-only t)
+  (setq buffer-undo-list t)
+  (hl-line-mode t))
+
+(setq notable-list-mode-map
+      (let ((map (make-sparse-keymap)))
+        (define-key map "a" 'notable--archive-note-at-point)
+        (define-key map "q" 'kill-current-buffer)
+        (define-key map "g" 'notable-list-notes)
+        (define-key map (kbd "RET") 'notable--show-note-at-point)
+        map))
+
+(defun notable--render-note (id note)
+  (check-type id integer)
+  (check-type note notable--note)
+
+  (let* ((start (point))
+         (date (dottime-format (seconds-to-time
+                                (notable--note-time note))))
+         (first-line (truncate-string-to-width
+                      (car (s-lines (notable--note-content note)))
+                      ;; Length of the window, minus the date prefix:
+                      (- (window-width) (+ 2 (length date)))
+                      nil nil 1)))
+    (insert (propertize (s-concat date "  " first-line)
+                        'notable-note-id id))
+    (insert "\n")))
+
+(defun notable--render-notes (notes)
+  "Retrieve each note in NOTES by ID and insert its contents into
+the list buffer.
+
+For larger notes only the first line is displayed."
+  (dolist (id notes)
+    (notable--render-note id (notable--get-note id))))
+
+;; User-facing functions
+
+(defun notable-take-note (content)
+  "Interactively prompt the user for a note that should be stored
+in Notable."
+  (interactive "sEnter note: ")
+  (check-type content string)
+  (notable--add-note content))
+
+(defun notable-list-notes ()
+  "Open a buffer listing all active notes."
+  (interactive)
+
+  (let ((buffer (get-buffer-create "*notable*"))
+        (notes (notable--list-note-ids))
+        (inhibit-read-only t))
+    (with-current-buffer buffer
+      (notable-list-mode)
+      (erase-buffer)
+      (setq header-line-format "Notable notes"))
+    (switch-to-buffer buffer)
+    (goto-char (point-min))
+    (notable--render-notes notes)))
+
+(provide 'notable)
diff --git a/tools/emacs-pkgs/passively/OWNERS b/tools/emacs-pkgs/passively/OWNERS
new file mode 100644
index 000000000000..56853aed59e7
--- /dev/null
+++ b/tools/emacs-pkgs/passively/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - tazjin
diff --git a/tools/emacs-pkgs/passively/README.md b/tools/emacs-pkgs/passively/README.md
new file mode 100644
index 000000000000..052c496b324d
--- /dev/null
+++ b/tools/emacs-pkgs/passively/README.md
@@ -0,0 +1,76 @@
+<!-- SPDX-License-Identifier: MIT -->
+passively
+=========
+
+Passively is an Emacs Lisp library for passively learning new
+information in an Emacs instance.
+
+Passively works by displaying a random piece of information to be
+learned in the Emacs echo area whenever Emacs is idle for a set amount
+of time.
+
+It was designed to aid in language acquisition by passively displaying
+new vocabulary to learn.
+
+Passively is configured with a corpus of information (a hash table
+mapping string keys to string values) and maintains a set of terms
+that the user already learned in a file on disk.
+
+## Configuration & usage
+
+Configure passively like this:
+
+```lisp
+;; Configure the terms to learn. Each term should have a key and a
+;; string value which is displayed.
+(setq passively-learn-terms
+      (ht ("забыть" "забыть - to forget")
+          ("действительно" "действительно - indeed, really")))
+
+;; Configure a file in which passively should store its state
+;; (defaults to $user-emacs-directory/passively.el)
+(setq passively-store-state "/persist/tazjin/passively.el")
+
+;; Configure after how many seconds of idle time passively should
+;; display a new piece of information.
+;; (defaults to 4 seconds)
+(setq passively-show-after-idle-for 5)
+
+;; Once this configuration has been set up, start passively:
+(passively-enable)
+
+;; Or, if it annoys you, disable it again:
+(passively-disable)
+```
+
+These variables are registered with `customize` and may be customised
+through its interface.
+
+### Known terms
+
+Passively exposes the interactive function
+`passively-mark-last-as-known` which marks the previously displayed
+term as known. This means that it will not be included in the random
+selection anymore.
+
+### Last term
+
+Passively stores the key of the last displayed term in
+`passively-last-displayed`.
+
+## Installation
+
+Inside of the TVL depot, you can install passively from
+`pkgs.emacsPackages.tvlPackages.passively`. Outside of the depot, you
+can clone passively like this:
+
+    git clone https://code.tvl.fyi/depot.git:/tools/emacs-pkgs/passively.git
+
+Passively depends on `ht.el`.
+
+Feel free to contribute patches by emailing them to `depot@tazj.in`.
+
+## Use-cases
+
+I'm using passively to learn Russian vocabulary. Once I've cleaned up
+my configuration for that, my Russian term list will be linked here.
diff --git a/tools/emacs-pkgs/passively/default.nix b/tools/emacs-pkgs/passively/default.nix
new file mode 100644
index 000000000000..ec59cc85fd8f
--- /dev/null
+++ b/tools/emacs-pkgs/passively/default.nix
@@ -0,0 +1,8 @@
+{ depot, ... }:
+
+depot.tools.emacs-pkgs.buildEmacsPackage {
+  pname = "passively";
+  version = "1.0";
+  src = ./passively.el;
+  externalRequires = (epkgs: with epkgs; [ ht ]);
+}
diff --git a/tools/emacs-pkgs/passively/passively.el b/tools/emacs-pkgs/passively/passively.el
new file mode 100644
index 000000000000..0d871f26add6
--- /dev/null
+++ b/tools/emacs-pkgs/passively/passively.el
@@ -0,0 +1,121 @@
+;;; passively.el --- Passively learn new information -*- lexical-binding: t; -*-
+;;
+;; SPDX-License-Identifier: MIT
+;; Copyright (C) 2020 The TVL Contributors
+;;
+;; Author: Vincent Ambo <tazjin@tvl.su>
+;; Version: 1.0
+;; Package-Requires: (ht seq)
+;; URL: https://code.tvl.fyi/about/tools/emacs-pkgs/passively/
+;;
+;; This file is not part of GNU Emacs.
+
+(require 'ht)
+(require 'seq)
+
+;; Customisation options
+
+(defgroup passively nil
+  "Customisation options for passively"
+  :group 'applications)
+
+(defcustom passively-learn-terms nil
+  "Terms that passively should randomly display to the user. The
+format of this variable is a hash table with a string key that
+uniquely identifies the term, and a string value that is
+displayed to the user.
+
+For example, a possible value could be:
+
+   (ht (\"забыть\" \"забыть - to forget\")
+       (\"действительно\" \"действительно - indeed, really\")))
+"
+  ;; TODO(tazjin): No hash-table type in customization.el?
+  :type '(sexp)
+  :group 'passively)
+
+(defcustom passively-store-state (format "%spassively.el" user-emacs-directory)
+  "File in which passively should store its state (e.g. known terms)"
+  :type '(file)
+  :group 'passively)
+
+(defcustom passively-show-after-idle-for 4
+  "Number of seconds after Emacs goes idle that passively should
+wait before displaying a term."
+  :type '(integer)
+  :group 'passively)
+
+;; Implementation of state persistence
+(defvar passively-last-displayed nil
+  "Key of the last displayed passively term.")
+
+(defvar passively--known-terms (make-hash-table)
+  "Set of terms that are already known.")
+
+(defun passively--persist-known-terms ()
+  "Persist the set of known passively terms to disk."
+  (with-temp-file passively-store-state
+    (insert (prin1-to-string (ht-keys passively--known-terms)))))
+
+(defun passively--load-known-terms ()
+  "Load the set of known passively terms from disk."
+  (with-temp-buffer
+    (insert-file-contents passively-store-state)
+    (let ((keys (read (current-buffer))))
+      (setq passively--known-terms (make-hash-table))
+      (seq-do
+       (lambda (key) (ht-set passively--known-terms key t))
+       keys)))
+  (message "passively: loaded %d known words"
+           (seq-length (ht-keys passively--known-terms))))
+
+(defun passively-mark-last-as-known ()
+  "Mark the last term that passively displayed as known. It will
+not be displayed again."
+  (interactive)
+
+  (ht-set passively--known-terms passively-last-displayed t)
+  (passively--persist-known-terms)
+  (message "passively: Marked '%s' as known" passively-last-displayed))
+
+;; Implementation of main display logic
+(defvar passively--display-timer nil
+  "idle-timer used for displaying terms by passively")
+
+(defun passively--random-term (timeout)
+  ;; This is stupid, calculate set intersections instead.
+  (if (< 1000 timeout)
+      (error "It seems you already know all the terms?")
+    (seq-random-elt (ht-keys passively-learn-terms))))
+
+(defun passively--display-random-term ()
+  (let* ((timeout 1)
+         (term (passively--random-term timeout)))
+    (while (ht-contains? passively--known-terms term)
+      (setq timeout (+ 1 timeout))
+      (setq term (passively--random-term timeout)))
+    (setq passively-last-displayed term)
+    (message (ht-get passively-learn-terms term))))
+
+(defun passively-enable ()
+  "Enable automatic display of terms via passively."
+  (interactive)
+  (if passively--display-timer
+      (error "passively: Already running!")
+    (passively--load-known-terms)
+    (setq passively--display-timer
+          (run-with-idle-timer passively-show-after-idle-for t
+                               #'passively--display-random-term))
+    (message "passively: Now running after %s seconds of idle time"
+             passively-show-after-idle-for)))
+
+(defun passively-disable ()
+  "Turn off automatic display of terms via passively."
+  (interactive)
+  (unless passively--display-timer
+    (error "passively: Not running!"))
+  (cancel-timer passively--display-timer)
+  (setq passively--display-timer nil)
+  (message "passively: Now disabled"))
+
+(provide 'passively)
diff --git a/tools/emacs-pkgs/term-switcher/default.nix b/tools/emacs-pkgs/term-switcher/default.nix
new file mode 100644
index 000000000000..e775de5cdbe8
--- /dev/null
+++ b/tools/emacs-pkgs/term-switcher/default.nix
@@ -0,0 +1,8 @@
+{ depot, ... }:
+
+depot.tools.emacs-pkgs.buildEmacsPackage {
+  pname = "term-switcher";
+  version = "1.0";
+  src = ./term-switcher.el;
+  externalRequires = epkgs: with epkgs; [ dash ivy s vterm ];
+}
diff --git a/tools/emacs-pkgs/term-switcher/term-switcher.el b/tools/emacs-pkgs/term-switcher/term-switcher.el
new file mode 100644
index 000000000000..0055f87fd67f
--- /dev/null
+++ b/tools/emacs-pkgs/term-switcher/term-switcher.el
@@ -0,0 +1,57 @@
+;;; term-switcher.el --- Easily switch between open vterms
+;;
+;; Copyright (C) 2019 Google Inc.
+;;
+;; Author: Vincent Ambo <tazjin@google.com>
+;; Version: 1.1
+;; Package-Requires: (dash ivy s vterm)
+;;
+;;; Commentary:
+;;
+;; This package adds a function that lets users quickly switch between
+;; different open vterms via ivy.
+
+(require 'dash)
+(require 'ivy)
+(require 's)
+(require 'vterm)
+
+(defgroup term-switcher nil
+  "Customization options `term-switcher'.")
+
+(defcustom term-switcher-buffer-prefix "vterm<"
+  "String prefix for vterm terminal buffers. For example, if you
+  set your titles to match `vterm<...>' a useful prefix might be
+  `vterm<'."
+  :type '(string)
+  :group 'term-switcher)
+
+(defun ts/open-or-create-vterm (buffer-name)
+  "Switch to the buffer with BUFFER-NAME or create a new vterm
+  buffer."
+  (if (equal "New vterm" buffer-name)
+      (vterm)
+    (if-let ((buffer (get-buffer buffer-name)))
+        (switch-to-buffer buffer)
+      (error "Could not find vterm buffer: %s" buffer-name))))
+
+(defun ts/is-vterm-buffer (buffer)
+  "Determine whether BUFFER runs a vterm."
+  (equal 'vterm-mode (buffer-local-value 'major-mode buffer)))
+
+(defun ts/switch-to-terminal ()
+  "Switch to an existing vterm buffer or create a new one."
+
+  (interactive)
+  (let ((terms (-map #'buffer-name
+                     (-filter #'ts/is-vterm-buffer (buffer-list)))))
+    (if terms
+        (ivy-read "Switch to vterm: "
+                  (cons "New vterm" terms)
+                  :caller 'ts/switch-to-terminal
+                  :preselect (s-concat "^" term-switcher-buffer-prefix)
+                  :require-match t
+                  :action #'ts/open-or-create-vterm)
+      (vterm))))
+
+(provide 'term-switcher)
diff --git a/tools/emacs-pkgs/tvl/OWNERS b/tools/emacs-pkgs/tvl/OWNERS
new file mode 100644
index 000000000000..ce7e0e37ee4f
--- /dev/null
+++ b/tools/emacs-pkgs/tvl/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+  - grfn
diff --git a/tools/emacs-pkgs/tvl/default.nix b/tools/emacs-pkgs/tvl/default.nix
new file mode 100644
index 000000000000..5dcc184bb521
--- /dev/null
+++ b/tools/emacs-pkgs/tvl/default.nix
@@ -0,0 +1,8 @@
+{ depot, ... }:
+
+depot.tools.emacs-pkgs.buildEmacsPackage {
+  pname = "tvl";
+  version = "1.0";
+  src = ./tvl.el;
+  externalRequires = (epkgs: with epkgs; [ magit s ]);
+}
diff --git a/tools/emacs-pkgs/tvl/tvl.el b/tools/emacs-pkgs/tvl/tvl.el
new file mode 100644
index 000000000000..500ffa165317
--- /dev/null
+++ b/tools/emacs-pkgs/tvl/tvl.el
@@ -0,0 +1,222 @@
+;;; tvl.el --- description -*- lexical-binding: t; -*-
+;;
+;; Copyright (C) 2020 Griffin Smith
+;; Copyright (C) 2020 The TVL Contributors
+;;
+;; Author: Griffin Smith <grfn@gws.fyi>
+;; Version: 0.0.1
+;; Package-Requires: (cl s magit)
+;;
+;; This file is not part of GNU Emacs.
+;;
+;;; Commentary:
+;;
+;; This file provides shared utilities for interacting with the TVL monorepo
+;;
+;;; Code:
+
+(require 'magit)
+(require 's)
+(require 'cl) ; TODO(tazjin): replace lexical-let* with non-deprecated alternative
+
+(defgroup tvl nil
+  "Customisation options for TVL functionality.")
+
+(defcustom tvl-gerrit-remote "origin"
+  "Name of the git remote for gerrit"
+  :type '(string)
+  :group 'tvl)
+
+(defcustom tvl-depot-path "/depot"
+  "Location at which the TVL depot is checked out."
+  :type '(string)
+  :group 'tvl)
+
+(defcustom tvl-target-branch "canon"
+  "Branch to use to target CLs"
+  :group 'tvl
+  :type '(string)
+  :safe (lambda (_) t))
+
+(defun tvl--gerrit-ref (target-branch &optional flags)
+  (let ((flag-suffix (if flags (format "%%%s" (s-join "," flags))
+                       "")))
+    (format "HEAD:refs/for/%s%s" target-branch flag-suffix)))
+
+(transient-define-suffix magit-gerrit-push-for-review ()
+  "Push to Gerrit for review."
+  (interactive)
+  (magit-push-refspecs tvl-gerrit-remote
+                       (tvl--gerrit-ref tvl-target-branch)
+                       nil))
+
+(transient-append-suffix
+  #'magit-push ["r"]
+  (list "R" "push to Gerrit for review" #'magit-gerrit-push-for-review))
+
+(transient-define-suffix magit-gerrit-push-wip ()
+  "Push to Gerrit as a work-in-progress."
+  (interactive)
+  (magit-push-refspecs tvl-gerrit-remote
+                       (tvl--gerrit-ref tvl-target-branch '("wip"))
+                       nil))
+
+(transient-append-suffix
+  #'magit-push ["r"]
+  (list "W" "push to Gerrit as a work-in-progress" #'magit-gerrit-push-wip))
+
+(transient-define-suffix magit-gerrit-push-autosubmit ()
+  "Push to Gerrit with autosubmit enabled."
+  (interactive)
+  (magit-push-refspecs tvl-gerrit-remote
+                       (tvl--gerrit-ref tvl-target-branch '("l=Autosubmit+1"))
+                       nil))
+
+(transient-append-suffix
+  #'magit-push ["r"]
+  (list "A" "push to Gerrit with autosubmit enabled" #'magit-gerrit-push-autosubmit))
+
+(transient-define-suffix magit-gerrit-submit ()
+  "Push to Gerrit for review."
+  (interactive)
+  (magit-push-refspecs tvl-gerrit-remote
+                       (tvl--gerrit-ref tvl-target-branch '("submit"))
+                       nil))
+
+(transient-append-suffix
+  #'magit-push ["r"]
+  (list "S" "push to Gerrit to submit" #'magit-gerrit-submit))
+
+
+(transient-define-suffix magit-gerrit-rubberstamp ()
+  "Push, approve and autosubmit to Gerrit. CLs created via this
+rubberstamp method will automatically be submitted after CI
+passes. This is potentially dangerous, use with care."
+  (interactive)
+  (magit-push-refspecs tvl-gerrit-remote
+                       (tvl--gerrit-ref tvl-target-branch
+                                        '("l=Code-Review+2"
+                                          "l=Autosubmit+1"
+                                          "publish-comments"))
+                       nil))
+
+(transient-append-suffix
+  #'magit-push ["r"]
+  (list "P" "push & rubberstamp to Gerrit" #'magit-gerrit-rubberstamp))
+
+(defvar magit-cl-history nil)
+(defun magit-read-cl (prompt remote)
+  (let* ((refs (prog2 (message "Determining available refs...")
+                   (magit-remote-list-refs remote)
+                 (message "Determining available refs...done")))
+         (change-refs (-filter
+                       (apply-partially #'string-prefix-p "refs/changes/")
+                       refs))
+         (cl-number-to-refs
+          (-group-by
+           (lambda (change-ref)
+             ;; refs/changes/34/1234/1
+             ;; ^    ^       ^  ^    ^
+             ;; 1    2       3  4    5
+             ;;                 ^-- this one
+             (cadddr
+              (split-string change-ref (rx "/"))))
+           change-refs))
+         (cl-numbers
+          (-map
+           (lambda (cl-to-refs)
+             (let ((latest-patchset-ref
+                    (-max-by
+                     (-on #'> (lambda (ref)
+                                (string-to-number
+                                 (nth 4 (split-string ref (rx "/"))))))
+                     (-remove
+                      (apply-partially #'s-ends-with-p "meta")
+                      (cdr cl-to-refs)))))
+               (propertize (car cl-to-refs) 'ref latest-patchset-ref)))
+           cl-number-to-refs)))
+    (get-text-property
+     0
+     'ref
+     (magit-completing-read
+      prompt cl-numbers nil t nil 'magit-cl-history))))
+
+(transient-define-suffix magit-gerrit-checkout (remote cl-refspec)
+  "Prompt for a CL number and checkout the latest patchset of that CL with
+  detached HEAD"
+  (interactive
+   (let* ((remote tvl-gerrit-remote)
+          (cl (magit-read-cl "Checkout CL" remote)))
+     (list remote cl)))
+  (magit-fetch-refspec remote cl-refspec (magit-fetch-arguments))
+  ;; That runs async, so wait for it to finish (this is how magit does it)
+  (while (and magit-this-process
+              (eq (process-status magit-this-process) 'run))
+    (sleep-for 0.005))
+  (magit-checkout "FETCH_HEAD" (magit-branch-arguments))
+  (message "HEAD detached at %s" cl-refspec))
+
+
+(transient-append-suffix
+  #'magit-branch ["l"]
+  (list "g" "gerrit CL" #'magit-gerrit-checkout))
+
+(transient-define-suffix magit-gerrit-cherry-pick (remote cl-refspec)
+  "Prompt for a CL number and cherry-pick the latest patchset of that CL"
+  (interactive
+   (let* ((remote tvl-gerrit-remote)
+          (cl (magit-read-cl "Cherry-pick CL" remote)))
+     (list remote cl)))
+  (magit-fetch-refspec remote cl-refspec (magit-fetch-arguments))
+  ;; That runs async, so wait for it to finish (this is how magit does it)
+  (while (and magit-this-process
+              (eq (process-status magit-this-process) 'run))
+    (sleep-for 0.005))
+  (magit-cherry-copy (list "FETCH_HEAD"))
+  (message "HEAD detached at %s" cl-refspec))
+
+
+(transient-append-suffix
+  #'magit-cherry-pick ["m"]
+  (list "g" "Gerrit CL" #'magit-gerrit-cherry-pick))
+
+(defun tvl-depot-status ()
+  "Open the TVL monorepo in magit."
+  (interactive)
+  (magit-status-setup-buffer tvl-depot-path))
+
+(eval-after-load 'sly
+  '(defun tvl-sly-from-depot (attribute)
+     "Start a Sly REPL configured with a Lisp matching a derivation
+     from the depot.
+
+     The derivation invokes nix.buildLisp.sbclWith and is built
+     asynchronously. The build output is included in the error
+     thrown on build failures."
+
+     (interactive "sAttribute: ")
+     (lexical-let* ((outbuf (get-buffer-create (format "*depot-out/%s*" attribute)))
+                    (errbuf (get-buffer-create (format "*depot-errors/%s*" attribute)))
+                    (expression (format "(import <depot> {}).%s.repl" attribute))
+                    (command (list "nix-build" "--no-out-link" "-I" (format "depot=%s" tvl-depot-path) "-E" expression)))
+       (message "Acquiring Lisp for <depot>.%s" attribute)
+       (make-process :name (format "depot-nix-build/%s" attribute)
+                     :buffer outbuf
+                     :stderr errbuf
+                     :command command
+                     :sentinel
+                     (lambda (process event)
+                       (unwind-protect
+                           (pcase event
+                             ("finished\n"
+                              (let* ((outpath (s-trim (with-current-buffer outbuf (buffer-string))))
+                                     (lisp-path (s-concat outpath "/bin/sbcl")))
+                                (message "Acquired Lisp for <depot>.%s at %s" attribute lisp-path)
+                                (sly lisp-path)))
+                             (_ (with-current-buffer errbuf
+                                  (error "Failed to build '%s':\n%s" attribute (buffer-string)))))
+                         (kill-buffer outbuf)
+                         (kill-buffer errbuf)))))))
+
+(provide 'tvl)
+;;; tvl.el ends here
diff --git a/tools/eprintf.nix b/tools/eprintf.nix
new file mode 100644
index 000000000000..933d73ea71ae
--- /dev/null
+++ b/tools/eprintf.nix
@@ -0,0 +1,15 @@
+{ depot, pkgs, ... }:
+
+let
+  bins = depot.nix.getBins pkgs.coreutils [ "printf" ];
+
+  # printf(1), but redirect to stderr
+in
+depot.nix.writeExecline "eprintf" { } [
+  "fdmove"
+  "-c"
+  "1"
+  "2"
+  bins.printf
+  "$@"
+]
diff --git a/tools/gerrit-cli.nix b/tools/gerrit-cli.nix
new file mode 100644
index 000000000000..1606155a8068
--- /dev/null
+++ b/tools/gerrit-cli.nix
@@ -0,0 +1,13 @@
+# Utility script to run a gerrit command on the depot host via ssh.
+# Reads the username from TVL_USERNAME, or defaults to $(whoami)
+{ pkgs, ... }:
+
+pkgs.writeShellScriptBin "gerrit" ''
+  TVL_USERNAME=''${TVL_USERNAME:-$(whoami)}
+  if which ssh &>/dev/null; then
+    ssh=ssh
+  else
+    ssh="${pkgs.openssh}/bin/ssh"
+  fi
+  exec $ssh $TVL_USERNAME@code.tvl.fyi -p 29418 -- gerrit "$@"
+''
diff --git a/tools/gerrit-update.nix b/tools/gerrit-update.nix
new file mode 100644
index 000000000000..e4efd89ea597
--- /dev/null
+++ b/tools/gerrit-update.nix
@@ -0,0 +1,34 @@
+# Utility script to perform a Gerrit update.
+{ pkgs, ... }:
+
+pkgs.writeShellScriptBin "gerrit-update" ''
+  set -euo pipefail
+
+  if [[ $EUID -ne 0 ]]; then
+    echo "Oh no! Only root is allowed to update Gerrit!" >&2
+    exit 1
+  fi
+
+  gerrit_war="$(find "${pkgs.gerrit}/webapps" -name 'gerrit*.war')"
+  java="${pkgs.jdk}/bin/java"
+  backup_path="/root/gerrit_preupgrade-$(date +"%Y-%m-%d").tar.bz2"
+
+  # Take a safety backup of Gerrit into /root's homedir. Just in case.
+  echo "Backing up Gerrit to $backup_path"
+  tar -cjf "$backup_path" /var/lib/gerrit
+
+  # Stop Gerrit (and its activation socket).
+  echo "Stopping Gerrit"
+  systemctl stop gerrit.service gerrit.socket
+
+  # Ask Gerrit to do a schema upgrade...
+  echo "Performing schema upgrade"
+  "$java" -jar "$gerrit_war" \
+    init --no-auto-start --batch --skip-plugins --site-path "/var/lib/gerrit"
+
+  # Restart Gerrit.
+  echo "Restarting Gerrit"
+  systemctl start gerrit.socket gerrit.service
+
+  echo "...done"
+''
diff --git a/tools/hash-password.nix b/tools/hash-password.nix
new file mode 100644
index 000000000000..9893d521787e
--- /dev/null
+++ b/tools/hash-password.nix
@@ -0,0 +1,7 @@
+# Utility for invoking slappasswd with the correct options for
+# creating an ARGON2 password hash.
+{ pkgs, ... }:
+
+pkgs.writeShellScriptBin "hash-password" ''
+  ${pkgs.openldap}/bin/slappasswd -o module-load=pw-argon2 -h '{ARGON2}'
+''
diff --git a/tools/magrathea/default.nix b/tools/magrathea/default.nix
new file mode 100644
index 000000000000..fa0a5d89a172
--- /dev/null
+++ b/tools/magrathea/default.nix
@@ -0,0 +1,23 @@
+# magrathea helps you build planets
+#
+# it is a tool for working with monorepos in the style of tvl's depot
+{ pkgs, ... }:
+
+pkgs.stdenv.mkDerivation {
+  name = "magrathea";
+  src = ./.;
+  dontInstall = true;
+
+  nativeBuildInputs = [ pkgs.chicken ];
+  buildInputs = with pkgs.chickenPackages.chickenEggs; [
+    matchable
+    srfi-13
+  ];
+
+  propagatedBuildInputs = [ pkgs.git ];
+
+  buildPhase = ''
+    mkdir -p $out/bin
+    csc -o $out/bin/mg -static ${./mg.scm}
+  '';
+}
diff --git a/tools/magrathea/mg.scm b/tools/magrathea/mg.scm
new file mode 100644
index 000000000000..a453da2ccd28
--- /dev/null
+++ b/tools/magrathea/mg.scm
@@ -0,0 +1,316 @@
+;; magrathea helps you build planets
+;;
+;; it is a tiny tool designed to ease workflows in monorepos that are
+;; modeled after the tvl depot.
+;;
+;; users familiar with workflows from other, larger monorepos may be
+;; used to having a build tool that can work in any tree location.
+;; magrathea enables this, but with nix-y monorepos.
+
+(import (chicken base)
+        (chicken format)
+        (chicken irregex)
+        (chicken port)
+        (chicken file)
+        (chicken file posix)
+        (chicken process)
+        (chicken process-context)
+        (chicken string)
+        (matchable)
+        (only (chicken io) read-string))
+
+(define usage #<<USAGE
+usage: mg <command> [<target>]
+
+target:
+  a target specification with meaning inside of the repository. can
+  be absolute (starting with //) or relative to the current directory
+  (as long as said directory is inside of the repo). if no target is
+  specified, the current directory's physical target is built.
+
+  for example:
+
+    //tools/magrathea - absolute physical target
+    //foo/bar:baz     - absolute virtual target
+    magrathea         - relative physical target
+    :baz              - relative virtual target
+
+commands:
+  build - build a target
+  shell - enter a shell with the target's build dependencies
+  path  - print source folder for the target
+  run   - build a target and execute its output
+
+file all feedback on b.tvl.fyi
+USAGE
+)
+
+;; parse target definitions. trailing slashes on physical targets are
+;; allowed for shell autocompletion.
+;;
+;; component ::= any string without "/" or ":"
+;;
+;; physical-target ::= <component>
+;;                   | <component> "/"
+;;                   | <component> "/" <physical-target>
+;;
+;; virtual-target ::= ":" <component>
+;;
+;; relative-target ::= <physical-target>
+;;                   | <virtual-target>
+;;                   | <physical-target> <virtual-target>
+;;
+;; root-anchor ::= "//"
+;;
+;; target ::= <relative-target> | <root-anchor> <relative-target>
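+;;
+;; for illustration (an example, not part of the grammar): the string
+;; "//foo/bar:baz" reads as the tokens
+;;   (root-anchor "foo" path-separator "bar" virtual-separator "baz")
+;; and parses into a target with absolute = #t,
+;; components = ("foo" "bar") and virtual = "baz"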
+
+;; read a path component until it looks like something else is coming
+(define (read-component first port)
+  (let ((keep-reading?
+         (lambda () (not (or (eq? #\/ (peek-char port))
+                             (eq? #\: (peek-char port))
+                             (eof-object? (peek-char port)))))))
+    (let reader ((acc (list first))
+                 (condition (keep-reading?)))
+      (if condition (reader (cons (read-char port) acc) (keep-reading?))
+          (list->string (reverse acc))))))
+
+;; read something that started with a slash. what will it be?
+(define (read-slash port)
+  (if (eq? #\/ (peek-char port))
+      (begin (read-char port)
+             'root-anchor)
+      'path-separator))
+
+;; read any target token and leave port sitting at the next one
+(define (read-token port)
+  (match (read-char port)
+         [#\/ (read-slash port)]
+         [#\: 'virtual-separator]
+         [other (read-component other port)]))
+
+;; read a target into a list of target tokens
+(define (read-target target-str)
+  (call-with-input-string
+   target-str
+   (lambda (port)
+     (let reader ((acc '()))
+       (if (eof-object? (peek-char port))
+           (reverse acc)
+           (reader (cons (read-token port) acc)))))))
+
+(define-record target absolute components virtual)
+(define (empty-target) (make-target #f '() #f))
+
+(define-record-printer (target t out)
+  (fprintf out (conc (if (target-absolute t) "//" "")
+                     (string-intersperse (target-components t) "/")
+                     (if (target-virtual t) ":" "")
+                     (or (target-virtual t) ""))))
+
+;; parse and validate a list of target tokens
+(define parse-tokens
+  (lambda (tokens #!optional (mode 'root) (acc (empty-target)))
+    (match (cons mode tokens)
+           ;; absolute target
+           [('root . ('root-anchor . rest))
+            (begin (target-absolute-set! acc #t)
+                   (parse-tokens rest 'root acc))]
+
+           ;; relative target minus potential garbage
+           [('root . (not ('path-separator . _)))
+            (parse-tokens tokens 'normal acc)]
+
+           ;; virtual target
+           [('normal . ('virtual-separator . rest))
+            (parse-tokens rest 'virtual acc)]
+
+           [('virtual . ((? string? v)))
+            (begin
+              (target-virtual-set! acc v)
+              acc)]
+
+           ;; chomp through all components and separators
+           [('normal . ('path-separator . rest)) (parse-tokens rest 'normal acc)]
+           [('normal . ((? string? component) . rest))
+            (begin (target-components-set!
+                    acc (append (target-components acc) (list component)))
+            (parse-tokens rest 'normal acc))]
+
+           ;; nothing more to parse and not in a weird state, all done, yay!
+           [('normal . ()) acc]
+
+           ;; oh no, we ran out of input too early :(
+           [(_ . ()) `(error . ,(format "unexpected end of input while parsing ~s target" mode))]
+
+           ;; something else was invalid :(
+           [_ `(error . ,(format "unexpected ~s while parsing ~s target" (car tokens) mode))])))
+
+(define (parse-target target)
+  (parse-tokens (read-target target)))
+
+;; turn relative targets into absolute targets based on the current
+;; directory
+(define (normalise-target t)
+  (when (not (target-absolute t))
+    (target-components-set! t (append (relative-repo-path)
+                                      (target-components t)))
+    (target-absolute-set! t #t))
+  t)
+
+;; nix doesn't care about the distinction between physical and virtual
+;; targets, normalise it away
+(define (normalised-components t)
+  (if (target-virtual t)
+      (append (target-components t) (list (target-virtual t)))
+      (target-components t)))
+
+;; return the current repository root as a string
+(define mg--repository-root #f)
+(define (repository-root)
+  (or mg--repository-root
+      (begin
+        (set! mg--repository-root
+              (or (get-environment-variable "MG_ROOT")
+                  (string-chomp
+                   (call-with-input-pipe "git rev-parse --show-toplevel"
+                                         (lambda (p) (read-string #f p))))))
+        mg--repository-root)))
+
+;; determine the current path relative to the root of the repository
+;; and return it as a list of path components.
+(define (relative-repo-path)
+  (string-split
+   (substring (current-directory) (string-length (repository-root))) "/"))
+
+;; escape a string for interpolation in nix code
+(define (nix-escape str)
+  (string-translate* str '(("\"" . "\\\"")
+                           ("${" . "\\${"))))
+
+;; create a nix expression to build the attribute at the specified
+;; components
+;;
+;; an empty target will build the current folder instead.
+;;
+;; this uses builtins.getAttr explicitly to avoid problems with
+;; escaping.
+(define (nix-expr-for target)
+  (let nest ((parts (normalised-components (normalise-target target)))
+             (acc (conc "(import " (repository-root) " {})")))
+    (match parts
+           [() (conc "with builtins; " acc)]
+           [_ (nest (cdr parts)
+                    (conc "(getAttr \""
+                          (nix-escape (car parts))
+                          "\" " acc ")"))])))
+
+;; exit and complain at the user if something went wrong
+(define (mg-error message)
+  (format (current-error-port) "[mg] error: ~A~%" message)
+  (exit 1))
+
+(define (guarantee-success value)
+  (match value
+         [('error . message) (mg-error message)]
+         [_ value]))
+
+(define (execute-build t)
+  (let ((expr (nix-expr-for t)))
+    (fprintf (current-error-port) "[mg] building target ~A~%" t)
+    (process-execute "nix-build" (list "-E" expr "--show-trace"))))
+
+(define (build args)
+  (match args
+         ;; simplest case: plain mg build with no target spec -> build
+         ;; the current folder's main target.
+         [() (execute-build (empty-target))]
+
+         ;; single argument should be a target spec
+         [(arg) (execute-build
+                 (guarantee-success (parse-target arg)))]
+
+         [other (print "not yet implemented")]))
+
+(define (execute-shell t)
+  (let ((expr (nix-expr-for t))
+        (user-shell (or (get-environment-variable "SHELL") "bash")))
+    (fprintf (current-error-port) "[mg] entering shell for ~A~%" t)
+    (process-execute "nix-shell"
+                     (list "-E" expr "--command" user-shell))))
+
+(define (shell args)
+  (match args
+         [() (execute-shell (empty-target))]
+         [(arg) (execute-shell
+                 (guarantee-success (parse-target arg)))]
+         [other (print "not yet implemented")]))
+
+(define (execute-run t #!optional cmd-args)
+  (fprintf (current-error-port) "[mg] building target ~A~%" t)
+  (let* ((expr (nix-expr-for t))
+         (out (call-with-input-pipe
+               (apply string-append
+                      ;; TODO(sterni): temporary gc root
+                      (intersperse `("nix-build" "-E" ,(qs expr) "--no-out-link")
+                                   " "))
+               (lambda (p)
+                 (string-chomp (let ((s (read-string #f p)))
+                                 (if (eq? s #!eof) "" s)))))))
+
+    ;; TODO(sterni): can we get the exit code of nix-build somehow?
+    (when (= (string-length out) 0)
+      (mg-error (string-append "Couldn't build target " (format "~A" t)))
+      (exit 1))
+
+    (fprintf (current-error-port) "[mg] running target ~A~%" t)
+    (process-execute
+     ;; If the output is a file, we assume it's an executable à la writeExecline,
+     ;; otherwise we look in the bin subdirectory and pick the only executable.
+     ;; Handling multiple executables is not possible at the moment, the choice
+     ;; could be made via a command line flag in the future.
+     (if (regular-file? out)
+         out
+         (let* ((dir-path (string-append out "/bin"))
+                (dir-contents (if (directory-exists? dir-path)
+                                  (directory dir-path #f)
+                                  '())))
+           (case (length dir-contents)
+             ((0) (mg-error "no executables in build output")
+                  (exit 1))
+             ((1) (string-append dir-path "/" (car dir-contents)))
+             (else (mg-error "more than one executable in build output")
+                   (exit 1)))))
+     cmd-args)))
+
+(define (run args)
+  (match args
+         [() (execute-run (empty-target))]
+         ;; TODO(sterni): flag for selecting binary name
+         [other (execute-run (guarantee-success (parse-target (car args)))
+                             (cdr args))]))
+
+(define (path args)
+  (match args
+         [(arg)
+          (print (apply string-append
+                        (intersperse
+                         (cons (repository-root)
+                               (target-components
+                                (normalise-target
+                                 (guarantee-success (parse-target arg)))))
+                         "/")))]
+         [() (mg-error "path command needs a target")]
+         [other (mg-error (format "unknown arguments: ~a" other))]))
+
+(define (main args)
+  (match args
+         [() (print usage)]
+         [("build" . _) (build (cdr args))]
+         [("shell" . _) (shell (cdr args))]
+         [("path" . _) (path (cdr args))]
+         [("run" . _) (run (cdr args))]
+         [other (begin (print "unknown command: mg " args)
+                       (print usage))]))
+
+(main (command-line-arguments))
diff --git a/tools/nixery/.skip-subtree b/tools/nixery/.skip-subtree
new file mode 100644
index 000000000000..4948dd56eb1d
--- /dev/null
+++ b/tools/nixery/.skip-subtree
@@ -0,0 +1 @@
+Imported subtree is not yet fully readTree-compatible.
diff --git a/tools/nixery/default.nix b/tools/nixery/default.nix
index 19143fccf6f9..70d5093bb945 100644
--- a/tools/nixery/default.nix
+++ b/tools/nixery/default.nix
@@ -16,7 +16,8 @@
 , preLaunch ? ""
 , extraPackages ? []
 , maxLayers ? 20
-, commitHash ? null }@args:
+, commitHash ? null
+, ... }@args:
 
 with pkgs;
 
diff --git a/tools/nsfv-setup/default.nix b/tools/nsfv-setup/default.nix
new file mode 100644
index 000000000000..1e353e32697b
--- /dev/null
+++ b/tools/nsfv-setup/default.nix
@@ -0,0 +1,29 @@
+# Configures a running Pulseaudio instance with a LADSPA filter that
+# creates a noise-cancelling sink.
+#
+# This can be used to, for example, cancel noise from an incoming
+# video conferencing audio stream.
+#
+# There are some caveats: for example, this will not distinguish
+# between noise from different participants, and I have no idea what
+# happens if the default sink goes away.
+#
+# If this script is run while an NSFV sink exists, the existing sink
+# will first be removed.
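+#
+# Rough usage sketch: run `nsfv-setup`, then select the new "NSFV" sink as
+# the output device for the noisy stream (e.g. in pavucontrol).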
+{ depot, pkgs, ... }:
+
+let
+  inherit (pkgs) ripgrep pulseaudio;
+  inherit (depot.third_party) nsfv;
+in
+pkgs.writeShellScriptBin "nsfv-setup" ''
+  export PATH="${ripgrep}/bin:${pulseaudio}/bin:$PATH"
+
+  if pacmd list-sinks | rg librnnoise_ladspa.so >/dev/null; then
+    pactl unload-module module-ladspa-sink
+  fi
+
+  SINK=$(${pulseaudio}/bin/pacmd info | ${ripgrep}/bin/rg -r '$1' '^Default sink name: (.*)$')
+  echo "Setting up NSFV filtering to sink ''${SINK}"
+  ${pulseaudio}/bin/pacmd load-module module-ladspa-sink sink_name=NSFV sink_master=''${SINK} label=noise_suppressor_mono plugin=${nsfv}/lib/ladspa/librnnoise_ladspa.so control=42 rate=48000
+''
diff --git a/tools/perf-flamegraph.nix b/tools/perf-flamegraph.nix
new file mode 100644
index 000000000000..b472b746ff14
--- /dev/null
+++ b/tools/perf-flamegraph.nix
@@ -0,0 +1,12 @@
+# Script that collects perf timing for the execution of a command and writes a
+# flamegraph to stdout
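+#
+# Example (illustrative):
+#   perf-flamegraph ./my-program --its-args > flamegraph.svg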
+{ pkgs, ... }:
+
+pkgs.writeShellScriptBin "perf-flamegraph" ''
+  set -euo pipefail
+
+  ${pkgs.linuxPackages.perf}/bin/perf record -g --call-graph dwarf -F max "$@"
+  ${pkgs.linuxPackages.perf}/bin/perf script \
+    | ${pkgs.flamegraph}/bin/stackcollapse-perf.pl \
+    | ${pkgs.flamegraph}/bin/flamegraph.pl
+''
diff --git a/tools/rust-crates-advisory/OWNERS b/tools/rust-crates-advisory/OWNERS
new file mode 100644
index 000000000000..1895955b2018
--- /dev/null
+++ b/tools/rust-crates-advisory/OWNERS
@@ -0,0 +1,4 @@
+inherited: true
+owners:
+  - Profpatsch
+  - sterni
diff --git a/tools/rust-crates-advisory/check-security-advisory.rs b/tools/rust-crates-advisory/check-security-advisory.rs
new file mode 100644
index 000000000000..e76b090abccb
--- /dev/null
+++ b/tools/rust-crates-advisory/check-security-advisory.rs
@@ -0,0 +1,119 @@
+extern crate semver;
+extern crate toml;
+
+use std::io::Write;
+
+/// reads a security advisory of the form
+/// https://github.com/RustSec/advisory-db/blob/a24932e220dfa9be8b0b501210fef8a0bc7ef43e/EXAMPLE_ADVISORY.md
+/// and a crate version number,
+/// and returns 0 if the crate version is patched or unaffected,
+/// and returns 1 if the crate version is neither patched nor unaffected.
+///
+/// If PRINT_ADVISORY is set, the advisory is printed if it matches.
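+//
+// Illustrative invocation (binary and file names are placeholders):
+//
+//   check-security-advisory ./RUSTSEC-2021-0000.md 1.2.3
+//   PRINT_ADVISORY=1 check-security-advisory ./RUSTSEC-2021-0000.md 1.2.3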
+
+fn main() {
+    let mut args = std::env::args_os();
+    let file = args.nth(1).expect("security advisory md file is $1");
+    let crate_version = args
+        .nth(0)
+        .expect("crate version is $2")
+        .into_string()
+        .expect("crate version string not utf8");
+    let crate_version = semver::Version::parse(&crate_version)
+        .expect(&format!("this is not a semver version: {}", &crate_version));
+    let filename = file.to_string_lossy();
+
+    let content = std::fs::read(&file).expect(&format!("could not read {}", filename));
+    let content = std::str::from_utf8(&content)
+        .expect(&format!("file {} was not encoded as utf-8", filename));
+    let content = content.trim_start();
+
+    let toml_start = content
+        .strip_prefix("```toml")
+        .expect(&format!("file did not start with ```toml: {}", filename));
+    let toml_end_index = toml_start.find("```").expect(&format!(
+        "the toml section did not end, no `` found: {}",
+        filename
+    ));
+    let toml = &toml_start[..toml_end_index];
+    let toml: toml::Value = toml::de::from_slice(toml.as_bytes())
+        .expect(&format!("could not parse toml: {}", filename));
+
+    let versions = toml
+        .as_table()
+        .expect(&format!("the toml is not a table: {}", filename))
+        .get("versions")
+        .expect(&format!(
+            "the toml does not contain the versions field: {}",
+            filename
+        ))
+        .as_table()
+        .expect(&format!(
+            "the toml versions field must be a table: {}",
+            filename
+        ));
+
+    let unaffected = match versions.get("unaffected") {
+        Some(u) => u
+            .as_array()
+            .expect(&format!(
+                "the toml versions.unaffected field must be a list of semvers: {}",
+                filename
+            ))
+            .iter()
+            .map(|v| {
+                semver::VersionReq::parse(
+                    v.as_str()
+                        .expect(&format!("the version field {} is not a string", v)),
+                )
+                .expect(&format!(
+                    "the version field {} is not a valid semver VersionReq",
+                    v
+                ))
+            })
+            .collect(),
+        None => vec![],
+    };
+
+    let mut patched: Vec<semver::VersionReq> = versions
+        .get("patched")
+        .expect(&format!(
+            "the toml versions.patched field must exist: {}",
+            filename
+        ))
+        .as_array()
+        .expect(&format!(
+            "the toml versions.patched field must be a list of semvers: {}",
+            filename
+        ))
+        .iter()
+        .map(|v| {
+            semver::VersionReq::parse(
+                v.as_str()
+                    .expect(&format!("the version field {} is not a string", v)),
+            )
+            .expect(&format!(
+                "the version field {} is not a valid semver VersionReq",
+                v
+            ))
+        })
+        .collect();
+
+    patched.extend_from_slice(&unaffected[..]);
+    let is_patched_or_unaffected = patched.iter().any(|req| req.matches(&crate_version));
+
+    if is_patched_or_unaffected {
+        std::process::exit(0);
+    } else {
+        if std::env::var_os("PRINT_ADVISORY").is_some() {
+            write!(
+                std::io::stderr(),
+                "Advisory {} matched!\n{}\n",
+                filename,
+                content
+            )
+            .unwrap();
+        }
+        std::process::exit(1);
+    }
+}
diff --git a/tools/rust-crates-advisory/default.nix b/tools/rust-crates-advisory/default.nix
new file mode 100644
index 000000000000..b3e8c850eb4b
--- /dev/null
+++ b/tools/rust-crates-advisory/default.nix
@@ -0,0 +1,200 @@
+{ depot, pkgs, lib, ... }:
+
+let
+
+  bins =
+    depot.nix.getBins pkgs.s6-portable-utils [ "s6-ln" "s6-cat" "s6-echo" "s6-mkdir" "s6-test" "s6-touch" "s6-dirname" ]
+    // depot.nix.getBins pkgs.coreutils [ "printf" ]
+    // depot.nix.getBins pkgs.lr [ "lr" ]
+    // depot.nix.getBins pkgs.cargo-audit [ "cargo-audit" ]
+    // depot.nix.getBins pkgs.jq [ "jq" ]
+    // depot.nix.getBins pkgs.findutils [ "find" ]
+    // depot.nix.getBins pkgs.gnused [ "sed" ]
+  ;
+
+  crate-advisories = "${depot.third_party.rustsec-advisory-db}/crates";
+
+  our-crates = lib.filter (v: v ? outPath)
+    (builtins.attrValues depot.third_party.rust-crates);
+
+  our-crates-lock-file = pkgs.writeText "our-crates-Cargo.lock"
+    (lib.concatMapStrings
+      (crate: ''
+        [[package]]
+        name = "${crate.crateName}"
+        version = "${crate.version}"
+        source = "registry+https://github.com/rust-lang/crates.io-index"
+
+      '')
+      our-crates);
+
+  check-security-advisory = depot.nix.writers.rustSimple
+    {
+      name = "parse-security-advisory";
+      dependencies = [
+        depot.third_party.rust-crates.toml
+        depot.third_party.rust-crates.semver
+      ];
+    }
+    (builtins.readFile ./check-security-advisory.rs);
+
+  # $1 is the directory with advisories for crate $2 with version $3
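+  # e.g. (paths and version are placeholders):
+  #   check-crate-advisory /path/to/advisory-db/crates/openssl openssl 0.10.30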
+  check-crate-advisory = depot.nix.writeExecline "check-crate-advisory" { readNArgs = 3; } [
+    "pipeline"
+    [ bins.lr "-0" "-t" "depth == 1" "$1" ]
+    "forstdin"
+    "-0"
+    "-Eo"
+    "0"
+    "advisory"
+    "if"
+    [ depot.tools.eprintf "advisory %s\n" "$advisory" ]
+    check-security-advisory
+    "$advisory"
+    "$3"
+  ];
+
+  # Run through everything in the `crate-advisories` repository
+  # and check whether we can parse all the advisories without crashing.
+  test-parsing-all-security-advisories = depot.nix.runExecline "check-all-our-crates" { } [
+    "pipeline"
+    [ bins.lr "-0" "-t" "depth == 1" crate-advisories ]
+    "if"
+    [
+      # this will succeed as long as check-crate-advisory doesn’t `panic!()` (status 101)
+      "forstdin"
+      "-0"
+      "-E"
+      "-x"
+      "101"
+      "crate_advisories"
+      check-crate-advisory
+      "$crate_advisories"
+      "foo"
+      "0.0.0"
+    ]
+    "importas"
+    "out"
+    "out"
+    bins.s6-touch
+    "$out"
+  ];
+
+
+  lock-file-report = pkgs.writers.writeBash "lock-file-report" ''
+    set -u
+
+    if test "$#" -lt 2; then
+      echo "Usage: $0 IDENTIFIER LOCKFILE [CHECKLIST [MAINTAINERS]]" >&2
+      echo >&2
+      echo "  IDENTIFIER  Unique string describing the lock file" >&2
+      echo "  LOCKFILE    Path to Cargo.lock file" >&2
+      echo "  CHECKLIST   Whether to use GHFM checklists in the output (true or false)" >&2
+      echo "  MAINTAINERS List of @names to cc in case of advisories" >&2
+      exit 100
+    fi
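+
+    # for example (mirroring check-all-our-lock-files below):
+    #   lock-file-report "//third_party/rust-crates" ./Cargo.lock false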
+
+    "${bins.cargo-audit}" audit --json --no-fetch \
+      --db "${depot.third_party.rustsec-advisory-db}" \
+      --file "$2" \
+    | "${bins.jq}" --raw-output --join-output \
+      --from-file "${./format-audit-result.jq}" \
+      --arg maintainers "''${4:-}" \
+      --argjson checklist "''${3:-false}" \
+      --arg attr "$1"
+
+    exit "''${PIPESTATUS[0]}" # inherit exit code from cargo-audit
+  '';
+
+  tree-lock-file-report = depot.nix.writeExecline "tree-lock-file-report"
+    {
+      readNArgs = 1;
+    } [
+    "backtick"
+    "-E"
+    "report"
+    [
+      "pipeline"
+      [ bins.find "$1" "-name" "Cargo.lock" "-and" "-type" "f" "-print0" ]
+      "forstdin"
+      "-E"
+      "-0"
+      "lockFile"
+      "backtick"
+      "-E"
+      "depotPath"
+      [
+        "pipeline"
+        [ bins.s6-dirname "$lockFile" ]
+        bins.sed
+        "s|^\\.|/|"
+      ]
+      lock-file-report
+      "$depotPath"
+      "$lockFile"
+      "false"
+    ]
+    "if"
+    [ bins.printf "%s\n" "$report" ]
+    # empty report implies success (no advisories)
+    bins.s6-test
+    "-z"
+    "$report"
+  ];
+
+  check-all-our-lock-files = depot.nix.writeExecline "check-all-our-lock-files" { } [
+    "backtick"
+    "-EI"
+    "report"
+    [
+      "foreground"
+      [
+        lock-file-report
+        "//third_party/rust-crates"
+        our-crates-lock-file
+        "false"
+      ]
+      tree-lock-file-report
+      "."
+    ]
+    "ifelse"
+    [
+      bins.s6-test
+      "-z"
+      "$report"
+    ]
+    [
+      "exit"
+      "0"
+    ]
+    "pipeline"
+    [
+      "printf"
+      "%s"
+      "$report"
+    ]
+    "buildkite-agent"
+    "annotate"
+    "--style"
+    "warning"
+    "--context"
+    "check-all-our-lock-files"
+  ];
+
+in
+depot.nix.readTree.drvTargets {
+  inherit
+    test-parsing-all-security-advisories
+    check-crate-advisory
+    lock-file-report
+    ;
+
+
+  tree-lock-file-report = tree-lock-file-report // {
+    meta.ci.extraSteps.run = {
+      label = "Check all crates used in depot for advisories";
+      alwaysRun = true;
+      command = check-all-our-lock-files;
+    };
+  };
+}
diff --git a/tools/rust-crates-advisory/format-audit-result.jq b/tools/rust-crates-advisory/format-audit-result.jq
new file mode 100644
index 000000000000..d42ff6e55c79
--- /dev/null
+++ b/tools/rust-crates-advisory/format-audit-result.jq
@@ -0,0 +1,75 @@
+# This is a jq script to format the JSON output of cargo-audit into a short
+# markdown report for humans. It is used by //users/sterni/nixpkgs-crate-holes
+# and //tools/rust-crates-advisory:check-all-our-lock-files which will provide
+# you with example invocations.
+#
+# It needs the following arguments passed to it:
+#
+# - maintainers: Either the empty string or a list of maintainers to @mention
+#   for the current lock file.
+# - attr: An attribute name (or otherwise unique identifier) to associate the
+#   report for the current lock file with.
+# - checklist: If true, the markdown report will use GHFM checklists for the
+#   report, allowing attributes to be ticked off as they are taken care of.
+
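+# An illustrative invocation, mirroring lock-file-report in
+# //tools/rust-crates-advisory/default.nix (paths and attr are placeholders):
+#
+#   cargo-audit audit --json --no-fetch --db ./advisory-db --file Cargo.lock \
+#     | jq --raw-output --join-output --from-file format-audit-result.jq \
+#          --arg attr "//foo/bar" --argjson checklist false --arg maintainers ""
+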
+# Link to human-readable advisory info for a given vulnerability
+def link:
+  [ "https://rustsec.org/advisories/", .advisory.id, ".html" ] | add;
+
+# Format a list of version constraints
+def version_list:
+  [ .[] | "`" + . + "`" ] | join("; ");
+
+# show paths to fixing this vulnerability:
+#
+# - if there are patched releases, show them (the version we are using presumably
+#   predates the vulnerability discovery, so we likely want to upgrade to a
+#   patched release).
+# - if there are no patched releases, show the unaffected versions (in case we
+#   want to downgrade).
+# - otherwise we state that no unaffected versions are available at this time.
+#
+# This logic should be useful, but is slightly dumber than cargo-audit's
+# suggestion when using the non-JSON output.
+def patched:
+  if .versions.patched == [] then
+    if .versions.unaffected != [] then
+       "unaffected: " + (.versions.unaffected | version_list)
+    else
+      "no unaffected version available"
+    end
+  else
+    "patched: " + (.versions.patched | version_list)
+  end;
+
+# if the vulnerability has aliases (like CVE-*) emit them in parens
+def aliases:
+  if .advisory.aliases == [] then
+    ""
+  else
+    [ " (", (.advisory.aliases | join(", ")), ")" ] | add
+  end;
+
+# each vulnerability is rendered as a (normal) sublist item
+def format_vulnerability:
+  [ "  - "
+  , .package.name, " ", .package.version, ": "
+  , "[", .advisory.id, "](", link, ")"
+  , aliases
+  , ", ", patched
+  , "\n"
+  ] | add;
+
+# be quiet if no found vulnerabilities, otherwise render a GHFM checklist item
+if .vulnerabilities.found | not then
+  ""
+else
+  ([ "-", if $checklist then " [ ] " else " " end
+   , "`", $attr, "`: "
+   , (.vulnerabilities.count | tostring)
+   , " advisories for Cargo.lock"
+   , if $maintainers != "" then " (cc " + $maintainers + ")" else "" end
+   , "\n"
+   ] + (.vulnerabilities.list | map(format_vulnerability))
+  ) | add
+end
diff --git a/tools/tvlc/OWNERS b/tools/tvlc/OWNERS
new file mode 100644
index 000000000000..9e7830ab215e
--- /dev/null
+++ b/tools/tvlc/OWNERS
@@ -0,0 +1,3 @@
+inherited: true
+owners:
+ - riking
diff --git a/tools/tvlc/common.sh b/tools/tvlc/common.sh
new file mode 100644
index 000000000000..fe7605857fd3
--- /dev/null
+++ b/tools/tvlc/common.sh
@@ -0,0 +1,33 @@
+#!/bin/bash
+
+set -eu
+set -o pipefail
+
+source path-scripts
+
+XDG_DATA_HOME="${XDG_DATA_HOME:-$HOME/.local/share}"
+tvlc_root="$XDG_DATA_HOME/tvlc"
+
+nice_checkout_root=
+if [ -f "$tvlc_root"/nice_checkout_root ]; then
+  nice_checkout_root="$(cat "$tvlc_root"/nice_checkout_root)"
+fi
+nice_checkout_root="${nice_checkout_root:-$HOME/tvlc}"
+
+depot_root=
+if [ -f "$tvlc_root/depot_root" ]; then
+  depot_root="$(cat "$tvlc_root/depot_root")"
+fi
+if [ -d /depot ]; then
+  # don't require config on tvl nixos servers
+  depot_root="${depot_root:-/depot}"
+fi
+if [ -n "$depot_root" ]; then
+  export DEPOT_ROOT="$depot_root"
+fi
+
+if [ ! -d "$tvlc_root" ]; then
+  echo "tvlc: setup required"
+  echo "please run 'tvlc setup' from the depot root"
+  exit 1
+fi
diff --git a/tools/tvlc/default.nix b/tools/tvlc/default.nix
new file mode 100644
index 000000000000..a6f201485ff2
--- /dev/null
+++ b/tools/tvlc/default.nix
@@ -0,0 +1,51 @@
+{ pkgs, depot, ... }:
+
+let
+  pathScripts = pkgs.writeShellScript "imports" ''
+    export tvix_instantiate="${depot.third_party.nix}/bin/nix-instantiate"
+    export depot_scanner="${depot.tools.depot-scanner}/bin/depot-scanner"
+  '';
+
+  # setup: git rev-parse --show-toplevel > $tvlc_root/depot_root
+  # setup: mkdir $tvlc_root/clients
+  # setup: echo 1 > $tvlc_root/next_clientid
+
+  commonsh = pkgs.stdenv.mkDerivation {
+    name = "common.sh";
+    src = ./common.sh;
+    doCheck = true;
+    unpackPhase = "true";
+    buildPhase = ''
+      substitute ${./common.sh} $out --replace path-scripts ${pathScripts}
+    '';
+    checkPhase = ''
+      ${pkgs.shellcheck}/bin/shellcheck $out ${pathScripts} && echo "SHELLCHECK OK"
+    '';
+    installPhase = ''
+      chmod +x $out
+    '';
+  };
+
+  tvlcNew = pkgs.stdenv.mkDerivation {
+    name = "tvlc-new";
+    src = ./tvlc-new;
+    doCheck = true;
+
+    unpackPhase = "true";
+    buildPhase = ''
+      substitute ${./tvlc-new} $out --replace common.sh ${commonsh}
+    '';
+    checkPhase = ''
+      ${pkgs.shellcheck}/bin/shellcheck $out ${commonsh} ${pathScripts} && echo "SHELLCHECK OK"
+    '';
+    installPhase = ''
+      chmod +x $out
+    '';
+  };
+
+in
+{
+  inherit pathScripts;
+  inherit commonsh;
+  inherit tvlcNew;
+}
diff --git a/tools/tvlc/tvlc-new b/tools/tvlc/tvlc-new
new file mode 100755
index 000000000000..4ef0df5d33b2
--- /dev/null
+++ b/tools/tvlc/tvlc-new
@@ -0,0 +1,103 @@
+#!/bin/bash
+
+source common.sh
+
+set -eu
+set -o pipefail
+
+function usage() {
+  echo "tvlc new [-n|--name CLIENTNAME] [derivation...]"
+  echo ""
+  cat <<EOF
+  The 'new' command creates a new git sparse checkout with the given name, and
+  contents needed to build the Nix derivation(s) specified on the command line.
+
+  Options:
+    -n/--name client-name: Sets the git branch and nice checkout name for the
+        workspace. If the option is not provided, the name will be based on the
+        first non-option command-line argument.
+    --branch branch-name: Sets the git branch name only.
+EOF
+}
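+
+# Illustrative invocation (name and target are placeholders):
+#   tvlc new -n myproject //web/myproject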
+
+checkout_name=
+branch_name=
+
+options=$(getopt -o 'hn:' --long debug --long name: --long branch: -- "$@")
+eval set -- "$options"
+while true; do
+  case "$1" in
+  -h)
+    usage
+    exit 0
+    ;;
+  -v)
+    version
+    exit 0
+    ;;
+  -n|--name)
+    shift
+    checkout_name="$1"
+    if [ -z "$branch_name" ]; then
+      branch_name=tvlc-"$1"
+    fi
+    ;;
+  --branch)
+    shift
+    branch_name="$1"
+    ;;
+  --)
+    shift
+    break
+    ;;
+  esac
+  shift
+done
+
+if [ $# -eq 0 ]; then
+  echo "error: workspace name, target derivations required"
+  exit 1
+fi
+
+if [ -z "$checkout_name" ]; then
+  # TODO(riking): deduce
+  echo "error: workspace name (-n) required"
+  exit 1
+fi
+
+if [ -d "$nice_checkout_root/$checkout_name" ]; then
+  echo "error: checkout $checkout_name already exists"
+  # nb: shellescape checkout_name because we expect the user to copy-paste it
+  # shellcheck disable=SC1003
+  echo "consider deleting it with tvlc remove '${checkout_name/'/\'}'"
+  exit 1
+fi
+if [ -f "$DEPOT_ROOT/.git/refs/heads/$branch_name" ]; then
+  echo "error: branch $branch_name already exists in git"
+  # shellcheck disable=SC1003
+  echo "consider deleting it with cd $DEPOT_ROOT; git branch -d '${checkout_name/'/\'}'"
+  exit 1
+fi
+
+# The big one: call into Nix to figure out what paths the desired derivations depend on.
+readarray -t includedPaths < <("$depot_scanner" --mode 'print' --only 'DEPOT' --relpath --depot "$DEPOT_ROOT" --nix-bin "$tvix_instantiate" "$@")
+
+# bash math
+checkout_id=$(("$(cat "$tvlc_root/next_clientid")"))
+next_checkout_id=$(("$checkout_id"+1))
+echo "$next_checkout_id" > "$tvlc_root/next_clientid"
+
+checkout_dir="$tvlc_root/clients/$checkout_id"
+mkdir "$checkout_dir"
+cd "$DEPOT_ROOT"
+git worktree add --no-checkout -b "$branch_name" "$checkout_dir"
+# BUG: git not creating the /info/ subdir
+mkdir "$DEPOT_ROOT/.git/worktrees/$checkout_id/info"
+
+cd "$checkout_dir"
+git sparse-checkout init --cone
+git sparse-checkout set "${includedPaths[@]}"
+
+ln -s "$checkout_dir" "$nice_checkout_root"/"$checkout_name"
+
+echo "$nice_checkout_root/$checkout_name"