-rw-r--r--  corp/rih/frontend/Cargo.lock | 20
-rw-r--r--  corp/rih/frontend/Cargo.toml | 2
-rw-r--r--  corp/russian/predlozhnik/Cargo.lock | 20
-rw-r--r--  corp/russian/predlozhnik/Cargo.toml | 2
-rw-r--r--  corp/russian/predlozhnik/default.nix | 1
-rw-r--r--  docs/REVIEWS.md | 41
-rw-r--r--  fun/clbot/clbot.go | 24
-rw-r--r--  fun/clbot/clbot_test.go | 24
-rw-r--r--  ops/kontemplate/default.nix | 7
-rw-r--r--  ops/kontemplate/deps.nix | 111
-rw-r--r--  ops/kontemplate/go.mod | 25
-rw-r--r--  ops/kontemplate/go.sum | 75
-rw-r--r--  ops/kontemplate/main.go | 2
-rw-r--r--  ops/kontemplate/templater/templater.go | 2
-rw-r--r--  ops/machines/whitby/default.nix | 12
-rw-r--r--  ops/modules/clbot.nix | 11
-rw-r--r--  ops/modules/teleirc.nix | 40
-rw-r--r--  ops/secrets/secrets.nix | 7
-rw-r--r--  ops/secrets/teleirc.age | bin 0 -> 1006 bytes
-rw-r--r--  ops/terraform/deploy-nixos/README.md | 12
-rw-r--r--  ops/users/default.nix | 15
-rw-r--r--  third_party/exwm/exwm-core.el | 70
-rw-r--r--  third_party/exwm/exwm-floating.el | 10
-rw-r--r--  third_party/exwm/exwm-input.el | 2
-rw-r--r--  third_party/exwm/exwm-layout.el | 7
-rw-r--r--  third_party/exwm/exwm-systemtray.el | 11
-rw-r--r--  third_party/exwm/exwm-workspace.el | 10
-rw-r--r--  third_party/exwm/exwm-xsettings.el | 2
-rw-r--r--  third_party/exwm/exwm.el | 19
-rw-r--r--  third_party/geesefs/default.nix | 25
-rw-r--r--  third_party/nixpkgs/default.nix | 8
-rw-r--r--  third_party/overlays/patches/crate2nix-drop-darwin-explicit-dontstrip.patch | 22
-rw-r--r--  third_party/overlays/tvl.nix | 37
-rw-r--r--  third_party/sources/sources.json | 42
-rw-r--r--  third_party/teleirc/default.nix | 23
-rw-r--r--  tvix/Cargo.lock | 360
-rw-r--r--  tvix/Cargo.nix | 1753
-rw-r--r--  tvix/Cargo.toml | 1
-rw-r--r--  tvix/README.md | 5
-rw-r--r--  tvix/build-go/build.pb.go | 2
-rw-r--r--  tvix/build-go/rpc_build.pb.go | 2
-rw-r--r--  tvix/build/Cargo.toml | 6
-rw-r--r--  tvix/build/default.nix | 12
-rw-r--r--  tvix/build/src/bin/tvix-build.rs | 22
-rw-r--r--  tvix/castore-go/castore.pb.go | 2
-rw-r--r--  tvix/castore-go/rpc_blobstore.pb.go | 2
-rw-r--r--  tvix/castore-go/rpc_directory.pb.go | 2
-rw-r--r--  tvix/castore/Cargo.toml | 10
-rw-r--r--  tvix/castore/default.nix | 33
-rw-r--r--  tvix/castore/src/blobservice/from_addr.rs | 8
-rw-r--r--  tvix/castore/src/blobservice/grpc.rs | 25
-rw-r--r--  tvix/castore/src/directoryservice/closure_validator.rs | 309
-rw-r--r--  tvix/castore/src/directoryservice/combinators.rs | 142
-rw-r--r--  tvix/castore/src/directoryservice/directory_graph.rs | 413
-rw-r--r--  tvix/castore/src/directoryservice/from_addr.rs | 8
-rw-r--r--  tvix/castore/src/directoryservice/grpc.rs | 39
-rw-r--r--  tvix/castore/src/directoryservice/mod.rs | 8
-rw-r--r--  tvix/castore/src/directoryservice/object_store.rs | 36
-rw-r--r--  tvix/castore/src/directoryservice/order_validator.rs | 181
-rw-r--r--  tvix/castore/src/directoryservice/simple_putter.rs | 14
-rw-r--r--  tvix/castore/src/directoryservice/sled.rs | 14
-rw-r--r--  tvix/castore/src/fs/fuse/mod.rs (renamed from tvix/castore/src/fs/fuse.rs) | 59
-rw-r--r--  tvix/castore/src/fs/fuse/tests.rs (renamed from tvix/castore/src/fs/tests.rs) | 55
-rw-r--r--  tvix/castore/src/fs/mod.rs | 25
-rw-r--r--  tvix/castore/src/import/archive.rs | 109
-rw-r--r--  tvix/castore/src/import/blobs.rs | 177
-rw-r--r--  tvix/castore/src/import/fs.rs | 54
-rw-r--r--  tvix/castore/src/import/mod.rs | 2
-rw-r--r--  tvix/castore/src/proto/grpc_directoryservice_wrapper.rs | 17
-rw-r--r--  tvix/cli/Cargo.toml | 13
-rw-r--r--  tvix/cli/src/main.rs | 172
-rw-r--r--  tvix/cli/src/repl.rs | 175
-rw-r--r--  tvix/default.nix | 86
-rw-r--r--  tvix/docs/book.toml | 14
-rw-r--r--  tvix/docs/default.nix | 3
-rw-r--r--  tvix/docs/mdbook-admonish.css | 348
-rw-r--r--  tvix/docs/mdbook-extra.css | 7
-rw-r--r--  tvix/docs/src/SUMMARY.md | 37
-rw-r--r--  tvix/docs/src/TODO.md | 122
-rw-r--r--  tvix/docs/src/architecture.md | 9
-rw-r--r--  tvix/docs/src/castore/blobstore-chunking.md (renamed from tvix/castore/docs/blobstore-chunking.md) | 6
-rw-r--r--  tvix/docs/src/castore/blobstore-protocol.md (renamed from tvix/castore/docs/blobstore-protocol.md) | 4
-rw-r--r--  tvix/docs/src/castore/data-model.md (renamed from tvix/castore/docs/data-model.md) | 8
-rw-r--r--  tvix/docs/src/castore/why-not-git-trees.md (renamed from tvix/castore/docs/why-not-git-trees.md) | 2
-rw-r--r--  tvix/docs/src/contributing/code-&-commits.md | 76
-rw-r--r--  tvix/docs/src/contributing/email.md | 33
-rw-r--r--  tvix/docs/src/contributing/gerrit.md | 112
-rw-r--r--  tvix/docs/src/eval/abandoned/index.md | 3
-rw-r--r--  tvix/docs/src/eval/abandoned/thread-local-vm.md (renamed from tvix/eval/docs/abandoned/thread-local-vm.md) | 0
-rw-r--r--  tvix/docs/src/eval/bindings.md (renamed from tvix/eval/docs/bindings.md) | 9
-rw-r--r--  tvix/docs/src/eval/build-references.md (renamed from tvix/eval/docs/build-references.md) | 17
-rw-r--r--  tvix/docs/src/eval/builtins.md (renamed from tvix/eval/docs/builtins.md) | 3
-rw-r--r--  tvix/docs/src/eval/catchable-errors.md (renamed from tvix/eval/docs/catchable-errors.md) | 0
-rw-r--r--  tvix/docs/src/eval/known-optimisation-potential.md (renamed from tvix/eval/docs/known-optimisation-potential.md) | 3
-rw-r--r--  tvix/docs/src/eval/language-issues.md (renamed from tvix/eval/docs/language-issues.md) | 0
-rw-r--r--  tvix/docs/src/eval/opcodes-attrsets.md (renamed from tvix/eval/docs/opcodes-attrsets.md) | 0
-rw-r--r--  tvix/docs/src/eval/recursive-attrs.md (renamed from tvix/eval/docs/recursive-attrs.md) | 3
-rw-r--r--  tvix/docs/src/eval/vm-loop.md (renamed from tvix/eval/docs/vm-loop.md) | 3
-rw-r--r--  tvix/docs/src/getting-started.md | 59
-rw-r--r--  tvix/docs/src/language-spec.md | 4
-rw-r--r--  tvix/docs/src/nix-daemon/index.md | 15
-rw-r--r--  tvix/docs/src/store/api.md (renamed from tvix/store/docs/api.md) | 7
-rw-r--r--  tvix/docs/src/value-pointer-equality.md | 6
-rw-r--r--  tvix/eval/Cargo.toml | 4
-rw-r--r--  tvix/eval/benches/eval.rs | 6
-rw-r--r--  tvix/eval/build.rs | 7
-rw-r--r--  tvix/eval/default.nix | 13
-rw-r--r--  tvix/eval/src/builtins/mod.rs | 103
-rw-r--r--  tvix/eval/src/builtins/to_xml.rs | 275
-rw-r--r--  tvix/eval/src/errors.rs | 34
-rw-r--r--  tvix/eval/src/io.rs | 31
-rw-r--r--  tvix/eval/src/lib.rs | 2
-rw-r--r--  tvix/eval/src/nix_search_path.rs | 2
-rw-r--r--  tvix/eval/src/tests/mod.rs | 199
-rw-r--r--  tvix/eval/src/tests/nix_tests.rs | 207
-rw-r--r--  tvix/eval/src/tests/one_offs.rs | 2
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix | 10
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml | 5
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp | 1
-rw-r--r--  tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix | 2
-rw-r--r--  tvix/eval/src/value/json.rs | 8
-rw-r--r--  tvix/eval/src/value/mod.rs | 4
-rw-r--r--  tvix/eval/src/value/string.rs | 92
-rw-r--r--  tvix/eval/src/vm/generators.rs | 2
-rw-r--r--  tvix/eval/src/vm/mod.rs | 6
-rw-r--r--  tvix/eval/tests/nix_oracle.rs | 2
-rw-r--r--  tvix/glue/Cargo.toml | 5
-rw-r--r--  tvix/glue/benches/eval.rs | 6
-rw-r--r--  tvix/glue/build.rs | 6
-rw-r--r--  tvix/glue/default.nix | 11
-rw-r--r--  tvix/glue/src/builtins/derivation.rs | 117
-rw-r--r--  tvix/glue/src/builtins/errors.rs | 6
-rw-r--r--  tvix/glue/src/builtins/fetchers.rs | 60
-rw-r--r--  tvix/glue/src/builtins/import.rs | 211
-rw-r--r--  tvix/glue/src/fetchers/decompression.rs | 6
-rw-r--r--  tvix/glue/src/fetchers/mod.rs | 447
-rw-r--r--  tvix/glue/src/fetchurl.rs | 82
-rw-r--r--  tvix/glue/src/known_paths.rs | 34
-rw-r--r--  tvix/glue/src/lib.rs | 2
-rw-r--r--  tvix/glue/src/tests/empty-file | 0
-rw-r--r--  tvix/glue/src/tests/mod.rs | 8
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-attrs.nix | 5
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-url.nix | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-attrs.nix | 5
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-url.nix | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-fail-tofile-wrongctxtype.nix | 3
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix | 2
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp | 2
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix | 40
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix | 9
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-tofile.exp | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-tofile.nix | 11
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp | 1
-rw-r--r--  tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix | 14
-rw-r--r--  tvix/glue/src/tvix_io.rs | 4
-rw-r--r--  tvix/glue/src/tvix_store_io.rs | 105
-rw-r--r--  tvix/nix-compat/Cargo.toml | 2
-rw-r--r--  tvix/nix-compat/build.rs | 5
-rw-r--r--  tvix/nix-compat/default.nix | 14
-rw-r--r--  tvix/nix-compat/src/nar/reader/mod.rs | 2
-rw-r--r--  tvix/nix-compat/src/nar/wire/mod.rs | 2
-rw-r--r--  tvix/nix-compat/src/wire/bytes/reader/mod.rs | 2
-rw-r--r--  tvix/shell.nix | 16
-rw-r--r--  tvix/store-go/pathinfo.pb.go | 2
-rw-r--r--  tvix/store-go/rpc_pathinfo.pb.go | 2
-rw-r--r--  tvix/store/Cargo.toml | 13
-rw-r--r--  tvix/store/default.nix | 44
-rw-r--r--  tvix/store/src/bin/tvix-store.rs | 189
-rw-r--r--  tvix/store/src/import.rs | 13
-rw-r--r--  tvix/store/src/nar/import.rs | 75
-rw-r--r--  tvix/store/src/nar/mod.rs | 1
-rw-r--r--  tvix/store/src/nar/renderer.rs | 7
-rw-r--r--  tvix/store/src/pathinfoservice/bigtable.rs | 16
-rw-r--r--  tvix/store/src/pathinfoservice/from_addr.rs | 9
-rw-r--r--  tvix/store/src/pathinfoservice/grpc.rs | 36
-rw-r--r--  tvix/store/src/pathinfoservice/nix_http.rs | 27
-rw-r--r--  tvix/store/src/pathinfoservice/sled.rs | 3
-rw-r--r--  tvix/store/src/pathinfoservice/tests/mod.rs | 77
-rw-r--r--  tvix/store/src/pathinfoservice/tests/utils.rs | 38
-rw-r--r--  tvix/store/src/utils.rs | 33
-rw-r--r--  tvix/tracing/Cargo.toml | 43
-rw-r--r--  tvix/tracing/default.nix | 11
-rw-r--r--  tvix/tracing/src/lib.rs | 302
-rw-r--r--  tvix/tracing/src/propagate/mod.rs | 9
-rw-r--r--  tvix/tracing/src/propagate/tonic.rs | 57
-rw-r--r--  tvix/utils.nix | 42
-rw-r--r--  users/Profpatsch/.hlint.yaml | 2
-rw-r--r--  users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs | 1
-rw-r--r--  users/Profpatsch/openlab-tools/src/OpenlabTools.hs | 56
-rw-r--r--  users/Profpatsch/shell.nix | 2
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/AppT.hs | 2
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Http.hs | 34
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/JsonLd.hs | 1
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Redacted.hs | 92
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/Transmission.hs | 31
-rw-r--r--  users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs | 322
-rw-r--r--  users/flokli/ipu6-softisp/default.nix | 12
-rw-r--r--  users/tazjin/chase-geese/default.nix | 2
-rw-r--r--  users/tazjin/emacs/config/settings.el | 3
-rw-r--r--  users/tazjin/emacs/default.nix | 2
-rw-r--r--  users/tazjin/home/arbat.nix | 11
-rw-r--r--  users/tazjin/keys/default.nix | 1
-rw-r--r--  users/tazjin/nixos/arbat/default.nix | 74
-rw-r--r--  users/tazjin/nixos/default.nix | 1
-rw-r--r--  users/tazjin/nixos/modules/desktop.nix | 4
-rw-r--r--  users/tazjin/nixos/modules/geesefs.nix | 2
-rw-r--r--  users/tazjin/nixos/modules/home-config.nix | 4
-rw-r--r--  users/tazjin/nixos/modules/physical.nix | 3
-rw-r--r--  users/tazjin/nixos/zamalek/default.nix | 5
-rw-r--r--  web/pwcrypt/Cargo.lock | 20
-rw-r--r--  web/pwcrypt/Cargo.toml | 2
-rw-r--r--  web/tvixbolt/Cargo.lock | 27
-rw-r--r--  web/tvixbolt/Cargo.toml | 2
216 files changed, 7731 insertions(+), 2311 deletions(-)
diff --git a/corp/rih/frontend/Cargo.lock b/corp/rih/frontend/Cargo.lock
index 2d2f5ea84b..40641d5de3 100644
--- a/corp/rih/frontend/Cargo.lock
+++ b/corp/rih/frontend/Cargo.lock
@@ -1500,9 +1500,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
@@ -1510,9 +1510,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
+checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
 dependencies = [
  "bumpalo",
  "log",
@@ -1537,9 +1537,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
+checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -1547,9 +1547,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
+checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1560,9 +1560,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
 
 [[package]]
 name = "web-sys"
diff --git a/corp/rih/frontend/Cargo.toml b/corp/rih/frontend/Cargo.toml
index f01378c7e6..1b9d500979 100644
--- a/corp/rih/frontend/Cargo.toml
+++ b/corp/rih/frontend/Cargo.toml
@@ -19,7 +19,7 @@ yew-router = "0.17"
 wasm-bindgen-futures = "0.4"
 
 # needs to be in sync with nixpkgs
-wasm-bindgen = "= 0.2.91"
+wasm-bindgen = "= 0.2.92"
 uuid = { version = "1.3.3", features = ["v4", "serde"] }
 
 [dependencies.serde]
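
The `# needs to be in sync with nixpkgs` comment above exists because the `wasm-bindgen` crate emits glue code that must exactly match the version of the `wasm-bindgen-cli` tool nixpkgs ships. A minimal sketch of checking that (illustrative only, assuming a standard nixpkgs import; not part of this commit):

```nix
# Evaluate the wasm-bindgen-cli version provided by nixpkgs; the `= 0.2.92`
# pin in Cargo.toml has to match this exactly, or trunk builds fail.
{ pkgs ? import <nixpkgs> { } }:
pkgs.wasm-bindgen-cli.version
```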
diff --git a/corp/russian/predlozhnik/Cargo.lock b/corp/russian/predlozhnik/Cargo.lock
index 1cacb43b39..6874e3554c 100644
--- a/corp/russian/predlozhnik/Cargo.lock
+++ b/corp/russian/predlozhnik/Cargo.lock
@@ -363,9 +363,9 @@ checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
@@ -373,9 +373,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
+checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
 dependencies = [
  "bumpalo",
  "log",
@@ -400,9 +400,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
+checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -410,9 +410,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
+checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -423,9 +423,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
 
 [[package]]
 name = "web-sys"
diff --git a/corp/russian/predlozhnik/Cargo.toml b/corp/russian/predlozhnik/Cargo.toml
index 87537b560b..0b099ac927 100644
--- a/corp/russian/predlozhnik/Cargo.toml
+++ b/corp/russian/predlozhnik/Cargo.toml
@@ -9,4 +9,4 @@ lazy_static = "1.4"
 yew = "0.19"
 
 # needs to be in sync with nixpkgs
-wasm-bindgen = "= 0.2.91"
+wasm-bindgen = "= 0.2.92"
diff --git a/corp/russian/predlozhnik/default.nix b/corp/russian/predlozhnik/default.nix
index 2137be1112..495e977c13 100644
--- a/corp/russian/predlozhnik/default.nix
+++ b/corp/russian/predlozhnik/default.nix
@@ -44,7 +44,6 @@ pkgs.rustPlatform.buildRustPackage rec {
     export PATH=${lib.makeBinPath deps}:$PATH
     mkdir home
     export HOME=$PWD/.home
-    env
     trunk build --release -d $out
   '';
 
diff --git a/docs/REVIEWS.md b/docs/REVIEWS.md
index d76f11f410..9d2356744d 100644
--- a/docs/REVIEWS.md
+++ b/docs/REVIEWS.md
@@ -5,10 +5,10 @@ TVL Code Reviews
 **Table of Contents**
 
 - [TVL Code Reviews](#tvl-code-reviews)
+    - [Registration](#registration)
     - [Gerrit setup](#gerrit-setup)
     - [Gerrit workflows](#gerrit-workflows)
     - [Review process & approvals](#review-process--approvals)
-    - [Registration](#registration)
     - [Submitting changes via email](#submitting-changes-via-email)
     - [Gerrit for Github users](#gerrit-for-github-users)
 
@@ -22,6 +22,28 @@ contribution guidelines](./CONTRIBUTING.md).
 All changes are tracked at [cl.tvl.fyi](https://cl.tvl.fyi) using Gerrit. See
 [Registration](#registration) for information on how to register an account.
 
+## Registration
+
+The preferred method of contribution & review is via Gerrit.
+
+TVL’s Gerrit supports single sign-on (SSO) using a GitHub, StackOverflow or
+GitLab.com account.
+
+Additionally, if you prefer not to use an SSO option, or wish to have a
+backup authentication strategy in the event of a downed server or a Keycloak
+issue (recommended), you can create a TVL-specific LDAP account on the Gerrit
+instance by following these instructions:
+
+1. Be a member of `#tvl` on [hackint][].
+2. Clone the depot locally (via `git clone "https://cl.tvl.fyi/depot"`).
+3. Create a user entry in our LDAP server in [ops/users][ops-users].
+
+   The entry can be generated using [//web/pwcrypt](https://signup.tvl.fyi/).
+4. Create a commit adding yourself (see e.g.
+   [CL/2671](https://cl.tvl.fyi/c/depot/+/2671))
+5. If only using LDAP, submit the patch via email (see
+   [Submitting changes via email](#submitting-changes-via-email))
+
 ## Gerrit setup
 
 Gerrit uses the concept of change IDs to track commits across rebases and other
@@ -111,23 +133,8 @@ formalised checks before submitting:
 If all these conditions are fulfilled, the **change author submits their change
 themselves**.
 
-## Registration
-
-You may log into Gerrit using a GitHub, StackOverflow or GitLab.com account.
-
-If you would like to have a TVL-specific account on the Gerrit
-instance, follow these instructions:
-
-1. Be a member of `#tvl` on [hackint][].
-2. Clone the depot locally (via `git clone "https://cl.tvl.fyi/depot"`).
-3. Create a user entry in our LDAP server in [ops/users][ops-users].
-
-   The entry can be generated using [//web/pwcrypt](https://signup.tvl.fyi/).
-4. Create a commit adding yourself (see e.g.
-   [CL/2671](https://cl.tvl.fyi/c/depot/+/2671))
-5. Submit the commit via email (see below).
-
 ## Submitting changes via email
+
 Please keep in mind this process is more complicated and requires more work from
 both sides:
 
diff --git a/fun/clbot/clbot.go b/fun/clbot/clbot.go
index e5a5990ef2..40b044f45d 100644
--- a/fun/clbot/clbot.go
+++ b/fun/clbot/clbot.go
@@ -41,7 +41,8 @@ var (
 	notifyRepo     = flag.String("notify_repo", "depot", "Repo name to notify about")
 	notifyBranches = stringSetFlag{}
 
-	neverPing = flag.String("never_ping", "marcus", "Comma-separated terms that should never ping users")
+	neverPing   = flag.String("never_ping", "marcus", "Comma-separated terms that should never ping users")
+	onlyDisplay = flag.String("only_display", "", "Comma-separated substrings of the gerrit CL Change Subject that should be shown (everything else is dropped)")
 )
 
 func init() {
@@ -51,7 +52,7 @@ func init() {
 type stringSetFlag map[string]bool
 
 func (f stringSetFlag) String() string {
-	return fmt.Sprintf("%q", map[string]bool(f))
+	return fmt.Sprintf("%v", map[string]bool(f))
 }
 func (f stringSetFlag) Set(s string) error {
 	if s == "" {
@@ -193,6 +194,21 @@ func nopingAll(username, message string) string {
 	return strings.ReplaceAll(message, username, noping(username))
 }
 
+// changeShouldBeSkipped applies the comma-separated list of subject
+// substrings in `onlyDisplay` to decide whether a CL should be skipped.
+func changeShouldBeSkipped(onlyDisplay string, changeSubject string) bool {
+	// case when we don’t want to filter
+	if onlyDisplay == "" {
+		return false
+	}
+	for _, needle := range strings.Split(onlyDisplay, ",") {
+		if strings.Contains(changeSubject, needle) {
+			return false
+		}
+	}
+	return true
+}
+
 func patchSetURL(c gerritevents.Change, p gerritevents.PatchSet) string {
 	return fmt.Sprintf("https://cl.tvl.fyi/%d", c.Number)
 }
@@ -248,13 +264,13 @@ func main() {
 			var parsedMsg string
 			switch e := e.(type) {
 			case *gerritevents.PatchSetCreated:
-				if e.Change.Project != *notifyRepo || !notifyBranches[e.Change.Branch] || e.PatchSet.Number != 1 {
+				if e.Change.Project != *notifyRepo || !notifyBranches[e.Change.Branch] || e.PatchSet.Number != 1 || changeShouldBeSkipped(*onlyDisplay, e.Change.Subject) {
 					continue
 				}
 				user := username(e.PatchSet.Uploader)
 				parsedMsg = nopingAll(user, fmt.Sprintf("CL/%d proposed by %s - %s - %s", e.Change.Number, user, e.Change.Subject, patchSetURL(e.Change, e.PatchSet)))
 			case *gerritevents.ChangeMerged:
-				if e.Change.Project != *notifyRepo || !notifyBranches[e.Change.Branch] {
+				if e.Change.Project != *notifyRepo || !notifyBranches[e.Change.Branch] || changeShouldBeSkipped(*onlyDisplay, e.Change.Subject) {
 					continue
 				}
 				owner := username(e.Change.Owner)
diff --git a/fun/clbot/clbot_test.go b/fun/clbot/clbot_test.go
new file mode 100644
index 0000000000..567540c364
--- /dev/null
+++ b/fun/clbot/clbot_test.go
@@ -0,0 +1,24 @@
+package main
+
+import (
+	"testing"
+)
+
+func TestChangeShouldBeSkipped(t *testing.T) {
+	dontSkipAny := ""
+	if changeShouldBeSkipped(dontSkipAny, "mysubject") {
+		t.Fatal("dontSkipAny should not skip any change")
+	}
+
+	showThese := "A,B"
+	if changeShouldBeSkipped(showThese, "A") {
+		t.Fatal("A should be shown")
+	}
+	if changeShouldBeSkipped(showThese, "B") {
+		t.Fatal("B should be shown")
+	}
+	if !changeShouldBeSkipped(showThese, "C") {
+		t.Fatal("C should not be shown")
+	}
+
+}
diff --git a/ops/kontemplate/default.nix b/ops/kontemplate/default.nix
index 1190869c3f..be01f9e4f6 100644
--- a/ops/kontemplate/default.nix
+++ b/ops/kontemplate/default.nix
@@ -12,12 +12,11 @@
 
 { lib, pkgs, ... }:
 
-pkgs.buildGoPackage rec {
+pkgs.buildGoModule rec {
   name = "kontemplate-${version}";
   version = "canon";
   src = ./.;
-  goPackagePath = "github.com/tazjin/kontemplate";
-  goDeps = ./deps.nix;
+
   buildInputs = [ pkgs.parallel ];
 
   # Enable checks and configure check-phase to include vet:
@@ -28,6 +27,8 @@ pkgs.buildGoPackage rec {
     done
   '';
 
+  vendorHash = "sha256-xPGVM2dq5fAVOiuodOXhDm3v3k+ncNLhlk6aCtF5S9E=";
+
   meta = with lib; {
     description = "A resource templating helper for Kubernetes";
     homepage = "http://kontemplate.works/";
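
The switch from `buildGoPackage` with a generated `deps.nix` to `buildGoModule` replaces the per-dependency list with a single `vendorHash` over the vendored Go modules. A common way to obtain that hash during such a migration is the fake-hash trick, sketched below (an assumption about the workflow used, not part of this commit):

```nix
# Build once with lib.fakeHash; Nix fails with a hash mismatch that prints
# the real value (here sha256-xPGVM2dq…), which is then pasted into the file.
pkgs.buildGoModule rec {
  name = "kontemplate-${version}";
  version = "canon";
  src = ./.;
  vendorHash = pkgs.lib.fakeHash;
}
```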
diff --git a/ops/kontemplate/deps.nix b/ops/kontemplate/deps.nix
deleted file mode 100644
index 7693968bd5..0000000000
--- a/ops/kontemplate/deps.nix
+++ /dev/null
@@ -1,111 +0,0 @@
-# This file was generated by https://github.com/kamilchm/go2nix v1.3.0
-[
-  {
-    goPackagePath = "github.com/Masterminds/goutils";
-    fetch = {
-      type = "git";
-      url = "https://github.com/Masterminds/goutils";
-      rev = "41ac8693c5c10a92ea1ff5ac3a7f95646f6123b0";
-      sha256 = "180px47gj936qyk5bkv5mbbgiil9abdjq6kwkf7sq70vyi9mcfiq";
-    };
-  }
-  {
-    goPackagePath = "github.com/Masterminds/semver";
-    fetch = {
-      type = "git";
-      url = "https://github.com/Masterminds/semver";
-      rev = "5bc3b9184d48f1412b300b87a200cf020d9254cf";
-      sha256 = "1vdfm653v50jf63cw0kg2hslx50cn4mk6lj3p51bi11jrg48kfng";
-    };
-  }
-  {
-    goPackagePath = "github.com/Masterminds/sprig";
-    fetch = {
-      type = "git";
-      url = "https://github.com/Masterminds/sprig";
-      rev = "6f509977777c33eae63b2136d97f7b976cb971cc";
-      sha256 = "05h9k6fhjxnpwlihj3z02q9kvqvnq53jix0ab84sx0666bci3cdh";
-    };
-  }
-  {
-    goPackagePath = "github.com/alecthomas/template";
-    fetch = {
-      type = "git";
-      url = "https://github.com/alecthomas/template";
-      rev = "fb15b899a75114aa79cc930e33c46b577cc664b1";
-      sha256 = "1vlasv4dgycydh5wx6jdcvz40zdv90zz1h7836z7lhsi2ymvii26";
-    };
-  }
-  {
-    goPackagePath = "github.com/alecthomas/units";
-    fetch = {
-      type = "git";
-      url = "https://github.com/alecthomas/units";
-      rev = "c3de453c63f4bdb4dadffab9805ec00426c505f7";
-      sha256 = "0js37zlgv37y61j4a2d46jh72xm5kxmpaiw0ya9v944bjpc386my";
-    };
-  }
-  {
-    goPackagePath = "github.com/ghodss/yaml";
-    fetch = {
-      type = "git";
-      url = "https://github.com/ghodss/yaml";
-      rev = "25d852aebe32c875e9c044af3eef9c7dc6bc777f";
-      sha256 = "1w9yq0bxzygc4qwkwwiy7k1k1yviaspcqqv18255k2xkjv5ipccz";
-    };
-  }
-  {
-    goPackagePath = "github.com/google/uuid";
-    fetch = {
-      type = "git";
-      url = "https://github.com/google/uuid";
-      rev = "c2e93f3ae59f2904160ceaab466009f965df46d6";
-      sha256 = "0zw8fvl6jqg0fmv6kmvhss0g4gkrbvgyvl2zgy5wdbdlgp4fja0h";
-    };
-  }
-  {
-    goPackagePath = "github.com/huandu/xstrings";
-    fetch = {
-      type = "git";
-      url = "https://github.com/huandu/xstrings";
-      rev = "8bbcf2f9ccb55755e748b7644164cd4bdce94c1d";
-      sha256 = "1ivvc95514z63k7cpz71l0dwlanffmsh1pijhaqmp41kfiby8rsx";
-    };
-  }
-  {
-    goPackagePath = "github.com/imdario/mergo";
-    fetch = {
-      type = "git";
-      url = "https://github.com/imdario/mergo";
-      rev = "4c317f2286be3bd0c4f1a0e622edc6398ec4656d";
-      sha256 = "0bihha1qsgfjk14yv1hwddv3d8dzxpbjlaxwwyys6lhgxz1cr9h9";
-    };
-  }
-  {
-    goPackagePath = "golang.org/x/crypto";
-    fetch = {
-      type = "git";
-      url = "https://go.googlesource.com/crypto";
-      rev = "9756ffdc24725223350eb3266ffb92590d28f278";
-      sha256 = "0q7hxaaq6lp0v8qqzifvysl47z5rfdlrxkh3d29vsl3wyby3dxl8";
-    };
-  }
-  {
-    goPackagePath = "gopkg.in/alecthomas/kingpin.v2";
-    fetch = {
-      type = "git";
-      url = "https://gopkg.in/alecthomas/kingpin.v2";
-      rev = "947dcec5ba9c011838740e680966fd7087a71d0d";
-      sha256 = "0mndnv3hdngr3bxp7yxfd47cas4prv98sqw534mx7vp38gd88n5r";
-    };
-  }
-  {
-    goPackagePath = "gopkg.in/yaml.v2";
-    fetch = {
-      type = "git";
-      url = "https://gopkg.in/yaml.v2";
-      rev = "51d6538a90f86fe93ac480b35f37b2be17fef232";
-      sha256 = "01wj12jzsdqlnidpyjssmj0r4yavlqy7dwrg7adqd8dicjc4ncsa";
-    };
-  }
-]
diff --git a/ops/kontemplate/go.mod b/ops/kontemplate/go.mod
new file mode 100644
index 0000000000..e3ae158ea2
--- /dev/null
+++ b/ops/kontemplate/go.mod
@@ -0,0 +1,25 @@
+module github.com/tazjin/kontemplate
+
+go 1.22.3
+
+require (
+	github.com/Masterminds/sprig/v3 v3.2.3
+	github.com/alecthomas/kingpin/v2 v2.4.0
+	github.com/ghodss/yaml v1.0.0
+)
+
+require (
+	github.com/Masterminds/goutils v1.1.1 // indirect
+	github.com/Masterminds/semver/v3 v3.2.0 // indirect
+	github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 // indirect
+	github.com/google/uuid v1.1.1 // indirect
+	github.com/huandu/xstrings v1.3.3 // indirect
+	github.com/imdario/mergo v0.3.11 // indirect
+	github.com/mitchellh/copystructure v1.0.0 // indirect
+	github.com/mitchellh/reflectwalk v1.0.0 // indirect
+	github.com/shopspring/decimal v1.2.0 // indirect
+	github.com/spf13/cast v1.3.1 // indirect
+	github.com/xhit/go-str2duration/v2 v2.1.0 // indirect
+	golang.org/x/crypto v0.3.0 // indirect
+	gopkg.in/yaml.v2 v2.3.0 // indirect
+)
diff --git a/ops/kontemplate/go.sum b/ops/kontemplate/go.sum
new file mode 100644
index 0000000000..754cffbcb8
--- /dev/null
+++ b/ops/kontemplate/go.sum
@@ -0,0 +1,75 @@
+github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI=
+github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU=
+github.com/Masterminds/semver/v3 v3.2.0 h1:3MEsd0SM6jqZojhjLWWeBY+Kcjy9i6MQAeY7YgDP83g=
+github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ=
+github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA=
+github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM=
+github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY=
+github.com/alecthomas/kingpin/v2 v2.4.0/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE=
+github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc=
+github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
+github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
+github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
+github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY=
+github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
+github.com/huandu/xstrings v1.3.3 h1:/Gcsuc1x8JVbJ9/rlye4xZnVAbEkGauT8lbebqcQws4=
+github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE=
+github.com/imdario/mergo v0.3.11 h1:3tnifQM4i+fbajXKBHXWEH+KvNHqojZ778UH75j3bGA=
+github.com/imdario/mergo v0.3.11/go.mod h1:jmQim1M+e3UYxmgPu/WyfjB3N3VflVyUjjjwH0dnCYA=
+github.com/mitchellh/copystructure v1.0.0 h1:Laisrj+bAB6b/yJwB5Bt3ITZhGJdqmxquMKeZ+mmkFQ=
+github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw=
+github.com/mitchellh/reflectwalk v1.0.0 h1:9D+8oIskB4VJBN5SFlmc27fSlIBZaov1Wpk/IfikLNY=
+github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/shopspring/decimal v1.2.0 h1:abSATXmQEYyShuxI4/vyW3tV1MrKAJzCZ/0zLUXYbsQ=
+github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
+github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
+github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
+github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
+github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
+github.com/stretchr/testify v1.8.2 h1:+h33VjcLVPDHtOdpUCuF+7gSuG3yGIftsP1YvFihtJ8=
+github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc=
+github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU=
+github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
+golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
+golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
+golang.org/x/crypto v0.3.0 h1:a06MkbcxBrEFc0w0QIZWXrH/9cCX6KJyWbBOIwAn+7A=
+golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
+golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
+golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
+golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
+golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY=
+golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
+golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
+golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
+golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
+golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
+golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
+golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
+gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
+gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
+gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
diff --git a/ops/kontemplate/main.go b/ops/kontemplate/main.go
index e55d42465c..c98229f5cd 100644
--- a/ops/kontemplate/main.go
+++ b/ops/kontemplate/main.go
@@ -21,9 +21,9 @@ import (
 	"os/exec"
 	"strings"
 
+	"github.com/alecthomas/kingpin/v2"
 	"github.com/tazjin/kontemplate/context"
 	"github.com/tazjin/kontemplate/templater"
-	"gopkg.in/alecthomas/kingpin.v2"
 )
 
 const version string = "1.8.0"
diff --git a/ops/kontemplate/templater/templater.go b/ops/kontemplate/templater/templater.go
index a8f0c670a6..86cbad459f 100644
--- a/ops/kontemplate/templater/templater.go
+++ b/ops/kontemplate/templater/templater.go
@@ -20,7 +20,7 @@ import (
 	"strings"
 	"text/template"
 
-	"github.com/Masterminds/sprig"
+	"github.com/Masterminds/sprig/v3"
 	"github.com/tazjin/kontemplate/context"
 	"github.com/tazjin/kontemplate/util"
 )
diff --git a/ops/machines/whitby/default.nix b/ops/machines/whitby/default.nix
index 6a8ee56abc..4c2fd0f2f5 100644
--- a/ops/machines/whitby/default.nix
+++ b/ops/machines/whitby/default.nix
@@ -25,6 +25,7 @@ in
     (mod "restic.nix")
     (mod "smtprelay.nix")
     (mod "sourcegraph.nix")
+    (mod "teleirc.nix")
     (mod "tvl-buildkite.nix")
     (mod "tvl-slapd/default.nix")
     (mod "tvl-users.nix")
@@ -232,6 +233,7 @@ in
       owothia.file = secretFile "owothia";
       panettone.file = secretFile "panettone";
       smtprelay.file = secretFile "smtprelay";
+      teleirc.file = secretFile "teleirc";
 
       buildkite-agent-token = {
         file = secretFile "buildkite-agent-token";
@@ -345,7 +347,12 @@ in
   # Start the Gerrit->IRC bot
   services.depot.clbot = {
     enable = true;
-    channels = [ "#tvix-dev" "#tvl" ];
+    channels = {
+      "#tvl" = { };
+      "#tvix-dev" = {
+        only_display = "tvix,nix-compat,third_party,third-party,3p";
+      };
+    };
 
     # See //fun/clbot for details.
     flags = {
@@ -410,6 +417,9 @@ in
       };
     };
 
+    # Run the Telegram<>IRC bridge for Volga Sprint.
+    teleirc.enable = true;
+
     # Run atward, the search engine redirection thing.
     atward.enable = true;
 
diff --git a/ops/modules/clbot.nix b/ops/modules/clbot.nix
index bdddff6c81..0a436a8749 100644
--- a/ops/modules/clbot.nix
+++ b/ops/modules/clbot.nix
@@ -7,6 +7,7 @@ let
 
   inherit (lib)
     listToAttrs
+    mapAttrsToList
     mkEnableOption
     mkIf
     mkOption
@@ -25,13 +26,13 @@ let
     ${pkgs.systemd}/bin/systemd-escape '${name}' >> $out
   ''));
 
-  mkUnit = flags: channel: {
+  mkUnit = channel: channelFlags: {
     name = "clbot-${systemdEscape channel}";
     value = {
       description = "${description} to ${channel}";
       wantedBy = [ "multi-user.target" ];
 
-      script = "${depot.fun.clbot}/bin/clbot ${mkFlags (cfg.flags // {
+      script = "${depot.fun.clbot}/bin/clbot ${mkFlags (cfg.flags // channelFlags // {
         irc_channel = channel;
       })} -alsologtostderr";
 
@@ -53,8 +54,8 @@ in
     };
 
     channels = mkOption {
-      type = with types; listOf str;
-      description = "Channels in which to post (generates one unit per channel)";
+      type = with types; attrsOf (attrsOf str);
+      description = "Channels in which to post (generates one unit per channel); nested attrs are used as extra flags to the service, which override the attrs in `flags`";
     };
 
     secretsFile = mkOption {
@@ -77,6 +78,6 @@ in
       };
     };
 
-    systemd.services = listToAttrs (map (mkUnit cfg.flags) cfg.channels);
+    systemd.services = listToAttrs (mapAttrsToList mkUnit cfg.channels);
   };
 }
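
With `channels` now typed as `attrsOf (attrsOf str)`, each per-channel attrset is merged over the shared `flags` (channel values win) and one systemd unit is generated per channel. The whitby configuration earlier in this diff exercises this as follows:

```nix
# "#tvl" keeps the module-level flags unchanged; "#tvix-dev" additionally
# passes -only_display to clbot, filtering which CL subjects get announced.
services.depot.clbot = {
  enable = true;
  channels = {
    "#tvl" = { };
    "#tvix-dev" = {
      only_display = "tvix,nix-compat,third_party,third-party,3p";
    };
  };
};
```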
diff --git a/ops/modules/teleirc.nix b/ops/modules/teleirc.nix
new file mode 100644
index 0000000000..9f9ac059ce
--- /dev/null
+++ b/ops/modules/teleirc.nix
@@ -0,0 +1,40 @@
+# Run the Telegram<>IRC sync bot for the Volga Sprint channel.
+#
+# This module is written in a pretty ad-hoc style, as it is sort of a
+# throwaway thing (will be removed again after the event).
+{ depot, config, lib, pkgs, ... }:
+
+let
+  cfg = config.services.depot.teleirc;
+  description = "IRC<>Telegram sync for Volga Sprint channel";
+  configFile = builtins.toFile "teleirc.env" ''
+    # connect through tvlbot's ZNC bouncer
+    IRC_SERVER="localhost"
+    IRC_PORT=2627
+    IRC_USE_SSL=false
+    IRC_CHANNEL="#volgasprint"
+    IRC_BLACKLIST="tvlbot"
+    IRC_BOT_NAME="tvlbot"
+    IRC_BOT_REALNAME="TVL bot for Volga Sprint"
+    IRC_BOT_IDENT="tvlbot"
+    IRC_SEND_STICKER_EMOJI=false # look into this
+    TELEGRAM_CHAT_ID=-1002153072030
+  '';
+in
+{
+  options.services.depot.teleirc.enable = lib.mkEnableOption description;
+
+  config = lib.mkIf cfg.enable {
+    systemd.services.teleirc = {
+      inherit description;
+      wantedBy = [ "multi-user.target" ];
+
+      serviceConfig = {
+        DynamicUser = true;
+        Restart = "always";
+        EnvironmentFile = "/run/agenix/teleirc";
+        ExecStart = "${depot.third_party.teleirc}/bin/teleirc -conf ${configFile}";
+      };
+    };
+  };
+}
diff --git a/ops/secrets/secrets.nix b/ops/secrets/secrets.nix
index 5cbf2bf612..bc32d23597 100644
--- a/ops/secrets/secrets.nix
+++ b/ops/secrets/secrets.nix
@@ -9,6 +9,12 @@ let
 
     # zamalek
     "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDBRXeb8EuecLHP0bW4zuebXp4KRnXgJTZfeVWXQ1n1R"
+
+    # khamovnik
+    "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAID1ptE5HvGSXxSXo+aHBTKa5PBlAM1HqmpzWz0yAhHLj"
+
+    # arbat
+    "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJ1Eai0p7eF7XML5wokqF4GlVZM+YXEORfs/GPGwEky7"
   ];
 
   aspen = [
@@ -47,6 +53,7 @@ in
   "owothia.age" = whitbyDefault;
   "panettone.age" = whitbyDefault;
   "smtprelay.age" = whitbyDefault;
+  "teleirc.age" = whitbyDefault;
   "tf-buildkite.age" = terraform;
   "tf-glesys.age" = terraform;
   "tf-keycloak.age" = terraform;
diff --git a/ops/secrets/teleirc.age b/ops/secrets/teleirc.age
new file mode 100644
index 0000000000..ebc88fc9ef
--- /dev/null
+++ b/ops/secrets/teleirc.age
Binary files differ
diff --git a/ops/terraform/deploy-nixos/README.md b/ops/terraform/deploy-nixos/README.md
index fd0bd1b442..2580a7c0e1 100644
--- a/ops/terraform/deploy-nixos/README.md
+++ b/ops/terraform/deploy-nixos/README.md
@@ -25,8 +25,17 @@ deploy is necessary.
 
 ```terraform
 module "deploy_somehost" {
+  # Clone just this directory through josh. Add a `ref=` parameter to pin to a specific commit.
   source              = "git::https://code.tvl.fyi/depot.git:/ops/terraform/deploy-nixos.git"
+
+  # The attribute path pointing to the expression to instantiate.
   attrpath            = "ops.nixos.somehost"
+
+  # The path to the Nix file to invoke. Optional.
+  # If omitted, the module will shell out to git to determine the repo root,
+  # and Nix will use the `default.nix` found there.
+  entrypoint          = "${path.module}/../../somewhere.nix"
+
   target_host         = "somehost.tvl.su"
   target_user         = "someone"
   target_user_ssh_key = tls_private_key.somehost.private_key_pem
@@ -37,9 +46,6 @@ module "deploy_somehost" {
 
 Several things can be improved about this module, for example:
 
-* The repository root (relative to which the attribute path is evaluated) could
-  be made configurable.
-
 * The remote system closure could be discovered to restore remote system state
   after manual deploys on the target (i.e. "stomping" of changes).
 
diff --git a/ops/users/default.nix b/ops/users/default.nix
index a50575f3fb..cd1ca4fc27 100644
--- a/ops/users/default.nix
+++ b/ops/users/default.nix
@@ -22,6 +22,11 @@
     password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$5NEYPJ19nDITK5sGr4bzhQ$Xzpzth6y4w+HGvioHiYgzqFiwMDx0B7HAh+PVbkRuuk";
   }
   {
+    username = "chickadee";
+    email = "matthewktromp@gmail.com";
+    password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$HoZjVdJ90JmTEJf1MMLuDg$5Pa8kpJdFVsIxgoOTDsH0gv6CLumSIkMqYEn5UVfjwU";
+  }
+  {
     username = "cschilling";
     email = "christian.schilling.de@gmail.com";
     password = "{ARGON2}$argon2id$v=19$m=65536,t=2,p=1$9VN3IS6ViW5FFbVKWOZI6Q$gZxuYAYk0Opq4E5i8cbcNjfznCQNc+RiP7Xv1CUnrQU";
@@ -239,4 +244,14 @@
     email = "benjaminedwardwebb@gmail.com";
     password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$kdFNmxgIGsF8TkB/GoPy1A$GUXd3M35Jqxqlfra4gPCcFW3ehE0RVrlHOzaoD7Pu7s";
   }
+  {
+    username = "fmzakari";
+    email = "farid.m.zakaria@gmail.com";
+    password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$NzSX6x2+mApMvhNrvVIWaQ$/GUwbj+6GUyJL8XSgxTThc3TmVTM4WLQ+6KMC4NwovE";
+  }
+  {
+    username = "toastal";
+    email = "toastal@posteo.net";
+    password = "{ARGON2}$argon2id$v=19$m=19456,t=2,p=1$txwVjPn9kKPUgsZnPtpyaA$pE0ISDGScCE4JCKcmbnzC+GZZ4PP6MqKJKmR/sxo6TY";
+  }
 ]
diff --git a/third_party/exwm/exwm-core.el b/third_party/exwm/exwm-core.el
index e0d644d941..a7fdfce710 100644
--- a/third_party/exwm/exwm-core.el
+++ b/third_party/exwm/exwm-core.el
@@ -82,6 +82,7 @@ Here are some predefined candidates:
 (defvar exwm-input--simulation-keys)
 (defvar exwm-input-line-mode-passthrough)
 (defvar exwm-input-prefix-keys)
+(defvar exwm-workspace--list)
 (declare-function exwm-input--fake-key "exwm-input.el" (event))
 (declare-function exwm-input--on-KeyPress-line-mode "exwm-input.el"
                   (key-press raw-data))
@@ -94,6 +95,8 @@ Here are some predefined candidates:
 (declare-function exwm-manage--kill-buffer-query-function "exwm-manage.el")
 (declare-function exwm-workspace-move-window "exwm-workspace.el"
                   (frame-or-index &optional id))
+(declare-function exwm-workspace-switch "exwm-workspace.el"
+                  (frame-or-index &optional force))
 
 (define-minor-mode exwm-debug
   "Debug-logging enabled if non-nil."
@@ -229,6 +232,14 @@ If CONN is non-nil, use it instead of the value of the variable
       (setq ret-depth depth))
     (list ret-visual ret-depth ret-colormap)))
 
+(defun exwm--mode-name ()
+  "Mode name string used in `exwm-mode' buffers."
+  (let ((name "EXWM"))
+    (if (cl-some (lambda (i) (frame-parameter i 'exwm-urgency))
+                 exwm-workspace--list)
+        (propertize name 'face 'font-lock-warning-face)
+      name)))
+
 ;; Internal variables
 (defvar-local exwm--id nil)               ;window ID
 (defvar-local exwm--configurations nil)   ;initial configurations.
@@ -311,7 +322,7 @@ One of `line-mode' or `char-mode'.")
 ;; Also, inactive entries should be disabled rather than hidden.
 (easy-menu-define exwm-mode-menu exwm-mode-map
   "Menu for `exwm-mode'."
-  '("EXWM"
+  `("EXWM"
     "---"
     "*General*"
     "---"
@@ -336,22 +347,20 @@ One of `line-mode' or `char-mode'.")
     ["Send key" exwm-input-send-next-key (eq exwm--input-mode 'line-mode)]
     ;; This is merely a reference.
     ("Send simulation key" :filter
-     (lambda (&rest _args)
-       (let (result)
-         (maphash
-          (lambda (key value)
-            (when (sequencep key)
-              (setq result (append result
-                                   `([
-                                      ,(format "Send '%s'"
-                                               (key-description value))
-                                      (lambda ()
-                                        (interactive)
-                                        (dolist (i ',value)
-                                          (exwm-input--fake-key i)))
-                                      :keys ,(key-description key)])))))
-          exwm-input--simulation-keys)
-         result)))
+     ,(lambda (&rest _args)
+        (let (result)
+          (maphash
+           (lambda (key value)
+             (when (sequencep key)
+               (setq result (append result
+                                    `([,(format "Send '%s'"
+                                                (key-description value))
+                                       ,(lambda ()
+                                          (interactive)
+                                          (mapc #'exwm-input--fake-key value))
+                                       :keys ,(key-description key)])))))
+           exwm-input--simulation-keys)
+          result)))
 
     ["Define global binding" exwm-input-set-key]
 
@@ -368,26 +377,20 @@ One of `line-mode' or `char-mode'.")
     ["Switch workspace" exwm-workspace-switch]
     ;; Place this entry at bottom to avoid selecting others by accident.
     ("Switch to" :filter
-     (lambda (&rest _args)
-       (mapcar (lambda (i)
-                 `[,(format "Workspace %d" i)
-                   (lambda ()
-                     (interactive)
-                     (exwm-workspace-switch ,i))
-                   (/= ,i exwm-workspace-current-index)])
-               (number-sequence 0 (1- (exwm-workspace--count))))))))
+     ,(lambda (&rest _args)
+        (mapcar (lambda (i)
+                  `[,(format "Workspace %d" i)
+                    ,(lambda ()
+                       (interactive)
+                       (exwm-workspace-switch i))
+                    (/= ,i exwm-workspace-current-index)])
+                (number-sequence 0 (1- (length exwm-workspace--list))))))))
 
 (define-derived-mode exwm-mode nil "EXWM"
   "Major mode for managing X windows.
 
 \\{exwm-mode-map}"
-  ;;
-  (setq mode-name
-        '(:eval (propertize "EXWM" 'face
-                            (when (cl-some (lambda (i)
-                                             (frame-parameter i 'exwm-urgency))
-                                           exwm-workspace--list)
-                              'font-lock-warning-face))))
+  :interactive nil :abbrev-table nil :syntax-table nil
   ;; Change major-mode is not allowed
   (add-hook 'change-major-mode-hook #'kill-buffer nil t)
   ;; Kill buffer -> close window
@@ -396,7 +399,8 @@ One of `line-mode' or `char-mode'.")
   ;; Redirect events when executing keyboard macros.
   (push `(executing-kbd-macro . ,exwm--kmacro-map)
         minor-mode-overriding-map-alist)
-  (setq buffer-read-only t
+  (setq mode-name '(:eval (exwm--mode-name))
+        buffer-read-only t
         cursor-type nil
         left-margin-width nil
         right-margin-width nil
diff --git a/third_party/exwm/exwm-floating.el b/third_party/exwm/exwm-floating.el
index 34d06a30db..574a78f015 100644
--- a/third_party/exwm/exwm-floating.el
+++ b/third_party/exwm/exwm-floating.el
@@ -67,11 +67,11 @@ This hook runs in the context of the corresponding buffer."
 
 (defcustom exwm-floating-border-width 1
   "Border width of floating windows."
-  :type '(integer
-          :validate (lambda (widget)
-                      (when (< (widget-value widget) 0)
-                        (widget-put widget :error "Border width is at least 0")
-                        widget)))
+  :type `(integer
+          :validate ,(lambda (widget)
+                       (when (< (widget-value widget) 0)
+                         (widget-put widget :error "Border width is at least 0")
+                         widget)))
   :initialize #'custom-initialize-default
   :set (lambda (symbol value)
          (let ((delta (- value exwm-floating-border-width))
diff --git a/third_party/exwm/exwm-input.el b/third_party/exwm/exwm-input.el
index f1f035c91a..eac0ef6a37 100644
--- a/third_party/exwm/exwm-input.el
+++ b/third_party/exwm/exwm-input.el
@@ -46,7 +46,7 @@
   '(?\C-x ?\C-u ?\C-h ?\M-x ?\M-` ?\M-& ?\M-:)
   "List of prefix keys EXWM should forward to Emacs when in `line-mode'.
 
-The point is to make keys like 'C-x C-f' forwarded to Emacs in `line-mode'.
+The point is to make keys like `C-x C-f' forwarded to Emacs in `line-mode'.
 There is no need to add prefix keys for global/simulation keys or those
 defined in `exwm-mode-map' here."
   :type '(repeat key-sequence)
diff --git a/third_party/exwm/exwm-layout.el b/third_party/exwm/exwm-layout.el
index 8649c11ffd..83421b2e99 100644
--- a/third_party/exwm/exwm-layout.el
+++ b/third_party/exwm/exwm-layout.el
@@ -602,9 +602,7 @@ See also `exwm-layout-enlarge-window'."
   ;; Auto refresh layout
   (exwm--log)
   (add-hook 'window-configuration-change-hook #'exwm-layout--refresh)
-  ;; The behavior of `window-configuration-change-hook' will be changed.
-  (when (fboundp 'window-pixel-width-before-size-change)
-    (add-hook 'window-size-change-functions #'exwm-layout--refresh))
+  (add-hook 'window-size-change-functions #'exwm-layout--refresh)
   (unless (exwm-workspace--minibuffer-own-frame-p)
     ;; Refresh when minibuffer grows
     (add-hook 'minibuffer-setup-hook #'exwm-layout--on-minibuffer-setup t)
@@ -616,8 +614,7 @@ See also `exwm-layout-enlarge-window'."
   "Exit the layout module."
   (exwm--log)
   (remove-hook 'window-configuration-change-hook #'exwm-layout--refresh)
-  (when (fboundp 'window-pixel-width-before-size-change)
-    (remove-hook 'window-size-change-functions #'exwm-layout--refresh))
+  (remove-hook 'window-size-change-functions #'exwm-layout--refresh)
   (remove-hook 'minibuffer-setup-hook #'exwm-layout--on-minibuffer-setup)
   (when exwm-layout--timer
     (cancel-timer exwm-layout--timer)
diff --git a/third_party/exwm/exwm-systemtray.el b/third_party/exwm/exwm-systemtray.el
index 9e57dae4eb..2b46568152 100644
--- a/third_party/exwm/exwm-systemtray.el
+++ b/third_party/exwm/exwm-systemtray.el
@@ -46,15 +46,6 @@
    (visible :initarg :visible))
   :documentation "Attributes of a system tray icon.")
 
-(defclass xcb:systemtray:-ClientMessage
-  (xcb:icccm:--ClientMessage xcb:ClientMessage)
-  ((format :initform 32)
-   (type :initform 'xcb:Atom:MANAGER)
-   (time :initarg :time :type xcb:TIMESTAMP)      ;new slot
-   (selection :initarg :selection :type xcb:ATOM) ;new slot
-   (owner :initarg :owner :type xcb:WINDOW))      ;new slot
-  :documentation "A systemtray client message.")
-
 (defgroup exwm-systemtray nil
   "System tray."
   :group 'exwm)
@@ -542,7 +533,7 @@ Argument DATA contains the raw event data."
                        :destination exwm--root
                        :event-mask xcb:EventMask:StructureNotify
                        :event (xcb:marshal
-                               (make-instance 'xcb:systemtray:-ClientMessage
+                               (make-instance 'xcb:icccm:-ManagerSelection
                                               :window exwm--root
                                               :time xcb:Time:CurrentTime
                                               :selection
diff --git a/third_party/exwm/exwm-workspace.el b/third_party/exwm/exwm-workspace.el
index 89be697159..9337dc08ab 100644
--- a/third_party/exwm/exwm-workspace.el
+++ b/third_party/exwm/exwm-workspace.el
@@ -1257,12 +1257,10 @@ ALIST is an action alist, as accepted by function `display-buffer'."
   ;;        fail to retrieve the correct window.  It's likely there are
   ;;        other related issues.
   ;; This is not required by Emacs 24.
-  (when (fboundp 'window-preserve-size)
-    (let ((window (get-buffer-window "*Completions*"
-                                     exwm-workspace--current)))
-      (when window
-        (fit-window-to-buffer window)
-        (window-preserve-size window)))))
+  (let ((window (get-buffer-window "*Completions*" exwm-workspace--current)))
+    (when window
+      (fit-window-to-buffer window)
+      (window-preserve-size window))))
 
 (defun exwm-workspace--on-minibuffer-exit ()
   "Run in `minibuffer-exit-hook' to hide the minibuffer container."
diff --git a/third_party/exwm/exwm-xsettings.el b/third_party/exwm/exwm-xsettings.el
index 99d6b9c4ac..596588b823 100644
--- a/third_party/exwm/exwm-xsettings.el
+++ b/third_party/exwm/exwm-xsettings.el
@@ -293,7 +293,7 @@ SERIAL is a sequence number."
                        :destination exwm--root
                        :event-mask xcb:EventMask:StructureNotify
                        :event (xcb:marshal
-                               (make-instance 'xcb:xsettings:-ClientMessage
+                               (make-instance 'xcb:icccm:-ManagerSelection
                                               :window exwm--root
                                               :time xcb:Time:CurrentTime
                                               :selection exwm-xsettings--XSETTINGS_S0-atom
diff --git a/third_party/exwm/exwm.el b/third_party/exwm/exwm.el
index c4900eab48..1186a40f44 100644
--- a/third_party/exwm/exwm.el
+++ b/third_party/exwm/exwm.el
@@ -4,8 +4,8 @@
 
 ;; Author: Chris Feng <chris.w.feng@gmail.com>
 ;; Maintainer: Adrián Medraño Calvo <adrian@medranocalvo.com>, Steven Allen <steven@stebalien.com>, Daniel Mendler <mail@daniel-mendler.de>
-;; Version: 0.28
-;; Package-Requires: ((emacs "27.1") (xelb "0.18"))
+;; Version: 0.30
+;; Package-Requires: ((emacs "27.1") (xelb "0.19"))
 ;; Keywords: unix
 ;; URL: https://github.com/emacs-exwm/exwm
 
@@ -493,23 +493,20 @@ RAW-DATA contains unmarshalled ClientMessage event data."
      ;; _NET_ACTIVE_WINDOW.
      ((= type xcb:Atom:_NET_ACTIVE_WINDOW)
       (let ((buffer (exwm--id->buffer id))
-            iconic window)
+            window)
         (if (buffer-live-p buffer)
           ;; Either an `exwm-mode' buffer (an X window) or a floating frame.
           (with-current-buffer buffer
             (when (eq exwm--frame exwm-workspace--current)
               (if exwm--floating-frame
                   (select-frame exwm--floating-frame)
-                (setq iconic (exwm-layout--iconic-state-p))
-                (when iconic
+                (setq window (get-buffer-window nil t))
+                (unless window
                   ;; State change: iconic => normal.
-                  (set-window-buffer (frame-selected-window exwm--frame)
-                                     (current-buffer)))
+                  (setq window (frame-selected-window exwm--frame))
+                  (set-window-buffer window (current-buffer)))
                 ;; Focus transfer.
-                (setq window (get-buffer-window nil t))
-                (when (or iconic
-                          (not (eq window (selected-window))))
-                  (select-window window)))))
+                (select-window window))))
           ;; A workspace.
           (dolist (f exwm-workspace--list)
             (when (eq id (frame-parameter f 'exwm-outer-id))
diff --git a/third_party/geesefs/default.nix b/third_party/geesefs/default.nix
deleted file mode 100644
index 98448bb737..0000000000
--- a/third_party/geesefs/default.nix
+++ /dev/null
@@ -1,25 +0,0 @@
-# Finally, a good FUSE FS implementation over S3.
-# https://github.com/yandex-cloud/geesefs
-
-{ pkgs, ... }:
-
-pkgs.buildGoModule rec {
-  pname = "geesefs";
-  version = "0.40.1";
-
-  src = pkgs.fetchFromGitHub {
-    owner = "yandex-cloud";
-    repo = "geesefs";
-    rev = "v${version}";
-    hash = "sha256:0ig8h17z8n5j8qb7k2jyh40vv77zazhnz8bxdam9xihxksj8mizp";
-  };
-
-  subPackages = [ "." ];
-  buildInputs = [ pkgs.fuse ];
-  vendorHash = "sha256:11i7cmnlxi00d0csgpv8drfcw0aqshwc4hfs0jw7zwafdhnlyy0j";
-
-  meta = with pkgs.lib; {
-    license = licenses.asl20;
-    maintainers = [ maintainers.tazjin ];
-  };
-}
diff --git a/third_party/nixpkgs/default.nix b/third_party/nixpkgs/default.nix
index b79a963e5c..03dc7b267c 100644
--- a/third_party/nixpkgs/default.nix
+++ b/third_party/nixpkgs/default.nix
@@ -52,7 +52,13 @@ let
   # Overlay for packages that should come from the stable channel
   # instead (e.g. because something is broken in unstable).
   # Use `stableNixpkgs` from above.
-  stableOverlay = _unstableSelf: unstableSuper: { };
+  stableOverlay = _unstableSelf: unstableSuper: {
+    # newer trunk fails somewhere within reqwest, trying to read a mystery file
+    trunk = stableNixpkgs.trunk;
+
+    # the big lisp package change breaks everything in //3p/lisp, undo it for now.
+    lispPackages = stableNixpkgs.lispPackages;
+  };
 
   # Overlay to expose the nixpkgs commits we are using to other Nix code.
   commitsOverlay = _: _: {
diff --git a/third_party/overlays/patches/crate2nix-drop-darwin-explicit-dontstrip.patch b/third_party/overlays/patches/crate2nix-drop-darwin-explicit-dontstrip.patch
new file mode 100644
index 0000000000..ba64e660c0
--- /dev/null
+++ b/third_party/overlays/patches/crate2nix-drop-darwin-explicit-dontstrip.patch
@@ -0,0 +1,22 @@
+From 0209f258cda8a9972a785e26d92fb477ce4d1b0e Mon Sep 17 00:00:00 2001
+From: Ilan Joselevich <personal@ilanjoselevich.com>
+Date: Tue, 11 Jun 2024 18:14:06 +0300
+Subject: [PATCH] Get rid of dontStrip for Darwin as it's no longer needed
+
+Fixed in https://github.com/NixOS/nixpkgs/pull/255900
+---
+ templates/nix/crate2nix/default.nix                  | 2 --
+
+diff --git a/templates/nix/crate2nix/default.nix b/templates/nix/crate2nix/default.nix
+index 95d3730f..c53925e7 100644
+--- a/templates/nix/crate2nix/default.nix
++++ b/templates/nix/crate2nix/default.nix
+@@ -349,8 +349,6 @@ rec {
+           buildRustCrateForPkgsFunc pkgs
+             (
+               crateConfig // {
+-                # https://github.com/NixOS/nixpkgs/issues/218712
+-                dontStrip = stdenv.hostPlatform.isDarwin;
+                 src = crateConfig.src or (
+                   pkgs.fetchurl rec {
+                     name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
diff --git a/third_party/overlays/tvl.nix b/third_party/overlays/tvl.nix
index b54e899b88..f2260be8b8 100644
--- a/third_party/overlays/tvl.nix
+++ b/third_party/overlays/tvl.nix
@@ -21,17 +21,6 @@ depot.nix.readTree.drvTargets {
     withAWS = false;
   });
 
-  # To match telega in emacs-overlay or wherever
-  tdlib = super.tdlib.overrideAttrs (_: {
-    version = "1.8.24";
-    src = self.fetchFromGitHub {
-      owner = "tdlib";
-      repo = "td";
-      rev = "d79bd4b69403868897496da39b773ab25c69f6af";
-      sha256 = "0bc5akzw12qwj45rzqkrhw65qlrn9q8pzmvc5aiqv4bvhkb1ghl0";
-    };
-  });
-
   home-manager = super.home-manager.overrideAttrs (_: {
     src = depot.third_party.sources.home-manager;
     version = "git-"
@@ -69,6 +58,17 @@ depot.nix.readTree.drvTargets {
           sha256 = "1mmlrd2zpcwiv8gh10y7lrpflnbmsycdascrxjr3bfcwa8yx7901";
         };
       };
+
+      # Override telega sources until MELPA updates in nixpkgs resume.
+      telega = esuper.telega.overrideAttrs (_: {
+        version = "0.8.291"; # unstable
+        src = self.fetchFromGitHub {
+          owner = "zevlg";
+          repo = "telega.el";
+          rev = "58b4963b292ceb723d665df100b519eb5a99c676";
+          sha256 = "1q3ydbm0jhrsyvvdn0mpmxvskq0l53jkh40a5hlx7i3qkinbhbry";
+        };
+      });
     })
   );
 
@@ -101,6 +101,8 @@ depot.nix.readTree.drvTargets {
     patches = old.patches or [ ] ++ [
       # https://github.com/nix-community/crate2nix/pull/301
       ./patches/crate2nix-tests-debug.patch
+      # TODO(Kranzes): drop on next release
+      ./patches/crate2nix-drop-darwin-explicit-dontstrip.patch
     ];
   });
 
@@ -120,4 +122,17 @@ depot.nix.readTree.drvTargets {
   tpm2-pkcs11 = super.tpm2-pkcs11.overrideAttrs (old: {
     patches = (old.patches or [ ]) ++ [ ./patches/tpm2-pkcs11-190-dbupgrade.patch ];
   });
+
+  # macFUSE bump containing fix for https://github.com/osxfuse/osxfuse/issues/974
+  # https://github.com/NixOS/nixpkgs/pull/320197
+  fuse =
+    if super.stdenv.isDarwin then
+      super.fuse.overrideAttrs
+        (old: rec {
+          version = "4.8.0";
+          src = super.fetchurl {
+            url = "https://github.com/osxfuse/osxfuse/releases/download/macfuse-${version}/macfuse-${version}.dmg";
+            hash = "sha256-ucTzO2qdN4QkowMVvC3+4pjEVjbwMsB0xFk+bvQxwtQ=";
+          };
+        }) else super.fuse;
 }
diff --git a/third_party/sources/sources.json b/third_party/sources/sources.json
index 109451ff51..8844eddc9c 100644
--- a/third_party/sources/sources.json
+++ b/third_party/sources/sources.json
@@ -5,10 +5,10 @@
         "homepage": "https://matrix.to/#/#agenix:nixos.org",
         "owner": "ryantm",
         "repo": "agenix",
-        "rev": "0.15.0",
-        "sha256": "01dhrghwa7zw93cybvx4gnrskqk97b004nfxgsys0736823956la",
+        "rev": "c2fc0762bbe8feb06a2e59a364fa81b3a57671c9",
+        "sha256": "1lpkwinlax40b7xgzspbkm9rsi4a1x48hxhixnni4irxxwnav0ah",
         "type": "tarball",
-        "url": "https://github.com/ryantm/agenix/archive/0.15.0.tar.gz",
+        "url": "https://github.com/ryantm/agenix/archive/c2fc0762bbe8feb06a2e59a364fa81b3a57671c9.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "home-manager": {
@@ -17,10 +17,10 @@
         "homepage": "https://nix-community.github.io/home-manager/",
         "owner": "nix-community",
         "repo": "home-manager",
-        "rev": "c1609d584a6b5e9e6a02010f51bd368cb4782f8e",
-        "sha256": "112r86p3iah1xahwlp82yd3gvh10wkf271za5h7v3jsqv08c6gkr",
+        "rev": "a7117efb3725e6197dd95424136f79147aa35e5b",
+        "sha256": "02q3ck1hjs8xzdhfikqxrnsfs9vh4p7rmdha3vbp6nkkdbdvhgg7",
         "type": "tarball",
-        "url": "https://github.com/nix-community/home-manager/archive/c1609d584a6b5e9e6a02010f51bd368cb4782f8e.tar.gz",
+        "url": "https://github.com/nix-community/home-manager/archive/a7117efb3725e6197dd95424136f79147aa35e5b.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "impermanence": {
@@ -41,10 +41,10 @@
         "homepage": "",
         "owner": "nmattia",
         "repo": "naersk",
-        "rev": "c5037590290c6c7dae2e42e7da1e247e54ed2d49",
-        "sha256": "1ql5ziwfrpmc8cxhgflmdy2z06z4dsdfzjwb2vv9bag6a2chrvq8",
+        "rev": "fa19d8c135e776dc97f4dcca08656a0eeb28d5c0",
+        "sha256": "1mif058gcbw5d5yixsmzalqlr0h9m9mmbsgv8v4r2mmsbw83k2x0",
         "type": "tarball",
-        "url": "https://github.com/nmattia/naersk/archive/c5037590290c6c7dae2e42e7da1e247e54ed2d49.tar.gz",
+        "url": "https://github.com/nmattia/naersk/archive/fa19d8c135e776dc97f4dcca08656a0eeb28d5c0.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "napalm": {
@@ -65,10 +65,10 @@
         "homepage": "",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "7bb2ccd8cdc44c91edba16c48d2c8f331fb3d856",
-        "sha256": "0ijqx995jw9i16f28whyjdll9b0nydmyl4n91bci2cgryxms7f8f",
+        "rev": "051f920625ab5aabe37c920346e3e69d7d34400e",
+        "sha256": "08lin51g5x2vv89rs6vmqxnyy8pfysh0wdp6mdxw6l86dpm2rbg2",
         "type": "tarball",
-        "url": "https://github.com/NixOS/nixpkgs/archive/7bb2ccd8cdc44c91edba16c48d2c8f331fb3d856.tar.gz",
+        "url": "https://github.com/NixOS/nixpkgs/archive/051f920625ab5aabe37c920346e3e69d7d34400e.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "nixpkgs-stable": {
@@ -77,10 +77,10 @@
         "homepage": "",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "dd37924974b9202f8226ed5d74a252a9785aedf8",
-        "sha256": "1nxd4dqci8rs94a7cypx30axgj778p2wydkx16q298n29crkflbw",
+        "rev": "a2e1d0414259a144ebdc048408a807e69e0565af",
+        "sha256": "1jv90bz3s7j294fhpb29k735fg3xfs9z848szicqarpbz7wfg03g",
         "type": "tarball",
-        "url": "https://github.com/NixOS/nixpkgs/archive/dd37924974b9202f8226ed5d74a252a9785aedf8.tar.gz",
+        "url": "https://github.com/NixOS/nixpkgs/archive/a2e1d0414259a144ebdc048408a807e69e0565af.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "rust-overlay": {
@@ -89,10 +89,10 @@
         "homepage": "",
         "owner": "oxalica",
         "repo": "rust-overlay",
-        "rev": "2a42c742ab04b61d9b2f1edf392842cf9f27ebfd",
-        "sha256": "1wpkca75ysb2ssycc0dshd1m76q8iqhzrrbr6xmfmkkcj1p333nk",
+        "rev": "6dc3e45fe4aee36efeed24d64fc68b1f989d5465",
+        "sha256": "0vqgkzbfdj920lbm1dy8kylrv2gk4ard38lb3i20xvp2mp1d39n2",
         "type": "tarball",
-        "url": "https://github.com/oxalica/rust-overlay/archive/2a42c742ab04b61d9b2f1edf392842cf9f27ebfd.tar.gz",
+        "url": "https://github.com/oxalica/rust-overlay/archive/6dc3e45fe4aee36efeed24d64fc68b1f989d5465.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "rustsec-advisory-db": {
@@ -101,10 +101,10 @@
         "homepage": "https://rustsec.org",
         "owner": "RustSec",
         "repo": "advisory-db",
-        "rev": "35e7459a331d3e0c585e56dabd03006b9b354088",
-        "sha256": "1j8c0vzwg6b9lxmdy2a40pvwsy2kncv455spbjbxsj10p2vmy5fl",
+        "rev": "af76d4423761499f954411bb3071dcc72e6b0450",
+        "sha256": "167qxr66j638km3z7zk2drjdr4bgqz77hr35vkwdp0lbafmd6y1c",
         "type": "tarball",
-        "url": "https://github.com/RustSec/advisory-db/archive/35e7459a331d3e0c585e56dabd03006b9b354088.tar.gz",
+        "url": "https://github.com/RustSec/advisory-db/archive/af76d4423761499f954411bb3071dcc72e6b0450.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     }
 }
diff --git a/third_party/teleirc/default.nix b/third_party/teleirc/default.nix
new file mode 100644
index 0000000000..8791511002
--- /dev/null
+++ b/third_party/teleirc/default.nix
@@ -0,0 +1,23 @@
+{ pkgs, lib, ... }:
+
+pkgs.buildGoModule rec {
+  pname = "teleirc";
+  version = "2.3.0-4";
+
+  src = pkgs.fetchFromGitHub {
+    owner = "tvlfyi";
+    repo = "teleirc";
+    rev = "356ed1450840822172e7dff57965cc5371f63454";
+    sha256 = "0s6rlixks7lar9js4q1drg742cy2p4n8l4pmlzjmskl5d04c15gq";
+  };
+
+  vendorHash = "sha256:06f2wyxbphj73wknpp6dsn7rb4yhvdl6x0gj729cns7r4bsviscs";
+  ldflags = [ "-s" "-w" "-X" "main.version=${version}" ];
+  postInstall = "mv $out/bin/cmd $out/bin/teleirc";
+
+  meta = with lib; {
+    description = "IRC/Telegram bridge";
+    homepage = "https://docs.teleirc.com/en/latest/";
+    license = licenses.gpl3;
+  };
+}
diff --git a/tvix/Cargo.lock b/tvix/Cargo.lock
index dc5298c45b..ab7eec0b73 100644
--- a/tvix/Cargo.lock
+++ b/tvix/Cargo.lock
@@ -565,7 +565,7 @@ dependencies = [
  "num-traits",
  "serde",
  "wasm-bindgen",
- "windows-targets 0.52.0",
+ "windows-targets 0.52.5",
 ]
 
 [[package]]
@@ -678,6 +678,19 @@ dependencies = [
 ]
 
 [[package]]
+name = "console"
+version = "0.15.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb"
+dependencies = [
+ "encode_unicode",
+ "lazy_static",
+ "libc",
+ "unicode-width",
+ "windows-sys 0.52.0",
+]
+
+[[package]]
 name = "const-oid"
 version = "0.9.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1008,6 +1021,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07"
 
 [[package]]
+name = "encode_unicode"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f"
+
+[[package]]
 name = "encoding_rs"
 version = "0.8.33"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1382,6 +1401,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b32dfe1fdfc0bbde1f22a5da25355514b5e450c33a6af6770884c8750aedfbc"
 
 [[package]]
+name = "generator"
+version = "0.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "186014d53bc231d0090ef8d6f03e0920c54d85a5ed22f4f2f74315ec56cf83fb"
+dependencies = [
+ "cc",
+ "cfg-if",
+ "libc",
+ "log",
+ "rustversion",
+ "windows",
+]
+
+[[package]]
 name = "generic-array"
 version = "0.14.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1511,6 +1544,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "http-range-header"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f"
+
+[[package]]
 name = "httparse"
 version = "1.8.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1590,7 +1629,7 @@ dependencies = [
  "iana-time-zone-haiku",
  "js-sys",
  "wasm-bindgen",
- "windows-core",
+ "windows-core 0.52.0",
 ]
 
 [[package]]
@@ -1665,6 +1704,20 @@ dependencies = [
 ]
 
 [[package]]
+name = "indicatif"
+version = "0.17.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3"
+dependencies = [
+ "console",
+ "instant",
+ "number_prefix",
+ "portable-atomic",
+ "unicode-width",
+ "vt100",
+]
+
+[[package]]
 name = "instant"
 version = "0.1.12"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -1869,6 +1922,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f"
 
 [[package]]
+name = "loom"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "419e0dc8046cb947daa77eb95ae174acfbddb7673b4151f56d1eed8e93fbfaca"
+dependencies = [
+ "cfg-if",
+ "generator",
+ "scoped-tls",
+ "tracing",
+ "tracing-subscriber",
+]
+
+[[package]]
 name = "lru"
 version = "0.12.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2138,6 +2204,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "number_prefix"
+version = "0.4.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3"
+
+[[package]]
 name = "object"
 version = "0.32.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2211,6 +2283,18 @@ dependencies = [
 ]
 
 [[package]]
+name = "opentelemetry-http"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7690dc77bf776713848c4faa6501157469017eaf332baccd4eb1cea928743d94"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "http",
+ "opentelemetry",
+]
+
+[[package]]
 name = "opentelemetry-otlp"
 version = "0.15.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -2477,6 +2561,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "portable-atomic"
+version = "1.6.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0"
+
+[[package]]
 name = "powerfmt"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3166,6 +3256,12 @@ dependencies = [
 ]
 
 [[package]]
+name = "scoped-tls"
+version = "1.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e1cf6437eb19a8f4a6cc0f7dca544973b0b78843adbfeb3683d1a94a0024a294"
+
+[[package]]
 name = "scopeguard"
 version = "1.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3616,6 +3712,35 @@ dependencies = [
 ]
 
 [[package]]
+name = "threadpool"
+version = "1.8.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
+dependencies = [
+ "num_cpus",
+]
+
+[[package]]
+name = "tikv-jemalloc-sys"
+version = "0.5.4+5.3.0-patched"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9402443cb8fd499b6f327e40565234ff34dbda27460c5b47db0db77443dd85d1"
+dependencies = [
+ "cc",
+ "libc",
+]
+
+[[package]]
+name = "tikv-jemallocator"
+version = "0.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "965fe0c26be5c56c94e38ba547249074803efd52adfb66de62107d95aab3eaca"
+dependencies = [
+ "libc",
+ "tikv-jemalloc-sys",
+]
+
+[[package]]
 name = "time"
 version = "0.3.34"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3926,6 +4051,25 @@ dependencies = [
 ]
 
 [[package]]
+name = "tower-http"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140"
+dependencies = [
+ "bitflags 2.4.2",
+ "bytes",
+ "futures-core",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-range-header",
+ "pin-project-lite",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
 name = "tower-layer"
 version = "0.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -3981,6 +4125,18 @@ dependencies = [
 ]
 
 [[package]]
+name = "tracing-indicatif"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "069580424efe11d97c3fef4197fa98c004fa26672cc71ad8770d224e23b1951d"
+dependencies = [
+ "indicatif",
+ "tracing",
+ "tracing-core",
+ "tracing-subscriber",
+]
+
+[[package]]
 name = "tracing-log"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4028,6 +4184,37 @@ dependencies = [
 ]
 
 [[package]]
+name = "tracing-tracy"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6024d04f84a69fd0d1dc1eee3a2b070bd246530a0582f9982ae487cb6c703614"
+dependencies = [
+ "tracing-core",
+ "tracing-subscriber",
+ "tracy-client",
+]
+
+[[package]]
+name = "tracy-client"
+version = "0.17.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59fb931a64ff88984f86d3e9bcd1ae8843aa7fe44dd0f8097527bc172351741d"
+dependencies = [
+ "loom",
+ "once_cell",
+ "tracy-client-sys",
+]
+
+[[package]]
+name = "tracy-client-sys"
+version = "0.22.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9d104d610dfa9dd154535102cc9c6164ae1fa37842bc2d9e83f9ac82b0ae0882"
+dependencies = [
+ "cc",
+]
+
+[[package]]
 name = "try-lock"
 version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4050,8 +4237,8 @@ dependencies = [
  "tonic-build",
  "tonic-reflection",
  "tracing",
- "tracing-subscriber",
  "tvix-castore",
+ "tvix-tracing",
  "url",
 ]
 
@@ -4089,6 +4276,7 @@ dependencies = [
  "sled",
  "tempfile",
  "thiserror",
+ "threadpool",
  "tokio",
  "tokio-retry",
  "tokio-stream",
@@ -4099,6 +4287,8 @@ dependencies = [
  "tonic-reflection",
  "tower",
  "tracing",
+ "tracing-indicatif",
+ "tvix-tracing",
  "url",
  "vhost",
  "vhost-user-backend",
@@ -4119,16 +4309,19 @@ dependencies = [
  "clap",
  "dirs",
  "nix-compat",
+ "rnix",
  "rustyline",
  "thiserror",
+ "tikv-jemallocator",
  "tokio",
  "tracing",
- "tracing-subscriber",
+ "tracing-indicatif",
  "tvix-build",
  "tvix-castore",
  "tvix-eval",
  "tvix-glue",
  "tvix-store",
+ "tvix-tracing",
  "wu-manber",
 ]
 
@@ -4165,9 +4358,9 @@ dependencies = [
  "tabwriter",
  "tempfile",
  "test-strategy",
+ "tikv-jemallocator",
  "toml",
  "tvix-eval-builtin-macros",
- "xml-rs",
 ]
 
 [[package]]
@@ -4206,14 +4399,17 @@ dependencies = [
  "sha2",
  "tempfile",
  "thiserror",
+ "tikv-jemallocator",
  "tokio",
  "tokio-tar",
  "tokio-util",
  "tracing",
+ "tracing-indicatif",
  "tvix-build",
  "tvix-castore",
  "tvix-eval",
  "tvix-store",
+ "tvix-tracing",
  "url",
  "walkdir",
  "wu-manber",
@@ -4247,9 +4443,6 @@ dependencies = [
  "lazy_static",
  "lru",
  "nix-compat",
- "opentelemetry",
- "opentelemetry-otlp",
- "opentelemetry_sdk",
  "parking_lot 0.12.2",
  "pin-project-lite",
  "prost",
@@ -4274,15 +4467,37 @@ dependencies = [
  "tonic-build",
  "tonic-reflection",
  "tower",
+ "tower-http",
  "tracing",
- "tracing-opentelemetry",
- "tracing-subscriber",
+ "tracing-indicatif",
  "tvix-castore",
+ "tvix-tracing",
  "url",
  "walkdir",
 ]
 
 [[package]]
+name = "tvix-tracing"
+version = "0.1.0"
+dependencies = [
+ "http",
+ "indicatif",
+ "lazy_static",
+ "opentelemetry",
+ "opentelemetry-http",
+ "opentelemetry-otlp",
+ "opentelemetry_sdk",
+ "thiserror",
+ "tokio",
+ "tonic",
+ "tracing",
+ "tracing-indicatif",
+ "tracing-opentelemetry",
+ "tracing-subscriber",
+ "tracing-tracy",
+]
+
+[[package]]
 name = "typenum"
 version = "1.17.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4489,6 +4704,39 @@ dependencies = [
 ]
 
 [[package]]
+name = "vt100"
+version = "0.15.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84cd863bf0db7e392ba3bd04994be3473491b31e66340672af5d11943c6274de"
+dependencies = [
+ "itoa",
+ "log",
+ "unicode-width",
+ "vte",
+]
+
+[[package]]
+name = "vte"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197"
+dependencies = [
+ "arrayvec",
+ "utf8parse",
+ "vte_generate_state_changes",
+]
+
+[[package]]
+name = "vte_generate_state_changes"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff"
+dependencies = [
+ "proc-macro2",
+ "quote",
+]
+
+[[package]]
 name = "wait-timeout"
 version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
@@ -4678,12 +4926,41 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
+name = "windows"
+version = "0.54.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9252e5725dbed82865af151df558e754e4a3c2c30818359eb17465f1346a1b49"
+dependencies = [
+ "windows-core 0.54.0",
+ "windows-targets 0.52.5",
+]
+
+[[package]]
 name = "windows-core"
 version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9"
 dependencies = [
- "windows-targets 0.52.0",
+ "windows-targets 0.52.5",
+]
+
+[[package]]
+name = "windows-core"
+version = "0.54.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "12661b9c89351d684a50a8a643ce5f608e20243b9fb84687800163429f161d65"
+dependencies = [
+ "windows-result",
+ "windows-targets 0.52.5",
+]
+
+[[package]]
+name = "windows-result"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5e383302e8ec8515204254685643de10811af0ed97ea37210dc26fb0032647f8"
+dependencies = [
+ "windows-targets 0.52.5",
 ]
 
 [[package]]
@@ -4701,7 +4978,7 @@ version = "0.52.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
 dependencies = [
- "windows-targets 0.52.0",
+ "windows-targets 0.52.5",
 ]
 
 [[package]]
@@ -4721,17 +4998,18 @@ dependencies = [
 
 [[package]]
 name = "windows-targets"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd"
+checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
 dependencies = [
- "windows_aarch64_gnullvm 0.52.0",
- "windows_aarch64_msvc 0.52.0",
- "windows_i686_gnu 0.52.0",
- "windows_i686_msvc 0.52.0",
- "windows_x86_64_gnu 0.52.0",
- "windows_x86_64_gnullvm 0.52.0",
- "windows_x86_64_msvc 0.52.0",
+ "windows_aarch64_gnullvm 0.52.5",
+ "windows_aarch64_msvc 0.52.5",
+ "windows_i686_gnu 0.52.5",
+ "windows_i686_gnullvm",
+ "windows_i686_msvc 0.52.5",
+ "windows_x86_64_gnu 0.52.5",
+ "windows_x86_64_gnullvm 0.52.5",
+ "windows_x86_64_msvc 0.52.5",
 ]
 
 [[package]]
@@ -4742,9 +5020,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
 
 [[package]]
 name = "windows_aarch64_gnullvm"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea"
+checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
 
 [[package]]
 name = "windows_aarch64_msvc"
@@ -4754,9 +5032,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
 
 [[package]]
 name = "windows_aarch64_msvc"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef"
+checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
 
 [[package]]
 name = "windows_i686_gnu"
@@ -4766,9 +5044,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
 
 [[package]]
 name = "windows_i686_gnu"
-version = "0.52.0"
+version = "0.52.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "88ba073cf16d5372720ec942a8ccbf61626074c6d4dd2e745299726ce8b89670"
+
+[[package]]
+name = "windows_i686_gnullvm"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313"
+checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
 
 [[package]]
 name = "windows_i686_msvc"
@@ -4778,9 +5062,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
 
 [[package]]
 name = "windows_i686_msvc"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a"
+checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
 
 [[package]]
 name = "windows_x86_64_gnu"
@@ -4790,9 +5074,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
 
 [[package]]
 name = "windows_x86_64_gnu"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd"
+checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
@@ -4802,9 +5086,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
 
 [[package]]
 name = "windows_x86_64_gnullvm"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e"
+checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
 
 [[package]]
 name = "windows_x86_64_msvc"
@@ -4814,9 +5098,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
 
 [[package]]
 name = "windows_x86_64_msvc"
-version = "0.52.0"
+version = "0.52.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04"
+checksum = "bec47e5bfd1bff0eeaf6d8b485cc1074891a197ab4225d504cb7a1ab88b02bf0"
 
 [[package]]
 name = "winreg"
@@ -4845,12 +5129,6 @@ dependencies = [
 ]
 
 [[package]]
-name = "xml-rs"
-version = "0.8.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fcb9cbac069e033553e8bb871be2fbdffcab578eb25bd0f7c508cedc6dcd75a"
-
-[[package]]
 name = "xz2"
 version = "0.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/tvix/Cargo.nix b/tvix/Cargo.nix
index f6c3108fa4..008080c461 100644
--- a/tvix/Cargo.nix
+++ b/tvix/Cargo.nix
@@ -123,6 +123,16 @@ rec {
       # File a bug if you depend on any for non-debug work!
       debug = internal.debugCrate { inherit packageId; };
     };
+    "tvix-tracing" = rec {
+      packageId = "tvix-tracing";
+      build = internal.buildRustCrateWithFeatures {
+        packageId = "tvix-tracing";
+      };
+
+      # Debug support which might change between releases.
+      # File a bug if you depend on any for non-debug work!
+      debug = internal.debugCrate { inherit packageId; };
+    };
   };
 
   # A derivation that joins the outputs of all workspace members together.
@@ -1820,7 +1830,7 @@ rec {
           }
           {
             name = "windows-targets";
-            packageId = "windows-targets 0.52.0";
+            packageId = "windows-targets 0.52.5";
             optional = true;
             target = { target, features }: (target."windows" or false);
           }
@@ -2126,6 +2136,47 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "std" ];
       };
+      "console" = rec {
+        crateName = "console";
+        version = "0.15.8";
+        edition = "2018";
+        sha256 = "1sz4nl9nz8pkmapqni6py7jxzi7nzqjxzb3ya4kxvmkb0zy867qf";
+        authors = [
+          "Armin Ronacher <armin.ronacher@active-4.com>"
+        ];
+        dependencies = [
+          {
+            name = "encode_unicode";
+            packageId = "encode_unicode";
+            target = { target, features }: (target."windows" or false);
+          }
+          {
+            name = "lazy_static";
+            packageId = "lazy_static";
+          }
+          {
+            name = "libc";
+            packageId = "libc";
+          }
+          {
+            name = "unicode-width";
+            packageId = "unicode-width";
+            optional = true;
+          }
+          {
+            name = "windows-sys";
+            packageId = "windows-sys 0.52.0";
+            target = { target, features }: (target."windows" or false);
+            features = [ "Win32_Foundation" "Win32_System_Console" "Win32_Storage_FileSystem" "Win32_UI_Input_KeyboardAndMouse" ];
+          }
+        ];
+        features = {
+          "default" = [ "unicode-width" "ansi-parsing" ];
+          "unicode-width" = [ "dep:unicode-width" ];
+          "windows-console-colors" = [ "ansi-parsing" ];
+        };
+        resolvedDefaultFeatures = [ "ansi-parsing" "unicode-width" ];
+      };
       "const-oid" = rec {
         crateName = "const-oid";
         version = "0.9.6";
@@ -3072,6 +3123,21 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "use_std" ];
       };
+      "encode_unicode" = rec {
+        crateName = "encode_unicode";
+        version = "0.3.6";
+        edition = "2015";
+        sha256 = "07w3vzrhxh9lpjgsg2y5bwzfar2aq35mdznvcp3zjl0ssj7d4mx3";
+        authors = [
+          "Torbjørn Birch Moltu <t.b.moltu@lyse.net>"
+        ];
+        features = {
+          "ascii" = [ "dep:ascii" ];
+          "clippy" = [ "dep:clippy" ];
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
       "encoding_rs" = rec {
         crateName = "encoding_rs";
         version = "0.8.33";
@@ -4180,6 +4246,47 @@ rec {
         ];
         features = { };
       };
+      "generator" = rec {
+        crateName = "generator";
+        version = "0.8.1";
+        edition = "2021";
+        sha256 = "1yw3rxbfq5a3yzrg88pdln2lvi9014zg1mpq1q4x0cf27gai8q0q";
+        authors = [
+          "Xudong Huang <huangxu008@hotmail.com>"
+        ];
+        dependencies = [
+          {
+            name = "cfg-if";
+            packageId = "cfg-if";
+          }
+          {
+            name = "libc";
+            packageId = "libc";
+            target = { target, features }: (target."unix" or false);
+          }
+          {
+            name = "log";
+            packageId = "log";
+          }
+          {
+            name = "windows";
+            packageId = "windows";
+            target = { target, features }: (target."windows" or false);
+            features = [ "Win32_System_Memory" "Win32_System_Kernel" "Win32_Foundation" "Win32_System_SystemInformation" "Win32_System_Diagnostics_Debug" ];
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+          }
+          {
+            name = "rustversion";
+            packageId = "rustversion";
+          }
+        ];
+
+      };
       "generic-array" = rec {
         crateName = "generic-array";
         version = "0.14.7";
@@ -4546,6 +4653,13 @@ rec {
         ];
 
       };
+      "http-range-header" = rec {
+        crateName = "http-range-header";
+        version = "0.3.1";
+        edition = "2018";
+        sha256 = "13vm511vq3bhschkw2xi9nhxzkw53m55gn9vxg7qigfxc29spl5d";
+        features = { };
+      };
       "httparse" = rec {
         crateName = "httparse";
         version = "1.8.0";
@@ -4840,7 +4954,7 @@ rec {
           }
           {
             name = "windows-core";
-            packageId = "windows-core";
+            packageId = "windows-core 0.52.0";
             target = { target, features }: ("windows" == target."os" or null);
           }
         ];
@@ -5057,6 +5171,55 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "serde" "std" ];
       };
+      "indicatif" = rec {
+        crateName = "indicatif";
+        version = "0.17.8";
+        edition = "2021";
+        sha256 = "18xyqxw9i5x4sbpzckhfz3nm984iq9r7nbi2lk76nz888n7mlfkn";
+        dependencies = [
+          {
+            name = "console";
+            packageId = "console";
+            usesDefaultFeatures = false;
+            features = [ "ansi-parsing" ];
+          }
+          {
+            name = "instant";
+            packageId = "instant";
+            target = { target, features }: ("wasm32" == target."arch" or null);
+          }
+          {
+            name = "number_prefix";
+            packageId = "number_prefix";
+          }
+          {
+            name = "portable-atomic";
+            packageId = "portable-atomic";
+          }
+          {
+            name = "unicode-width";
+            packageId = "unicode-width";
+            optional = true;
+          }
+          {
+            name = "vt100";
+            packageId = "vt100";
+            optional = true;
+          }
+        ];
+        features = {
+          "default" = [ "unicode-width" "console/unicode-width" ];
+          "futures" = [ "dep:futures-core" ];
+          "improved_unicode" = [ "unicode-segmentation" "unicode-width" "console/unicode-width" ];
+          "in_memory" = [ "vt100" ];
+          "rayon" = [ "dep:rayon" ];
+          "tokio" = [ "dep:tokio" ];
+          "unicode-segmentation" = [ "dep:unicode-segmentation" ];
+          "unicode-width" = [ "dep:unicode-width" ];
+          "vt100" = [ "dep:vt100" ];
+        };
+        resolvedDefaultFeatures = [ "default" "in_memory" "unicode-width" "vt100" ];
+      };
       "instant" = rec {
         crateName = "instant";
         version = "0.1.12";
@@ -5633,6 +5796,48 @@ rec {
         };
         resolvedDefaultFeatures = [ "std" ];
       };
+      "loom" = rec {
+        crateName = "loom";
+        version = "0.7.2";
+        edition = "2018";
+        sha256 = "1jpszf9qxv8ydpsm2h9vcyvxvyxcfkhmmfbylzd4gfbc0k40v7j1";
+        authors = [
+          "Carl Lerche <me@carllerche.com>"
+        ];
+        dependencies = [
+          {
+            name = "cfg-if";
+            packageId = "cfg-if";
+          }
+          {
+            name = "generator";
+            packageId = "generator";
+          }
+          {
+            name = "scoped-tls";
+            packageId = "scoped-tls";
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            usesDefaultFeatures = false;
+            features = [ "std" ];
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            features = [ "env-filter" ];
+          }
+        ];
+        features = {
+          "checkpoint" = [ "serde" "serde_json" ];
+          "futures" = [ "pin-utils" ];
+          "pin-utils" = [ "dep:pin-utils" ];
+          "serde" = [ "dep:serde" ];
+          "serde_json" = [ "dep:serde_json" ];
+        };
+        resolvedDefaultFeatures = [ "default" ];
+      };
       "lru" = rec {
         crateName = "lru";
         version = "0.12.3";
@@ -6440,6 +6645,19 @@ rec {
         ];
 
       };
+      "number_prefix" = rec {
+        crateName = "number_prefix";
+        version = "0.4.0";
+        edition = "2015";
+        sha256 = "1wvh13wvlajqxkb1filsfzbrnq0vrmrw298v2j3sy82z1rm282w3";
+        authors = [
+          "Benjamin Sago <ogham@bsago.me>"
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
       "object" = rec {
         crateName = "object";
         version = "0.32.2";
@@ -6717,6 +6935,39 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "metrics" "pin-project-lite" "trace" ];
       };
+      "opentelemetry-http" = rec {
+        crateName = "opentelemetry-http";
+        version = "0.11.1";
+        edition = "2021";
+        sha256 = "151xfhlakkmi9v6sqarkmxz02sbl2l0nbajgij216rvppxvxr43n";
+        dependencies = [
+          {
+            name = "async-trait";
+            packageId = "async-trait";
+          }
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "http";
+            packageId = "http";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry";
+            features = [ "trace" ];
+          }
+        ];
+        features = {
+          "hyper" = [ "dep:hyper" ];
+          "isahc" = [ "dep:isahc" ];
+          "reqwest" = [ "dep:reqwest" ];
+          "reqwest-rustls" = [ "reqwest" "reqwest/rustls-tls-native-roots" ];
+          "tokio" = [ "dep:tokio" ];
+        };
+      };
       "opentelemetry-otlp" = rec {
         crateName = "opentelemetry-otlp";
         version = "0.15.0";
@@ -7535,6 +7786,18 @@ rec {
         ];
 
       };
+      "portable-atomic" = rec {
+        crateName = "portable-atomic";
+        version = "1.6.0";
+        edition = "2018";
+        sha256 = "1h77x9qx7pns0d66vdrmdbmwpi7586h7ysnkdnhrn5mwi2cyyw3i";
+        features = {
+          "critical-section" = [ "dep:critical-section" ];
+          "default" = [ "fallback" ];
+          "serde" = [ "dep:serde" ];
+        };
+        resolvedDefaultFeatures = [ "default" "fallback" ];
+      };
       "powerfmt" = rec {
         crateName = "powerfmt";
         version = "0.2.0";
@@ -9772,6 +10035,16 @@ rec {
         ];
 
       };
+      "scoped-tls" = rec {
+        crateName = "scoped-tls";
+        version = "1.0.1";
+        edition = "2015";
+        sha256 = "15524h04mafihcvfpgxd8f4bgc3k95aclz8grjkg9a0rxcvn9kz1";
+        authors = [
+          "Alex Crichton <alex@alexcrichton.com>"
+        ];
+
+      };
       "scopeguard" = rec {
         crateName = "scopeguard";
         version = "1.2.0";
@@ -11060,6 +11333,90 @@ rec {
         ];
         features = { };
       };
+      "threadpool" = rec {
+        crateName = "threadpool";
+        version = "1.8.1";
+        edition = "2015";
+        sha256 = "1amgfyzvynbm8pacniivzq9r0fh3chhs7kijic81j76l6c5ycl6h";
+        authors = [
+          "The Rust Project Developers"
+          "Corey Farwell <coreyf@rwell.org>"
+          "Stefan Schindler <dns2utf8@estada.ch>"
+        ];
+        dependencies = [
+          {
+            name = "num_cpus";
+            packageId = "num_cpus";
+          }
+        ];
+
+      };
+      "tikv-jemalloc-sys" = rec {
+        crateName = "tikv-jemalloc-sys";
+        version = "0.5.4+5.3.0-patched";
+        edition = "2018";
+        links = "jemalloc";
+        sha256 = "1lc5vm1p9dqdvd3mn3264zddnd7z6i95ch3y69prnjgxp0y480ll";
+        authors = [
+          "Alex Crichton <alex@alexcrichton.com>"
+          "Gonzalo Brito Gadeschi <gonzalobg88@gmail.com>"
+          "The TiKV Project Developers"
+        ];
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+            usesDefaultFeatures = false;
+          }
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+          }
+        ];
+        features = {
+          "background_threads" = [ "background_threads_runtime_support" ];
+          "default" = [ "background_threads_runtime_support" ];
+        };
+        resolvedDefaultFeatures = [ "background_threads_runtime_support" ];
+      };
+      "tikv-jemallocator" = rec {
+        crateName = "tikv-jemallocator";
+        version = "0.5.4";
+        edition = "2018";
+        sha256 = "1jpanfm9az8hcbg6dyxdabykx03lj0j4g9cbwfa6rig5dg1f0pwn";
+        authors = [
+          "Alex Crichton <alex@alexcrichton.com>"
+          "Gonzalo Brito Gadeschi <gonzalobg88@gmail.com>"
+          "Simon Sapin <simon.sapin@exyr.org>"
+          "Steven Fackler <sfackler@gmail.com>"
+          "The TiKV Project Developers"
+        ];
+        dependencies = [
+          {
+            name = "libc";
+            packageId = "libc";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "tikv-jemalloc-sys";
+            packageId = "tikv-jemalloc-sys";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "background_threads" = [ "tikv-jemalloc-sys/background_threads" ];
+          "background_threads_runtime_support" = [ "tikv-jemalloc-sys/background_threads_runtime_support" ];
+          "debug" = [ "tikv-jemalloc-sys/debug" ];
+          "default" = [ "background_threads_runtime_support" ];
+          "disable_initial_exec_tls" = [ "tikv-jemalloc-sys/disable_initial_exec_tls" ];
+          "profiling" = [ "tikv-jemalloc-sys/profiling" ];
+          "stats" = [ "tikv-jemalloc-sys/stats" ];
+          "unprefixed_malloc_on_supported_platforms" = [ "tikv-jemalloc-sys/unprefixed_malloc_on_supported_platforms" ];
+        };
+        resolvedDefaultFeatures = [ "background_threads_runtime_support" "default" ];
+      };
       "time" = rec {
         crateName = "time";
         version = "0.3.34";
@@ -12252,6 +12609,106 @@ rec {
         };
         resolvedDefaultFeatures = [ "__common" "balance" "buffer" "default" "discover" "futures-core" "futures-util" "indexmap" "limit" "load" "log" "make" "pin-project" "pin-project-lite" "rand" "ready-cache" "slab" "timeout" "tokio" "tokio-util" "tracing" "util" ];
       };
+      "tower-http" = rec {
+        crateName = "tower-http";
+        version = "0.4.4";
+        edition = "2018";
+        sha256 = "0h0i2flrw25zwxv72sifq4v5mwcb030spksy7r2a4xl2d4fvpib1";
+        authors = [
+          "Tower Maintainers <team@tower-rs.com>"
+        ];
+        dependencies = [
+          {
+            name = "bitflags";
+            packageId = "bitflags 2.4.2";
+          }
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+          {
+            name = "futures-core";
+            packageId = "futures-core";
+          }
+          {
+            name = "futures-util";
+            packageId = "futures-util";
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "http";
+            packageId = "http";
+          }
+          {
+            name = "http-body";
+            packageId = "http-body";
+          }
+          {
+            name = "http-range-header";
+            packageId = "http-range-header";
+          }
+          {
+            name = "pin-project-lite";
+            packageId = "pin-project-lite";
+          }
+          {
+            name = "tower-layer";
+            packageId = "tower-layer";
+          }
+          {
+            name = "tower-service";
+            packageId = "tower-service";
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
+        ];
+        devDependencies = [
+          {
+            name = "bytes";
+            packageId = "bytes";
+          }
+        ];
+        features = {
+          "async-compression" = [ "dep:async-compression" ];
+          "auth" = [ "base64" "validate-request" ];
+          "base64" = [ "dep:base64" ];
+          "catch-panic" = [ "tracing" "futures-util/std" ];
+          "compression-br" = [ "async-compression/brotli" "tokio-util" "tokio" ];
+          "compression-deflate" = [ "async-compression/zlib" "tokio-util" "tokio" ];
+          "compression-full" = [ "compression-br" "compression-deflate" "compression-gzip" "compression-zstd" ];
+          "compression-gzip" = [ "async-compression/gzip" "tokio-util" "tokio" ];
+          "compression-zstd" = [ "async-compression/zstd" "tokio-util" "tokio" ];
+          "decompression-br" = [ "async-compression/brotli" "tokio-util" "tokio" ];
+          "decompression-deflate" = [ "async-compression/zlib" "tokio-util" "tokio" ];
+          "decompression-full" = [ "decompression-br" "decompression-deflate" "decompression-gzip" "decompression-zstd" ];
+          "decompression-gzip" = [ "async-compression/gzip" "tokio-util" "tokio" ];
+          "decompression-zstd" = [ "async-compression/zstd" "tokio-util" "tokio" ];
+          "follow-redirect" = [ "iri-string" "tower/util" ];
+          "fs" = [ "tokio/fs" "tokio-util/io" "tokio/io-util" "mime_guess" "mime" "percent-encoding" "httpdate" "set-status" "futures-util/alloc" "tracing" ];
+          "full" = [ "add-extension" "auth" "catch-panic" "compression-full" "cors" "decompression-full" "follow-redirect" "fs" "limit" "map-request-body" "map-response-body" "metrics" "normalize-path" "propagate-header" "redirect" "request-id" "sensitive-headers" "set-header" "set-status" "timeout" "trace" "util" "validate-request" ];
+          "httpdate" = [ "dep:httpdate" ];
+          "iri-string" = [ "dep:iri-string" ];
+          "metrics" = [ "tokio/time" ];
+          "mime" = [ "dep:mime" ];
+          "mime_guess" = [ "dep:mime_guess" ];
+          "percent-encoding" = [ "dep:percent-encoding" ];
+          "request-id" = [ "uuid" ];
+          "timeout" = [ "tokio/time" ];
+          "tokio" = [ "dep:tokio" ];
+          "tokio-util" = [ "dep:tokio-util" ];
+          "tower" = [ "dep:tower" ];
+          "trace" = [ "tracing" ];
+          "tracing" = [ "dep:tracing" ];
+          "util" = [ "tower" ];
+          "uuid" = [ "dep:uuid" ];
+          "validate-request" = [ "mime" ];
+        };
+        resolvedDefaultFeatures = [ "default" "trace" "tracing" ];
+      };
       "tower-layer" = rec {
         crateName = "tower-layer";
         version = "0.3.2";
@@ -12317,7 +12774,7 @@ rec {
           "tracing-attributes" = [ "dep:tracing-attributes" ];
           "valuable" = [ "tracing-core/valuable" ];
         };
-        resolvedDefaultFeatures = [ "attributes" "default" "log" "max_level_trace" "release_max_level_info" "std" "tracing-attributes" ];
+        resolvedDefaultFeatures = [ "attributes" "default" "log" "max_level_trace" "release_max_level_debug" "std" "tracing-attributes" ];
       };
       "tracing-attributes" = rec {
         crateName = "tracing-attributes";
@@ -12414,6 +12871,33 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "pin-project" "std" "std-future" ];
       };
+      "tracing-indicatif" = rec {
+        crateName = "tracing-indicatif";
+        version = "0.3.6";
+        edition = "2021";
+        sha256 = "07cmn4ilw8hdfzc1mirccwkgl160k3x9fhgg7xydj4gy9r181586";
+        libName = "tracing_indicatif";
+        dependencies = [
+          {
+            name = "indicatif";
+            packageId = "indicatif";
+            features = [ "in_memory" ];
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+          }
+          {
+            name = "tracing-core";
+            packageId = "tracing-core";
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+          }
+        ];
+
+      };
       "tracing-log" = rec {
         crateName = "tracing-log";
         version = "0.2.0";
@@ -12658,6 +13142,120 @@ rec {
         };
         resolvedDefaultFeatures = [ "alloc" "ansi" "default" "env-filter" "fmt" "matchers" "nu-ansi-term" "once_cell" "regex" "registry" "sharded-slab" "smallvec" "std" "thread_local" "tracing" "tracing-log" ];
       };
+      "tracing-tracy" = rec {
+        crateName = "tracing-tracy";
+        version = "0.11.0";
+        edition = "2021";
+        sha256 = "051nf1ncp1z45acgk0h5199ldlhb0wmkmvhyvk8x17x6hi7x0930";
+        authors = [
+          "Simonas Kazlauskas <tracing-tracy@kazlauskas.me>"
+        ];
+        dependencies = [
+          {
+            name = "tracing-core";
+            packageId = "tracing-core";
+            usesDefaultFeatures = false;
+            features = [ "std" ];
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            usesDefaultFeatures = false;
+            features = [ "fmt" "registry" ];
+          }
+          {
+            name = "tracy-client";
+            packageId = "tracy-client";
+            rename = "client";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "broadcast" = [ "client/broadcast" ];
+          "callstack-inlines" = [ "client/callstack-inlines" ];
+          "code-transfer" = [ "client/code-transfer" ];
+          "context-switch-tracing" = [ "client/context-switch-tracing" ];
+          "default" = [ "enable" "system-tracing" "context-switch-tracing" "sampling" "code-transfer" "broadcast" "callstack-inlines" ];
+          "delayed-init" = [ "client/delayed-init" ];
+          "enable" = [ "client/enable" ];
+          "fibers" = [ "client/fibers" ];
+          "flush-on-exit" = [ "client/flush-on-exit" ];
+          "manual-lifetime" = [ "client/manual-lifetime" ];
+          "ondemand" = [ "client/ondemand" ];
+          "only-ipv4" = [ "client/only-ipv4" ];
+          "only-localhost" = [ "client/only-localhost" ];
+          "sampling" = [ "client/sampling" ];
+          "system-tracing" = [ "client/system-tracing" ];
+          "timer-fallback" = [ "client/timer-fallback" ];
+        };
+        resolvedDefaultFeatures = [ "broadcast" "callstack-inlines" "code-transfer" "context-switch-tracing" "default" "enable" "flush-on-exit" "sampling" "system-tracing" ];
+      };
+      "tracy-client" = rec {
+        crateName = "tracy-client";
+        version = "0.17.0";
+        edition = "2021";
+        sha256 = "07bla4iigg17fl4zil2dwizslhw8mv8vrsfkhr7ri27zchd97ysr";
+        authors = [
+          "Simonas Kazlauskas <tracy-client@kazlauskas.me>"
+        ];
+        dependencies = [
+          {
+            name = "loom";
+            packageId = "loom";
+            target = { target, features }: (target."loom" or false);
+          }
+          {
+            name = "once_cell";
+            packageId = "once_cell";
+          }
+          {
+            name = "tracy-client-sys";
+            packageId = "tracy-client-sys";
+            rename = "sys";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "broadcast" = [ "sys/broadcast" ];
+          "callstack-inlines" = [ "sys/callstack-inlines" ];
+          "code-transfer" = [ "sys/code-transfer" ];
+          "context-switch-tracing" = [ "sys/context-switch-tracing" ];
+          "default" = [ "enable" "system-tracing" "context-switch-tracing" "sampling" "code-transfer" "broadcast" "callstack-inlines" ];
+          "delayed-init" = [ "sys/delayed-init" ];
+          "enable" = [ "sys/enable" ];
+          "fibers" = [ "sys/fibers" ];
+          "flush-on-exit" = [ "sys/flush-on-exit" ];
+          "manual-lifetime" = [ "sys/manual-lifetime" ];
+          "ondemand" = [ "sys/ondemand" ];
+          "only-ipv4" = [ "sys/only-ipv4" ];
+          "only-localhost" = [ "sys/only-localhost" ];
+          "sampling" = [ "sys/sampling" ];
+          "system-tracing" = [ "sys/system-tracing" ];
+          "timer-fallback" = [ "sys/timer-fallback" ];
+        };
+        resolvedDefaultFeatures = [ "broadcast" "callstack-inlines" "code-transfer" "context-switch-tracing" "enable" "flush-on-exit" "sampling" "system-tracing" ];
+      };
+      "tracy-client-sys" = rec {
+        crateName = "tracy-client-sys";
+        version = "0.22.2";
+        edition = "2021";
+        sha256 = "10h8msq85b7rhfg2vg22g2iizbk4c6fcq0jiadad37gs1mhls44x";
+        authors = [
+          "Simonas Kazlauskas <tracy-client-sys@kazlauskas.me>"
+        ];
+        buildDependencies = [
+          {
+            name = "cc";
+            packageId = "cc";
+            usesDefaultFeatures = false;
+          }
+        ];
+        features = {
+          "default" = [ "enable" "system-tracing" "context-switch-tracing" "sampling" "code-transfer" "broadcast" "callstack-inlines" ];
+          "manual-lifetime" = [ "delayed-init" ];
+        };
+        resolvedDefaultFeatures = [ "broadcast" "callstack-inlines" "code-transfer" "context-switch-tracing" "enable" "flush-on-exit" "sampling" "system-tracing" ];
+      };
       "try-lock" = rec {
         crateName = "try-lock";
         version = "0.2.5";
@@ -12731,14 +13329,14 @@ rec {
             packageId = "tracing";
           }
           {
-            name = "tracing-subscriber";
-            packageId = "tracing-subscriber";
-          }
-          {
             name = "tvix-castore";
             packageId = "tvix-castore";
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -12760,7 +13358,7 @@ rec {
           }
         ];
         features = {
-          "tonic-reflection" = [ "dep:tonic-reflection" ];
+          "tonic-reflection" = [ "dep:tonic-reflection" "tvix-castore/tonic-reflection" ];
         };
         resolvedDefaultFeatures = [ "default" "tonic-reflection" ];
       };
@@ -12880,6 +13478,11 @@ rec {
             packageId = "thiserror";
           }
           {
+            name = "threadpool";
+            packageId = "threadpool";
+            optional = true;
+          }
+          {
             name = "tokio";
             packageId = "tokio";
             features = [ "fs" "macros" "net" "rt" "rt-multi-thread" "signal" ];
@@ -12916,6 +13519,15 @@ rec {
             packageId = "tracing";
           }
           {
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
+          }
+          {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+            features = [ "tonic" ];
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -13000,7 +13612,8 @@ rec {
         ];
         features = {
           "cloud" = [ "dep:bigtable_rs" "object_store/aws" "object_store/azure" "object_store/gcp" ];
-          "fs" = [ "dep:libc" "dep:fuse-backend-rs" ];
+          "default" = [ "cloud" ];
+          "fs" = [ "dep:fuse-backend-rs" "dep:threadpool" "dep:libc" ];
           "fuse" = [ "fs" ];
           "tonic-reflection" = [ "dep:tonic-reflection" ];
           "virtiofs" = [ "fs" "dep:vhost" "dep:vhost-user-backend" "dep:virtio-queue" "dep:vm-memory" "dep:vmm-sys-util" "dep:virtio-bindings" "fuse-backend-rs?/vhost-user-fs" "fuse-backend-rs?/virtiofs" ];
@@ -13043,6 +13656,10 @@ rec {
             packageId = "nix-compat";
           }
           {
+            name = "rnix";
+            packageId = "rnix";
+          }
+          {
             name = "rustyline";
             packageId = "rustyline";
           }
@@ -13051,17 +13668,21 @@ rec {
             packageId = "thiserror";
           }
           {
+            name = "tikv-jemallocator";
+            packageId = "tikv-jemallocator";
+            target = { target, features }: (!("msvc" == target."env" or null));
+          }
+          {
             name = "tokio";
             packageId = "tokio";
           }
           {
             name = "tracing";
             packageId = "tracing";
-            features = [ "max_level_trace" "release_max_level_info" ];
           }
           {
-            name = "tracing-subscriber";
-            packageId = "tracing-subscriber";
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
           }
           {
             name = "tvix-build";
@@ -13085,11 +13706,18 @@ rec {
             usesDefaultFeatures = false;
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+          }
+          {
             name = "wu-manber";
             packageId = "wu-manber";
           }
         ];
-
+        features = {
+          "tracy" = [ "tvix-tracing/tracy" ];
+        };
+        resolvedDefaultFeatures = [ "default" "tracy" ];
       };
       "tvix-eval" = rec {
         crateName = "tvix-eval";
@@ -13222,10 +13850,6 @@ rec {
             packageId = "tvix-eval-builtin-macros";
             rename = "builtin-macros";
           }
-          {
-            name = "xml-rs";
-            packageId = "xml-rs";
-          }
         ];
         devDependencies = [
           {
@@ -13248,6 +13872,11 @@ rec {
             name = "tempfile";
             packageId = "tempfile";
           }
+          {
+            name = "tikv-jemallocator";
+            packageId = "tikv-jemallocator";
+            target = { target, features }: (!("msvc" == target."env" or null));
+          }
         ];
         features = {
           "arbitrary" = [ "proptest" "test-strategy" "imbl/proptest" ];
@@ -13369,6 +13998,11 @@ rec {
             packageId = "thiserror";
           }
           {
+            name = "tikv-jemallocator";
+            packageId = "tikv-jemallocator";
+            target = { target, features }: (!("msvc" == target."env" or null));
+          }
+          {
             name = "tokio";
             packageId = "tokio";
           }
@@ -13386,6 +14020,10 @@ rec {
             packageId = "tracing";
           }
           {
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
+          }
+          {
             name = "tvix-build";
             packageId = "tvix-build";
             usesDefaultFeatures = false;
@@ -13404,6 +14042,10 @@ rec {
             usesDefaultFeatures = false;
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -13561,22 +14203,6 @@ rec {
             features = [ "async" ];
           }
           {
-            name = "opentelemetry";
-            packageId = "opentelemetry";
-            optional = true;
-          }
-          {
-            name = "opentelemetry-otlp";
-            packageId = "opentelemetry-otlp";
-            optional = true;
-          }
-          {
-            name = "opentelemetry_sdk";
-            packageId = "opentelemetry_sdk";
-            optional = true;
-            features = [ "rt-tokio" ];
-          }
-          {
             name = "parking_lot";
             packageId = "parking_lot 0.12.2";
           }
@@ -13658,23 +14284,28 @@ rec {
             packageId = "tower";
           }
           {
+            name = "tower-http";
+            packageId = "tower-http";
+            features = [ "trace" ];
+          }
+          {
             name = "tracing";
             packageId = "tracing";
           }
           {
-            name = "tracing-opentelemetry";
-            packageId = "tracing-opentelemetry";
-          }
-          {
-            name = "tracing-subscriber";
-            packageId = "tracing-subscriber";
-            features = [ "env-filter" ];
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
           }
           {
             name = "tvix-castore";
             packageId = "tvix-castore";
           }
           {
+            name = "tvix-tracing";
+            packageId = "tvix-tracing";
+            features = [ "tonic" ];
+          }
+          {
             name = "url";
             packageId = "url";
           }
@@ -13719,11 +14350,104 @@ rec {
           "cloud" = [ "dep:bigtable_rs" "tvix-castore/cloud" ];
           "default" = [ "cloud" "fuse" "otlp" "tonic-reflection" ];
           "fuse" = [ "tvix-castore/fuse" ];
-          "otlp" = [ "dep:opentelemetry" "dep:opentelemetry-otlp" "dep:opentelemetry_sdk" ];
+          "otlp" = [ "tvix-tracing/otlp" ];
           "tonic-reflection" = [ "dep:tonic-reflection" "tvix-castore/tonic-reflection" ];
+          "tracy" = [ "tvix-tracing/tracy" ];
           "virtiofs" = [ "tvix-castore/virtiofs" ];
         };
-        resolvedDefaultFeatures = [ "cloud" "default" "fuse" "integration" "otlp" "tonic-reflection" "virtiofs" ];
+        resolvedDefaultFeatures = [ "cloud" "default" "fuse" "integration" "otlp" "tonic-reflection" "tracy" "virtiofs" ];
+      };
+      "tvix-tracing" = rec {
+        crateName = "tvix-tracing";
+        version = "0.1.0";
+        edition = "2021";
+        # We can't filter paths with references in Nix 2.4
+        # See https://github.com/NixOS/nix/issues/5410
+        src =
+          if ((lib.versionOlder builtins.nixVersion "2.4pre20211007") || (lib.versionOlder "2.5" builtins.nixVersion))
+          then lib.cleanSourceWith { filter = sourceFilter; src = ./tracing; }
+          else ./tracing;
+        dependencies = [
+          {
+            name = "http";
+            packageId = "http";
+            optional = true;
+          }
+          {
+            name = "indicatif";
+            packageId = "indicatif";
+          }
+          {
+            name = "lazy_static";
+            packageId = "lazy_static";
+          }
+          {
+            name = "opentelemetry";
+            packageId = "opentelemetry";
+            optional = true;
+          }
+          {
+            name = "opentelemetry-http";
+            packageId = "opentelemetry-http";
+            optional = true;
+          }
+          {
+            name = "opentelemetry-otlp";
+            packageId = "opentelemetry-otlp";
+            optional = true;
+          }
+          {
+            name = "opentelemetry_sdk";
+            packageId = "opentelemetry_sdk";
+            optional = true;
+            features = [ "rt-tokio" ];
+          }
+          {
+            name = "thiserror";
+            packageId = "thiserror";
+          }
+          {
+            name = "tokio";
+            packageId = "tokio";
+            features = [ "sync" "rt" ];
+          }
+          {
+            name = "tonic";
+            packageId = "tonic";
+            optional = true;
+          }
+          {
+            name = "tracing";
+            packageId = "tracing";
+            features = [ "max_level_trace" "release_max_level_debug" ];
+          }
+          {
+            name = "tracing-indicatif";
+            packageId = "tracing-indicatif";
+          }
+          {
+            name = "tracing-opentelemetry";
+            packageId = "tracing-opentelemetry";
+            optional = true;
+          }
+          {
+            name = "tracing-subscriber";
+            packageId = "tracing-subscriber";
+            features = [ "env-filter" ];
+          }
+          {
+            name = "tracing-tracy";
+            packageId = "tracing-tracy";
+            optional = true;
+            features = [ "flush-on-exit" ];
+          }
+        ];
+        features = {
+          "otlp" = [ "dep:tracing-opentelemetry" "dep:opentelemetry" "dep:opentelemetry-otlp" "dep:opentelemetry_sdk" "dep:opentelemetry-http" ];
+          "tonic" = [ "dep:tonic" "dep:http" ];
+          "tracy" = [ "dep:tracing-tracy" ];
+        };
+        resolvedDefaultFeatures = [ "default" "otlp" "tonic" "tracy" ];
       };
       "typenum" = rec {
         crateName = "typenum";
@@ -14246,6 +14970,97 @@ rec {
           "with-serde" = [ "serde" "serde_derive" ];
         };
       };
+      "vt100" = rec {
+        crateName = "vt100";
+        version = "0.15.2";
+        edition = "2021";
+        sha256 = "1pklc8y984axmxr0cd363srr2d27wd5rj15xlcmkjznvy0xqdkc4";
+        authors = [
+          "Jesse Luehrs <doy@tozt.net>"
+        ];
+        dependencies = [
+          {
+            name = "itoa";
+            packageId = "itoa";
+          }
+          {
+            name = "log";
+            packageId = "log";
+          }
+          {
+            name = "unicode-width";
+            packageId = "unicode-width";
+          }
+          {
+            name = "vte";
+            packageId = "vte";
+          }
+        ];
+        devDependencies = [
+          {
+            name = "vte";
+            packageId = "vte";
+          }
+        ];
+
+      };
+      "vte" = rec {
+        crateName = "vte";
+        version = "0.11.1";
+        edition = "2021";
+        sha256 = "15r1ff4j8ndqj9vsyil3wqwxhhl7jsz5g58f31n0h1wlpxgjn0pm";
+        authors = [
+          "Joe Wilm <joe@jwilm.com>"
+          "Christian Duerr <contact@christianduerr.com>"
+        ];
+        dependencies = [
+          {
+            name = "arrayvec";
+            packageId = "arrayvec";
+            optional = true;
+            usesDefaultFeatures = false;
+          }
+          {
+            name = "utf8parse";
+            packageId = "utf8parse";
+          }
+          {
+            name = "vte_generate_state_changes";
+            packageId = "vte_generate_state_changes";
+          }
+        ];
+        features = {
+          "ansi" = [ "log" ];
+          "arrayvec" = [ "dep:arrayvec" ];
+          "default" = [ "no_std" ];
+          "log" = [ "dep:log" ];
+          "nightly" = [ "utf8parse/nightly" ];
+          "no_std" = [ "arrayvec" ];
+          "serde" = [ "dep:serde" ];
+        };
+        resolvedDefaultFeatures = [ "arrayvec" "default" "no_std" ];
+      };
+      "vte_generate_state_changes" = rec {
+        crateName = "vte_generate_state_changes";
+        version = "0.1.1";
+        edition = "2018";
+        sha256 = "1zs5q766q7jmc80c5c80gpzy4qpg5lnydf94mgdzrpy7h5q82myj";
+        procMacro = true;
+        authors = [
+          "Christian Duerr <contact@christianduerr.com>"
+        ];
+        dependencies = [
+          {
+            name = "proc-macro2";
+            packageId = "proc-macro2";
+          }
+          {
+            name = "quote";
+            packageId = "quote";
+          }
+        ];
+
+      };
       "wait-timeout" = rec {
         crateName = "wait-timeout";
         version = "0.2.0";
@@ -15169,7 +15984,717 @@ rec {
         ];
 
       };
-      "windows-core" = rec {
+      "windows" = rec {
+        crateName = "windows";
+        version = "0.54.0";
+        edition = "2021";
+        sha256 = "0j8vd8sg2rbln6g3a608qg1a7r2lwxcga78mmxjjin5ybmrfallj";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-core";
+            packageId = "windows-core 0.54.0";
+          }
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.5";
+          }
+        ];
+        features = {
+          "AI" = [ "Foundation" ];
+          "AI_MachineLearning" = [ "AI" ];
+          "ApplicationModel" = [ "Foundation" ];
+          "ApplicationModel_Activation" = [ "ApplicationModel" ];
+          "ApplicationModel_AppExtensions" = [ "ApplicationModel" ];
+          "ApplicationModel_AppService" = [ "ApplicationModel" ];
+          "ApplicationModel_Appointments" = [ "ApplicationModel" ];
+          "ApplicationModel_Appointments_AppointmentsProvider" = [ "ApplicationModel_Appointments" ];
+          "ApplicationModel_Appointments_DataProvider" = [ "ApplicationModel_Appointments" ];
+          "ApplicationModel_Background" = [ "ApplicationModel" ];
+          "ApplicationModel_Calls" = [ "ApplicationModel" ];
+          "ApplicationModel_Calls_Background" = [ "ApplicationModel_Calls" ];
+          "ApplicationModel_Calls_Provider" = [ "ApplicationModel_Calls" ];
+          "ApplicationModel_Chat" = [ "ApplicationModel" ];
+          "ApplicationModel_CommunicationBlocking" = [ "ApplicationModel" ];
+          "ApplicationModel_Contacts" = [ "ApplicationModel" ];
+          "ApplicationModel_Contacts_DataProvider" = [ "ApplicationModel_Contacts" ];
+          "ApplicationModel_Contacts_Provider" = [ "ApplicationModel_Contacts" ];
+          "ApplicationModel_ConversationalAgent" = [ "ApplicationModel" ];
+          "ApplicationModel_Core" = [ "ApplicationModel" ];
+          "ApplicationModel_DataTransfer" = [ "ApplicationModel" ];
+          "ApplicationModel_DataTransfer_DragDrop" = [ "ApplicationModel_DataTransfer" ];
+          "ApplicationModel_DataTransfer_DragDrop_Core" = [ "ApplicationModel_DataTransfer_DragDrop" ];
+          "ApplicationModel_DataTransfer_ShareTarget" = [ "ApplicationModel_DataTransfer" ];
+          "ApplicationModel_Email" = [ "ApplicationModel" ];
+          "ApplicationModel_Email_DataProvider" = [ "ApplicationModel_Email" ];
+          "ApplicationModel_ExtendedExecution" = [ "ApplicationModel" ];
+          "ApplicationModel_ExtendedExecution_Foreground" = [ "ApplicationModel_ExtendedExecution" ];
+          "ApplicationModel_Holographic" = [ "ApplicationModel" ];
+          "ApplicationModel_LockScreen" = [ "ApplicationModel" ];
+          "ApplicationModel_Payments" = [ "ApplicationModel" ];
+          "ApplicationModel_Payments_Provider" = [ "ApplicationModel_Payments" ];
+          "ApplicationModel_Preview" = [ "ApplicationModel" ];
+          "ApplicationModel_Preview_Holographic" = [ "ApplicationModel_Preview" ];
+          "ApplicationModel_Preview_InkWorkspace" = [ "ApplicationModel_Preview" ];
+          "ApplicationModel_Preview_Notes" = [ "ApplicationModel_Preview" ];
+          "ApplicationModel_Resources" = [ "ApplicationModel" ];
+          "ApplicationModel_Resources_Core" = [ "ApplicationModel_Resources" ];
+          "ApplicationModel_Resources_Management" = [ "ApplicationModel_Resources" ];
+          "ApplicationModel_Search" = [ "ApplicationModel" ];
+          "ApplicationModel_Search_Core" = [ "ApplicationModel_Search" ];
+          "ApplicationModel_Store" = [ "ApplicationModel" ];
+          "ApplicationModel_Store_LicenseManagement" = [ "ApplicationModel_Store" ];
+          "ApplicationModel_Store_Preview" = [ "ApplicationModel_Store" ];
+          "ApplicationModel_Store_Preview_InstallControl" = [ "ApplicationModel_Store_Preview" ];
+          "ApplicationModel_UserActivities" = [ "ApplicationModel" ];
+          "ApplicationModel_UserActivities_Core" = [ "ApplicationModel_UserActivities" ];
+          "ApplicationModel_UserDataAccounts" = [ "ApplicationModel" ];
+          "ApplicationModel_UserDataAccounts_Provider" = [ "ApplicationModel_UserDataAccounts" ];
+          "ApplicationModel_UserDataAccounts_SystemAccess" = [ "ApplicationModel_UserDataAccounts" ];
+          "ApplicationModel_UserDataTasks" = [ "ApplicationModel" ];
+          "ApplicationModel_UserDataTasks_DataProvider" = [ "ApplicationModel_UserDataTasks" ];
+          "ApplicationModel_VoiceCommands" = [ "ApplicationModel" ];
+          "ApplicationModel_Wallet" = [ "ApplicationModel" ];
+          "ApplicationModel_Wallet_System" = [ "ApplicationModel_Wallet" ];
+          "Data" = [ "Foundation" ];
+          "Data_Html" = [ "Data" ];
+          "Data_Json" = [ "Data" ];
+          "Data_Pdf" = [ "Data" ];
+          "Data_Text" = [ "Data" ];
+          "Data_Xml" = [ "Data" ];
+          "Data_Xml_Dom" = [ "Data_Xml" ];
+          "Data_Xml_Xsl" = [ "Data_Xml" ];
+          "Devices" = [ "Foundation" ];
+          "Devices_Adc" = [ "Devices" ];
+          "Devices_Adc_Provider" = [ "Devices_Adc" ];
+          "Devices_Background" = [ "Devices" ];
+          "Devices_Bluetooth" = [ "Devices" ];
+          "Devices_Bluetooth_Advertisement" = [ "Devices_Bluetooth" ];
+          "Devices_Bluetooth_Background" = [ "Devices_Bluetooth" ];
+          "Devices_Bluetooth_GenericAttributeProfile" = [ "Devices_Bluetooth" ];
+          "Devices_Bluetooth_Rfcomm" = [ "Devices_Bluetooth" ];
+          "Devices_Custom" = [ "Devices" ];
+          "Devices_Display" = [ "Devices" ];
+          "Devices_Display_Core" = [ "Devices_Display" ];
+          "Devices_Enumeration" = [ "Devices" ];
+          "Devices_Enumeration_Pnp" = [ "Devices_Enumeration" ];
+          "Devices_Geolocation" = [ "Devices" ];
+          "Devices_Geolocation_Geofencing" = [ "Devices_Geolocation" ];
+          "Devices_Geolocation_Provider" = [ "Devices_Geolocation" ];
+          "Devices_Gpio" = [ "Devices" ];
+          "Devices_Gpio_Provider" = [ "Devices_Gpio" ];
+          "Devices_Haptics" = [ "Devices" ];
+          "Devices_HumanInterfaceDevice" = [ "Devices" ];
+          "Devices_I2c" = [ "Devices" ];
+          "Devices_I2c_Provider" = [ "Devices_I2c" ];
+          "Devices_Input" = [ "Devices" ];
+          "Devices_Input_Preview" = [ "Devices_Input" ];
+          "Devices_Lights" = [ "Devices" ];
+          "Devices_Lights_Effects" = [ "Devices_Lights" ];
+          "Devices_Midi" = [ "Devices" ];
+          "Devices_PointOfService" = [ "Devices" ];
+          "Devices_PointOfService_Provider" = [ "Devices_PointOfService" ];
+          "Devices_Portable" = [ "Devices" ];
+          "Devices_Power" = [ "Devices" ];
+          "Devices_Printers" = [ "Devices" ];
+          "Devices_Printers_Extensions" = [ "Devices_Printers" ];
+          "Devices_Pwm" = [ "Devices" ];
+          "Devices_Pwm_Provider" = [ "Devices_Pwm" ];
+          "Devices_Radios" = [ "Devices" ];
+          "Devices_Scanners" = [ "Devices" ];
+          "Devices_Sensors" = [ "Devices" ];
+          "Devices_Sensors_Custom" = [ "Devices_Sensors" ];
+          "Devices_SerialCommunication" = [ "Devices" ];
+          "Devices_SmartCards" = [ "Devices" ];
+          "Devices_Sms" = [ "Devices" ];
+          "Devices_Spi" = [ "Devices" ];
+          "Devices_Spi_Provider" = [ "Devices_Spi" ];
+          "Devices_Usb" = [ "Devices" ];
+          "Devices_WiFi" = [ "Devices" ];
+          "Devices_WiFiDirect" = [ "Devices" ];
+          "Devices_WiFiDirect_Services" = [ "Devices_WiFiDirect" ];
+          "Embedded" = [ "Foundation" ];
+          "Embedded_DeviceLockdown" = [ "Embedded" ];
+          "Foundation_Collections" = [ "Foundation" ];
+          "Foundation_Diagnostics" = [ "Foundation" ];
+          "Foundation_Metadata" = [ "Foundation" ];
+          "Foundation_Numerics" = [ "Foundation" ];
+          "Gaming" = [ "Foundation" ];
+          "Gaming_Input" = [ "Gaming" ];
+          "Gaming_Input_Custom" = [ "Gaming_Input" ];
+          "Gaming_Input_ForceFeedback" = [ "Gaming_Input" ];
+          "Gaming_Input_Preview" = [ "Gaming_Input" ];
+          "Gaming_Preview" = [ "Gaming" ];
+          "Gaming_Preview_GamesEnumeration" = [ "Gaming_Preview" ];
+          "Gaming_UI" = [ "Gaming" ];
+          "Gaming_XboxLive" = [ "Gaming" ];
+          "Gaming_XboxLive_Storage" = [ "Gaming_XboxLive" ];
+          "Globalization" = [ "Foundation" ];
+          "Globalization_Collation" = [ "Globalization" ];
+          "Globalization_DateTimeFormatting" = [ "Globalization" ];
+          "Globalization_Fonts" = [ "Globalization" ];
+          "Globalization_NumberFormatting" = [ "Globalization" ];
+          "Globalization_PhoneNumberFormatting" = [ "Globalization" ];
+          "Graphics" = [ "Foundation" ];
+          "Graphics_Capture" = [ "Graphics" ];
+          "Graphics_DirectX" = [ "Graphics" ];
+          "Graphics_DirectX_Direct3D11" = [ "Graphics_DirectX" ];
+          "Graphics_Display" = [ "Graphics" ];
+          "Graphics_Display_Core" = [ "Graphics_Display" ];
+          "Graphics_Effects" = [ "Graphics" ];
+          "Graphics_Holographic" = [ "Graphics" ];
+          "Graphics_Imaging" = [ "Graphics" ];
+          "Graphics_Printing" = [ "Graphics" ];
+          "Graphics_Printing3D" = [ "Graphics" ];
+          "Graphics_Printing_OptionDetails" = [ "Graphics_Printing" ];
+          "Graphics_Printing_PrintSupport" = [ "Graphics_Printing" ];
+          "Graphics_Printing_PrintTicket" = [ "Graphics_Printing" ];
+          "Graphics_Printing_Workflow" = [ "Graphics_Printing" ];
+          "Management" = [ "Foundation" ];
+          "Management_Core" = [ "Management" ];
+          "Management_Deployment" = [ "Management" ];
+          "Management_Deployment_Preview" = [ "Management_Deployment" ];
+          "Management_Policies" = [ "Management" ];
+          "Management_Update" = [ "Management" ];
+          "Management_Workplace" = [ "Management" ];
+          "Media" = [ "Foundation" ];
+          "Media_AppBroadcasting" = [ "Media" ];
+          "Media_AppRecording" = [ "Media" ];
+          "Media_Audio" = [ "Media" ];
+          "Media_Capture" = [ "Media" ];
+          "Media_Capture_Core" = [ "Media_Capture" ];
+          "Media_Capture_Frames" = [ "Media_Capture" ];
+          "Media_Casting" = [ "Media" ];
+          "Media_ClosedCaptioning" = [ "Media" ];
+          "Media_ContentRestrictions" = [ "Media" ];
+          "Media_Control" = [ "Media" ];
+          "Media_Core" = [ "Media" ];
+          "Media_Core_Preview" = [ "Media_Core" ];
+          "Media_Devices" = [ "Media" ];
+          "Media_Devices_Core" = [ "Media_Devices" ];
+          "Media_DialProtocol" = [ "Media" ];
+          "Media_Editing" = [ "Media" ];
+          "Media_Effects" = [ "Media" ];
+          "Media_FaceAnalysis" = [ "Media" ];
+          "Media_Import" = [ "Media" ];
+          "Media_MediaProperties" = [ "Media" ];
+          "Media_Miracast" = [ "Media" ];
+          "Media_Ocr" = [ "Media" ];
+          "Media_PlayTo" = [ "Media" ];
+          "Media_Playback" = [ "Media" ];
+          "Media_Playlists" = [ "Media" ];
+          "Media_Protection" = [ "Media" ];
+          "Media_Protection_PlayReady" = [ "Media_Protection" ];
+          "Media_Render" = [ "Media" ];
+          "Media_SpeechRecognition" = [ "Media" ];
+          "Media_SpeechSynthesis" = [ "Media" ];
+          "Media_Streaming" = [ "Media" ];
+          "Media_Streaming_Adaptive" = [ "Media_Streaming" ];
+          "Media_Transcoding" = [ "Media" ];
+          "Networking" = [ "Foundation" ];
+          "Networking_BackgroundTransfer" = [ "Networking" ];
+          "Networking_Connectivity" = [ "Networking" ];
+          "Networking_NetworkOperators" = [ "Networking" ];
+          "Networking_Proximity" = [ "Networking" ];
+          "Networking_PushNotifications" = [ "Networking" ];
+          "Networking_ServiceDiscovery" = [ "Networking" ];
+          "Networking_ServiceDiscovery_Dnssd" = [ "Networking_ServiceDiscovery" ];
+          "Networking_Sockets" = [ "Networking" ];
+          "Networking_Vpn" = [ "Networking" ];
+          "Networking_XboxLive" = [ "Networking" ];
+          "Perception" = [ "Foundation" ];
+          "Perception_Automation" = [ "Perception" ];
+          "Perception_Automation_Core" = [ "Perception_Automation" ];
+          "Perception_People" = [ "Perception" ];
+          "Perception_Spatial" = [ "Perception" ];
+          "Perception_Spatial_Preview" = [ "Perception_Spatial" ];
+          "Perception_Spatial_Surfaces" = [ "Perception_Spatial" ];
+          "Phone" = [ "Foundation" ];
+          "Phone_ApplicationModel" = [ "Phone" ];
+          "Phone_Devices" = [ "Phone" ];
+          "Phone_Devices_Notification" = [ "Phone_Devices" ];
+          "Phone_Devices_Power" = [ "Phone_Devices" ];
+          "Phone_Management" = [ "Phone" ];
+          "Phone_Management_Deployment" = [ "Phone_Management" ];
+          "Phone_Media" = [ "Phone" ];
+          "Phone_Media_Devices" = [ "Phone_Media" ];
+          "Phone_Notification" = [ "Phone" ];
+          "Phone_Notification_Management" = [ "Phone_Notification" ];
+          "Phone_PersonalInformation" = [ "Phone" ];
+          "Phone_PersonalInformation_Provisioning" = [ "Phone_PersonalInformation" ];
+          "Phone_Speech" = [ "Phone" ];
+          "Phone_Speech_Recognition" = [ "Phone_Speech" ];
+          "Phone_StartScreen" = [ "Phone" ];
+          "Phone_System" = [ "Phone" ];
+          "Phone_System_Power" = [ "Phone_System" ];
+          "Phone_System_Profile" = [ "Phone_System" ];
+          "Phone_System_UserProfile" = [ "Phone_System" ];
+          "Phone_System_UserProfile_GameServices" = [ "Phone_System_UserProfile" ];
+          "Phone_System_UserProfile_GameServices_Core" = [ "Phone_System_UserProfile_GameServices" ];
+          "Phone_UI" = [ "Phone" ];
+          "Phone_UI_Input" = [ "Phone_UI" ];
+          "Security" = [ "Foundation" ];
+          "Security_Authentication" = [ "Security" ];
+          "Security_Authentication_Identity" = [ "Security_Authentication" ];
+          "Security_Authentication_Identity_Core" = [ "Security_Authentication_Identity" ];
+          "Security_Authentication_OnlineId" = [ "Security_Authentication" ];
+          "Security_Authentication_Web" = [ "Security_Authentication" ];
+          "Security_Authentication_Web_Core" = [ "Security_Authentication_Web" ];
+          "Security_Authentication_Web_Provider" = [ "Security_Authentication_Web" ];
+          "Security_Authorization" = [ "Security" ];
+          "Security_Authorization_AppCapabilityAccess" = [ "Security_Authorization" ];
+          "Security_Credentials" = [ "Security" ];
+          "Security_Credentials_UI" = [ "Security_Credentials" ];
+          "Security_Cryptography" = [ "Security" ];
+          "Security_Cryptography_Certificates" = [ "Security_Cryptography" ];
+          "Security_Cryptography_Core" = [ "Security_Cryptography" ];
+          "Security_Cryptography_DataProtection" = [ "Security_Cryptography" ];
+          "Security_DataProtection" = [ "Security" ];
+          "Security_EnterpriseData" = [ "Security" ];
+          "Security_ExchangeActiveSyncProvisioning" = [ "Security" ];
+          "Security_Isolation" = [ "Security" ];
+          "Services" = [ "Foundation" ];
+          "Services_Maps" = [ "Services" ];
+          "Services_Maps_Guidance" = [ "Services_Maps" ];
+          "Services_Maps_LocalSearch" = [ "Services_Maps" ];
+          "Services_Maps_OfflineMaps" = [ "Services_Maps" ];
+          "Services_Store" = [ "Services" ];
+          "Services_TargetedContent" = [ "Services" ];
+          "Storage" = [ "Foundation" ];
+          "Storage_AccessCache" = [ "Storage" ];
+          "Storage_BulkAccess" = [ "Storage" ];
+          "Storage_Compression" = [ "Storage" ];
+          "Storage_FileProperties" = [ "Storage" ];
+          "Storage_Pickers" = [ "Storage" ];
+          "Storage_Pickers_Provider" = [ "Storage_Pickers" ];
+          "Storage_Provider" = [ "Storage" ];
+          "Storage_Search" = [ "Storage" ];
+          "Storage_Streams" = [ "Storage" ];
+          "System" = [ "Foundation" ];
+          "System_Diagnostics" = [ "System" ];
+          "System_Diagnostics_DevicePortal" = [ "System_Diagnostics" ];
+          "System_Diagnostics_Telemetry" = [ "System_Diagnostics" ];
+          "System_Diagnostics_TraceReporting" = [ "System_Diagnostics" ];
+          "System_Display" = [ "System" ];
+          "System_Implementation" = [ "System" ];
+          "System_Implementation_FileExplorer" = [ "System_Implementation" ];
+          "System_Inventory" = [ "System" ];
+          "System_Power" = [ "System" ];
+          "System_Profile" = [ "System" ];
+          "System_Profile_SystemManufacturers" = [ "System_Profile" ];
+          "System_RemoteDesktop" = [ "System" ];
+          "System_RemoteDesktop_Input" = [ "System_RemoteDesktop" ];
+          "System_RemoteDesktop_Provider" = [ "System_RemoteDesktop" ];
+          "System_RemoteSystems" = [ "System" ];
+          "System_Threading" = [ "System" ];
+          "System_Threading_Core" = [ "System_Threading" ];
+          "System_Update" = [ "System" ];
+          "System_UserProfile" = [ "System" ];
+          "UI" = [ "Foundation" ];
+          "UI_Accessibility" = [ "UI" ];
+          "UI_ApplicationSettings" = [ "UI" ];
+          "UI_Composition" = [ "UI" ];
+          "UI_Composition_Core" = [ "UI_Composition" ];
+          "UI_Composition_Desktop" = [ "UI_Composition" ];
+          "UI_Composition_Diagnostics" = [ "UI_Composition" ];
+          "UI_Composition_Effects" = [ "UI_Composition" ];
+          "UI_Composition_Interactions" = [ "UI_Composition" ];
+          "UI_Composition_Scenes" = [ "UI_Composition" ];
+          "UI_Core" = [ "UI" ];
+          "UI_Core_AnimationMetrics" = [ "UI_Core" ];
+          "UI_Core_Preview" = [ "UI_Core" ];
+          "UI_Input" = [ "UI" ];
+          "UI_Input_Core" = [ "UI_Input" ];
+          "UI_Input_Inking" = [ "UI_Input" ];
+          "UI_Input_Inking_Analysis" = [ "UI_Input_Inking" ];
+          "UI_Input_Inking_Core" = [ "UI_Input_Inking" ];
+          "UI_Input_Inking_Preview" = [ "UI_Input_Inking" ];
+          "UI_Input_Preview" = [ "UI_Input" ];
+          "UI_Input_Preview_Injection" = [ "UI_Input_Preview" ];
+          "UI_Input_Spatial" = [ "UI_Input" ];
+          "UI_Notifications" = [ "UI" ];
+          "UI_Notifications_Management" = [ "UI_Notifications" ];
+          "UI_Notifications_Preview" = [ "UI_Notifications" ];
+          "UI_Popups" = [ "UI" ];
+          "UI_Shell" = [ "UI" ];
+          "UI_StartScreen" = [ "UI" ];
+          "UI_Text" = [ "UI" ];
+          "UI_Text_Core" = [ "UI_Text" ];
+          "UI_UIAutomation" = [ "UI" ];
+          "UI_UIAutomation_Core" = [ "UI_UIAutomation" ];
+          "UI_ViewManagement" = [ "UI" ];
+          "UI_ViewManagement_Core" = [ "UI_ViewManagement" ];
+          "UI_WebUI" = [ "UI" ];
+          "UI_WebUI_Core" = [ "UI_WebUI" ];
+          "UI_WindowManagement" = [ "UI" ];
+          "UI_WindowManagement_Preview" = [ "UI_WindowManagement" ];
+          "Wdk" = [ "Win32_Foundation" ];
+          "Wdk_Devices" = [ "Wdk" ];
+          "Wdk_Devices_HumanInterfaceDevice" = [ "Wdk_Devices" ];
+          "Wdk_Foundation" = [ "Wdk" ];
+          "Wdk_Graphics" = [ "Wdk" ];
+          "Wdk_Graphics_Direct3D" = [ "Wdk_Graphics" ];
+          "Wdk_NetworkManagement" = [ "Wdk" ];
+          "Wdk_NetworkManagement_Ndis" = [ "Wdk_NetworkManagement" ];
+          "Wdk_NetworkManagement_WindowsFilteringPlatform" = [ "Wdk_NetworkManagement" ];
+          "Wdk_Storage" = [ "Wdk" ];
+          "Wdk_Storage_FileSystem" = [ "Wdk_Storage" ];
+          "Wdk_Storage_FileSystem_Minifilters" = [ "Wdk_Storage_FileSystem" ];
+          "Wdk_System" = [ "Wdk" ];
+          "Wdk_System_IO" = [ "Wdk_System" ];
+          "Wdk_System_OfflineRegistry" = [ "Wdk_System" ];
+          "Wdk_System_Registry" = [ "Wdk_System" ];
+          "Wdk_System_SystemInformation" = [ "Wdk_System" ];
+          "Wdk_System_SystemServices" = [ "Wdk_System" ];
+          "Wdk_System_Threading" = [ "Wdk_System" ];
+          "Web" = [ "Foundation" ];
+          "Web_AtomPub" = [ "Web" ];
+          "Web_Http" = [ "Web" ];
+          "Web_Http_Diagnostics" = [ "Web_Http" ];
+          "Web_Http_Filters" = [ "Web_Http" ];
+          "Web_Http_Headers" = [ "Web_Http" ];
+          "Web_Syndication" = [ "Web" ];
+          "Web_UI" = [ "Web" ];
+          "Web_UI_Interop" = [ "Web_UI" ];
+          "Win32" = [ "Win32_Foundation" ];
+          "Win32_AI" = [ "Win32" ];
+          "Win32_AI_MachineLearning" = [ "Win32_AI" ];
+          "Win32_AI_MachineLearning_DirectML" = [ "Win32_AI_MachineLearning" ];
+          "Win32_AI_MachineLearning_WinML" = [ "Win32_AI_MachineLearning" ];
+          "Win32_Data" = [ "Win32" ];
+          "Win32_Data_HtmlHelp" = [ "Win32_Data" ];
+          "Win32_Data_RightsManagement" = [ "Win32_Data" ];
+          "Win32_Data_Xml" = [ "Win32_Data" ];
+          "Win32_Data_Xml_MsXml" = [ "Win32_Data_Xml" ];
+          "Win32_Data_Xml_XmlLite" = [ "Win32_Data_Xml" ];
+          "Win32_Devices" = [ "Win32" ];
+          "Win32_Devices_AllJoyn" = [ "Win32_Devices" ];
+          "Win32_Devices_BiometricFramework" = [ "Win32_Devices" ];
+          "Win32_Devices_Bluetooth" = [ "Win32_Devices" ];
+          "Win32_Devices_Communication" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceAccess" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceAndDriverInstallation" = [ "Win32_Devices" ];
+          "Win32_Devices_DeviceQuery" = [ "Win32_Devices" ];
+          "Win32_Devices_Display" = [ "Win32_Devices" ];
+          "Win32_Devices_Enumeration" = [ "Win32_Devices" ];
+          "Win32_Devices_Enumeration_Pnp" = [ "Win32_Devices_Enumeration" ];
+          "Win32_Devices_Fax" = [ "Win32_Devices" ];
+          "Win32_Devices_FunctionDiscovery" = [ "Win32_Devices" ];
+          "Win32_Devices_Geolocation" = [ "Win32_Devices" ];
+          "Win32_Devices_HumanInterfaceDevice" = [ "Win32_Devices" ];
+          "Win32_Devices_ImageAcquisition" = [ "Win32_Devices" ];
+          "Win32_Devices_PortableDevices" = [ "Win32_Devices" ];
+          "Win32_Devices_Properties" = [ "Win32_Devices" ];
+          "Win32_Devices_Pwm" = [ "Win32_Devices" ];
+          "Win32_Devices_Sensors" = [ "Win32_Devices" ];
+          "Win32_Devices_SerialCommunication" = [ "Win32_Devices" ];
+          "Win32_Devices_Tapi" = [ "Win32_Devices" ];
+          "Win32_Devices_Usb" = [ "Win32_Devices" ];
+          "Win32_Devices_WebServicesOnDevices" = [ "Win32_Devices" ];
+          "Win32_Foundation" = [ "Win32" ];
+          "Win32_Gaming" = [ "Win32" ];
+          "Win32_Globalization" = [ "Win32" ];
+          "Win32_Graphics" = [ "Win32" ];
+          "Win32_Graphics_CompositionSwapchain" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DXCore" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct2D" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct2D_Common" = [ "Win32_Graphics_Direct2D" ];
+          "Win32_Graphics_Direct3D" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D10" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D11" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D11on12" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D12" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D9" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D9on12" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Direct3D_Dxc" = [ "Win32_Graphics_Direct3D" ];
+          "Win32_Graphics_Direct3D_Fxc" = [ "Win32_Graphics_Direct3D" ];
+          "Win32_Graphics_DirectComposition" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DirectDraw" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DirectManipulation" = [ "Win32_Graphics" ];
+          "Win32_Graphics_DirectWrite" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Dwm" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Dxgi" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Dxgi_Common" = [ "Win32_Graphics_Dxgi" ];
+          "Win32_Graphics_Gdi" = [ "Win32_Graphics" ];
+          "Win32_Graphics_GdiPlus" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Hlsl" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Imaging" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Imaging_D2D" = [ "Win32_Graphics_Imaging" ];
+          "Win32_Graphics_OpenGL" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Printing" = [ "Win32_Graphics" ];
+          "Win32_Graphics_Printing_PrintTicket" = [ "Win32_Graphics_Printing" ];
+          "Win32_Management" = [ "Win32" ];
+          "Win32_Management_MobileDeviceManagementRegistration" = [ "Win32_Management" ];
+          "Win32_Media" = [ "Win32" ];
+          "Win32_Media_Audio" = [ "Win32_Media" ];
+          "Win32_Media_Audio_Apo" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_DirectMusic" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_DirectSound" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_Endpoints" = [ "Win32_Media_Audio" ];
+          "Win32_Media_Audio_XAudio2" = [ "Win32_Media_Audio" ];
+          "Win32_Media_DeviceManager" = [ "Win32_Media" ];
+          "Win32_Media_DirectShow" = [ "Win32_Media" ];
+          "Win32_Media_DirectShow_Tv" = [ "Win32_Media_DirectShow" ];
+          "Win32_Media_DirectShow_Xml" = [ "Win32_Media_DirectShow" ];
+          "Win32_Media_DxMediaObjects" = [ "Win32_Media" ];
+          "Win32_Media_KernelStreaming" = [ "Win32_Media" ];
+          "Win32_Media_LibrarySharingServices" = [ "Win32_Media" ];
+          "Win32_Media_MediaFoundation" = [ "Win32_Media" ];
+          "Win32_Media_MediaPlayer" = [ "Win32_Media" ];
+          "Win32_Media_Multimedia" = [ "Win32_Media" ];
+          "Win32_Media_PictureAcquisition" = [ "Win32_Media" ];
+          "Win32_Media_Speech" = [ "Win32_Media" ];
+          "Win32_Media_Streaming" = [ "Win32_Media" ];
+          "Win32_Media_WindowsMediaFormat" = [ "Win32_Media" ];
+          "Win32_NetworkManagement" = [ "Win32" ];
+          "Win32_NetworkManagement_Dhcp" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Dns" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_InternetConnectionWizard" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_IpHelper" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_MobileBroadband" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Multicast" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Ndis" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetBios" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetManagement" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetShell" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetworkDiagnosticsFramework" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_NetworkPolicyServer" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_P2P" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_QoS" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Rras" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_Snmp" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WNet" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WebDav" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WiFi" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsConnectNow" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsConnectionManager" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsFilteringPlatform" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsFirewall" = [ "Win32_NetworkManagement" ];
+          "Win32_NetworkManagement_WindowsNetworkVirtualization" = [ "Win32_NetworkManagement" ];
+          "Win32_Networking" = [ "Win32" ];
+          "Win32_Networking_ActiveDirectory" = [ "Win32_Networking" ];
+          "Win32_Networking_BackgroundIntelligentTransferService" = [ "Win32_Networking" ];
+          "Win32_Networking_Clustering" = [ "Win32_Networking" ];
+          "Win32_Networking_HttpServer" = [ "Win32_Networking" ];
+          "Win32_Networking_Ldap" = [ "Win32_Networking" ];
+          "Win32_Networking_NetworkListManager" = [ "Win32_Networking" ];
+          "Win32_Networking_RemoteDifferentialCompression" = [ "Win32_Networking" ];
+          "Win32_Networking_WebSocket" = [ "Win32_Networking" ];
+          "Win32_Networking_WinHttp" = [ "Win32_Networking" ];
+          "Win32_Networking_WinInet" = [ "Win32_Networking" ];
+          "Win32_Networking_WinSock" = [ "Win32_Networking" ];
+          "Win32_Networking_WindowsWebServices" = [ "Win32_Networking" ];
+          "Win32_Security" = [ "Win32" ];
+          "Win32_Security_AppLocker" = [ "Win32_Security" ];
+          "Win32_Security_Authentication" = [ "Win32_Security" ];
+          "Win32_Security_Authentication_Identity" = [ "Win32_Security_Authentication" ];
+          "Win32_Security_Authentication_Identity_Provider" = [ "Win32_Security_Authentication_Identity" ];
+          "Win32_Security_Authorization" = [ "Win32_Security" ];
+          "Win32_Security_Authorization_UI" = [ "Win32_Security_Authorization" ];
+          "Win32_Security_ConfigurationSnapin" = [ "Win32_Security" ];
+          "Win32_Security_Credentials" = [ "Win32_Security" ];
+          "Win32_Security_Cryptography" = [ "Win32_Security" ];
+          "Win32_Security_Cryptography_Catalog" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_Certificates" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_Sip" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_Cryptography_UI" = [ "Win32_Security_Cryptography" ];
+          "Win32_Security_DiagnosticDataQuery" = [ "Win32_Security" ];
+          "Win32_Security_DirectoryServices" = [ "Win32_Security" ];
+          "Win32_Security_EnterpriseData" = [ "Win32_Security" ];
+          "Win32_Security_ExtensibleAuthenticationProtocol" = [ "Win32_Security" ];
+          "Win32_Security_Isolation" = [ "Win32_Security" ];
+          "Win32_Security_LicenseProtection" = [ "Win32_Security" ];
+          "Win32_Security_NetworkAccessProtection" = [ "Win32_Security" ];
+          "Win32_Security_Tpm" = [ "Win32_Security" ];
+          "Win32_Security_WinTrust" = [ "Win32_Security" ];
+          "Win32_Security_WinWlx" = [ "Win32_Security" ];
+          "Win32_Storage" = [ "Win32" ];
+          "Win32_Storage_Cabinets" = [ "Win32_Storage" ];
+          "Win32_Storage_CloudFilters" = [ "Win32_Storage" ];
+          "Win32_Storage_Compression" = [ "Win32_Storage" ];
+          "Win32_Storage_DataDeduplication" = [ "Win32_Storage" ];
+          "Win32_Storage_DistributedFileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_EnhancedStorage" = [ "Win32_Storage" ];
+          "Win32_Storage_FileHistory" = [ "Win32_Storage" ];
+          "Win32_Storage_FileServerResourceManager" = [ "Win32_Storage" ];
+          "Win32_Storage_FileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_Imapi" = [ "Win32_Storage" ];
+          "Win32_Storage_IndexServer" = [ "Win32_Storage" ];
+          "Win32_Storage_InstallableFileSystems" = [ "Win32_Storage" ];
+          "Win32_Storage_IscsiDisc" = [ "Win32_Storage" ];
+          "Win32_Storage_Jet" = [ "Win32_Storage" ];
+          "Win32_Storage_Nvme" = [ "Win32_Storage" ];
+          "Win32_Storage_OfflineFiles" = [ "Win32_Storage" ];
+          "Win32_Storage_OperationRecorder" = [ "Win32_Storage" ];
+          "Win32_Storage_Packaging" = [ "Win32_Storage" ];
+          "Win32_Storage_Packaging_Appx" = [ "Win32_Storage_Packaging" ];
+          "Win32_Storage_Packaging_Opc" = [ "Win32_Storage_Packaging" ];
+          "Win32_Storage_ProjectedFileSystem" = [ "Win32_Storage" ];
+          "Win32_Storage_StructuredStorage" = [ "Win32_Storage" ];
+          "Win32_Storage_Vhd" = [ "Win32_Storage" ];
+          "Win32_Storage_VirtualDiskService" = [ "Win32_Storage" ];
+          "Win32_Storage_Vss" = [ "Win32_Storage" ];
+          "Win32_Storage_Xps" = [ "Win32_Storage" ];
+          "Win32_Storage_Xps_Printing" = [ "Win32_Storage_Xps" ];
+          "Win32_System" = [ "Win32" ];
+          "Win32_System_AddressBook" = [ "Win32_System" ];
+          "Win32_System_Antimalware" = [ "Win32_System" ];
+          "Win32_System_ApplicationInstallationAndServicing" = [ "Win32_System" ];
+          "Win32_System_ApplicationVerifier" = [ "Win32_System" ];
+          "Win32_System_AssessmentTool" = [ "Win32_System" ];
+          "Win32_System_ClrHosting" = [ "Win32_System" ];
+          "Win32_System_Com" = [ "Win32_System" ];
+          "Win32_System_Com_CallObj" = [ "Win32_System_Com" ];
+          "Win32_System_Com_ChannelCredentials" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Events" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Marshal" = [ "Win32_System_Com" ];
+          "Win32_System_Com_StructuredStorage" = [ "Win32_System_Com" ];
+          "Win32_System_Com_UI" = [ "Win32_System_Com" ];
+          "Win32_System_Com_Urlmon" = [ "Win32_System_Com" ];
+          "Win32_System_ComponentServices" = [ "Win32_System" ];
+          "Win32_System_Console" = [ "Win32_System" ];
+          "Win32_System_Contacts" = [ "Win32_System" ];
+          "Win32_System_CorrelationVector" = [ "Win32_System" ];
+          "Win32_System_DataExchange" = [ "Win32_System" ];
+          "Win32_System_DeploymentServices" = [ "Win32_System" ];
+          "Win32_System_DesktopSharing" = [ "Win32_System" ];
+          "Win32_System_DeveloperLicensing" = [ "Win32_System" ];
+          "Win32_System_Diagnostics" = [ "Win32_System" ];
+          "Win32_System_Diagnostics_Ceip" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ClrProfiling" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_Debug" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_Debug_ActiveScript" = [ "Win32_System_Diagnostics_Debug" ];
+          "Win32_System_Diagnostics_Debug_Extensions" = [ "Win32_System_Diagnostics_Debug" ];
+          "Win32_System_Diagnostics_Etw" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ProcessSnapshotting" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_ToolHelp" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_Diagnostics_TraceLogging" = [ "Win32_System_Diagnostics" ];
+          "Win32_System_DistributedTransactionCoordinator" = [ "Win32_System" ];
+          "Win32_System_Environment" = [ "Win32_System" ];
+          "Win32_System_ErrorReporting" = [ "Win32_System" ];
+          "Win32_System_EventCollector" = [ "Win32_System" ];
+          "Win32_System_EventLog" = [ "Win32_System" ];
+          "Win32_System_EventNotificationService" = [ "Win32_System" ];
+          "Win32_System_GroupPolicy" = [ "Win32_System" ];
+          "Win32_System_HostCompute" = [ "Win32_System" ];
+          "Win32_System_HostComputeNetwork" = [ "Win32_System" ];
+          "Win32_System_HostComputeSystem" = [ "Win32_System" ];
+          "Win32_System_Hypervisor" = [ "Win32_System" ];
+          "Win32_System_IO" = [ "Win32_System" ];
+          "Win32_System_Iis" = [ "Win32_System" ];
+          "Win32_System_Ioctl" = [ "Win32_System" ];
+          "Win32_System_JobObjects" = [ "Win32_System" ];
+          "Win32_System_Js" = [ "Win32_System" ];
+          "Win32_System_Kernel" = [ "Win32_System" ];
+          "Win32_System_LibraryLoader" = [ "Win32_System" ];
+          "Win32_System_Mailslots" = [ "Win32_System" ];
+          "Win32_System_Mapi" = [ "Win32_System" ];
+          "Win32_System_Memory" = [ "Win32_System" ];
+          "Win32_System_Memory_NonVolatile" = [ "Win32_System_Memory" ];
+          "Win32_System_MessageQueuing" = [ "Win32_System" ];
+          "Win32_System_MixedReality" = [ "Win32_System" ];
+          "Win32_System_Mmc" = [ "Win32_System" ];
+          "Win32_System_Ole" = [ "Win32_System" ];
+          "Win32_System_ParentalControls" = [ "Win32_System" ];
+          "Win32_System_PasswordManagement" = [ "Win32_System" ];
+          "Win32_System_Performance" = [ "Win32_System" ];
+          "Win32_System_Performance_HardwareCounterProfiling" = [ "Win32_System_Performance" ];
+          "Win32_System_Pipes" = [ "Win32_System" ];
+          "Win32_System_Power" = [ "Win32_System" ];
+          "Win32_System_ProcessStatus" = [ "Win32_System" ];
+          "Win32_System_RealTimeCommunications" = [ "Win32_System" ];
+          "Win32_System_Recovery" = [ "Win32_System" ];
+          "Win32_System_Registry" = [ "Win32_System" ];
+          "Win32_System_RemoteAssistance" = [ "Win32_System" ];
+          "Win32_System_RemoteDesktop" = [ "Win32_System" ];
+          "Win32_System_RemoteManagement" = [ "Win32_System" ];
+          "Win32_System_RestartManager" = [ "Win32_System" ];
+          "Win32_System_Restore" = [ "Win32_System" ];
+          "Win32_System_Rpc" = [ "Win32_System" ];
+          "Win32_System_Search" = [ "Win32_System" ];
+          "Win32_System_Search_Common" = [ "Win32_System_Search" ];
+          "Win32_System_SecurityCenter" = [ "Win32_System" ];
+          "Win32_System_ServerBackup" = [ "Win32_System" ];
+          "Win32_System_Services" = [ "Win32_System" ];
+          "Win32_System_SettingsManagementInfrastructure" = [ "Win32_System" ];
+          "Win32_System_SetupAndMigration" = [ "Win32_System" ];
+          "Win32_System_Shutdown" = [ "Win32_System" ];
+          "Win32_System_SideShow" = [ "Win32_System" ];
+          "Win32_System_StationsAndDesktops" = [ "Win32_System" ];
+          "Win32_System_SubsystemForLinux" = [ "Win32_System" ];
+          "Win32_System_SystemInformation" = [ "Win32_System" ];
+          "Win32_System_SystemServices" = [ "Win32_System" ];
+          "Win32_System_TaskScheduler" = [ "Win32_System" ];
+          "Win32_System_Threading" = [ "Win32_System" ];
+          "Win32_System_Time" = [ "Win32_System" ];
+          "Win32_System_TpmBaseServices" = [ "Win32_System" ];
+          "Win32_System_TransactionServer" = [ "Win32_System" ];
+          "Win32_System_UpdateAgent" = [ "Win32_System" ];
+          "Win32_System_UpdateAssessment" = [ "Win32_System" ];
+          "Win32_System_UserAccessLogging" = [ "Win32_System" ];
+          "Win32_System_Variant" = [ "Win32_System" ];
+          "Win32_System_VirtualDosMachines" = [ "Win32_System" ];
+          "Win32_System_WinRT" = [ "Win32_System" ];
+          "Win32_System_WinRT_AllJoyn" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Composition" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_CoreInputView" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Direct3D11" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Display" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Graphics" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Graphics_Capture" = [ "Win32_System_WinRT_Graphics" ];
+          "Win32_System_WinRT_Graphics_Direct2D" = [ "Win32_System_WinRT_Graphics" ];
+          "Win32_System_WinRT_Graphics_Imaging" = [ "Win32_System_WinRT_Graphics" ];
+          "Win32_System_WinRT_Holographic" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Isolation" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_ML" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Media" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Metadata" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Pdf" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Printing" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Shell" = [ "Win32_System_WinRT" ];
+          "Win32_System_WinRT_Storage" = [ "Win32_System_WinRT" ];
+          "Win32_System_WindowsProgramming" = [ "Win32_System" ];
+          "Win32_System_WindowsSync" = [ "Win32_System" ];
+          "Win32_System_Wmi" = [ "Win32_System" ];
+          "Win32_UI" = [ "Win32" ];
+          "Win32_UI_Accessibility" = [ "Win32_UI" ];
+          "Win32_UI_Animation" = [ "Win32_UI" ];
+          "Win32_UI_ColorSystem" = [ "Win32_UI" ];
+          "Win32_UI_Controls" = [ "Win32_UI" ];
+          "Win32_UI_Controls_Dialogs" = [ "Win32_UI_Controls" ];
+          "Win32_UI_Controls_RichEdit" = [ "Win32_UI_Controls" ];
+          "Win32_UI_HiDpi" = [ "Win32_UI" ];
+          "Win32_UI_Input" = [ "Win32_UI" ];
+          "Win32_UI_Input_Ime" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Ink" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_KeyboardAndMouse" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Pointer" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Radial" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_Touch" = [ "Win32_UI_Input" ];
+          "Win32_UI_Input_XboxController" = [ "Win32_UI_Input" ];
+          "Win32_UI_InteractionContext" = [ "Win32_UI" ];
+          "Win32_UI_LegacyWindowsEnvironmentFeatures" = [ "Win32_UI" ];
+          "Win32_UI_Magnification" = [ "Win32_UI" ];
+          "Win32_UI_Notifications" = [ "Win32_UI" ];
+          "Win32_UI_Ribbon" = [ "Win32_UI" ];
+          "Win32_UI_Shell" = [ "Win32_UI" ];
+          "Win32_UI_Shell_Common" = [ "Win32_UI_Shell" ];
+          "Win32_UI_Shell_PropertiesSystem" = [ "Win32_UI_Shell" ];
+          "Win32_UI_TabletPC" = [ "Win32_UI" ];
+          "Win32_UI_TextServices" = [ "Win32_UI" ];
+          "Win32_UI_WindowsAndMessaging" = [ "Win32_UI" ];
+          "Win32_UI_Wpf" = [ "Win32_UI" ];
+          "Win32_Web" = [ "Win32" ];
+          "Win32_Web_InternetExplorer" = [ "Win32_Web" ];
+          "implement" = [ "windows-implement" "windows-interface" "windows-core/implement" ];
+          "windows-implement" = [ "dep:windows-implement" ];
+          "windows-interface" = [ "dep:windows-interface" ];
+        };
+        resolvedDefaultFeatures = [ "Win32" "Win32_Foundation" "Win32_System" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_Kernel" "Win32_System_Memory" "Win32_System_SystemInformation" "default" ];
+      };
+      "windows-core 0.52.0" = rec {
         crateName = "windows-core";
         version = "0.52.0";
         edition = "2021";
@@ -15180,12 +16705,53 @@ rec {
         dependencies = [
           {
             name = "windows-targets";
-            packageId = "windows-targets 0.52.0";
+            packageId = "windows-targets 0.52.5";
+          }
+        ];
+        features = { };
+        resolvedDefaultFeatures = [ "default" ];
+      };
+      "windows-core 0.54.0" = rec {
+        crateName = "windows-core";
+        version = "0.54.0";
+        edition = "2021";
+        sha256 = "0r8x2sgl4qq1h23ldf4z7cj213k0bz7479m8a156h79mi6f1nrhj";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-result";
+            packageId = "windows-result";
+          }
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.5";
           }
         ];
         features = { };
         resolvedDefaultFeatures = [ "default" ];
       };
+      "windows-result" = rec {
+        crateName = "windows-result";
+        version = "0.1.2";
+        edition = "2021";
+        sha256 = "1y274q1v0vy21lhkgslpxpq1m08hvr1mcs2l88h1b1gcx0136f2y";
+        libName = "windows_result";
+        authors = [
+          "Microsoft"
+        ];
+        dependencies = [
+          {
+            name = "windows-targets";
+            packageId = "windows-targets 0.52.5";
+          }
+        ];
+        features = {
+          "default" = [ "std" ];
+        };
+        resolvedDefaultFeatures = [ "default" "std" ];
+      };
       "windows-sys 0.48.0" = rec {
         crateName = "windows-sys";
         version = "0.48.0";
@@ -15490,7 +17056,7 @@ rec {
         dependencies = [
           {
             name = "windows-targets";
-            packageId = "windows-targets 0.52.0";
+            packageId = "windows-targets 0.52.5";
           }
         ];
         features = {
@@ -15724,7 +17290,7 @@ rec {
           "Win32_Web" = [ "Win32" ];
           "Win32_Web_InternetExplorer" = [ "Win32_Web" ];
         };
-        resolvedDefaultFeatures = [ "Wdk" "Wdk_Foundation" "Wdk_Storage" "Wdk_Storage_FileSystem" "Win32" "Win32_Foundation" "Win32_NetworkManagement" "Win32_NetworkManagement_IpHelper" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Security_Authentication" "Win32_Security_Authentication_Identity" "Win32_Security_Credentials" "Win32_Security_Cryptography" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Com" "Win32_System_Console" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_IO" "Win32_System_LibraryLoader" "Win32_System_Memory" "Win32_System_Threading" "Win32_System_WindowsProgramming" "Win32_UI" "Win32_UI_Shell" "default" ];
+        resolvedDefaultFeatures = [ "Wdk" "Wdk_Foundation" "Wdk_Storage" "Wdk_Storage_FileSystem" "Win32" "Win32_Foundation" "Win32_NetworkManagement" "Win32_NetworkManagement_IpHelper" "Win32_Networking" "Win32_Networking_WinSock" "Win32_Security" "Win32_Security_Authentication" "Win32_Security_Authentication_Identity" "Win32_Security_Credentials" "Win32_Security_Cryptography" "Win32_Storage" "Win32_Storage_FileSystem" "Win32_System" "Win32_System_Com" "Win32_System_Console" "Win32_System_Diagnostics" "Win32_System_Diagnostics_Debug" "Win32_System_IO" "Win32_System_LibraryLoader" "Win32_System_Memory" "Win32_System_Threading" "Win32_System_WindowsProgramming" "Win32_UI" "Win32_UI_Input" "Win32_UI_Input_KeyboardAndMouse" "Win32_UI_Shell" "default" ];
       };
       "windows-targets 0.48.5" = rec {
         crateName = "windows-targets";
@@ -15773,49 +17339,54 @@ rec {
         ];
 
       };
-      "windows-targets 0.52.0" = rec {
+      "windows-targets 0.52.5" = rec {
         crateName = "windows-targets";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "1kg7a27ynzw8zz3krdgy6w5gbqcji27j1sz4p7xk2j5j8082064a";
+        sha256 = "1sz7jrnkygmmlj1ia8fk85wbyil450kq5qkh5qh9sh2rcnj161vg";
         authors = [
           "Microsoft"
         ];
         dependencies = [
           {
             name = "windows_aarch64_gnullvm";
-            packageId = "windows_aarch64_gnullvm 0.52.0";
+            packageId = "windows_aarch64_gnullvm 0.52.5";
             target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "aarch64-pc-windows-gnullvm");
           }
           {
             name = "windows_aarch64_msvc";
-            packageId = "windows_aarch64_msvc 0.52.0";
+            packageId = "windows_aarch64_msvc 0.52.5";
             target = { target, features }: (("aarch64" == target."arch" or null) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
           }
           {
             name = "windows_i686_gnu";
-            packageId = "windows_i686_gnu 0.52.0";
-            target = { target, features }: (("x86" == target."arch" or null) && ("gnu" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
+            packageId = "windows_i686_gnu 0.52.5";
+            target = { target, features }: (("x86" == target."arch" or null) && ("gnu" == target."env" or null) && (!("llvm" == target."abi" or null)) && (!(target."windows_raw_dylib" or false)));
+          }
+          {
+            name = "windows_i686_gnullvm";
+            packageId = "windows_i686_gnullvm";
+            target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "i686-pc-windows-gnullvm");
           }
           {
             name = "windows_i686_msvc";
-            packageId = "windows_i686_msvc 0.52.0";
+            packageId = "windows_i686_msvc 0.52.5";
             target = { target, features }: (("x86" == target."arch" or null) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
           }
           {
             name = "windows_x86_64_gnu";
-            packageId = "windows_x86_64_gnu 0.52.0";
+            packageId = "windows_x86_64_gnu 0.52.5";
             target = { target, features }: (("x86_64" == target."arch" or null) && ("gnu" == target."env" or null) && (!("llvm" == target."abi" or null)) && (!(target."windows_raw_dylib" or false)));
           }
           {
             name = "windows_x86_64_gnullvm";
-            packageId = "windows_x86_64_gnullvm 0.52.0";
+            packageId = "windows_x86_64_gnullvm 0.52.5";
             target = { target, features }: (pkgs.rust.lib.toRustTarget stdenv.hostPlatform == "x86_64-pc-windows-gnullvm");
           }
           {
             name = "windows_x86_64_msvc";
-            packageId = "windows_x86_64_msvc 0.52.0";
-            target = { target, features }: (("x86_64" == target."arch" or null) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
+            packageId = "windows_x86_64_msvc 0.52.5";
+            target = { target, features }: ((("x86_64" == target."arch" or null) || ("arm64ec" == target."arch" or null)) && ("msvc" == target."env" or null) && (!(target."windows_raw_dylib" or false)));
           }
         ];
 
@@ -15830,11 +17401,11 @@ rec {
         ];
 
       };
-      "windows_aarch64_gnullvm 0.52.0" = rec {
+      "windows_aarch64_gnullvm 0.52.5" = rec {
         crateName = "windows_aarch64_gnullvm";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "1shmn1kbdc0bpphcxz0vlph96bxz0h1jlmh93s9agf2dbpin8xyb";
+        sha256 = "0qrjimbj67nnyn7zqy15mzzmqg0mn5gsr2yciqjxm3cb3vbyx23h";
         authors = [
           "Microsoft"
         ];
@@ -15850,11 +17421,11 @@ rec {
         ];
 
       };
-      "windows_aarch64_msvc 0.52.0" = rec {
+      "windows_aarch64_msvc 0.52.5" = rec {
         crateName = "windows_aarch64_msvc";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "1vvmy1ypvzdvxn9yf0b8ygfl85gl2gpcyvsvqppsmlpisil07amv";
+        sha256 = "1dmga8kqlmln2ibckk6mxc9n59vdg8ziqa2zr8awcl720hazv1cr";
         authors = [
           "Microsoft"
         ];
@@ -15870,11 +17441,21 @@ rec {
         ];
 
       };
-      "windows_i686_gnu 0.52.0" = rec {
+      "windows_i686_gnu 0.52.5" = rec {
         crateName = "windows_i686_gnu";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "04zkglz4p3pjsns5gbz85v4s5aw102raz4spj4b0lmm33z5kg1m2";
+        sha256 = "0w4np3l6qwlra9s2xpflqrs60qk1pz6ahhn91rr74lvdy4y0gfl8";
+        authors = [
+          "Microsoft"
+        ];
+
+      };
+      "windows_i686_gnullvm" = rec {
+        crateName = "windows_i686_gnullvm";
+        version = "0.52.5";
+        edition = "2021";
+        sha256 = "1s9f4gff0cixd86mw3n63rpmsm4pmr4ffndl6s7qa2h35492dx47";
         authors = [
           "Microsoft"
         ];
@@ -15890,11 +17471,11 @@ rec {
         ];
 
       };
-      "windows_i686_msvc 0.52.0" = rec {
+      "windows_i686_msvc 0.52.5" = rec {
         crateName = "windows_i686_msvc";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "16kvmbvx0vr0zbgnaz6nsks9ycvfh5xp05bjrhq65kj623iyirgz";
+        sha256 = "1gw7fklxywgpnwbwg43alb4hm0qjmx72hqrlwy5nanrxs7rjng6v";
         authors = [
           "Microsoft"
         ];
@@ -15910,11 +17491,11 @@ rec {
         ];
 
       };
-      "windows_x86_64_gnu 0.52.0" = rec {
+      "windows_x86_64_gnu 0.52.5" = rec {
         crateName = "windows_x86_64_gnu";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "1zdy4qn178sil5sdm63lm7f0kkcjg6gvdwmcprd2yjmwn8ns6vrx";
+        sha256 = "1n8p2mcf3lw6300k77a0knksssmgwb9hynl793mhkzyydgvlchjf";
         authors = [
           "Microsoft"
         ];
@@ -15930,11 +17511,11 @@ rec {
         ];
 
       };
-      "windows_x86_64_gnullvm 0.52.0" = rec {
+      "windows_x86_64_gnullvm 0.52.5" = rec {
         crateName = "windows_x86_64_gnullvm";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "17lllq4l2k1lqgcnw1cccphxp9vs7inq99kjlm2lfl9zklg7wr8s";
+        sha256 = "15n56jrh4s5bz66zimavr1rmcaw6wa306myrvmbc6rydhbj9h8l5";
         authors = [
           "Microsoft"
         ];
@@ -15950,11 +17531,11 @@ rec {
         ];
 
       };
-      "windows_x86_64_msvc 0.52.0" = rec {
+      "windows_x86_64_msvc 0.52.5" = rec {
         crateName = "windows_x86_64_msvc";
-        version = "0.52.0";
+        version = "0.52.5";
         edition = "2021";
-        sha256 = "012wfq37f18c09ij5m6rniw7xxn5fcvrxbqd0wd8vgnl3hfn9yfz";
+        sha256 = "1w1bn24ap8dp9i85s8mlg8cim2bl2368bd6qyvm0xzqvzmdpxi5y";
         authors = [
           "Microsoft"
         ];
@@ -16041,18 +17622,6 @@ rec {
         };
         resolvedDefaultFeatures = [ "default" "unsupported" ];
       };
-      "xml-rs" = rec {
-        crateName = "xml-rs";
-        version = "0.8.19";
-        edition = "2021";
-        crateBin = [ ];
-        sha256 = "0nnpvk3fv32hgh7vs9gbg2swmzxx5yz73f4b7rak7q39q2x9rjqg";
-        libName = "xml";
-        authors = [
-          "Vladimir Matveev <vmatveev@citrine.cc>"
-        ];
-
-      };
       "xz2" = rec {
         crateName = "xz2";
         version = "0.1.7";
@@ -16594,8 +18163,6 @@ rec {
             buildRustCrateForPkgsFunc pkgs
               (
                 crateConfig // {
-                  # https://github.com/NixOS/nixpkgs/issues/218712
-                  dontStrip = stdenv.hostPlatform.isDarwin;
                   src = crateConfig.src or (
                     pkgs.fetchurl rec {
                       name = "${crateConfig.crateName}-${crateConfig.version}.tar.gz";
diff --git a/tvix/Cargo.toml b/tvix/Cargo.toml
index 847d9aceec..ed5c6d0d8b 100644
--- a/tvix/Cargo.toml
+++ b/tvix/Cargo.toml
@@ -28,6 +28,7 @@ members = [
   "nix-compat",
   "serde",
   "store",
+  "tracing",
 ]
 
 [workspace.lints.clippy]
diff --git a/tvix/README.md b/tvix/README.md
index fb536bc229..7f3fcc0615 100644
--- a/tvix/README.md
+++ b/tvix/README.md
@@ -104,6 +104,11 @@ Rust projects under `//tvix`, be sure to run
 `mg run //tools:crate2nix-generate` in `//tvix` itself and commit the changes
 to the generated `Cargo.nix` file. This only applies to the full TVL checkout.
 
+When adding or removing a Cargo feature for a crate, you will want to add it to
+the feature powerset that gets tested in CI. Each crate has a `default.nix` with
+a `mkFeaturePowerset` invocation; modify its `features` list to include or remove
+the feature (see `tvix/build/default.nix` for an example). Note that you don't
+want to add "collection" features, such as `fs` for tvix-[ca]store, or `default`.
+
 ## License structure
 
 All code implemented for Tvix is licensed under the GPL-3.0, with the
diff --git a/tvix/build-go/build.pb.go b/tvix/build-go/build.pb.go
index 9c6bd5f248..d7f7fd82ab 100644
--- a/tvix/build-go/build.pb.go
+++ b/tvix/build-go/build.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/build/protos/build.proto
 
diff --git a/tvix/build-go/rpc_build.pb.go b/tvix/build-go/rpc_build.pb.go
index aae0cd9d47..361cd179b2 100644
--- a/tvix/build-go/rpc_build.pb.go
+++ b/tvix/build-go/rpc_build.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/build/protos/rpc_build.proto
 
diff --git a/tvix/build/Cargo.toml b/tvix/build/Cargo.toml
index cf25465cca..6faa3ad7ab 100644
--- a/tvix/build/Cargo.toml
+++ b/tvix/build/Cargo.toml
@@ -13,8 +13,8 @@ tokio = { version = "1.32.0" }
 tokio-listener = { version = "0.4.1", features = [ "tonic011" ] }
 tonic = { version = "0.11.0", features = ["tls", "tls-roots"] }
 tvix-castore = { path = "../castore" }
-tracing = "0.1.37"
-tracing-subscriber = "0.3.16"
+tvix-tracing = { path = "../tracing" }
+tracing = "0.1.40"
 url = "2.4.0"
 
 [dependencies.tonic-reflection]
@@ -27,7 +27,7 @@ tonic-build = "0.11.0"
 
 [features]
 default = []
-tonic-reflection = ["dep:tonic-reflection"]
+tonic-reflection = ["dep:tonic-reflection", "tvix-castore/tonic-reflection"]
 
 [dev-dependencies]
 rstest = "0.19.0"
diff --git a/tvix/build/default.nix b/tvix/build/default.nix
index a2a3bea0c5..aafab92fd5 100644
--- a/tvix/build/default.nix
+++ b/tvix/build/default.nix
@@ -1,5 +1,11 @@
-{ depot, pkgs, ... }:
+{ depot, lib, ... }:
 
-depot.tvix.crates.workspaceMembers.tvix-build.build.override {
+(depot.tvix.crates.workspaceMembers.tvix-build.build.override {
   runTests = true;
-}
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "tonic-reflection" ];
+  };
+})
diff --git a/tvix/build/src/bin/tvix-build.rs b/tvix/build/src/bin/tvix-build.rs
index 07d7e30dfd..26f2044af3 100644
--- a/tvix/build/src/bin/tvix-build.rs
+++ b/tvix/build/src/bin/tvix-build.rs
@@ -7,7 +7,6 @@ use tokio_listener::SystemOptions;
 use tokio_listener::UserOptions;
 use tonic::{self, transport::Server};
 use tracing::{info, Level};
-use tracing_subscriber::prelude::*;
 use tvix_build::{
     buildservice,
     proto::{build_service_server::BuildServiceServer, GRPCBuildServiceWrapper},
@@ -23,8 +22,12 @@ use tvix_castore::proto::FILE_DESCRIPTOR_SET as CASTORE_FILE_DESCRIPTOR_SET;
 #[derive(Parser)]
 #[command(author, version, about, long_about = None)]
 struct Cli {
-    #[arg(long)]
-    log_level: Option<Level>,
+    /// A global log level to use when printing logs.
+    /// It's also possible to set `RUST_LOG` according to
+    /// `tracing_subscriber::filter::EnvFilter`, which will always have
+    /// priority.
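+    /// (e.g. `RUST_LOG=debug` or `RUST_LOG=tvix_build=debug`).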
+    #[arg(long, default_value_t=Level::INFO)]
+    log_level: Level,
 
     #[command(subcommand)]
     command: Commands,
@@ -51,16 +54,9 @@ enum Commands {
 async fn main() -> Result<(), Box<dyn std::error::Error>> {
     let cli = Cli::parse();
 
-    // configure log settings
-    let level = cli.log_level.unwrap_or(Level::INFO);
-
-    tracing_subscriber::registry()
-        .with(
-            tracing_subscriber::fmt::Layer::new()
-                .with_writer(std::io::stderr.with_max_level(level))
-                .pretty(),
-        )
-        .init();
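+    // Configure log settings via the shared tvix-tracing crate.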
+    let _ = tvix_tracing::TracingBuilder::default()
+        .level(cli.log_level)
+        .enable_progressbar();
 
     match cli.command {
         Commands::Daemon {
diff --git a/tvix/castore-go/castore.pb.go b/tvix/castore-go/castore.pb.go
index 464f1d4a41..555336418f 100644
--- a/tvix/castore-go/castore.pb.go
+++ b/tvix/castore-go/castore.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/castore/protos/castore.proto
 
diff --git a/tvix/castore-go/rpc_blobstore.pb.go b/tvix/castore-go/rpc_blobstore.pb.go
index 3607a65bbe..0a2adba4bb 100644
--- a/tvix/castore-go/rpc_blobstore.pb.go
+++ b/tvix/castore-go/rpc_blobstore.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/castore/protos/rpc_blobstore.proto
 
diff --git a/tvix/castore-go/rpc_directory.pb.go b/tvix/castore-go/rpc_directory.pb.go
index 78c4a243e3..0277425c52 100644
--- a/tvix/castore-go/rpc_directory.pb.go
+++ b/tvix/castore-go/rpc_directory.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/castore/protos/rpc_directory.proto
 
diff --git a/tvix/castore/Cargo.toml b/tvix/castore/Cargo.toml
index 4cbc29053b..9516130570 100644
--- a/tvix/castore/Cargo.toml
+++ b/tvix/castore/Cargo.toml
@@ -28,6 +28,8 @@ tokio = { version = "1.32.0", features = ["fs", "macros", "net", "rt", "rt-multi
 tonic = "0.11.0"
 tower = "0.4.13"
 tracing = "0.1.37"
+tracing-indicatif = "0.3.6"
+tvix-tracing = { path = "../tracing", features = ["tonic"] }
 url = "2.4.0"
 walkdir = "2.4.0"
 zstd = "0.13.0"
@@ -50,6 +52,10 @@ version = "0.11.0"
 optional = true
 version = "0.2.144"
 
+[dependencies.threadpool]
+version = "1.8.1"
+optional = true
+
 [dependencies.tonic-reflection]
 optional = true
 version = "0.11.0"
@@ -92,14 +98,14 @@ rstest_reuse = "0.6.0"
 xattr = "1.3.1"
 
 [features]
-default = []
+default = ["cloud"]
 cloud = [
   "dep:bigtable_rs",
   "object_store/aws",
   "object_store/azure",
   "object_store/gcp",
 ]
-fs = ["dep:libc", "dep:fuse-backend-rs"]
+fs = ["dep:fuse-backend-rs", "dep:threadpool", "dep:libc"]
 virtiofs = [
   "fs",
   "dep:vhost",
diff --git a/tvix/castore/default.nix b/tvix/castore/default.nix
index 641d883760..03a12b6c20 100644
--- a/tvix/castore/default.nix
+++ b/tvix/castore/default.nix
@@ -1,23 +1,28 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
 (depot.tvix.crates.workspaceMembers.tvix-castore.build.override {
   runTests = true;
   testPreRun = ''
     export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
   '';
-
-  # enable some optional features.
-  features = [ "default" "cloud" ];
-}).overrideAttrs (_: {
-  meta.ci.targets = [ "integration-tests" ];
-  passthru.integration-tests = depot.tvix.crates.workspaceMembers.tvix-castore.build.override {
-    runTests = true;
-    testPreRun = ''
-      export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
-      export PATH="$PATH:${pkgs.lib.makeBinPath [pkgs.cbtemulator pkgs.google-cloud-bigtable-tool]}"
+}).overrideAttrs (old: rec {
+  meta.ci.targets = [ "integration-tests" ] ++ lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = ([ "cloud" "fuse" "tonic-reflection" ]
+      # virtiofs feature currently fails to build on Darwin
+      ++ lib.optional pkgs.stdenv.isLinux "virtiofs");
+    override.testPreRun = ''
+      export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt
     '';
-
-    # enable some optional features.
-    features = [ "default" "cloud" "integration" ];
+  }) // {
+    integration-tests = depot.tvix.crates.workspaceMembers.${old.crateName}.build.override (old: {
+      runTests = true;
+      testPreRun = ''
+        export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
+        export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.cbtemulator pkgs.google-cloud-bigtable-tool ]}"
+      '';
+      features = old.features ++ [ "integration" ];
+    });
   };
 })
diff --git a/tvix/castore/src/blobservice/from_addr.rs b/tvix/castore/src/blobservice/from_addr.rs
index 8898bbfb95..f76592e509 100644
--- a/tvix/castore/src/blobservice/from_addr.rs
+++ b/tvix/castore/src/blobservice/from_addr.rs
@@ -30,8 +30,12 @@ pub async fn from_addr(uri: &str) -> Result<Box<dyn BlobService>, crate::Error>
             // - In the case of unix sockets, there must be a path, but may not be a host.
             // - In the case of non-unix sockets, there must be a host, but no path.
             // Constructing the channel is handled by tvix_castore::channel::from_url.
-            let client = BlobServiceClient::new(crate::tonic::channel_from_url(&url).await?);
-            Box::new(GRPCBlobService::from_client(client))
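+            // Use tvix_tracing's tonic interceptor to propagate the current
+            // trace context on outgoing requests.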
+            Box::new(GRPCBlobService::from_client(
+                BlobServiceClient::with_interceptor(
+                    crate::tonic::channel_from_url(&url).await?,
+                    tvix_tracing::propagate::tonic::send_trace,
+                ),
+            ))
         }
         scheme if scheme.starts_with("objectstore+") => {
             // We need to convert the URL to string, strip the prefix there, and then
diff --git a/tvix/castore/src/blobservice/grpc.rs b/tvix/castore/src/blobservice/grpc.rs
index 5663cd3838..834c0c7bf5 100644
--- a/tvix/castore/src/blobservice/grpc.rs
+++ b/tvix/castore/src/blobservice/grpc.rs
@@ -17,29 +17,33 @@ use tokio_util::{
     io::{CopyToBytes, SinkWriter},
     sync::PollSender,
 };
-use tonic::{async_trait, transport::Channel, Code, Status};
-use tracing::instrument;
+use tonic::{async_trait, Code, Status};
+use tracing::{instrument, Instrument as _};
 
 /// Connects to a (remote) tvix-store BlobService over gRPC.
 #[derive(Clone)]
-pub struct GRPCBlobService {
+pub struct GRPCBlobService<T> {
     /// The internal reference to a gRPC client.
     /// Cloning it is cheap, and it internally handles concurrent requests.
-    grpc_client: proto::blob_service_client::BlobServiceClient<Channel>,
+    grpc_client: proto::blob_service_client::BlobServiceClient<T>,
 }
 
-impl GRPCBlobService {
+impl<T> GRPCBlobService<T> {
     /// construct a [GRPCBlobService] from a [proto::blob_service_client::BlobServiceClient].
     /// panics if called outside the context of a tokio runtime.
-    pub fn from_client(
-        grpc_client: proto::blob_service_client::BlobServiceClient<Channel>,
-    ) -> Self {
+    pub fn from_client(grpc_client: proto::blob_service_client::BlobServiceClient<T>) -> Self {
         Self { grpc_client }
     }
 }
 
 #[async_trait]
-impl BlobService for GRPCBlobService {
+impl<T> BlobService for GRPCBlobService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
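+    // These bounds match what tonic-generated clients require, so this works
+    // with a plain transport channel as well as clients wrapped with an
+    // interceptor (as done in `from_addr`).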
     #[instrument(skip(self, digest), fields(blob.digest=%digest))]
     async fn has(&self, digest: &B3Digest) -> io::Result<bool> {
         let mut grpc_client = self.grpc_client.clone();
@@ -133,6 +137,8 @@ impl BlobService for GRPCBlobService {
         let task = tokio::spawn({
             let mut grpc_client = self.grpc_client.clone();
             async move { Ok::<_, Status>(grpc_client.put(blobchunk_stream).await?.into_inner()) }
+                // Instrument the task with the current span; this is not done by default.
+                .in_current_span()
         });
 
         // The tx part of the channel is converted to a sink of byte chunks.
@@ -335,7 +341,6 @@ mod tests {
                     .await
                     .expect("must succeed"),
             );
-
             GRPCBlobService::from_client(client)
         };
 
diff --git a/tvix/castore/src/directoryservice/closure_validator.rs b/tvix/castore/src/directoryservice/closure_validator.rs
deleted file mode 100644
index b9746a5a05..0000000000
--- a/tvix/castore/src/directoryservice/closure_validator.rs
+++ /dev/null
@@ -1,309 +0,0 @@
-use std::collections::{HashMap, HashSet};
-
-use bstr::ByteSlice;
-
-use petgraph::{
-    graph::{DiGraph, NodeIndex},
-    visit::{Bfs, Walker},
-};
-use tracing::instrument;
-
-use crate::{
-    proto::{self, Directory},
-    B3Digest, Error,
-};
-
-type DirectoryGraph = DiGraph<Directory, ()>;
-
-/// This can be used to validate a Directory closure (DAG of connected
-/// Directories), and their insertion order.
-///
-/// Directories need to be inserted (via `add`), in an order from the leaves to
-/// the root (DFS Post-Order).
-/// During insertion, We validate as much as we can at that time:
-///
-///  - individual validation of Directory messages
-///  - validation of insertion order (no upload of not-yet-known Directories)
-///  - validation of size fields of referred Directories
-///
-/// Internally it keeps all received Directories in a directed graph,
-/// with node weights being the Directories and edges pointing to child
-/// directories.
-///
-/// Once all Directories have been inserted, a finalize function can be
-/// called to get a (deduplicated and) validated list of directories, in
-/// insertion order.
-/// During finalize, a check for graph connectivity is performed too, to ensure
-/// there's no disconnected components, and only one root.
-#[derive(Default)]
-pub struct ClosureValidator {
-    // A directed graph, using Directory as node weight, without edge weights.
-    // Edges point from parents to children.
-    graph: DirectoryGraph,
-
-    // A lookup table from directory digest to node index.
-    digest_to_node_ix: HashMap<B3Digest, NodeIndex>,
-
-    /// Keeps track of the last-inserted directory graph node index.
-    /// On a correct insert, this will be the root node, from which the DFS post
-    /// order traversal will start from.
-    last_directory_ix: Option<NodeIndex>,
-}
-
-impl ClosureValidator {
-    /// Insert a new Directory into the closure.
-    /// Perform individual Directory validation, validation of insertion order
-    /// and size fields.
-    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
-    pub fn add(&mut self, directory: proto::Directory) -> Result<(), Error> {
-        let digest = directory.digest();
-
-        // If we already saw this node previously, it's already validated and in the graph.
-        if self.digest_to_node_ix.contains_key(&digest) {
-            return Ok(());
-        }
-
-        // Do some general validation
-        directory
-            .validate()
-            .map_err(|e| Error::InvalidRequest(e.to_string()))?;
-
-        // Ensure the directory only refers to directories which we already accepted.
-        // We lookup their node indices and add them to a HashSet.
-        let mut child_ixs = HashSet::new();
-        for dir in &directory.directories {
-            let child_digest = B3Digest::try_from(dir.digest.to_owned()).unwrap(); // validated
-
-            // Ensure the digest has already been seen
-            let child_ix = *self.digest_to_node_ix.get(&child_digest).ok_or_else(|| {
-                Error::InvalidRequest(format!(
-                    "'{}' refers to unseen child dir: {}",
-                    dir.name.as_bstr(),
-                    &child_digest
-                ))
-            })?;
-
-            // Ensure the size specified in the child node matches the directory size itself.
-            let recorded_child_size = self
-                .graph
-                .node_weight(child_ix)
-                .expect("node not found")
-                .size();
-
-            // Ensure the size specified in the child node matches our records.
-            if dir.size != recorded_child_size {
-                return Err(Error::InvalidRequest(format!(
-                    "'{}' has wrong size, specified {}, recorded {}",
-                    dir.name.as_bstr(),
-                    dir.size,
-                    recorded_child_size
-                )));
-            }
-
-            child_ixs.insert(child_ix);
-        }
-
-        // Insert node into the graph, and add edges to all children.
-        let node_ix = self.graph.add_node(directory);
-        for child_ix in child_ixs {
-            self.graph.add_edge(node_ix, child_ix, ());
-        }
-
-        // Record the mapping from digest to node_ix in our lookup table.
-        self.digest_to_node_ix.insert(digest, node_ix);
-
-        // Update last_directory_ix.
-        self.last_directory_ix = Some(node_ix);
-
-        Ok(())
-    }
-
-    /// Ensure that all inserted Directories are connected, then return a
-    /// (deduplicated) and validated list of directories, in from-leaves-to-root
-    /// order.
-    /// In case no elements have been inserted, returns an empty list.
-    #[instrument(level = "trace", skip_all, err)]
-    pub(crate) fn finalize(self) -> Result<Vec<Directory>, Error> {
-        let (graph, _) = match self.finalize_raw()? {
-            None => return Ok(vec![]),
-            Some(v) => v,
-        };
-        // Dissolve the graph, returning the nodes as a Vec.
-        // As the graph was populated in a valid DFS PostOrder, we can return
-        // nodes in that same order.
-        let (nodes, _edges) = graph.into_nodes_edges();
-        Ok(nodes.into_iter().map(|x| x.weight).collect())
-    }
-
-    /// Ensure that all inserted Directories are connected, then return a
-    /// (deduplicated) and validated list of directories, in from-root-to-leaves
-    /// order.
-    /// In case no elements have been inserted, returns an empty list.
-    #[instrument(level = "trace", skip_all, err)]
-    pub(crate) fn finalize_root_to_leaves(self) -> Result<Vec<Directory>, Error> {
-        let (graph, root) = match self.finalize_raw()? {
-            None => return Ok(vec![]),
-            Some(v) => v,
-        };
-
-        // do a BFS traversal of the graph, starting with the root node to get
-        // all nodes reachable from there.
-        let traversal = Bfs::new(&graph, root);
-
-        let order = traversal.iter(&graph).collect::<Vec<_>>();
-
-        let (nodes, _edges) = graph.into_nodes_edges();
-
-        // Convert to option, so that we can take individual nodes out without messing up the
-        // indices
-        let mut nodes = nodes.into_iter().map(Some).collect::<Vec<_>>();
-
-        Ok(order
-            .iter()
-            .map(|i| nodes[i.index()].take().unwrap().weight)
-            .collect())
-    }
-
-    /// Internal implementation of closure validation
-    #[instrument(level = "trace", skip_all, err)]
-    fn finalize_raw(self) -> Result<Option<(DirectoryGraph, NodeIndex)>, Error> {
-        // If no nodes were inserted, an empty list is returned.
-        let last_directory_ix = if let Some(x) = self.last_directory_ix {
-            x
-        } else {
-            return Ok(None);
-        };
-
-        // do a BFS traversal of the graph, starting with the root node to get
-        // (the count of) all nodes reachable from there.
-        let mut traversal = Bfs::new(&self.graph, last_directory_ix);
-
-        let mut visited_directory_count = 0;
-        #[cfg(debug_assertions)]
-        let mut visited_directory_ixs = HashSet::new();
-        #[cfg_attr(not(debug_assertions), allow(unused))]
-        while let Some(directory_ix) = traversal.next(&self.graph) {
-            #[cfg(debug_assertions)]
-            visited_directory_ixs.insert(directory_ix);
-
-            visited_directory_count += 1;
-        }
-
-        // If the number of nodes collected equals the total number of nodes in
-        // the graph, we know all nodes are connected.
-        if visited_directory_count != self.graph.node_count() {
-            // more or less exhaustive error reporting.
-            #[cfg(debug_assertions)]
-            {
-                let all_directory_ixs: HashSet<_> = self.graph.node_indices().collect();
-
-                let unvisited_directories: HashSet<_> = all_directory_ixs
-                    .difference(&visited_directory_ixs)
-                    .map(|ix| self.graph.node_weight(*ix).expect("node not found"))
-                    .collect();
-
-                return Err(Error::InvalidRequest(format!(
-                    "found {} disconnected directories: {:?}",
-                    self.graph.node_count() - visited_directory_ixs.len(),
-                    unvisited_directories
-                )));
-            }
-            #[cfg(not(debug_assertions))]
-            {
-                return Err(Error::InvalidRequest(format!(
-                    "found {} disconnected directories",
-                    self.graph.node_count() - visited_directory_count
-                )));
-            }
-        }
-
-        Ok(Some((self.graph, last_directory_ix)))
-    }
-}
-
-#[cfg(test)]
-mod tests {
-    use crate::{
-        fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C},
-        proto::{self, Directory},
-    };
-    use lazy_static::lazy_static;
-    use rstest::rstest;
-
-    lazy_static! {
-        pub static ref BROKEN_DIRECTORY : Directory = Directory {
-            symlinks: vec![proto::SymlinkNode {
-                name: "".into(), // invalid name!
-                target: "doesntmatter".into(),
-            }],
-            ..Default::default()
-        };
-
-        pub static ref BROKEN_PARENT_DIRECTORY: Directory = Directory {
-            directories: vec![proto::DirectoryNode {
-                name: "foo".into(),
-                digest: DIRECTORY_A.digest().into(),
-                size: DIRECTORY_A.size() + 42, // wrong!
-            }],
-            ..Default::default()
-        };
-    }
-
-    use super::ClosureValidator;
-
-    #[rstest]
-    /// Uploading an empty directory should succeed.
-    #[case::empty_directory(&[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
-    /// Uploading A, then B (referring to A) should succeed.
-    #[case::simple_closure(&[&*DIRECTORY_A, &*DIRECTORY_B], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_B]))]
-    /// Uploading A, then A, then C (referring to A twice) should succeed.
-    /// We pretend to be a dumb client not deduping directories.
-    #[case::same_child(&[&*DIRECTORY_A, &*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
-    /// Uploading A, then C (referring to A twice) should succeed.
-    #[case::same_child_dedup(&[&*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
-    /// Uploading A, then C (referring to A twice), then B (itself referring to A) should fail during close,
-    /// as B itself would be left unconnected.
-    #[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false, None)]
-    /// Uploading B (referring to A) should fail immediately, because A was never uploaded.
-    #[case::dangling_pointer(&[&*DIRECTORY_B], true, None)]
-    /// Uploading a directory failing validation should fail immediately.
-    #[case::failing_validation(&[&*BROKEN_DIRECTORY], true, None)]
-    /// Uploading a directory which refers to another Directory with a wrong size should fail.
-    #[case::wrong_size_in_parent(&[&*DIRECTORY_A, &*BROKEN_PARENT_DIRECTORY], true, None)]
-    fn test_uploads(
-        #[case] directories_to_upload: &[&Directory],
-        #[case] exp_fail_upload_last: bool,
-        #[case] exp_finalize: Option<Vec<&Directory>>, // Some(_) if finalize successful, None if not.
-    ) {
-        let mut dcv = ClosureValidator::default();
-        let len_directories_to_upload = directories_to_upload.len();
-
-        for (i, d) in directories_to_upload.iter().enumerate() {
-            let resp = dcv.add((*d).clone());
-            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
-                assert!(resp.is_err(), "expect last put to fail");
-
-                // We don't really care anymore what finalize() would return, as
-                // the add() failed.
-                return;
-            } else {
-                assert!(resp.is_ok(), "expect put to succeed");
-            }
-        }
-
-        // everything was uploaded successfully. Test finalize().
-        let resp = dcv.finalize();
-
-        match exp_finalize {
-            Some(directories) => {
-                assert_eq!(
-                    Vec::from_iter(directories.iter().map(|e| (*e).to_owned())),
-                    resp.expect("drain should succeed")
-                );
-            }
-            None => {
-                resp.expect_err("drain should fail");
-            }
-        }
-    }
-}
diff --git a/tvix/castore/src/directoryservice/combinators.rs b/tvix/castore/src/directoryservice/combinators.rs
new file mode 100644
index 0000000000..d3f351d6b6
--- /dev/null
+++ b/tvix/castore/src/directoryservice/combinators.rs
@@ -0,0 +1,142 @@
+use futures::stream::BoxStream;
+use futures::StreamExt;
+use futures::TryFutureExt;
+use futures::TryStreamExt;
+use tonic::async_trait;
+use tracing::{instrument, trace};
+
+use super::{DirectoryGraph, DirectoryService, RootToLeavesValidator, SimplePutter};
+use crate::directoryservice::DirectoryPutter;
+use crate::proto;
+use crate::B3Digest;
+use crate::Error;
+
+/// Asks near first; if not found, asks far.
+/// If found there, returns it, and *inserts* it into near.
+/// Specifically, it always obtains the entire directory closure from far and inserts it into near,
+/// which is useful when far does not support accessing intermediate directories (but near does).
+/// There is no negative cache.
+/// Inserts and listings are not implemented for now.
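+///
+/// A minimal usage sketch (the two services and `digest` are placeholders for
+/// any [DirectoryService] implementations and a root digest):
+///
+/// ```ignore
+/// let cache = Cache::new(near_service, far_service);
+/// // A hit is served from `near`; a miss fetches the whole closure from
+/// // `far`, inserts it into `near`, and returns the root directory.
+/// let maybe_root = cache.get(&digest).await?;
+/// ```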
+#[derive(Clone)]
+pub struct Cache<DS1, DS2> {
+    near: DS1,
+    far: DS2,
+}
+
+impl<DS1, DS2> Cache<DS1, DS2> {
+    pub fn new(near: DS1, far: DS2) -> Self {
+        Self { near, far }
+    }
+}
+
+#[async_trait]
+impl<DS1, DS2> DirectoryService for Cache<DS1, DS2>
+where
+    DS1: DirectoryService + Clone + 'static,
+    DS2: DirectoryService + Clone + 'static,
+{
+    #[instrument(skip(self, digest), fields(directory.digest = %digest))]
+    async fn get(&self, digest: &B3Digest) -> Result<Option<proto::Directory>, Error> {
+        match self.near.get(digest).await? {
+            Some(directory) => {
+                trace!("serving from cache");
+                Ok(Some(directory))
+            }
+            None => {
+                trace!("not found in near, asking remote…");
+
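+                // `far` streams the closure from the root towards the leaves,
+                // while insertion into `near` must happen from the leaves to
+                // the root, hence the intermediate copy.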
+                let mut copy = DirectoryGraph::with_order(
+                    RootToLeavesValidator::new_with_root_digest(digest.clone()),
+                );
+
+                let mut stream = self.far.get_recursive(digest);
+                let root = stream.try_next().await?;
+
+                if let Some(root) = root.clone() {
+                    copy.add(root)
+                        .map_err(|e| Error::StorageError(e.to_string()))?;
+                }
+
+                while let Some(dir) = stream.try_next().await? {
+                    copy.add(dir)
+                        .map_err(|e| Error::StorageError(e.to_string()))?;
+                }
+
+                let copy = copy
+                    .validate()
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
+
+                let mut put = self.near.put_multiple_start();
+                for dir in copy.drain_leaves_to_root() {
+                    put.put(dir).await?;
+                }
+                put.close().await?;
+
+                Ok(root)
+            }
+        }
+    }
+
+    #[instrument(skip_all)]
+    async fn put(&self, _directory: proto::Directory) -> Result<B3Digest, Error> {
+        Err(Error::StorageError("unimplemented".to_string()))
+    }
+
+    #[instrument(skip_all, fields(directory.digest = %root_directory_digest))]
+    fn get_recursive(
+        &self,
+        root_directory_digest: &B3Digest,
+    ) -> BoxStream<'static, Result<proto::Directory, Error>> {
+        let near = self.near.clone();
+        let far = self.far.clone();
+        let digest = root_directory_digest.clone();
+        Box::pin(
+            (async move {
+                let mut stream = near.get_recursive(&digest);
+                match stream.try_next().await? {
+                    Some(first) => {
+                        trace!("serving from cache");
+                        Ok(futures::stream::once(async { Ok(first) })
+                            .chain(stream)
+                            .left_stream())
+                    }
+                    None => {
+                        trace!("not found in near, asking remote…");
+
+                        let mut copy_for_near = DirectoryGraph::with_order(
+                            RootToLeavesValidator::new_with_root_digest(digest.clone()),
+                        );
+                        let mut copy_for_client = vec![];
+
+                        let mut stream = far.get_recursive(&digest);
+                        while let Some(dir) = stream.try_next().await? {
+                            copy_for_near
+                                .add(dir.clone())
+                                .map_err(|e| Error::StorageError(e.to_string()))?;
+                            copy_for_client.push(dir);
+                        }
+
+                        let copy_for_near = copy_for_near
+                            .validate()
+                            .map_err(|e| Error::StorageError(e.to_string()))?;
+                        let mut put = near.put_multiple_start();
+                        for dir in copy_for_near.drain_leaves_to_root() {
+                            put.put(dir).await?;
+                        }
+                        put.close().await?;
+
+                        Ok(futures::stream::iter(copy_for_client.into_iter().map(Ok))
+                            .right_stream())
+                    }
+                }
+            })
+            .try_flatten_stream(),
+        )
+    }
+
+    #[instrument(skip_all)]
+    fn put_multiple_start(&self) -> Box<(dyn DirectoryPutter + 'static)> {
+        Box::new(SimplePutter::new((*self).clone()))
+    }
+}
diff --git a/tvix/castore/src/directoryservice/directory_graph.rs b/tvix/castore/src/directoryservice/directory_graph.rs
new file mode 100644
index 0000000000..e6b9b16337
--- /dev/null
+++ b/tvix/castore/src/directoryservice/directory_graph.rs
@@ -0,0 +1,413 @@
+use std::collections::HashMap;
+
+use bstr::ByteSlice;
+
+use petgraph::{
+    graph::{DiGraph, NodeIndex},
+    visit::{Bfs, DfsPostOrder, EdgeRef, IntoNodeIdentifiers, Walker},
+    Direction, Incoming,
+};
+use tracing::instrument;
+
+use super::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
+use crate::{
+    proto::{self, Directory, DirectoryNode},
+    B3Digest,
+};
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error("{0}")]
+    ValidationError(String),
+}
+
+/// This can be used to validate and/or re-order a Directory closure (DAG of
+/// connected Directories), and their insertion order.
+///
+/// The DirectoryGraph is parametrized on the insertion order, and can be
+/// constructed using the Default trait, or using `with_order` if the
+/// OrderValidator needs to be customized.
+///
+/// If the user is receiving directories from canonical protobuf encoding in
+/// root-to-leaves order, and parsing them, she can call `digest_allowed`
+/// _before_ parsing the protobuf record and then add it with `add_order_unchecked`.
+/// All other users insert the directories via `add`, in their specified order.
+/// During insertion, we validate as much as we can at that time:
+///
+///  - individual validation of Directory messages
+///  - validation of insertion order
+///  - validation of size fields of referred Directories
+///
+/// Internally it keeps all received Directories in a directed graph,
+/// with node weights being the Directories and edges pointing to child/parent
+/// directories.
+///
+/// Once all Directories have been inserted, a validate function can be
+/// called to perform a check for graph connectivity and ensure there's no
+/// disconnected components or missing nodes.
+/// Finally, `drain_leaves_to_root` or `drain_root_to_leaves` can be _chained_
+/// onto `validate` to get an iterator over the (deduplicated and) validated
+/// list of directories in either order.
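+///
+/// A minimal sketch of the leaves-to-root flow (the directory values are
+/// placeholders):
+///
+/// ```ignore
+/// let mut dg = DirectoryGraph::<LeavesToRootValidator>::default();
+/// dg.add(child_dir)?; // leaves first
+/// dg.add(root_dir)?;  // then their parents
+/// let dirs: Vec<_> = dg.validate()?.drain_leaves_to_root().collect();
+/// ```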
+#[derive(Default)]
+pub struct DirectoryGraph<O> {
+    // A directed graph, using Directory as node weight.
+    // Edges point from parents to children.
+    //
+    // Nodes with None weights might exist when a digest has been referred to but the directory
+    // with this digest has not yet been sent.
+    //
+    // The option in the edge weight tracks the pending validation state of the respective edge, for example if
+    // the child has not been added yet.
+    graph: DiGraph<Option<Directory>, Option<DirectoryNode>>,
+
+    // A lookup table from directory digest to node index.
+    digest_to_node_ix: HashMap<B3Digest, NodeIndex>,
+
+    order_validator: O,
+}
+
+pub struct ValidatedDirectoryGraph {
+    graph: DiGraph<Option<Directory>, Option<DirectoryNode>>,
+
+    root: Option<NodeIndex>,
+}
+
+fn check_edge(dir: &DirectoryNode, child: &Directory) -> Result<(), Error> {
+    // Ensure the size specified in the child node matches our records.
+    if dir.size != child.size() {
+        return Err(Error::ValidationError(format!(
+            "'{}' has wrong size, specified {}, recorded {}",
+            dir.name.as_bstr(),
+            dir.size,
+            child.size(),
+        )));
+    }
+    Ok(())
+}
+
+impl DirectoryGraph<LeavesToRootValidator> {
+    /// Insert a new Directory into the closure
+    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
+    pub fn add(&mut self, directory: proto::Directory) -> Result<(), Error> {
+        if !self.order_validator.add_directory(&directory) {
+            return Err(Error::ValidationError(
+                "unknown directory was referenced".into(),
+            ));
+        }
+        self.add_order_unchecked(directory)
+    }
+}
+
+impl DirectoryGraph<RootToLeavesValidator> {
+    /// If the user is parsing directories from canonical protobuf encoding, she can
+    /// call `digest_allowed` _before_ parsing the protobuf record and then add it
+    /// with `add_order_unchecked`.
+    pub fn digest_allowed(&self, digest: B3Digest) -> bool {
+        self.order_validator.digest_allowed(&digest)
+    }
+
+    /// Insert a new Directory into the closure
+    #[instrument(level = "trace", skip_all, fields(directory.digest=%directory.digest(), directory.size=%directory.size()), err)]
+    pub fn add(&mut self, directory: proto::Directory) -> Result<(), Error> {
+        let digest = directory.digest();
+        if !self.order_validator.digest_allowed(&digest) {
+            return Err(Error::ValidationError("unexpected digest".into()));
+        }
+        self.order_validator.add_directory_unchecked(&directory);
+        self.add_order_unchecked(directory)
+    }
+}
+
+impl<O: OrderValidator> DirectoryGraph<O> {
+    /// Customize the ordering, e.g. for pre-setting the root of the RootToLeavesValidator.
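+    /// For example: `DirectoryGraph::with_order(RootToLeavesValidator::new_with_root_digest(digest))`.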
+    pub fn with_order(order_validator: O) -> Self {
+        Self {
+            graph: Default::default(),
+            digest_to_node_ix: Default::default(),
+            order_validator,
+        }
+    }
+
+    /// Adds a directory which has already been confirmed to be in-order to the graph
+    pub fn add_order_unchecked(&mut self, directory: proto::Directory) -> Result<(), Error> {
+        // Do some basic validation
+        directory
+            .validate()
+            .map_err(|e| Error::ValidationError(e.to_string()))?;
+
+        let digest = directory.digest();
+
+        // Teach the graph about the existence of a node with this digest
+        let ix = *self
+            .digest_to_node_ix
+            .entry(digest)
+            .or_insert_with(|| self.graph.add_node(None));
+
+        if self.graph[ix].is_some() {
+            // The node is already in the graph, there is nothing to do here.
+            return Ok(());
+        }
+
+        // set up edges to all child directories
+        for subdir in &directory.directories {
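+            // Digest validity was already checked by `directory.validate()` above.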
+            let subdir_digest: B3Digest = subdir.digest.clone().try_into().unwrap();
+
+            let child_ix = *self
+                .digest_to_node_ix
+                .entry(subdir_digest)
+                .or_insert_with(|| self.graph.add_node(None));
+
+            let pending_edge_check = match &self.graph[child_ix] {
+                Some(child) => {
+                    // child is already available, validate the edge now
+                    check_edge(subdir, child)?;
+                    None
+                }
+                None => Some(subdir.clone()), // pending validation
+            };
+            self.graph.add_edge(ix, child_ix, pending_edge_check);
+        }
+
+        // validate the edges from parents to this node
+        // this collects edge ids in a Vec because there is no edges_directed_mut :'c
+        for edge_id in self
+            .graph
+            .edges_directed(ix, Direction::Incoming)
+            .map(|edge_ref| edge_ref.id())
+            .collect::<Vec<_>>()
+            .into_iter()
+        {
+            let edge_weight = self
+                .graph
+                .edge_weight_mut(edge_id)
+                .expect("edge not found")
+                .take()
+                .expect("edge is already validated");
+            check_edge(&edge_weight, &directory)?;
+        }
+
+        // finally, store the directory information in the node weight
+        self.graph[ix] = Some(directory);
+
+        Ok(())
+    }
+
+    #[instrument(level = "trace", skip_all, err)]
+    pub fn validate(self) -> Result<ValidatedDirectoryGraph, Error> {
+        // find all initial nodes (nodes without incoming edges)
+        let mut roots = self
+            .graph
+            .node_identifiers()
+            .filter(|&a| self.graph.neighbors_directed(a, Incoming).next().is_none());
+
+        let root = roots.next();
+        if roots.next().is_some() {
+            return Err(Error::ValidationError(
+                "graph has disconnected roots".into(),
+            ));
+        }
+
+        // test that the graph is complete
+        if self.graph.raw_nodes().iter().any(|n| n.weight.is_none()) {
+            return Err(Error::ValidationError("graph is incomplete".into()));
+        }
+
+        Ok(ValidatedDirectoryGraph {
+            graph: self.graph,
+            root,
+        })
+    }
+}
+
+impl ValidatedDirectoryGraph {
+    /// Return the list of directories in from-root-to-leaves order.
+    /// In case no elements have been inserted, returns an empty list.
+    ///
+    /// panics if the specified root is not in the graph
+    #[instrument(level = "trace", skip_all)]
+    pub fn drain_root_to_leaves(self) -> impl Iterator<Item = Directory> {
+        let order = match self.root {
+            Some(root) => {
+                // do a BFS traversal of the graph, starting with the root node
+                Bfs::new(&self.graph, root)
+                    .iter(&self.graph)
+                    .collect::<Vec<_>>()
+            }
+            None => vec![], // No nodes have been inserted, do not traverse
+        };
+
+        let (mut nodes, _edges) = self.graph.into_nodes_edges();
+
+        order
+            .into_iter()
+            .filter_map(move |i| nodes[i.index()].weight.take())
+    }
+
+    /// Return the list of directories in from-leaves-to-root order.
+    /// In case no elements have been inserted, returns an empty list.
+    ///
+    /// panics when the specified root is not in the graph
+    #[instrument(level = "trace", skip_all)]
+    pub fn drain_leaves_to_root(self) -> impl Iterator<Item = Directory> {
+        let order = match self.root {
+            Some(root) => {
+                // do a DFS Post-Order traversal of the graph, starting with the root node
+                DfsPostOrder::new(&self.graph, root)
+                    .iter(&self.graph)
+                    .collect::<Vec<_>>()
+            }
+            None => vec![], // No nodes have been inserted, do not traverse
+        };
+
+        let (mut nodes, _edges) = self.graph.into_nodes_edges();
+
+        order
+            .into_iter()
+            .filter_map(move |i| nodes[i.index()].weight.take())
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::{
+        fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C},
+        proto::{self, Directory},
+    };
+    use lazy_static::lazy_static;
+    use rstest::rstest;
+
+    lazy_static! {
+        pub static ref BROKEN_DIRECTORY : Directory = Directory {
+            symlinks: vec![proto::SymlinkNode {
+                name: "".into(), // invalid name!
+                target: "doesntmatter".into(),
+            }],
+            ..Default::default()
+        };
+
+        pub static ref BROKEN_PARENT_DIRECTORY: Directory = Directory {
+            directories: vec![proto::DirectoryNode {
+                name: "foo".into(),
+                digest: DIRECTORY_A.digest().into(),
+                size: DIRECTORY_A.size() + 42, // wrong!
+            }],
+            ..Default::default()
+        };
+    }
+
+    use super::{DirectoryGraph, LeavesToRootValidator, RootToLeavesValidator};
+
+    #[rstest]
+    /// Uploading an empty directory should succeed.
+    #[case::empty_directory(&[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
+    /// Uploading A, then B (referring to A) should succeed.
+    #[case::simple_closure(&[&*DIRECTORY_A, &*DIRECTORY_B], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_B]))]
+    /// Uploading A, then A, then C (referring to A twice) should succeed.
+    /// We pretend to be a dumb client not deduping directories.
+    #[case::same_child(&[&*DIRECTORY_A, &*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
+    /// Uploading A, then C (referring to A twice) should succeed.
+    #[case::same_child_dedup(&[&*DIRECTORY_A, &*DIRECTORY_C], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
+    /// Uploading A, then C (referring to A twice), then B (itself referring to A) should fail during validation,
+    /// as B itself would be left unconnected.
+    #[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false, None)]
+    /// Uploading B (referring to A) should fail immediately, because A was never uploaded.
+    #[case::dangling_pointer(&[&*DIRECTORY_B], true, None)]
+    /// Uploading a directory failing validation should fail immediately.
+    #[case::failing_validation(&[&*BROKEN_DIRECTORY], true, None)]
+    /// Uploading a directory which refers to another Directory with a wrong size should fail.
+    #[case::wrong_size_in_parent(&[&*DIRECTORY_A, &*BROKEN_PARENT_DIRECTORY], true, None)]
+    fn test_uploads(
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+        #[case] exp_finalize: Option<Vec<&Directory>>, // Some(_) if finalize successful, None if not.
+    ) {
+        let mut dcv = DirectoryGraph::<LeavesToRootValidator>::default();
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp = dcv.add((*d).clone());
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(resp.is_err(), "expect last put to fail");
+
+                // We don't really care anymore what validate() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp.is_ok(), "expect put to succeed");
+            }
+        }
+
+        // Everything was uploaded successfully. Test validate().
+        let resp = dcv
+            .validate()
+            .map(|validated| validated.drain_leaves_to_root().collect::<Vec<_>>());
+
+        match exp_finalize {
+            Some(directories) => {
+                assert_eq!(
+                    Vec::from_iter(directories.iter().map(|e| (*e).to_owned())),
+                    resp.expect("drain should succeed")
+                );
+            }
+            None => {
+                resp.expect_err("drain should fail");
+            }
+        }
+    }
+
+    #[rstest]
+    /// Downloading an empty directory should succeed.
+    #[case::empty_directory(&*DIRECTORY_A, &[&*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A]))]
+    /// Downloading B, then A (referenced by B) should succeed.
+    #[case::simple_closure(&*DIRECTORY_B, &[&*DIRECTORY_B, &*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_B]))]
+    /// Downloading C (referring to A twice), then A should succeed.
+    #[case::same_child_dedup(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_A], false, Some(vec![&*DIRECTORY_A, &*DIRECTORY_C]))]
+    /// Downloading C, then B (both referring to A but not referring to each other) should fail immediately, as B has no connection to C (the root).
+    #[case::unconnected_node(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_B], true, None)]
+    /// Downloading B (specified as the root) but receiving A instead should fail immediately, because A has no connection to B (the root).
+    #[case::dangling_pointer(&*DIRECTORY_B, &[&*DIRECTORY_A], true, None)]
+    /// Downloading a directory failing validation should fail immediately.
+    #[case::failing_validation(&*BROKEN_DIRECTORY, &[&*BROKEN_DIRECTORY], true, None)]
+    /// Downloading a directory which refers to another Directory with a wrong size should fail.
+    #[case::wrong_size_in_parent(&*BROKEN_PARENT_DIRECTORY, &[&*BROKEN_PARENT_DIRECTORY, &*DIRECTORY_A], true, None)]
+    fn test_downloads(
+        #[case] root: &Directory,
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+        #[case] exp_finalize: Option<Vec<&Directory>>, // Some(_) if finalize successful, None if not.
+    ) {
+        let mut dcv =
+            DirectoryGraph::with_order(RootToLeavesValidator::new_with_root_digest(root.digest()));
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp = dcv.add((*d).clone());
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(resp.is_err(), "expect last put to fail");
+
+                // We don't really care anymore what validate() would return, as
+                // the add() failed.
+                return;
+            } else {
+                assert!(resp.is_ok(), "expect put to succeed");
+            }
+        }
+
+        // Everything was uploaded successfully; test validate().
+        let resp = dcv
+            .validate()
+            .map(|validated| validated.drain_leaves_to_root().collect::<Vec<_>>());
+
+        match exp_finalize {
+            Some(directories) => {
+                assert_eq!(
+                    Vec::from_iter(directories.iter().map(|e| (*e).to_owned())),
+                    resp.expect("drain should succeed")
+                );
+            }
+            None => {
+                resp.expect_err("drain should fail");
+            }
+        }
+    }
+}
diff --git a/tvix/castore/src/directoryservice/from_addr.rs b/tvix/castore/src/directoryservice/from_addr.rs
index ee675ca68a..9aa01df171 100644
--- a/tvix/castore/src/directoryservice/from_addr.rs
+++ b/tvix/castore/src/directoryservice/from_addr.rs
@@ -63,8 +63,12 @@ pub async fn from_addr(uri: &str) -> Result<Box<dyn DirectoryService>, crate::Er
             // - In the case of unix sockets, there must be a path, but may not be a host.
             // - In the case of non-unix sockets, there must be a host, but no path.
             // Constructing the channel is handled by tvix_castore::channel::from_url.
-            let client = DirectoryServiceClient::new(crate::tonic::channel_from_url(&url).await?);
-            Box::new(GRPCDirectoryService::from_client(client))
+            Box::new(GRPCDirectoryService::from_client(
+                DirectoryServiceClient::with_interceptor(
+                    crate::tonic::channel_from_url(&url).await?,
+                    tvix_tracing::propagate::tonic::send_trace,
+                ),
+            ))
         }
         scheme if scheme.starts_with("objectstore+") => {
             // We need to convert the URL to string, strip the prefix there, and then
diff --git a/tvix/castore/src/directoryservice/grpc.rs b/tvix/castore/src/directoryservice/grpc.rs
index fe935629bf..ca9b0de07b 100644
--- a/tvix/castore/src/directoryservice/grpc.rs
+++ b/tvix/castore/src/directoryservice/grpc.rs
@@ -9,31 +9,35 @@ use tokio::spawn;
 use tokio::sync::mpsc::UnboundedSender;
 use tokio::task::JoinHandle;
 use tokio_stream::wrappers::UnboundedReceiverStream;
-use tonic::async_trait;
-use tonic::Code;
-use tonic::{transport::Channel, Status};
-use tracing::{instrument, warn};
+use tonic::{async_trait, Code, Status};
+use tracing::{instrument, warn, Instrument as _};
 
 /// Connects to a (remote) tvix-store DirectoryService over gRPC.
 #[derive(Clone)]
-pub struct GRPCDirectoryService {
+pub struct GRPCDirectoryService<T> {
     /// The internal reference to a gRPC client.
     /// Cloning it is cheap, and it internally handles concurrent requests.
-    grpc_client: proto::directory_service_client::DirectoryServiceClient<Channel>,
+    grpc_client: proto::directory_service_client::DirectoryServiceClient<T>,
 }
 
-impl GRPCDirectoryService {
+impl<T> GRPCDirectoryService<T> {
     /// construct a [GRPCDirectoryService] from a [proto::directory_service_client::DirectoryServiceClient].
     /// panics if called outside the context of a tokio runtime.
     pub fn from_client(
-        grpc_client: proto::directory_service_client::DirectoryServiceClient<Channel>,
+        grpc_client: proto::directory_service_client::DirectoryServiceClient<T>,
     ) -> Self {
         Self { grpc_client }
     }
 }
 
 #[async_trait]
-impl DirectoryService for GRPCDirectoryService {
+impl<T> DirectoryService for GRPCDirectoryService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
     #[instrument(level = "trace", skip_all, fields(directory.digest = %digest))]
     async fn get(
         &self,
@@ -194,14 +198,17 @@ impl DirectoryService for GRPCDirectoryService {
 
         let (tx, rx) = tokio::sync::mpsc::unbounded_channel();
 
-        let task: JoinHandle<Result<proto::PutDirectoryResponse, Status>> = spawn(async move {
-            let s = grpc_client
-                .put(UnboundedReceiverStream::new(rx))
-                .await?
-                .into_inner();
+        let task: JoinHandle<Result<proto::PutDirectoryResponse, Status>> = spawn(
+            async move {
+                let s = grpc_client
+                    .put(UnboundedReceiverStream::new(rx))
+                    .await?
+                    .into_inner();
 
-            Ok(s)
-        });
+                Ok(s)
+            } // Instrument the task with the current span; this is not done by default.
+            .in_current_span(),
+        );
 
         Box::new(GRPCPutter {
             rq: Some((task, tx)),
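
Making `GRPCDirectoryService` generic over the underlying service is what enables the interceptor wiring shown in `from_addr.rs` above. A minimal sketch of constructing the client with the trace-propagating interceptor, assuming `tvix_castore` publicly exposes its `tonic::channel_from_url` helper:

```rust
use tvix_castore::directoryservice::{DirectoryService, GRPCDirectoryService};
use tvix_castore::proto::directory_service_client::DirectoryServiceClient;

async fn connect(url: url::Url) -> Result<Box<dyn DirectoryService>, tvix_castore::Error> {
    let channel = tvix_castore::tonic::channel_from_url(&url).await?;
    // The interceptor runs on every request; send_trace injects the current
    // trace context into the outgoing gRPC request metadata.
    Ok(Box::new(GRPCDirectoryService::from_client(
        DirectoryServiceClient::with_interceptor(
            channel,
            tvix_tracing::propagate::tonic::send_trace,
        ),
    )))
}
```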
diff --git a/tvix/castore/src/directoryservice/mod.rs b/tvix/castore/src/directoryservice/mod.rs
index 3f180ef162..eff4a685fa 100644
--- a/tvix/castore/src/directoryservice/mod.rs
+++ b/tvix/castore/src/directoryservice/mod.rs
@@ -2,11 +2,13 @@ use crate::{proto, B3Digest, Error};
 use futures::stream::BoxStream;
 use tonic::async_trait;
 
-mod closure_validator;
+mod combinators;
+mod directory_graph;
 mod from_addr;
 mod grpc;
 mod memory;
 mod object_store;
+mod order_validator;
 mod simple_putter;
 mod sled;
 #[cfg(test)]
@@ -14,11 +16,13 @@ pub mod tests;
 mod traverse;
 mod utils;
 
-pub use self::closure_validator::ClosureValidator;
+pub use self::combinators::Cache;
+pub use self::directory_graph::DirectoryGraph;
 pub use self::from_addr::from_addr;
 pub use self::grpc::GRPCDirectoryService;
 pub use self::memory::MemoryDirectoryService;
 pub use self::object_store::ObjectStoreDirectoryService;
+pub use self::order_validator::{LeavesToRootValidator, OrderValidator, RootToLeavesValidator};
 pub use self::simple_putter::SimplePutter;
 pub use self::sled::SledDirectoryService;
 pub use self::traverse::descend_to;
diff --git a/tvix/castore/src/directoryservice/object_store.rs b/tvix/castore/src/directoryservice/object_store.rs
index 64ce335edb..feaaaa39cd 100644
--- a/tvix/castore/src/directoryservice/object_store.rs
+++ b/tvix/castore/src/directoryservice/object_store.rs
@@ -1,4 +1,3 @@
-use std::collections::HashSet;
 use std::sync::Arc;
 
 use data_encoding::HEXLOWER;
@@ -16,7 +15,9 @@ use tonic::async_trait;
 use tracing::{instrument, trace, warn, Level};
 use url::Url;
 
-use super::{ClosureValidator, DirectoryPutter, DirectoryService};
+use super::{
+    DirectoryGraph, DirectoryPutter, DirectoryService, LeavesToRootValidator, RootToLeavesValidator,
+};
 use crate::{proto, B3Digest, Error};
 
 /// Stores directory closures in an object store.
@@ -97,9 +98,10 @@ impl DirectoryService for ObjectStoreDirectoryService {
         &self,
         root_directory_digest: &B3Digest,
     ) -> BoxStream<'static, Result<proto::Directory, Error>> {
-        // The Directory digests we're expecting to receive.
-        let mut expected_directory_digests: HashSet<B3Digest> =
-            HashSet::from([root_directory_digest.clone()]);
+        // Check that we are not passing on bogus data from the object store to the client, and
+        // that the trust chain from the root digest to the leaves is intact.
+        let mut order_validator =
+            RootToLeavesValidator::new_with_root_digest(root_directory_digest.clone());
 
         let dir_path = derive_dirs_path(&self.base_path, root_directory_digest);
         let object_store = self.object_store.clone();
@@ -130,8 +132,7 @@ impl DirectoryService for ObjectStoreDirectoryService {
                         let digest: B3Digest = hasher.update(&buf).finalize().as_bytes().into();
 
                         // Ensure to only decode the directory objects whose digests we trust
-                        let was_expected = expected_directory_digests.remove(&digest);
-                        if !was_expected {
+                        if !order_validator.digest_allowed(&digest) {
                             return Err(crate::Error::StorageError(format!(
                                 "received unexpected directory {}",
                                 digest
@@ -143,13 +144,8 @@ impl DirectoryService for ObjectStoreDirectoryService {
                             Error::StorageError(e.to_string())
                         })?;
 
-                        for directory in &directory.directories {
-                            // Allow the children to appear next
-                            expected_directory_digests.insert(
-                                B3Digest::try_from(directory.digest.clone())
-                                    .map_err(|e| Error::StorageError(e.to_string()))?,
-                            );
-                        }
+                        // Allow the children to appear next
+                        order_validator.add_directory_unchecked(&directory);
 
                         Ok(directory)
                     })())
@@ -177,7 +173,7 @@ struct ObjectStoreDirectoryPutter {
     object_store: Arc<dyn ObjectStore>,
     base_path: Path,
 
-    directory_validator: Option<ClosureValidator>,
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
 }
 
 impl ObjectStoreDirectoryPutter {
@@ -197,7 +193,9 @@ impl DirectoryPutter for ObjectStoreDirectoryPutter {
         match self.directory_validator {
             None => return Err(Error::StorageError("already closed".to_string())),
             Some(ref mut validator) => {
-                validator.add(directory)?;
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
             }
         }
 
@@ -214,7 +212,11 @@ impl DirectoryPutter for ObjectStoreDirectoryPutter {
         // retrieve the validated directories.
         // It is important that they are in topological order (root first),
         // since that's how we want to retrieve them from the object store in the end.
-        let directories = validator.finalize_root_to_leaves()?;
+        let directories = validator
+            .validate()
+            .map_err(|e| Error::StorageError(e.to_string()))?
+            .drain_root_to_leaves()
+            .collect::<Vec<_>>();
 
         // Get the root digest
         let root_digest = directories
diff --git a/tvix/castore/src/directoryservice/order_validator.rs b/tvix/castore/src/directoryservice/order_validator.rs
new file mode 100644
index 0000000000..6045f5d241
--- /dev/null
+++ b/tvix/castore/src/directoryservice/order_validator.rs
@@ -0,0 +1,181 @@
+use std::collections::HashSet;
+use tracing::warn;
+
+use crate::{proto::Directory, B3Digest};
+
+pub trait OrderValidator {
+    /// Updates the order validator's state with the directory.
+    /// Returns whether the directory was accepted.
+    fn add_directory(&mut self, directory: &Directory) -> bool;
+}
+
+/// Validates that newly introduced directories are already referenced from
+/// the root via existing directories.
+/// Commonly used when _receiving_ a directory closure _from_ a store.
+#[derive(Default)]
+pub struct RootToLeavesValidator {
+    /// The digests allowed to appear next; seeded with the root digest, if known.
+    expected_digests: HashSet<B3Digest>,
+}
+
+impl RootToLeavesValidator {
+    /// Use to validate the root digest of the closure upon receiving the first
+    /// directory.
+    pub fn new_with_root_digest(root_digest: B3Digest) -> Self {
+        let mut this = Self::default();
+        this.expected_digests.insert(root_digest);
+        this
+    }
+
+    /// Checks if a directory is in-order based on its digest.
+    ///
+    /// Particularly useful when receiving directories in canonical protobuf
+    /// encoding, so that directories not connected to the root can be rejected
+    /// without parsing.
+    ///
+    /// After parsing, the directory must be passed to `add_directory_unchecked`
+    /// to add its children to the list of expected digests.
+    pub fn digest_allowed(&self, digest: &B3Digest) -> bool {
+        self.expected_digests.is_empty() // we don't know the root node; allow any
+            || self.expected_digests.contains(digest)
+    }
+
+    /// Updates the order validator's state with the directory.
+    pub fn add_directory_unchecked(&mut self, directory: &Directory) {
+        // No initial root was specified and this is the first directory
+        if self.expected_digests.is_empty() {
+            self.expected_digests.insert(directory.digest());
+        }
+
+        for subdir in &directory.directories {
+            // Allow the children to appear next
+            let subdir_digest = subdir.digest.clone().try_into().unwrap();
+            self.expected_digests.insert(subdir_digest);
+        }
+    }
+}
+
+impl OrderValidator for RootToLeavesValidator {
+    fn add_directory(&mut self, directory: &Directory) -> bool {
+        if !self.digest_allowed(&directory.digest()) {
+            return false;
+        }
+        self.add_directory_unchecked(directory);
+        true
+    }
+}
+
+/// Validates that newly uploaded directories only reference directories which
+/// have already been introduced.
+/// Commonly used when _uploading_ a directory closure _to_ a store.
+#[derive(Default)]
+pub struct LeavesToRootValidator {
+    /// This is empty in the beginning, and gets filled as leaves and intermediates are
+    /// inserted
+    allowed_references: HashSet<B3Digest>,
+}
+
+impl OrderValidator for LeavesToRootValidator {
+    fn add_directory(&mut self, directory: &Directory) -> bool {
+        let digest = directory.digest();
+
+        for subdir in &directory.directories {
+            let subdir_digest = subdir.digest.clone().try_into().unwrap(); // this has been validated in validate_directory()
+            if !self.allowed_references.contains(&subdir_digest) {
+                warn!(
+                    directory.digest = %digest,
+                    subdirectory.digest = %subdir_digest,
+                    "unexpected directory reference"
+                );
+                return false;
+            }
+        }
+
+        self.allowed_references.insert(digest.clone());
+
+        true
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::{LeavesToRootValidator, RootToLeavesValidator};
+    use crate::directoryservice::order_validator::OrderValidator;
+    use crate::fixtures::{DIRECTORY_A, DIRECTORY_B, DIRECTORY_C};
+    use crate::proto::Directory;
+    use rstest::rstest;
+
+    #[rstest]
+    /// Uploading an empty directory should succeed.
+    #[case::empty_directory(&[&*DIRECTORY_A], false)]
+    /// Uploading A, then B (referring to A) should succeed.
+    #[case::simple_closure(&[&*DIRECTORY_A, &*DIRECTORY_B], false)]
+    /// Uploading A, then A, then C (referring to A twice) should succeed.
+    /// We pretend to be a dumb client not deduping directories.
+    #[case::same_child(&[&*DIRECTORY_A, &*DIRECTORY_A, &*DIRECTORY_C], false)]
+    /// Uploading A, then C (referring to A twice) should succeed.
+    #[case::same_child_dedup(&[&*DIRECTORY_A, &*DIRECTORY_C], false)]
+    /// Uploading A, then C (referring to A twice), then B (itself referring to A) passes the order
+    /// validator; B's unconnectedness is only caught by DirectoryGraph's connectivity check.
+    #[case::unconnected_node(&[&*DIRECTORY_A, &*DIRECTORY_C, &*DIRECTORY_B], false)]
+    /// Uploading B (referring to A) should fail immediately, because A was never uploaded.
+    #[case::dangling_pointer(&[&*DIRECTORY_B], true)]
+    fn leaves_to_root(
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+    ) {
+        let mut validator = LeavesToRootValidator::default();
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp = validator.add_directory(d);
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(!resp, "expect last put to fail");
+
+                // The validator rejected the directory as expected;
+                // nothing further to check.
+                return;
+            } else {
+                assert!(resp, "expect put to succeed");
+            }
+        }
+    }
+
+    #[rstest]
+    /// Downloading an empty directory should succeed.
+    #[case::empty_directory(&*DIRECTORY_A, &[&*DIRECTORY_A], false)]
+    /// Downloading B, then A (referenced by B) should succeed.
+    #[case::simple_closure(&*DIRECTORY_B, &[&*DIRECTORY_B, &*DIRECTORY_A], false)]
+    /// Downloading C (referring to A twice), then A should succeed.
+    #[case::same_child_dedup(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_A], false)]
+    /// Downloading C, then B (both referring to A but not referring to each other) should fail immediately, as B has no connection to C (the root).
+    #[case::unconnected_node(&*DIRECTORY_C, &[&*DIRECTORY_C, &*DIRECTORY_B], true)]
+    /// Downloading B (specified as the root) but receiving A instead should fail immediately, because A has no connection to B (the root).
+    #[case::dangling_pointer(&*DIRECTORY_B, &[&*DIRECTORY_A], true)]
+    fn root_to_leaves(
+        #[case] root: &Directory,
+        #[case] directories_to_upload: &[&Directory],
+        #[case] exp_fail_upload_last: bool,
+    ) {
+        let mut validator = RootToLeavesValidator::new_with_root_digest(root.digest());
+        let len_directories_to_upload = directories_to_upload.len();
+
+        for (i, d) in directories_to_upload.iter().enumerate() {
+            let resp1 = validator.digest_allowed(&d.digest());
+            let resp = validator.add_directory(d);
+            assert_eq!(
+                resp1, resp,
+                "digest_allowed should return the same value as add_directory"
+            );
+            if i == len_directories_to_upload - 1 && exp_fail_upload_last {
+                assert!(!resp, "expect last put to fail");
+
+                // The validator rejected the directory as expected;
+                // nothing further to check.
+                return;
+            } else {
+                assert!(resp, "expect put to succeed");
+            }
+        }
+    }
+}
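
A minimal sketch of how the two validators compose, using placeholder directories `leaf` and `parent` (where `parent` references `leaf`), mirroring the DIRECTORY_A/DIRECTORY_B fixtures used in the tests:

```rust
use tvix_castore::directoryservice::{
    LeavesToRootValidator, OrderValidator, RootToLeavesValidator,
};
use tvix_castore::proto::Directory;

fn check_orders(leaf: &Directory, parent: &Directory) {
    // Upload direction: leaves first, references must already be known.
    let mut up = LeavesToRootValidator::default();
    assert!(up.add_directory(leaf));
    assert!(up.add_directory(parent)); // ok: leaf was seen before

    // Download direction: root first, only digests reachable from the
    // root are accepted.
    let mut down = RootToLeavesValidator::new_with_root_digest(parent.digest());
    assert!(down.digest_allowed(&parent.digest()));
    down.add_directory_unchecked(parent); // its children become allowed
    assert!(down.digest_allowed(&leaf.digest()));
}
```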
diff --git a/tvix/castore/src/directoryservice/simple_putter.rs b/tvix/castore/src/directoryservice/simple_putter.rs
index 25617ebcac..dc54e3d11d 100644
--- a/tvix/castore/src/directoryservice/simple_putter.rs
+++ b/tvix/castore/src/directoryservice/simple_putter.rs
@@ -1,6 +1,6 @@
-use super::ClosureValidator;
 use super::DirectoryPutter;
 use super::DirectoryService;
+use super::{DirectoryGraph, LeavesToRootValidator};
 use crate::proto;
 use crate::B3Digest;
 use crate::Error;
@@ -14,7 +14,7 @@ use tracing::warn;
 pub struct SimplePutter<DS: DirectoryService> {
     directory_service: DS,
 
-    directory_validator: Option<ClosureValidator>,
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
 }
 
 impl<DS: DirectoryService> SimplePutter<DS> {
@@ -33,7 +33,9 @@ impl<DS: DirectoryService + 'static> DirectoryPutter for SimplePutter<DS> {
         match self.directory_validator {
             None => return Err(Error::StorageError("already closed".to_string())),
             Some(ref mut validator) => {
-                validator.add(directory)?;
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
             }
         }
 
@@ -46,7 +48,11 @@ impl<DS: DirectoryService + 'static> DirectoryPutter for SimplePutter<DS> {
             None => Err(Error::InvalidRequest("already closed".to_string())),
             Some(validator) => {
                 // retrieve the validated directories.
-                let directories = validator.finalize()?;
+                let directories = validator
+                    .validate()
+                    .map_err(|e| Error::StorageError(e.to_string()))?
+                    .drain_leaves_to_root()
+                    .collect::<Vec<_>>();
 
                 // Get the root digest, which is at the end (cf. insertion order)
                 let root_digest = directories
diff --git a/tvix/castore/src/directoryservice/sled.rs b/tvix/castore/src/directoryservice/sled.rs
index 9490a49c00..bd98ed6b1e 100644
--- a/tvix/castore/src/directoryservice/sled.rs
+++ b/tvix/castore/src/directoryservice/sled.rs
@@ -8,7 +8,7 @@ use tonic::async_trait;
 use tracing::{instrument, warn};
 
 use super::utils::traverse_directory;
-use super::{ClosureValidator, DirectoryPutter, DirectoryService};
+use super::{DirectoryGraph, DirectoryPutter, DirectoryService, LeavesToRootValidator};
 
 #[derive(Clone)]
 pub struct SledDirectoryService {
@@ -135,7 +135,7 @@ pub struct SledDirectoryPutter {
 
     /// The directories (inside the directory validator) that we insert later,
     /// or None, if they were already inserted.
-    directory_validator: Option<ClosureValidator>,
+    directory_validator: Option<DirectoryGraph<LeavesToRootValidator>>,
 }
 
 #[async_trait]
@@ -145,7 +145,9 @@ impl DirectoryPutter for SledDirectoryPutter {
         match self.directory_validator {
             None => return Err(Error::StorageError("already closed".to_string())),
             Some(ref mut validator) => {
-                validator.add(directory)?;
+                validator
+                    .add(directory)
+                    .map_err(|e| Error::StorageError(e.to_string()))?;
             }
         }
 
@@ -162,7 +164,11 @@ impl DirectoryPutter for SledDirectoryPutter {
                     let tree = self.tree.clone();
                     move || {
                         // retrieve the validated directories.
-                        let directories = validator.finalize()?;
+                        let directories = validator
+                            .validate()
+                            .map_err(|e| Error::StorageError(e.to_string()))?
+                            .drain_leaves_to_root()
+                            .collect::<Vec<_>>();
 
                         // Get the root digest, which is at the end (cf. insertion order)
                         let root_digest = directories
diff --git a/tvix/castore/src/fs/fuse.rs b/tvix/castore/src/fs/fuse/mod.rs
index cd50618ff5..64ef29ed2a 100644
--- a/tvix/castore/src/fs/fuse.rs
+++ b/tvix/castore/src/fs/fuse/mod.rs
@@ -1,8 +1,13 @@
-use std::{io, path::Path, sync::Arc, thread};
+use std::{io, path::Path, sync::Arc};
 
 use fuse_backend_rs::{api::filesystem::FileSystem, transport::FuseSession};
+use parking_lot::Mutex;
+use threadpool::ThreadPool;
 use tracing::{error, instrument};
 
+#[cfg(test)]
+mod tests;
+
 struct FuseServer<FS>
 where
     FS: FileSystem + Sync + Send,
@@ -46,9 +51,12 @@ where
     }
 }
 
+/// Starts a [FileSystem] with the specified number of threads, and provides
+/// functions to unmount it and to wait for it to have completed.
+#[derive(Clone)]
 pub struct FuseDaemon {
-    session: FuseSession,
-    threads: Vec<thread::JoinHandle<()>>,
+    session: Arc<Mutex<FuseSession>>,
+    threads: Arc<ThreadPool>,
 }
 
 impl FuseDaemon {
@@ -56,7 +64,7 @@ impl FuseDaemon {
     pub fn new<FS, P>(
         fs: FS,
         mountpoint: P,
-        threads: usize,
+        num_threads: usize,
         allow_other: bool,
     ) -> Result<Self, io::Error>
     where
@@ -73,40 +81,49 @@ impl FuseDaemon {
         session
             .mount()
             .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
-        let mut join_handles = Vec::with_capacity(threads);
-        for _ in 0..threads {
+
+        // construct a thread pool
+        let threads = threadpool::Builder::new()
+            .num_threads(num_threads)
+            .thread_name("fuse_server".to_string())
+            .build();
+
+        for _ in 0..num_threads {
+            // for each thread requested, create and start a FuseServer accepting requests.
             let mut server = FuseServer {
                 server: server.clone(),
                 channel: session
                     .new_channel()
                     .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?,
             };
-            let join_handle = thread::Builder::new()
-                .name("fuse_server".to_string())
-                .spawn(move || {
-                    let _ = server.start();
-                })?;
-            join_handles.push(join_handle);
+
+            threads.execute(move || {
+                let _ = server.start();
+            });
         }
 
         Ok(FuseDaemon {
-            session,
-            threads: join_handles,
+            session: Arc::new(Mutex::new(session)),
+            threads: Arc::new(threads),
         })
     }
 
+    /// Waits for all threads to finish.
+    #[instrument(skip_all)]
+    pub fn wait(&self) {
+        self.threads.join()
+    }
+
+    /// Sends the unmount command and waits for all threads to finish.
     #[instrument(skip_all, err)]
-    pub fn unmount(&mut self) -> Result<(), io::Error> {
+    pub fn unmount(&self) -> Result<(), io::Error> {
+        // Send the unmount command.
         self.session
+            .lock()
             .umount()
             .map_err(|e| io::Error::new(io::ErrorKind::Other, e.to_string()))?;
 
-        for thread in self.threads.drain(..) {
-            thread.join().map_err(|_| {
-                io::Error::new(io::ErrorKind::Other, "failed to join fuse server thread")
-            })?;
-        }
-
+        self.wait();
         Ok(())
     }
 }
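
Since `FuseDaemon` is now `Clone`, a handle can be moved into a shutdown path while another thread blocks in `wait()`. A sketch under the assumption that `fs` is any `fuse_backend_rs` `FileSystem` (e.g. a `TvixStoreFs`) and `/mnt/tvix` is a placeholder mountpoint:

```rust
use std::io;
use tvix_castore::fs::fuse::FuseDaemon;

fn serve<FS>(fs: FS) -> io::Result<()>
where
    FS: fuse_backend_rs::api::filesystem::FileSystem + Send + Sync + 'static,
{
    // Mount with four server threads; allow_other disabled.
    let daemon = FuseDaemon::new(fs, "/mnt/tvix", 4, false)?;

    let handle = daemon.clone();
    std::thread::spawn(move || {
        // ... upon some shutdown signal:
        let _ = handle.unmount(); // sends umount, then joins the pool
    });

    daemon.wait(); // blocks until all server threads have exited
    Ok(())
}
```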
diff --git a/tvix/castore/src/fs/tests.rs b/tvix/castore/src/fs/fuse/tests.rs
index d6eeb8a411..bcebcf4a72 100644
--- a/tvix/castore/src/fs/tests.rs
+++ b/tvix/castore/src/fs/fuse/tests.rs
@@ -11,7 +11,8 @@ use std::{
 use tempfile::TempDir;
 use tokio_stream::{wrappers::ReadDirStream, StreamExt};
 
-use super::{fuse::FuseDaemon, TvixStoreFs};
+use super::FuseDaemon;
+use crate::fs::{TvixStoreFs, XATTR_NAME_BLOB_DIGEST, XATTR_NAME_DIRECTORY_DIGEST};
 use crate::proto as castorepb;
 use crate::proto::node::Node;
 use crate::{
@@ -247,7 +248,7 @@ async fn mount() {
 
     let (blob_service, directory_service) = gen_svcs();
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         BTreeMap::default(),
@@ -270,7 +271,7 @@ async fn root() {
     let tmpdir = TempDir::new().unwrap();
 
     let (blob_service, directory_service) = gen_svcs();
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         BTreeMap::default(),
@@ -304,7 +305,7 @@ async fn root_with_listing() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -348,7 +349,7 @@ async fn stat_file_at_root() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -385,7 +386,7 @@ async fn read_file_at_root() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -422,7 +423,7 @@ async fn read_large_file_at_root() {
 
     populate_blob_b(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -467,7 +468,7 @@ async fn symlink_readlink() {
 
     populate_symlink(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -514,7 +515,7 @@ async fn read_stat_through_symlink() {
     populate_blob_a(&blob_service, &mut root_nodes).await;
     populate_symlink(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -559,7 +560,7 @@ async fn read_stat_directory() {
 
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -596,7 +597,7 @@ async fn xattr() {
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -614,12 +615,12 @@ async fn xattr() {
         // There should be 1 key, XATTR_NAME_DIRECTORY_DIGEST.
         assert_eq!(1, xattr_names.len(), "there should be 1 xattr name");
         assert_eq!(
-            super::XATTR_NAME_DIRECTORY_DIGEST,
+            XATTR_NAME_DIRECTORY_DIGEST,
             xattr_names.first().unwrap().as_encoded_bytes()
         );
 
         // The value should equal the string-formatted b3 digest.
-        let val = xattr::get(&p, OsStr::from_bytes(super::XATTR_NAME_DIRECTORY_DIGEST))
+        let val = xattr::get(&p, OsStr::from_bytes(XATTR_NAME_DIRECTORY_DIGEST))
             .expect("must succeed")
             .expect("must be some");
         assert_eq!(
@@ -643,12 +644,12 @@ async fn xattr() {
         // There should be 1 key, XATTR_NAME_BLOB_DIGEST.
         assert_eq!(1, xattr_names.len(), "there should be 1 xattr name");
         assert_eq!(
-            super::XATTR_NAME_BLOB_DIGEST,
+            XATTR_NAME_BLOB_DIGEST,
             xattr_names.first().unwrap().as_encoded_bytes()
         );
 
         // The value should equal the string-formatted b3 digest.
-        let val = xattr::get(&p, OsStr::from_bytes(super::XATTR_NAME_BLOB_DIGEST))
+        let val = xattr::get(&p, OsStr::from_bytes(XATTR_NAME_BLOB_DIGEST))
             .expect("must succeed")
             .expect("must be some");
         assert_eq!(
@@ -679,7 +680,7 @@ async fn read_blob_inside_dir() {
 
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -719,7 +720,7 @@ async fn read_blob_deep_inside_dir() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -762,7 +763,7 @@ async fn readdir() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -822,7 +823,7 @@ async fn readdir_deep() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -872,7 +873,7 @@ async fn check_attributes() {
     populate_symlink(&mut root_nodes).await;
     populate_blob_helloworld(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -947,7 +948,7 @@ async fn compare_inodes_directories() {
     populate_directory_with_keep(&blob_service, &directory_service, &mut root_nodes).await;
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -991,7 +992,7 @@ async fn compare_inodes_files() {
 
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1040,7 +1041,7 @@ async fn compare_inodes_symlinks() {
     populate_directory_complicated(&blob_service, &directory_service, &mut root_nodes).await;
     populate_symlink2(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1083,7 +1084,7 @@ async fn read_wrong_paths_in_root() {
 
     populate_blob_a(&blob_service, &mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1138,7 +1139,7 @@ async fn disallow_writes() {
     let (blob_service, directory_service) = gen_svcs();
     let root_nodes = BTreeMap::default();
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1170,7 +1171,7 @@ async fn missing_directory() {
 
     populate_directorynode_without_directory(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
@@ -1218,7 +1219,7 @@ async fn missing_blob() {
 
     populate_filenode_without_blob(&mut root_nodes).await;
 
-    let mut fuse_daemon = do_mount(
+    let fuse_daemon = do_mount(
         blob_service,
         directory_service,
         root_nodes,
diff --git a/tvix/castore/src/fs/mod.rs b/tvix/castore/src/fs/mod.rs
index 826523131f..b565ed60ac 100644
--- a/tvix/castore/src/fs/mod.rs
+++ b/tvix/castore/src/fs/mod.rs
@@ -9,9 +9,6 @@ pub mod fuse;
 #[cfg(feature = "virtiofs")]
 pub mod virtiofs;
 
-#[cfg(test)]
-mod tests;
-
 pub use self::root_nodes::RootNodes;
 use self::{
     file_attr::ROOT_FILE_ATTR,
@@ -46,7 +43,7 @@ use tokio::{
     io::{AsyncReadExt, AsyncSeekExt},
     sync::mpsc,
 };
-use tracing::{debug, error, instrument, warn, Span};
+use tracing::{debug, error, instrument, warn, Instrument as _, Span};
 
 /// This implements a read-only FUSE filesystem for a tvix-store
 /// with the passed [BlobService], [DirectoryService] and [RootNodes].
@@ -400,16 +397,20 @@ where
 
             // This task will run in the background immediately and will exit
             // after the stream ends or if we no longer want any more entries.
-            self.tokio_handle.spawn(async move {
-                let mut stream = root_nodes_provider.list().enumerate();
-                while let Some(node) = stream.next().await {
-                    if tx.send(node).await.is_err() {
-                        // If we get a send error, it means the sync code
-                        // doesn't want any more entries.
-                        break;
+            self.tokio_handle.spawn(
+                async move {
+                    let mut stream = root_nodes_provider.list().enumerate();
+                    while let Some(node) = stream.next().await {
+                        if tx.send(node).await.is_err() {
+                            // If we get a send error, it means the sync code
+                            // doesn't want any more entries.
+                            break;
+                        }
                     }
                 }
-            });
+                // Instrument the task with the current span; this is not done by default.
+                .in_current_span(),
+            );
 
             // Put the rx part into [self.dir_handles].
             // TODO: this will overflow after 2**64 operations,
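
The `.in_current_span()` calls added here and in `grpc.rs` address the same pitfall: `tokio::spawn` does not attach the spawned future to the caller's tracing span, so its events would otherwise lose their context. A standalone sketch (to be run inside a tokio runtime):

```rust
use tracing::{info_span, Instrument as _};

fn spawn_in_span() {
    let span = info_span!("readdir");
    span.in_scope(|| {
        tokio::spawn(
            async {
                tracing::info!("recorded with `readdir` as the parent span");
            }
            // Without this, the task is detached from the caller's span.
            .in_current_span(),
        );
    });
}
```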
diff --git a/tvix/castore/src/import/archive.rs b/tvix/castore/src/import/archive.rs
index 0ebb4a2361..cd5b1290e0 100644
--- a/tvix/castore/src/import/archive.rs
+++ b/tvix/castore/src/import/archive.rs
@@ -1,38 +1,23 @@
 //! Imports from an archive (tarballs)
 
 use std::collections::HashMap;
-use std::io::{Cursor, Write};
-use std::sync::Arc;
 
 use petgraph::graph::{DiGraph, NodeIndex};
 use petgraph::visit::{DfsPostOrder, EdgeRef};
 use petgraph::Direction;
 use tokio::io::AsyncRead;
-use tokio::sync::Semaphore;
-use tokio::task::JoinSet;
 use tokio_stream::StreamExt;
 use tokio_tar::Archive;
-use tokio_util::io::InspectReader;
 use tracing::{instrument, warn, Level};
 
 use crate::blobservice::BlobService;
 use crate::directoryservice::DirectoryService;
 use crate::import::{ingest_entries, IngestionEntry, IngestionError};
 use crate::proto::node::Node;
-use crate::B3Digest;
 
-type TarPathBuf = std::path::PathBuf;
-
-/// Files smaller than this threshold, in bytes, are uploaded to the [BlobService] in the
-/// background.
-///
-/// This is a u32 since we acquire a weighted semaphore using the size of the blob.
-/// [Semaphore::acquire_many_owned] takes a u32, so we need to ensure the size of
-/// the blob can be represented using a u32 and will not cause an overflow.
-const CONCURRENT_BLOB_UPLOAD_THRESHOLD: u32 = 1024 * 1024;
+use super::blobs::{self, ConcurrentBlobUploader};
 
-/// The maximum amount of bytes allowed to be buffered in memory to perform async blob uploads.
-const MAX_TARBALL_BUFFER_SIZE: usize = 128 * 1024 * 1024;
+type TarPathBuf = std::path::PathBuf;
 
 #[derive(Debug, thiserror::Error)]
 pub enum Error {
@@ -57,13 +42,6 @@ pub enum Error {
     #[error("unable to read link name field for {0}: {1}")]
     LinkName(TarPathBuf, std::io::Error),
 
-    #[error("unable to read blob contents for {0}: {1}")]
-    BlobRead(TarPathBuf, std::io::Error),
-
-    // FUTUREWORK: proper error for blob finalize
-    #[error("unable to finalize blob {0}: {1}")]
-    BlobFinalize(TarPathBuf, std::io::Error),
-
     #[error("unsupported tar entry {0} type: {1:?}")]
     EntryType(TarPathBuf, tokio_tar::EntryType),
 
@@ -72,6 +50,9 @@ pub enum Error {
 
     #[error("unexpected number of top level directory entries")]
     UnexpectedNumberOfTopLevelEntries,
+
+    #[error(transparent)]
+    BlobUploadError(#[from] blobs::Error),
 }
 
 /// Ingests elements from the given tar [`Archive`] into the passed [`BlobService`] and
@@ -94,8 +75,7 @@ where
     // In the first phase, collect up all the regular files and symlinks.
     let mut nodes = IngestionEntryGraph::new();
 
-    let semaphore = Arc::new(Semaphore::new(MAX_TARBALL_BUFFER_SIZE));
-    let mut async_blob_uploads: JoinSet<Result<(), Error>> = JoinSet::new();
+    let mut blob_uploader = ConcurrentBlobUploader::new(blob_service);
 
     let mut entries_iter = archive.entries().map_err(Error::Entries)?;
     while let Some(mut entry) = entries_iter.try_next().await.map_err(Error::NextEntry)? {
@@ -110,77 +90,14 @@ where
             tokio_tar::EntryType::Regular
             | tokio_tar::EntryType::GNUSparse
             | tokio_tar::EntryType::Continuous => {
-                let header_size = header
+                let size = header
                     .size()
                     .map_err(|e| Error::Size(tar_path.clone(), e))?;
 
-                // If the blob is small enough, read it off the wire, compute the digest,
-                // and upload it to the [BlobService] in the background.
-                let (size, digest) = if header_size <= CONCURRENT_BLOB_UPLOAD_THRESHOLD as u64 {
-                    let mut buffer = Vec::with_capacity(header_size as usize);
-                    let mut hasher = blake3::Hasher::new();
-                    let mut reader = InspectReader::new(&mut entry, |bytes| {
-                        hasher.write_all(bytes).unwrap();
-                    });
-
-                    // Ensure that we don't buffer into memory until we've acquired a permit.
-                    // This prevents consuming too much memory when performing concurrent
-                    // blob uploads.
-                    let permit = semaphore
-                        .clone()
-                        // This cast is safe because ensure the header_size is less than
-                        // CONCURRENT_BLOB_UPLOAD_THRESHOLD which is a u32.
-                        .acquire_many_owned(header_size as u32)
-                        .await
-                        .unwrap();
-                    let size = tokio::io::copy(&mut reader, &mut buffer)
-                        .await
-                        .map_err(|e| Error::Size(tar_path.clone(), e))?;
-
-                    let digest: B3Digest = hasher.finalize().as_bytes().into();
-
-                    {
-                        let blob_service = blob_service.clone();
-                        let digest = digest.clone();
-                        async_blob_uploads.spawn({
-                            let tar_path = tar_path.clone();
-                            async move {
-                                let mut writer = blob_service.open_write().await;
-
-                                tokio::io::copy(&mut Cursor::new(buffer), &mut writer)
-                                    .await
-                                    .map_err(|e| Error::BlobRead(tar_path.clone(), e))?;
-
-                                let blob_digest = writer
-                                    .close()
-                                    .await
-                                    .map_err(|e| Error::BlobFinalize(tar_path, e))?;
-
-                                assert_eq!(digest, blob_digest, "Tvix bug: blob digest mismatch");
-
-                                // Make sure we hold the permit until we finish writing the blob
-                                // to the [BlobService].
-                                drop(permit);
-                                Ok(())
-                            }
-                        });
-                    }
-
-                    (size, digest)
-                } else {
-                    let mut writer = blob_service.open_write().await;
-
-                    let size = tokio::io::copy(&mut entry, &mut writer)
-                        .await
-                        .map_err(|e| Error::BlobRead(tar_path.clone(), e))?;
-
-                    let digest = writer
-                        .close()
-                        .await
-                        .map_err(|e| Error::BlobFinalize(tar_path.clone(), e))?;
-
-                    (size, digest)
-                };
+                let digest = blob_uploader
+                    .upload(&path, size, &mut entry)
+                    .await
+                    .map_err(Error::BlobUploadError)?;
 
                 let executable = entry
                     .header()
@@ -219,9 +136,7 @@ where
         nodes.add(entry)?;
     }
 
-    while let Some(result) = async_blob_uploads.join_next().await {
-        result.expect("task panicked")?;
-    }
+    blob_uploader.join().await.map_err(Error::BlobUploadError)?;
 
     let root_node = ingest_entries(
         directory_service,
diff --git a/tvix/castore/src/import/blobs.rs b/tvix/castore/src/import/blobs.rs
new file mode 100644
index 0000000000..8135d871d6
--- /dev/null
+++ b/tvix/castore/src/import/blobs.rs
@@ -0,0 +1,177 @@
+use std::{
+    io::{Cursor, Write},
+    sync::Arc,
+};
+
+use tokio::{
+    io::AsyncRead,
+    sync::Semaphore,
+    task::{JoinError, JoinSet},
+};
+use tokio_util::io::InspectReader;
+
+use crate::{blobservice::BlobService, B3Digest, Path, PathBuf};
+
+/// Files smaller than this threshold, in bytes, are uploaded to the [BlobService] in the
+/// background.
+///
+/// This is a u32 since we acquire a weighted semaphore using the size of the blob.
+/// [Semaphore::acquire_many_owned] takes a u32, so we need to ensure the size of
+/// the blob can be represented using a u32 and will not cause an overflow.
+const CONCURRENT_BLOB_UPLOAD_THRESHOLD: u32 = 1024 * 1024;
+
+/// The maximum amount of bytes allowed to be buffered in memory to perform async blob uploads.
+const MAX_BUFFER_SIZE: usize = 128 * 1024 * 1024;
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+    #[error("unable to read blob contents for {0}: {1}")]
+    BlobRead(PathBuf, std::io::Error),
+
+    // FUTUREWORK: proper error for blob finalize
+    #[error("unable to finalize blob {0}: {1}")]
+    BlobFinalize(PathBuf, std::io::Error),
+
+    #[error("unexpected size for {path} wanted: {wanted} got: {got}")]
+    UnexpectedSize {
+        path: PathBuf,
+        wanted: u64,
+        got: u64,
+    },
+
+    #[error("blob upload join error: {0}")]
+    JoinError(#[from] JoinError),
+}
+
+/// The concurrent blob uploader provides a mechanism for concurrently uploading small blobs.
+/// This is useful when ingesting from sources like tarballs and archives which each blob entry
+/// must be read sequentially. Ingesting many small blobs sequentially becomes slow due to
+/// round trip time with the blob service. The concurrent blob uploader will buffer small
+/// blobs in memory and upload them to the blob service in the background.
+///
+/// Once all blobs have been uploaded, make sure to call [ConcurrentBlobUploader::join] to wait
+/// for all background jobs to complete and check for any errors.
+pub struct ConcurrentBlobUploader<BS> {
+    blob_service: BS,
+    upload_tasks: JoinSet<Result<(), Error>>,
+    upload_semaphore: Arc<Semaphore>,
+}
+
+impl<BS> ConcurrentBlobUploader<BS>
+where
+    BS: BlobService + Clone + 'static,
+{
+    /// Creates a new concurrent blob uploader which uploads blobs to the provided
+    /// blob service.
+    pub fn new(blob_service: BS) -> Self {
+        Self {
+            blob_service,
+            upload_tasks: JoinSet::new(),
+            upload_semaphore: Arc::new(Semaphore::new(MAX_BUFFER_SIZE)),
+        }
+    }
+
+    /// Uploads a blob to the blob service. If the blob is small enough it will be read into a buffer
+    /// and uploaded in the background.
+    /// This will read the entirety of the provided reader unless an error occurs, even if the
+    /// blob is uploaded in the background.
+    pub async fn upload<R>(
+        &mut self,
+        path: &Path,
+        expected_size: u64,
+        mut r: R,
+    ) -> Result<B3Digest, Error>
+    where
+        R: AsyncRead + Unpin,
+    {
+        if expected_size < CONCURRENT_BLOB_UPLOAD_THRESHOLD as u64 {
+            let mut buffer = Vec::with_capacity(expected_size as usize);
+            let mut hasher = blake3::Hasher::new();
+            let mut reader = InspectReader::new(&mut r, |bytes| {
+                hasher.write_all(bytes).unwrap();
+            });
+
+            let permit = self
+                .upload_semaphore
+                .clone()
+                // This cast is safe because we ensure expected_size is less than
+                // CONCURRENT_BLOB_UPLOAD_THRESHOLD, which is a u32.
+                .acquire_many_owned(expected_size as u32)
+                .await
+                .unwrap();
+            let size = tokio::io::copy(&mut reader, &mut buffer)
+                .await
+                .map_err(|e| Error::BlobRead(path.into(), e))?;
+            let digest: B3Digest = hasher.finalize().as_bytes().into();
+
+            if size != expected_size {
+                return Err(Error::UnexpectedSize {
+                    path: path.into(),
+                    wanted: expected_size,
+                    got: size,
+                });
+            }
+
+            self.upload_tasks.spawn({
+                let blob_service = self.blob_service.clone();
+                let expected_digest = digest.clone();
+                let path = path.to_owned();
+                let r = Cursor::new(buffer);
+                async move {
+                    let digest = upload_blob(&blob_service, &path, expected_size, r).await?;
+
+                    assert_eq!(digest, expected_digest, "Tvix bug: blob digest mismatch");
+
+                    // Make sure we hold the permit until we finish writing the blob
+                    // to the [BlobService].
+                    drop(permit);
+                    Ok(())
+                }
+            });
+
+            return Ok(digest);
+        }
+
+        upload_blob(&self.blob_service, path, expected_size, r).await
+    }
+
+    /// Waits for all background upload jobs to complete, returning any upload errors.
+    pub async fn join(mut self) -> Result<(), Error> {
+        while let Some(result) = self.upload_tasks.join_next().await {
+            result??;
+        }
+        Ok(())
+    }
+}
+
+async fn upload_blob<BS, R>(
+    blob_service: &BS,
+    path: &Path,
+    expected_size: u64,
+    mut r: R,
+) -> Result<B3Digest, Error>
+where
+    BS: BlobService,
+    R: AsyncRead + Unpin,
+{
+    let mut writer = blob_service.open_write().await;
+
+    let size = tokio::io::copy(&mut r, &mut writer)
+        .await
+        .map_err(|e| Error::BlobRead(path.into(), e))?;
+
+    let digest = writer
+        .close()
+        .await
+        .map_err(|e| Error::BlobFinalize(path.into(), e))?;
+
+    if size != expected_size {
+        return Err(Error::UnexpectedSize {
+            path: path.into(),
+            wanted: expected_size,
+            got: size,
+        });
+    }
+
+    Ok(digest)
+}
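
A minimal usage sketch for the uploader, with a placeholder path and in-memory data (tokio implements `AsyncRead` for `std::io::Cursor`, which the background upload above relies on as well):

```rust
use std::io::Cursor;
use tvix_castore::blobservice::BlobService;
use tvix_castore::import::blobs::{ConcurrentBlobUploader, Error};
use tvix_castore::Path;

async fn ingest_blob<BS>(blob_service: BS, path: &Path, data: Vec<u8>) -> Result<(), Error>
where
    BS: BlobService + Clone + 'static,
{
    let mut uploader = ConcurrentBlobUploader::new(blob_service);

    // Small blobs (under 1 MiB) return their digest immediately and are
    // written to the BlobService in the background.
    let size = data.len() as u64;
    let digest = uploader.upload(path, size, Cursor::new(data)).await?;
    println!("queued blob {}", digest);

    // Surface any errors from the background upload tasks.
    uploader.join().await
}
```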
diff --git a/tvix/castore/src/import/fs.rs b/tvix/castore/src/import/fs.rs
index 9d3ecfe6ab..dc7821b810 100644
--- a/tvix/castore/src/import/fs.rs
+++ b/tvix/castore/src/import/fs.rs
@@ -6,7 +6,11 @@ use std::fs::FileType;
 use std::os::unix::ffi::OsStringExt;
 use std::os::unix::fs::MetadataExt;
 use std::os::unix::fs::PermissionsExt;
+use tokio::io::BufReader;
+use tokio_util::io::InspectReader;
 use tracing::instrument;
+use tracing::Span;
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use walkdir::DirEntry;
 use walkdir::WalkDir;
 
@@ -26,7 +30,7 @@ use super::IngestionError;
 ///
 /// This function will walk the filesystem using `walkdir` and will consume
 /// `O(#number of entries)` space.
-#[instrument(skip(blob_service, directory_service), fields(path), err)]
+#[instrument(skip(blob_service, directory_service), fields(path, indicatif.pb_show=1), err)]
 pub async fn ingest_path<BS, DS, P>(
     blob_service: BS,
     directory_service: DS,
@@ -37,6 +41,10 @@ where
     BS: BlobService + Clone,
     DS: DirectoryService,
 {
+    let span = Span::current();
+    span.pb_set_message(&format!("Ingesting {:?}", path));
+    span.pb_start();
+
     let iter = WalkDir::new(path.as_ref())
         .follow_links(false)
         .follow_root_links(false)
@@ -44,7 +52,18 @@ where
         .into_iter();
 
     let entries = dir_entries_to_ingestion_stream(blob_service, iter, path.as_ref());
-    ingest_entries(directory_service, entries).await
+    ingest_entries(
+        directory_service,
+        entries.inspect({
+            let span = span.clone();
+            move |e| {
+                if e.is_ok() {
+                    span.pb_inc(1)
+                }
+            }
+        }),
+    )
+    .await
 }
 
 /// Converts an iterator of [walkdir::DirEntry]s into a stream of ingestion entries.
@@ -138,7 +157,7 @@ where
 }
 
 /// Uploads the file at the provided [Path] to the [BlobService].
-#[instrument(skip(blob_service), fields(path), err)]
+#[instrument(skip(blob_service), fields(path, indicatif.pb_show=1), err)]
 async fn upload_blob<BS>(
     blob_service: BS,
     path: impl AsRef<std::path::Path>,
@@ -146,16 +165,29 @@ async fn upload_blob<BS>(
 where
     BS: BlobService,
 {
-    let mut file = match tokio::fs::File::open(path.as_ref()).await {
-        Ok(file) => file,
-        Err(e) => return Err(Error::BlobRead(path.as_ref().to_path_buf(), e)),
-    };
+    let span = Span::current();
+    span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+    span.pb_set_message(&format!("Uploading blob for {:?}", path.as_ref()));
+    span.pb_start();
 
-    let mut writer = blob_service.open_write().await;
+    let file = tokio::fs::File::open(path.as_ref())
+        .await
+        .map_err(|e| Error::BlobRead(path.as_ref().to_path_buf(), e))?;
 
-    if let Err(e) = tokio::io::copy(&mut file, &mut writer).await {
-        return Err(Error::BlobRead(path.as_ref().to_path_buf(), e));
-    };
+    let metadata = file
+        .metadata()
+        .await
+        .map_err(|e| Error::Stat(path.as_ref().to_path_buf(), e))?;
+
+    span.pb_set_length(metadata.len());
+    let reader = InspectReader::new(file, |d| {
+        span.pb_inc(d.len() as u64);
+    });
+
+    let mut writer = blob_service.open_write().await;
+    tokio::io::copy(&mut BufReader::new(reader), &mut writer)
+        .await
+        .map_err(|e| Error::BlobRead(path.as_ref().to_path_buf(), e))?;
 
     let digest = writer
         .close()
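
The `InspectReader` wrapper is what drives the transfer-style progress bar: it taps every chunk without altering the stream. A reduced sketch of the pattern, assuming a `tracing-indicatif` layer is installed on the subscriber:

```rust
use tokio_util::io::InspectReader;
use tracing::Span;
use tracing_indicatif::span_ext::IndicatifSpanExt;

async fn copy_with_progress(file: tokio::fs::File, len: u64) -> std::io::Result<u64> {
    let span = Span::current();
    span.pb_set_length(len);

    // Every chunk flowing through the wrapper advances the progress bar.
    let mut reader = InspectReader::new(file, |chunk| {
        span.pb_inc(chunk.len() as u64);
    });
    tokio::io::copy(&mut reader, &mut tokio::io::sink()).await
}
```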
diff --git a/tvix/castore/src/import/mod.rs b/tvix/castore/src/import/mod.rs
index e8b27e469c..a9ac0be6b0 100644
--- a/tvix/castore/src/import/mod.rs
+++ b/tvix/castore/src/import/mod.rs
@@ -14,7 +14,6 @@ use crate::proto::FileNode;
 use crate::proto::SymlinkNode;
 use crate::B3Digest;
 use futures::{Stream, StreamExt};
-
 use tracing::Level;
 
 use std::collections::HashMap;
@@ -24,6 +23,7 @@ mod error;
 pub use error::IngestionError;
 
 pub mod archive;
+pub mod blobs;
 pub mod fs;
 
 /// Ingests [IngestionEntry] from the given stream into a the passed [DirectoryService].
diff --git a/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs b/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs
index 5c1428690c..ce1d2bcd24 100644
--- a/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs
+++ b/tvix/castore/src/proto/grpc_directoryservice_wrapper.rs
@@ -1,4 +1,5 @@
-use crate::directoryservice::ClosureValidator;
+use crate::directoryservice::DirectoryGraph;
+use crate::directoryservice::LeavesToRootValidator;
 use crate::proto;
 use crate::{directoryservice::DirectoryService, B3Digest};
 use futures::stream::BoxStream;
@@ -78,14 +79,20 @@ where
     ) -> Result<Response<proto::PutDirectoryResponse>, Status> {
         let mut req_inner = request.into_inner();
 
-        // We put all Directory messages we receive into ClosureValidator first.
-        let mut validator = ClosureValidator::default();
+        // We put all Directory messages we receive into DirectoryGraph.
+        let mut validator = DirectoryGraph::<LeavesToRootValidator>::default();
         while let Some(directory) = req_inner.message().await? {
-            validator.add(directory)?;
+            validator
+                .add(directory)
+                .map_err(|e| tonic::Status::new(tonic::Code::Internal, e.to_string()))?;
         }
 
         // drain, which validates connectivity too.
-        let directories = validator.finalize()?;
+        let directories = validator
+            .validate()
+            .map_err(|e| tonic::Status::new(tonic::Code::Internal, e.to_string()))?
+            .drain_leaves_to_root()
+            .collect::<Vec<_>>();
 
         let mut directory_putter = self.directory_service.put_multiple_start();
         for directory in directories {
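
The same add/validate/drain sequence now appears in the gRPC wrapper, `SimplePutter`, and the sled and object-store putters. A condensed sketch of the shared pattern, with `String` standing in for the respective error types:

```rust
use tvix_castore::directoryservice::{DirectoryGraph, LeavesToRootValidator};
use tvix_castore::proto::Directory;

fn validate_and_drain(directories: Vec<Directory>) -> Result<Vec<Directory>, String> {
    let mut graph = DirectoryGraph::<LeavesToRootValidator>::default();
    for directory in directories {
        // Rejects directories referencing digests not seen yet.
        graph.add(directory).map_err(|e| e.to_string())?;
    }
    // validate() checks connectivity from the root; draining yields an
    // insertion-friendly order (leaves first here, root first for stores
    // like the object store).
    Ok(graph
        .validate()
        .map_err(|e| e.to_string())?
        .drain_leaves_to_root()
        .collect())
}
```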
diff --git a/tvix/cli/Cargo.toml b/tvix/cli/Cargo.toml
index 1fa2351822..644393a7c4 100644
--- a/tvix/cli/Cargo.toml
+++ b/tvix/cli/Cargo.toml
@@ -14,14 +14,23 @@ tvix-castore = { path = "../castore" }
 tvix-store = { path = "../store", default-features = false, features = []}
 tvix-eval = { path = "../eval" }
 tvix-glue = { path = "../glue" }
+tvix-tracing = { path = "../tracing" }
 bytes = "1.4.0"
 clap = { version = "4.0", features = ["derive", "env"] }
 dirs = "4.0.0"
 rustyline = "10.0.0"
+rnix = "0.11.0"
 thiserror = "1.0.38"
 tokio = "1.28.0"
-tracing = { version = "0.1.37", features = ["max_level_trace", "release_max_level_info"] }
-tracing-subscriber = "0.3.16"
+tracing = "0.1.40"
+tracing-indicatif = "0.3.6"
 
 [dependencies.wu-manber]
 git = "https://github.com/tvlfyi/wu-manber.git"
+
+[target.'cfg(not(target_env = "msvc"))'.dependencies]
+tikv-jemallocator = "0.5"
+
+[features]
+default = []
+tracy = ["tvix-tracing/tracy"]
diff --git a/tvix/cli/src/main.rs b/tvix/cli/src/main.rs
index d66d2ce4cb..686513b77c 100644
--- a/tvix/cli/src/main.rs
+++ b/tvix/cli/src/main.rs
@@ -1,25 +1,36 @@
+mod repl;
+
 use clap::Parser;
-use rustyline::{error::ReadlineError, Editor};
+use repl::Repl;
 use std::rc::Rc;
 use std::{fs, path::PathBuf};
-use tracing::Level;
-use tracing_subscriber::fmt::writer::MakeWriterExt;
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
-use tracing_subscriber::{EnvFilter, Layer};
+use tracing::{instrument, Level, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_build::buildservice;
 use tvix_eval::builtins::impure_builtins;
 use tvix_eval::observer::{DisassemblingObserver, TracingObserver};
-use tvix_eval::{EvalIO, Value};
+use tvix_eval::{ErrorKind, EvalIO, Value};
 use tvix_glue::builtins::add_fetcher_builtins;
 use tvix_glue::builtins::add_import_builtins;
 use tvix_glue::tvix_io::TvixIO;
 use tvix_glue::tvix_store_io::TvixStoreIO;
 use tvix_glue::{builtins::add_derivation_builtins, configure_nix_path};
 
-#[derive(Parser)]
+#[cfg(not(target_env = "msvc"))]
+use tikv_jemallocator::Jemalloc;
+
+#[cfg(not(target_env = "msvc"))]
+#[global_allocator]
+static GLOBAL: Jemalloc = Jemalloc;
+
+#[derive(Parser, Clone)]
 struct Args {
-    #[arg(long)]
-    log_level: Option<Level>,
+    /// A global log level to use when printing logs.
+    /// It's also possible to set `RUST_LOG` according to
+    /// `tracing_subscriber::filter::EnvFilter`, which will always have
+    /// priority.
+    #[arg(long, default_value_t=Level::INFO)]
+    log_level: Level,
 
     /// Path to a script to evaluate
     script: Option<PathBuf>,
@@ -123,16 +134,39 @@ fn init_io_handle(tokio_runtime: &tokio::runtime::Runtime, args: &Args) -> Rc<Tv
     ))
 }
 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
+enum AllowIncomplete {
+    Allow,
+    #[default]
+    RequireComplete,
+}
+
+impl AllowIncomplete {
+    fn allow(&self) -> bool {
+        matches!(self, Self::Allow)
+    }
+}
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+struct IncompleteInput;
+
 /// Interprets the given code snippet, printing out warnings, errors
 /// and the result itself. The return value indicates whether
 /// evaluation succeeded.
+#[instrument(skip_all, fields(indicatif.pb_show=1))]
 fn interpret(
     tvix_store_io: Rc<TvixStoreIO>,
     code: &str,
     path: Option<PathBuf>,
     args: &Args,
     explain: bool,
-) -> bool {
+    allow_incomplete: AllowIncomplete,
+) -> Result<bool, IncompleteInput> {
+    let span = Span::current();
+    span.pb_start();
+    span.pb_set_style(&tvix_tracing::PB_SPINNER_STYLE);
+    span.pb_set_message("Setting up evaluator…");
+
     let mut eval = tvix_eval::Evaluation::new(
         Box::new(TvixIO::new(tvix_store_io.clone() as Rc<dyn EvalIO>)) as Box<dyn EvalIO>,
         true,
@@ -160,9 +194,22 @@ fn interpret(
             eval.runtime_observer = Some(&mut runtime_observer);
         }
 
+        span.pb_set_message("Evaluating…");
         eval.evaluate(code, path)
     };
 
+    if allow_incomplete.allow()
+        && result.errors.iter().any(|err| {
+            matches!(
+                &err.kind,
+                ErrorKind::ParseErrors(pes)
+                    if pes.iter().any(|pe| matches!(pe, rnix::parser::ParseError::UnexpectedEOF))
+            )
+        })
+    {
+        return Err(IncompleteInput);
+    }
+
     if args.display_ast {
         if let Some(ref expr) = result.expr {
             eprintln!("AST: {}", tvix_eval::pretty_print_expr(expr));
@@ -188,7 +235,7 @@ fn interpret(
     }
 
     // inform the caller about any errors
-    result.errors.is_empty()
+    Ok(result.errors.is_empty())
 }
 
 /// Interpret the given code snippet, but only run the Tvix compiler
@@ -232,24 +279,11 @@ fn lint(code: &str, path: Option<PathBuf>, args: &Args) -> bool {
 fn main() {
     let args = Args::parse();
 
-    // configure log settings
-    let level = args.log_level.unwrap_or(Level::INFO);
-
-    let subscriber = tracing_subscriber::registry().with(
-        tracing_subscriber::fmt::Layer::new()
-            .with_writer(std::io::stderr.with_max_level(level))
-            .compact()
-            .with_filter(
-                EnvFilter::builder()
-                    .with_default_directive(level.into())
-                    .from_env()
-                    .expect("invalid RUST_LOG"),
-            ),
-    );
-    subscriber
-        .try_init()
+    let _ = tvix_tracing::TracingBuilder::default()
+        .level(args.log_level)
+        .enable_progressbar()
+        .build()
         .expect("unable to set up tracing subscriber");
-
     let tokio_runtime = tokio::runtime::Runtime::new().expect("failed to setup tokio runtime");
 
     let io_handle = init_io_handle(&tokio_runtime, &args);
@@ -257,11 +291,21 @@ fn main() {
     if let Some(file) = &args.script {
         run_file(io_handle, file.clone(), &args)
     } else if let Some(expr) = &args.expr {
-        if !interpret(io_handle, expr, None, &args, false) {
+        if !interpret(
+            io_handle,
+            expr,
+            None,
+            &args,
+            false,
+            AllowIncomplete::RequireComplete,
+        )
+        .unwrap()
+        {
             std::process::exit(1);
         }
     } else {
-        run_prompt(io_handle, &args)
+        let mut repl = Repl::new();
+        repl.run(io_handle, &args)
     }
 }
 
@@ -274,7 +318,15 @@ fn run_file(io_handle: Rc<TvixStoreIO>, mut path: PathBuf, args: &Args) {
     let success = if args.compile_only {
         lint(&contents, Some(path), args)
     } else {
-        interpret(io_handle, &contents, Some(path), args, false)
+        interpret(
+            io_handle,
+            &contents,
+            Some(path),
+            args,
+            false,
+            AllowIncomplete::RequireComplete,
+        )
+        .unwrap()
     };
 
     if !success {
@@ -289,61 +341,3 @@ fn println_result(result: &Value, raw: bool) {
         println!("=> {} :: {}", result, result.type_of())
     }
 }
-
-fn state_dir() -> Option<PathBuf> {
-    let mut path = dirs::data_dir();
-    if let Some(p) = path.as_mut() {
-        p.push("tvix")
-    }
-    path
-}
-
-fn run_prompt(io_handle: Rc<TvixStoreIO>, args: &Args) {
-    let mut rl = Editor::<()>::new().expect("should be able to launch rustyline");
-
-    if args.compile_only {
-        eprintln!("warning: `--compile-only` has no effect on REPL usage!");
-    }
-
-    let history_path = match state_dir() {
-        // Attempt to set up these paths, but do not hard fail if it
-        // doesn't work.
-        Some(mut path) => {
-            let _ = std::fs::create_dir_all(&path);
-            path.push("history.txt");
-            let _ = rl.load_history(&path);
-            Some(path)
-        }
-
-        None => None,
-    };
-
-    loop {
-        let readline = rl.readline("tvix-repl> ");
-        match readline {
-            Ok(line) => {
-                if line.is_empty() {
-                    continue;
-                }
-
-                rl.add_history_entry(&line);
-
-                if let Some(without_prefix) = line.strip_prefix(":d ") {
-                    interpret(Rc::clone(&io_handle), without_prefix, None, args, true);
-                } else {
-                    interpret(Rc::clone(&io_handle), &line, None, args, false);
-                }
-            }
-            Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break,
-
-            Err(err) => {
-                eprintln!("error: {}", err);
-                break;
-            }
-        }
-    }
-
-    if let Some(path) = history_path {
-        rl.save_history(&path).unwrap();
-    }
-}
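The progress reporting in `interpret()` above follows the `tracing-indicatif`
span pattern; a minimal sketch (the function is illustrative, the calls mirror
the change):

```rust
use tracing::{instrument, Span};
use tracing_indicatif::span_ext::IndicatifSpanExt;

// Spans carrying the `indicatif.pb_show` field get a progress bar attached
// by the layer enabled via TracingBuilder::enable_progressbar() in main().
#[instrument(skip_all, fields(indicatif.pb_show = 1))]
fn long_running_step() {
    let span = Span::current();
    span.pb_start();
    span.pb_set_message("Working…");
    // ... do the actual work; the spinner goes away when the span closes.
}
```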
diff --git a/tvix/cli/src/repl.rs b/tvix/cli/src/repl.rs
new file mode 100644
index 0000000000..5a4830a027
--- /dev/null
+++ b/tvix/cli/src/repl.rs
@@ -0,0 +1,175 @@
+use std::path::PathBuf;
+use std::rc::Rc;
+
+use rustyline::{error::ReadlineError, Editor};
+use tvix_glue::tvix_store_io::TvixStoreIO;
+
+use crate::{interpret, AllowIncomplete, Args, IncompleteInput};
+
+fn state_dir() -> Option<PathBuf> {
+    let mut path = dirs::data_dir();
+    if let Some(p) = path.as_mut() {
+        p.push("tvix")
+    }
+    path
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub enum ReplCommand<'a> {
+    Expr(&'a str),
+    Explain(&'a str),
+    Print(&'a str),
+    Quit,
+    Help,
+}
+
+impl<'a> ReplCommand<'a> {
+    const HELP: &'static str = "
+Welcome to the Tvix REPL!
+
+The following commands are supported:
+
+  <expr>    Evaluate a Nix language expression and print the result, along with its inferred type
+  :d <expr> Evaluate a Nix language expression and print a detailed description of the result
+  :p <expr> Evaluate a Nix language expression and print the result recursively
+  :q        Exit the REPL
+  :?, :h    Display this help text
+";
+
+    pub fn parse(input: &'a str) -> Self {
+        if input.starts_with(':') {
+            if let Some(without_prefix) = input.strip_prefix(":d ") {
+                return Self::Explain(without_prefix);
+            } else if let Some(without_prefix) = input.strip_prefix(":p ") {
+                return Self::Print(without_prefix);
+            }
+
+            let input = input.trim_end();
+            match input {
+                ":q" => return Self::Quit,
+                ":h" | ":?" => return Self::Help,
+                _ => {}
+            }
+        }
+
+        Self::Expr(input)
+    }
+}
+
+#[derive(Debug)]
+pub struct Repl {
+    /// In-progress multiline input, when the input so far doesn't parse as a complete expression
+    multiline_input: Option<String>,
+    rl: Editor<()>,
+}
+
+impl Repl {
+    pub fn new() -> Self {
+        let rl = Editor::<()>::new().expect("should be able to launch rustyline");
+        Self {
+            multiline_input: None,
+            rl,
+        }
+    }
+
+    pub fn run(&mut self, io_handle: Rc<TvixStoreIO>, args: &Args) {
+        if args.compile_only {
+            eprintln!("warning: `--compile-only` has no effect on REPL usage!");
+        }
+
+        let history_path = match state_dir() {
+            // Attempt to set up these paths, but do not hard fail if it
+            // doesn't work.
+            Some(mut path) => {
+                let _ = std::fs::create_dir_all(&path);
+                path.push("history.txt");
+                let _ = self.rl.load_history(&path);
+                Some(path)
+            }
+
+            None => None,
+        };
+
+        loop {
+            let prompt = if self.multiline_input.is_some() {
+                "         > "
+            } else {
+                "tvix-repl> "
+            };
+
+            let readline = self.rl.readline(prompt);
+            match readline {
+                Ok(line) => {
+                    if line.is_empty() {
+                        continue;
+                    }
+
+                    let input = if let Some(mi) = &mut self.multiline_input {
+                        mi.push('\n');
+                        mi.push_str(&line);
+                        mi
+                    } else {
+                        &line
+                    };
+
+                    let res = match ReplCommand::parse(input) {
+                        ReplCommand::Quit => break,
+                        ReplCommand::Help => {
+                            println!("{}", ReplCommand::HELP);
+                            Ok(false)
+                        }
+                        ReplCommand::Expr(input) => interpret(
+                            Rc::clone(&io_handle),
+                            input,
+                            None,
+                            args,
+                            false,
+                            AllowIncomplete::Allow,
+                        ),
+                        ReplCommand::Explain(input) => interpret(
+                            Rc::clone(&io_handle),
+                            input,
+                            None,
+                            args,
+                            true,
+                            AllowIncomplete::Allow,
+                        ),
+                        ReplCommand::Print(input) => interpret(
+                            Rc::clone(&io_handle),
+                            input,
+                            None,
+                            &Args {
+                                strict: true,
+                                ..(args.clone())
+                            },
+                            false,
+                            AllowIncomplete::Allow,
+                        ),
+                    };
+
+                    match res {
+                        Ok(_) => {
+                            self.rl.add_history_entry(input);
+                            self.multiline_input = None;
+                        }
+                        Err(IncompleteInput) => {
+                            if self.multiline_input.is_none() {
+                                self.multiline_input = Some(line);
+                            }
+                        }
+                    }
+                }
+                Err(ReadlineError::Interrupted) | Err(ReadlineError::Eof) => break,
+
+                Err(err) => {
+                    eprintln!("error: {}", err);
+                    break;
+                }
+            }
+        }
+
+        if let Some(path) = history_path {
+            self.rl.save_history(&path).unwrap();
+        }
+    }
+}
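To make the `ReplCommand` grammar above concrete, a few parse expectations
(illustrative only; `demo` is not part of the change):

```rust
use crate::repl::ReplCommand;

fn demo() {
    assert_eq!(ReplCommand::parse(":q"), ReplCommand::Quit);
    assert_eq!(ReplCommand::parse(":h "), ReplCommand::Help); // trailing whitespace is trimmed
    assert_eq!(ReplCommand::parse(":d 1 + 2"), ReplCommand::Explain("1 + 2"));
    assert_eq!(ReplCommand::parse(":p { a = 1; }"), ReplCommand::Print("{ a = 1; }"));
    // Anything else (including unknown `:` input) is a plain expression.
    assert_eq!(ReplCommand::parse("builtins.typeOf 1"), ReplCommand::Expr("builtins.typeOf 1"));
}
```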
diff --git a/tvix/default.nix b/tvix/default.nix
index a3a4d35df6..efbaf54f46 100644
--- a/tvix/default.nix
+++ b/tvix/default.nix
@@ -4,7 +4,6 @@
 let
   # crate override for crates that need protobuf
   protobufDep = prev: (prev.nativeBuildInputs or [ ]) ++ [ pkgs.buildPackages.protobuf ];
-  iconvDarwinDep = lib.optional pkgs.stdenv.isDarwin pkgs.libiconv;
 
   # On Darwin, some crates producing binaries need to be able to link against security.
   darwinDeps = lib.optionals pkgs.stdenv.isDarwin (with pkgs.buildPackages.darwin.apple_sdk.frameworks; [
@@ -13,28 +12,8 @@ let
   ]);
 
   # Load the crate2nix crate tree.
-  crates = import ./Cargo.nix {
-    inherit pkgs;
-    nixpkgs = pkgs.path;
-
-    # Hack to fix Darwin build
-    # See https://github.com/NixOS/nixpkgs/issues/218712
-    buildRustCrateForPkgs = pkgs:
-      if pkgs.stdenv.isDarwin then
-        let
-          buildRustCrate = pkgs.buildRustCrate;
-          buildRustCrate_ = args: buildRustCrate args // { dontStrip = true; };
-          override = o: args: buildRustCrate.override o (args // { dontStrip = true; });
-        in
-        pkgs.makeOverridable override { }
-      else pkgs.buildRustCrate;
-
+  crates = pkgs.callPackage ./Cargo.nix {
     defaultCrateOverrides = pkgs.defaultCrateOverrides // {
-      zstd-sys = prev: {
-        nativeBuildInputs = prev.nativeBuildInputs or [ ];
-        buildInputs = prev.buildInputs or [ ] ++ iconvDarwinDep;
-      };
-
       opentelemetry-proto = prev: {
         nativeBuildInputs = protobufDep prev;
       };
@@ -52,21 +31,34 @@ let
       };
 
       tvix-build = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (lib.fileset.fileFilter (f: f.hasExt "proto") root);
+        };
         PROTO_ROOT = depot.tvix.build.protos.protos;
         nativeBuildInputs = protobufDep prev;
         buildInputs = darwinDeps;
       };
 
       tvix-castore = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (lib.fileset.fileFilter (f: f.hasExt "proto") root);
+        };
         PROTO_ROOT = depot.tvix.castore.protos.protos;
         nativeBuildInputs = protobufDep prev;
       };
 
       tvix-cli = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
         buildInputs = prev.buildInputs or [ ] ++ darwinDeps;
       };
 
       tvix-store = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (lib.fileset.fileFilter (f: f.hasExt "proto") root);
+        };
         PROTO_ROOT = depot.tvix.store.protos.protos;
         nativeBuildInputs = protobufDep prev;
         # fuse-backend-rs uses DiskArbitration framework to handle mount/unmount on Darwin
@@ -74,6 +66,38 @@ let
           ++ darwinDeps
           ++ lib.optional pkgs.stdenv.isDarwin pkgs.buildPackages.darwin.apple_sdk.frameworks.DiskArbitration;
       };
+
+      tvix-eval-builtin-macros = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      tvix-eval = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (root + "/proptest-regressions");
+        };
+      };
+
+      tvix-glue = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc {
+          root = prev.src.origSrc;
+        };
+      };
+
+      tvix-serde = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      tvix-tracing = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc { root = prev.src.origSrc; };
+      };
+
+      nix-compat = prev: {
+        src = depot.tvix.utils.filterRustCrateSrc rec {
+          root = prev.src.origSrc;
+          extraFileset = (root + "/testdata");
+        };
+      };
     };
   };
 
@@ -125,13 +149,13 @@ in
     pkgs.stdenv.mkDerivation {
       inherit src;
 
-      # Important: we include the hash of the Cargo.lock file and
-      # Cargo.nix file in the derivation name.  This forces the FOD
-      # to be rebuilt/reverified whenever either of them changes.
-      name = "tvix-crate2nix-check-" +
-        (builtins.substring 0 8 (builtins.hashFile "sha256" ./Cargo.lock)) +
-        "-" +
-        (builtins.substring 0 8 (builtins.hashFile "sha256" ./Cargo.nix));
+      # Important: we include the hash of all Cargo related files in the derivation name.
+      # This forces the FOD to be rebuilt/re-verified whenever one of them changes.
+      name = "tvix-crate2nix-check-" + builtins.substring 0 8 (builtins.hashString "sha256"
+        (lib.concatMapStrings (f: builtins.hashFile "sha256" f)
+          ([ ./Cargo.toml ./Cargo.lock ] ++ (map (m: ./. + "/${m}/Cargo.toml") (lib.importTOML ./Cargo.toml).workspace.members))
+        )
+      );
 
       nativeBuildInputs = with pkgs; [ git cacert cargo ];
       buildPhase = ''
@@ -197,7 +221,7 @@ in
 
     buildInputs = [
       pkgs.fuse
-    ] ++ iconvDarwinDep;
+    ] ++ lib.optional pkgs.stdenv.isDarwin pkgs.libiconv;
 
     buildPhase = ''
       cargo doc --document-private-items
@@ -233,4 +257,6 @@ in
     "shell"
     "rust-docs"
   ];
+
+  utils = import ./utils.nix { inherit lib depot; };
 }
diff --git a/tvix/docs/book.toml b/tvix/docs/book.toml
index 7318a90233..093b73b8e8 100644
--- a/tvix/docs/book.toml
+++ b/tvix/docs/book.toml
@@ -9,3 +9,17 @@ title = "Tvix Docs"
 # override the /usr/bin/plantuml default
 plantuml-cmd = "plantuml"
 use-data-uris = true
+
+[preprocessor.admonish]
+command = "mdbook-admonish"
+after = ["links"] # ensure `{{#include}}` also gets processed
+assets_version = "3.0.2" # do not edit: managed by `mdbook-admonish install`
+
+[preprocessor.d2]
+command = "d2"
+after = ["links"] # ensure `{{#include}}` also gets processed
+
+[output]
+
+[output.html]
+additional-css = ["./mdbook-admonish.css", "./mdbook-extra.css"]
diff --git a/tvix/docs/default.nix b/tvix/docs/default.nix
index 9fc2f76576..3b102e4b7c 100644
--- a/tvix/docs/default.nix
+++ b/tvix/docs/default.nix
@@ -9,7 +9,10 @@ pkgs.stdenv.mkDerivation {
   src = lib.cleanSource ./.;
 
   nativeBuildInputs = [
+    pkgs.d2
     pkgs.mdbook
+    pkgs.mdbook-admonish
+    pkgs.mdbook-d2
     pkgs.mdbook-plantuml
     pkgs.plantuml
   ];
diff --git a/tvix/docs/mdbook-admonish.css b/tvix/docs/mdbook-admonish.css
new file mode 100644
index 0000000000..45aeff0511
--- /dev/null
+++ b/tvix/docs/mdbook-admonish.css
@@ -0,0 +1,348 @@
+@charset "UTF-8";
+:is(.admonition) {
+  display: flow-root;
+  margin: 1.5625em 0;
+  padding: 0 1.2rem;
+  color: var(--fg);
+  page-break-inside: avoid;
+  background-color: var(--bg);
+  border: 0 solid black;
+  border-inline-start-width: 0.4rem;
+  border-radius: 0.2rem;
+  box-shadow: 0 0.2rem 1rem rgba(0, 0, 0, 0.05), 0 0 0.1rem rgba(0, 0, 0, 0.1);
+}
+@media print {
+  :is(.admonition) {
+    box-shadow: none;
+  }
+}
+:is(.admonition) > * {
+  box-sizing: border-box;
+}
+:is(.admonition) :is(.admonition) {
+  margin-top: 1em;
+  margin-bottom: 1em;
+}
+:is(.admonition) > .tabbed-set:only-child {
+  margin-top: 0;
+}
+html :is(.admonition) > :last-child {
+  margin-bottom: 1.2rem;
+}
+
+a.admonition-anchor-link {
+  display: none;
+  position: absolute;
+  left: -1.2rem;
+  padding-right: 1rem;
+}
+a.admonition-anchor-link:link, a.admonition-anchor-link:visited {
+  color: var(--fg);
+}
+a.admonition-anchor-link:link:hover, a.admonition-anchor-link:visited:hover {
+  text-decoration: none;
+}
+a.admonition-anchor-link::before {
+  content: "§";
+}
+
+:is(.admonition-title, summary.admonition-title) {
+  position: relative;
+  min-height: 4rem;
+  margin-block: 0;
+  margin-inline: -1.6rem -1.2rem;
+  padding-block: 0.8rem;
+  padding-inline: 4.4rem 1.2rem;
+  font-weight: 700;
+  background-color: rgba(68, 138, 255, 0.1);
+  print-color-adjust: exact;
+  -webkit-print-color-adjust: exact;
+  display: flex;
+}
+:is(.admonition-title, summary.admonition-title) p {
+  margin: 0;
+}
+html :is(.admonition-title, summary.admonition-title):last-child {
+  margin-bottom: 0;
+}
+:is(.admonition-title, summary.admonition-title)::before {
+  position: absolute;
+  top: 0.625em;
+  inset-inline-start: 1.6rem;
+  width: 2rem;
+  height: 2rem;
+  background-color: #448aff;
+  print-color-adjust: exact;
+  -webkit-print-color-adjust: exact;
+  mask-image: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"></svg>');
+  -webkit-mask-image: url('data:image/svg+xml;charset=utf-8,<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"></svg>');
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-size: contain;
+  content: "";
+}
+:is(.admonition-title, summary.admonition-title):hover a.admonition-anchor-link {
+  display: initial;
+}
+
+details.admonition > summary.admonition-title::after {
+  position: absolute;
+  top: 0.625em;
+  inset-inline-end: 1.6rem;
+  height: 2rem;
+  width: 2rem;
+  background-color: currentcolor;
+  mask-image: var(--md-details-icon);
+  -webkit-mask-image: var(--md-details-icon);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-size: contain;
+  content: "";
+  transform: rotate(0deg);
+  transition: transform 0.25s;
+}
+details[open].admonition > summary.admonition-title::after {
+  transform: rotate(90deg);
+}
+
+:root {
+  --md-details-icon: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M8.59 16.58 13.17 12 8.59 7.41 10 6l6 6-6 6-1.41-1.42Z'/></svg>");
+}
+
+:root {
+  --md-admonition-icon--admonish-note: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M20.71 7.04c.39-.39.39-1.04 0-1.41l-2.34-2.34c-.37-.39-1.02-.39-1.41 0l-1.84 1.83 3.75 3.75M3 17.25V21h3.75L17.81 9.93l-3.75-3.75L3 17.25z'/></svg>");
+  --md-admonition-icon--admonish-abstract: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M17 9H7V7h10m0 6H7v-2h10m-3 6H7v-2h7M12 3a1 1 0 0 1 1 1 1 1 0 0 1-1 1 1 1 0 0 1-1-1 1 1 0 0 1 1-1m7 0h-4.18C14.4 1.84 13.3 1 12 1c-1.3 0-2.4.84-2.82 2H5a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h14a2 2 0 0 0 2-2V5a2 2 0 0 0-2-2z'/></svg>");
+  --md-admonition-icon--admonish-info: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M13 9h-2V7h2m0 10h-2v-6h2m-1-9A10 10 0 0 0 2 12a10 10 0 0 0 10 10 10 10 0 0 0 10-10A10 10 0 0 0 12 2z'/></svg>");
+  --md-admonition-icon--admonish-tip: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M17.66 11.2c-.23-.3-.51-.56-.77-.82-.67-.6-1.43-1.03-2.07-1.66C13.33 7.26 13 4.85 13.95 3c-.95.23-1.78.75-2.49 1.32-2.59 2.08-3.61 5.75-2.39 8.9.04.1.08.2.08.33 0 .22-.15.42-.35.5-.23.1-.47.04-.66-.12a.58.58 0 0 1-.14-.17c-1.13-1.43-1.31-3.48-.55-5.12C5.78 10 4.87 12.3 5 14.47c.06.5.12 1 .29 1.5.14.6.41 1.2.71 1.73 1.08 1.73 2.95 2.97 4.96 3.22 2.14.27 4.43-.12 6.07-1.6 1.83-1.66 2.47-4.32 1.53-6.6l-.13-.26c-.21-.46-.77-1.26-.77-1.26m-3.16 6.3c-.28.24-.74.5-1.1.6-1.12.4-2.24-.16-2.9-.82 1.19-.28 1.9-1.16 2.11-2.05.17-.8-.15-1.46-.28-2.23-.12-.74-.1-1.37.17-2.06.19.38.39.76.63 1.06.77 1 1.98 1.44 2.24 2.8.04.14.06.28.06.43.03.82-.33 1.72-.93 2.27z'/></svg>");
+  --md-admonition-icon--admonish-success: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='m9 20.42-6.21-6.21 2.83-2.83L9 14.77l9.88-9.89 2.83 2.83L9 20.42z'/></svg>");
+  --md-admonition-icon--admonish-question: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='m15.07 11.25-.9.92C13.45 12.89 13 13.5 13 15h-2v-.5c0-1.11.45-2.11 1.17-2.83l1.24-1.26c.37-.36.59-.86.59-1.41a2 2 0 0 0-2-2 2 2 0 0 0-2 2H8a4 4 0 0 1 4-4 4 4 0 0 1 4 4 3.2 3.2 0 0 1-.93 2.25M13 19h-2v-2h2M12 2A10 10 0 0 0 2 12a10 10 0 0 0 10 10 10 10 0 0 0 10-10c0-5.53-4.5-10-10-10z'/></svg>");
+  --md-admonition-icon--admonish-warning: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M13 14h-2V9h2m0 9h-2v-2h2M1 21h22L12 2 1 21z'/></svg>");
+  --md-admonition-icon--admonish-failure: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M20 6.91 17.09 4 12 9.09 6.91 4 4 6.91 9.09 12 4 17.09 6.91 20 12 14.91 17.09 20 20 17.09 14.91 12 20 6.91z'/></svg>");
+  --md-admonition-icon--admonish-danger: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M11 15H6l7-14v8h5l-7 14v-8z'/></svg>");
+  --md-admonition-icon--admonish-bug: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M14 12h-4v-2h4m0 6h-4v-2h4m6-6h-2.81a5.985 5.985 0 0 0-1.82-1.96L17 4.41 15.59 3l-2.17 2.17a6.002 6.002 0 0 0-2.83 0L8.41 3 7 4.41l1.62 1.63C7.88 6.55 7.26 7.22 6.81 8H4v2h2.09c-.05.33-.09.66-.09 1v1H4v2h2v1c0 .34.04.67.09 1H4v2h2.81c1.04 1.79 2.97 3 5.19 3s4.15-1.21 5.19-3H20v-2h-2.09c.05-.33.09-.66.09-1v-1h2v-2h-2v-1c0-.34-.04-.67-.09-1H20V8z'/></svg>");
+  --md-admonition-icon--admonish-example: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M7 13v-2h14v2H7m0 6v-2h14v2H7M7 7V5h14v2H7M3 8V5H2V4h2v4H3m-1 9v-1h3v4H2v-1h2v-.5H3v-1h1V17H2m2.25-7a.75.75 0 0 1 .75.75c0 .2-.08.39-.21.52L3.12 13H5v1H2v-.92L4 11H2v-1h2.25z'/></svg>");
+  --md-admonition-icon--admonish-quote: url("data:image/svg+xml;charset=utf-8,<svg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24'><path d='M14 17h3l2-4V7h-6v6h3M6 17h3l2-4V7H5v6h3l-2 4z'/></svg>");
+}
+
+:is(.admonition):is(.admonish-note) {
+  border-color: #448aff;
+}
+
+:is(.admonish-note) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(68, 138, 255, 0.1);
+}
+:is(.admonish-note) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #448aff;
+  mask-image: var(--md-admonition-icon--admonish-note);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-note);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-abstract, .admonish-summary, .admonish-tldr) {
+  border-color: #00b0ff;
+}
+
+:is(.admonish-abstract, .admonish-summary, .admonish-tldr) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 176, 255, 0.1);
+}
+:is(.admonish-abstract, .admonish-summary, .admonish-tldr) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00b0ff;
+  mask-image: var(--md-admonition-icon--admonish-abstract);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-abstract);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-info, .admonish-todo) {
+  border-color: #00b8d4;
+}
+
+:is(.admonish-info, .admonish-todo) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 184, 212, 0.1);
+}
+:is(.admonish-info, .admonish-todo) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00b8d4;
+  mask-image: var(--md-admonition-icon--admonish-info);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-info);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-tip, .admonish-hint, .admonish-important) {
+  border-color: #00bfa5;
+}
+
+:is(.admonish-tip, .admonish-hint, .admonish-important) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 191, 165, 0.1);
+}
+:is(.admonish-tip, .admonish-hint, .admonish-important) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00bfa5;
+  mask-image: var(--md-admonition-icon--admonish-tip);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-tip);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-success, .admonish-check, .admonish-done) {
+  border-color: #00c853;
+}
+
+:is(.admonish-success, .admonish-check, .admonish-done) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(0, 200, 83, 0.1);
+}
+:is(.admonish-success, .admonish-check, .admonish-done) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #00c853;
+  mask-image: var(--md-admonition-icon--admonish-success);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-success);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-question, .admonish-help, .admonish-faq) {
+  border-color: #64dd17;
+}
+
+:is(.admonish-question, .admonish-help, .admonish-faq) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(100, 221, 23, 0.1);
+}
+:is(.admonish-question, .admonish-help, .admonish-faq) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #64dd17;
+  mask-image: var(--md-admonition-icon--admonish-question);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-question);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-warning, .admonish-caution, .admonish-attention) {
+  border-color: #ff9100;
+}
+
+:is(.admonish-warning, .admonish-caution, .admonish-attention) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(255, 145, 0, 0.1);
+}
+:is(.admonish-warning, .admonish-caution, .admonish-attention) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #ff9100;
+  mask-image: var(--md-admonition-icon--admonish-warning);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-warning);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-failure, .admonish-fail, .admonish-missing) {
+  border-color: #ff5252;
+}
+
+:is(.admonish-failure, .admonish-fail, .admonish-missing) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(255, 82, 82, 0.1);
+}
+:is(.admonish-failure, .admonish-fail, .admonish-missing) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #ff5252;
+  mask-image: var(--md-admonition-icon--admonish-failure);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-failure);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-danger, .admonish-error) {
+  border-color: #ff1744;
+}
+
+:is(.admonish-danger, .admonish-error) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(255, 23, 68, 0.1);
+}
+:is(.admonish-danger, .admonish-error) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #ff1744;
+  mask-image: var(--md-admonition-icon--admonish-danger);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-danger);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-bug) {
+  border-color: #f50057;
+}
+
+:is(.admonish-bug) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(245, 0, 87, 0.1);
+}
+:is(.admonish-bug) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #f50057;
+  mask-image: var(--md-admonition-icon--admonish-bug);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-bug);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-example) {
+  border-color: #7c4dff;
+}
+
+:is(.admonish-example) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(124, 77, 255, 0.1);
+}
+:is(.admonish-example) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #7c4dff;
+  mask-image: var(--md-admonition-icon--admonish-example);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-example);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+:is(.admonition):is(.admonish-quote, .admonish-cite) {
+  border-color: #9e9e9e;
+}
+
+:is(.admonish-quote, .admonish-cite) > :is(.admonition-title, summary.admonition-title) {
+  background-color: rgba(158, 158, 158, 0.1);
+}
+:is(.admonish-quote, .admonish-cite) > :is(.admonition-title, summary.admonition-title)::before {
+  background-color: #9e9e9e;
+  mask-image: var(--md-admonition-icon--admonish-quote);
+  -webkit-mask-image: var(--md-admonition-icon--admonish-quote);
+  mask-repeat: no-repeat;
+  -webkit-mask-repeat: no-repeat;
+  mask-size: contain;
+  -webkit-mask-repeat: no-repeat;
+}
+
+.navy :is(.admonition) {
+  background-color: var(--sidebar-bg);
+}
+
+.ayu :is(.admonition),
+.coal :is(.admonition) {
+  background-color: var(--theme-hover);
+}
+
+.rust :is(.admonition) {
+  background-color: var(--sidebar-bg);
+  color: var(--sidebar-fg);
+}
+.rust .admonition-anchor-link:link, .rust .admonition-anchor-link:visited {
+  color: var(--sidebar-fg);
+}
diff --git a/tvix/docs/mdbook-extra.css b/tvix/docs/mdbook-extra.css
new file mode 100644
index 0000000000..7a50fdbeed
--- /dev/null
+++ b/tvix/docs/mdbook-extra.css
@@ -0,0 +1,7 @@
+@charset "utf-8";
+
+.hljs-meta.prompt_ {
+	-webkit-user-select: none;
+	-moz-user-select: none;
+	user-select: none;
+}
diff --git a/tvix/docs/src/SUMMARY.md b/tvix/docs/src/SUMMARY.md
index 954abae338..6b3c43035b 100644
--- a/tvix/docs/src/SUMMARY.md
+++ b/tvix/docs/src/SUMMARY.md
@@ -1,14 +1,43 @@
 # Summary
 
+# Welcome
+* [Getting Started](./getting-started.md)
+
+# Contributing
+* [Gerrit](./contributing/gerrit.md)
+* [Email](./contributing/email.md)
+* [Code & Commits](./contributing/code-&-commits.md)
+
 # Tvix
 - [Architecture & data flow](./architecture.md)
 - [TODOs](./TODO.md)
 
+# Evaluator
+- [Compilation of Bindings](./eval/bindings.md)
+- [Builtins](./eval/builtins.md)
+- [Build References](./eval/build-references.md)
+- [Catchable Errors](./eval/catchable-errors.md)
+- [Known Optimisation Potential](./eval/known-optimisation-potential.md)
+- [Language Issues](./eval/language-issues.md)
+- [Attrset Opcodes](./eval/opcodes-attrsets.md)
+- [Recursive attribute sets](./eval/recursive-attrs.md)
+- [VM Loop](./eval/vm-loop.md)
+- [Abandoned](./eval/abandoned/index.md)
+  - [Thread-local VM](./eval/abandoned/thread-local-vm.md)
+
+# Store
+- [Store API](./store/api.md)
+- [BlobStore Chunking](./castore/blobstore-chunking.md)
+- [BlobStore Protocol](./castore/blobstore-protocol.md)
+- [Data Model](./castore/data-model.md)
+- [Why not git trees?](./castore/why-not-git-trees.md)
+
 # Nix
 - [Specification of the Nix Language](./language-spec.md)
 - [Nix language version history](./lang-version.md)
 - [Value Pointer Equality](./value-pointer-equality.md)
-- [Daemon protocol changelog](./nix-daemon/changelog.md)
-- [Daemon protocol logging](./nix-daemon/logging.md)
-- [Daemon protocol operations](./nix-daemon/operations.md)
-- [Daemon protocol serialization](./nix-daemon/serialization.md)
\ No newline at end of file
+- [Daemon Protocol](./nix-daemon/index.md)
+  - [Changelog](./nix-daemon/changelog.md)
+  - [Logging](./nix-daemon/logging.md)
+  - [Operations](./nix-daemon/operations.md)
+  - [Serialization](./nix-daemon/serialization.md)
diff --git a/tvix/docs/src/TODO.md b/tvix/docs/src/TODO.md
index 8fb22ea822..127fb6f4d0 100644
--- a/tvix/docs/src/TODO.md
+++ b/tvix/docs/src/TODO.md
@@ -25,17 +25,69 @@ sure noone is working on this, or has some specific design in mind already.
    with a different level of `--strict`, but the toplevel doc-comment suggests
    it's generic?
 
+### crate2nix for WASM (@kranzes)
+Most of Tvix lives inside a `//tvix` cargo workspace, and we use `crate2nix`
+as a build system to get crate-level build granularity (and caching), keeping
+compile times somewhat manageable.
+
+In the future, for Store/Build, we want to build some more web frontends,
+exposing some data by calling the API. Being able to write these in Rust,
+reusing most of our existing code dealing with the data structures, would
+be preferred.
+
+However, using the crate2nix tooling in combination with compiling for WASM is
+a bumpy ride (and `//web.tvixbolt` works around this by using
+`rustPlatform.buildRustPackage` instead, which invokes cargo inside a FOD):
+
+`buildRustCrate` in nixpkgs (which is used by `crate2nix` under the hood)
+doesn't allow specifying another `--target` explicitly, but relies on the cross
+machinery in nixpkgs exclusively.
+
+`doc/languages-frameworks/rust.section.md` suggests it should be a matter of
+re-instantiating nixpkgs for `wasm32-unknown-unknown`, but that's not recognized
+as a valid architecture.
+The suggested alternative, setting only `rustc.config` to it, seems to get us
+further, but the `Crate.nix` logic for detecting arch-conditional crates doesn't
+seem to cover that case, and tries to build crates (`cpufeatures` for `sha{1,2}`)
+which are supposed to be skipped.
+
+## Perf
+ - String Contexts currently do a lot of indirections (edef)
+   (NixString -> NixStringInner -> HashSet[element] -> NixContextElement -> String -> data)
+   to get to the actual data. We should improve this. There are various ideas;
+   one of them is globally interning all Nix context elements and only keeping
+   indices into that (a rough sketch follows this list). We might need
+   different representations for small or large numbers of context elements,
+   and tooling to reason about the amount of contexts we have.
+ - To calculate NAR size and digest (used for output path calculation of FODs),
+   our current `SimpleRenderer` `NarCalculationService` sequentially asks for
+   one blob after another (and internally these might consist of multiple
+   chunks too).
+   That's a lot of roundtrips, adding up to a lot of useless waiting.
+   While we cannot avoid having to feed all bytes sequentially through sha256,
+   we already know what blobs to fetch and in which order.
+   There should be a way to buffer some "amount of upcoming bytes" in memory,
+   rather than requesting them sequentially.
+   This is somewhat the "spiritual counterpart" to our ingestion code
+   (`ConcurrentBlobUploader`, used by `ingest_nar`), which keeps
+   "some amount of outgoing bytes" in memory.
+
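+A rough sketch of the interning idea above, purely illustrative (the global
+`Mutex`, the names, and `u32` indices are all assumptions, not a design):
+
+```rust
+use std::collections::HashMap;
+use std::sync::{Mutex, OnceLock};
+
+/// Hypothetical global interner: each context element is stored once,
+/// and NixString values would keep only small indices into `elements`.
+#[derive(Default)]
+struct ContextInterner {
+    elements: Vec<String>, // stand-in for NixContextElement
+    indices: HashMap<String, u32>,
+}
+
+impl ContextInterner {
+    fn intern(&mut self, elem: &str) -> u32 {
+        if let Some(&ix) = self.indices.get(elem) {
+            return ix; // already interned, reuse the index
+        }
+        let ix = self.elements.len() as u32;
+        self.elements.push(elem.to_owned());
+        self.indices.insert(elem.to_owned(), ix);
+        ix
+    }
+}
+
+fn interner() -> &'static Mutex<ContextInterner> {
+    static INTERNER: OnceLock<Mutex<ContextInterner>> = OnceLock::new();
+    INTERNER.get_or_init(Mutex::default)
+}
+```
+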
 ### Error cleanup
  - Currently, all services use tvix_castore::Error, which only has two kinds
    (invalid request, storage error), containing an (owned) string.
    This is quite primitive. We should have individual error types for BS, DS, PS.
    Maybe these should have some generics to still be able to carry errors from
    the underlying backend, similar to `IngestionError`.
+   There was an attempt to give PS separate error types (cl/11695), but this
+   ended up very verbose.
+   Every error had to be boxed, and a possible additional message added. Some
+   errors that didn't wrap another underlying error were hard to construct, too
+   (requiring the addition of extra error variants). All of this without even
+   having added proper backtrace support, which would be quite helpful in store
+   hierarchies.
+   `anyhow`'s `.context()` gives us most of this out of the box. Maybe we can
+   use that, using enums rather than `&'static str` as context in some cases?
 
 ## Fixes towards correctness
- - `builtins.toXML` is missing string context. See b/398.
- - `builtins.toXML` self-closing tags need to be configurable in a more granular
-   fashion, requires third-party crate support. See b/399.
  - `rnix` only supports string source files, but `NixString` uses bytes (and Nix
    source code might be no valid UTF-8).
 
@@ -75,10 +127,6 @@ Some more fetcher-related builtins need work:
  - `fetchTree` (hairy, seems there's no proper spec and the URL syntax seems
    subject to change/underdocumented)
 
-### Convert builtins:fetchurl to Fetches
-We need to convert `builtins:fetchurl`-style calls to `builtins.derivation` to
-fetches, not Derivations (tracked in `KnownPaths`).
-
 ### Derivation -> Build
 While we have some support for `structuredAttrs` and `fetchClosure` (at least
 enough to calculate output hashes, aka produce identical ATerm), the code
@@ -101,9 +149,33 @@ logs etc, but this is something requiring a lot of designing.
 
 ### Store composition
  - Combinators: list-by-priority, first-come-first-serve, cache
- - How do describe hierarchies. URL format too one-dimensional, but we might get
-   quite far with a similar "substituters" concept that Nix uses, to construct
-   the composed stores.
+ - Store composition hierarchies (@yuka).
+   - URL format too one-dimensional.
+   - We want to have nice and simple user-facing substituter config, including
+     sensible default wrappers for caching, retries, fallbacks, as well as
+     granular control for power-users.
+   - Current design idea (a rough sketch follows this list):
+     - Have a concept similar to rclone config (a map with store aliases as
+       keys, allowing other parts of the config to refer to stores by their
+       alias).
+       It allows both referring to stores by name and defining them ad-hoc:
+       https://rclone.org/docs/#syntax-of-remote-paths
+     - Each store needs to be aware of its "instance name", so it can be
+       included in logs, metrics, …
+     - Have an "instantiation function" traversing such a config data structure,
+       creating store instances and plugging them together, ultimately returning
+       a dyn …Service interface.
+     - No reconfiguration/reconciliation for now
+     - Making URLs the primary data format would get ugly quite easily (hello
+       multiple layers of escaping!), so best to convert the existing URL
+       syntax to our new config format on the fly and then use one codepath
+       to instantiate/assemble. Similarly, something like the "user-facing
+       substituter config" mentioned above could also be converted to such a
+       config format under the hood.
+     - Maybe add `?cache=$other_url` parameter support to the URL syntax, to
+       easily wrap a store with a caching frontend, using `$other_url` as the
+       "near" store URL.
+
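+A minimal sketch of the config data structure and "instantiation function"
+described above (all names here are hypothetical, not an agreed-upon API):
+
+```rust
+use std::collections::HashMap;
+
+/// Hypothetical composition config: a store is either a reference to a
+/// named alias, an ad-hoc URL, or a combinator wrapping other stores.
+enum StoreConfig {
+    Ref(String),
+    Url(String),
+    Cache { near: Box<StoreConfig>, far: Box<StoreConfig> },
+}
+
+trait BlobService { /* … */ }
+
+/// Walks the config, creating store instances and plugging them together.
+/// `name` is the "instance name", threaded through for logs/metrics.
+fn instantiate(
+    name: &str,
+    cfg: &StoreConfig,
+    aliases: &HashMap<String, StoreConfig>,
+) -> Box<dyn BlobService> {
+    match cfg {
+        StoreConfig::Ref(alias) => instantiate(alias, &aliases[alias], aliases),
+        StoreConfig::Url(url) => todo!("parse {url}, construct backend {name}"),
+        StoreConfig::Cache { near, far } => {
+            let _near = instantiate(&format!("{name}.near"), near, aliases);
+            let _far = instantiate(&format!("{name}.far"), far, aliases);
+            todo!("wrap both in a caching combinator")
+        }
+    }
+}
+```
+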
 ### Store Config
    There's already serde for some store options (bigtable uses `serde_qs`).
    We might also have common options global over all backends, like chunking
@@ -114,7 +186,22 @@ logs etc, but this is something requiring a lot of designing.
 ### BlobService
  - On the trait side, currently there's no way to distinguish reading a
    known-chunk vs blob, so we might be calling `.chunks()` unnecessarily often.
-   At least for the `object_store` backend, this might be a problem.
+   At least for the `object_store` backend, this might be a problem, causing a
+   lot of round-trips. It also doesn't compose well: every implementation of
+   `BlobService` needs to solve both the "holding metadata about chunking info"
+   question and the "storing chunks" question.
+   Design idea (@flokli): split these two concerns into two separate traits
+   (sketched below):
+    - a `ChunkService` dealing with retrieving individual chunks, by their
+      content digests. Chunks are small enough to keep around in contiguous
+      memory.
+    - a `BlobService` storing metadata about blobs.
+
+   Individual stores would not need to implement `BlobReader` anymore, but that
+   could be a global thing with access to the whole store composition layer,
+   which should make it easier to reuse chunks from other backends. Unclear
+   if the write path should be structured the same way. At least for some
+   backends, we want the remote end to be able to decide about chunking.
+
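+   A rough trait-level sketch of that split (signatures are hypothetical,
+   not an agreed-upon API):
+
+   ```rust
+   use bytes::Bytes;
+
+   type B3Digest = [u8; 32]; // stand-in for tvix_castore::B3Digest
+
+   /// Retrieves individual chunks by their content digest. Chunks are
+   /// small enough to keep around in contiguous memory.
+   #[allow(async_fn_in_trait)]
+   trait ChunkService {
+       async fn get_chunk(&self, digest: &B3Digest) -> std::io::Result<Option<Bytes>>;
+   }
+
+   /// Stores only metadata about blobs: which chunks a blob consists of.
+   #[allow(async_fn_in_trait)]
+   trait BlobService {
+       async fn chunks(&self, blob: &B3Digest) -> std::io::Result<Option<Vec<B3Digest>>>;
+   }
+   ```
+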
  - While `object_store` recently got support for `Content-Type`
    (https://github.com/apache/arrow-rs/pull/5650), there's no support on the
    local filesystem yet. We'd need to add support to this (through xattrs).
@@ -134,9 +221,10 @@ logs etc, but this is something requiring a lot of designing.
 - Some work ongoing on the worker operation parsing (griff, picnoir)
 
 ### O11Y
- - gRPC trace propagation (cl/10532)
- - `tracing-tracy` (cl/10952)
- - `[tracing-]indicatif` for progress/log reporting (floklis stash)
- - unification into `tvix-tracing` crate, currently a lot of boilerplate
-   in `tvix-store` CLI entrypoint, and half of the boilerplate copied over to
-   `tvix-cli`.
+ - Maybe drop `--log-level` entirely, and use the `RUST_LOG` env var exclusively?
+   `debug`/`trace` levels across all crates are a bit useless, and `RUST_LOG` can
+   be much more granular…
+ - Trace propagation for HTTP clients too, using
+   https://www.w3.org/TR/trace-context/ or https://www.w3.org/TR/baggage/,
+   whichever makes more sense.
+   Candidates: nix+http(s) protocol, object_store crates.
diff --git a/tvix/docs/src/architecture.md b/tvix/docs/src/architecture.md
index 5e0aa95f1a..02ffdfdcd2 100644
--- a/tvix/docs/src/architecture.md
+++ b/tvix/docs/src/architecture.md
@@ -21,6 +21,15 @@ gRPC. The rest of this document outlines the components.
 
 ### Coordinator
 
+```admonish warning
+Currently there's no separate coordinator. Most of the interaction between
+store, builder and evaluator is done by library code living in the `tvix-glue`
+crate (and `tvix-cli` is a user of it).
+
+Keep in mind that some of the statements below are outdated and reflect
+neither reality nor the desired design anymore.
+```
+
 *Purpose:* The coordinator (in the simplest case, the Tvix CLI tool)
 oversees the flow of a build process and delegates tasks to the right
 subcomponents. For example, if a user runs the equivalent of
diff --git a/tvix/castore/docs/blobstore-chunking.md b/tvix/docs/src/castore/blobstore-chunking.md
index 49bbe69275..d8c3d54b52 100644
--- a/tvix/castore/docs/blobstore-chunking.md
+++ b/tvix/docs/src/castore/blobstore-chunking.md
 This means one only needs the root digest to validate the constructions, and the
 constructions can be sent [separately][bao-spec].
 
 This relieves us from the need of having to encode more granular chunking into
-our data model / identifier upfront, but can make this a mostly a transport/
+our data model / identifier upfront, but can make this mostly a transport/
 storage concern.
 
-For the some more description on the (remote) protocol, check
-`./blobstore-protocol.md`.
+For some more description on the (remote) protocol, check
+[BlobStore Protocol](./blobstore-protocol.md).
 
 #### Logical vs. physical chunking
 
diff --git a/tvix/castore/docs/blobstore-protocol.md b/tvix/docs/src/castore/blobstore-protocol.md
index 048cafc3d8..0dff787ccb 100644
--- a/tvix/castore/docs/blobstore-protocol.md
+++ b/tvix/docs/src/castore/blobstore-protocol.md
@@ -41,7 +41,7 @@ It also puts very little requirements on someone implementing a new
 The gRPC protocol is documented in `../protos/rpc_blobstore.proto`.
 Contrary to the `BlobService` trait, it does not have any options for seeking/
 ranging, as it's more desirable to provide this through chunking (see also
-`./blobstore-chunking.md`).
+[BlobStore Chunking](./blobstore-chunking.md).
 
 ## Composition
 Different `BlobStore` are supposed to be "composed"/"layered" to express
 an additional field in the response, which would allow clients to
 populate their local chunk store in a single roundtrip.
 
 ## Verified Streaming
-As already described in `./docs/blobstore-chunking.md`, the physical chunk
+As already described in [BlobStore Chunking](./blobstore-chunking.md), the physical chunk
 information sent in a `BlobService.Stat()` response is still sufficient to fetch
 in an authenticated fashion.
 
diff --git a/tvix/castore/docs/data-model.md b/tvix/docs/src/castore/data-model.md
index 2df6761aae..7f7e396a22 100644
--- a/tvix/castore/docs/data-model.md
+++ b/tvix/docs/src/castore/data-model.md
@@ -2,7 +2,7 @@
 
 This provides some more notes on the fields used in castore.proto.
 
-See `//tvix/store/docs/api.md` for the full context.
+See [Store API](../store/api.md) for the full context.
 
 ## Directory message
 `Directory` messages use the blake3 hash of their canonical protobuf
@@ -15,8 +15,8 @@ a directory.
 
 All three message types have a `name` field, specifying the (base)name of the
 element (which MUST not contain slashes or null bytes, and MUST not be '.' or '..').
-For reproducibility reasons, the lists MUST be sorted by that name and also
-MUST be unique across all three lists.
+For reproducibility reasons, the lists MUST be sorted by that name and the
+name MUST be unique across all three lists.
 
 In addition to the `name` field, the various *Node messages have the following
 fields:
@@ -27,7 +27,7 @@ A `DirectoryNode` message represents a child directory.
 It has a `digest` field, which points to the identifier of another `Directory`
 message, making a `Directory` a merkle tree (or strictly speaking, a graph, as
 two elements pointing to a child directory with the same contents would point
-to the same `Directory` message.
+to the same `Directory` message).
 
 There's also a `size` field, containing the (total) number of all child
 elements in the referenced `Directory`, which helps for inode calculation.
diff --git a/tvix/castore/docs/why-not-git-trees.md b/tvix/docs/src/castore/why-not-git-trees.md
index fd46252cf5..4a12b4ef55 100644
--- a/tvix/castore/docs/why-not-git-trees.md
+++ b/tvix/docs/src/castore/why-not-git-trees.md
@@ -48,7 +48,7 @@ The git tree object format uses sha1 both for references to other trees and
 hashes of blobs, which isn't really a hash function to fundamentally base
 everything on in 2023.
 The [migration to sha256][git-sha256] also has been dead for some years now,
-and it's unclear how a "blake3" version of this would even look like.
+and it's unclear what a "blake3" version of this would even look like.
 
 [bao]: https://github.com/oconnor663/bao
 [blake3]: https://github.com/BLAKE3-team/BLAKE3
diff --git a/tvix/docs/src/contributing/code-&-commits.md b/tvix/docs/src/contributing/code-&-commits.md
new file mode 100644
index 0000000000..628c124bf1
--- /dev/null
+++ b/tvix/docs/src/contributing/code-&-commits.md
@@ -0,0 +1,76 @@
+# Code & Commits
+
+## Code quality
+
+This one should go without saying — but please ensure that your code quality
+does not fall below the rest of the project. This is of course very subjective,
+but as an example: if you place code that throws away errors into a block in
+which errors are handled properly, your change will be rejected.
+
+
+```admonish hint
+Usually there is a strong correlation between the visual appearance of a code
+block and its quality. This is a simple way to sanity-check your work while
+squinting and keeping some distance from your screen ;-)
+```
+
+
+## Commit messages
+
+The [Angular Conventional Commits][angular] style is the general commit style
+used in the Tvix project. Commit messages should be structured like this:
+
+```admonish example
+    type(scope): Subject line with at most a 72 character length
+
+    Body of the commit message with an empty line between subject and
+    body. This text should explain what the change does and why it has
+    been made, *especially* if it introduces a new feature.
+
+    Relevant issues should be mentioned if they exist.
+```
+
+Where `type` can be one of:
+
+* `feat`: A new feature has been introduced
+* `fix`: An issue of some kind has been fixed
+* `docs`: Documentation or comments have been updated
+* `style`: Formatting changes only
+* `refactor`: Hopefully self-explanatory!
+* `test`: Added missing tests / fixed tests
+* `chore`: Maintenance work
+* `subtree`: Operations involving `git subtree`
+
+And `scope` should refer to some kind of logical grouping inside of the
+project.
+
+It does not make sense to include the full path unless it aids in
+disambiguating. For example, when changing the struct fields in
+`tvix/glue/src/builtins/fetchers.rs` it is enough to write
+`refactor(tvix/glue): …`.
+
+Please take a look at the existing commit log for examples.
+
+
+## Commit content
+
+Multiple changes should be divided into multiple git commits whenever possible.
+Common sense applies.
+
+The fix for a single-line whitespace issue is fine to include in an otherwise
+unrelated commit. Introducing a new feature and refactoring (unrelated) code in
+the same commit is not fine.
+
+`git commit -a` is generally **taboo**; on the command line you should
+prefer `git commit -p` instead.
+
+
+```admonish tip
+Tooling can really help this process. The [lazygit][] TUI or [magit][] for
+Emacs are worth looking into.
+```
+
+
+[angular]: https://www.conventionalcommits.org/en/
+[lazygit]: https://github.com/jesseduffield/lazygit
+[magit]: https://magit.vc
diff --git a/tvix/docs/src/contributing/email.md b/tvix/docs/src/contributing/email.md
new file mode 100644
index 0000000000..238ff388f5
--- /dev/null
+++ b/tvix/docs/src/contributing/email.md
@@ -0,0 +1,33 @@
+# Submitting changes via email
+
+With SSO & local accounts, Tvix hopefully provides you with a low-friction or
+privacy-respecting way to make contributions by means of
+[TVL’s self-hosted Gerrit][gerrit]. However, if you decide otherwise,
+you may submit a patch via email to `depot@tvl.su`, where it will be added to
+Gerrit by a contributor.
+
+Please keep in mind this process is more complicated, requiring extra work from
+both you & us:
+
+* You will need to manually check the Gerrit website for updates, & someone
+  will need to relay potential comments to/from Gerrit for you, as you won’t
+  get emails from Gerrit.
+* New revisions need to be stewarded by someone uploading changes to Gerrit
+  on your behalf.
+* As CLs cannot change owners, if you decide to get a Gerrit account later on,
+  existing CLs need to be abandoned and recreated. This introduces more churn
+  to the review process, since prior discussions are disconnected.
+
+Create an appropriate commit locally, then send it to us using either of these
+options:
+
+* `git format-patch`: This will create a `*.patch` file which you should email to
+  us.
+* `git send-email`: If configured on your system, this will take care of the
+  whole emailing process for you.
+
+The email address is a [public inbox][].
+
+
+[gerrit]: ../contributing/gerrit.html
+[public inbox]: https://inbox.tvl.su/depot/
diff --git a/tvix/docs/src/contributing/gerrit.md b/tvix/docs/src/contributing/gerrit.md
new file mode 100644
index 0000000000..71877d9d4a
--- /dev/null
+++ b/tvix/docs/src/contributing/gerrit.md
@@ -0,0 +1,112 @@
+# Contributing to Tvix
+
+## Registration
+
+Self-hosted [Gerrit](https://www.gerritcodereview.com) & changelists (CLs) are
+the preferred method of contribution & review.
+
+TVL’s Gerrit supports single sign-on (SSO) using a GitHub, GitLab, or
+StackOverflow account.
+
+Additionally, if you would prefer not to use an SSO option or wish to have a
+backup authentication strategy in the event of a downed server or otherwise, we
+recommend setting up a TVL-specific LDAP account. Do note that our IdP
+(Keycloak) sometimes has [problems doing SSO with GitHub][github-problematic],
+so you might get an “unexpected error” while trying to sign in with GitHub;
+that error is not your fault. You can create that account by following these
+instructions:
+
+1. Check out [TVL’s monorepo][check-out-monorepo] if you haven’t already
+2. Be a member of `#tvl` on [hackint][], a communication network.
+3. Generate a user entry using [//web/pwcrypt](https://signup.tvl.fyi/).
+4. Commit that generated user entry to our LDAP server configuration in
+   [ops/users][ops-users] (for an example, see:
+   [CL/2671](https://cl.tvl.fyi/c/depot/+/2671))
+5. If only using LDAP, submit the patch via email (see [<cite>Submitting
+   changes via email</cite>][email])
+
+
+## Gerrit setup
+
+Gerrit uses the concept of change IDs to track commits across rebases and other
+operations that might change their hashes, and link them to unique changes in
+Gerrit.
+
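+As an illustration, a commit message carrying such a tag looks like the
+following (the subject & ID here are made up; the real ID is generated by the
+commit hook installed below):
+
+```
+feat(tvix/eval): support some new feature
+
+Change-Id: I1234567890abcdef1234567890abcdef12345678
+```
+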
+First, [upload your public SSH keys to Gerrit][Gerrit SSH]. Then change your
+remote to point to your newly-registered user over SSH, set the default push
+URL, & install the commit hook for a smoother Gerrit experience:
+
+```console
+$ cd depot
+$ git remote set-url origin "ssh://$USER@code.tvl.fyi:29418/depot"
+$ git config remote.origin.url "ssh://$USER@code.tvl.fyi:29418/depot"
+$ git config remote.origin.push "HEAD:refs/for/canon"
+$ curl -L --compressed https://cl.tvl.fyi/tools/hooks/commit-msg | tee .git/hooks/commit-msg
+…
+if ! mv "${dest}" "$1" ; then
+  echo "cannot mv ${dest} to $1"
+  exit 1
+fi
+$ chmod +x .git/hooks/commit-msg
+```
+
+## Gerrit workflow
+
+The workflow on Gerrit is quite different from the pull request (PR) model that
+many developers are likely to be accustomed to. Instead of pushing changes to
+remote branches, all changes have to be pushed to `refs/for/canon`. For each
+commit that is pushed there, a change request is created automatically.
+
+Every time you create a new commit, the change hook will insert a unique
+`Change-Id` tag into the commit message. Once you are satisfied with the state
+of your commit and want to submit it for review, you push it to a Git `ref`
+called `refs/for/canon`. This designates the commits as changelists (CLs)
+targeted for the `canon` branch.
+
+When you are happy with your changes, push them to the default target:
+
+```console
+$ git commit -m 'docs(REVIEWS): Fixed all the errors in the reviews docs'
+$ git push origin
+```
+
+Or to a special target, such as a work-in-progress CL:
+
+```console
+$ git push origin HEAD:refs/for/canon%wip
+```
+
+During the review process, the reviewer(s) might ask you to make changes. You
+can simply amend[^amend] your commit(s) and push to the same ref again
+(`--force*` flags are not needed); Gerrit will automatically update the
+existing changes:
+
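+```console
+$ git commit --amend
+$ git push origin
+```
+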
+```admonish caution
+Every individual commit will become a separate change. We do *not* squash
+related commits, but instead submit them one by one. Be aware that if you
+expect different behavior and attempt something like an unsquashed subtree
+merge, you will produce a *lot* of CLs. This is strongly discouraged.
+```
+
+```admonish tip
+If you do not have experience with the Gerrit model, consider reading the
+[<cite>Working with Gerrit: An example</cite>][Gerrit Walkthrough] or
+[<cite>Basic Gerrit Walkthrough — For GitHub Users</cite>][github-diff].
+
+It will also be important to read about [attention sets][] to understand how
+your ‘turn’ works, how notifications will be distributed to users through the
+system, as well as the other [attention set rules][attention-set-rules].
+```
+
+
+[check-out-monorepo]: ./getting-started#tvl-monorepo
+[email]: ../contributing/email.html
+[Gerrit SSH]: https://cl.tvl.fyi/settings/#SSHKeys
+[Gerrit walkthrough]: https://gerrit-review.googlesource.com/Documentation/intro-gerrit-walkthrough.html
+[ops-users]: https://code.tvl.fyi/tree/ops/users/default.nix
+[hackint]: https://hackint.org
+[github-diff]: https://gerrit.wikimedia.org/r/Documentation/intro-gerrit-walkthrough-github.html
+[github-problematic]: https://b.tvl.fyi/issues/201
+[attention sets]: https://gerrit-review.googlesource.com/Documentation/user-attention-set.html
+[attention-set-rules]: https://gerrit-review.googlesource.com/Documentation/user-attention-set.html#_rules
+[^amend]: `git commit --amend`
diff --git a/tvix/docs/src/eval/abandoned/index.md b/tvix/docs/src/eval/abandoned/index.md
new file mode 100644
index 0000000000..1cef704d08
--- /dev/null
+++ b/tvix/docs/src/eval/abandoned/index.md
@@ -0,0 +1,3 @@
+# Abandoned ideas
+
+This chapter keeps track of abandoned ideas, and why they were abandoned.
diff --git a/tvix/eval/docs/abandoned/thread-local-vm.md b/tvix/docs/src/eval/abandoned/thread-local-vm.md
index c6a2d5e07e..c6a2d5e07e 100644
--- a/tvix/eval/docs/abandoned/thread-local-vm.md
+++ b/tvix/docs/src/eval/abandoned/thread-local-vm.md
diff --git a/tvix/eval/docs/bindings.md b/tvix/docs/src/eval/bindings.md
index 2b062cb13d..4fb35b6235 100644
--- a/tvix/eval/docs/bindings.md
+++ b/tvix/docs/src/eval/bindings.md
@@ -1,5 +1,4 @@
-Compilation of bindings
-=======================
+# Compilation of bindings
 
 Compilation of Nix bindings is one of the most mind-bending parts of Nix
 evaluation. The implementation of just the compilation is currently almost 1000
@@ -62,7 +61,7 @@ This is done by compiling bindings in several phases:
 
    At the end of this phase, we know the stack slots of all namespaces for
    inheriting from, all values inherited from them, and all values (and
-   optionall keys) of bindings at the current level.
+   optionally keys) of bindings at the current level.
 
    Only statically known keys are actually merged, so any dynamic keys that
    conflict will lead to a "key already defined" error at runtime.
@@ -82,8 +81,10 @@ stack when the scope ends.
 
 ## Moving parts
 
-WARNING: This documents the *current* implementation. If you only care about the
+```admonish caution
+This documents the *current* implementation. If you only care about the
 conceptual aspects, see above.
+```
 
 There's a few types involved:
 
diff --git a/tvix/eval/docs/build-references.md b/tvix/docs/src/eval/build-references.md
index badcea1155..dd53f65d83 100644
--- a/tvix/eval/docs/build-references.md
+++ b/tvix/docs/src/eval/build-references.md
@@ -1,5 +1,4 @@
-Build references in derivations
-===============================
+# Build references in derivations
 
 This document describes how build references are calculated in Tvix. Build
 references are used to determine which store paths should be available to a
@@ -23,8 +22,10 @@ formats:
    This format is used for a special case where a derivation attribute directly
    refers to a derivation path (e.g. by accessing `.drvPath` on a derivation).
 
-   Note: In C++ Nix this case is quite special and actually requires a
-   store-database query during evaluation.
+   ```admonish note
+   In C++ Nix this case is quite special and actually requires a store-database
+   query during evaluation.
+   ```
 
 3. `<path>` - a non-descript store path input, usually a plain source file (e.g.
    from something like `src = ./.` or `src = ./foo.txt`).
@@ -90,8 +91,10 @@ C++ Nix has several builtins that interface directly with string contexts:
 * `unsafeDiscardOutputDependency`: drops dependencies on the *outputs* of a
   `.drv` in the context, passing only the literal `.drv` itself
 
-  Note: This is only used for special test-cases in nixpkgs, and deprecated Nix
+  ```admonish note
+  This is only used for special test-cases in nixpkgs, and deprecated Nix
   commands like `nix-push`.
+  ```
 * `getContext`: returns the string context in serialised form as a Nix attribute
   set
 * `appendContext`: adds a given string context to the string in the same format
@@ -159,8 +162,10 @@ one evaluation should be created in Nix. This metadata needs to be available in
 These queries will need to be asked of the metadata when populating the
 derivation fields.
 
-Note: Depending on how we implement `builtins.placeholder`, it might be useful
+```admonish note
+Depending on how we implement `builtins.placeholder`, it might be useful
 to track created placeholders in this metadata, too.
+```
 
 ### Context builtins
 
diff --git a/tvix/eval/docs/builtins.md b/tvix/docs/src/eval/builtins.md
index dba4c48c65..d9fcd72cca 100644
--- a/tvix/eval/docs/builtins.md
+++ b/tvix/docs/src/eval/builtins.md
@@ -1,5 +1,4 @@
-Nix builtins
-============
+# Nix builtins
 
 Nix has a lot of built-in functions, some of which are accessible in
 the global scope, and some of which are only accessible through the
diff --git a/tvix/eval/docs/catchable-errors.md b/tvix/docs/src/eval/catchable-errors.md
index ce320a9217..ce320a9217 100644
--- a/tvix/eval/docs/catchable-errors.md
+++ b/tvix/docs/src/eval/catchable-errors.md
diff --git a/tvix/eval/docs/known-optimisation-potential.md b/tvix/docs/src/eval/known-optimisation-potential.md
index 0ab185fe1b..11babcb59a 100644
--- a/tvix/eval/docs/known-optimisation-potential.md
+++ b/tvix/docs/src/eval/known-optimisation-potential.md
@@ -1,5 +1,4 @@
-Known Optimisation Potential
-============================
+# Known Optimisation Potential
 
 There are several areas of the Tvix evaluator code base where
 potentially large performance gains can be achieved through
diff --git a/tvix/eval/docs/language-issues.md b/tvix/docs/src/eval/language-issues.md
index 152e6594a1..152e6594a1 100644
--- a/tvix/eval/docs/language-issues.md
+++ b/tvix/docs/src/eval/language-issues.md
diff --git a/tvix/eval/docs/opcodes-attrsets.md b/tvix/docs/src/eval/opcodes-attrsets.md
index 7026f3319d..7026f3319d 100644
--- a/tvix/eval/docs/opcodes-attrsets.md
+++ b/tvix/docs/src/eval/opcodes-attrsets.md
diff --git a/tvix/eval/docs/recursive-attrs.md b/tvix/docs/src/eval/recursive-attrs.md
index c30cfd33e6..5ce1cb2b64 100644
--- a/tvix/eval/docs/recursive-attrs.md
+++ b/tvix/docs/src/eval/recursive-attrs.md
@@ -1,5 +1,4 @@
-Recursive attribute sets
-========================
+# Recursive attribute sets
 
 The construction behaviour of recursive attribute sets is very
 specific, and a bit peculiar.
diff --git a/tvix/eval/docs/vm-loop.md b/tvix/docs/src/eval/vm-loop.md
index 6266d34709..a75c7eec31 100644
--- a/tvix/eval/docs/vm-loop.md
+++ b/tvix/docs/src/eval/vm-loop.md
@@ -1,5 +1,4 @@
-tvix-eval VM loop
-=================
+# tvix-eval VM loop
 
 This document describes the new tvix-eval VM execution loop implemented in the
 chain focusing around cl/8104.
diff --git a/tvix/docs/src/getting-started.md b/tvix/docs/src/getting-started.md
new file mode 100644
index 0000000000..1cbb6de7d4
--- /dev/null
+++ b/tvix/docs/src/getting-started.md
@@ -0,0 +1,59 @@
+# Getting Started
+
+## Getting the code, a developer shell, & building the CLI
+
+Tvix can be built with the standard Rust `cargo build`. A Nix shell is provided
+with the correctly-versioned tooling to build.
+
+### TVL monorepo
+
+```console
+$ git clone https://code.tvl.fyi/depot.git
+$ cd depot
+```
+
+[Direnv][] is highly recommended in order to enable [`mg`][mg], a tool for
+workflows in monorepos. Follow the [Direnv installation
+instructions][direnv-inst]; once it’s set up, continue with:
+
+```console
+$ direnv allow
+$ mg shell //tvix:shell
+$ cd tvix
+$ cargo build
+```
+
+### Or just Tvix
+
+At present, this option isn’t suitable for contributions & lacks the tooling of
+the monorepo, but still provides a `shell.nix` which can be used for building
+the Tvix project.
+
+```console
+$ git clone https://code.tvl.fyi/depot.git:workspace=views/tvix.git
+$ cd tvix
+$ nix-shell
+$ cargo build
+```
+
+
+## Builds & tests
+
+All projects are built using [Nix][] to avoid ‘build pollution’ via the user’s
+local environment.
+
+If you have Nix installed and are contributing to a project tracked in this
+repository, you can usually build the project by calling `nix-build -A
+path.to.project`.
+
+For example, to build a project located at `//tools/foo`, you would call:
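+
+```console
+$ nix-build -A tools.foo
+```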
+
+If the project has tests, check that they still work before submitting your
+change.
+
+
+[Direnv]: https://direnv.net
+[direnv-inst]: https://direnv.net/docs/installation.html
+[Nix]: https://nixos.org/nix/
+[mg]: https://code.tvl.fyi/tree/tools/magrathea
diff --git a/tvix/docs/src/language-spec.md b/tvix/docs/src/language-spec.md
index 0ff1dc491e..b3908b2cf4 100644
--- a/tvix/docs/src/language-spec.md
+++ b/tvix/docs/src/language-spec.md
@@ -1,8 +1,10 @@
 # Specification of the Nix Language
 
-WARNING: This document is a work in progress. Please keep an eye on
+```admonish attention
+This document is a work in progress. Please keep an eye on
 [`topic:nix-spec`](https://cl.tvl.fyi/q/topic:nix-spec) for ongoing
 CLs.
+```
 
 Nix is a general-purpose, functional programming language which this
 document aims to describe.
diff --git a/tvix/docs/src/nix-daemon/index.md b/tvix/docs/src/nix-daemon/index.md
new file mode 100644
index 0000000000..e47c20151e
--- /dev/null
+++ b/tvix/docs/src/nix-daemon/index.md
@@ -0,0 +1,15 @@
+# Nix Daemon Protocol
+
+The Nix Daemon protocol is what's used to communicate with the `nix-daemon`,
+either on the local system (in which case the communication happens via a Unix
+domain socket), or with a remote Nix (in which case it is tunneled over SSH).
+
+It uses a custom binary format which isn't well documented. The subpages here
+serve as in-depth documentation about some of the inner workings, data types,
+etc.
+
+A first implementation of this exists in
+[griff/Nix.rs](https://github.com/griff/Nix.rs/tree/main).
+
+Work is underway to port / factor this out into reusable building blocks in
+the [nix-compat] crate.
diff --git a/tvix/store/docs/api.md b/tvix/docs/src/store/api.md
index 01e72671a7..b20ec0e8bf 100644
--- a/tvix/store/docs/api.md
+++ b/tvix/docs/src/store/api.md
@@ -1,11 +1,10 @@
-tvix-[ca]store API
-==============
+# tvix-[ca]store API
 
 This document outlines the design of the API exposed by tvix-castore and tvix-
 store, as well as other implementations of this store protocol.
 
 This document is meant to be read side-by-side with
-[castore.md](../../tvix-castore/docs/castore.md) which describes the data model
+[Data Model](../castore/data-model.md) which describes the data model
 in more detail.
 
 The store API has four main consumers:
@@ -205,7 +204,7 @@ and potentially a chain of `Directory` objects requested from
 When the desired file is reached, the *BlobService* can be used to read the
 contents of this file, and return it back to the evaluator.
 
-FUTUREWORK: define how importing from symlinks should/does work.
+FUTUREWORK: Define how importing from symlinks should/does work.
 
 Contrary to Nix, this has the advantage of not having to copy all of the
 contents of a store path to the evaluating machine, but really only fetching
diff --git a/tvix/docs/src/value-pointer-equality.md b/tvix/docs/src/value-pointer-equality.md
index d84efcb50c..a4539513ef 100644
--- a/tvix/docs/src/value-pointer-equality.md
+++ b/tvix/docs/src/value-pointer-equality.md
 works in C++ Nix, the only production ready Nix implementation currently available.
 
 ## Nix (Pointer) Equality in C++ Nix
 
-TIP: The summary presented here is up-to-date as of 2023-06-27 and was tested
-with Nix 2.3, 2.11 and 2.15.
+```admonish info
+The summary presented here is up-to-date as of 2023-06-27 and was tested with
+Nix 2.3, 2.11 and 2.15.
+```
 
 ### `EvalState::eqValues` and `ExprOpEq::eval`
 
diff --git a/tvix/eval/Cargo.toml b/tvix/eval/Cargo.toml
index 677ce6ab85..4cf8ea146c 100644
--- a/tvix/eval/Cargo.toml
+++ b/tvix/eval/Cargo.toml
@@ -30,7 +30,6 @@ smol_str = "0.2.0"
 tabwriter = "1.2"
 test-strategy = { version = "0.2.1", optional = true }
 toml = "0.6.0"
-xml-rs = "0.8.4"
 sha2 = "0.10.8"
 sha1 = "0.10.6"
 md-5 = "0.10.6"
@@ -43,6 +42,9 @@ pretty_assertions = "1.2.1"
 rstest = "0.19.0"
 tempfile = "3.3.0"
 
+[target.'cfg(not(target_env = "msvc"))'.dev-dependencies]
+tikv-jemallocator = "0.5"
+
 [features]
 default = ["impure", "arbitrary", "nix_tests"]
 
diff --git a/tvix/eval/benches/eval.rs b/tvix/eval/benches/eval.rs
index 57d4eb71b5..1333f5018c 100644
--- a/tvix/eval/benches/eval.rs
+++ b/tvix/eval/benches/eval.rs
@@ -1,5 +1,11 @@
 use criterion::{black_box, criterion_group, criterion_main, Criterion};
 use itertools::Itertools;
+#[cfg(not(target_env = "msvc"))]
+use tikv_jemallocator::Jemalloc;
+
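+// Use jemalloc as the global allocator on non-MSVC targets, presumably for
+// more consistent allocation performance while benchmarking.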
+#[cfg(not(target_env = "msvc"))]
+#[global_allocator]
+static GLOBAL: Jemalloc = Jemalloc;
 
 fn interpret(code: &str) {
     tvix_eval::Evaluation::new_pure().evaluate(code, None);
diff --git a/tvix/eval/build.rs b/tvix/eval/build.rs
index a9c9a78b06..b37a6e8a0c 100644
--- a/tvix/eval/build.rs
+++ b/tvix/eval/build.rs
@@ -5,5 +5,10 @@ fn main() {
         "cargo:rustc-env=TVIX_CURRENT_SYSTEM={}",
         &env::var("TARGET").unwrap()
     );
-    println!("cargo:rerun-if-changed-env=TARGET")
+    println!("cargo:rerun-if-changed-env=TARGET");
+
+    // Pick up new test case files
+    // https://github.com/la10736/rstest/issues/256
+    println!("cargo:rerun-if-changed=src/tests/nix_tests");
+    println!("cargo:rerun-if-changed=src/tests/tvix_tests")
 }
diff --git a/tvix/eval/default.nix b/tvix/eval/default.nix
index 91661291f7..9dd5875f85 100644
--- a/tvix/eval/default.nix
+++ b/tvix/eval/default.nix
@@ -1,9 +1,16 @@
 # TODO: find a way to build the benchmarks via crate2nix
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
-depot.tvix.crates.workspaceMembers.tvix-eval.build.override {
+(depot.tvix.crates.workspaceMembers.tvix-eval.build.override {
   runTests = true;
 
   # Make C++ Nix available, to compare eval results against.
   testInputs = [ pkgs.nix ];
-}
+}).overrideAttrs (old: rec {
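+  # Expose each feature-powerset variant (and the no-features build) as its
+  # own CI target.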
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "nix_tests" ];
+    override.testInputs = [ pkgs.nix ];
+  };
+})
diff --git a/tvix/eval/src/builtins/mod.rs b/tvix/eval/src/builtins/mod.rs
index 04a0b3dd33..96e9985747 100644
--- a/tvix/eval/src/builtins/mod.rs
+++ b/tvix/eval/src/builtins/mod.rs
@@ -274,9 +274,10 @@ mod pure_builtins {
         list: Value,
     ) -> Result<Value, ErrorKind> {
         let mut separator = separator.to_contextful_str()?;
+
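+        // Move the separator's context (if any) out of the string and fold it
+        // into the result context, instead of cloning and joining.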
         let mut context = NixContext::new();
-        if let Some(sep_context) = separator.context_mut() {
-            context = context.join(sep_context);
+        if let Some(sep_context) = separator.take_context() {
+            context.extend(sep_context.into_iter())
         }
         let list = list.to_list()?;
         let mut res = BString::default();
@@ -296,13 +297,8 @@ mod pure_builtins {
             {
                 Ok(mut s) => {
                     res.push_str(&s);
-                    if let Some(ref mut other_context) = s.context_mut() {
-                        // It is safe to consume the other context here
-                        // because the `list` and `separator` are originally
-                        // moved, here.
-                        // We are not going to use them again
-                        // because the result here is a string.
-                        context = context.join(other_context);
+                    if let Some(other_context) = s.take_context() {
+                        context.extend(other_context.into_iter());
                     }
                 }
                 Err(c) => return Ok(Value::Catchable(Box::new(c))),
@@ -469,15 +465,6 @@ mod pure_builtins {
         toml::from_str(toml_str.to_str()?).map_err(|err| err.into())
     }
 
-    #[builtin("filterSource")]
-    #[allow(non_snake_case)]
-    async fn builtin_filterSource(_co: GenCo, #[lazy] _e: Value) -> Result<Value, ErrorKind> {
-        // TODO: implement for nixpkgs compatibility
-        Ok(Value::from(CatchableErrorKind::UnimplementedFeature(
-            "filterSource".into(),
-        )))
-    }
-
     #[builtin("genericClosure")]
     async fn builtin_generic_closure(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
         let attrs = input.to_attrs()?;
@@ -764,9 +751,8 @@ mod pure_builtins {
         }
 
         if let Some(origin_ctx) = origin.context_mut() {
-            // FUTUREWORK(performance): avoid this clone
-            // and extend in-place.
-            *origin_ctx = origin_ctx.clone().join(&mut ctx_elements.into());
+            origin_ctx.extend(ctx_elements)
+            // TODO: didn't we forget cases where origin had no context?
         }
 
         Ok(origin.into())
@@ -1169,8 +1155,8 @@ mod pure_builtins {
         let mut empty_string_replace = false;
         let mut context = NixContext::new();
 
-        if let Some(string_context) = string.context_mut() {
-            context = context.join(string_context);
+        if let Some(string_context) = string.take_context() {
+            context.extend(string_context.into_iter());
         }
 
         // This can't be implemented using Rust's string.replace() as
@@ -1200,8 +1186,8 @@ mod pure_builtins {
                 if string[i..i + from.len()] == *from {
                     res.push_str(&to);
                     i += from.len();
-                    if let Some(to_ctx) = to.context_mut() {
-                        context = context.join(to_ctx);
+                    if let Some(to_ctx) = to.take_context() {
+                        context.extend(to_ctx.into_iter());
                     }
 
                     // remember if we applied the empty from->to
@@ -1232,8 +1218,8 @@ mod pure_builtins {
 
             if from.is_empty() {
                 res.push_str(&to);
-                if let Some(to_ctx) = to.context_mut() {
-                    context = context.join(to_ctx);
+                if let Some(to_ctx) = to.take_context() {
+                    context.extend(to_ctx.into_iter());
                 }
                 break;
             }
@@ -1291,6 +1277,9 @@ mod pure_builtins {
                 })
                 .collect();
             ret.push_back(Value::List(NixList::from(v)));
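+            // Don't continue matching once the end of the input is reached.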
+            if pos == text.len() {
+                break;
+            }
             pos = thematch.end();
         }
 
@@ -1504,15 +1493,19 @@ mod pure_builtins {
         }
 
         let mut buf: Vec<u8> = vec![];
-        to_xml::value_to_xml(&mut buf, &value)?;
-        Ok(String::from_utf8(buf)?.into())
-    }
-
-    #[builtin("placeholder")]
-    async fn builtin_placeholder(co: GenCo, #[lazy] _x: Value) -> Result<Value, ErrorKind> {
-        generators::emit_warning_kind(&co, WarningKind::NotImplemented("builtins.placeholder"))
-            .await;
-        Ok("<builtins.placeholder-is-not-implemented-in-tvix-yet>".into())
+        let context = to_xml::value_to_xml(&mut buf, &value)?;
+
+        Ok((
+            buf,
+            // FUTUREWORK: We have a distinction between an empty context, and
+            // no context at all. Fix this.
+            if !context.is_empty() {
+                Some(Box::new(context))
+            } else {
+                None
+            },
+        )
+            .into())
     }
 
     #[builtin("trace")]
@@ -1612,10 +1605,16 @@ pub fn pure_builtins() -> Vec<(&'static str, Value)> {
         crate::systems::llvm_triple_to_nix_double(CURRENT_PLATFORM).into(),
     ));
 
-    // TODO: implement for nixpkgs compatibility
     result.push((
         "__curPos",
-        Value::from(CatchableErrorKind::UnimplementedFeature("__curPos".into())),
+        Value::Thunk(Thunk::new_suspended_native(Box::new(move || {
+            // TODO: implement for nixpkgs compatibility
+            Ok(Value::attrs(NixAttrs::from_iter([
+                ("line", 42.into()),
+                ("column", 42.into()),
+                ("file", Value::String("/deep/thought".into())),
+            ])))
+        }))),
     ));
 
     result
@@ -1623,6 +1622,8 @@ pub fn pure_builtins() -> Vec<(&'static str, Value)> {
 
 #[builtins]
 mod placeholder_builtins {
+    use crate::NixContext;
+
     use super::*;
 
     #[builtin("unsafeDiscardStringContext")]
@@ -1671,24 +1672,17 @@ mod placeholder_builtins {
             .to_contextful_str()?;
 
         // If there's any context, we will swap any ... by a path one.
-        if let Some(ctx) = v.context_mut() {
-            let new_context: tvix_eval::NixContext = ctx
-                .iter()
-                .map(|elem| match elem {
-                    // FUTUREWORK(performance): ideally, we should either:
-                    // (a) do interior mutation of the existing context.
-                    // (b) let the structural sharing make those clones cheap.
-                    crate::NixContextElement::Derivation(drv_path) => {
-                        crate::NixContextElement::Plain(drv_path.to_string())
-                    }
-                    elem => elem.clone(),
-                })
-                .collect::<HashSet<_>>()
-                .into();
+        if let Some(c) = v.take_context() {
+            let mut context = NixContext::new();
+            context.extend(c.into_iter().map(|elem| match elem {
+                crate::NixContextElement::Derivation(drv_path) => {
+                    crate::NixContextElement::Plain(drv_path.to_string())
+                }
+                elem => elem.clone(),
+            }));
 
-            *ctx = new_context;
+            return Ok(Value::String(NixString::new_context_from(context, v)));
         }
-
         Ok(Value::from(v))
     }
 
@@ -1709,6 +1703,7 @@ mod placeholder_builtins {
         _name: Value,
         _attrset: Value,
     ) -> Result<Value, ErrorKind> {
+        // TODO: implement for nixpkgs compatibility
         generators::emit_warning_kind(
             &co,
             WarningKind::NotImplemented("builtins.unsafeGetAttrsPos"),
diff --git a/tvix/eval/src/builtins/to_xml.rs b/tvix/eval/src/builtins/to_xml.rs
index bb12cebfc9..093e127fe2 100644
--- a/tvix/eval/src/builtins/to_xml.rs
+++ b/tvix/eval/src/builtins/to_xml.rs
@@ -3,112 +3,98 @@
 //! things in nixpkgs rely on.
 
 use bstr::ByteSlice;
+use std::borrow::Cow;
 use std::{io::Write, rc::Rc};
-use xml::writer::events::XmlEvent;
-use xml::writer::EmitterConfig;
-use xml::writer::EventWriter;
 
-use crate::{ErrorKind, Value};
+use crate::{ErrorKind, NixContext, NixContextElement, Value};
 
 /// Recursively serialise a value to XML. The value *must* have been
 /// deep-forced before being passed to this function.
-pub fn value_to_xml<W: Write>(mut writer: W, value: &Value) -> Result<(), ErrorKind> {
-    let config = EmitterConfig {
-        perform_indent: true,
-        pad_self_closing: true,
-
-        // Nix uses single-quotes *only* in the document declaration,
-        // so we need to write it manually.
-        write_document_declaration: false,
-        ..Default::default()
-    };
-
+/// On success, returns the NixContext.
+pub fn value_to_xml<W: Write>(mut writer: W, value: &Value) -> Result<NixContext, ErrorKind> {
     // Write a literal document declaration, using C++-Nix-style
     // single quotes.
     writeln!(writer, "<?xml version='1.0' encoding='utf-8'?>")?;
 
-    let mut writer = EventWriter::new_with_config(writer, config);
-
-    writer.write(XmlEvent::start_element("expr"))?;
-    value_variant_to_xml(&mut writer, value)?;
-    writer.write(XmlEvent::end_element())?;
+    let mut emitter = XmlEmitter::new(writer);
 
-    // Unwrap the writer to add the final newline that C++ Nix adds.
-    writeln!(writer.into_inner())?;
+    emitter.write_open_tag("expr", &[])?;
+    value_variant_to_xml(&mut emitter, value)?;
+    emitter.write_closing_tag("expr")?;
 
-    Ok(())
+    Ok(emitter.into_context())
 }
 
 fn write_typed_value<W: Write, V: ToString>(
-    w: &mut EventWriter<W>,
-    name: &str,
+    w: &mut XmlEmitter<W>,
+    name_unescaped: &str,
     value: V,
 ) -> Result<(), ErrorKind> {
-    w.write(XmlEvent::start_element(name).attr("value", &value.to_string()))?;
-    w.write(XmlEvent::end_element())?;
+    w.write_self_closing_tag(name_unescaped, &[("value", &value.to_string())])?;
     Ok(())
 }
 
-fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Result<(), ErrorKind> {
+fn value_variant_to_xml<W: Write>(w: &mut XmlEmitter<W>, value: &Value) -> Result<(), ErrorKind> {
     match value {
         Value::Thunk(t) => return value_variant_to_xml(w, &t.value()),
 
         Value::Null => {
-            w.write(XmlEvent::start_element("null"))?;
-            w.write(XmlEvent::end_element())
+            w.write_open_tag("null", &[])?;
+            w.write_closing_tag("null")?;
         }
 
         Value::Bool(b) => return write_typed_value(w, "bool", b),
         Value::Integer(i) => return write_typed_value(w, "int", i),
         Value::Float(f) => return write_typed_value(w, "float", f),
-        Value::String(s) => return write_typed_value(w, "string", s.to_str()?),
+        Value::String(s) => {
+            if let Some(context) = s.context() {
+                w.extend_context(context.iter().cloned());
+            }
+            return write_typed_value(w, "string", s.to_str()?);
+        }
         Value::Path(p) => return write_typed_value(w, "path", p.to_string_lossy()),
 
         Value::List(list) => {
-            w.write(XmlEvent::start_element("list"))?;
+            w.write_open_tag("list", &[])?;
 
             for elem in list.into_iter() {
                 value_variant_to_xml(w, elem)?;
             }
 
-            w.write(XmlEvent::end_element())
+            w.write_closing_tag("list")?;
         }
 
         Value::Attrs(attrs) => {
-            w.write(XmlEvent::start_element("attrs"))?;
+            w.write_open_tag("attrs", &[])?;
 
             for elem in attrs.iter() {
-                w.write(XmlEvent::start_element("attr").attr("name", &elem.0.to_str_lossy()))?;
+                w.write_open_tag("attr", &[("name", &elem.0.to_str_lossy())])?;
                 value_variant_to_xml(w, elem.1)?;
-                w.write(XmlEvent::end_element())?;
+                w.write_closing_tag("attr")?;
             }
 
-            w.write(XmlEvent::end_element())
+            w.write_closing_tag("attrs")?;
         }
 
         Value::Closure(c) => {
-            w.write(XmlEvent::start_element("function"))?;
+            w.write_open_tag("function", &[])?;
 
             match &c.lambda.formals {
                 Some(formals) => {
-                    let mut attrspat = XmlEvent::start_element("attrspat");
+                    let mut attrs: Vec<(&str, &str)> = Vec::with_capacity(2);
                     if formals.ellipsis {
-                        attrspat = attrspat.attr("ellipsis", "1");
+                        attrs.push(("ellipsis", "1"));
                     }
                     if let Some(ref name) = &formals.name {
-                        attrspat = attrspat.attr("name", name.as_str());
+                        attrs.push(("name", name.as_str()));
                     }
 
-                    w.write(attrspat)?;
-
+                    w.write_open_tag("attrspat", &attrs)?;
                     for arg in formals.arguments.iter() {
-                        w.write(
-                            XmlEvent::start_element("attr").attr("name", &arg.0.to_str_lossy()),
-                        )?;
-                        w.write(XmlEvent::end_element())?;
+                        w.write_self_closing_tag("attr", &[("name", &arg.0.to_str_lossy())])?;
                     }
 
-                    w.write(XmlEvent::end_element())?;
+                    w.write_closing_tag("attrspat")?;
                 }
                 None => {
                     // TODO(tazjin): tvix does not currently persist function
@@ -120,17 +106,16 @@ fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Resu
                     // If we don't want to persist the data, we can re-parse the
                     // AST from the spans of the lambda's bytecode and figure it
                     // out that way, but it needs some investigating.
-                    w.write(XmlEvent::start_element("varpat").attr("name", /* fake: */ "x"))?;
-                    w.write(XmlEvent::end_element())?;
+                    w.write_self_closing_tag("varpat", &[("name", /* fake: */ "x")])?;
                 }
             }
 
-            w.write(XmlEvent::end_element())
+            w.write_closing_tag("function")?;
         }
 
         Value::Builtin(_) => {
-            w.write(XmlEvent::start_element("unevaluated"))?;
-            w.write(XmlEvent::end_element())
+            w.write_open_tag("unevaluated", &[])?;
+            w.write_closing_tag("unevaluated")?;
         }
 
         Value::AttrNotFound
@@ -148,7 +133,189 @@ fn value_variant_to_xml<W: Write>(w: &mut EventWriter<W>, value: &Value) -> Resu
         Value::Catchable(_) => {
             panic!("tvix bug: value_to_xml() called on a value which had not been deep-forced")
         }
-    }?;
+    };
 
     Ok(())
 }
+
+/// A simple-stupid XML emitter, which implements only the subset needed for byte-by-byte compat with C++ nix’ `builtins.toXML`.
+struct XmlEmitter<W> {
+    /// The current indentation
+    cur_indent: usize,
+    writer: W,
+    context: NixContext,
+}
+
+impl<W: Write> XmlEmitter<W> {
+    pub fn new(writer: W) -> Self {
+        XmlEmitter {
+            cur_indent: 0,
+            writer,
+            context: Default::default(),
+        }
+    }
+
+    /// Write an open tag with the given name (which is not escaped!)
+    /// and attributes (Keys are not escaped! Only attribute values are.)
+    pub fn write_open_tag(
+        &mut self,
+        name_unescaped: &str,
+        attrs: &[(&str, &str)],
+    ) -> std::io::Result<()> {
+        self.add_indent()?;
+        self.writer.write_all(b"<")?;
+        self.writer.write_all(name_unescaped.as_bytes())?;
+        self.write_attrs_escape_vals(attrs)?;
+        self.writer.write_all(b">\n")?;
+        self.cur_indent += 2;
+        Ok(())
+    }
+
+    /// Write a self-closing open tag with the given name (which is not escaped!)
+    /// and attributes (Keys are not escaped! Only attribute values are.)
+    pub fn write_self_closing_tag(
+        &mut self,
+        name_unescaped: &str,
+        attrs: &[(&str, &str)],
+    ) -> std::io::Result<()> {
+        self.add_indent()?;
+        self.writer.write_all(b"<")?;
+        self.writer.write_all(name_unescaped.as_bytes())?;
+        self.write_attrs_escape_vals(attrs)?;
+        self.writer.write_all(b" />\n")?;
+        Ok(())
+    }
+
+    /// Write a closing tag with the given name (which is not escaped!)
+    pub fn write_closing_tag(&mut self, name_unescaped: &str) -> std::io::Result<()> {
+        self.cur_indent -= 2;
+        self.add_indent()?;
+        self.writer.write_all(b"</")?;
+        self.writer.write_all(name_unescaped.as_bytes())?;
+        self.writer.write_all(b">\n")?;
+        Ok(())
+    }
+
+    #[inline]
+    fn add_indent(&mut self) -> std::io::Result<()> {
+        self.writer.write_all(&b" ".repeat(self.cur_indent))
+    }
+
+    /// Write an attribute list
+    fn write_attrs_escape_vals(&mut self, attrs: &[(&str, &str)]) -> std::io::Result<()> {
+        for (name, val) in attrs {
+            self.writer.write_all(b" ")?;
+            self.writer.write_all(name.as_bytes())?;
+            self.writer.write_all(br#"=""#)?;
+            self.writer
+                .write_all(Self::escape_attr_value(val).as_bytes())?;
+            self.writer.write_all(b"\"")?;
+        }
+        Ok(())
+    }
+
+    /// Escape the given attribute value, making sure we only actually clone the string if we needed to replace something.
+    fn escape_attr_value(s: &str) -> Cow<str> {
+        let mut last_escape: usize = 0;
+        let mut res: Cow<str> = Cow::Borrowed("");
+        // iterating via char_indices gives us the ability to index the original string slice at character boundaries
+        for (idx, c) in s.char_indices() {
+            match Self::should_escape_char(c) {
+                None => {}
+                Some(new) => {
+                    // add characters since the last escape we did
+                    res += &s[last_escape..idx];
+                    // add the escaped value
+                    res += new;
+                    last_escape = idx + 1;
+                }
+            }
+        }
+        // we did not need to escape anything, so borrow original string
+        if last_escape == 0 {
+            Cow::Borrowed(s)
+        } else {
+            // add the remaining characters
+            res += &s[last_escape..];
+            res
+        }
+    }
+
+    fn should_escape_char(c: char) -> Option<&'static str> {
+        match c {
+            '<' => Some("&lt;"),
+            '>' => Some("&gt;"),
+            '"' => Some("&quot;"),
+            '\'' => Some("&apos;"),
+            '&' => Some("&amp;"),
+            '\n' => Some("&#xA;"),
+            '\r' => Some("&#xD;"),
+            _ => None,
+        }
+    }
+
+    /// Extends the existing context with more context elements.
+    fn extend_context<T>(&mut self, iter: T)
+    where
+        T: IntoIterator<Item = NixContextElement>,
+    {
+        self.context.extend(iter)
+    }
+
+    /// Consumes [Self] and returns the [NixContext] collected.
+    fn into_context(self) -> NixContext {
+        self.context
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use bytes::buf::Writer;
+    use pretty_assertions::assert_eq;
+
+    use crate::builtins::to_xml::XmlEmitter;
+    use std::borrow::Cow;
+
+    #[test]
+    fn xml_gen() {
+        let mut buf = Vec::new();
+        let mut x = XmlEmitter::new(&mut buf);
+        x.write_open_tag("hello", &[("hi", "it’s me"), ("no", "<escape>")])
+            .unwrap();
+        x.write_self_closing_tag("self-closing", &[("tag", "yay")])
+            .unwrap();
+        x.write_closing_tag("hello").unwrap();
+
+        assert_eq!(
+            std::str::from_utf8(&buf).unwrap(),
+            r##"<hello hi="it’s me" no="&lt;escape&gt;">
+  <self-closing tag="yay" />
+</hello>
+"##
+        );
+    }
+
+    #[test]
+    fn xml_escape() {
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("ab<>c&de") {
+            Cow::Owned(s) => assert_eq!(s, "ab&lt;&gt;c&amp;de".to_string(), "escape stuff"),
+            Cow::Borrowed(s) => panic!("s should be owned {}", s),
+        }
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("") {
+            Cow::Borrowed(s) => assert_eq!(s, "", "empty escape is borrowed"),
+            Cow::Owned(s) => panic!("s should be borrowed {}", s),
+        }
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("hi!ŷbla") {
+            Cow::Borrowed(s) => assert_eq!(s, "hi!ŷbla", "no escape is borrowed"),
+            Cow::Owned(s) => panic!("s should be borrowed {}", s),
+        }
+        match XmlEmitter::<Writer<Vec<u8>>>::escape_attr_value("hi!<ŷ>bla") {
+            Cow::Owned(s) => assert_eq!(
+                s,
+                "hi!&lt;ŷ&gt;bla".to_string(),
+                "multi-byte chars are correctly used"
+            ),
+            Cow::Borrowed(s) => panic!("s should be owned {}", s),
+        }
+    }
+}
diff --git a/tvix/eval/src/errors.rs b/tvix/eval/src/errors.rs
index 652252dadf..e32cfa04ed 100644
--- a/tvix/eval/src/errors.rs
+++ b/tvix/eval/src/errors.rs
@@ -10,7 +10,6 @@ use std::{fmt::Debug, fmt::Display, num::ParseIntError};
 use codemap::{File, Span};
 use codemap_diagnostic::{ColorConfig, Diagnostic, Emitter, Level, SpanLabel, SpanStyle};
 use smol_str::SmolStr;
-use xml::writer::Error as XmlError;
 
 use crate::spans::ToSpan;
 use crate::value::{CoercionKind, NixString};
@@ -185,8 +184,11 @@ pub enum ErrorKind {
     /// Errors converting TOML to a value
     FromTomlError(String),
 
+    /// An unexpected argument was supplied to a builtin
+    UnexpectedArgumentBuiltin(NixString),
+
     /// An unexpected argument was supplied to a function that takes formal parameters
-    UnexpectedArgument {
+    UnexpectedArgumentFormals {
         arg: NixString,
         formals_span: Span,
     },
@@ -194,9 +196,6 @@ pub enum ErrorKind {
     /// Invalid UTF-8 was encountered somewhere
     Utf8,
 
-    /// Errors while serialising to XML.
-    Xml(Rc<XmlError>),
-
     /// Variant for errors that bubble up to eval from other Tvix
     /// components.
     TvixError(Rc<dyn error::Error>),
@@ -248,7 +247,6 @@ impl error::Error for Error {
                 errors.first().map(|e| e as &dyn error::Error)
             }
             ErrorKind::IO { error, .. } => Some(error.as_ref()),
-            ErrorKind::Xml(error) => Some(error.as_ref()),
             ErrorKind::TvixError(error) => Some(error.as_ref()),
             _ => None,
         }
@@ -285,12 +283,6 @@ impl From<bstr::FromUtf8Error> for ErrorKind {
     }
 }
 
-impl From<XmlError> for ErrorKind {
-    fn from(err: XmlError) -> Self {
-        Self::Xml(Rc::new(err))
-    }
-}
-
 impl From<io::Error> for ErrorKind {
     fn from(e: io::Error) -> Self {
         ErrorKind::IO {
@@ -498,7 +490,11 @@ to a missing value in the attribute set(s) included via `with`."#,
                 write!(f, "Error converting TOML to a Nix value: {msg}")
             }
 
-            ErrorKind::UnexpectedArgument { arg, .. } => {
+            ErrorKind::UnexpectedArgumentBuiltin(arg) => {
+                write!(f, "Unexpected agrument `{arg}` passed to builtin",)
+            }
+
+            ErrorKind::UnexpectedArgumentFormals { arg, .. } => {
                 write!(f, "Unexpected argument `{arg}` supplied to function",)
             }
 
@@ -506,8 +502,6 @@ to a missing value in the attribute set(s) included via `with`."#,
                 write!(f, "Invalid UTF-8 in string")
             }
 
-            ErrorKind::Xml(error) => write!(f, "failed to serialise to XML: {error}"),
-
             ErrorKind::TvixError(inner_error) => {
                 write!(f, "{inner_error}")
             }
@@ -791,7 +785,8 @@ impl Error {
             ErrorKind::DuplicateAttrsKey { .. } => "in this attribute set",
             ErrorKind::InvalidAttributeName(_) => "in this attribute set",
             ErrorKind::RelativePathResolution(_) => "in this path literal",
-            ErrorKind::UnexpectedArgument { .. } => "in this function call",
+            ErrorKind::UnexpectedArgumentBuiltin { .. } => "while calling this builtin",
+            ErrorKind::UnexpectedArgumentFormals { .. } => "in this function call",
             ErrorKind::UnexpectedContext => "in this string",
 
             // The spans for some errors don't have any more descriptive stuff
@@ -823,7 +818,6 @@ impl Error {
             | ErrorKind::JsonError(_)
             | ErrorKind::NotSerialisableToJson(_)
             | ErrorKind::FromTomlError(_)
-            | ErrorKind::Xml(_)
             | ErrorKind::Utf8
             | ErrorKind::TvixError(_)
             | ErrorKind::TvixBug { .. }
@@ -867,15 +861,15 @@ impl Error {
             ErrorKind::ImportCompilerError { .. } => "E028",
             ErrorKind::IO { .. } => "E029",
             ErrorKind::JsonError { .. } => "E030",
-            ErrorKind::UnexpectedArgument { .. } => "E031",
+            ErrorKind::UnexpectedArgumentFormals { .. } => "E031",
             ErrorKind::RelativePathResolution(_) => "E032",
             ErrorKind::DivisionByZero => "E033",
-            ErrorKind::Xml(_) => "E034",
             ErrorKind::FromTomlError(_) => "E035",
             ErrorKind::NotSerialisableToJson(_) => "E036",
             ErrorKind::UnexpectedContext => "E037",
             ErrorKind::Utf8 => "E038",
             ErrorKind::UnknownHashType(_) => "E039",
+            ErrorKind::UnexpectedArgumentBuiltin { .. } => "E040",
 
             // Special error code for errors from other Tvix
             // components. We may want to introduce a code namespacing
@@ -913,7 +907,7 @@ impl Error {
                 spans_for_parse_errors(&file, errors)
             }
 
-            ErrorKind::UnexpectedArgument { formals_span, .. } => {
+            ErrorKind::UnexpectedArgumentFormals { formals_span, .. } => {
                 vec![
                     SpanLabel {
                         label: self.span_label(),
diff --git a/tvix/eval/src/io.rs b/tvix/eval/src/io.rs
index f775077af8..9acfd6eeba 100644
--- a/tvix/eval/src/io.rs
+++ b/tvix/eval/src/io.rs
@@ -16,14 +16,16 @@
 //! how store paths are opened and so on.
 
 use std::{
-    fs::File,
     io,
     path::{Path, PathBuf},
 };
 
-#[cfg(target_family = "unix")]
+#[cfg(all(target_family = "unix", feature = "impure"))]
 use std::os::unix::ffi::OsStringExt;
 
+#[cfg(feature = "impure")]
+use std::fs::File;
+
 /// Types of files as represented by `builtins.readDir` in Nix.
 #[derive(Debug)]
 pub enum FileType {
@@ -52,6 +54,10 @@ pub trait EvalIO {
     /// Open the file at the specified path to a `io::Read`.
     fn open(&self, path: &Path) -> io::Result<Box<dyn io::Read>>;
 
+    /// Return the [FileType] of the given path, or an error if it doesn't
+    /// exist.
+    fn file_type(&self, path: &Path) -> io::Result<FileType>;
+
     /// Read the directory at the specified path and return the names
     /// of its entries associated with their [`FileType`].
     ///
@@ -99,6 +105,20 @@ impl EvalIO for StdIO {
         Ok(Box::new(File::open(path)?))
     }
 
+    fn file_type(&self, path: &Path) -> io::Result<FileType> {
+        // Opening the file and calling metadata() would follow symlinks (so the
+        // symlink branch below could never be hit); query the path directly via
+        // symlink_metadata (lstat) instead.
+        let file_type = std::fs::symlink_metadata(path)?.file_type();
+
+        Ok(if file_type.is_dir() {
+            FileType::Directory
+        } else if file_type.is_file() {
+            FileType::Regular
+        } else if file_type.is_symlink() {
+            FileType::Symlink
+        } else {
+            FileType::Unknown
+        })
+    }
+
     fn read_dir(&self, path: &Path) -> io::Result<Vec<(bytes::Bytes, FileType)>> {
         let mut result = vec![];
 
@@ -148,6 +168,13 @@ impl EvalIO for DummyIO {
         ))
     }
 
+    fn file_type(&self, _: &Path) -> io::Result<FileType> {
+        Err(io::Error::new(
+            io::ErrorKind::Unsupported,
+            "I/O methods are not implemented in DummyIO",
+        ))
+    }
+
     fn read_dir(&self, _: &Path) -> io::Result<Vec<(bytes::Bytes, FileType)>> {
         Err(io::Error::new(
             io::ErrorKind::Unsupported,
diff --git a/tvix/eval/src/lib.rs b/tvix/eval/src/lib.rs
index 845964cb7e..398da4d6e2 100644
--- a/tvix/eval/src/lib.rs
+++ b/tvix/eval/src/lib.rs
@@ -29,7 +29,7 @@ mod vm;
 mod warnings;
 
 mod nix_search_path;
-#[cfg(test)]
+#[cfg(all(test, feature = "arbitrary"))]
 mod properties;
 #[cfg(test)]
 mod test_utils;
diff --git a/tvix/eval/src/nix_search_path.rs b/tvix/eval/src/nix_search_path.rs
index 566ca12238..369c5b6857 100644
--- a/tvix/eval/src/nix_search_path.rs
+++ b/tvix/eval/src/nix_search_path.rs
@@ -197,6 +197,8 @@ mod tests {
         }
     }
 
+    // this uses StdIO, which is only available with the impure feature.
+    #[cfg(feature = "impure")]
     mod resolve {
         use crate::StdIO;
         use path_clean::PathClean;
diff --git a/tvix/eval/src/tests/mod.rs b/tvix/eval/src/tests/mod.rs
index 5a7708e298..21b5d35e6a 100644
--- a/tvix/eval/src/tests/mod.rs
+++ b/tvix/eval/src/tests/mod.rs
@@ -1,203 +1,6 @@
-use crate::{value::Value, EvalIO};
-use builtin_macros::builtins;
-use pretty_assertions::assert_eq;
-use rstest::rstest;
-use std::path::PathBuf;
-
 /// Module for one-off tests which do not follow the rest of the
 /// test layout.
 mod one_offs;
 
-#[builtins]
-mod mock_builtins {
-    //! Builtins which are required by language tests, but should not
-    //! actually exist in //tvix/eval.
-    use crate as tvix_eval;
-    use crate::generators::GenCo;
-    use crate::*;
-    use genawaiter::rc::Gen;
-
-    #[builtin("derivation")]
-    async fn builtin_derivation(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
-        let input = input.to_attrs()?;
-        let attrs = input.update(NixAttrs::from_iter(
-            [
-                (
-                    "outPath",
-                    "/nix/store/00000000000000000000000000000000-mock",
-                ),
-                (
-                    "drvPath",
-                    "/nix/store/00000000000000000000000000000000-mock.drv",
-                ),
-                ("type", "derivation"),
-            ]
-            .into_iter(),
-        ));
-
-        Ok(Value::Attrs(Box::new(attrs)))
-    }
-}
-
-fn eval_test(code_path: PathBuf, expect_success: bool) {
-    std::env::set_var("TEST_VAR", "foo"); // for eval-okay-getenv.nix
-
-    eprintln!("path: {}", code_path.display());
-    assert_eq!(
-        code_path.extension().unwrap(),
-        "nix",
-        "test files always end in .nix"
-    );
-
-    let code = std::fs::read_to_string(&code_path).expect("should be able to read test code");
-
-    let mut eval = crate::Evaluation::new_impure();
-    eval.strict = true;
-    eval.builtins.extend(mock_builtins::builtins());
-
-    let result = eval.evaluate(code, Some(code_path.clone()));
-    let failed = match result.value {
-        Some(Value::Catchable(_)) => true,
-        _ => !result.errors.is_empty(),
-    };
-    if expect_success && failed {
-        panic!(
-            "{}: evaluation of eval-okay test should succeed, but failed with {:?}",
-            code_path.display(),
-            result.errors,
-        );
-    }
-
-    if !expect_success && failed {
-        return;
-    }
-    // !expect_success can also mean the output differs, so don't panic if the
-    // evaluation didn't fail.
-
-    let value = result.value.unwrap();
-    let result_str = value.to_string();
-
-    let exp_path = code_path.with_extension("exp");
-    if exp_path.exists() {
-        // If there's an .exp file provided alongside, compare it with the
-        // output of the NixValue .to_string() method.
-        let exp_str = std::fs::read_to_string(&exp_path).expect("unable to read .exp file");
-
-        if expect_success {
-            assert_eq!(
-                result_str,
-                exp_str.trim(),
-                "{}: result value representation (left) must match expectation (right)",
-                code_path.display()
-            );
-        } else {
-            assert_ne!(
-                result_str,
-                exp_str.trim(),
-                "{}: test passed unexpectedly!  consider moving it out of notyetpassing",
-                code_path.display()
-            );
-
-            // Early return here, we don't compare .xml outputs if this is a !
-            // expect_success test.
-            return;
-        }
-    }
-
-    let exp_xml_path = code_path.with_extension("exp.xml");
-    if exp_xml_path.exists() {
-        // If there's an XML file provided alongside, compare it with the
-        // output produced when serializing the Value as XML.
-        let exp_xml_str = std::fs::read_to_string(exp_xml_path).expect("unable to read .xml file");
-
-        let mut xml_actual_buf = Vec::new();
-        crate::builtins::value_to_xml(&mut xml_actual_buf, &value).expect("value_to_xml failed");
-
-        assert_eq!(
-            String::from_utf8(xml_actual_buf).expect("to_xml produced invalid utf-8"),
-            exp_xml_str,
-            "{}: result value representation (left) must match expectation (right)",
-            code_path.display()
-        );
-    }
-}
-
-// identity-* tests contain Nix code snippets which should evaluate to
-// themselves exactly (i.e. literals).
-#[rstest]
-fn identity(#[files("src/tests/tvix_tests/identity-*.nix")] code_path: PathBuf) {
-    let code = std::fs::read_to_string(code_path).expect("should be able to read test code");
-
-    let mut eval = crate::Evaluation::new(Box::new(crate::StdIO) as Box<dyn EvalIO>, false);
-    eval.strict = true;
-
-    let result = eval.evaluate(&code, None);
-    assert!(
-        result.errors.is_empty(),
-        "evaluation of identity test failed: {:?}",
-        result.errors
-    );
-
-    let result_str = result.value.unwrap().to_string();
-
-    assert_eq!(
-        result_str,
-        code.trim(),
-        "result value representation (left) must match expectation (right)"
-    )
-}
-
-// eval-okay-* tests contain a snippet of Nix code, and an expectation
-// of the produced string output of the evaluator.
-//
-// These evaluations are always supposed to succeed, i.e. all snippets
-// are guaranteed to be valid Nix code.
-#[rstest]
-fn eval_okay(#[files("src/tests/tvix_tests/eval-okay-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, true)
-}
-
-// eval-okay-* tests from the original Nix test suite.
-#[cfg(feature = "nix_tests")]
-#[rstest]
-fn nix_eval_okay(#[files("src/tests/nix_tests/eval-okay-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, true)
-}
-
-// eval-okay-* tests from the original Nix test suite which do not yet pass for tvix
-//
-// Eventually there will be none of these left, and this function
-// will disappear :)
-//
-// Please don't submit failing tests unless they're in
-// notyetpassing; this makes the test suite much more useful for
-// regression testing, since there should always be zero non-ignored
-// failing tests.
-#[rstest]
-fn nix_eval_okay_currently_failing(
-    #[files("src/tests/nix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
-) {
-    eval_test(code_path, false)
-}
-
-#[rstest]
-fn eval_okay_currently_failing(
-    #[files("src/tests/tvix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
-) {
-    eval_test(code_path, false)
-}
-
-// eval-fail-* tests contain a snippet of Nix code, which is
-// expected to fail evaluation.  The exact type of failure
-// (assertion, parse error, etc) is not currently checked.
-#[rstest]
-fn eval_fail(#[files("src/tests/tvix_tests/eval-fail-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, false)
-}
-
-// eval-fail-* tests from the original Nix test suite.
 #[cfg(feature = "nix_tests")]
-#[rstest]
-fn nix_eval_fail(#[files("src/tests/nix_tests/eval-fail-*.nix")] code_path: PathBuf) {
-    eval_test(code_path, false)
-}
+mod nix_tests;
diff --git a/tvix/eval/src/tests/nix_tests.rs b/tvix/eval/src/tests/nix_tests.rs
new file mode 100644
index 0000000000..17968e4bdb
--- /dev/null
+++ b/tvix/eval/src/tests/nix_tests.rs
@@ -0,0 +1,207 @@
+use crate::value::Value;
+use builtin_macros::builtins;
+use pretty_assertions::assert_eq;
+use rstest::rstest;
+use std::path::PathBuf;
+
+#[builtins]
+mod mock_builtins {
+    //! Builtins which are required by language tests, but should not
+    //! actually exist in //tvix/eval.
+    use crate as tvix_eval;
+    use crate::generators::GenCo;
+    use crate::*;
+    use genawaiter::rc::Gen;
+
+    #[builtin("derivation")]
+    async fn builtin_derivation(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
+        let input = input.to_attrs()?;
+        let attrs = input.update(NixAttrs::from_iter(
+            [
+                (
+                    "outPath",
+                    "/nix/store/00000000000000000000000000000000-mock",
+                ),
+                (
+                    "drvPath",
+                    "/nix/store/00000000000000000000000000000000-mock.drv",
+                ),
+                ("type", "derivation"),
+            ]
+            .into_iter(),
+        ));
+
+        Ok(Value::Attrs(Box::new(attrs)))
+    }
+}
+
+#[cfg(feature = "impure")]
+fn eval_test(code_path: PathBuf, expect_success: bool) {
+    std::env::set_var("TEST_VAR", "foo"); // for eval-okay-getenv.nix
+
+    eprintln!("path: {}", code_path.display());
+    assert_eq!(
+        code_path.extension().unwrap(),
+        "nix",
+        "test files always end in .nix"
+    );
+
+    let code = std::fs::read_to_string(&code_path).expect("should be able to read test code");
+
+    let mut eval = crate::Evaluation::new_impure();
+    eval.strict = true;
+    eval.builtins.extend(mock_builtins::builtins());
+
+    let result = eval.evaluate(code, Some(code_path.clone()));
+    let failed = match result.value {
+        Some(Value::Catchable(_)) => true,
+        _ => !result.errors.is_empty(),
+    };
+    if expect_success && failed {
+        panic!(
+            "{}: evaluation of eval-okay test should succeed, but failed with {:?}",
+            code_path.display(),
+            result.errors,
+        );
+    }
+
+    if !expect_success && failed {
+        return;
+    }
+    // !expect_success can also mean the output differs, so don't panic if the
+    // evaluation didn't fail.
+
+    let value = result.value.unwrap();
+    let result_str = value.to_string();
+
+    let exp_path = code_path.with_extension("exp");
+    if exp_path.exists() {
+        // If there's an .exp file provided alongside, compare it with the
+        // output of the NixValue .to_string() method.
+        let exp_str = std::fs::read_to_string(&exp_path).expect("unable to read .exp file");
+
+        if expect_success {
+            assert_eq!(
+                result_str,
+                exp_str.trim(),
+                "{}: result value representation (left) must match expectation (right)",
+                code_path.display()
+            );
+        } else {
+            assert_ne!(
+                result_str,
+                exp_str.trim(),
+                "{}: test passed unexpectedly!  consider moving it out of notyetpassing",
+                code_path.display()
+            );
+
+            // Early return here, we don't compare .xml outputs if this is a
+            // !expect_success test.
+            return;
+        }
+    }
+
+    let exp_xml_path = code_path.with_extension("exp.xml");
+    if exp_xml_path.exists() {
+        // If there's an XML file provided alongside, compare it with the
+        // output produced when serializing the Value as XML.
+        let exp_xml_str = std::fs::read_to_string(exp_xml_path).expect("unable to read .xml file");
+
+        let mut xml_actual_buf = Vec::new();
+        crate::builtins::value_to_xml(&mut xml_actual_buf, &value).expect("value_to_xml failed");
+
+        assert_eq!(
+            String::from_utf8(xml_actual_buf).expect("to_xml produced invalid utf-8"),
+            exp_xml_str,
+            "{}: result value representation (left) must match expectation (right)",
+            code_path.display()
+        );
+    }
+}
+
+// identity-* tests contain Nix code snippets which should evaluate to
+// themselves exactly (i.e. literals).
+#[cfg(feature = "impure")]
+#[rstest]
+fn identity(#[files("src/tests/tvix_tests/identity-*.nix")] code_path: PathBuf) {
+    use crate::EvalIO;
+
+    let code = std::fs::read_to_string(code_path).expect("should be able to read test code");
+
+    let mut eval = crate::Evaluation::new(Box::new(crate::StdIO) as Box<dyn EvalIO>, false);
+    eval.strict = true;
+
+    let result = eval.evaluate(&code, None);
+    assert!(
+        result.errors.is_empty(),
+        "evaluation of identity test failed: {:?}",
+        result.errors
+    );
+
+    let result_str = result.value.unwrap().to_string();
+
+    assert_eq!(
+        result_str,
+        code.trim(),
+        "result value representation (left) must match expectation (right)"
+    )
+}
+
+// eval-okay-* tests contain a snippet of Nix code, and an expectation
+// of the produced string output of the evaluator.
+//
+// These evaluations are always supposed to succeed, i.e. all snippets
+// are guaranteed to be valid Nix code.
+#[cfg(feature = "impure")]
+#[rstest]
+fn eval_okay(#[files("src/tests/tvix_tests/eval-okay-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, true)
+}
+
+// eval-okay-* tests from the original Nix test suite.
+#[cfg(feature = "impure")]
+#[rstest]
+fn nix_eval_okay(#[files("src/tests/nix_tests/eval-okay-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, true)
+}
+
+// eval-okay-* tests from the original Nix test suite which do not yet pass for tvix
+//
+// Eventually there will be none of these left, and this function
+// will disappear :)
+//
+// Please don't submit failing tests unless they're in
+// notyetpassing; this makes the test suite much more useful for
+// regression testing, since there should always be zero non-ignored
+// failing tests.
+#[cfg(feature = "impure")]
+#[rstest]
+fn nix_eval_okay_currently_failing(
+    #[files("src/tests/nix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
+) {
+    eval_test(code_path, false)
+}
+
+#[cfg(feature = "impure")]
+#[rstest]
+fn eval_okay_currently_failing(
+    #[files("src/tests/tvix_tests/notyetpassing/eval-okay-*.nix")] code_path: PathBuf,
+) {
+    eval_test(code_path, false)
+}
+
+// eval-fail-* tests contain a snippet of Nix code, which is
+// expected to fail evaluation.  The exact type of failure
+// (assertion, parse error, etc) is not currently checked.
+#[cfg(feature = "impure")]
+#[rstest]
+fn eval_fail(#[files("src/tests/tvix_tests/eval-fail-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, false)
+}
+
+// eval-fail-* tests from the original Nix test suite.
+#[cfg(feature = "impure")]
+#[rstest]
+fn nix_eval_fail(#[files("src/tests/nix_tests/eval-fail-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, false)
+}
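
The harness above boils down to: evaluate the snippet strictly, then compare the value's Display form against the trimmed `.exp` file. A minimal sketch of one such check, assuming the tvix-eval API exercised in this file (`Evaluation::new_impure` requires the `impure` feature):

// Sketch: one test case, driven the way eval_test drives the evaluator.
fn check_snippet(code: &str, expected: &str) {
    let mut eval = tvix_eval::Evaluation::new_impure();
    eval.strict = true; // deep-force the result, like the harness above

    let result = eval.evaluate(code, None);
    assert!(
        result.errors.is_empty(),
        "unexpected errors: {:?}",
        result.errors
    );

    // .exp files store the trimmed Display representation of the value.
    assert_eq!(result.value.unwrap().to_string(), expected.trim());
}
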
diff --git a/tvix/eval/src/tests/one_offs.rs b/tvix/eval/src/tests/one_offs.rs
index 565d1dd48f..21e9144baf 100644
--- a/tvix/eval/src/tests/one_offs.rs
+++ b/tvix/eval/src/tests/one_offs.rs
@@ -5,7 +5,7 @@ fn test_source_builtin() {
     // Test an evaluation with a source-only builtin. The test ensures
     // that the artificially constructed thunking is correct.
 
-    let mut eval = Evaluation::new_impure();
+    let mut eval = Evaluation::new_pure();
     eval.src_builtins.push(("testSourceBuiltin", "42"));
 
     let result = eval.evaluate("builtins.testSourceBuiltin", None);
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp
new file mode 100644
index 0000000000..eb2117a0ce
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.exp
@@ -0,0 +1 @@
+[ [ "" [ "a" ] "c" ] [ "" [ "a" ] "b" [ "c" ] "" ] [ "" [ "a" null ] "b" [ null "c" ] "" ] [ " " [ "FOO" ] " " ] [ "" [ "abc" ] "" [ "" ] "" ] [ "" [ "abc" ] "" [ "" ] "" ] [ "" [ ] "" ] ]
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix
new file mode 100644
index 0000000000..95305040dc
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-builtins-split.nix
@@ -0,0 +1,10 @@
+[
+  (builtins.split "(a)b" "abc")
+  (builtins.split "([ac])" "abc")
+  (builtins.split "(a)|(c)" "abc")
+  (builtins.split "([[:upper:]]+)" " FOO ")
+
+  (builtins.split "(.*)" "abc")
+  (builtins.split "([abc]*)" "abc")
+  (builtins.split ".*" "")
+]
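
`builtins.split` returns a list alternating unmatched text with lists of capture groups (cf. the `.exp` file above). A rough Rust illustration of that shape, using the regex crate; note Nix uses POSIX EREs, so this is only an approximation:

// Sketch: reproduce the alternating shape of builtins.split output.
use regex::Regex;

fn split_like_nix(pattern: &str, s: &str) -> Vec<String> {
    let re = Regex::new(pattern).unwrap();
    let mut out = Vec::new();
    let mut last = 0;
    for caps in re.captures_iter(s) {
        let m = caps.get(0).unwrap();
        out.push(format!("{:?}", &s[last..m.start()])); // unmatched prefix
        let groups: Vec<Option<&str>> = (1..caps.len())
            .map(|i| caps.get(i).map(|g| g.as_str()))
            .collect();
        out.push(format!("{:?}", groups)); // capture groups (None = no match)
        last = m.end();
    }
    out.push(format!("{:?}", &s[last..])); // trailing unmatched text
    out
}

// split_like_nix("(a)b", "abc") ~ ["\"\"", "[Some(\"a\")]", "\"c\""],
// matching [ "" [ "a" ] "c" ] in the .exp file.
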
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml
new file mode 100644
index 0000000000..468972b2f8
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.exp.xml
@@ -0,0 +1,5 @@
+<?xml version='1.0' encoding='utf-8'?>
+<expr>
+  <attrs>
+  </attrs>
+</expr>
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix
new file mode 100644
index 0000000000..ffcd4415b0
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml-empty.nix
@@ -0,0 +1 @@
+{ }
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp
new file mode 100644
index 0000000000..9ae16de526
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.exp
@@ -0,0 +1 @@
+"<?xml version='1.0' encoding='utf-8'?>\n<expr>\n  <attrs>\n    <attr name=\"&amp;-{\">\n      <string value=\";&amp;&quot;\" />\n    </attr>\n    <attr name=\"a\">\n      <string value=\"s\" />\n    </attr>\n  </attrs>\n</expr>\n"
diff --git a/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix
new file mode 100644
index 0000000000..7d074048dd
--- /dev/null
+++ b/tvix/eval/src/tests/tvix_tests/eval-okay-toxml.nix
@@ -0,0 +1,2 @@
+# Check some corner cases regarding escaping.
+builtins.toXML { a = "s"; "&-{" = ";&\""; }
diff --git a/tvix/eval/src/value/json.rs b/tvix/eval/src/value/json.rs
index c48e9c1f4e..24a6bcaf6f 100644
--- a/tvix/eval/src/value/json.rs
+++ b/tvix/eval/src/value/json.rs
@@ -47,8 +47,8 @@ impl Value {
 
                 for val in l.into_iter() {
                     match generators::request_to_json(co, val).await {
-                        Ok((v, mut ctx)) => {
-                            context = context.join(&mut ctx);
+                        Ok((v, ctx)) => {
+                            context.extend(ctx.into_iter());
                             out.push(v)
                         }
                         Err(cek) => return Ok(Err(cek)),
@@ -100,8 +100,8 @@ impl Value {
                     out.insert(
                         name.to_str()?.to_owned(),
                         match generators::request_to_json(co, value).await {
-                            Ok((v, mut ctx)) => {
-                                context = context.join(&mut ctx);
+                            Ok((v, ctx)) => {
+                                context.extend(ctx.into_iter());
                                 v
                             }
                             Err(cek) => return Ok(Err(cek)),
diff --git a/tvix/eval/src/value/mod.rs b/tvix/eval/src/value/mod.rs
index c171c9a04e..dfad0cd839 100644
--- a/tvix/eval/src/value/mod.rs
+++ b/tvix/eval/src/value/mod.rs
@@ -338,8 +338,8 @@ impl Value {
             let coerced: Result<BString, _> = match (value, kind) {
                 // coercions that are always done
                 (Value::String(mut s), _) => {
-                    if let Some(ctx) = s.context_mut() {
-                        context = context.join(ctx);
+                    if let Some(ctx) = s.take_context() {
+                        context.extend(ctx.into_iter());
                     }
                     Ok((*s).into())
                 }
diff --git a/tvix/eval/src/value/string.rs b/tvix/eval/src/value/string.rs
index ceb43f1ea5..163e140a19 100644
--- a/tvix/eval/src/value/string.rs
+++ b/tvix/eval/src/value/string.rs
@@ -54,6 +54,12 @@ impl From<HashSet<NixContextElement>> for NixContext {
     }
 }
 
+impl<const N: usize> From<[NixContextElement; N]> for NixContext {
+    fn from(value: [NixContextElement; N]) -> Self {
+        Self(HashSet::from(value))
+    }
+}
+
 impl NixContext {
     /// Creates an empty context that can be populated
     /// and passed to form a contextful [NixString], albeit
@@ -78,20 +84,19 @@ impl NixContext {
         self
     }
 
-    /// Consumes both ends of the join into a new NixContent
-    /// containing the union of elements of both ends.
-    pub fn join(mut self, other: &mut NixContext) -> Self {
-        let other_set = std::mem::take(&mut other.0);
-        let mut set: HashSet<NixContextElement> = std::mem::take(&mut self.0);
-        set.extend(other_set);
-        Self(set)
+    /// Extends the existing context with more context elements.
+    pub fn extend<T>(&mut self, iter: T)
+    where
+        T: IntoIterator<Item = NixContextElement>,
+    {
+        self.0.extend(iter)
     }
 
     /// Copies from another [NixString] its context strings
     /// in this context.
     pub fn mimic(&mut self, other: &NixString) {
         if let Some(context) = other.context() {
-            self.0.extend(context.iter().cloned());
+            self.extend(context.iter().cloned());
         }
     }
 
@@ -154,6 +159,16 @@ impl NixContext {
     }
 }
 
+impl IntoIterator for NixContext {
+    type Item = NixContextElement;
+
+    type IntoIter = std::collections::hash_set::IntoIter<NixContextElement>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.0.into_iter()
+    }
+}
+
 /// This type is never instantiated, but serves to document the memory layout of the actual heap
 /// allocation for Nix strings.
 #[allow(dead_code)]
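
The join-to-extend change above replaces an allocating merge with in-place extension; the new `IntoIterator` impl lets a whole context be drained into another. A minimal sketch, assuming the NixContext API from this hunk (the store path literal is illustrative):

// Sketch: merging string contexts with the new API.
use tvix_eval::{NixContext, NixContextElement};

let mut context = NixContext::new();
let other: NixContext =
    [NixContextElement::Plain("/nix/store/aaaa-dep".into())].into();

// Previously this was `context = context.join(&mut other)`, which
// allocated a fresh set; now the elements are moved over in place.
context.extend(other.into_iter());
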
@@ -616,6 +631,11 @@ mod arbitrary {
 
 impl NixString {
     fn new(contents: &[u8], context: Option<Box<NixContext>>) -> Self {
+        debug_assert!(
+            !context.as_deref().is_some_and(NixContext::is_empty),
+            "BUG: initialized with empty context"
+        );
+
         // SAFETY: We're always fully initializing a NixString here:
         //
         // 1. NixStringInner::alloc sets up the len for us
@@ -707,8 +727,10 @@ impl NixString {
         let context = [self.context(), other.context()]
             .into_iter()
             .flatten()
-            .fold(NixContext::new(), |acc_ctx, new_ctx| {
-                acc_ctx.join(&mut new_ctx.clone())
+            .fold(NixContext::new(), |mut acc_ctx, new_ctx| {
+                // TODO: consume new_ctx?
+                acc_ctx.extend(new_ctx.iter().cloned());
+                acc_ctx
             });
         Self::new_context_from(context, s)
     }
@@ -719,33 +741,59 @@ impl NixString {
         //
         // Also, we're using the same lifetime and mutability as self, to fit the
         // pointer-to-reference conversion rules
-        unsafe { NixStringInner::context_ref(self.0).as_deref() }
+        let context = unsafe { NixStringInner::context_ref(self.0).as_deref() };
+
+        debug_assert!(
+            !context.is_some_and(NixContext::is_empty),
+            "BUG: empty context"
+        );
+
+        context
     }
 
-    pub(crate) fn context_mut(&mut self) -> Option<&mut NixContext> {
+    pub(crate) fn context_mut(&mut self) -> &mut Option<Box<NixContext>> {
         // SAFETY: There's no way to construct an uninitialized or invalid NixString (see the SAFETY
         // comment in `new`).
         //
         // Also, we're using the same lifetime and mutability as self, to fit the
         // pointer-to-reference conversion rules
-        unsafe { NixStringInner::context_mut(self.0).as_deref_mut() }
+        let context = unsafe { NixStringInner::context_mut(self.0) };
+
+        debug_assert!(
+            !context.as_deref().is_some_and(NixContext::is_empty),
+            "BUG: empty context"
+        );
+
+        context
     }
 
+    /// Iterates over all context elements.
+    /// See [iter_ctx_plain], [iter_ctx_derivation], [iter_ctx_single_outputs].
     pub fn iter_context(&self) -> impl Iterator<Item = &NixContext> {
         self.context().into_iter()
     }
 
-    pub fn iter_plain(&self) -> impl Iterator<Item = &str> {
+    /// Iterates over "plain" context elements, e.g. sources imported
+    /// in the store without more information, i.e. `toFile` or coerced imported paths.
+    /// It yields paths to the store.
+    pub fn iter_ctx_plain(&self) -> impl Iterator<Item = &str> {
         self.iter_context().flat_map(|context| context.iter_plain())
     }
 
-    pub fn iter_derivation(&self) -> impl Iterator<Item = &str> {
+    /// Iterates over "full derivations" context elements, e.g. something
+    /// referring to their `drvPath`, i.e. their full sources and binary closure.
+    /// It yields derivation paths.
+    pub fn iter_ctx_derivation(&self) -> impl Iterator<Item = &str> {
         return self
             .iter_context()
             .flat_map(|context| context.iter_derivation());
     }
 
-    pub fn iter_single_outputs(&self) -> impl Iterator<Item = (&str, &str)> {
+    /// Iterates over "single" context elements, e.g. single derived paths,
+    /// or also known as the single output of a given derivation.
+    /// The first element of the tuple is the output name
+    /// and the second element is the derivation path.
+    pub fn iter_ctx_single_outputs(&self) -> impl Iterator<Item = (&str, &str)> {
         return self
             .iter_context()
             .flat_map(|context| context.iter_single_outputs());
@@ -756,12 +804,16 @@ impl NixString {
         self.context().is_some()
     }
 
+    /// This clears the context of the string, returning
+    /// the removed dependency tracking information.
+    pub fn take_context(&mut self) -> Option<Box<NixContext>> {
+        self.context_mut().take()
+    }
+
     /// This clears the context of that string, losing
     /// all dependency tracking information.
     pub fn clear_context(&mut self) {
-        // SAFETY: There's no way to construct an uninitialized or invalid NixString (see the SAFETY
-        // comment in `new`).
-        *unsafe { NixStringInner::context_mut(self.0) } = None;
+        let _ = self.take_context();
     }
 
     pub fn chars(&self) -> Chars<'_> {
@@ -849,7 +901,7 @@ impl Display for NixString {
     }
 }
 
-#[cfg(test)]
+#[cfg(all(test, feature = "arbitrary"))]
 mod tests {
     use test_strategy::proptest;
 
diff --git a/tvix/eval/src/vm/generators.rs b/tvix/eval/src/vm/generators.rs
index 79de688692..dbf7703bf0 100644
--- a/tvix/eval/src/vm/generators.rs
+++ b/tvix/eval/src/vm/generators.rs
@@ -745,6 +745,7 @@ pub async fn request_open_file(co: &GenCo, path: PathBuf) -> Box<dyn std::io::Re
     }
 }
 
+#[cfg_attr(not(feature = "impure"), allow(unused))]
 pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
     match co.yield_(VMRequest::PathExists(path)).await {
         VMResponse::Value(value) => value,
@@ -755,6 +756,7 @@ pub(crate) async fn request_path_exists(co: &GenCo, path: PathBuf) -> Value {
     }
 }
 
+#[cfg_attr(not(feature = "impure"), allow(unused))]
 pub(crate) async fn request_read_dir(co: &GenCo, path: PathBuf) -> Vec<(bytes::Bytes, FileType)> {
     match co.yield_(VMRequest::ReadDir(path)).await {
         VMResponse::Directory(dir) => dir,
diff --git a/tvix/eval/src/vm/mod.rs b/tvix/eval/src/vm/mod.rs
index 5c244cc3ca..48dcdfc8df 100644
--- a/tvix/eval/src/vm/mod.rs
+++ b/tvix/eval/src/vm/mod.rs
@@ -797,7 +797,7 @@ where
                         if !formals.contains(arg) {
                             return frame.error(
                                 self,
-                                ErrorKind::UnexpectedArgument {
+                                ErrorKind::UnexpectedArgumentFormals {
                                     arg: arg.clone(),
                                     formals_span: formals.span,
                                 },
@@ -994,8 +994,8 @@ where
             }
             let mut nix_string = val.to_contextful_str().with_span(frame, self)?;
             out.push_str(nix_string.as_bstr());
-            if let Some(nix_string_ctx) = nix_string.context_mut() {
-                context = context.join(nix_string_ctx);
+            if let Some(nix_string_ctx) = nix_string.take_context() {
+                context.extend(nix_string_ctx.into_iter())
             }
         }
 
diff --git a/tvix/eval/tests/nix_oracle.rs b/tvix/eval/tests/nix_oracle.rs
index 5a5cc0a822..3d3abc7363 100644
--- a/tvix/eval/tests/nix_oracle.rs
+++ b/tvix/eval/tests/nix_oracle.rs
@@ -56,6 +56,7 @@ fn nix_eval(expr: &str, strictness: Strictness) -> String {
 /// `NIX_INSTANTIATE_BINARY_PATH` env var to resolve the `nix-instantiate` binary) and tvix, and
 /// assert that the result is identical
 #[track_caller]
+#[cfg(feature = "impure")]
 fn compare_eval(expr: &str, strictness: Strictness) {
     let nix_result = nix_eval(expr, strictness);
     let mut eval = tvix_eval::Evaluation::new_pure();
@@ -76,6 +77,7 @@ fn compare_eval(expr: &str, strictness: Strictness) {
 macro_rules! compare_eval_tests {
     ($strictness:expr, {}) => {};
     ($strictness:expr, {$(#[$meta:meta])* $test_name: ident($expr: expr); $($rest:tt)*}) => {
+        #[cfg(feature = "impure")]
         #[test]
         $(#[$meta])*
         fn $test_name() {
diff --git a/tvix/glue/Cargo.toml b/tvix/glue/Cargo.toml
index 0afdefeaaa..6968210c5e 100644
--- a/tvix/glue/Cargo.toml
+++ b/tvix/glue/Cargo.toml
@@ -17,7 +17,9 @@ tvix-build = { path = "../build", default-features = false, features = []}
 tvix-eval = { path = "../eval" }
 tvix-castore = { path = "../castore" }
 tvix-store = { path = "../store", default-features = false, features = []}
+tvix-tracing = { path = "../tracing" }
 tracing = "0.1.37"
+tracing-indicatif = "0.3.6"
 tokio = "1.28.0"
 tokio-tar = "0.3.1"
 tokio-util = { version = "0.7.9", features = ["io", "io-util", "compat"] }
@@ -33,6 +35,9 @@ walkdir = "2.4.0"
 [dependencies.wu-manber]
 git = "https://github.com/tvlfyi/wu-manber.git"
 
+[target.'cfg(not(target_env = "msvc"))'.dependencies]
+tikv-jemallocator = "0.5"
+
 [dev-dependencies]
 criterion = { version = "0.5", features = ["html_reports"] }
 hex-literal = "0.4.1"
diff --git a/tvix/glue/benches/eval.rs b/tvix/glue/benches/eval.rs
index 202278c1aa..9e0154cad7 100644
--- a/tvix/glue/benches/eval.rs
+++ b/tvix/glue/benches/eval.rs
@@ -1,6 +1,8 @@
 use criterion::{black_box, criterion_group, criterion_main, Criterion};
 use lazy_static::lazy_static;
 use std::{env, rc::Rc, sync::Arc, time::Duration};
+#[cfg(not(target_env = "msvc"))]
+use tikv_jemallocator::Jemalloc;
 use tvix_build::buildservice::DummyBuildService;
 use tvix_eval::{builtins::impure_builtins, EvalIO};
 use tvix_glue::{
@@ -11,6 +13,10 @@ use tvix_glue::{
 };
 use tvix_store::utils::construct_services;
 
+#[cfg(not(target_env = "msvc"))]
+#[global_allocator]
+static GLOBAL: Jemalloc = Jemalloc;
+
 lazy_static! {
     static ref TOKIO_RUNTIME: tokio::runtime::Runtime = tokio::runtime::Runtime::new().unwrap();
 }
diff --git a/tvix/glue/build.rs b/tvix/glue/build.rs
new file mode 100644
index 0000000000..544c34a6cb
--- /dev/null
+++ b/tvix/glue/build.rs
@@ -0,0 +1,6 @@
+fn main() {
+    // Pick up new test case files
+    // https://github.com/la10736/rstest/issues/256
+    println!("cargo:rerun-if-changed=src/tests/nix_tests");
+    println!("cargo:rerun-if-changed=src/tests/tvix_tests")
+}
diff --git a/tvix/glue/default.nix b/tvix/glue/default.nix
index 08f5c2228d..14c7e214f2 100644
--- a/tvix/glue/default.nix
+++ b/tvix/glue/default.nix
@@ -1,8 +1,17 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
 (depot.tvix.crates.workspaceMembers.tvix-glue.build.override {
   runTests = true;
   testPreRun = ''
     export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
   '';
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "nix_tests" ];
+    override.testPreRun = ''
+      export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
+    '';
+  };
 })
diff --git a/tvix/glue/src/builtins/derivation.rs b/tvix/glue/src/builtins/derivation.rs
index a7742ae40a..473aa9d5e3 100644
--- a/tvix/glue/src/builtins/derivation.rs
+++ b/tvix/glue/src/builtins/derivation.rs
@@ -168,14 +168,23 @@ fn handle_fixed_output(
 #[builtins(state = "Rc<TvixStoreIO>")]
 pub(crate) mod derivation_builtins {
     use std::collections::BTreeMap;
+    use std::io::Cursor;
 
     use crate::builtins::utils::{select_string, strong_importing_coerce_to_string};
+    use crate::fetchurl::fetchurl_derivation_to_fetch;
 
     use super::*;
     use bstr::ByteSlice;
-    use nix_compat::store_path::hash_placeholder;
+    use md5::Digest;
+    use nix_compat::nixhash::CAHash;
+    use nix_compat::store_path::{build_ca_path, hash_placeholder};
+    use sha2::Sha256;
+    use tvix_castore::proto as castorepb;
+    use tvix_castore::proto::node::Node;
+    use tvix_castore::proto::FileNode;
     use tvix_eval::generators::Gen;
     use tvix_eval::{NixContext, NixContextElement, NixString};
+    use tvix_store::proto::{NarInfo, PathInfo};
 
     #[builtin("placeholder")]
     async fn builtin_placeholder(co: GenCo, input: Value) -> Result<Value, ErrorKind> {
@@ -372,12 +381,12 @@ pub(crate) mod derivation_builtins {
                             return Ok(val);
                         }
 
-                        let (val_json, mut context) = match val.into_contextful_json(&co).await? {
+                        let (val_json, context) = match val.into_contextful_json(&co).await? {
                             Ok(v) => v,
                             Err(cek) => return Ok(Value::from(cek)),
                         };
 
-                        input_context = input_context.join(&mut context);
+                        input_context.extend(context.into_iter());
 
                         // No need to check for dups, we only iterate over every attribute name once
                         structured_attrs.insert(arg_name.to_owned(), val_json);
@@ -506,6 +515,17 @@ pub(crate) mod derivation_builtins {
                 ))),
         )));
 
+        // If the derivation is a fake derivation (builtin:fetchurl),
+        // synthesize a [Fetch] and add it there, too.
+        if drv.builder == "builtin:fetchurl" {
+            let (name, fetch) =
+                fetchurl_derivation_to_fetch(&drv).map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
+            known_paths
+                .add_fetch(fetch, &name)
+                .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+        }
+
         // Register the Derivation in known_paths.
         known_paths.add_derivation(drv_path, drv);
 
@@ -513,7 +533,12 @@ pub(crate) mod derivation_builtins {
     }
 
     #[builtin("toFile")]
-    async fn builtin_to_file(co: GenCo, name: Value, content: Value) -> Result<Value, ErrorKind> {
+    async fn builtin_to_file(
+        state: Rc<TvixStoreIO>,
+        co: GenCo,
+        name: Value,
+        content: Value,
+    ) -> Result<Value, ErrorKind> {
         if name.is_catchable() {
             return Ok(name);
         }
@@ -529,24 +554,84 @@ pub(crate) mod derivation_builtins {
             .to_contextful_str()
             .context("evaluating the `content` parameter of builtins.toFile")?;
 
-        if content.iter_derivation().count() > 0 || content.iter_single_outputs().count() > 0 {
+        if content.iter_ctx_derivation().count() > 0
+            || content.iter_ctx_single_outputs().count() > 0
+        {
             return Err(ErrorKind::UnexpectedContext);
         }
 
-        let path =
-            nix_compat::store_path::build_text_path(name.to_str()?, &content, content.iter_plain())
-                .map_err(|_e| {
-                    nix_compat::derivation::DerivationError::InvalidOutputName(
-                        name.to_str_lossy().into_owned(),
-                    )
+        let store_path = state.tokio_handle.block_on(async {
+            // upload contents to the blobservice and create a root node
+            let mut blob_writer = state.blob_service.open_write().await;
+
+            let mut r = Cursor::new(&content);
+
+            let blob_size = tokio::io::copy(&mut r, &mut blob_writer).await?;
+            let blob_digest = blob_writer.close().await?;
+            let ca_hash = CAHash::Text(Sha256::digest(&content).into());
+
+            let store_path =
+                build_ca_path(name.to_str()?, &ca_hash, content.iter_ctx_plain(), false)
+                    .map_err(|_e| {
+                        nix_compat::derivation::DerivationError::InvalidOutputName(
+                            name.to_str_lossy().into_owned(),
+                        )
+                    })
+                    .map_err(DerivationError::InvalidDerivation)?;
+
+            let root_node = Node::File(FileNode {
+                name: store_path.to_string().into(),
+                digest: blob_digest.into(),
+                size: blob_size,
+                executable: false,
+            });
+
+            // calculate the nar hash
+            let (nar_size, nar_sha256) = state
+                .nar_calculation_service
+                .calculate_nar(&root_node)
+                .await
+                .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
+            // assemble references from plain context.
+            let reference_paths: Vec<StorePathRef> = content
+                .iter_ctx_plain()
+                .map(|elem| StorePathRef::from_absolute_path(elem.as_bytes()))
+                .collect::<Result<_, _>>()
+                .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
+            // persist via pathinfo service.
+            state
+                .path_info_service
+                .put(PathInfo {
+                    node: Some(castorepb::Node {
+                        node: Some(root_node),
+                    }),
+                    references: reference_paths
+                        .iter()
+                        .map(|x| bytes::Bytes::copy_from_slice(x.digest()))
+                        .collect(),
+                    narinfo: Some(NarInfo {
+                        nar_size,
+                        nar_sha256: nar_sha256.to_vec().into(),
+                        signatures: vec![],
+                        reference_names: reference_paths
+                            .into_iter()
+                            .map(|x| x.to_string())
+                            .collect(),
+                        deriver: None,
+                        ca: Some(ca_hash.into()),
+                    }),
                 })
-                .map_err(DerivationError::InvalidDerivation)?
-                .to_absolute_path();
+                .await
+                .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
 
-        let context: NixContext = NixContextElement::Plain(path.clone()).into();
+            Ok::<_, ErrorKind>(store_path)
+        })?;
 
-        // TODO: actually persist the file in the store at that path ...
+        let abs_path = store_path.to_absolute_path();
+        let context: NixContext = NixContextElement::Plain(abs_path.clone()).into();
 
-        Ok(Value::from(NixString::new_context_from(context, path)))
+        Ok(Value::from(NixString::new_context_from(context, abs_path)))
     }
 }
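
The rewritten `builtins.toFile` derives its output path from a text hash over the contents plus the plain references from the string context. A minimal sketch of just the path computation, assuming nix-compat's `build_ca_path`/`CAHash` as used above:

// Sketch: compute a toFile-style store path (text hashing scheme).
use nix_compat::nixhash::CAHash;
use nix_compat::store_path::build_ca_path;
use sha2::{Digest, Sha256};

let content = b"hello tvix";
let ca_hash = CAHash::Text(Sha256::digest(content).into());

// No plain references in this example, so pass an empty iterator.
let store_path =
    build_ca_path("hello.txt", &ca_hash, std::iter::empty::<&str>(), false)
        .expect("valid output name");
println!("{}", store_path.to_absolute_path());
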
diff --git a/tvix/glue/src/builtins/errors.rs b/tvix/glue/src/builtins/errors.rs
index f6d5745c56..af8a24e6ab 100644
--- a/tvix/glue/src/builtins/errors.rs
+++ b/tvix/glue/src/builtins/errors.rs
@@ -4,7 +4,7 @@ use nix_compat::{
     store_path::BuildStorePathError,
 };
 use reqwest::Url;
-use std::rc::Rc;
+use std::{path::PathBuf, rc::Rc};
 use thiserror::Error;
 use tvix_castore::import;
 
@@ -65,8 +65,12 @@ pub enum FetcherError {
 pub enum ImportError {
     #[error("non-file '{0}' cannot be imported in 'flat' mode")]
     FlatImportOfNonFile(String),
+
     #[error("hash mismatch at ingestion of '{0}', expected: '{1}', got: '{2}'")]
     HashMismatch(String, NixHash, NixHash),
+
+    #[error("path '{}' is not in the Nix store", .0.display())]
+    PathNotInStore(PathBuf),
 }
 
 impl From<ImportError> for tvix_eval::ErrorKind {
diff --git a/tvix/glue/src/builtins/fetchers.rs b/tvix/glue/src/builtins/fetchers.rs
index c7602c03e8..1ad43b3833 100644
--- a/tvix/glue/src/builtins/fetchers.rs
+++ b/tvix/glue/src/builtins/fetchers.rs
@@ -6,16 +6,17 @@ use crate::{
     tvix_store_io::TvixStoreIO,
 };
 use nix_compat::nixhash;
-use nix_compat::nixhash::NixHash;
 use std::rc::Rc;
-use tracing::info;
 use tvix_eval::builtin_macros::builtins;
 use tvix_eval::generators::Gen;
 use tvix_eval::generators::GenCo;
 use tvix_eval::{CatchableErrorKind, ErrorKind, Value};
+use url::Url;
 
+// Used as a return type for extract_fetch_args, which shares some
+// parsing code between the fetchurl and fetchTarball builtins.
 struct NixFetchArgs {
-    url_str: String,
+    url: Url,
     name: Option<String>,
     sha256: Option<[u8; 32]>,
 }
@@ -30,8 +31,12 @@ async fn extract_fetch_args(
         // Get the raw bytes, not the ToString repr.
         let url_str =
             String::from_utf8(url_str.as_bytes().to_vec()).map_err(|_| ErrorKind::Utf8)?;
+
+        // Parse the URL.
+        let url = Url::parse(&url_str).map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
         return Ok(Ok(NixFetchArgs {
-            url_str,
+            url,
             name: None,
             sha256: None,
         }));
@@ -55,7 +60,14 @@ async fn extract_fetch_args(
         Err(cek) => return Ok(Err(cek)),
     };
 
-    // TODO: disallow other attrset keys, to match Nix' behaviour.
+    // Disallow other attrset keys, to match Nix' behaviour.
+    // We complain about the first unexpected key we find in the list.
+    const VALID_KEYS: [&[u8]; 3] = [b"url", b"name", b"sha256"];
+    if let Some(first_invalid_key) = attrs.keys().find(|k| !&VALID_KEYS.contains(&k.as_bytes())) {
+        return Err(ErrorKind::UnexpectedArgumentBuiltin(
+            first_invalid_key.clone(),
+        ));
+    }
 
     // parse the sha256 string into a digest.
     let sha256 = match sha256_str {
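
The allow-list check above matches Nix's behaviour of rejecting unknown attributes passed to fetchurl/fetchTarball. The same first-offender scan in isolation:

// Sketch: report the first key outside the allow-list.
const VALID_KEYS: [&[u8]; 3] = [b"url", b"name", b"sha256"];

fn first_invalid_key<'a>(keys: &[&'a [u8]]) -> Option<&'a [u8]> {
    keys.iter().copied().find(|k| !VALID_KEYS.contains(k))
}

assert_eq!(first_invalid_key(&[b"url" as &[u8], b"sha256"]), None);
assert_eq!(
    first_invalid_key(&[b"url" as &[u8], b"md5"]),
    Some(b"md5" as &[u8])
);
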
@@ -69,18 +81,16 @@ async fn extract_fetch_args(
         None => None,
     };
 
-    Ok(Ok(NixFetchArgs {
-        url_str,
-        name,
-        sha256,
-    }))
+    // Parse the URL.
+    let url = Url::parse(&url_str).map_err(|e| ErrorKind::TvixError(Rc::new(e)))?;
+
+    Ok(Ok(NixFetchArgs { url, name, sha256 }))
 }
 
 #[allow(unused_variables)] // for the `state` arg, for now
 #[builtins(state = "Rc<TvixStoreIO>")]
 pub(crate) mod fetcher_builtins {
-    use crate::builtins::FetcherError;
-    use url::Url;
+    use nix_compat::nixhash::NixHash;
 
     use super::*;
 
@@ -112,8 +122,6 @@ pub(crate) mod fetcher_builtins {
             }
             None => {
                 // If we don't have enough info, do the fetch now.
-                info!(?fetch, "triggering required fetch");
-
                 let (store_path, _root_node) = state
                     .tokio_handle
                     .block_on(async { state.fetcher.ingest_and_persist(&name, fetch).await })
@@ -138,16 +146,15 @@ pub(crate) mod fetcher_builtins {
         // Derive the name from the URL basename if not set explicitly.
         let name = args
             .name
-            .unwrap_or_else(|| url_basename(&args.url_str).to_owned());
-
-        // Parse the URL.
-        let url = Url::parse(&args.url_str)
-            .map_err(|e| ErrorKind::TvixError(Rc::new(FetcherError::InvalidUrl(e))))?;
+            .unwrap_or_else(|| url_basename(&args.url).to_owned());
 
         fetch_lazy(
             state,
             name,
-            Fetch::URL(url, args.sha256.map(NixHash::Sha256)),
+            Fetch::URL {
+                url: args.url,
+                exp_hash: args.sha256.map(NixHash::Sha256),
+            },
         )
     }
 
@@ -168,11 +175,14 @@ pub(crate) mod fetcher_builtins {
             .name
             .unwrap_or_else(|| DEFAULT_NAME_FETCH_TARBALL.to_owned());
 
-        // Parse the URL.
-        let url = Url::parse(&args.url_str)
-            .map_err(|e| ErrorKind::TvixError(Rc::new(FetcherError::InvalidUrl(e))))?;
-
-        fetch_lazy(state, name, Fetch::Tarball(url, args.sha256))
+        fetch_lazy(
+            state,
+            name,
+            Fetch::Tarball {
+                url: args.url,
+                exp_nar_sha256: args.sha256,
+            },
+        )
     }
 
     #[builtin("fetchGit")]
diff --git a/tvix/glue/src/builtins/import.rs b/tvix/glue/src/builtins/import.rs
index 4a15afa814..a3273eca15 100644
--- a/tvix/glue/src/builtins/import.rs
+++ b/tvix/glue/src/builtins/import.rs
@@ -104,18 +104,21 @@ async fn filtered_ingest(
 
 #[builtins(state = "Rc<TvixStoreIO>")]
 mod import_builtins {
+    use std::os::unix::ffi::OsStrExt;
     use std::rc::Rc;
 
     use super::*;
 
+    use crate::tvix_store_io::TvixStoreIO;
     use nix_compat::nixhash::{CAHash, NixHash};
+    use nix_compat::store_path::StorePath;
+    use sha2::Digest;
+    use tokio::io::AsyncWriteExt;
+    use tvix_castore::proto::node::Node;
+    use tvix_castore::proto::FileNode;
     use tvix_eval::generators::Gen;
     use tvix_eval::{generators::GenCo, ErrorKind, Value};
-    use tvix_eval::{NixContextElement, NixString};
-
-    use tvix_castore::B3Digest;
-
-    use crate::tvix_store_io::TvixStoreIO;
+    use tvix_eval::{FileType, NixContextElement, NixString};
 
     #[builtin("path")]
     async fn builtin_path(
@@ -165,71 +168,145 @@ mod import_builtins {
             })
             .transpose()?;
 
-        // FUTUREWORK(performance): this opens the file instead of using a stat-like
-        // system call to the file.
-        if !recursive_ingestion && state.open(path.as_ref()).is_err() {
-            Err(ImportError::FlatImportOfNonFile(
-                path.to_string_lossy().to_string(),
-            ))?;
-        }
+        // Check if the path points to a regular file.
+        // If it does, the filter function is never executed.
+        // TODO: follow symlinks and check their type instead
+        let (root_node, ca_hash) = match state.file_type(path.as_ref())? {
+            FileType::Regular => {
+                let mut file = state.open(path.as_ref())?;
+                // This is a single file, copy it to the blobservice directly.
+                let mut hash = sha2::Sha256::new();
+                let mut blob_size = 0;
+                let mut blob_writer = state
+                    .tokio_handle
+                    .block_on(async { state.blob_service.open_write().await });
 
-        let root_node = filtered_ingest(state.clone(), co, path.as_ref(), filter).await?;
-        let ca: CAHash = if recursive_ingestion {
-            CAHash::Nar(NixHash::Sha256(state.tokio_handle.block_on(async {
-                Ok::<_, tvix_eval::ErrorKind>(
+                let mut buf = [0u8; 4096];
+
+                loop {
+                    // read bytes into buffer, break out if EOF
+                    let len = file.read(&mut buf)?;
+                    if len == 0 {
+                        break;
+                    }
+                    blob_size += len as u64;
+
+                    let data = &buf[0..len];
+
+                    // add to blobwriter
                     state
-                        .nar_calculation_service
-                        .as_ref()
-                        .calculate_nar(&root_node)
-                        .await
-                        .map_err(|e| ErrorKind::TvixError(Rc::new(e)))?
-                        .1,
-                )
-            })?))
-        } else {
-            let digest: B3Digest = match root_node {
-                tvix_castore::proto::node::Node::File(ref fnode) => {
-                    // It's already validated.
-                    fnode.digest.clone().try_into().unwrap()
+                        .tokio_handle
+                        .block_on(async { blob_writer.write_all(data).await })?;
+
+                    // update the sha256 hash function. We can skip this in
+                    // recursive mode, where the NAR hash is used instead.
+                    if !recursive_ingestion {
+                        hash.update(data);
+                    }
                 }
-                // We cannot hash anything else than file in flat import mode.
-                _ => {
+
+                // close the blob writer, get back the b3 digest.
+                let blob_digest = state
+                    .tokio_handle
+                    .block_on(async { blob_writer.close().await })?;
+
+                let root_node = Node::File(FileNode {
+                    // The name gets set further down, while constructing the PathInfo.
+                    name: "".into(),
+                    digest: blob_digest.into(),
+                    size: blob_size,
+                    executable: false,
+                });
+
+                let ca_hash = if recursive_ingestion {
+                    let (_nar_size, nar_sha256) = state
+                        .tokio_handle
+                        .block_on(async {
+                            state
+                                .nar_calculation_service
+                                .as_ref()
+                                .calculate_nar(&root_node)
+                                .await
+                        })
+                        .map_err(|e| tvix_eval::ErrorKind::TvixError(Rc::new(e)))?;
+                    CAHash::Nar(NixHash::Sha256(nar_sha256))
+                } else {
+                    CAHash::Flat(NixHash::Sha256(hash.finalize().into()))
+                };
+
+                (root_node, ca_hash)
+            }
+
+            FileType::Directory => {
+                if !recursive_ingestion {
                     return Err(ImportError::FlatImportOfNonFile(
                         path.to_string_lossy().to_string(),
-                    )
-                    .into())
+                    ))?;
                 }
-            };
 
-            // FUTUREWORK: avoid hashing again.
-            CAHash::Flat(NixHash::Sha256(
-                state
+                // do the filtered ingest
+                let root_node = filtered_ingest(state.clone(), co, path.as_ref(), filter).await?;
+
+                // calculate the NAR sha256
+                let (_nar_size, nar_sha256) = state
                     .tokio_handle
-                    .block_on(async { state.blob_to_sha256_hash(digest).await })?,
-            ))
+                    .block_on(async {
+                        state
+                            .nar_calculation_service
+                            .as_ref()
+                            .calculate_nar(&root_node)
+                            .await
+                    })
+                    .map_err(|e| tvix_eval::ErrorKind::TvixError(Rc::new(e)))?;
+
+                let ca_hash = CAHash::Nar(NixHash::Sha256(nar_sha256));
+
+                (root_node, ca_hash)
+            }
+            FileType::Symlink => {
+                // FUTUREWORK: Nix follows a symlink if it's at the root,
+                // except if it's not resolvable (NixOS/nix#7761).
+                return Err(tvix_eval::ErrorKind::IO {
+                    path: Some(path.to_path_buf()),
+                    error: Rc::new(std::io::Error::new(
+                        std::io::ErrorKind::Unsupported,
+                        "builtins.path pointing to a symlink is ill-defined.",
+                    )),
+                });
+            }
+            FileType::Unknown => {
+                return Err(tvix_eval::ErrorKind::IO {
+                    path: Some(path.to_path_buf()),
+                    error: Rc::new(std::io::Error::new(
+                        std::io::ErrorKind::Unsupported,
+                        "unsupported file type",
+                    )),
+                })
+            }
         };
 
-        let obtained_hash = ca.hash().clone().into_owned();
         let (path_info, _hash, output_path) = state.tokio_handle.block_on(async {
             state
-                .node_to_path_info(name.as_ref(), path.as_ref(), ca, root_node)
+                .node_to_path_info(name.as_ref(), path.as_ref(), &ca_hash, root_node)
                 .await
         })?;
 
         if let Some(expected_sha256) = expected_sha256 {
-            if obtained_hash != expected_sha256 {
+            if *ca_hash.hash() != expected_sha256 {
                 Err(ImportError::HashMismatch(
                     path.to_string_lossy().to_string(),
                     expected_sha256,
-                    obtained_hash,
+                    ca_hash.hash().into_owned(),
                 ))?;
             }
         }
 
-        let _: tvix_store::proto::PathInfo = state.tokio_handle.block_on(async {
-            // This is necessary to cause the coercion of the error type.
-            Ok::<_, std::io::Error>(state.path_info_service.as_ref().put(path_info).await?)
-        })?;
+        state
+            .tokio_handle
+            .block_on(async { state.path_info_service.as_ref().put(path_info).await })
+            .map_err(|e| tvix_eval::ErrorKind::IO {
+                path: Some(path.to_path_buf()),
+                error: Rc::new(e.into()),
+            })?;
 
         // We need to attach context to the final output path.
         let outpath = output_path.to_absolute_path();
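
Summing up the branch above: flat imports content-address the raw file bytes, while recursive imports content-address the NAR serialization. A minimal sketch of that decision, with placeholder digests standing in for the real hashes:

// Sketch: flat vs. recursive content-addressing, as in builtins.path.
use nix_compat::nixhash::{CAHash, NixHash};

let recursive_ingestion = false;
let flat_sha256: [u8; 32] = [0; 32]; // hypothetical digest of the raw file
let nar_sha256: [u8; 32] = [0; 32]; // hypothetical digest of the NAR

let ca_hash = if recursive_ingestion {
    CAHash::Nar(NixHash::Sha256(nar_sha256))
} else {
    CAHash::Flat(NixHash::Sha256(flat_sha256))
};
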
@@ -264,7 +341,7 @@ mod import_builtins {
                     .register_node_in_path_info_service(
                         name,
                         &p,
-                        CAHash::Nar(NixHash::Sha256(nar_sha256)),
+                        &CAHash::Nar(NixHash::Sha256(nar_sha256)),
                         root_node,
                     )
                     .await
@@ -280,6 +357,44 @@ mod import_builtins {
                 .into(),
         )
     }
+
+    #[builtin("storePath")]
+    async fn builtin_store_path(
+        state: Rc<TvixStoreIO>,
+        co: GenCo,
+        path: Value,
+    ) -> Result<Value, ErrorKind> {
+        let p = std::str::from_utf8(match &path {
+            Value::String(s) => s.as_bytes(),
+            Value::Path(p) => p.as_os_str().as_bytes(),
+            _ => {
+                return Err(ErrorKind::TypeError {
+                    expected: "string or path",
+                    actual: path.type_of(),
+                })
+            }
+        })?;
+
+        let path_exists = if let Ok((store_path, sub_path)) = StorePath::from_absolute_path_full(p)
+        {
+            if !sub_path.as_os_str().is_empty() {
+                false
+            } else {
+                state.store_path_exists(store_path.as_ref()).await?
+            }
+        } else {
+            false
+        };
+
+        if !path_exists {
+            return Err(ImportError::PathNotInStore(p.into()).into());
+        }
+
+        Ok(Value::String(NixString::new_context_from(
+            [NixContextElement::Plain(p.into())].into(),
+            p,
+        )))
+    }
 }
 
 pub use import_builtins::builtins as import_builtins;
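
`builtins.storePath` only accepts a bare top-level store path; paths pointing below one are rejected. A sketch of the check, assuming nix-compat's `StorePath` as used above:

// Sketch: accept only top-level store paths, as builtins.storePath does.
use nix_compat::store_path::StorePath;

fn is_top_level_store_path(p: &str) -> bool {
    match StorePath::from_absolute_path_full(p) {
        // A non-empty sub_path means p points *into* a store path.
        Ok((_store_path, sub_path)) => sub_path.as_os_str().is_empty(),
        Err(_) => false,
    }
}
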
diff --git a/tvix/glue/src/fetchers/decompression.rs b/tvix/glue/src/fetchers/decompression.rs
index f96fa60e34..69a8297e6a 100644
--- a/tvix/glue/src/fetchers/decompression.rs
+++ b/tvix/glue/src/fetchers/decompression.rs
@@ -1,5 +1,3 @@
-#![allow(dead_code)] // TODO
-
 use std::{
     io, mem,
     pin::Pin,
@@ -155,9 +153,7 @@ where
         };
 
         let mut our_buf = ReadBuf::new(buffer);
-        if let Err(e) = ready!(inner.as_pin_mut().unwrap().poll_read(cx, &mut our_buf)) {
-            return Poll::Ready(Err(e));
-        }
+        ready!(inner.as_pin_mut().unwrap().poll_read(cx, &mut our_buf))?;
 
         let data = our_buf.filled();
         if data.len() >= BYTES_NEEDED {
diff --git a/tvix/glue/src/fetchers/mod.rs b/tvix/glue/src/fetchers/mod.rs
index 1b2e1ee20c..eb035a5a90 100644
--- a/tvix/glue/src/fetchers/mod.rs
+++ b/tvix/glue/src/fetchers/mod.rs
@@ -1,14 +1,15 @@
 use futures::TryStreamExt;
-use md5::Md5;
+use md5::{digest::DynDigest, Md5};
 use nix_compat::{
     nixhash::{CAHash, HashAlgo, NixHash},
     store_path::{build_ca_path, BuildStorePathError, StorePathRef},
 };
 use sha1::Sha1;
 use sha2::{digest::Output, Digest, Sha256, Sha512};
-use tokio::io::{AsyncBufRead, AsyncRead, AsyncWrite};
-use tokio_util::io::InspectReader;
-use tracing::warn;
+use tokio::io::{AsyncBufRead, AsyncRead, AsyncWrite, AsyncWriteExt, BufReader};
+use tokio_util::io::{InspectReader, InspectWriter};
+use tracing::{instrument, warn, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_castore::{
     blobservice::BlobService,
     directoryservice::DirectoryService,
@@ -25,10 +26,14 @@ use decompression::DecompressedReader;
 /// Representing options for doing a fetch.
 #[derive(Clone, Eq, PartialEq)]
 pub enum Fetch {
-    /// Fetch a literal file from the given URL, with an optional expected
-    /// NixHash of it.
-    /// TODO: check if this is *always* sha256, and if so, make it [u8; 32].
-    URL(Url, Option<NixHash>),
+    /// Fetch a literal file from the given URL,
+    /// with an optional expected hash.
+    URL {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the file.
+        exp_hash: Option<NixHash>,
+    },
 
     /// Fetch a tarball from the given URL and unpack.
     /// The file must be a tape archive (.tar), optionally compressed with gzip,
@@ -37,7 +42,37 @@ pub enum Fetch {
     /// so it is best if the tarball contains a single directory at top level.
     /// Optionally, a sha256 digest can be provided to verify the unpacked
     /// contents against.
-    Tarball(Url, Option<[u8; 32]>),
+    Tarball {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the contents, as NAR.
+        exp_nar_sha256: Option<[u8; 32]>,
+    },
+
+    /// Fetch a NAR file from the given URL and unpack.
+    /// The file can optionally be compressed.
+    NAR {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the NAR representation.
+        /// This unfortunately supports more algorithms than just sha256.
+        hash: NixHash,
+    },
+
+    /// Fetches a file at a URL and makes it the store path root node,
+    /// marked as executable.
+    /// Used by <nix/fetchurl.nix>, with `executable = true;`.
+    /// The expected hash is over the NAR representation, but is not
+    /// necessarily SHA256:
+    /// ```nix
+    /// (import <nix/fetchurl.nix> { url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"; hash = "sha1-NKNeU1csW5YJ4lCeWH3Z/apppNU="; executable = true; })
+    /// ```
+    Executable {
+        /// The URL to fetch from.
+        url: Url,
+        /// The expected hash of the NAR representation.
+        /// This unfortunately supports more algorithms than just sha256.
+        hash: NixHash,
+    },
 
     /// TODO
     Git(),
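
With the tuple variants turned into struct variants, call sites now name each field explicitly, which keeps the optional hashes unambiguous. A minimal construction sketch, assuming the Fetch enum above is in scope (the URL is illustrative):

// Sketch: constructing the reworked Fetch variants by field name.
use url::Url;

let url = Url::parse("https://example.com/src.tar.gz").unwrap();

let by_url = Fetch::URL { url: url.clone(), exp_hash: None };
let by_tarball = Fetch::Tarball { url, exp_nar_sha256: None };
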
@@ -60,7 +95,7 @@ fn redact_url(url: &Url) -> Url {
 impl std::fmt::Debug for Fetch {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 let url = redact_url(url);
                 if let Some(exp_hash) = exp_hash {
                     write!(f, "URL [url: {}, exp_hash: Some({})]", &url, exp_hash)
@@ -68,19 +103,30 @@ impl std::fmt::Debug for Fetch {
                     write!(f, "URL [url: {}, exp_hash: None]", &url)
                 }
             }
-            Fetch::Tarball(url, exp_digest) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 let url = redact_url(url);
-                if let Some(exp_digest) = exp_digest {
+                if let Some(exp_nar_sha256) = exp_nar_sha256 {
                     write!(
                         f,
-                        "Tarball [url: {}, exp_hash: Some({})]",
+                        "Tarball [url: {}, exp_nar_sha256: Some({})]",
                         url,
-                        NixHash::Sha256(*exp_digest)
+                        NixHash::Sha256(*exp_nar_sha256)
                     )
                 } else {
                     write!(f, "Tarball [url: {}, exp_hash: None]", url)
                 }
             }
+            Fetch::NAR { url, hash } => {
+                let url = redact_url(url);
+                write!(f, "NAR [url: {}, hash: {}]", &url, hash)
+            }
+            Fetch::Executable { url, hash } => {
+                let url = redact_url(url);
+                write!(f, "Executable [url: {}, hash: {}]", &url, hash)
+            }
             Fetch::Git() => todo!(),
         }
     }
@@ -95,9 +141,28 @@ impl Fetch {
         name: &'a str,
     ) -> Result<Option<StorePathRef<'a>>, BuildStorePathError> {
         let ca_hash = match self {
-            Fetch::URL(_, Some(nixhash)) => CAHash::Flat(nixhash.clone()),
-            Fetch::Tarball(_, Some(nar_sha256)) => CAHash::Nar(NixHash::Sha256(*nar_sha256)),
-            _ => return Ok(None),
+            Fetch::URL {
+                exp_hash: Some(exp_hash),
+                ..
+            } => CAHash::Flat(exp_hash.clone()),
+
+            Fetch::Tarball {
+                exp_nar_sha256: Some(exp_nar_sha256),
+                ..
+            } => CAHash::Nar(NixHash::Sha256(*exp_nar_sha256)),
+
+            Fetch::NAR { hash, .. } | Fetch::Executable { hash, .. } => {
+                CAHash::Nar(hash.to_owned())
+            }
+
+            Fetch::Git() => todo!(),
+
+            // everything else
+            Fetch::URL { exp_hash: None, .. }
+            | Fetch::Tarball {
+                exp_nar_sha256: None,
+                ..
+            } => return Ok(None),
         };
 
         // calculate the store path of this fetch
@@ -132,7 +197,18 @@ impl<BS, DS, PS, NS> Fetcher<BS, DS, PS, NS> {
 
     /// Constructs a HTTP request to the passed URL, and returns a AsyncReadBuf to it.
     /// In case the URI uses the file:// scheme, use tokio::fs to open it.
-    async fn download(&self, url: Url) -> Result<Box<dyn AsyncBufRead + Unpin>, FetcherError> {
+    #[instrument(skip_all, fields(url, indicatif.pb_show=1), err)]
+    async fn download(
+        &self,
+        url: Url,
+    ) -> Result<Box<dyn AsyncBufRead + Unpin + Send>, FetcherError> {
+        let span = Span::current();
+        span.pb_set_message(&format!(
+            "📡Fetching {}",
+            // TODO: maybe shorten
+            redact_url(&url)
+        ));
+
         match url.scheme() {
             "file" => {
                 let f = tokio::fs::File::open(url.to_file_path().map_err(|_| {
@@ -144,16 +220,38 @@ impl<BS, DS, PS, NS> Fetcher<BS, DS, PS, NS> {
                     ))
                 })?)
                 .await?;
-                Ok(Box::new(tokio::io::BufReader::new(f)))
+
+                span.pb_set_length(f.metadata().await?.len());
+                span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+                span.pb_start();
+                Ok(Box::new(tokio::io::BufReader::new(InspectReader::new(
+                    f,
+                    move |d| {
+                        span.pb_inc(d.len() as u64);
+                    },
+                ))))
             }
             _ => {
                 let resp = self.http_client.get(url).send().await?;
+
+                if let Some(content_length) = resp.content_length() {
+                    span.pb_set_length(content_length);
+                    span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+                } else {
+                    span.pb_set_style(&tvix_tracing::PB_TRANSFER_STYLE);
+                }
+                span.pb_start();
+
                 Ok(Box::new(tokio_util::io::StreamReader::new(
-                    resp.bytes_stream().map_err(|e| {
-                        let e = e.without_url();
-                        warn!(%e, "failed to get response body");
-                        std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)
-                    }),
+                    resp.bytes_stream()
+                        .inspect_ok(move |d| {
+                            span.pb_inc(d.len() as u64);
+                        })
+                        .map_err(|e| {
+                            let e = e.without_url();
+                            warn!(%e, "failed to get response body");
+                            std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)
+                        }),
                 )))
             }
         }
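
Both branches above report download progress the same way: the byte stream is tapped so every chunk advances the current span's progress bar. A reduced sketch of that wrapping, assuming tracing-indicatif's IndicatifSpanExt as imported in this file:

// Sketch: count bytes as they stream past, feeding a span progress bar.
use tokio_util::io::InspectReader;
use tracing::Span;
use tracing_indicatif::span_ext::IndicatifSpanExt;

let span = Span::current();
span.pb_set_length(1024); // e.g. from file metadata or Content-Length
span.pb_start();

// Any AsyncRead works; tokio::io::empty() stands in for the download here.
let reader = InspectReader::new(tokio::io::empty(), move |chunk: &[u8]| {
    span.pb_inc(chunk.len() as u64);
});
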
@@ -190,7 +288,7 @@ where
     /// didn't match the previously communicated hash contained inside the FetchArgs.
     pub async fn ingest(&self, fetch: Fetch) -> Result<(Node, CAHash, u64), FetcherError> {
         match fetch {
-            Fetch::URL(url, exp_hash) => {
+            Fetch::URL { url, exp_hash } => {
                 // Construct a AsyncRead reading from the data as its downloaded.
                 let mut r = self.download(url.clone()).await?;
 
@@ -199,7 +297,7 @@ where
 
                 // Copy the contents from the download reader to the blob writer.
                 // Calculate the digest of the file received, depending on the
-                // communicated expected hash (or sha256 if none provided).
+                // communicated expected hash algo (or sha256 if none provided).
                 let (actual_hash, blob_size) = match exp_hash
                     .as_ref()
                     .map(NixHash::algo)
@@ -243,7 +341,10 @@ where
                     blob_size,
                 ))
             }
-            Fetch::Tarball(url, exp_nar_sha256) => {
+            Fetch::Tarball {
+                url,
+                exp_nar_sha256,
+            } => {
                 // Construct a AsyncRead reading from the data as its downloaded.
                 let r = self.download(url.clone()).await?;
 
@@ -252,7 +353,7 @@ where
                 // Open the archive.
                 let archive = tokio_tar::Archive::new(r);
 
-                // Ingest the archive, get the root node
+                // Ingest the archive, get the root node.
                 let node = tvix_castore::import::archive::ingest_archive(
                     self.blob_service.clone(),
                     self.directory_service.clone(),
@@ -263,7 +364,7 @@ where
                 // If an expected NAR sha256 was provided, compare with the one
                 // calculated from our root node.
                 // Even if no expected NAR sha256 has been provided, we need
-                // the actual one later.
+                // the actual one to calculate the store path.
                 let (nar_size, actual_nar_sha256) = self
                     .nar_calculation_service
                     .calculate_nar(&node)
@@ -289,6 +390,156 @@ where
                     nar_size,
                 ))
             }
+            Fetch::NAR {
+                url,
+                hash: exp_hash,
+            } => {
+                // Construct a AsyncRead reading from the data as its downloaded.
+                let r = self.download(url.clone()).await?;
+
+                // Pop compression.
+                let r = DecompressedReader::new(r);
+
+                // Wrap the reader, calculating our own hash.
+                let mut hasher: Box<dyn DynDigest + Send> = match exp_hash.algo() {
+                    HashAlgo::Md5 => Box::new(Md5::new()),
+                    HashAlgo::Sha1 => Box::new(Sha1::new()),
+                    HashAlgo::Sha256 => Box::new(Sha256::new()),
+                    HashAlgo::Sha512 => Box::new(Sha512::new()),
+                };
+                let mut r = tokio_util::io::InspectReader::new(r, |b| {
+                    hasher.update(b);
+                });
+
+                // Ingest the NAR, get the root node.
+                let (root_node, _actual_nar_sha256, actual_nar_size) =
+                    tvix_store::nar::ingest_nar_and_hash(
+                        self.blob_service.clone(),
+                        self.directory_service.clone(),
+                        &mut r,
+                    )
+                    .await
+                    .map_err(|e| FetcherError::Io(std::io::Error::other(e.to_string())))?;
+
+                // finalize the hasher.
+                let actual_hash = {
+                    match exp_hash.algo() {
+                        HashAlgo::Md5 => {
+                            NixHash::Md5(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha1 => {
+                            NixHash::Sha1(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha256 => {
+                            NixHash::Sha256(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha512 => {
+                            NixHash::Sha512(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                    }
+                };
+
+                // Ensure the hash matches.
+                if exp_hash != actual_hash {
+                    return Err(FetcherError::HashMismatch {
+                        url,
+                        wanted: exp_hash,
+                        got: actual_hash,
+                    });
+                }
+                Ok((
+                    root_node,
+                    // use a CAHash::Nar with the algo from the input.
+                    CAHash::Nar(exp_hash),
+                    actual_nar_size,
+                ))
+            }
+            Fetch::Executable {
+                url,
+                hash: exp_hash,
+            } => {
+                // Construct an AsyncRead reading from the data as it's downloaded.
+                let mut r = self.download(url.clone()).await?;
+
+                // Construct an AsyncWrite to write into the BlobService.
+                let mut blob_writer = self.blob_service.open_write().await;
+
+                // Copy the contents from the download reader to the blob writer.
+                let file_size = tokio::io::copy(&mut r, &mut blob_writer).await?;
+                let blob_digest = blob_writer.close().await?;
+
+                // Render the NAR representation on-the-fly into a hash function with
+                // the same algo as our expected hash.
+                // We cannot do this upfront, as we don't know the actual size.
+                // FUTUREWORK: make opportunistic use of Content-Length header?
+
+                let w = tokio::io::sink();
+                // Construct the hash function.
+                let mut hasher: Box<dyn DynDigest + Send> = match exp_hash.algo() {
+                    HashAlgo::Md5 => Box::new(Md5::new()),
+                    HashAlgo::Sha1 => Box::new(Sha1::new()),
+                    HashAlgo::Sha256 => Box::new(Sha256::new()),
+                    HashAlgo::Sha512 => Box::new(Sha512::new()),
+                };
+
+                let mut nar_size: u64 = 0;
+                let mut w = InspectWriter::new(w, |d| {
+                    hasher.update(d);
+                    nar_size += d.len() as u64;
+                });
+
+                {
+                    let node = nix_compat::nar::writer::r#async::open(&mut w).await?;
+
+                    let blob_reader = self
+                        .blob_service
+                        .open_read(&blob_digest)
+                        .await?
+                        .expect("Tvix bug: just-uploaded blob not found");
+
+                    node.file(true, file_size, &mut BufReader::new(blob_reader))
+                        .await?;
+
+                    w.flush().await?;
+                }
+
+                // finalize the hasher.
+                let actual_hash = {
+                    match exp_hash.algo() {
+                        HashAlgo::Md5 => {
+                            NixHash::Md5(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha1 => {
+                            NixHash::Sha1(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha256 => {
+                            NixHash::Sha256(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                        HashAlgo::Sha512 => {
+                            NixHash::Sha512(hasher.finalize().to_vec().try_into().unwrap())
+                        }
+                    }
+                };
+
+                if exp_hash != actual_hash {
+                    return Err(FetcherError::HashMismatch {
+                        url,
+                        wanted: exp_hash,
+                        got: actual_hash,
+                    });
+                }
+
+                // Construct and return the FileNode describing the downloaded contents,
+                // marking it executable.
+                let root_node = Node::File(FileNode {
+                    name: vec![].into(),
+                    digest: blob_digest.into(),
+                    size: file_size,
+                    executable: true,
+                });
+
+                Ok((root_node, CAHash::Nar(actual_hash), file_size))
+            }
             Fetch::Git() => todo!(),
         }
     }
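Both the NAR and Executable branches above repeat the same algo-to-hasher dispatch and the same finalize-to-NixHash conversion. A minimal refactoring sketch (hypothetical helpers, not part of this change, reusing the `DynDigest`, `HashAlgo` and `NixHash` types already in scope):

use digest::DynDigest;
use md5::Md5;
use nix_compat::nixhash::{HashAlgo, NixHash};
use sha1::Sha1;
use sha2::{Digest, Sha256, Sha512};

// Construct a boxed hasher for the given algo.
fn make_hasher(algo: HashAlgo) -> Box<dyn DynDigest + Send> {
    match algo {
        HashAlgo::Md5 => Box::new(Md5::new()),
        HashAlgo::Sha1 => Box::new(Sha1::new()),
        HashAlgo::Sha256 => Box::new(Sha256::new()),
        HashAlgo::Sha512 => Box::new(Sha512::new()),
    }
}

// Consume the hasher and wrap the digest in the matching NixHash variant.
// The try_into().unwrap() is fine: the digest length always matches the algo.
fn finalize_nixhash(algo: HashAlgo, hasher: Box<dyn DynDigest + Send>) -> NixHash {
    let digest = hasher.finalize().to_vec();
    match algo {
        HashAlgo::Md5 => NixHash::Md5(digest.try_into().unwrap()),
        HashAlgo::Sha1 => NixHash::Sha1(digest.try_into().unwrap()),
        HashAlgo::Sha256 => NixHash::Sha256(digest.try_into().unwrap()),
        HashAlgo::Sha512 => NixHash::Sha512(digest.try_into().unwrap()),
    }
}

Each branch would then reduce to make_hasher(exp_hash.algo()) up front and finalize_nixhash(exp_hash.algo(), hasher) at the end.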
@@ -306,7 +557,7 @@ where
         // Fetch file, return the (unnamed) (File)Node of its contents, ca hash and filesize.
         let (node, ca_hash, size) = self.ingest(fetch).await?;
 
-        // Calculate the store path to return later, which is done with the ca_hash.
+        // Calculate the store path to return, derived from the ca_hash.
         let store_path = build_ca_path(name, &ca_hash, Vec::<String>::new(), false)?;
 
         // Rename the node name to match the Store Path.
@@ -315,14 +566,15 @@ where
         // If the resulting hash is not a CAHash::Nar, we also need to invoke
         // `calculate_nar` to calculate this representation, as it's required in
         // the [PathInfo].
+        // FUTUREWORK: allow ingest() to return multiple hashes, or have it feed
+        // nar_calculation_service too?
         let (nar_size, nar_sha256) = match &ca_hash {
-            CAHash::Flat(_nix_hash) => self
+            CAHash::Nar(NixHash::Sha256(nar_sha256)) => (size, *nar_sha256),
+            CAHash::Nar(_) | CAHash::Flat(_) => self
                 .nar_calculation_service
                 .calculate_nar(&node)
                 .await
                 .map_err(|e| FetcherError::Io(e.into()))?,
-            CAHash::Nar(NixHash::Sha256(nar_sha256)) => (size, *nar_sha256),
-            CAHash::Nar(_) => unreachable!("Tvix bug: fetch returned non-sha256 CAHash::Nar"),
             CAHash::Text(_) => unreachable!("Tvix bug: fetch returned CAHash::Text"),
         };
 
@@ -351,7 +603,8 @@ where
 }
 
 /// Attempts to mimic `nix::libutil::baseNameOf`
-pub(crate) fn url_basename(s: &str) -> &str {
+pub(crate) fn url_basename(url: &Url) -> &str {
+    let s = url.path();
     if s.is_empty() {
         return "";
     }
@@ -382,38 +635,82 @@ pub(crate) fn url_basename(s: &str) -> &str {
 #[cfg(test)]
 mod tests {
     mod fetch {
-        use nix_compat::nixbase32;
-
-        use crate::fetchers::Fetch;
-
         use super::super::*;
-
-        #[test]
-        fn fetchurl_store_path() {
-            let url = Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap();
-            let exp_hash = NixHash::Sha256(
-                nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax")
-                    .unwrap(),
-            );
-
-            let fetch = Fetch::URL(url, Some(exp_hash));
+        use crate::fetchers::Fetch;
+        use nix_compat::{nixbase32, nixhash};
+        use rstest::rstest;
+
+        #[rstest]
+        #[case::url_no_hash(
+            Fetch::URL{
+                url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+                exp_hash: None,
+            },
+            None,
+            "notmuch-extract-patch"
+        )]
+        #[case::url_sha256(
+            Fetch::URL{
+                url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+                exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap()),
+            },
+            Some(StorePathRef::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap()),
+            "notmuch-extract-patch"
+        )]
+        #[case::url_custom_name(
+            Fetch::URL{
+                url: Url::parse("https://test.example/owo").unwrap(),
+                exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap()),
+            },
+            Some(StorePathRef::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap()),
+            "notmuch-extract-patch"
+        )]
+        #[case::nar_sha256(
+            Fetch::NAR{
+                url: Url::parse("https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap(),
+                hash: nixhash::from_sri_str("sha256-oj6yfWKbcEerK8D9GdPJtIAOveNcsH1ztGeSARGypRA=").unwrap(),
+            },
+            Some(StorePathRef::from_bytes(b"b40vjphshq4fdgv8s3yrp0bdlafi4920-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap()),
+            "0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"
+        )]
+        #[case::nar_sha1(
+            Fetch::NAR{
+                url: Url::parse("https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap(),
+                hash: nixhash::from_sri_str("sha1-F/fMsgwkXF8fPCg1v9zPZ4yOFIA=").unwrap(),
+            },
+            Some(StorePathRef::from_bytes(b"8kx7fdkdbzs4fkfb57xq0cbhs20ymq2n-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap()),
+            "0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"
+        )]
+        #[case::executable_sha1(
+            Fetch::Executable{
+                url: Url::parse("https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap(),
+                hash: nixhash::from_sri_str("sha1-NKNeU1csW5YJ4lCeWH3Z/apppNU=").unwrap(),
+            },
+            Some(StorePathRef::from_bytes(b"y92hm2xfk1009hrq0ix80j4m5k4j4w21-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz").unwrap()),
+            "0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz"
+        )]
+        fn fetch_store_path(
+            #[case] fetch: Fetch,
+            #[case] exp_path: Option<StorePathRef>,
+            #[case] name: &str,
+        ) {
             assert_eq!(
-                "06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch",
-                &fetch
-                    .store_path("notmuch-extract-patch")
-                    .unwrap()
-                    .unwrap()
-                    .to_string(),
-            )
+                exp_path,
+                fetch.store_path(name).expect("invalid name"),
+                "unexpected calculated store path"
+            );
         }
 
         #[test]
         fn fetch_tarball_store_path() {
             let url = Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap();
-            let exp_nixbase32 =
+            let exp_sha256 =
                 nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm")
                     .unwrap();
-            let fetch = Fetch::Tarball(url, Some(exp_nixbase32));
+            let fetch = Fetch::Tarball {
+                url,
+                exp_nar_sha256: Some(exp_sha256),
+            };
 
             assert_eq!(
                 "7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source",
@@ -424,30 +721,18 @@ mod tests {
 
     mod url_basename {
         use super::super::*;
-
-        #[test]
-        fn empty_path() {
-            assert_eq!(url_basename(""), "");
-        }
-
-        #[test]
-        fn path_on_root() {
-            assert_eq!(url_basename("/dir"), "dir");
-        }
-
-        #[test]
-        fn relative_path() {
-            assert_eq!(url_basename("dir/foo"), "foo");
-        }
-
-        #[test]
-        fn root_with_trailing_slash() {
-            assert_eq!(url_basename("/"), "");
-        }
-
-        #[test]
-        fn trailing_slash() {
-            assert_eq!(url_basename("/dir/"), "dir");
+        use rstest::rstest;
+
+        #[rstest]
+        #[case::empty_path("", "")]
+        #[case::path_on_root("/dir", "dir")]
+        #[case::relative_path("dir/foo", "foo")]
+        #[case::root_with_trailing_slash("/", "")]
+        #[case::trailing_slash("/dir/", "dir")]
+        fn test_url_basename(#[case] url_path: &str, #[case] exp_basename: &str) {
+            let mut url = Url::parse("http://localhost").expect("invalid url");
+            url.set_path(url_path);
+            assert_eq!(url_basename(&url), exp_basename);
         }
     }
 }
diff --git a/tvix/glue/src/fetchurl.rs b/tvix/glue/src/fetchurl.rs
new file mode 100644
index 0000000000..9f57868b19
--- /dev/null
+++ b/tvix/glue/src/fetchurl.rs
@@ -0,0 +1,82 @@
+//! This contains the code translating from a `builtin:fetchurl` [Derivation]
+//! to a [Fetch].
+use crate::fetchers::Fetch;
+use nix_compat::{derivation::Derivation, nixhash::CAHash};
+use tracing::instrument;
+use url::Url;
+
+/// Takes a derivation produced by a call to `builtin:fetchurl` and returns the
+/// synthesized [Fetch] for it, as well as the name.
+#[instrument]
+pub(crate) fn fetchurl_derivation_to_fetch(drv: &Derivation) -> Result<(String, Fetch), Error> {
+    if drv.builder != "builtin:fetchurl" {
+        return Err(Error::BuilderInvalid);
+    }
+    if !drv.arguments.is_empty() {
+        return Err(Error::ArgumentsInvalid);
+    }
+    if drv.system != "builtin" {
+        return Err(Error::SystemInvalid);
+    }
+
+    // ensure this is a fixed-output derivation
+    if drv.outputs.len() != 1 {
+        return Err(Error::NoFOD);
+    }
+    let out_output = &drv.outputs.get("out").ok_or(Error::NoFOD)?;
+    let ca_hash = out_output.ca_hash.clone().ok_or(Error::NoFOD)?;
+
+    let name: String = drv
+        .environment
+        .get("name")
+        .ok_or(Error::NameMissing)?
+        .to_owned()
+        .try_into()
+        .map_err(|_| Error::NameInvalid)?;
+
+    let url: Url = std::str::from_utf8(drv.environment.get("url").ok_or(Error::URLMissing)?)
+        .map_err(|_| Error::URLInvalid)?
+        .parse()
+        .map_err(|_| Error::URLInvalid)?;
+
+    match ca_hash {
+        CAHash::Flat(hash) => {
+            return Ok((
+                name,
+                Fetch::URL {
+                    url,
+                    exp_hash: Some(hash),
+                },
+            ))
+        }
+        CAHash::Nar(hash) => {
+            if drv.environment.get("executable").map(|v| v.as_slice()) == Some(b"1") {
+                Ok((name, Fetch::Executable { url, hash }))
+            } else {
+                Ok((name, Fetch::NAR { url, hash }))
+            }
+        }
+        // you can't construct derivations containing this
+        CAHash::Text(_) => panic!("Tvix bug: got CAHash::Text in drv"),
+    }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub(crate) enum Error {
+    #[error("Invalid builder")]
+    BuilderInvalid,
+    #[error("invalid arguments")]
+    ArgumentsInvalid,
+    #[error("Invalid system")]
+    SystemInvalid,
+    #[error("Derivation is not fixed-output")]
+    NoFOD,
+    #[error("Missing URL")]
+    URLMissing,
+    #[error("Invalid URL")]
+    URLInvalid,
+    #[error("Missing Name")]
+    NameMissing,
+    #[error("Name invalid")]
+    NameInvalid,
+}
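To summarize the dispatch above: the CA hash type of the single fixed output decides which [Fetch] variant gets synthesized. A condensed sketch of just that mapping (hypothetical free function; `url`, `ca_hash` and `executable` stand in for the values extracted from the derivation):

use crate::fetchers::Fetch;
use nix_compat::nixhash::CAHash;
use url::Url;

// Hypothetical condensed form of the variant selection above.
// `executable` mirrors drv.environment["executable"] == "1".
fn select_fetch(url: Url, ca_hash: CAHash, executable: bool) -> Fetch {
    match ca_hash {
        // Flat: the hash covers the plain file contents.
        CAHash::Flat(hash) => Fetch::URL { url, exp_hash: Some(hash) },
        // NAR: the hash covers the NAR representation; executables get
        // their own variant so the root node can be marked executable.
        CAHash::Nar(hash) if executable => Fetch::Executable { url, hash },
        CAHash::Nar(hash) => Fetch::NAR { url, hash },
        // Text hashes can't be constructed in such derivations.
        CAHash::Text(_) => unreachable!("Tvix bug: got CAHash::Text in drv"),
    }
}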
diff --git a/tvix/glue/src/known_paths.rs b/tvix/glue/src/known_paths.rs
index 290c9d5b69..edc57c38f2 100644
--- a/tvix/glue/src/known_paths.rs
+++ b/tvix/glue/src/known_paths.rs
@@ -133,7 +133,7 @@ impl KnownPaths {
 
 #[cfg(test)]
 mod tests {
-    use nix_compat::{derivation::Derivation, nixbase32, nixhash::NixHash, store_path::StorePath};
+    use nix_compat::{derivation::Derivation, nixbase32, nixhash, store_path::StorePath};
     use url::Url;
 
     use crate::fetchers::Fetch;
@@ -160,16 +160,16 @@ mod tests {
         static ref FOO_OUT_PATH: StorePath =
             StorePath::from_bytes(b"fhaj6gmwns62s6ypkcldbaj2ybvkhx3p-foo").expect("must parse");
 
-        static ref FETCH_URL : Fetch = Fetch::URL(
-            Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
-            Some(NixHash::Sha256(nixbase32::decode_fixed("0nawkl04sj7psw6ikzay7kydj3dhd0fkwghcsf5rzaw4bmp4kbax").unwrap()))
-        );
+        static ref FETCH_URL : Fetch = Fetch::URL{
+            url: Url::parse("https://raw.githubusercontent.com/aaptel/notmuch-extract-patch/f732a53e12a7c91a06755ebfab2007adc9b3063b/notmuch-extract-patch").unwrap(),
+            exp_hash: Some(nixhash::from_sri_str("sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=").unwrap())
+        };
         static ref FETCH_URL_OUT_PATH: StorePath = StorePath::from_bytes(b"06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch").unwrap();
 
-        static ref FETCH_TARBALL : Fetch = Fetch::Tarball(
-            Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
-            Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
-        );
+        static ref FETCH_TARBALL : Fetch = Fetch::Tarball{
+            url: Url::parse("https://github.com/NixOS/nixpkgs/archive/91050ea1e57e50388fa87a3302ba12d188ef723a.tar.gz").unwrap(),
+            exp_nar_sha256: Some(nixbase32::decode_fixed("1hf6cgaci1n186kkkjq106ryf8mmlq9vnwgfwh625wa8hfgdn4dm").unwrap())
+        };
         static ref FETCH_TARBALL_OUT_PATH: StorePath = StorePath::from_bytes(b"7adgvk5zdfq4pwrhsm3n9lzypb12gw0g-source").unwrap();
     }
 
@@ -267,22 +267,6 @@ mod tests {
                 .unwrap()
                 .to_owned()
         );
-
-        // We should be able to get these fetches out, when asking for their out path.
-        let (got_name, got_fetch) = known_paths
-            .get_fetch_for_output_path(&FETCH_URL_OUT_PATH)
-            .expect("must be some");
-
-        assert_eq!("notmuch-extract-patch", got_name);
-        assert_eq!(FETCH_URL.clone(), got_fetch);
-
-        // … multiple times.
-        let (got_name, got_fetch) = known_paths
-            .get_fetch_for_output_path(&FETCH_URL_OUT_PATH)
-            .expect("must be some");
-
-        assert_eq!("notmuch-extract-patch", got_name);
-        assert_eq!(FETCH_URL.clone(), got_fetch);
     }
 
     // TODO: add test panicking about missing digest
diff --git a/tvix/glue/src/lib.rs b/tvix/glue/src/lib.rs
index 2e5a3be103..a5dbdb8742 100644
--- a/tvix/glue/src/lib.rs
+++ b/tvix/glue/src/lib.rs
@@ -6,6 +6,8 @@ pub mod tvix_build;
 pub mod tvix_io;
 pub mod tvix_store_io;
 
+mod fetchurl;
+
 #[cfg(test)]
 mod tests;
 
diff --git a/tvix/glue/src/tests/empty-file b/tvix/glue/src/tests/empty-file
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/tvix/glue/src/tests/empty-file
diff --git a/tvix/glue/src/tests/mod.rs b/tvix/glue/src/tests/mod.rs
index 9fe0c22270..5b474be736 100644
--- a/tvix/glue/src/tests/mod.rs
+++ b/tvix/glue/src/tests/mod.rs
@@ -145,3 +145,11 @@ fn nix_eval_okay(#[files("src/tests/nix_tests/eval-okay-*.nix")] code_path: Path
 // ) {
 //     eval_test(code_path, false)
 // }
+
+// eval-fail-* tests contain a snippet of Nix code, which is
+// expected to fail evaluation. The exact type of failure
+// (assertion, parse error, etc.) is not currently checked.
+#[rstest]
+fn eval_fail(#[files("src/tests/tvix_tests/eval-fail-*.nix")] code_path: PathBuf) {
+    eval_test(code_path, false)
+}
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-attrs.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-attrs.nix
new file mode 100644
index 0000000000..209f58cc9d
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-attrs.nix
@@ -0,0 +1,5 @@
+(builtins.fetchTarball {
+  url = "https://test.example/owo";
+  # Only "sha256" is accepted here.
+  hash = "sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
+})
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-url.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-url.nix
new file mode 100644
index 0000000000..32596ddcd5
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchtarball-invalid-url.nix
@@ -0,0 +1 @@
+(builtins.fetchTarball /dev/null)
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-attrs.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-attrs.nix
new file mode 100644
index 0000000000..d3c2bed801
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-attrs.nix
@@ -0,0 +1,5 @@
+(builtins.fetchurl {
+  url = "https://test.example/owo";
+  # Only "sha256" is accepted here.
+  hash = "sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
+})
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-url.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-url.nix
new file mode 100644
index 0000000000..dc3f70b998
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-fetchurl-invalid-url.nix
@@ -0,0 +1 @@
+(builtins.fetchurl /dev/null)
diff --git a/tvix/glue/src/tests/tvix_tests/eval-fail-tofile-wrongctxtype.nix b/tvix/glue/src/tests/tvix_tests/eval-fail-tofile-wrongctxtype.nix
new file mode 100644
index 0000000000..60c94818ed
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-fail-tofile-wrongctxtype.nix
@@ -0,0 +1,3 @@
+# in 'toFile': the file 'foo' cannot refer to derivation outputs, at (string):1:1
+builtins.toFile "foo" "${(builtins.derivation {name = "foo"; builder = ":"; system = ":";})}"
+
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix
index ecd8ab0073..e5719e00c3 100644
--- a/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-context-introspection.nix
@@ -71,7 +71,7 @@ in
   (builtins.hasAttr "allOutputs" (builtins.getContext drv.drvPath)."${builtins.unsafeDiscardStringContext drv.drvPath}")
   (legit-context == desired-context) # FIXME(raitobezarius): this should not use `builtins.seq`, this is a consequence of excessive laziness of Tvix, I believe.
   (reconstructed-path == combo-path)
-  # Those are too slow?
+  # These still fail with an internal error
   # (etaRule' "foo")
   # (etaRule' combo-path)
   (etaRule "foo")
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp
index 37a04d577c..50d8e3574b 100644
--- a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.exp
@@ -1 +1 @@
-[ /nix/store/y0r1p1cqmlvm0yqkz3gxvkc1p8kg2sz8-null /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch ]
+[ /nix/store/y0r1p1cqmlvm0yqkz3gxvkc1p8kg2sz8-null /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch /nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch "/nix/store/06qi00hylriyfm0nl827crgjvbax84mz-notmuch-extract-patch" "/nix/store/b40vjphshq4fdgv8s3yrp0bdlafi4920-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz" "/nix/store/8kx7fdkdbzs4fkfb57xq0cbhs20ymq2n-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz" "/nix/store/y92hm2xfk1009hrq0ix80j4m5k4j4w21-0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz" ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix
index 8a39101525..a3e9c54869 100644
--- a/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-fetchurl.nix
@@ -22,4 +22,44 @@
     name = "notmuch-extract-patch";
     sha256 = "sha256-Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
   })
+
+  # The following tests use <nix/fetchurl.nix>.
+  # This is a piece of Nix code producing a "fake derivation" which gets
+  # handled by a "custom builder" that does the actual fetching.
+  # We access `.outPath` here, as the current string output of a Derivation
+  # still differs from the way nix presents it.
+  # It behaves similarly to builtins.fetchurl, except it requires a hash to be
+  # provided upfront.
+  # If `unpack` is set to true, it can unpack NAR files (decompressing if
+  # necessary).
+  # If `executable` is set to true, it will place the fetched file at the root,
+  # but make it executable, and the hash is on the NAR representation.
+
+  # Fetch a URL.
+  (import <nix/fetchurl.nix> {
+    url = "https://test.example/owo";
+    name = "notmuch-extract-patch";
+    sha256 = "Xa1Jbl2Eq5+L0ww+Ph1osA3Z/Dxe/RkN1/dITQCdXFk=";
+  }).outPath
+
+  # Fetch a NAR and unpack it, specifying the sha256 of its NAR representation.
+  (import <nix/fetchurl.nix> {
+    url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz";
+    sha256 = "sha256-oj6yfWKbcEerK8D9GdPJtIAOveNcsH1ztGeSARGypRA=";
+    unpack = true;
+  }).outPath
+
+  # Fetch a NAR and unpack it, specifying the *sha1* of its NAR representation.
+  (import <nix/fetchurl.nix> {
+    url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz";
+    hash = "sha1-F/fMsgwkXF8fPCg1v9zPZ4yOFIA=";
+    unpack = true;
+  }).outPath
+
+  # Fetch a URL, specifying the *sha1* of a NAR describing it as executable at the root.
+  (import <nix/fetchurl.nix> {
+    url = "https://cache.nixos.org/nar/0r8nqa1klm5v17ifc6z96m9wywxkjvgbnqq9pmy0sgqj53wj3n12.nar.xz";
+    hash = "sha1-NKNeU1csW5YJ4lCeWH3Z/apppNU=";
+    executable = true;
+  }).outPath
 ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp
new file mode 100644
index 0000000000..e7d20f6631
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.exp
@@ -0,0 +1 @@
+{ contextMatches = true; hasContext = true; }
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix
new file mode 100644
index 0000000000..99205cb9e0
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-storePath.nix
@@ -0,0 +1,9 @@
+let
+  path = builtins.unsafeDiscardStringContext "${../empty-file}";
+  storePath = builtins.storePath path;
+  context = builtins.getContext storePath;
+in
+{
+  hasContext = builtins.hasContext storePath;
+  contextMatches = context == { "${path}" = { path = true; }; };
+}
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.exp
new file mode 100644
index 0000000000..c8e5b8fab5
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.exp
@@ -0,0 +1 @@
+[ "/nix/store/vxjiwkjkn7x4079qvh1jkl5pn05j2aw0-foo" "/nix/store/i7liwr52956m86kxp7dgbcwsk56r27v6-foo" "/nix/store/yw8k7ixk1zvb113p4y0bl3ahjxd5h9sr-foo" { "/nix/store/yw8k7ixk1zvb113p4y0bl3ahjxd5h9sr-foo" = { path = true; }; } ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.nix
new file mode 100644
index 0000000000..141bbc38ec
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-tofile.nix
@@ -0,0 +1,11 @@
+let
+  noContext = (builtins.toFile "foo" "bar");
+  someContext = (builtins.toFile "foo" "bar${noContext}");
+  moreContext = (builtins.toFile "foo" "bar${someContext}");
+in
+[
+  noContext
+  someContext
+  moreContext
+  (builtins.getContext moreContext)
+]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp
new file mode 100644
index 0000000000..e9600ecdad
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.exp
@@ -0,0 +1 @@
+[ { "/nix/store/y1s2fiq89v2h9vkb38w508ir20dwv6v2-test.drv" = { allOutputs = true; }; } false ]
diff --git a/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix
new file mode 100644
index 0000000000..933aa46022
--- /dev/null
+++ b/tvix/glue/src/tests/tvix_tests/eval-okay-toxml-context.nix
@@ -0,0 +1,14 @@
+[
+  # builtins.toXML retains context where there is some.
+  (builtins.getContext (builtins.toXML {
+    inherit (derivation {
+      name = "test";
+      builder = "/bin/sh";
+      system = builtins.currentSystem;
+    }) drvPath;
+  }))
+
+  # this should have no context.
+  (builtins.hasContext
+    (builtins.toXML { }))
+]
diff --git a/tvix/glue/src/tvix_io.rs b/tvix/glue/src/tvix_io.rs
index 0e5f23b990..db0c2cef77 100644
--- a/tvix/glue/src/tvix_io.rs
+++ b/tvix/glue/src/tvix_io.rs
@@ -60,6 +60,10 @@ where
         self.actual.as_ref().open(path)
     }
 
+    fn file_type(&self, path: &Path) -> io::Result<FileType> {
+        self.actual.as_ref().file_type(path)
+    }
+
     fn read_dir(&self, path: &Path) -> io::Result<Vec<(bytes::Bytes, FileType)>> {
         self.actual.as_ref().read_dir(path)
     }
diff --git a/tvix/glue/src/tvix_store_io.rs b/tvix/glue/src/tvix_store_io.rs
index 7b8ef3ff0a..4e5488067f 100644
--- a/tvix/glue/src/tvix_store_io.rs
+++ b/tvix/glue/src/tvix_store_io.rs
@@ -1,11 +1,9 @@
 //! This module provides an implementation of EvalIO talking to tvix-store.
-
 use bytes::Bytes;
 use futures::{StreamExt, TryStreamExt};
 use nix_compat::nixhash::NixHash;
 use nix_compat::store_path::StorePathRef;
 use nix_compat::{nixhash::CAHash, store_path::StorePath};
-use sha2::{Digest, Sha256};
 use std::{
     cell::RefCell,
     collections::BTreeSet,
@@ -14,12 +12,12 @@ use std::{
     sync::Arc,
 };
 use tokio_util::io::SyncIoBridge;
-use tracing::{error, info, instrument, warn, Level};
+use tracing::{error, instrument, warn, Level, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_build::buildservice::BuildService;
 use tvix_castore::proto::node::Node;
 use tvix_eval::{EvalIO, FileType, StdIO};
 use tvix_store::nar::NarCalculationService;
-use tvix_store::utils::AsyncIoBridge;
 
 use tvix_castore::{
     blobservice::BlobService,
@@ -106,7 +104,7 @@ impl TvixStoreIO {
     ///
     /// In case there is no PathInfo yet, this means we need to build it
     /// (which currently is stubbed out still).
-    #[instrument(skip(self, store_path), fields(store_path=%store_path), ret(level = Level::TRACE), err)]
+    #[instrument(skip(self, store_path), fields(store_path=%store_path, indicatif.pb_show=1), ret(level = Level::TRACE), err)]
     async fn store_path_to_node(
         &self,
         store_path: &StorePath,
@@ -138,7 +136,8 @@ impl TvixStoreIO {
                 // The store path doesn't exist yet, so we need to fetch or build it.
                 // We check for fetches first, as we might have both native
                 // fetchers and FODs in KnownPaths, and prefer the former.
-
+                // This will also find [Fetch] synthesized from
+                // `builtin:fetchurl` Derivations.
                 let maybe_fetch = self
                     .known_paths
                     .borrow()
@@ -146,7 +145,6 @@ impl TvixStoreIO {
 
                 match maybe_fetch {
                     Some((name, fetch)) => {
-                        info!(?fetch, "triggering lazy fetch");
                         let (sp, root_node) = self
                             .fetcher
                             .ingest_and_persist(&name, fetch)
@@ -156,9 +154,9 @@ impl TvixStoreIO {
                         })?;
 
                         debug_assert_eq!(
-                            sp.to_string(),
-                            store_path.to_string(),
-                            "store path returned from fetcher should match"
+                            sp.to_absolute_path(),
+                            store_path.as_ref().to_absolute_path(),
+                            "store path returned from fetcher must match store path we have in fetchers"
                         );
 
                         root_node
@@ -179,14 +177,16 @@ impl TvixStoreIO {
                                 }
                             }
                         };
-
-                        warn!("triggering build");
+                        let span = Span::current();
+                        span.pb_start();
+                        span.pb_set_style(&tvix_tracing::PB_SPINNER_STYLE);
+                        span.pb_set_message(&format!("⏳Waiting for inputs {}", &store_path));
 
                         // derivation_to_build_request needs castore nodes for all inputs.
                         // Provide them, which means, here is where we recursively build
                         // all dependencies.
                         #[allow(clippy::mutable_key_type)]
-                        let input_nodes: BTreeSet<Node> =
+                        let mut input_nodes: BTreeSet<Node> =
                             futures::stream::iter(drv.input_derivations.iter())
                                 .map(|(input_drv_path, output_names)| {
                                     // look up the derivation object
@@ -236,9 +236,34 @@ impl TvixStoreIO {
                                     )
                                 })
                                 .flatten()
-                                .buffer_unordered(10) // TODO: make configurable
+                                .buffer_unordered(
+                                    1, /* TODO: increase again once we prevent redundant fetches */
+                                ) // TODO: make configurable
+                                .try_collect()
+                                .await?;
+
+                        // add input sources
+                        // FUTUREWORK: merge these two things together
+                        #[allow(clippy::mutable_key_type)]
+                        let input_nodes_input_sources: BTreeSet<Node> =
+                            futures::stream::iter(drv.input_sources.iter())
+                                .then(|input_source| {
+                                    Box::pin(async {
+                                        let node = self
+                                            .store_path_to_node(input_source, Path::new(""))
+                                            .await?;
+                                        if let Some(node) = node {
+                                            Ok(node)
+                                        } else {
+                                            Err(io::Error::other("no node produced"))
+                                        }
+                                    })
+                                })
                                 .try_collect()
                                 .await?;
+                        input_nodes.extend(input_nodes_input_sources);
+
+                        span.pb_set_message(&format!("🔨Building {}", &store_path));
 
                        // TODO: check if input sources are sufficiently dealt with,
                         // I think yes, they must be imported into the store by other
@@ -255,7 +280,7 @@ impl TvixStoreIO {
                             .await
                             .map_err(|e| std::io::Error::new(io::ErrorKind::Other, e))?;
 
-                        // TODO: refscan?
+                        // TODO: refscan
 
                         // For each output, insert a PathInfo.
                         for output in &build_result.outputs {
@@ -332,7 +357,7 @@ impl TvixStoreIO {
         &self,
         name: &str,
         path: &Path,
-        ca: CAHash,
+        ca: &CAHash,
         root_node: Node,
     ) -> io::Result<(PathInfo, NixHash, StorePath)> {
         // Ask the PathInfoService for the NAR size and sha256
@@ -347,7 +372,7 @@ impl TvixStoreIO {
 
         // Calculate the output path. This might still fail, as some names are illegal.
         let output_path =
-            nix_compat::store_path::build_ca_path(name, &ca, Vec::<String>::new(), false).map_err(
+            nix_compat::store_path::build_ca_path(name, ca, Vec::<String>::new(), false).map_err(
                 |_| {
                     std::io::Error::new(
                         std::io::ErrorKind::InvalidData,
@@ -374,7 +399,7 @@ impl TvixStoreIO {
         &self,
         name: &str,
         path: &Path,
-        ca: CAHash,
+        ca: &CAHash,
         root_node: Node,
     ) -> io::Result<StorePath> {
         let (path_info, _, output_path) = self.node_to_path_info(name, path, ca, root_node).await?;
@@ -383,26 +408,6 @@ impl TvixStoreIO {
         Ok(output_path)
     }
 
-    /// Transforms a BLAKE-3 digest into a SHA256 digest
-    /// by re-hashing the whole file.
-    pub(crate) async fn blob_to_sha256_hash(&self, blob_digest: B3Digest) -> io::Result<[u8; 32]> {
-        let mut reader = self
-            .blob_service
-            .open_read(&blob_digest)
-            .await?
-            .ok_or_else(|| {
-                io::Error::new(
-                    io::ErrorKind::NotFound,
-                    format!("blob represented by digest: '{}' not found", blob_digest),
-                )
-            })?;
-        // It is fine to use `AsyncIoBridge` here because hashing is not actually I/O.
-        let mut hasher = AsyncIoBridge(Sha256::new());
-
-        tokio::io::copy(&mut reader, &mut hasher).await?;
-        Ok(hasher.0.finalize().into())
-    }
-
     pub async fn store_path_exists<'a>(&'a self, store_path: StorePathRef<'a>) -> io::Result<bool> {
         Ok(self
             .path_info_service
@@ -421,7 +426,7 @@ impl EvalIO for TvixStoreIO {
         {
             if self
                 .tokio_handle
-                .block_on(async { self.store_path_to_node(&store_path, &sub_path).await })?
+                .block_on(self.store_path_to_node(&store_path, &sub_path))?
                 .is_some()
             {
                 Ok(true)
@@ -505,6 +510,28 @@ impl EvalIO for TvixStoreIO {
     }
 
     #[instrument(skip(self), ret(level = Level::TRACE), err)]
+    fn file_type(&self, path: &Path) -> io::Result<FileType> {
+        if let Ok((store_path, sub_path)) =
+            StorePath::from_absolute_path_full(&path.to_string_lossy())
+        {
+            if let Some(node) = self
+                .tokio_handle
+                .block_on(async { self.store_path_to_node(&store_path, &sub_path).await })?
+            {
+                match node {
+                    Node::Directory(_) => Ok(FileType::Directory),
+                    Node::File(_) => Ok(FileType::Regular),
+                    Node::Symlink(_) => Ok(FileType::Symlink),
+                }
+            } else {
+                self.std_io.file_type(path)
+            }
+        } else {
+            self.std_io.file_type(path)
+        }
+    }
+
+    #[instrument(skip(self), ret(level = Level::TRACE), err)]
     fn read_dir(&self, path: &Path) -> io::Result<Vec<(bytes::Bytes, FileType)>> {
         if let Ok((store_path, sub_path)) =
             StorePath::from_absolute_path_full(&path.to_string_lossy())
diff --git a/tvix/nix-compat/Cargo.toml b/tvix/nix-compat/Cargo.toml
index 876ac3ecad..91fd19475a 100644
--- a/tvix/nix-compat/Cargo.toml
+++ b/tvix/nix-compat/Cargo.toml
@@ -4,7 +4,7 @@ version = "0.1.0"
 edition = "2021"
 
 [features]
-# async NAR writer
+# async NAR writer. Also needs the `wire` feature.
 async = ["tokio"]
 # code emitting low-level packets used in the daemon protocol.
 wire = ["tokio", "pin-project-lite"]
diff --git a/tvix/nix-compat/build.rs b/tvix/nix-compat/build.rs
new file mode 100644
index 0000000000..c66b970162
--- /dev/null
+++ b/tvix/nix-compat/build.rs
@@ -0,0 +1,5 @@
+fn main() {
+    // Pick up new test case files
+    // https://github.com/la10736/rstest/issues/256
+    println!("cargo:rerun-if-changed=src/derivation/tests/derivation_tests")
+}
diff --git a/tvix/nix-compat/default.nix b/tvix/nix-compat/default.nix
index 9df76e12fc..08b053b77d 100644
--- a/tvix/nix-compat/default.nix
+++ b/tvix/nix-compat/default.nix
@@ -1,7 +1,11 @@
-{ depot, ... }:
+{ depot, lib, ... }:
 
-depot.tvix.crates.workspaceMembers.nix-compat.build.override {
+(depot.tvix.crates.workspaceMembers.nix-compat.build.override {
   runTests = true;
-  # make sure we also enable async here, so run the tests behind that feature flag.
-  features = [ "default" "async" "wire" ];
-}
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "async" "wire" ];
+  };
+})
diff --git a/tvix/nix-compat/src/nar/reader/mod.rs b/tvix/nix-compat/src/nar/reader/mod.rs
index 9e9237ead3..7e9143c8f3 100644
--- a/tvix/nix-compat/src/nar/reader/mod.rs
+++ b/tvix/nix-compat/src/nar/reader/mod.rs
@@ -16,7 +16,7 @@ use std::marker::PhantomData;
 // Required reading for understanding this module.
 use crate::nar::wire;
 
-#[cfg(feature = "async")]
+#[cfg(all(feature = "async", feature = "wire"))]
 pub mod r#async;
 
 mod read;
diff --git a/tvix/nix-compat/src/nar/wire/mod.rs b/tvix/nix-compat/src/nar/wire/mod.rs
index 9e99b530ce..26da04e67c 100644
--- a/tvix/nix-compat/src/nar/wire/mod.rs
+++ b/tvix/nix-compat/src/nar/wire/mod.rs
@@ -97,7 +97,7 @@ const TOK_PAD_PAR: [u8; 24] = *b"\0\0\0\0\0\0\0\0\x01\0\0\0\0\0\0\0)\0\0\0\0\0\0
 #[derive(Debug)]
 pub(crate) enum PadPar {}
 
-#[cfg(feature = "async")]
+#[cfg(all(feature = "async", feature = "wire"))]
 impl crate::wire::reader::Tag for PadPar {
     const PATTERN: &'static [u8] = &TOK_PAD_PAR;
 
diff --git a/tvix/nix-compat/src/wire/bytes/reader/mod.rs b/tvix/nix-compat/src/wire/bytes/reader/mod.rs
index 6bd376c06f..c0227f4e6c 100644
--- a/tvix/nix-compat/src/wire/bytes/reader/mod.rs
+++ b/tvix/nix-compat/src/wire/bytes/reader/mod.rs
@@ -414,6 +414,7 @@ mod tests {
     }
 
     /// Read the trailer immediately if there is no payload.
+    #[cfg(feature = "async")]
     #[tokio::test]
     async fn read_trailer_immediately() {
         use crate::nar::wire::PadPar;
@@ -431,6 +432,7 @@ mod tests {
     }
 
     /// Read the trailer even if we only read the exact payload size.
+    #[cfg(feature = "async")]
     #[tokio::test]
     async fn read_exact_trailer() {
         use crate::nar::wire::PadPar;
diff --git a/tvix/shell.nix b/tvix/shell.nix
index f0d8ab1657..947cda269b 100644
--- a/tvix/shell.nix
+++ b/tvix/shell.nix
@@ -16,6 +16,19 @@
             ./nixpkgs/cbtemulator-uds.patch
           ];
         });
+
+        # macFUSE bump containing fix for https://github.com/osxfuse/osxfuse/issues/974
+        # https://github.com/NixOS/nixpkgs/pull/320197
+        fuse =
+          if super.stdenv.isDarwin then
+            super.fuse.overrideAttrs
+              (old: rec {
+                version = "4.8.0";
+                src = super.fetchurl {
+                  url = "https://github.com/osxfuse/osxfuse/releases/download/macfuse-${version}/macfuse-${version}.dmg";
+                  hash = "sha256-ucTzO2qdN4QkowMVvC3+4pjEVjbwMsB0xFk+bvQxwtQ=";
+                };
+              }) else super.fuse;
       })
     ];
   })
@@ -30,12 +43,15 @@ pkgs.mkShell {
     pkgs.cargo-machete
     pkgs.cargo-expand
     pkgs.clippy
+    pkgs.d2
     pkgs.evans
     pkgs.fuse
     pkgs.go
     pkgs.grpcurl
     pkgs.hyperfine
     pkgs.mdbook
+    pkgs.mdbook-admonish
+    pkgs.mdbook-d2
     pkgs.mdbook-plantuml
     pkgs.nix_2_3 # b/313
     pkgs.pkg-config
diff --git a/tvix/store-go/pathinfo.pb.go b/tvix/store-go/pathinfo.pb.go
index a4915a3c1f..7615bd155c 100644
--- a/tvix/store-go/pathinfo.pb.go
+++ b/tvix/store-go/pathinfo.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/store/protos/pathinfo.proto
 
diff --git a/tvix/store-go/rpc_pathinfo.pb.go b/tvix/store-go/rpc_pathinfo.pb.go
index 883ffb3f01..155d59896b 100644
--- a/tvix/store-go/rpc_pathinfo.pb.go
+++ b/tvix/store-go/rpc_pathinfo.pb.go
@@ -3,7 +3,7 @@
 
 // Code generated by protoc-gen-go. DO NOT EDIT.
 // versions:
-// 	protoc-gen-go v1.33.0
+// 	protoc-gen-go v1.34.1
 // 	protoc        (unknown)
 // source: tvix/store/protos/rpc_pathinfo.proto
 
diff --git a/tvix/store/Cargo.toml b/tvix/store/Cargo.toml
index 4727f43f78..733b7c11f2 100644
--- a/tvix/store/Cargo.toml
+++ b/tvix/store/Cargo.toml
@@ -18,9 +18,6 @@ lazy_static = "1.4.0"
 nix-compat = { path = "../nix-compat", features = ["async"] }
 pin-project-lite = "0.2.13"
 prost = "0.12.1"
-opentelemetry = { version = "0.22.0", optional = true}
-opentelemetry-otlp = { version = "0.15.0", optional = true }
-opentelemetry_sdk = { version = "0.22.1", features = ["rt-tokio"], optional = true}
 serde = { version = "1.0.197", features = [ "derive" ] }
 serde_json = "1.0"
 serde_with = "3.7.0"
@@ -34,15 +31,16 @@ tokio-stream = { version = "0.1.14", features = ["fs"] }
 tokio-util = { version = "0.7.9", features = ["io", "io-util", "compat"] }
 tonic = { version = "0.11.0", features = ["tls", "tls-roots"] }
 tower = "0.4.13"
-tracing = "0.1.37"
-tracing-opentelemetry = "0.23.0"
-tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
+tower-http = { version = "0.4.4", features = ["trace"] }
 tvix-castore = { path = "../castore" }
 url = "2.4.0"
 walkdir = "2.4.0"
 reqwest = { version = "0.11.22", features = ["rustls-tls-native-roots", "stream"], default-features = false }
 lru = "0.12.3"
 parking_lot = "0.12.2"
+tvix-tracing = { path = "../tracing", features = ["tonic"] }
+tracing = "0.1.40"
+tracing-indicatif = "0.3.6"
 
 [dependencies.tonic-reflection]
 optional = true
@@ -72,8 +70,9 @@ cloud = [
   "tvix-castore/cloud"
 ]
 fuse = ["tvix-castore/fuse"]
-otlp = ["dep:opentelemetry", "dep:opentelemetry-otlp", "dep:opentelemetry_sdk"]
+otlp = ["tvix-tracing/otlp"]
 tonic-reflection = ["dep:tonic-reflection", "tvix-castore/tonic-reflection"]
+tracy = ["tvix-tracing/tracy"]
 virtiofs = ["tvix-castore/virtiofs"]
 # Whether to run the integration tests.
 # Requires the following packages in $PATH:
diff --git a/tvix/store/default.nix b/tvix/store/default.nix
index ad47994f24..3fe47fe60b 100644
--- a/tvix/store/default.nix
+++ b/tvix/store/default.nix
@@ -1,4 +1,4 @@
-{ depot, pkgs, ... }:
+{ depot, pkgs, lib, ... }:
 
 let
   mkImportCheck = p: expectedPath: {
@@ -22,31 +22,35 @@ let
   };
 in
 
-(depot.tvix.crates.workspaceMembers.tvix-store.build.override {
+(depot.tvix.crates.workspaceMembers.tvix-store.build.override (old: {
   runTests = true;
   testPreRun = ''
     export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt
   '';
-
-  # enable some optional features.
-  features = [ "default" "cloud" ]
-    # virtiofs feature currently fails to build on Darwin.
-    ++ pkgs.lib.optional pkgs.stdenv.isLinux "virtiofs";
-}).overrideAttrs (_: {
-  meta.ci.targets = [ "integration-tests" ];
-  meta.ci.extraSteps = {
-    import-docs = (mkImportCheck "tvix/store/docs" ./docs);
+  features = old.features
+    # virtiofs feature currently fails to build on Darwin
+    ++ lib.optional pkgs.stdenv.isLinux "virtiofs";
+})).overrideAttrs (old: rec {
+  meta.ci = {
+    targets = [ "integration-tests" ] ++ lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+    extraSteps.import-docs = (mkImportCheck "tvix/docs/src/store" ../docs/src/store);
   };
-  passthru.integration-tests = depot.tvix.crates.workspaceMembers.tvix-store.build.override {
-    runTests = true;
-    testPreRun = ''
+  passthru = (depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = ([ "cloud" "fuse" "otlp" "tonic-reflection" ]
+      # virtiofs feature currently fails to build on Darwin
+      ++ lib.optional pkgs.stdenv.isLinux "virtiofs");
+    override.testPreRun = ''
       export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt
-      export PATH="$PATH:${pkgs.lib.makeBinPath [pkgs.cbtemulator pkgs.google-cloud-bigtable-tool]}"
     '';
-
-    # enable some optional features.
-    features = [ "default" "cloud" "integration" ]
-      # virtiofs feature currently fails to build on Darwin.
-      ++ pkgs.lib.optional pkgs.stdenv.isLinux "virtiofs";
+  }) // {
+    integration-tests = depot.tvix.crates.workspaceMembers.${old.crateName}.build.override (old: {
+      runTests = true;
+      testPreRun = ''
+        export SSL_CERT_FILE=${pkgs.cacert}/etc/ssl/certs/ca-bundle.crt;
+        export PATH="$PATH:${pkgs.lib.makeBinPath [ pkgs.cbtemulator pkgs.google-cloud-bigtable-tool ]}"
+      '';
+      features = old.features ++ [ "integration" ];
+    });
   };
 })
diff --git a/tvix/store/src/bin/tvix-store.rs b/tvix/store/src/bin/tvix-store.rs
index 906d0ab520..657ce06720 100644
--- a/tvix/store/src/bin/tvix-store.rs
+++ b/tvix/store/src/bin/tvix-store.rs
@@ -13,11 +13,10 @@ use tokio_listener::Listener;
 use tokio_listener::SystemOptions;
 use tokio_listener::UserOptions;
 use tonic::transport::Server;
-use tracing::info;
-use tracing::Level;
-use tracing_subscriber::EnvFilter;
-use tracing_subscriber::Layer;
-use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
+use tower::ServiceBuilder;
+use tower_http::trace::{DefaultMakeSpan, TraceLayer};
+use tracing::{info, info_span, instrument, Level, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt as _;
 use tvix_castore::import::fs::ingest_path;
 use tvix_store::nar::NarCalculationService;
 use tvix_store::proto::NarInfo;
@@ -37,15 +36,6 @@ use tvix_store::pathinfoservice::make_fs;
 #[cfg(feature = "fuse")]
 use tvix_castore::fs::fuse::FuseDaemon;
 
-#[cfg(feature = "otlp")]
-use opentelemetry::KeyValue;
-#[cfg(feature = "otlp")]
-use opentelemetry_sdk::{
-    resource::{ResourceDetector, SdkProvidedResourceDetector},
-    trace::BatchConfig,
-    Resource,
-};
-
 #[cfg(feature = "virtiofs")]
 use tvix_castore::fs::virtiofs::start_virtiofs_daemon;
 
@@ -65,8 +55,8 @@ struct Cli {
     /// It's also possible to set `RUST_LOG` according to
     /// `tracing_subscriber::filter::EnvFilter`, which will always have
     /// priority.
-    #[arg(long)]
-    log_level: Option<Level>,
+    #[arg(long, default_value_t=Level::INFO)]
+    log_level: Level,
 
     #[command(subcommand)]
     command: Commands,
@@ -197,88 +187,15 @@ enum Commands {
     },
 }
 
-#[cfg(all(feature = "fuse", not(target_os = "macos")))]
+#[cfg(feature = "fuse")]
 fn default_threads() -> usize {
     std::thread::available_parallelism()
         .map(|threads| threads.into())
         .unwrap_or(4)
 }
-// On MacFUSE only a single channel will receive ENODEV when the file system is
-// unmounted and so all the other channels will block forever.
-// See https://github.com/osxfuse/osxfuse/issues/974
-#[cfg(all(feature = "fuse", target_os = "macos"))]
-fn default_threads() -> usize {
-    1
-}
-
-#[tokio::main]
-async fn main() -> Result<(), Box<dyn std::error::Error>> {
-    let cli = Cli::parse();
-
-    // configure log settings
-    let level = cli.log_level.unwrap_or(Level::INFO);
-
-    // Set up the tracing subscriber.
-    let subscriber = tracing_subscriber::registry().with(
-        tracing_subscriber::fmt::Layer::new()
-            .with_writer(std::io::stderr)
-            .compact()
-            .with_filter(
-                EnvFilter::builder()
-                    .with_default_directive(level.into())
-                    .from_env()
-                    .expect("invalid RUST_LOG"),
-            ),
-    );
-
-    // Add the otlp layer (when otlp is enabled, and it's not disabled in the CLI)
-    // then init the registry.
-    // If the feature is feature-flagged out, just init without adding the layer.
-    // It's necessary to do this separately, as every with() call chains the
-    // layer into the type of the registry.
-    #[cfg(feature = "otlp")]
-    {
-        let subscriber = if cli.otlp {
-            let tracer = opentelemetry_otlp::new_pipeline()
-                .tracing()
-                .with_exporter(opentelemetry_otlp::new_exporter().tonic())
-                .with_batch_config(BatchConfig::default())
-                .with_trace_config(opentelemetry_sdk::trace::config().with_resource({
-                    // use SdkProvidedResourceDetector.detect to detect resources,
-                    // but replace the default service name with our default.
-                    // https://github.com/open-telemetry/opentelemetry-rust/issues/1298
-                    let resources =
-                        SdkProvidedResourceDetector.detect(std::time::Duration::from_secs(0));
-                    // SdkProvidedResourceDetector currently always sets
-                    // `service.name`, but we don't like its default.
-                    if resources.get("service.name".into()).unwrap() == "unknown_service".into() {
-                        resources.merge(&Resource::new([KeyValue::new(
-                            "service.name",
-                            "tvix.store",
-                        )]))
-                    } else {
-                        resources
-                    }
-                }))
-                .install_batch(opentelemetry_sdk::runtime::Tokio)?;
-
-            // Create a tracing layer with the configured tracer
-            let layer = tracing_opentelemetry::layer().with_tracer(tracer);
-
-            subscriber.with(Some(layer))
-        } else {
-            subscriber.with(None)
-        };
-
-        subscriber.try_init()?;
-    }
-
-    // Init the registry (when otlp is not enabled)
-    #[cfg(not(feature = "otlp"))]
-    {
-        subscriber.try_init()?;
-    }
 
+#[instrument(skip_all)]
+async fn run_cli(cli: Cli) -> Result<(), Box<dyn std::error::Error>> {
     match cli.command {
         Commands::Daemon {
             listen_address,
@@ -300,7 +217,17 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
                 .parse()
                 .unwrap();
 
-            let mut server = Server::builder();
+            let mut server = Server::builder().layer(
+                ServiceBuilder::new()
+                    .layer(
+                        TraceLayer::new_for_grpc().make_span_with(
+                            DefaultMakeSpan::new()
+                                .level(Level::INFO)
+                                .include_headers(true),
+                        ),
+                    )
+                    .map_request(tvix_tracing::propagate::tonic::accept_trace),
+            );
 
             #[allow(unused_mut)]
             let mut router = server
@@ -416,15 +343,26 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             // Arc the PathInfoService, as we clone it .
             let path_info_service: Arc<dyn PathInfoService> = path_info_service.into();
 
+            let lookups_span = info_span!(
+                "lookup pathinfos",
+                "indicatif.pb_show" = tracing::field::Empty
+            );
+            lookups_span.pb_set_length(reference_graph.closure.len() as u64);
+            lookups_span.pb_set_style(&tvix_tracing::PB_PROGRESS_STYLE);
+            lookups_span.pb_start();
+
             // From our reference graph, lookup all pathinfos that might exist.
             let elems: Vec<_> = futures::stream::iter(reference_graph.closure)
                 .map(|elem| {
                     let path_info_service = path_info_service.clone();
                     async move {
-                        path_info_service
+                        let resp = path_info_service
                             .get(*elem.path.digest())
                             .await
-                            .map(|resp| (elem, resp))
+                            .map(|resp| (elem, resp));
+
+                        Span::current().pb_inc(1);
+                        resp
                     }
                 })
                 .buffer_unordered(50)
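
The hunks above introduce the tracing-indicatif progress pattern used throughout this change: a span opts into a progress bar via the `indicatif.pb_show` field, gets a length and style, and work ticks it forward. A minimal sketch of the same pattern, assuming the `PB_PROGRESS_STYLE` constant added by this diff in `tvix-tracing`:

```rust
use tracing::{info_span, Span};
use tracing_indicatif::span_ext::IndicatifSpanExt;

fn process(items: &[u32]) {
    // Opt this span into a progress bar via the indicatif.pb_show field.
    let span = info_span!(
        "processing items",
        "indicatif.pb_show" = tracing::field::Empty
    );
    span.pb_set_length(items.len() as u64);
    span.pb_set_style(&tvix_tracing::PB_PROGRESS_STYLE);
    span.pb_start();

    let _guard = span.enter();
    for _item in items {
        // ... per-item work ...
        Span::current().pb_inc(1);
    }
}
```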
@@ -508,7 +446,7 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
                 )
                 .await?;
 
-            let mut fuse_daemon = tokio::task::spawn_blocking(move || {
+            let fuse_daemon = tokio::task::spawn_blocking(move || {
                 let fs = make_fs(
                     blob_service,
                     directory_service,
@@ -522,16 +460,22 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
             })
             .await??;
 
-            // grab a handle to unmount the file system, and register a signal
-            // handler.
-            tokio::spawn(async move {
-                tokio::signal::ctrl_c().await.unwrap();
-                info!("interrupt received, unmounting…");
-                tokio::task::spawn_blocking(move || fuse_daemon.unmount()).await??;
-                info!("unmount occured, terminating…");
-                Ok::<_, std::io::Error>(())
-            })
-            .await??;
+            // Wait for a ctrl_c and then call fuse_daemon.unmount().
+            tokio::spawn({
+                let fuse_daemon = fuse_daemon.clone();
+                async move {
+                    tokio::signal::ctrl_c().await.unwrap();
+                    info!("interrupt received, unmounting…");
+                    tokio::task::spawn_blocking(move || fuse_daemon.unmount()).await??;
+                    info!("unmount occurred, terminating…");
+                    Ok::<_, std::io::Error>(())
+                }
+            });
+
+            // Wait for the server to finish, which can happen either through
+            // it being unmounted externally, or through a signal invoking the
+            // handler above.
+            tokio::task::spawn_blocking(move || fuse_daemon.wait()).await?
         }
         #[cfg(feature = "virtiofs")]
         Commands::VirtioFs {
@@ -567,3 +511,36 @@ async fn main() -> Result<(), Box<dyn std::error::Error>> {
     };
     Ok(())
 }
+
+#[tokio::main]
+async fn main() -> Result<(), Box<dyn std::error::Error>> {
+    let cli = Cli::parse();
+
+    let tracing_handle = {
+        let mut builder = tvix_tracing::TracingBuilder::default();
+        builder = builder.level(cli.log_level).enable_progressbar();
+        #[cfg(feature = "otlp")]
+        {
+            if cli.otlp {
+                builder = builder.enable_otlp("tvix.store");
+            }
+        }
+        builder.build()?
+    };
+
+    tokio::select! {
+        res = tokio::signal::ctrl_c() => {
+            res?;
+            if let Err(e) = tracing_handle.force_shutdown().await {
+                eprintln!("failed to shutdown tracing: {e}");
+            }
+            Ok(())
+        },
+        res = run_cli(cli) => {
+            if let Err(e) = tracing_handle.shutdown().await {
+                eprintln!("failed to shutdown tracing: {e}");
+            }
+            res
+        }
+    }
+}
diff --git a/tvix/store/src/import.rs b/tvix/store/src/import.rs
index 888380bca9..70a97982e6 100644
--- a/tvix/store/src/import.rs
+++ b/tvix/store/src/import.rs
@@ -81,7 +81,7 @@ pub fn path_to_name(path: &Path) -> std::io::Result<&str> {
 pub fn derive_nar_ca_path_info(
     nar_size: u64,
     nar_sha256: [u8; 32],
-    ca: Option<CAHash>,
+    ca: Option<&CAHash>,
     root_node: Node,
 ) -> PathInfo {
     // assemble the [crate::proto::PathInfo] object.
@@ -102,8 +102,9 @@ pub fn derive_nar_ca_path_info(
     }
 }
 
-/// Ingest the given path `path` and register the resulting output path in the
-/// [`PathInfoService`] as a recursive fixed output NAR.
+/// Ingests the contents at the given path `path` into castore, and registers
+/// the resulting root node in the passed PathInfoService, using the "NAR
+/// sha256 digest" and the passed name for output path calculation.
 #[instrument(skip_all, fields(store_name=name, path=?path), err)]
 pub async fn import_path_as_nar_ca<BS, DS, PS, NS, P>(
     path: P,
@@ -137,20 +138,20 @@ where
         )
     })?;
 
-    // assemble a new root_node with a name that is derived from the nar hash.
+    // rename the root node to match the calculated output path.
     let root_node = root_node.rename(output_path.to_string().into_bytes().into());
     log_node(&root_node, path.as_ref());
 
     let path_info = derive_nar_ca_path_info(
         nar_size,
         nar_sha256,
-        Some(CAHash::Nar(NixHash::Sha256(nar_sha256))),
+        Some(&CAHash::Nar(NixHash::Sha256(nar_sha256))),
         root_node,
     );
 
     // This new [`PathInfo`] that we get back from there might contain additional signatures or
     // information set by the service itself. In this function, we silently swallow it because
-    // callers doesn't really need it.
+    // callers don't really need it.
     let _path_info = path_info_service.as_ref().put(path_info).await?;
 
     Ok(output_path.to_owned())
diff --git a/tvix/store/src/nar/import.rs b/tvix/store/src/nar/import.rs
index 3d7c50014a..32c2f4e580 100644
--- a/tvix/store/src/nar/import.rs
+++ b/tvix/store/src/nar/import.rs
@@ -1,15 +1,57 @@
 use nix_compat::nar::reader::r#async as nar_reader;
-use tokio::{io::AsyncBufRead, sync::mpsc, try_join};
+use sha2::Digest;
+use tokio::{
+    io::{AsyncBufRead, AsyncRead},
+    sync::mpsc,
+    try_join,
+};
 use tvix_castore::{
     blobservice::BlobService,
     directoryservice::DirectoryService,
-    import::{ingest_entries, IngestionEntry, IngestionError},
+    import::{
+        blobs::{self, ConcurrentBlobUploader},
+        ingest_entries, IngestionEntry, IngestionError,
+    },
     proto::{node::Node, NamedNode},
     PathBuf,
 };
 
 /// Ingests the contents from a [AsyncRead] providing NAR into the tvix store,
 /// interacting with a [BlobService] and [DirectoryService].
+/// Returns the castore root node, as well as the sha256 and size of the NAR
+/// contents ingested.
+pub async fn ingest_nar_and_hash<R, BS, DS>(
+    blob_service: BS,
+    directory_service: DS,
+    r: &mut R,
+) -> Result<(Node, [u8; 32], u64), IngestionError<Error>>
+where
+    R: AsyncRead + Unpin + Send,
+    BS: BlobService + Clone + 'static,
+    DS: DirectoryService,
+{
+    let mut nar_hash = sha2::Sha256::new();
+    let mut nar_size = 0;
+
+    // Assemble NarHash and NarSize as we read bytes.
+    let r = tokio_util::io::InspectReader::new(r, |b| {
+        nar_size += b.len() as u64;
+        use std::io::Write;
+        nar_hash.write_all(b).unwrap();
+    });
+
+    // HACK: InspectReader doesn't implement AsyncBufRead.
+    // See whether this can be propagated through, so we can require our input
+    // reader to be buffered too.
+    let mut r = tokio::io::BufReader::new(r);
+
+    let root_node = ingest_nar(blob_service, directory_service, &mut r).await?;
+
+    Ok((root_node, nar_hash.finalize().into(), nar_size))
+}
+
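
The `InspectReader` tee in `ingest_nar_and_hash` is worth seeing in isolation: it hashes and counts bytes as they stream through, with no second pass over the data. A self-contained sketch (the input bytes are made up):

```rust
use sha2::{Digest, Sha256};
use tokio::io::AsyncReadExt;
use tokio_util::io::InspectReader;

#[tokio::main]
async fn main() -> std::io::Result<()> {
    let data: &[u8] = b"nar bytes go here";
    let mut hasher = Sha256::new();
    let mut size = 0u64;

    // Every chunk read through `r` is also fed to the closure.
    let mut r = InspectReader::new(data, |chunk| {
        size += chunk.len() as u64;
        hasher.update(chunk);
    });

    let mut sink = Vec::new();
    r.read_to_end(&mut sink).await?;
    drop(r); // release the borrows held by the inspection closure

    println!("size={size} sha256={:x}", hasher.finalize());
    Ok(())
}
```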
+/// Ingests the contents from an [AsyncRead] providing NAR into the tvix store,
+/// interacting with a [BlobService] and [DirectoryService].
 /// It returns the castore root node or an error.
 pub async fn ingest_nar<R, BS, DS>(
     blob_service: BS,
@@ -18,7 +60,7 @@ pub async fn ingest_nar<R, BS, DS>(
 ) -> Result<Node, IngestionError<Error>>
 where
     R: AsyncBufRead + Unpin + Send,
-    BS: BlobService + Clone,
+    BS: BlobService + Clone + 'static,
     DS: DirectoryService,
 {
     // open the NAR for reading.
@@ -29,14 +71,22 @@ where
     let rx = tokio_stream::wrappers::ReceiverStream::new(rx);
 
     let produce = async move {
+        let mut blob_uploader = ConcurrentBlobUploader::new(blob_service);
+
         let res = produce_nar_inner(
-            blob_service,
+            &mut blob_uploader,
             root_node,
             "root".parse().unwrap(), // HACK: the root node sent to ingest_entries may not be ROOT.
             tx.clone(),
         )
         .await;
 
+        if let Err(err) = blob_uploader.join().await {
+            tx.send(Err(err.into()))
+                .await
+                .map_err(|e| Error::IO(std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)))?;
+        }
+
         tx.send(res)
             .await
             .map_err(|e| Error::IO(std::io::Error::new(std::io::ErrorKind::BrokenPipe, e)))?;
@@ -54,13 +104,13 @@ where
 }
 
 async fn produce_nar_inner<BS>(
-    blob_service: BS,
+    blob_uploader: &mut ConcurrentBlobUploader<BS>,
     node: nar_reader::Node<'_, '_>,
     path: PathBuf,
     tx: mpsc::Sender<Result<IngestionEntry, Error>>,
 ) -> Result<IngestionEntry, Error>
 where
-    BS: BlobService + Clone,
+    BS: BlobService + Clone + 'static,
 {
     Ok(match node {
         nar_reader::Node::Symlink { target } => IngestionEntry::Symlink { path, target },
@@ -68,12 +118,8 @@ where
             executable,
             mut reader,
         } => {
-            let (digest, size) = {
-                let mut blob_writer = blob_service.open_write().await;
-                let size = tokio::io::copy_buf(&mut reader, &mut blob_writer).await?;
-
-                (blob_writer.close().await?, size)
-            };
+            let size = reader.len();
+            let digest = blob_uploader.upload(&path, size, &mut reader).await?;
 
             IngestionEntry::Regular {
                 path,
@@ -91,7 +137,7 @@ where
                     .expect("Tvix bug: failed to join name");
 
                 let entry = Box::pin(produce_nar_inner(
-                    blob_service.clone(),
+                    blob_uploader,
                     entry.node,
                     path,
                     tx.clone(),
@@ -112,6 +158,9 @@ where
 pub enum Error {
     #[error(transparent)]
     IO(#[from] std::io::Error),
+
+    #[error(transparent)]
+    BlobUpload(#[from] blobs::Error),
 }
 
 #[cfg(test)]
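
One detail of the producer side above that is easy to miss: a failure from `blob_uploader.join()` is sent through the same mpsc channel as the entries, so the consumer sees errors in-band instead of the stream silently ending. A stripped-down sketch of that pattern, with toy types in place of the ingestion entries:

```rust
use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    let (tx, mut rx) = mpsc::channel::<Result<u32, String>>(8);

    tokio::spawn(async move {
        for i in 0..3 {
            if tx.send(Ok(i)).await.is_err() {
                return; // consumer hung up (the BrokenPipe case in the real code)
            }
        }
        // A late failure (like blob_uploader.join()) is reported in-band.
        let _ = tx.send(Err("upload failed".into())).await;
    });

    while let Some(item) = rx.recv().await {
        match item {
            Ok(n) => println!("entry {n}"),
            Err(e) => eprintln!("producer error: {e}"),
        }
    }
}
```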
diff --git a/tvix/store/src/nar/mod.rs b/tvix/store/src/nar/mod.rs
index 164748a655..8cbb091f1a 100644
--- a/tvix/store/src/nar/mod.rs
+++ b/tvix/store/src/nar/mod.rs
@@ -4,6 +4,7 @@ use tvix_castore::B3Digest;
 mod import;
 mod renderer;
 pub use import::ingest_nar;
+pub use import::ingest_nar_and_hash;
 pub use renderer::calculate_size_and_sha256;
 pub use renderer::write_nar;
 pub use renderer::SimpleRenderer;
diff --git a/tvix/store/src/nar/renderer.rs b/tvix/store/src/nar/renderer.rs
index efd67671db..e3cb54dd22 100644
--- a/tvix/store/src/nar/renderer.rs
+++ b/tvix/store/src/nar/renderer.rs
@@ -6,6 +6,8 @@ use nix_compat::nar::writer::r#async as nar_writer;
 use sha2::{Digest, Sha256};
 use tokio::io::{self, AsyncWrite, BufReader};
 use tonic::async_trait;
+use tracing::{instrument, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_castore::{
     blobservice::BlobService,
     directoryservice::DirectoryService,
@@ -48,6 +50,7 @@ where
 
 /// Invoke [write_nar], and return the size and sha256 digest of the produced
 /// NAR output.
+#[instrument(skip_all, fields(indicatif.pb_show=1))]
 pub async fn calculate_size_and_sha256<BS, DS>(
     root_node: &castorepb::node::Node,
     blob_service: BS,
@@ -60,6 +63,10 @@ where
     let mut h = Sha256::new();
     let mut cw = CountWrite::from(&mut h);
 
+    let span = Span::current();
+    span.pb_set_message("Calculating NAR");
+    span.pb_start();
+
     write_nar(
         // The hasher doesn't speak async. It doesn't
         // actually do any I/O, so it's fine to wrap.
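
For spans that show a spinner rather than a bar, as `calculate_size_and_sha256` now does, the pattern is just a message plus `pb_start`; no length or increments are needed. A small sketch along those lines:

```rust
use tracing::{instrument, Span};
use tracing_indicatif::span_ext::IndicatifSpanExt;

// indicatif.pb_show=1 makes the IndicatifFilter display this span.
#[instrument(skip_all, fields(indicatif.pb_show = 1))]
async fn long_running_step() {
    let span = Span::current();
    span.pb_set_message("Calculating NAR");
    span.pb_start();

    // ... the actual work ...
}
```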
diff --git a/tvix/store/src/pathinfoservice/bigtable.rs b/tvix/store/src/pathinfoservice/bigtable.rs
index 7df9989fc5..707a686c0a 100644
--- a/tvix/store/src/pathinfoservice/bigtable.rs
+++ b/tvix/store/src/pathinfoservice/bigtable.rs
@@ -67,6 +67,22 @@ pub struct BigtableParameters {
     app_profile_id: String,
 }
 
+impl BigtableParameters {
+    #[cfg(test)]
+    pub fn default_for_tests() -> Self {
+        Self {
+            project_id: "project-1".into(),
+            instance_name: "instance-1".into(),
+            is_read_only: false,
+            channel_size: default_channel_size(),
+            timeout: default_timeout(),
+            table_name: "table-1".into(),
+            family_name: "cf1".into(),
+            app_profile_id: default_app_profile_id(),
+        }
+    }
+}
+
 fn default_app_profile_id() -> String {
     "default".to_owned()
 }
diff --git a/tvix/store/src/pathinfoservice/from_addr.rs b/tvix/store/src/pathinfoservice/from_addr.rs
index 455909e7f2..9173d25d05 100644
--- a/tvix/store/src/pathinfoservice/from_addr.rs
+++ b/tvix/store/src/pathinfoservice/from_addr.rs
@@ -105,9 +105,12 @@ pub async fn from_addr(
             // - In the case of unix sockets, there must be a path, but may not be a host.
             // - In the case of non-unix sockets, there must be a host, but no path.
             // Constructing the channel is handled by tvix_castore::channel::from_url.
-            let client =
-                PathInfoServiceClient::new(tvix_castore::tonic::channel_from_url(&url).await?);
-            Box::new(GRPCPathInfoService::from_client(client))
+            Box::new(GRPCPathInfoService::from_client(
+                PathInfoServiceClient::with_interceptor(
+                    tvix_castore::tonic::channel_from_url(&url).await?,
+                    tvix_tracing::propagate::tonic::send_trace,
+                ),
+            ))
         }
         #[cfg(feature = "cloud")]
         "bigtable" => {
diff --git a/tvix/store/src/pathinfoservice/grpc.rs b/tvix/store/src/pathinfoservice/grpc.rs
index 93d2d67c31..bcee49aac6 100644
--- a/tvix/store/src/pathinfoservice/grpc.rs
+++ b/tvix/store/src/pathinfoservice/grpc.rs
@@ -6,30 +6,37 @@ use crate::{
 use async_stream::try_stream;
 use futures::stream::BoxStream;
 use nix_compat::nixbase32;
-use tonic::{async_trait, transport::Channel, Code};
-use tracing::instrument;
+use tonic::{async_trait, Code};
+use tracing::{instrument, Span};
+use tracing_indicatif::span_ext::IndicatifSpanExt;
 use tvix_castore::{proto as castorepb, Error};
 
 /// Connects to a (remote) tvix-store PathInfoService over gRPC.
 #[derive(Clone)]
-pub struct GRPCPathInfoService {
+pub struct GRPCPathInfoService<T> {
     /// The internal reference to a gRPC client.
     /// Cloning it is cheap, and it internally handles concurrent requests.
-    grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>,
+    grpc_client: proto::path_info_service_client::PathInfoServiceClient<T>,
 }
 
-impl GRPCPathInfoService {
+impl<T> GRPCPathInfoService<T> {
     /// construct a [GRPCPathInfoService] from a [proto::path_info_service_client::PathInfoServiceClient].
     /// panics if called outside the context of a tokio runtime.
     pub fn from_client(
-        grpc_client: proto::path_info_service_client::PathInfoServiceClient<Channel>,
+        grpc_client: proto::path_info_service_client::PathInfoServiceClient<T>,
     ) -> Self {
         Self { grpc_client }
     }
 }
 
 #[async_trait]
-impl PathInfoService for GRPCPathInfoService {
+impl<T> PathInfoService for GRPCPathInfoService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
     #[instrument(level = "trace", skip_all, fields(path_info.digest = nixbase32::encode(&digest)))]
     async fn get(&self, digest: [u8; 20]) -> Result<Option<PathInfo>, Error> {
         let path_info = self
@@ -106,12 +113,22 @@ impl PathInfoService for GRPCPathInfoService {
 }
 
 #[async_trait]
-impl NarCalculationService for GRPCPathInfoService {
-    #[instrument(level = "trace", skip_all, fields(root_node = ?root_node))]
+impl<T> NarCalculationService for GRPCPathInfoService<T>
+where
+    T: tonic::client::GrpcService<tonic::body::BoxBody> + Send + Sync + Clone + 'static,
+    T::ResponseBody: tonic::codegen::Body<Data = tonic::codegen::Bytes> + Send + 'static,
+    <T::ResponseBody as tonic::codegen::Body>::Error: Into<tonic::codegen::StdError> + Send,
+    T::Future: Send,
+{
+    #[instrument(level = "trace", skip_all, fields(root_node = ?root_node, indicatif.pb_show=1))]
     async fn calculate_nar(
         &self,
         root_node: &castorepb::node::Node,
     ) -> Result<(u64, [u8; 32]), Error> {
+        let span = Span::current();
+        span.pb_set_message("Waiting for NAR calculation");
+        span.pb_start();
+
         let path_info = self
             .grpc_client
             .clone()
@@ -135,6 +152,7 @@ impl NarCalculationService for GRPCPathInfoService {
 #[cfg(test)]
 mod tests {
     use crate::pathinfoservice::tests::make_grpc_path_info_service_client;
+    use crate::pathinfoservice::PathInfoService;
     use crate::tests::fixtures;
 
     /// This ensures connecting via gRPC works as expected.
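
Making `GRPCPathInfoService` generic over the transport `T` is what enables the interceptor in `from_addr.rs` above: both a plain `Channel` and an interceptor-wrapped service satisfy the `GrpcService` bounds. A sketch, assuming the crate paths from this diff are reachable as `tvix_store::...`:

```rust
use tonic::transport::Endpoint;
use tvix_store::pathinfoservice::GRPCPathInfoService;
use tvix_store::proto::path_info_service_client::PathInfoServiceClient;

async fn connect() -> Result<(), Box<dyn std::error::Error>> {
    let channel = Endpoint::try_from("http://[::1]:50051")?.connect().await?;

    // Plain transport: GRPCPathInfoService<Channel>.
    let _plain = GRPCPathInfoService::from_client(PathInfoServiceClient::new(channel.clone()));

    // With trace propagation: GRPCPathInfoService<InterceptedService<Channel, _>>.
    let _traced = GRPCPathInfoService::from_client(PathInfoServiceClient::with_interceptor(
        channel,
        tvix_tracing::propagate::tonic::send_trace,
    ));

    Ok(())
}
```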
diff --git a/tvix/store/src/pathinfoservice/nix_http.rs b/tvix/store/src/pathinfoservice/nix_http.rs
index cccd4805c6..1dd7da4831 100644
--- a/tvix/store/src/pathinfoservice/nix_http.rs
+++ b/tvix/store/src/pathinfoservice/nix_http.rs
@@ -1,3 +1,5 @@
+use super::PathInfoService;
+use crate::{nar::ingest_nar_and_hash, proto::PathInfo};
 use futures::{stream::BoxStream, TryStreamExt};
 use nix_compat::{
     narinfo::{self, NarInfo},
@@ -5,20 +7,13 @@ use nix_compat::{
     nixhash::NixHash,
 };
 use reqwest::StatusCode;
-use sha2::Digest;
-use std::io::{self, Write};
-use tokio::io::{AsyncRead, BufReader};
-use tokio_util::io::InspectReader;
+use tokio::io::{self, AsyncRead};
 use tonic::async_trait;
 use tracing::{debug, instrument, warn};
 use tvix_castore::{
     blobservice::BlobService, directoryservice::DirectoryService, proto as castorepb, Error,
 };
 
-use crate::proto::PathInfo;
-
-use super::PathInfoService;
-
 /// NixHTTPPathInfoService acts as a bridge in between the Nix HTTP Binary cache
 /// protocol provided by Nix binary caches such as cache.nixos.org, and the Tvix
 /// Store Model.
@@ -178,7 +173,7 @@ where
         }));
 
         // handle decompression, depending on the compression field.
-        let r: Box<dyn AsyncRead + Send + Unpin> = match narinfo.compression {
+        let mut r: Box<dyn AsyncRead + Send + Unpin> = match narinfo.compression {
             Some("none") => Box::new(r) as Box<dyn AsyncRead + Send + Unpin>,
             Some("bzip2") | None => Box::new(async_compression::tokio::bufread::BzDecoder::new(r))
                 as Box<dyn AsyncRead + Send + Unpin>,
@@ -194,19 +189,8 @@ where
                 )));
             }
         };
-        let mut nar_hash = sha2::Sha256::new();
-        let mut nar_size = 0;
-
-        // Assemble NarHash and NarSize as we read bytes.
-        let r = InspectReader::new(r, |b| {
-            nar_size += b.len() as u64;
-            nar_hash.write_all(b).unwrap();
-        });
-
-        // HACK: InspectReader doesn't implement AsyncBufRead, but neither do our decompressors.
-        let mut r = BufReader::new(r);
 
-        let root_node = crate::nar::ingest_nar(
+        let (root_node, nar_hash, nar_size) = ingest_nar_and_hash(
             self.blob_service.clone(),
             self.directory_service.clone(),
             &mut r,
@@ -226,7 +210,6 @@ where
                 "NarSize mismatch".to_string(),
             ))?;
         }
-        let nar_hash: [u8; 32] = nar_hash.finalize().into();
         if narinfo.nar_hash != nar_hash {
             warn!(
                 narinfo.nar_hash = %NixHash::Sha256(narinfo.nar_hash),
diff --git a/tvix/store/src/pathinfoservice/sled.rs b/tvix/store/src/pathinfoservice/sled.rs
index eb3cf2ff1b..96ade18169 100644
--- a/tvix/store/src/pathinfoservice/sled.rs
+++ b/tvix/store/src/pathinfoservice/sled.rs
@@ -6,8 +6,7 @@ use nix_compat::nixbase32;
 use prost::Message;
 use std::path::Path;
 use tonic::async_trait;
-use tracing::instrument;
-use tracing::warn;
+use tracing::{instrument, warn};
 use tvix_castore::Error;
 
 /// SledPathInfoService stores PathInfo in a [sled](https://github.com/spacejam/sled).
diff --git a/tvix/store/src/pathinfoservice/tests/mod.rs b/tvix/store/src/pathinfoservice/tests/mod.rs
index 26166d1b75..061655e4ba 100644
--- a/tvix/store/src/pathinfoservice/tests/mod.rs
+++ b/tvix/store/src/pathinfoservice/tests/mod.rs
@@ -4,62 +4,30 @@
 
 use rstest::*;
 use rstest_reuse::{self, *};
-use std::sync::Arc;
-use tvix_castore::proto as castorepb;
-use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService};
 
 use super::PathInfoService;
+use crate::pathinfoservice::MemoryPathInfoService;
+use crate::pathinfoservice::SledPathInfoService;
 use crate::proto::PathInfo;
 use crate::tests::fixtures::DUMMY_PATH_DIGEST;
+use tvix_castore::proto as castorepb;
 
 mod utils;
 pub use self::utils::make_grpc_path_info_service_client;
 
-/// Convenience type alias batching all three servives together.
-#[allow(clippy::upper_case_acronyms)]
-type BSDSPS = (
-    Arc<dyn BlobService>,
-    Arc<dyn DirectoryService>,
-    Box<dyn PathInfoService>,
-);
-
-/// Creates a PathInfoService using a new Memory{Blob,Directory}Service.
-/// We return a 3-tuple containing all of them, as some tests want to interact
-/// with all three.
-pub async fn make_path_info_service(uri: &str) -> BSDSPS {
-    let blob_service: Arc<dyn BlobService> = tvix_castore::blobservice::from_addr("memory://")
-        .await
-        .unwrap()
-        .into();
-    let directory_service: Arc<dyn DirectoryService> =
-        tvix_castore::directoryservice::from_addr("memory://")
-            .await
-            .unwrap()
-            .into();
-
-    (
-        blob_service.clone(),
-        directory_service.clone(),
-        crate::pathinfoservice::from_addr(uri, blob_service, directory_service)
-            .await
-            .unwrap(),
-    )
-}
+#[cfg(all(feature = "cloud", feature = "integration"))]
+use self::utils::make_bigtable_path_info_service;
 
 #[template]
 #[rstest]
-#[case::memory(make_path_info_service("memory://").await)]
-#[case::grpc(make_grpc_path_info_service_client().await)]
-#[case::sled(make_path_info_service("sled://").await)]
-#[cfg_attr(all(feature = "cloud",feature="integration"), case::bigtable(make_path_info_service("bigtable://instance-1?project_id=project-1&table_name=table-1&family_name=cf1").await))]
-pub fn path_info_services(
-    #[case] services: (
-        impl BlobService,
-        impl DirectoryService,
-        impl PathInfoService,
-    ),
-) {
-}
+#[case::memory(MemoryPathInfoService::default())]
+#[case::grpc({
+    let (_, _, svc) = make_grpc_path_info_service_client().await;
+    svc
+})]
+#[case::sled(SledPathInfoService::new_temporary().unwrap())]
+#[cfg_attr(all(feature = "cloud",feature="integration"), case::bigtable(make_bigtable_path_info_service().await))]
+pub fn path_info_services(#[case] svc: impl PathInfoService) {}
 
 // FUTUREWORK: add more tests rejecting invalid PathInfo messages.
 // A subset of them should also ensure references to other PathInfos, or
@@ -68,9 +36,8 @@ pub fn path_info_services(
 /// Trying to get a non-existent PathInfo should return Ok(None).
 #[apply(path_info_services)]
 #[tokio::test]
-async fn not_found(services: BSDSPS) {
-    let (_, _, path_info_service) = services;
-    assert!(path_info_service
+async fn not_found(svc: impl PathInfoService) {
+    assert!(svc
         .get(DUMMY_PATH_DIGEST)
         .await
         .expect("must succeed")
@@ -80,9 +47,7 @@ async fn not_found(services: BSDSPS) {
 /// Put a PathInfo into the store, get it back.
 #[apply(path_info_services)]
 #[tokio::test]
-async fn put_get(services: BSDSPS) {
-    let (_, _, path_info_service) = services;
-
+async fn put_get(svc: impl PathInfoService) {
     let path_info = PathInfo {
         node: Some(castorepb::Node {
             node: Some(castorepb::node::Node::Symlink(castorepb::SymlinkNode {
@@ -94,20 +59,14 @@ async fn put_get(services: BSDSPS) {
     };
 
     // insert
-    let resp = path_info_service
-        .put(path_info.clone())
-        .await
-        .expect("must succeed");
+    let resp = svc.put(path_info.clone()).await.expect("must succeed");
 
     // expect the returned PathInfo to be equal (for now)
     // in the future, some stores might add additional fields/signatures.
     assert_eq!(path_info, resp);
 
     // get it back
-    let resp = path_info_service
-        .get(DUMMY_PATH_DIGEST)
-        .await
-        .expect("must succeed");
+    let resp = svc.get(DUMMY_PATH_DIGEST).await.expect("must succeed");
 
     assert_eq!(Some(path_info), resp);
 }
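
The rewritten template keeps each case self-contained: `#[apply(path_info_services)]` stamps out one test per `#[case]` constructor. A toy sketch of the rstest_reuse mechanics, independent of the tvix types:

```rust
use rstest::rstest;
use rstest_reuse::{apply, template};

#[template]
#[rstest]
#[case::small(2)]
#[case::large(100)]
fn sizes(#[case] n: usize) {}

// Expands into one test per case declared on the template.
#[apply(sizes)]
fn is_positive(#[case] n: usize) {
    assert!(n > 0);
}
```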
diff --git a/tvix/store/src/pathinfoservice/tests/utils.rs b/tvix/store/src/pathinfoservice/tests/utils.rs
index 30c5902b61..3e4fe5c05a 100644
--- a/tvix/store/src/pathinfoservice/tests/utils.rs
+++ b/tvix/store/src/pathinfoservice/tests/utils.rs
@@ -1,6 +1,7 @@
 use std::sync::Arc;
 
 use tonic::transport::{Endpoint, Server, Uri};
+use tvix_castore::{blobservice::BlobService, directoryservice::DirectoryService};
 
 use crate::{
     nar::{NarCalculationService, SimpleRenderer},
@@ -15,7 +16,11 @@ use crate::{
 /// Constructs and returns a gRPC PathInfoService.
 /// We also return memory-based {Blob,Directory}Service,
 /// as the consumer of this function accepts a 3-tuple.
-pub async fn make_grpc_path_info_service_client() -> super::BSDSPS {
+pub async fn make_grpc_path_info_service_client() -> (
+    impl BlobService,
+    impl DirectoryService,
+    GRPCPathInfoService<tonic::transport::Channel>,
+) {
     let (left, right) = tokio::io::duplex(64);
 
     let blob_service = blob_service();
@@ -47,18 +52,27 @@ pub async fn make_grpc_path_info_service_client() -> super::BSDSPS {
     // Create a client, connecting to the right side. The URI is unused.
     let mut maybe_right = Some(right);
 
-    let path_info_service = Box::new(GRPCPathInfoService::from_client(
-        PathInfoServiceClient::new(
-            Endpoint::try_from("http://[::]:50051")
-                .unwrap()
-                .connect_with_connector(tower::service_fn(move |_: Uri| {
-                    let right = maybe_right.take().unwrap();
-                    async move { Ok::<_, std::io::Error>(right) }
-                }))
-                .await
-                .unwrap(),
-        ),
+    let path_info_service = GRPCPathInfoService::from_client(PathInfoServiceClient::new(
+        Endpoint::try_from("http://[::]:50051")
+            .unwrap()
+            .connect_with_connector(tower::service_fn(move |_: Uri| {
+                let right = maybe_right.take().unwrap();
+                async move { Ok::<_, std::io::Error>(right) }
+            }))
+            .await
+            .unwrap(),
     ));
 
     (blob_service, directory_service, path_info_service)
 }
+
+#[cfg(all(feature = "cloud", feature = "integration"))]
+pub(crate) async fn make_bigtable_path_info_service(
+) -> crate::pathinfoservice::BigtablePathInfoService {
+    use crate::pathinfoservice::bigtable::BigtableParameters;
+    use crate::pathinfoservice::BigtablePathInfoService;
+
+    BigtablePathInfoService::connect(BigtableParameters::default_for_tests())
+        .await
+        .unwrap()
+}
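
The duplex trick in `make_grpc_path_info_service_client` deserves a note: `tokio::io::duplex` yields an in-memory byte pipe, one end is served by the tonic router in a spawned task, and the client "connects" to the other end through a custom connector, so the URI is never resolved. A condensed sketch of the client half:

```rust
use tonic::transport::{Channel, Endpoint, Uri};

async fn connect_in_memory() -> Result<Channel, tonic::transport::Error> {
    let (_server_side, client_side) = tokio::io::duplex(64);
    // In the real test, `_server_side` is handed to a tonic Router in a
    // spawned task; here it only keeps the pipe open for the sketch.

    let mut maybe_io = Some(client_side);
    Endpoint::try_from("http://[::]:50051") // address is never used
        .unwrap()
        .connect_with_connector(tower::service_fn(move |_: Uri| {
            let io = maybe_io.take().unwrap();
            async move { Ok::<_, std::io::Error>(io) }
        }))
        .await
}
```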
diff --git a/tvix/store/src/utils.rs b/tvix/store/src/utils.rs
index e6e42f6ec4..bd3c65a779 100644
--- a/tvix/store/src/utils.rs
+++ b/tvix/store/src/utils.rs
@@ -9,6 +9,7 @@ use tvix_castore::{
     blobservice::{self, BlobService},
     directoryservice::{self, DirectoryService},
 };
+use url::Url;
 
 use crate::nar::{NarCalculationService, SimpleRenderer};
 use crate::pathinfoservice::{self, PathInfoService};
@@ -31,6 +32,7 @@ pub async fn construct_services(
         directoryservice::from_addr(directory_service_addr.as_ref())
             .await?
             .into();
+
     let path_info_service = pathinfoservice::from_addr(
         path_info_service_addr.as_ref(),
         blob_service.clone(),
@@ -38,11 +40,32 @@ pub async fn construct_services(
     )
     .await?;
 
-    // TODO: grpc client also implements NarCalculationService
-    let nar_calculation_service = Box::new(SimpleRenderer::new(
-        blob_service.clone(),
-        directory_service.clone(),
-    )) as Box<dyn NarCalculationService>;
+    // HACK: The grpc client also implements NarCalculationService, and we
+    // really want to use it (otherwise we'd need to fetch everything again for hashing).
+    // Until we revamp store composition and config, detect this special case here.
+    let nar_calculation_service: Box<dyn NarCalculationService> = {
+        use crate::pathinfoservice::GRPCPathInfoService;
+        use crate::proto::path_info_service_client::PathInfoServiceClient;
+
+        let url = Url::parse(path_info_service_addr.as_ref())
+            .map_err(|e| io::Error::other(e.to_string()))?;
+
+        if url.scheme().starts_with("grpc+") {
+            Box::new(GRPCPathInfoService::from_client(
+                PathInfoServiceClient::with_interceptor(
+                    tvix_castore::tonic::channel_from_url(&url)
+                        .await
+                        .map_err(|e| io::Error::other(e.to_string()))?,
+                    tvix_tracing::propagate::tonic::send_trace,
+                ),
+            ))
+        } else {
+            Box::new(SimpleRenderer::new(
+                blob_service.clone(),
+                directory_service.clone(),
+            )) as Box<dyn NarCalculationService>
+        }
+    };
 
     Ok((
         blob_service,
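
The scheme check in `construct_services` is plain string sniffing on the parsed URL. A tiny illustration of the same dispatch; `is_grpc_addr` is a hypothetical helper, not part of the diff:

```rust
use url::Url;

/// Hypothetical helper: true if the PathInfoService address points at a
/// gRPC endpoint (grpc+http://, grpc+unix://, ...).
fn is_grpc_addr(addr: &str) -> std::io::Result<bool> {
    let url = Url::parse(addr).map_err(|e| std::io::Error::other(e.to_string()))?;
    Ok(url.scheme().starts_with("grpc+"))
}

fn main() -> std::io::Result<()> {
    assert!(is_grpc_addr("grpc+http://[::1]:8000")?);
    assert!(!is_grpc_addr("memory://")?);
    Ok(())
}
```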
diff --git a/tvix/tracing/Cargo.toml b/tvix/tracing/Cargo.toml
new file mode 100644
index 0000000000..bc9a8c3c77
--- /dev/null
+++ b/tvix/tracing/Cargo.toml
@@ -0,0 +1,43 @@
+[package]
+name = "tvix-tracing"
+version = "0.1.0"
+edition = "2021"
+
+[dependencies]
+lazy_static = "1.4.0"
+tracing = { version = "0.1.40", features = ["max_level_trace", "release_max_level_debug"] }
+tracing-subscriber = { version = "0.3.18", features = ["env-filter"] }
+indicatif = "0.17.8"
+tracing-indicatif = "0.3.6"
+tokio = { version = "1.32.0" , features = ["sync", "rt"] }
+thiserror = "1.0.38"
+
+tracing-opentelemetry = { version = "0.23.0", optional = true }
+opentelemetry = { version = "0.22.0", optional = true }
+opentelemetry-otlp = { version = "0.15.0", optional = true }
+opentelemetry_sdk = { version = "0.22.1", features = ["rt-tokio"], optional = true }
+tracing-tracy = { version = "0.11.0", features = ["flush-on-exit"], optional = true }
+opentelemetry-http = { version = "0.11.0", optional = true }
+
+tonic = { version = "0.11.0", optional = true }
+http  = { version = "0.2.11", optional = true }
+
+[features]
+default = []
+otlp = [
+  "dep:tracing-opentelemetry",
+  "dep:opentelemetry",
+  "dep:opentelemetry-otlp",
+  "dep:opentelemetry_sdk",
+  "dep:opentelemetry-http"
+]
+tracy = [
+  "dep:tracing-tracy"
+]
+tonic = [
+  "dep:tonic",
+  "dep:http",
+]
+
+[lints]
+workspace = true
diff --git a/tvix/tracing/default.nix b/tvix/tracing/default.nix
new file mode 100644
index 0000000000..dd7dc200f2
--- /dev/null
+++ b/tvix/tracing/default.nix
@@ -0,0 +1,11 @@
+{ depot, lib, ... }:
+
+(depot.tvix.crates.workspaceMembers.tvix-tracing.build.override {
+  runTests = true;
+}).overrideAttrs (old: rec {
+  meta.ci.targets = lib.filter (x: lib.hasPrefix "with-features" x || x == "no-features") (lib.attrNames passthru);
+  passthru = depot.tvix.utils.mkFeaturePowerset {
+    inherit (old) crateName;
+    features = [ "otlp" "tracy" ];
+  };
+})
diff --git a/tvix/tracing/src/lib.rs b/tvix/tracing/src/lib.rs
new file mode 100644
index 0000000000..b965ca4a3d
--- /dev/null
+++ b/tvix/tracing/src/lib.rs
@@ -0,0 +1,302 @@
+use indicatif::ProgressStyle;
+use lazy_static::lazy_static;
+use tokio::sync::{mpsc, oneshot};
+use tracing::Level;
+use tracing_indicatif::{filter::IndicatifFilter, IndicatifLayer};
+use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer};
+
+#[cfg(feature = "otlp")]
+use opentelemetry::{trace::Tracer, KeyValue};
+#[cfg(feature = "otlp")]
+use opentelemetry_sdk::{
+    propagation::TraceContextPropagator,
+    resource::{ResourceDetector, SdkProvidedResourceDetector},
+    trace::BatchConfigBuilder,
+    Resource,
+};
+#[cfg(feature = "tracy")]
+use tracing_tracy::TracyLayer;
+
+#[cfg(feature = "tonic")] // TODO or http
+pub mod propagate;
+
+lazy_static! {
+    pub static ref PB_PROGRESS_STYLE: ProgressStyle = ProgressStyle::with_template(
+        "{span_child_prefix} {wide_msg} {bar:10} ({elapsed}) {pos:>7}/{len:7}"
+    )
+    .expect("invalid progress template");
+    pub static ref PB_TRANSFER_STYLE: ProgressStyle = ProgressStyle::with_template(
+        "{span_child_prefix} {wide_msg} {binary_bytes:>7}/{binary_total_bytes:7}@{decimal_bytes_per_sec} ({elapsed}) {bar:10} "
+    )
+    .expect("invalid progress template");
+    pub static ref PB_SPINNER_STYLE: ProgressStyle = ProgressStyle::with_template(
+        "{span_child_prefix}{spinner} {wide_msg} ({elapsed}) {pos:>7}/{len:7}"
+    )
+    .expect("invalid progress template");
+}
+
+#[derive(thiserror::Error, Debug)]
+pub enum Error {
+    #[error(transparent)]
+    Init(#[from] tracing_subscriber::util::TryInitError),
+
+    #[error(transparent)]
+    MpscSend(#[from] mpsc::error::SendError<Option<oneshot::Sender<()>>>),
+
+    #[error(transparent)]
+    OneshotRecv(#[from] oneshot::error::RecvError),
+}
+
+#[derive(Clone)]
+pub struct TracingHandle {
+    tx: Option<mpsc::Sender<Option<oneshot::Sender<()>>>>,
+}
+
+impl TracingHandle {
+    /// This will flush any attached tracing providers, e.g. the otlp exporter, if enabled.
+    /// If none is enabled, this is a noop.
+    ///
+    /// It will not wait until the flush is complete, but you can pass in a oneshot::Sender which
+    /// will receive a message once the flush is completed.
+    pub async fn flush(&self, msg: Option<oneshot::Sender<()>>) -> Result<(), Error> {
+        if let Some(tx) = &self.tx {
+            Ok(tx.send(msg).await?)
+        } else {
+            // If we have a message passed in we need to notify the receiver
+            if let Some(tx) = msg {
+                let _ = tx.send(());
+            }
+            Ok(())
+        }
+    }
+
+    /// This will flush all attached tracing providers and will wait until the flush is completed.
+    /// If no tracing providers like otlp are attached then this will be a noop.
+    ///
+    /// This should only be called on a regular shutdown.
+    /// If you need to shut down tracing on ctrl_c, use [force_shutdown](#method.force_shutdown)
+    /// instead, otherwise you will get otlp errors.
+    pub async fn shutdown(&self) -> Result<(), Error> {
+        let (tx, rx) = tokio::sync::oneshot::channel();
+        self.flush(Some(tx)).await?;
+        rx.await?;
+        Ok(())
+    }
+
+    /// This will flush all attached tracing providers and will wait until the flush is completed.
+    /// After this it will do some other necessary cleanup.
+    /// If no tracing providers like otlp are attached then this will be a noop.
+    ///
+    /// This should only be used if the tool received a ctrl_c, otherwise you will get otlp errors.
+    /// If you need to shut down tracing on a regular exit, use the [shutdown](#method.shutdown)
+    /// method instead.
+    pub async fn force_shutdown(&self) -> Result<(), Error> {
+        let (tx, rx) = tokio::sync::oneshot::channel();
+        self.flush(Some(tx)).await?;
+        rx.await?;
+
+        #[cfg(feature = "otlp")]
+        {
+            // Because of a bug within otlp we currently have to use spawn_blocking otherwise
+            // calling `shutdown_tracer_provider` can block forever. See
+            // https://github.com/open-telemetry/opentelemetry-rust/issues/1395#issuecomment-1953280335
+            //
+            // This still throws an error if the tool exits regularly: "OpenTelemetry trace error
+            // occurred. oneshot canceled", but not having this leads to errors if we cancel with
+            // ctrl_c.
+            // So right now this should only be used on ctrl_c; for a regular exit use the
+            // [shutdown](#shutdown) method.
+            let _ = tokio::task::spawn_blocking(move || {
+                opentelemetry::global::shutdown_tracer_provider();
+            })
+            .await;
+        }
+
+        Ok(())
+    }
+}
+
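
Usage of the handle, as a sketch: `flush(None)` fires and forgets, `flush(Some(tx))` lets the caller await completion, and `shutdown` wraps the latter, matching the `main()` changes earlier in this diff:

```rust
use tokio::sync::oneshot;

async fn demo(handle: &TracingHandle) -> Result<(), Error> {
    // Fire-and-forget flush.
    handle.flush(None).await?;

    // Flush and wait until the provider reports completion.
    let (tx, rx) = oneshot::channel();
    handle.flush(Some(tx)).await?;
    let _ = rx.await;

    // On a regular exit, prefer the wrapper:
    handle.shutdown().await
}
```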
+pub struct TracingBuilder {
+    level: Level,
+    progress_bar: bool,
+
+    #[cfg(feature = "otlp")]
+    service_name: Option<&'static str>,
+}
+
+impl Default for TracingBuilder {
+    fn default() -> Self {
+        TracingBuilder {
+            level: Level::INFO,
+            progress_bar: false,
+
+            #[cfg(feature = "otlp")]
+            service_name: None,
+        }
+    }
+}
+
+impl TracingBuilder {
+    /// Set the log level for all layers: stderr and otlp, if configured. RUST_LOG still takes
+    /// priority over this value.
+    pub fn level(mut self, level: Level) -> TracingBuilder {
+        self.level = level;
+        self
+    }
+
+    #[cfg(feature = "otlp")]
+    /// Enable otlp by setting a custom service_name
+    pub fn enable_otlp(mut self, service_name: &'static str) -> TracingBuilder {
+        self.service_name = Some(service_name);
+        self
+    }
+
+    /// Enable the progress bar layer; disabled by default
+    pub fn enable_progressbar(mut self) -> TracingBuilder {
+        self.progress_bar = true;
+        self
+    }
+
+    /// This will set up tracing based on the configuration passed in.
+    /// It will set up a stderr writer output layer and an EnvFilter based on the provided log
+    /// level (RUST_LOG still takes priority over the configured value).
+    /// The EnvFilter will be applied to all configured layers, including otlp.
+    ///
+    /// It will also configure otlp if the feature is enabled and a service_name was provided. It
+    /// will then set up a channel which is later used for flushing the provider.
+    pub fn build(self) -> Result<TracingHandle, Error> {
+        // Set up the tracing subscriber.
+        let indicatif_layer = IndicatifLayer::new().with_progress_style(PB_SPINNER_STYLE.clone());
+        let subscriber = tracing_subscriber::registry()
+            .with(
+                EnvFilter::builder()
+                    .with_default_directive(self.level.into())
+                    .from_env()
+                    .expect("invalid RUST_LOG"),
+            )
+            .with(
+                tracing_subscriber::fmt::Layer::new()
+                    .with_writer(indicatif_layer.get_stderr_writer())
+                    .compact(),
+            )
+            .with((self.progress_bar).then(|| {
+                indicatif_layer.with_filter(
+                    // only show progress for spans with the indicatif.pb_show field set
+                    IndicatifFilter::new(false),
+                )
+            }));
+
+        // Setup otlp if a service_name is configured
+        #[cfg(feature = "otlp")]
+        {
+            if let Some(service_name) = self.service_name {
+                // register a text map propagator for trace propagation
+                opentelemetry::global::set_text_map_propagator(TraceContextPropagator::new());
+
+                let (tracer, tx) = gen_otlp_tracer(service_name.to_string());
+                // Create a tracing layer with the configured tracer
+                let layer = tracing_opentelemetry::layer().with_tracer(tracer);
+
+                #[cfg(feature = "tracy")]
+                {
+                    subscriber
+                        .with(TracyLayer::default())
+                        .with(Some(layer))
+                        .try_init()?;
+                }
+
+                #[cfg(not(feature = "tracy"))]
+                {
+                    subscriber.with(Some(layer)).try_init()?;
+                }
+                return Ok(TracingHandle { tx: Some(tx) });
+            }
+        }
+        #[cfg(feature = "tracy")]
+        {
+            subscriber.with(TracyLayer::default()).try_init()?;
+        }
+        #[cfg(not(feature = "tracy"))]
+        {
+            subscriber.try_init()?;
+        }
+
+        Ok(TracingHandle { tx: None })
+    }
+}
+
+/// Returns an OTLP tracer, and the TX part of a channel, which can be used
+/// to request flushes (and signal back the completion of the flush).
+#[cfg(feature = "otlp")]
+fn gen_otlp_tracer(
+    service_name: String,
+) -> (
+    impl Tracer + tracing_opentelemetry::PreSampledTracer,
+    mpsc::Sender<Option<oneshot::Sender<()>>>,
+) {
+    let tracer = opentelemetry_otlp::new_pipeline()
+        .tracing()
+        .with_exporter(opentelemetry_otlp::new_exporter().tonic())
+        .with_batch_config(
+            BatchConfigBuilder::default()
+                // The default value for `max_export_batch_size` is 512, which we would fill
+                // pretty quickly, triggering an export. We want to make sure that the export
+                // is only done once the scheduled delay is met, and not as soon as 512 spans
+                // are collected.
+                .with_max_export_batch_size(4096)
+                // analogous to the default config: `max_export_batch_size * 4`
+                .with_max_queue_size(4096 * 4)
+                // only force an export to the otlp collector every 10 seconds to reduce the amount
+                // of error messages if an otlp collector is not available
+                .with_scheduled_delay(std::time::Duration::from_secs(10))
+                .build(),
+        )
+        .with_trace_config(opentelemetry_sdk::trace::config().with_resource({
+            // use SdkProvidedResourceDetector.detect to detect resources,
+            // but replace the default service name with our default.
+            // https://github.com/open-telemetry/opentelemetry-rust/issues/1298
+            let resources = SdkProvidedResourceDetector.detect(std::time::Duration::from_secs(0));
+            // SdkProvidedResourceDetector currently always sets
+            // `service.name`, but we don't like its default.
+            if resources.get("service.name".into()).unwrap() == "unknown_service".into() {
+                resources.merge(&Resource::new([KeyValue::new(
+                    "service.name",
+                    service_name,
+                )]))
+            } else {
+                resources
+            }
+        }))
+        .install_batch(opentelemetry_sdk::runtime::Tokio)
+        .expect("Failed to install batch exporter using Tokio");
+
+    // The tracer provider is needed later, e.g. for flushing.
+    // It has to be kept around so we can handle each message arriving on rx.
+    let tracer_provider = tracer
+        .provider()
+        .expect("Failed to get the tracer provider");
+
+    // Set up a channel for flushing trace providers later
+    let (tx, mut rx) = mpsc::channel::<Option<oneshot::Sender<()>>>(16);
+
+    // Spawning a task that listens on rx for any message. Once we receive a message we
+    // correctly call flush on the tracer_provider.
+    tokio::spawn(async move {
+        while let Some(m) = rx.recv().await {
+            // Because of a bug within otlp we currently have to use spawn_blocking,
+            // otherwise calling `force_flush` will block forever, especially if the
+            // tool was closed with ctrl_c. See
+            // https://github.com/open-telemetry/opentelemetry-rust/issues/1395#issuecomment-1953280335
+            let _ = tokio::task::spawn_blocking({
+                let tracer_provider = tracer_provider.clone();
+                move || tracer_provider.force_flush()
+            })
+            .await;
+            if let Some(tx) = m {
+                let _ = tx.send(());
+            }
+        }
+    });
+
+    (tracer, tx)
+}
diff --git a/tvix/tracing/src/propagate/mod.rs b/tvix/tracing/src/propagate/mod.rs
new file mode 100644
index 0000000000..42c532e9d8
--- /dev/null
+++ b/tvix/tracing/src/propagate/mod.rs
@@ -0,0 +1,9 @@
+#[cfg(feature = "tonic")]
+pub mod tonic;
+
+// TODO: Helper library for reqwest. We could use
+// https://github.com/TrueLayer/reqwest-middleware/tree/main/reqwest-tracing to realise this
+
+// TODO: Helper library for axum or another http server, see
+// https://github.com/hseeberger/hello-tracing-rs/blob/main/hello-tracing-common/src/otel/http.rs
+// as an example; we can reuse the tonic::accept_trace function, at least for a tower::ServiceBuilder
diff --git a/tvix/tracing/src/propagate/tonic.rs b/tvix/tracing/src/propagate/tonic.rs
new file mode 100644
index 0000000000..75455c0566
--- /dev/null
+++ b/tvix/tracing/src/propagate/tonic.rs
@@ -0,0 +1,57 @@
+#[cfg(feature = "otlp")]
+use opentelemetry::{global, propagation::Injector};
+#[cfg(feature = "otlp")]
+use opentelemetry_http::HeaderExtractor;
+#[cfg(feature = "otlp")]
+use tracing_opentelemetry::OpenTelemetrySpanExt;
+
+/// Trace context propagation: associate the current span with the otlp trace of the given request,
+/// if any and valid. This only sets the parent trace if the otlp feature is also enabled.
+pub fn accept_trace<B>(request: http::Request<B>) -> http::Request<B> {
+    // We only extract and set a parent trace if the otlp feature is enabled;
+    // otherwise this function is a noop and we return the request as is.
+    #[cfg(feature = "otlp")]
+    {
+        // Current context, if no or invalid data is received.
+        let parent_context = global::get_text_map_propagator(|propagator| {
+            propagator.extract(&HeaderExtractor(request.headers()))
+        });
+        tracing::Span::current().set_parent(parent_context);
+    }
+    request
+}
+
+#[cfg(feature = "otlp")]
+struct MetadataInjector<'a>(&'a mut tonic::metadata::MetadataMap);
+
+#[cfg(feature = "otlp")]
+impl Injector for MetadataInjector<'_> {
+    fn set(&mut self, key: &str, value: String) {
+        use tonic::metadata::{MetadataKey, MetadataValue};
+        use tracing::warn;
+
+        match MetadataKey::from_bytes(key.as_bytes()) {
+            Ok(key) => match MetadataValue::try_from(&value) {
+                Ok(value) => {
+                    self.0.insert(key, value);
+                }
+                Err(error) => warn!(value, error = format!("{error:#}"), "parse metadata value"),
+            },
+            Err(error) => warn!(key, error = format!("{error:#}"), "parse metadata key"),
+        }
+    }
+}
+
+/// Trace context propagation: send the trace context by injecting it into the metadata of the given
+/// request. This only injects the current span if the otlp feature is also enabled.
+#[allow(unused_mut)]
+pub fn send_trace<T>(mut request: tonic::Request<T>) -> Result<tonic::Request<T>, tonic::Status> {
+    #[cfg(feature = "otlp")]
+    {
+        global::get_text_map_propagator(|propagator| {
+            let context = tracing::Span::current().context();
+            propagator.inject_context(&context, &mut MetadataInjector(request.metadata_mut()))
+        });
+    }
+    Ok(request)
+}
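
Both halves of the propagation in one place: a server maps incoming requests through `accept_trace`, exactly as the tvix-store daemon does earlier in this diff, while clients inject context via `send_trace`. A wiring sketch; the turbofish is only needed because the layer is never applied to a concrete service here:

```rust
use tower::ServiceBuilder;
use tvix_tracing::propagate::tonic::accept_trace;

fn build_server() {
    // Server side: extract the incoming trace context for every request.
    let _server = tonic::transport::Server::builder().layer(
        ServiceBuilder::new().map_request(accept_trace::<tonic::body::BoxBody>),
    );

    // Client side (see from_addr.rs above):
    //   SomeClient::with_interceptor(channel, tvix_tracing::propagate::tonic::send_trace)
}
```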
diff --git a/tvix/utils.nix b/tvix/utils.nix
new file mode 100644
index 0000000000..5edc1dc2e8
--- /dev/null
+++ b/tvix/utils.nix
@@ -0,0 +1,42 @@
+{ lib, depot, ... }:
+
+{
+  mkFeaturePowerset = { crateName, features, override ? { } }:
+    let
+      powerset = xs:
+        let
+          addElement = set: element:
+            set ++ map (e: [ element ] ++ e) set;
+        in
+        lib.foldl' addElement [ [ ] ] xs;
+    in
+    lib.listToAttrs (map
+      (featuresPowerset: {
+        name = if featuresPowerset != [ ] then "with-features-${lib.concatStringsSep "-" featuresPowerset}" else "no-features";
+        value = depot.tvix.crates.workspaceMembers.${crateName}.build.override (old: {
+          runTests = true;
+          features = featuresPowerset;
+        } // (if lib.isFunction override then override old else override)
+        );
+      })
+      (powerset features));
+
+  # Filters the given source, only keeping files related to the build, preventing unnecessary rebuilds.
+  # Includes src in the root, all other .rs files, as well as Cargo.toml.
+  # Additional files to be included can be specified in extraFileset.
+  filterRustCrateSrc =
+    { root # The original src
+    , extraFileset ? null # Additional filesets to include (e.g. fileFilter for proto files)
+    }:
+    lib.fileset.toSource {
+      inherit root;
+      fileset = (lib.fileset.intersection
+        (lib.fileset.fromSource root) # We build our final fileset from the original src
+        (lib.fileset.unions ([
+          (root + "/src")
+          (lib.fileset.fileFilter (f: f.hasExt "rs") root)
+          # We assume that every Rust crate will at a minimum have .rs files and a Cargo.toml
+          (lib.fileset.fileFilter (f: f.name == "Cargo.toml") root)
+        ] ++ lib.optional (extraFileset != null) extraFileset)));
+    };
+}
diff --git a/users/Profpatsch/.hlint.yaml b/users/Profpatsch/.hlint.yaml
index f00f78c525..12b7c61b70 100644
--- a/users/Profpatsch/.hlint.yaml
+++ b/users/Profpatsch/.hlint.yaml
@@ -34,6 +34,8 @@
 - ignore: { name: Use tuple-section }
 - ignore: { name: Use forM_ }
 - ignore: { name: Functor law }
+- ignore: { name: Use maybe }
+
 # fst and snd are usually a code smell and should be explicit matches, _naming the ignored side.
 - ignore: { name: Use fst }
 - ignore: { name: Use snd }
diff --git a/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
index a542f8c7b8..2c9a48d134 100644
--- a/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
+++ b/users/Profpatsch/my-prelude/src/Postgres/MonadPostgres.hs
@@ -500,7 +500,6 @@ runPgFormat pool sqlStatement = do
         Pool.putResource localPool new
     )
     ( \(pgFmt, _localPool) -> do
-        putStderrLn "Running with warm pgformatter"
         ByteString.hPut pgFmt.stdinHdl sqlStatement
         -- close stdin to make pg_formatter format (it exits …)
         -- issue: https://github.com/darold/pgFormatter/issues/333
diff --git a/users/Profpatsch/openlab-tools/src/OpenlabTools.hs b/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
index 9fe51aba18..16f1b626ac 100644
--- a/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
+++ b/users/Profpatsch/openlab-tools/src/OpenlabTools.hs
@@ -151,12 +151,12 @@ runApp = withTracer $ \tracer -> do
                                 )
                               ]
                         if
-                            -- If the last cache update is newer or equal to the requested version, we can tell the browser it’s fine
-                            | Just modifiedSince <- req'.ifModifiedSince,
-                              modifiedSince >= new.lastModified ->
-                                pure $ Wai.responseLBS Http.status304 cacheToHeaders ""
-                            | otherwise ->
-                                pure $ h cacheToHeaders (new.result & toLazyBytes)
+                          -- If the last cache update is newer or equal to the requested version, we can tell the browser it’s fine
+                          | Just modifiedSince <- req'.ifModifiedSince,
+                            modifiedSince >= new.lastModified ->
+                              pure $ Wai.responseLBS Http.status304 cacheToHeaders ""
+                          | otherwise ->
+                              pure $ h cacheToHeaders (new.result & toLazyBytes)
                     )
               }
           ]
@@ -198,7 +198,7 @@ runApp = withTracer $ \tracer -> do
         (Parse.maybe $ Parse.fieldParser parseHeaderTime)
         & rmap (fmap mkSecondTime)
 
-parseRequest :: (MonadThrow f, MonadIO f) => Otel.Span -> Parse from a -> from -> f a
+parseRequest :: (MonadThrow f) => Otel.Span -> Parse from a -> from -> f a
 parseRequest span parser req =
   Parse.runParse "Unable to parse the HTTP request" parser req
     & assertM span id
@@ -220,9 +220,9 @@ heatmap = do
       t
         & firstSection (match (Soup.TagOpen ("") [("class", "heatmap")]))
         >>= firstSection (match (Soup.TagOpen "table" []))
-        <&> getTable
-        <&> (<> htmlToTags [hsx|<figcaption>source: <a href={mapallSpaceOla} target="_blank">mapall.space</a></figcaption>|])
-        <&> wrapTagStream (T2 (label @"el" "figure") (label @"attrs" []))
+          <&> getTable
+          <&> (<> htmlToTags [hsx|<figcaption>source: <a href={mapallSpaceOla} target="_blank">mapall.space</a></figcaption>|])
+          <&> wrapTagStream (T2 (label @"el" "figure") (label @"attrs" []))
 
     -- get the table from opening tag to closing tag (allowing nested tables)
     getTable = go 0
@@ -310,8 +310,8 @@ runHandlers runApplication handlers = do
 inSpan :: (MonadUnliftIO m, Otel.MonadTracer m) => Text -> m a -> m a
 inSpan name = Otel.inSpan name Otel.defaultSpanArguments
 
-inSpan' :: (MonadUnliftIO m, Otel.MonadTracer m) => Text -> (Otel.Span -> m a) -> m a
--- inSpan' name =  Otel.inSpan' name Otel.defaultSpanArguments
+inSpan' :: Text -> (Otel.Span -> m a) -> m a
+-- inSpan' name = Otel.inSpan' name Otel.defaultSpanArguments
 inSpan' _name act = act (error "todo telemetry disabled")
 
 zipT2 ::
@@ -379,17 +379,17 @@ httpJson opts span parser req = do
                   <&> Wai.parseContentType
                   <&> (\(ct, _mimeAttributes) -> ct)
           if
-              | statusCode == 200,
-                Just ct <- contentType,
-                ct == opts'.contentType ->
-                  Right $ (resp & Http.responseBody)
-              | statusCode == 200,
-                Just otherType <- contentType ->
-                  Left [fmt|Server returned a non-json body, with content-type "{otherType}"|]
-              | statusCode == 200,
-                Nothing <- contentType ->
-                  Left [fmt|Server returned a body with unspecified content type|]
-              | code <- statusCode -> Left [fmt|Server returned an non-200 error code, code {code}: {resp & showPretty}|]
+            | statusCode == 200,
+              Just ct <- contentType,
+              ct == opts'.contentType ->
+                Right $ (resp & Http.responseBody)
+            | statusCode == 200,
+              Just otherType <- contentType ->
+                Left [fmt|Server returned a non-json body, with content-type "{otherType}"|]
+            | statusCode == 200,
+              Nothing <- contentType ->
+                Left [fmt|Server returned a body with unspecified content type|]
+            | code <- statusCode -> Left [fmt|Server returned an non-200 error code, code {code}: {resp & showPretty}|]
       )
     >>= assertM
       span
@@ -398,7 +398,7 @@ httpJson opts span parser req = do
             & first (Json.parseErrorTree "could not parse redacted response")
       )
 
-assertM :: (MonadThrow f, MonadIO f) => Otel.Span -> (t -> Either ErrorTree a) -> t -> f a
+assertM :: (MonadThrow f) => Otel.Span -> (t -> Either ErrorTree a) -> t -> f a
 assertM span f v = case f v of
   Right a -> pure a
   Left err -> appThrowTree span err
@@ -419,7 +419,7 @@ data Cache a = Cache
     lastModified :: !SecondTime,
     result :: !a
   }
-  deriving (Show)
+  deriving stock (Show)
 
 newCache :: Text -> a -> IO (TVar (Cache a))
 newCache name result = do
@@ -528,8 +528,8 @@ recordException span dat = liftIO $ do
         ..
       }
 
-appThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> ErrorTree -> m a
-appThrowTree span exc = do
+appThrowTree :: (MonadThrow m) => Otel.Span -> ErrorTree -> m a
+appThrowTree _span exc = do
   let msg = prettyErrorTree exc
   -- recordException
   --   span
@@ -539,7 +539,7 @@ appThrowTree span exc = do
   --   )
   throwM $ AppException msg
 
-orAppThrowTree :: (MonadThrow m, MonadIO m) => Otel.Span -> Either ErrorTree a -> m a
+orAppThrowTree :: (MonadThrow m) => Otel.Span -> Either ErrorTree a -> m a
 orAppThrowTree span = \case
   Left err -> appThrowTree span err
   Right a -> pure a
diff --git a/users/Profpatsch/shell.nix b/users/Profpatsch/shell.nix
index b5095d476f..ec3326fe86 100644
--- a/users/Profpatsch/shell.nix
+++ b/users/Profpatsch/shell.nix
@@ -45,7 +45,7 @@ pkgs.mkShell {
       h.unix
       h.tagsoup
       h.attoparsec
-      h.iCalendar
+      # h.iCalendar
       h.case-insensitive
       h.hscolour
       h.nicify-lib
diff --git a/users/Profpatsch/whatcd-resolver/src/AppT.hs b/users/Profpatsch/whatcd-resolver/src/AppT.hs
index abe8ccad4c..3232004122 100644
--- a/users/Profpatsch/whatcd-resolver/src/AppT.hs
+++ b/users/Profpatsch/whatcd-resolver/src/AppT.hs
@@ -27,7 +27,7 @@ data Context = Context
     tracer :: Otel.Tracer,
     pgFormat :: PgFormatPool,
     pgConnPool :: Pool Postgres.Connection,
-    transmissionSessionId :: MVar ByteString
+    transmissionSessionId :: IORef (Maybe ByteString)
   }
 
 newtype AppT m a = AppT {unAppT :: ReaderT Context m a}
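
The field change above (MVar ByteString to IORef (Maybe ByteString)) matters because the old accessors used tryTakeMVar, which consumes the cached session id on every read, and putMVar, which blocks if the MVar is already full; an IORef read never consumes and a write simply overwrites. A minimal sketch of the new semantics:

import Data.IORef

main :: IO ()
main = do
  ref <- newIORef (Nothing :: Maybe String)
  readIORef ref >>= print          -- Nothing: no session id yet
  writeIORef ref (Just "sess-1")   -- first 409 response stores an id
  readIORef ref >>= print          -- Just "sess-1", still cached after the read
  writeIORef ref (Just "sess-2")   -- a later 409 just overwrites, never blocks
  readIORef ref >>= print
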
diff --git a/users/Profpatsch/whatcd-resolver/src/Http.hs b/users/Profpatsch/whatcd-resolver/src/Http.hs
index 4fdbb306ad..487d55c21d 100644
--- a/users/Profpatsch/whatcd-resolver/src/Http.hs
+++ b/users/Profpatsch/whatcd-resolver/src/Http.hs
@@ -16,16 +16,14 @@ where
 import AppT
 import Data.CaseInsensitive (CI (original))
 import Data.Char qualified as Char
-import Data.Int (Int64)
 import Data.List qualified as List
 import Data.Text qualified as Text
-import Data.Text.Lazy qualified as Lazy.Text
 import Data.Text.Punycode qualified as Punycode
 import Json.Enc qualified as Enc
 import MyPrelude
 import Network.HTTP.Client
 import Network.HTTP.Simple
-import OpenTelemetry.Attributes qualified as Otel
+import Network.HTTP.Types.Status (Status (..))
 import Optional
 import Prelude hiding (span)
 
@@ -55,20 +53,24 @@ doRequestJson ::
   Enc.Enc ->
   m (Response ByteString)
 doRequestJson opts val = inSpan' "HTTP Request (JSON)" $ \span -> do
-  let x = requestToXhCommandLine opts val
-  let attrs = [100, 200 .. fromIntegral @Int @Int64 (x & Text.length)]
-  for_ attrs $ \n -> do
-    addAttribute span [fmt|request.xh.{n}|] (Lazy.Text.repeat 'x' & Lazy.Text.take n & toStrict & Otel.TextAttribute)
   addAttribute span "request.xh" (requestToXhCommandLine opts val)
-  defaultRequest {secure = not (opts & optsUsePlainHttp)}
-    & setRequestHost (opts & optsHost)
-    & setRequestPort (opts & optsPort)
-    -- TODO: is this automatically escaped by the library?
-    & setRequestPath (opts & optsPath)
-    & setRequestHeaders (opts & optsHeaders)
-    & setRequestMethod opts.method
-    & setRequestBodyLBS (Enc.encToBytesUtf8Lazy val)
-    & httpBS
+  resp <-
+    defaultRequest {secure = not (opts & optsUsePlainHttp)}
+      & setRequestHost (opts & optsHost)
+      & setRequestPort (opts & optsPort)
+      -- TODO: is this automatically escaped by the library?
+      & setRequestPath (opts & optsPath)
+      & setRequestHeaders (opts & optsHeaders)
+      & setRequestMethod opts.method
+      & setRequestBodyLBS (Enc.encToBytesUtf8Lazy val)
+      & httpBS
+  let code = resp & getResponseStatus & (.statusCode)
+  let msg = resp & getResponseStatus & (.statusMessage) & bytesToTextUtf8Lenient
+  addAttribute
+    span
+    "request.response.status"
+    ([fmt|{code} {msg}|] :: Text)
+  pure resp
 
 optsHost :: RequestOptions -> ByteString
 optsHost opts =
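
The "{code} {msg}" span attribute above is built with the codebase's own helpers; a standalone equivalent using only http-client and text (lenient UTF-8 decoding of the status message, mirroring bytesToTextUtf8Lenient) might look like this:

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text, pack)
import Data.Text.Encoding (decodeUtf8With)
import Data.Text.Encoding.Error (lenientDecode)
import Network.HTTP.Client (Response, responseStatus)
import Network.HTTP.Types.Status (statusCode, statusMessage)

-- e.g. "404 Not Found", decoded leniently so bad bytes never throw
statusText :: Response a -> Text
statusText resp =
  let st = responseStatus resp
   in pack (show (statusCode st)) <> " " <> decodeUtf8With lenientDecode (statusMessage st)
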
diff --git a/users/Profpatsch/whatcd-resolver/src/JsonLd.hs b/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
index 16b1ab991b..1a021b706c 100644
--- a/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
+++ b/users/Profpatsch/whatcd-resolver/src/JsonLd.hs
@@ -3,7 +3,6 @@
 module JsonLd where
 
 import AppT
-import Control.Monad.Reader
 import Data.Aeson qualified as Json
 import Data.Aeson.BetterErrors qualified as Json
 import Data.ByteString.Builder qualified as Builder
diff --git a/users/Profpatsch/whatcd-resolver/src/Redacted.hs b/users/Profpatsch/whatcd-resolver/src/Redacted.hs
index c0c26b72d6..c0ad9071af 100644
--- a/users/Profpatsch/whatcd-resolver/src/Redacted.hs
+++ b/users/Profpatsch/whatcd-resolver/src/Redacted.hs
@@ -3,6 +3,7 @@
 module Redacted where
 
 import AppT
+import Arg
 import Control.Monad.Logger.CallStack
 import Control.Monad.Reader
 import Data.Aeson qualified as Json
@@ -67,12 +68,8 @@ redactedGetTorrentFile dat = inSpan' "Redacted Get Torrent File" $ \span -> do
       )
   httpTorrent span req
 
--- fix
---   ( \io -> do
---       logInfo "delay"
---       liftIO $ threadDelay 10_000_000
---       io
---   )
+mkRedactedTorrentLink :: Arg "torrentGroupId" Int -> Text
+mkRedactedTorrentLink groupId = [fmt|https://redacted.ch/torrents.php?id={groupId.unArg}|]
 
 exampleSearch :: (MonadThrow m, MonadLogger m, MonadPostgres m, MonadOtel m) => m (Transaction m ())
 exampleSearch = do
@@ -360,11 +357,17 @@ data TorrentData transmissionInfo = TorrentData
   { groupId :: Int,
     torrentId :: Int,
     seedingWeight :: Int,
+    artists :: [T2 "artistId" Int "artistName" Text],
     torrentJson :: Json.Value,
-    torrentGroupJson :: T3 "artist" Text "groupName" Text "groupYear" Int,
+    torrentGroupJson :: TorrentGroupJson,
     torrentStatus :: TorrentStatus transmissionInfo
   }
 
+data TorrentGroupJson = TorrentGroupJson
+  { groupName :: Text,
+    groupYear :: Int
+  }
+
 data TorrentStatus transmissionInfo
   = NoTorrentFileYet
   | NotInTransmissionYet
@@ -381,41 +384,70 @@ getTorrentById dat = do
     (Dec.json Json.asValue)
     >>= ensureSingleRow
 
+data GetBestTorrentsFilter = GetBestTorrentsFilter
+  { onlyDownloaded :: Bool,
+    onlyArtist :: Maybe (Label "artistRedactedId" Natural)
+  }
+
 -- | Find the best torrent for each torrent group (based on the seeding_weight)
-getBestTorrents :: (MonadPostgres m, HasField "onlyDownloaded" opts Bool) => opts -> Transaction m [TorrentData ()]
+getBestTorrents ::
+  (MonadPostgres m) =>
+  GetBestTorrentsFilter ->
+  Transaction m [TorrentData ()]
 getBestTorrents opts = do
   queryWith
     [sql|
-      SELECT * FROM (
-        SELECT DISTINCT ON (group_id)
-          tg.group_id,
-          t.torrent_id,
-          seeding_weight,
-          t.full_json_result AS torrent_json,
-          tg.full_json_result AS torrent_group_json,
-          t.torrent_file IS NOT NULL as has_torrent_file,
-          t.transmission_torrent_hash
-        FROM redacted.torrents t
-        JOIN redacted.torrent_groups tg ON tg.id = t.torrent_group
-        ORDER BY group_id, seeding_weight DESC
-      ) as _
-      WHERE
-        -- onlyDownloaded
-        ((NOT ?::bool) OR has_torrent_file)
+      WITH filtered_torrents AS (
+        SELECT DISTINCT ON (torrent_group)
+          id
+        FROM
+          redacted.torrents
+        WHERE
+          -- onlyDownloaded
+          ((NOT ?::bool) OR torrent_file IS NOT NULL)
+          -- filter by artist id
+          AND
+          (?::bool OR (to_jsonb(?::int) <@ (jsonb_path_query_array(full_json_result, '$.artists[*].id'))))
+        ORDER BY torrent_group, seeding_weight DESC
+      )
+      SELECT
+        tg.group_id,
+        t.torrent_id,
+        t.seeding_weight,
+        t.full_json_result AS torrent_json,
+        tg.full_json_result AS torrent_group_json,
+        t.torrent_file IS NOT NULL AS has_torrent_file,
+        t.transmission_torrent_hash
+      FROM filtered_torrents f
+      JOIN redacted.torrents t ON t.id = f.id
+      JOIN redacted.torrent_groups tg ON tg.id = t.torrent_group
       ORDER BY seeding_weight DESC
     |]
-    (Only opts.onlyDownloaded :: Only Bool)
+    ( do
+        let (onlyArtistB, onlyArtistId) = case opts.onlyArtist of
+              Nothing -> (True, 0)
+              Just a -> (False, a.artistRedactedId)
+        ( opts.onlyDownloaded :: Bool,
+          onlyArtistB :: Bool,
+          onlyArtistId & fromIntegral @Natural @Int
+          )
+    )
     ( do
         groupId <- Dec.fromField @Int
         torrentId <- Dec.fromField @Int
         seedingWeight <- Dec.fromField @Int
-        torrentJson <- Dec.json Json.asValue
+        (torrentJson, artists) <- Dec.json $ do
+          val <- Json.asValue
+          artists <- Json.keyOrDefault "artists" [] $ Json.eachInArray $ do
+            id_ <- Json.keyLabel @"artistId" "id" (Json.asIntegral @_ @Int)
+            name <- Json.keyLabel @"artistName" "name" Json.asText
+            pure $ T2 id_ name
+          pure (val, artists)
         torrentGroupJson <-
           ( Dec.json $ do
-              artist <- Json.keyLabel @"artist" "artist" Json.asText
-              groupName <- Json.keyLabel @"groupName" "groupName" Json.asText
-              groupYear <- Json.keyLabel @"groupYear" "groupYear" (Json.asIntegral @_ @Int)
-              pure $ T3 artist groupName groupYear
+              groupName <- Json.key "groupName" Json.asText
+              groupYear <- Json.key "groupYear" (Json.asIntegral @_ @Int)
+              pure $ TorrentGroupJson {..}
             )
         hasTorrentFile <- Dec.fromField @Bool
         transmissionTorrentHash <-
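
The artist filter in the SQL above is disabled by parameter choice rather than by building dynamic SQL: with no filter, the Haskell side passes (True, 0) so the OR short-circuits and the dummy id is never consulted; with a filter it passes (False, artistId) and requires the id to appear in the torrent's artists array. The same predicate in plain Haskell:

matchesArtistFilter :: Maybe Int -> [Int] -> Bool
matchesArtistFilter filterId torrentArtistIds =
  let (skipFilter, wantedId) = case filterId of
        Nothing -> (True, 0)   -- dummy id, never consulted
        Just a  -> (False, a)
   in skipFilter || wantedId `elem` torrentArtistIds
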
diff --git a/users/Profpatsch/whatcd-resolver/src/Transmission.hs b/users/Profpatsch/whatcd-resolver/src/Transmission.hs
index 66dbeb9ce7..acbab00162 100644
--- a/users/Profpatsch/whatcd-resolver/src/Transmission.hs
+++ b/users/Profpatsch/whatcd-resolver/src/Transmission.hs
@@ -25,6 +25,7 @@ import Json.Enc qualified as Enc
 import Label
 import MyPrelude
 import Network.HTTP.Types
+import OpenTelemetry.Attributes (ToAttribute (toAttribute))
 import OpenTelemetry.Trace qualified as Otel hiding (getTracer, inSpan, inSpan')
 import Optional
 import Postgres.MonadPostgres
@@ -226,7 +227,7 @@ doTransmissionRequest ::
   (TransmissionRequest, Json.Parse Error output) ->
   m (TransmissionResponse output)
 doTransmissionRequest span dat (req, parser) = do
-  sessionId <- getTransmissionId
+  sessionId <- getCurrentTransmissionSessionId
   let textArg t = (Enc.text t, Otel.toAttribute @Text t)
   let encArg enc = (enc, Otel.toAttribute @Text $ enc & Enc.encToTextPretty)
   let intArg i = (Enc.int i, Otel.toAttribute @Int i)
@@ -257,7 +258,7 @@ doTransmissionRequest span dat (req, parser) = do
       (body <&> second fst & Enc.object)
   -- Implement the CSRF protection thingy
   case resp & Http.getResponseStatus & (.statusCode) of
-    409 -> do
+    409 -> inSpan' "New Transmission Session ID" $ \span' -> do
       tid <-
         resp
           & Http.getResponseHeader "X-Transmission-Session-Id"
@@ -266,9 +267,21 @@ doTransmissionRequest span dat (req, parser) = do
           & unwrapIOError
           & liftIO
           <&> NonEmpty.head
-      setTransmissionId tid
+
+      addAttributes span' $
+        HashMap.fromList
+          [ ("transmission.new_session_id", tid & bytesToTextUtf8Lenient & toAttribute),
+            ("transmission.old_session_id", sessionId <&> bytesToTextUtf8Lenient & fromMaybe "<none yet>" & toAttribute)
+          ]
+
+      updateTransmissionSessionId tid
+
       doTransmissionRequest span dat (req, parser)
-    200 ->
+    200 -> do
+      addAttributes span $
+        HashMap.fromList
+          [ ("transmission.valid_session_id", sessionId <&> bytesToTextUtf8Lenient & fromMaybe "<none yet>" & toAttribute)
+          ]
       resp
         & Http.getResponseBody
         & Json.parseStrict
@@ -296,11 +309,11 @@ doTransmissionRequest span dat (req, parser) = do
     _ -> liftIO $ unwrapIOError $ Left [fmt|Non-200 response: {showPretty resp}|]
 
 class MonadTransmission m where
-  getTransmissionId :: m (Maybe ByteString)
-  setTransmissionId :: ByteString -> m ()
+  getCurrentTransmissionSessionId :: m (Maybe ByteString)
+  updateTransmissionSessionId :: ByteString -> m ()
 
 instance (MonadIO m) => MonadTransmission (AppT m) where
-  getTransmissionId = AppT (asks (.transmissionSessionId)) >>= tryTakeMVar
-  setTransmissionId t = do
+  getCurrentTransmissionSessionId = AppT (asks (.transmissionSessionId)) >>= readIORef
+  updateTransmissionSessionId t = do
     var <- AppT $ asks (.transmissionSessionId)
-    putMVar var t
+    writeIORef var (Just t)
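
The 409 handling above is Transmission's CSRF dance: a 409 response carries a fresh X-Transmission-Session-Id header, which gets cached and the request retried. A self-contained sketch of that loop with hypothetical stand-in names (Resp, send) instead of the real HTTP call:

import Data.IORef

data Resp = Resp { status :: Int, newSessionId :: Maybe String, body :: String }

-- hypothetical stand-in for the real HTTP request
send :: Maybe String -> IO Resp
send (Just sid) = pure (Resp 200 Nothing ("ok, session " <> sid))
send Nothing    = pure (Resp 409 (Just "fresh-id") "")

request :: IORef (Maybe String) -> IO String
request cache = do
  sid <- readIORef cache
  resp <- send sid
  case status resp of
    409 -> do
      writeIORef cache (newSessionId resp) -- cache the id from X-Transmission-Session-Id
      request cache                        -- retry with the new id
    200 -> pure (body resp)
    _   -> fail "unexpected status"

main :: IO ()
main = newIORef Nothing >>= request >>= putStrLn
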
diff --git a/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs b/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
index 1ec23e1fc7..73a9dccb12 100644
--- a/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
+++ b/users/Profpatsch/whatcd-resolver/src/WhatcdResolver.hs
@@ -3,6 +3,7 @@
 module WhatcdResolver where
 
 import AppT
+import Arg
 import Control.Category qualified as Cat
 import Control.Monad.Catch.Pure (runCatch)
 import Control.Monad.Logger.CallStack
@@ -10,6 +11,7 @@ import Control.Monad.Reader
 import Data.Aeson qualified as Json
 import Data.Aeson.BetterErrors qualified as Json
 import Data.Aeson.KeyMap qualified as KeyMap
+import Data.Error.Tree (prettyErrorTree)
 import Data.HashMap.Strict qualified as HashMap
 import Data.List qualified as List
 import Data.Map.Strict qualified as Map
@@ -23,6 +25,7 @@ import FieldParser (FieldParser, FieldParser' (..))
 import FieldParser qualified as Field
 import Html qualified
 import IHP.HSX.QQ (hsx)
+import IHP.HSX.ToHtml (ToHtml)
 import Json qualified
 import Json.Enc (Enc)
 import Json.Enc qualified as Enc
@@ -105,9 +108,6 @@ htmlUi = do
               ( do
                   label @"torrentId" <$> Multipart.field "torrent-id" ((Field.utf8 >>> Field.signedDecimal >>> Field.bounded @Int "int"))
               )
-      let parseQueryArgs span parser =
-            Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) req
-              & assertM span id
 
       let parseQueryArgsNewSpan spanName parser =
             Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) req
@@ -196,27 +196,29 @@ htmlUi = do
                         Just _torrent -> [hsx|Running|]
                 ),
                 ( "snips/jsonld/render",
-                  respond.html $ \span -> do
-                    qry <-
-                      parseQueryArgs
-                        span
-                        ( label @"target"
-                            <$> ( (singleQueryArgument "target" Field.utf8 >>> textToURI)
-                                    & Parse.andParse uriToHttpClientRequest
-                                )
-                        )
-                    jsonld <- httpGetJsonLd (qry.target)
-                    pure $ renderJsonld jsonld
+                  do
+                    let HandlerResponses {htmlWithQueryArgs} = respond
+                    htmlWithQueryArgs
+                      ( label @"target"
+                          <$> ( (singleQueryArgument "target" Field.utf8 >>> textToURI)
+                                  & Parse.andParse uriToHttpClientRequest
+                              )
+                      )
+                      ( \qry _span -> do
+                          jsonld <- httpGetJsonLd (qry.target)
+                          pure $ renderJsonld jsonld
+                      )
                 ),
                 ( "artist",
-                  respond.html $ \span -> do
-                    qry <-
-                      parseQueryArgs
-                        span
-                        ( label @"dbId"
-                            <$> (singleQueryArgument "db_id" Field.utf8)
-                        )
-                    artistPage qry
+                  do
+                    let HandlerResponses {htmlWithQueryArgs} = respond
+
+                    htmlWithQueryArgs
+                      ( label @"artistRedactedId"
+                          <$> (singleQueryArgument "redacted_id" (Field.utf8 >>> Field.decimalNatural))
+                      )
+                      $ \qry _span -> do
+                        artistPage qry
                 ),
                 ( "autorefresh",
                   respond.plain $ do
@@ -256,13 +258,46 @@ htmlUi = do
       --       "https://musicbrainz.org/work/92000fd4-d304-406d-aeb4-6bdbeed318ec"
       --     )
       --     <&> renderJsonld
-      bestTorrentsTable <- getBestTorrentsTable
+      bestTorrentsTable <- getBestTorrentsTable Nothing
       -- transmissionTorrentsTable <- lift @Transaction getTransmissionTorrentsTable
       pure $
-        Html.docTypeHtml
+        htmlPageChrome
+          "whatcd-resolver"
           [hsx|
+            <form
+              hx-post="/snips/redacted/search"
+              hx-target="#redacted-search-results">
+              <label for="redacted-search">Redacted Search</label>
+              <input
+                id="redacted-search"
+                type="text"
+                name="redacted-search" />
+              <button type="submit" hx-disabled-elt="this">Search</button>
+              <div class="htmx-indicator">Search running!</div>
+            </form>
+            <div id="redacted-search-results">
+              {bestTorrentsTable}
+            </div>
+            <!-- refresh the page if the uniqueRunId is different -->
+            <input
+                hidden
+                type="text"
+                id="autorefresh"
+                name="hasItBeenRestarted"
+                value={uniqueRunId}
+                hx-get="/autorefresh"
+                hx-trigger="every 5s"
+                hx-swap="none"
+            />
+        |]
+
+htmlPageChrome :: (ToHtml a) => Text -> a -> Html
+htmlPageChrome title body =
+  Html.docTypeHtml $
+    [hsx|
       <head>
-        <title>whatcd-resolver</title>
+        <!-- TODO: set nice page title for each page -->
+        <title>{title}</title>
         <meta charset="utf-8">
         <meta name="viewport" content="width=device-width, initial-scale=1">
         <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet" integrity="sha384-9ndCyUaIbzAi2FUVXJi0CjmCapSmO7SnpJef0486qhLnuZ2cdeRhO02iuK6FUUVM" crossorigin="anonymous">
@@ -277,46 +312,43 @@ htmlUi = do
         </style>
       </head>
       <body>
-        <form
-          hx-post="/snips/redacted/search"
-          hx-target="#redacted-search-results">
-          <label for="redacted-search">Redacted Search</label>
-          <input
-            id="redacted-search"
-            type="text"
-            name="redacted-search" />
-          <button type="submit" hx-disabled-elt="this">Search</button>
-          <div class="htmx-indicator">Search running!</div>
-        </form>
-        <div id="redacted-search-results">
-          {bestTorrentsTable}
-        </div>
-        <!-- refresh the page if the uniqueRunId is different -->
-        <input
-             hidden
-             type="text"
-             id="autorefresh"
-             name="hasItBeenRestarted"
-             value={uniqueRunId}
-             hx-get="/autorefresh"
-             hx-trigger="every 5s"
-             hx-swap="none"
-        />
+        {body}
       </body>
     |]
 
-artistPage :: (HasField "dbId" dat Text, Applicative m) => dat -> m Html
-artistPage dat = do
-  pure
-    [hsx|
-    Artist ID: {dat.dbId}
-  |]
+artistPage ::
+  ( HasField "artistRedactedId" dat Natural,
+    MonadPostgres m,
+    MonadOtel m,
+    MonadLogger m,
+    MonadThrow m,
+    MonadTransmission m
+  ) =>
+  dat ->
+  m Html
+artistPage dat = runTransaction $ do
+  fresh <- getBestTorrentsData (Just $ getLabel @"artistRedactedId" dat)
+  let artistName = fresh & findMaybe (\t -> t.artists & findMaybe (\a -> if a.artistId == (dat.artistRedactedId & fromIntegral @Natural @Int) then Just a.artistName else Nothing))
+  let torrents = mkBestTorrentsTable fresh
+  pure $
+    htmlPageChrome
+      ( case artistName of
+          Nothing -> "whatcd-resolver"
+          Just a -> [fmt|{a} - Artist Page - whatcd-resolver|]
+      )
+      [hsx|
+        Artist ID: {dat.artistRedactedId}
+
+        {torrents}
+      |]
 
 type Handlers m = HandlerResponses m -> Map Text (m ResponseReceived)
 
 data HandlerResponses m = HandlerResponses
   { -- | render html
-    html :: ((Otel.Span -> m Html) -> m ResponseReceived),
+    html :: (Otel.Span -> m Html) -> m ResponseReceived,
+    -- | render html after parsing some query arguments
+    htmlWithQueryArgs :: forall a. (Parse Query a -> (a -> Otel.Span -> m Html) -> m ResponseReceived),
     -- | render a plain wai response
     plain :: (m Wai.Response -> m ResponseReceived)
   }
@@ -330,23 +362,45 @@ runHandlers ::
   m ResponseReceived
 runHandlers defaultHandler handlers req respond = withRunInIO $ \runInIO -> do
   let path = req & Wai.pathInfo & Text.intercalate "/"
+  let html act =
+        Otel.inSpan'
+          [fmt|Route /{path}|]
+          ( Otel.defaultSpanArguments
+              { Otel.attributes =
+                  HashMap.fromList
+                    [ ("_.server.path", Otel.toAttribute @Text path),
+                      ("_.server.query_args", Otel.toAttribute @Text (req.rawQueryString & bytesToTextUtf8Lenient))
+                    ]
+              }
+          )
+          ( \span -> do
+              res <- act span <&> (\h -> T2 (label @"html" h) (label @"extraHeaders" []))
+              liftIO $ respond . Wai.responseLBS Http.ok200 ([("Content-Type", "text/html")] <> res.extraHeaders) . Html.renderHtml $ res.html
+          )
+
   let handlerResponses =
         ( HandlerResponses
             { plain = (\m -> liftIO $ runInIO m >>= respond),
-              html = \act ->
-                Otel.inSpan'
-                  [fmt|Route /{path}|]
-                  ( Otel.defaultSpanArguments
-                      { Otel.attributes =
-                          HashMap.fromList
-                            [ ("server.path", Otel.toAttribute @Text path)
-                            ]
-                      }
-                  )
-                  ( \span -> do
-                      res <- act span <&> (\html -> T2 (label @"html" html) (label @"extraHeaders" []))
-                      liftIO $ respond . Wai.responseLBS Http.ok200 ([("Content-Type", "text/html")] <> res.extraHeaders) . Html.renderHtml $ res.html
-                  )
+              html,
+              htmlWithQueryArgs = \parser act ->
+                case req & Parse.runParse "Unable to find the right request query arguments" (lmap Wai.queryString parser) of
+                  Right a -> html (act a)
+                  Left err ->
+                    html
+                      ( \span -> do
+                          recordException
+                            span
+                            ( T2
+                                (label @"type_" "Query Parse Exception")
+                                (label @"message" (prettyErrorTree err))
+                            )
+
+                          pure
+                            [hsx|
+                              <h1>Error:</h1>
+                              <pre>{err & prettyErrorTree}</pre>
+                            |]
+                      )
             }
         )
   let handler =
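
htmlWithQueryArgs above centralizes the parse-then-render pattern: run the query parser first, and route either the handler's page or an error page through the same html wrapper, so every route gets identical span and error handling. The shape of it, reduced to a self-contained sketch with illustrative names:

htmlWithQueryArgs' ::
  (query -> Either String args) ->  -- parser over the request query
  (args -> IO String) ->            -- page handler
  query ->
  IO String                         -- rendered page either way
htmlWithQueryArgs' parser act query =
  case parser query of
    Right args -> act args
    Left err   -> pure ("<h1>Error:</h1><pre>" <> err <> "</pre>")
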
@@ -427,7 +481,11 @@ snipsRedactedSearch dat = do
       ]
   runTransaction $ do
     t
-    getBestTorrentsTable
+    getBestTorrentsTable (Nothing :: Maybe (Label "artistRedactedId" Natural))
+
+data ArtistFilter = ArtistFilter
+  { onlyArtist :: Maybe (Label "artistId" Text)
+  }
 
 getBestTorrentsTable ::
   ( MonadTransmission m,
@@ -436,9 +494,23 @@ getBestTorrentsTable ::
     MonadPostgres m,
     MonadOtel m
   ) =>
+  Maybe (Label "artistRedactedId" Natural) ->
   Transaction m Html
-getBestTorrentsTable = do
-  bestStale :: [TorrentData ()] <- getBestTorrents (label @"onlyDownloaded" False)
+getBestTorrentsTable dat = do
+  fresh <- getBestTorrentsData dat
+  pure $ mkBestTorrentsTable fresh
+
+getBestTorrentsData ::
+  ( MonadTransmission m,
+    MonadThrow m,
+    MonadLogger m,
+    MonadPostgres m,
+    MonadOtel m
+  ) =>
+  Maybe (Label "artistRedactedId" Natural) ->
+  Transaction m [TorrentData (Label "percentDone" Percentage)]
+getBestTorrentsData artistFilter = do
+  bestStale :: [TorrentData ()] <- getBestTorrents GetBestTorrentsFilter {onlyArtist = artistFilter, onlyDownloaded = False}
   actual <-
     getAndUpdateTransmissionTorrentsStatus
       ( bestStale
@@ -450,20 +522,23 @@ getBestTorrentsTable = do
           <&> (\t -> (getLabel @"torrentHash" t, t.transmissionInfo))
           & Map.fromList
       )
-  let fresh =
-        bestStale
-          --  we have to update the status of every torrent that’s not in tranmission anymore
-          -- TODO I feel like it’s easier (& more correct?) to just do the database request again …
-          <&> ( \td -> case td.torrentStatus of
-                  InTransmission info ->
-                    case actual & Map.lookup (getLabel @"torrentHash" info) of
-                      -- TODO this is also pretty dumb, cause it assumes that we have the torrent file if it was in transmission before,
-                      -- which is an internal factum that is established in getBestTorrents (and might change later)
-                      Nothing -> td {torrentStatus = NotInTransmissionYet}
-                      Just transmissionInfo -> td {torrentStatus = InTransmission (T2 (getLabel @"torrentHash" info) (label @"transmissionInfo" transmissionInfo))}
-                  NotInTransmissionYet -> td {torrentStatus = NotInTransmissionYet}
-                  NoTorrentFileYet -> td {torrentStatus = NoTorrentFileYet}
-              )
+  pure $
+    bestStale
+      --  we have to update the status of every torrent that’s not in transmission anymore
+      -- TODO I feel like it’s easier (& more correct?) to just do the database request again …
+      <&> ( \td -> case td.torrentStatus of
+              InTransmission info ->
+                case actual & Map.lookup (getLabel @"torrentHash" info) of
+                  -- TODO this is also pretty dumb, because it assumes that we have the torrent file if it was in transmission before,
+                  -- which is an internal fact that is established in getBestTorrents (and might change later)
+                  Nothing -> td {torrentStatus = NotInTransmissionYet}
+                  Just transmissionInfo -> td {torrentStatus = InTransmission (T2 (getLabel @"torrentHash" info) (label @"transmissionInfo" transmissionInfo))}
+              NotInTransmissionYet -> td {torrentStatus = NotInTransmissionYet}
+              NoTorrentFileYet -> td {torrentStatus = NoTorrentFileYet}
+          )
+
+mkBestTorrentsTable :: [TorrentData (Label "percentDone" Percentage)] -> Html
+mkBestTorrentsTable fresh = do
   let localTorrent b = case b.torrentStatus of
         NoTorrentFileYet -> [hsx|<button hx-post="snips/redacted/getTorrentFile" hx-swap="outerHTML" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}>Upload Torrent</button>|]
         InTransmission info -> [hsx|{info.transmissionInfo.percentDone.unPercentage}% done|]
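
The merge above trusts the live Map fetched from Transmission over the stale database rows: a row claiming InTransmission is demoted unless its hash is still present in the daemon's answer. Simplified types (the real code carries labelled hashes and percent-done):

import qualified Data.Map.Strict as Map

data Status = NoFile | NotInTransmission | InTransmission String
  deriving (Show)

refresh :: Map.Map String String -> (String, Status) -> (String, Status)
refresh live (hash, InTransmission _) =
  case Map.lookup hash live of
    Nothing   -> (hash, NotInTransmission)    -- torrent vanished from the daemon
    Just info -> (hash, InTransmission info)  -- still known, take the live info
refresh _ other = other
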
@@ -472,24 +547,34 @@ getBestTorrentsTable = do
         fresh
           & foldMap
             ( \b -> do
-                let artistLink :: Text = [fmt|/artist?db_id={b.groupId}|]
+                let artists =
+                      b.artists
+                        <&> ( \a ->
+                                T2
+                                  (label @"url" [fmt|/artist?redacted_id={a.artistId}|])
+                                  (label @"content" $ Html.toHtml @Text a.artistName)
+                            )
+                        & mkLinkList
+
                 [hsx|
                   <tr>
                   <td>{localTorrent b}</td>
                   <td>{Html.toHtml @Int b.groupId}</td>
                   <td>
-                    <a href={artistLink}>
-                      {Html.toHtml @Text b.torrentGroupJson.artist}
+                    {artists}
+                  </td>
+                  <td>
+                    <a href={mkRedactedTorrentLink (Arg b.groupId)} target="_blank">
+                      {Html.toHtml @Text b.torrentGroupJson.groupName}
                     </a>
                   </td>
-                  <td>{Html.toHtml @Text b.torrentGroupJson.groupName}</td>
+                  <td>{Html.toHtml @Int b.torrentGroupJson.groupYear}</td>
                   <td>{Html.toHtml @Int b.seedingWeight}</td>
                   <td><details hx-trigger="toggle once" hx-post="snips/redacted/torrentDataJson" hx-vals={Enc.encToBytesUtf8 $ Enc.object [("torrent-id", Enc.int b.torrentId)]}></details></td>
                   </tr>
                 |]
             )
-  pure $
-    [hsx|
+  [hsx|
         <table class="table">
           <thead>
             <tr>
@@ -497,6 +582,7 @@ getBestTorrentsTable = do
               <th>Group ID</th>
               <th>Artist</th>
               <th>Name</th>
+              <th>Year</th>
               <th>Weight</th>
               <th>Torrent</th>
               <th>Torrent Group</th>
@@ -508,6 +594,15 @@ getBestTorrentsTable = do
         </table>
       |]
 
+mkLinkList :: [T2 "url" Text "content" Html] -> Html
+mkLinkList xs =
+  xs
+    <&> ( \x -> do
+            [hsx|<a href={x.url}>{x.content}</a>|]
+        )
+    & List.intersperse ", "
+    & mconcat
+
 getTransmissionTorrentsTable ::
   (MonadTransmission m, MonadThrow m, MonadLogger m, MonadOtel m) => m Html
 getTransmissionTorrentsTable = do
@@ -571,35 +666,48 @@ migrate = inSpan "Database Migration" $ do
       UNIQUE(torrent_id)
     );
 
+    CREATE INDEX IF NOT EXISTS redacted_torrents_json_torrent_group_fk ON redacted.torrents_json (torrent_group);
+
+
     ALTER TABLE redacted.torrents_json
     ADD COLUMN IF NOT EXISTS torrent_file bytea NULL;
     ALTER TABLE redacted.torrents_json
     ADD COLUMN IF NOT EXISTS transmission_torrent_hash text NULL;
 
-    -- inflect out values of the full json
 
+    -- the seeding weight is used to find the best torrent in a group.
+    CREATE OR REPLACE FUNCTION calc_seeding_weight(full_json_result jsonb) RETURNS int AS $$
+    BEGIN
+      RETURN
+        ((full_json_result->'seeders')::integer*3
+        + (full_json_result->'snatches')::integer
+        )
+        -- prefer remasters by multiplying them by 3
+        * (CASE
+            WHEN full_json_result->>'remasterTitle' ILIKE '%remaster%'
+            THEN 3
+            ELSE 1
+          END);
+    END;
+    $$ LANGUAGE plpgsql IMMUTABLE;
+
+    ALTER TABLE redacted.torrents_json
+    ADD COLUMN IF NOT EXISTS seeding_weight int GENERATED ALWAYS AS (calc_seeding_weight(full_json_result)) STORED;
+
+    -- inflect out values of the full json
     CREATE OR REPLACE VIEW redacted.torrents AS
     SELECT
       t.id,
       t.torrent_id,
       t.torrent_group,
       -- the seeding weight is used to find the best torrent in a group.
-      ( ((full_json_result->'seeders')::integer*3
-        + (full_json_result->'snatches')::integer
-        )
-      -- prefer remasters by multiplying them with 3
-      * (CASE
-          WHEN full_json_result->>'remasterTitle' ILIKE '%remaster%'
-          THEN 3
-          ELSE 1
-         END)
-      )
-      AS seeding_weight,
+      t.seeding_weight,
       t.full_json_result,
       t.torrent_file,
       t.transmission_torrent_hash
     FROM redacted.torrents_json t;
 
+
     CREATE INDEX IF NOT EXISTS torrents_json_seeding ON redacted.torrents_json(((full_json_result->'seeding')::integer));
     CREATE INDEX IF NOT EXISTS torrents_json_snatches ON redacted.torrents_json(((full_json_result->'snatches')::integer));
   |]
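
The weight formula moves from an inline view expression into calc_seeding_weight, backing a GENERATED ALWAYS AS ... STORED column, so the view and any index can reuse the precomputed value. The same computation rendered in Haskell (ILIKE '%remaster%' becomes a case-folded substring test):

import Data.Char (toLower)
import Data.List (isInfixOf)

seedingWeight :: Int -> Int -> String -> Int
seedingWeight seeders snatches remasterTitle =
  (seeders * 3 + snatches)
    * (if "remaster" `isInfixOf` map toLower remasterTitle then 3 else 1)
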
@@ -649,7 +757,7 @@ runAppWith appT = withTracer $ \tracer -> withDb $ \db -> do
         {- resource destruction -} Postgres.close
         {- unusedResourceOpenTime -} 10
         {- max resources across all stripes -} 20
-  transmissionSessionId <- newEmptyMVar
+  transmissionSessionId <- newIORef Nothing
   let newAppT = do
         logInfo [fmt|Running with config: {showPretty config}|]
         logInfo [fmt|Connected to database at {db & TmpPg.toDataDirectory} on socket {db & TmpPg.toConnectionString}|]
diff --git a/users/flokli/ipu6-softisp/default.nix b/users/flokli/ipu6-softisp/default.nix
index 1f603dbb42..66a2f04a51 100644
--- a/users/flokli/ipu6-softisp/default.nix
+++ b/users/flokli/ipu6-softisp/default.nix
@@ -42,14 +42,14 @@ depot.nix.readTree.drvTargets rec {
   });
 
   # Make sure the firmware requested by the driver is present in our firmware.
-  # We do have a .xz suffix here, but that's fine, since request_firmware does
-  # check ${name}.xz too in case CONFIG_FW_LOADER_COMPRESS is set.
+  # We do have a .zst suffix here, but that's fine, since request_firmware
+  # also checks ${name}.zst when CONFIG_FW_LOADER_COMPRESS is set.
   # The path needs to be kept in sync with the ones used in the kernel patch.
   checkFirmware = pkgs.runCommand "check-firmware" { } ''
-    stat ${testSystem}/firmware/intel/ipu/ipu6se_fw.bin.xz
-    stat ${testSystem}/firmware/intel/ipu/ipu6ep_fw.bin.xz
-    stat ${testSystem}/firmware/intel/ipu/ipu6_fw.bin.xz
-    stat ${testSystem}/firmware/intel/ipu/ipu6epmtl_fw.bin.xz
+    stat ${testSystem}/firmware/intel/ipu/ipu6se_fw.bin.zst
+    stat ${testSystem}/firmware/intel/ipu/ipu6ep_fw.bin.zst
+    stat ${testSystem}/firmware/intel/ipu/ipu6_fw.bin.zst
+    stat ${testSystem}/firmware/intel/ipu/ipu6epmtl_fw.bin.zst
 
     # all good, succeed build
     touch $out
diff --git a/users/tazjin/chase-geese/default.nix b/users/tazjin/chase-geese/default.nix
index 3549f75868..595ca92896 100644
--- a/users/tazjin/chase-geese/default.nix
+++ b/users/tazjin/chase-geese/default.nix
@@ -9,5 +9,5 @@ pkgs.writeShellScriptBin "chase-geese" ''
 
   echo "Mounting the cloud ..."
   mkdir -p ~/cloud
-  ${depot.third_party.geesefs}/bin/geesefs tazjins-files ~/cloud
+  ${pkgs.geesefs}/bin/geesefs tazjins-files ~/cloud
 ''
diff --git a/users/tazjin/emacs/config/settings.el b/users/tazjin/emacs/config/settings.el
index 6c66ca608d..afe181b70b 100644
--- a/users/tazjin/emacs/config/settings.el
+++ b/users/tazjin/emacs/config/settings.el
@@ -19,6 +19,9 @@
       ediff-split-window-function 'split-window-horizontally
       initial-major-mode 'emacs-lisp-mode)
 
+(setq-default tab-width 4)
+(setq-default fill-column 80)
+
 (add-to-list 'safe-local-variable-values '(lexical-binding . t))
 (add-to-list 'safe-local-variable-values '(whitespace-line-column . 80))
 
diff --git a/users/tazjin/emacs/default.nix b/users/tazjin/emacs/default.nix
index 46843432f1..17973b8b48 100644
--- a/users/tazjin/emacs/default.nix
+++ b/users/tazjin/emacs/default.nix
@@ -10,7 +10,7 @@ pkgs.makeOverridable
 
     # If switching telega versions, use this variable because it will
     # keep the version check, binary path and so on in sync.
-    currentTelega = epkgs: epkgs.melpaPackages.telega;
+    currentTelega = epkgs: epkgs.telega;
 
     # $PATH for binaries that need to be available to Emacs
     emacsBinPath = lib.makeBinPath [
diff --git a/users/tazjin/home/arbat.nix b/users/tazjin/home/arbat.nix
new file mode 100644
index 0000000000..83daf2012c
--- /dev/null
+++ b/users/tazjin/home/arbat.nix
@@ -0,0 +1,11 @@
+# Home manager configuration for arbat.
+
+{ depot, pkgs, ... }: # readTree
+{ config, lib, ... }: # home-manager
+
+{
+  imports = [
+    depot.users.tazjin.home.shared
+    depot.users.tazjin.home.persistence
+  ];
+}
diff --git a/users/tazjin/keys/default.nix b/users/tazjin/keys/default.nix
index 16b232b094..300cd49e89 100644
--- a/users/tazjin/keys/default.nix
+++ b/users/tazjin/keys/default.nix
@@ -9,4 +9,5 @@ in withAll {
   zamalek_ed25519 = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIDBRXeb8EuecLHP0bW4zuebXp4KRnXgJTZfeVWXQ1n1R tazjin@zamalek";
   khamovnik_yk = "ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPgOyR4rRM8IaVGgN2ZxGlKtd7GLYbxdRTRa3u9EhRNSkHAvRTN9sgw7mm0iPLnHChPy10anKV43vTaIm906Gm8=";
   khamovnik_agenix = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIG4YSl5+DHQR3rOoBJLQfQ840U0CrYkByMKdzu/LDxoT tazjin@khamovnik";
+  arbat = "ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIJ1Eai0p7eF7XML5wokqF4GlVZM+YXEORfs/GPGwEky7 tazjin@arbat";
 }
diff --git a/users/tazjin/nixos/arbat/default.nix b/users/tazjin/nixos/arbat/default.nix
new file mode 100644
index 0000000000..73bed4573a
--- /dev/null
+++ b/users/tazjin/nixos/arbat/default.nix
@@ -0,0 +1,74 @@
+# arbat is my Unchartevice 6640MA, with a Zhaoxin CPU.
+{ depot, lib, pkgs, ... }:
+
+config:
+let
+  mod = name: depot.path.origSrc + ("/ops/modules/" + name);
+  usermod = name: depot.path.origSrc + ("/users/tazjin/nixos/modules/" + name);
+
+  zdevice = device: {
+    inherit device;
+    fsType = "zfs";
+  };
+in
+{
+  imports = [
+    (usermod "chromium.nix")
+    (usermod "desktop.nix")
+    (usermod "fonts.nix")
+    (usermod "home-config.nix")
+    (usermod "laptop.nix")
+    (usermod "persistence.nix")
+    (usermod "physical.nix")
+    (pkgs.home-manager.src + "/nixos")
+  ];
+
+  tvl.cache.enable = true;
+
+  boot = {
+    loader.systemd-boot.enable = true;
+    supportedFilesystems = [ "zfs" ];
+    zfs.devNodes = "/dev/";
+    # TODO: double-check this list
+    initrd.availableKernelModules = [ "ahci" "uhci_hcd" "ehci_pci" "xhci_pci" "usb_storage" "sd_mod" "rtsx_usb_sdmmc" ];
+    kernelModules = [ "kvm-intel" ]; # interesting
+  };
+
+  networking = {
+    hostName = "arbat";
+    hostId = "864f050b";
+    networkmanager.enable = true;
+  };
+
+  fileSystems = {
+    "/" = zdevice "zpool/ephemeral/root";
+    "/home" = zdevice "zpool/ephemeral/home";
+    "/persist" = zdevice "zpool/persistent/data" // { neededForBoot = true; };
+    "/nix" = zdevice "zpool/persistent/nix";
+    "/depot" = zdevice "zpool/persistent/depot";
+
+    "/boot" = {
+      device = "/dev/disk/by-uuid/B3B5-92F7";
+      fsType = "vfat";
+    };
+  };
+
+  hardware = {
+    enableRedistributableFirmware = true;
+    opengl.enable = true;
+    bluetooth.enable = true;
+  };
+
+  # TODO(tazjin): decide on this
+  services.libinput = {
+    enable = true;
+    # libinput thinks the touchpad is a mouse
+    mouse.naturalScrolling = false;
+    mouse.disableWhileTyping = true;
+  };
+  # services.xserver.libinput.touchpad.clickMethod = "clickfinger";
+  # services.xserver.libinput.touchpad.tapping = false;
+
+  nixpkgs.hostPlatform = lib.mkDefault "x86_64-linux";
+  system.stateVersion = "24.11";
+}
diff --git a/users/tazjin/nixos/default.nix b/users/tazjin/nixos/default.nix
index 8f82c39ea1..29b6a0e83b 100644
--- a/users/tazjin/nixos/default.nix
+++ b/users/tazjin/nixos/default.nix
@@ -2,6 +2,7 @@
 
 let systemFor = sys: (depot.ops.nixos.nixosFor sys).system;
 in depot.nix.readTree.drvTargets {
+  arbatSystem = systemFor depot.users.tazjin.nixos.arbat;
   camdenSystem = systemFor depot.users.tazjin.nixos.camden;
   frogSystem = systemFor depot.users.tazjin.nixos.frog;
   tverskoySystem = systemFor depot.users.tazjin.nixos.tverskoy;
diff --git a/users/tazjin/nixos/modules/desktop.nix b/users/tazjin/nixos/modules/desktop.nix
index 12a42b8faa..296960a443 100644
--- a/users/tazjin/nixos/modules/desktop.nix
+++ b/users/tazjin/nixos/modules/desktop.nix
@@ -12,14 +12,13 @@
 
     redshift.enable = true;
     blueman.enable = true;
+    libinput.enable = true;
 
     xserver = {
       enable = true;
       xkb.layout = "us";
       xkb.options = "caps:super";
 
-      libinput.enable = true;
-
       displayManager = {
         # Give EXWM permission to control the session.
         sessionCommands = "${pkgs.xorg.xhost}/bin/xhost +SI:localuser:$USER";
@@ -31,6 +30,7 @@
         name = "exwm";
         start = "${config.tazjin.emacs}/bin/tazjins-emacs --internal-border=0 --border-width=0";
       };
+      desktopManager.xfce.enable = true;
     };
   };
 
diff --git a/users/tazjin/nixos/modules/geesefs.nix b/users/tazjin/nixos/modules/geesefs.nix
index c45ee528f6..60ee821e2f 100644
--- a/users/tazjin/nixos/modules/geesefs.nix
+++ b/users/tazjin/nixos/modules/geesefs.nix
@@ -28,7 +28,7 @@
 
       mkdir -p $STATE_DIRECTORY/tazjins-files $STATE_DIRECTORY/cache
 
-      ${depot.third_party.geesefs}/bin/geesefs \
+      ${pkgs.geesefs}/bin/geesefs \
         -f -o allow_other \
         --cache $STATE_DIRECTORY/cache \
         --shared-config $CREDENTIALS_DIRECTORY/geesefs-tazjins-files \
diff --git a/users/tazjin/nixos/modules/home-config.nix b/users/tazjin/nixos/modules/home-config.nix
index bda8f7a440..77fe3f69bc 100644
--- a/users/tazjin/nixos/modules/home-config.nix
+++ b/users/tazjin/nixos/modules/home-config.nix
@@ -14,6 +14,8 @@
 
   nix.settings.trusted-users = [ "tazjin" ];
 
+  home-manager.backupFileExtension = "backup";
   home-manager.useGlobalPkgs = true;
-  home-manager.users.tazjin = depot.users.tazjin.home."${config.networking.hostName}";
+  home-manager.users.tazjin = with depot.users.tazjin;
+    home."${config.networking.hostName}" or home.shared;
 }
diff --git a/users/tazjin/nixos/modules/physical.nix b/users/tazjin/nixos/modules/physical.nix
index d469da7e5a..5ec527fa7c 100644
--- a/users/tazjin/nixos/modules/physical.nix
+++ b/users/tazjin/nixos/modules/physical.nix
@@ -45,6 +45,9 @@ in
         gdb
         git
         gnupg
+        go
+        gopls
+        gotools
         gtk3 # for gtk-launch
         htop
         hyperfine
diff --git a/users/tazjin/nixos/zamalek/default.nix b/users/tazjin/nixos/zamalek/default.nix
index a340e8a3e8..29effaa9bd 100644
--- a/users/tazjin/nixos/zamalek/default.nix
+++ b/users/tazjin/nixos/zamalek/default.nix
@@ -61,10 +61,6 @@ in
     hostId = "ee399356";
     networkmanager.enable = true;
 
-    extraHosts = ''
-      10.101.240.1 wifi.silja.fi
-    '';
-
     nameservers = [
       "8.8.8.8"
       "8.8.4.4"
@@ -82,7 +78,6 @@ in
   services.xserver.libinput.touchpad.tapping = false;
   services.avahi.enable = true;
   services.tailscale.enable = true;
-  powerManagement.powertop.enable = true;
 
   system.stateVersion = "21.11";
 }
diff --git a/web/pwcrypt/Cargo.lock b/web/pwcrypt/Cargo.lock
index 7d33ab4bf1..2a88ec59d5 100644
--- a/web/pwcrypt/Cargo.lock
+++ b/web/pwcrypt/Cargo.lock
@@ -818,9 +818,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
@@ -828,9 +828,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
+checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
 dependencies = [
  "bumpalo",
  "log",
@@ -855,9 +855,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
+checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -865,9 +865,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
+checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -878,9 +878,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
 
 [[package]]
 name = "web-sys"
diff --git a/web/pwcrypt/Cargo.toml b/web/pwcrypt/Cargo.toml
index 6c0a6e5b6d..488dcad19e 100644
--- a/web/pwcrypt/Cargo.toml
+++ b/web/pwcrypt/Cargo.toml
@@ -8,6 +8,6 @@ argon2 = "0.5.0"
 getrandom = { version = "0.2.10", features = ["js"] }
 gloo = "0.8.0"
 rand_core = { version = "0.6.4", features = ["getrandom"] }
-wasm-bindgen = "= 0.2.91"
+wasm-bindgen = "= 0.2.92"
 web-sys = "0.3"
 yew = { version = "0.20.0", features = [ "csr" ]}
diff --git a/web/tvixbolt/Cargo.lock b/web/tvixbolt/Cargo.lock
index d3c5faf10c..d7a23d7854 100644
--- a/web/tvixbolt/Cargo.lock
+++ b/web/tvixbolt/Cargo.lock
@@ -964,7 +964,6 @@ dependencies = [
  "tabwriter",
  "toml",
  "tvix-eval-builtin-macros",
- "xml-rs",
 ]
 
 [[package]]
@@ -1023,9 +1022,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f"
+checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
 dependencies = [
  "cfg-if",
  "wasm-bindgen-macro",
@@ -1033,9 +1032,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b"
+checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
 dependencies = [
  "bumpalo",
  "log",
@@ -1060,9 +1059,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed"
+checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -1070,9 +1069,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66"
+checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -1083,9 +1082,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.91"
+version = "0.2.92"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838"
+checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
 
 [[package]]
 name = "web-sys"
@@ -1129,12 +1128,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
 
 [[package]]
-name = "xml-rs"
-version = "0.8.19"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0fcb9cbac069e033553e8bb871be2fbdffcab578eb25bd0f7c508cedc6dcd75a"
-
-[[package]]
 name = "yew"
 version = "0.19.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
diff --git a/web/tvixbolt/Cargo.toml b/web/tvixbolt/Cargo.toml
index ce5ffb90e3..dafbe5ad60 100644
--- a/web/tvixbolt/Cargo.toml
+++ b/web/tvixbolt/Cargo.toml
@@ -11,7 +11,7 @@ serde_urlencoded = "*" # pinned by yew
 rnix = "0.11.0"
 
 # needs to be in sync with nixpkgs
-wasm-bindgen = "= 0.2.91"
+wasm-bindgen = "= 0.2.92"
 
 [dependencies.tvix-eval]
 path = "../../tvix/eval"